Merge branch 'release-2.55' into merge-2.55-into-main-3
commit a846bf9a5e
@@ -48,12 +48,12 @@ As is traditional with a beta release, we do **not** recommend users install 3.0
 * [CHANGE] Remove deprecated `remote-write-receiver`,`promql-at-modifier`, and `promql-negative-offset` feature flags. #13456, #14526
 * [CHANGE] Remove deprecated `storage.tsdb.allow-overlapping-blocks`, `alertmanager.timeout`, and `storage.tsdb.retention` flags. #14640, #14643
 * [ENHANCEMENT] Move AM discovery page from "Monitoring status" to "Server status". #14875
-* [FEATURE] Support config reload automatically - feature flag `auto-reload-config`. #14769
 * [BUGFIX] Scrape: Do not override target parameter labels with config params. #11029
 
 ## 2.55.0-rc.0 / 2024-09-20
 
 * [FEATURE] Support UTF-8 characters in label names - feature flag `utf8-names`. #14482, #14880, #14736, #14727
+* [FEATURE] Support config reload automatically - feature flag `auto-reload-config`. #14769
 * [FEATURE] Scraping: Add the ability to set custom `http_headers` in config. #14817
 * [FEATURE] Scraping: Support feature flag `created-timestamp-zero-ingestion` in OpenMetrics. #14356, #14815
 * [FEATURE] Scraping: `scrape_failure_log_file` option to log failures to a file. #14734
@@ -66,7 +66,7 @@ As is traditional with a beta release, we do **not** recommend users install 3.0
 * [ENHANCEMENT] Remote Read client: Enable streaming remote read if the server supports it. #11379
 * [ENHANCEMENT] Remote-Write: Don't reshard if we haven't successfully sent a sample since last update. #14450
 * [ENHANCEMENT] PromQL: Delay deletion of `__name__` label to the end of the query evaluation. This is **experimental** and enabled under the feature-flag `promql-delayed-name-removal`. #14477
-* [ENHANCEMENT] PromQL: Experimental `sort_by_label` and `sort_by_label_desc` sort by all labels when label is equal. #14655
+* [ENHANCEMENT] PromQL: Experimental `sort_by_label` and `sort_by_label_desc` sort by all labels when label is equal. #14655, #14985
 * [ENHANCEMENT] PromQL: Clarify error message logged when Go runtime panic occurs during query evaluation. #14621
 * [ENHANCEMENT] PromQL: Use Kahan summation for better accuracy in `avg` and `avg_over_time`. #14413
 * [ENHANCEMENT] Tracing: Improve PromQL tracing, including showing the operation performed for aggregates, operators, and calls. #14816
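The Kahan summation entry above (#14413) refers to compensated summation: alongside the running sum, a small compensation term collects the low-order bits that plain floating-point addition would otherwise drop. The following is a minimal, self-contained sketch of the general technique, not Prometheus's actual `avg`/`avg_over_time` code; the helper name is illustrative only.

```go
package main

import (
	"fmt"
	"math"
)

// kahanAdd adds inc to sum while tracking the rounding error in comp,
// so that sum+comp stays close to the exact running total.
func kahanAdd(inc, sum, comp float64) (newSum, newComp float64) {
	t := sum + inc
	if math.Abs(sum) >= math.Abs(inc) {
		comp += (sum - t) + inc // low-order bits of inc were lost
	} else {
		comp += (inc - t) + sum // low-order bits of sum were lost
	}
	return t, comp
}

func main() {
	vals := []float64{1e16, 1, -1e16}

	naive := 0.0
	for _, v := range vals {
		naive += v // the +1 is absorbed and lost next to 1e16
	}

	var sum, comp float64
	for _, v := range vals {
		sum, comp = kahanAdd(v, sum, comp)
	}

	fmt.Println(naive, sum+comp) // 0 1
}
```

For an average, the same pattern is applied to the running sum before dividing by the count, which is why the accuracy of `avg` and `avg_over_time` benefits.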
@@ -1230,38 +1230,17 @@ func (ev *evaluator) rangeEval(ctx context.Context, prepSeries func(labels.Label
 		ev.currentSamples = tempNumSamples
 		// Gather input vectors for this timestamp.
 		for i := range exprs {
-			vectors[i] = vectors[i][:0]
-
+			var bh []EvalSeriesHelper
+			var sh []EvalSeriesHelper
 			if prepSeries != nil {
-				bufHelpers[i] = bufHelpers[i][:0]
-			}
-
-			for si, series := range matrixes[i] {
-				switch {
-				case len(series.Floats) > 0 && series.Floats[0].T == ts:
-					vectors[i] = append(vectors[i], Sample{Metric: series.Metric, F: series.Floats[0].F, T: ts, DropName: series.DropName})
-					// Move input vectors forward so we don't have to re-scan the same
-					// past points at the next step.
-					matrixes[i][si].Floats = series.Floats[1:]
-				case len(series.Histograms) > 0 && series.Histograms[0].T == ts:
-					vectors[i] = append(vectors[i], Sample{Metric: series.Metric, H: series.Histograms[0].H, T: ts, DropName: series.DropName})
-					matrixes[i][si].Histograms = series.Histograms[1:]
-				default:
-					continue
-				}
-				if prepSeries != nil {
-					bufHelpers[i] = append(bufHelpers[i], seriesHelpers[i][si])
-				}
-				// Don't add histogram size here because we only
-				// copy the pointer above, not the whole
-				// histogram.
-				ev.currentSamples++
-				if ev.currentSamples > ev.maxSamples {
-					ev.error(ErrTooManySamples(env))
-				}
+				bh = bufHelpers[i][:0]
+				sh = seriesHelpers[i]
 			}
+			vectors[i], bh = ev.gatherVector(ts, matrixes[i], vectors[i], bh, sh)
 			args[i] = vectors[i]
-			ev.samplesStats.UpdatePeak(ev.currentSamples)
+			if prepSeries != nil {
+				bufHelpers[i] = bh
+			}
 		}
 
 		// Make the function call.
@@ -3716,3 +3695,41 @@ func newHistogramStatsSeries(series storage.Series) *histogramStatsSeries {
 func (s histogramStatsSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator {
 	return NewHistogramStatsIterator(s.Series.Iterator(it))
 }
+
+// gatherVector gathers a Vector for ts from the series in input.
+// output is used as a buffer.
+// If bufHelpers and seriesHelpers are provided, seriesHelpers[i] is appended to bufHelpers for every input index i.
+// The gathered Vector and bufHelper are returned.
+func (ev *evaluator) gatherVector(ts int64, input Matrix, output Vector, bufHelpers, seriesHelpers []EvalSeriesHelper) (Vector, []EvalSeriesHelper) {
+	output = output[:0]
+	for i, series := range input {
+		switch {
+		case len(series.Floats) > 0 && series.Floats[0].T == ts:
+			s := series.Floats[0]
+			output = append(output, Sample{Metric: series.Metric, F: s.F, T: ts, DropName: series.DropName})
+			// Move input vectors forward so we don't have to re-scan the same
+			// past points at the next step.
+			input[i].Floats = series.Floats[1:]
+		case len(series.Histograms) > 0 && series.Histograms[0].T == ts:
+			s := series.Histograms[0]
+			output = append(output, Sample{Metric: series.Metric, H: s.H, T: ts, DropName: series.DropName})
+			input[i].Histograms = series.Histograms[1:]
+		default:
+			continue
+		}
+		if len(seriesHelpers) > 0 {
+			bufHelpers = append(bufHelpers, seriesHelpers[i])
+		}
+
+		// Don't add histogram size here because we only
+		// copy the pointer above, not the whole
+		// histogram.
+		ev.currentSamples++
+		if ev.currentSamples > ev.maxSamples {
+			ev.error(ErrTooManySamples(env))
+		}
+	}
+	ev.samplesStats.UpdatePeak(ev.currentSamples)
+
+	return output, bufHelpers
+}
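The doc comment above describes the contract: for a single evaluation timestamp `ts`, take the head sample of every series whose next point falls exactly on `ts`, and advance that series in place so later steps never re-scan consumed points. Below is a small, self-contained sketch of that pattern on hypothetical toy types, not the promql package's own `Matrix`/`Vector`.

```go
package main

import "fmt"

// point is a single (timestamp, value) sample.
type point struct {
	T int64
	V float64
}

// series is an ordered list of points for one labelled series.
type series struct {
	Name   string
	Points []point
}

// gatherAt returns the value of every series whose next point falls exactly
// on ts, and drops the consumed head in place so the next call starts after it.
func gatherAt(ts int64, input []series) map[string]float64 {
	out := make(map[string]float64)
	for i, s := range input {
		if len(s.Points) > 0 && s.Points[0].T == ts {
			out[s.Name] = s.Points[0].V
			input[i].Points = s.Points[1:] // advance past the consumed sample
		}
	}
	return out
}

func main() {
	m := []series{
		{Name: "a", Points: []point{{T: 0, V: 1}, {T: 10, V: 2}}},
		{Name: "b", Points: []point{{T: 10, V: 5}}},
	}
	for ts := int64(0); ts <= 10; ts += 10 {
		fmt.Println(ts, gatherAt(ts, m)) // 0 map[a:1], then 10 map[a:2 b:5]
	}
}
```

The real method additionally appends the per-series `EvalSeriesHelper` entries and enforces `ev.maxSamples`; the sketch keeps only the gathering and the in-place advance.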
@@ -415,22 +415,12 @@ func funcSortDesc(vals []parser.Value, args parser.Expressions, enh *EvalNodeHel
 
 // === sort_by_label(vector parser.ValueTypeVector, label parser.ValueTypeString...) (Vector, Annotations) ===
 func funcSortByLabel(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	// First, sort by the full label set. This ensures a consistent ordering in case sorting by the
-	// labels provided as arguments is not conclusive.
+	lbls := stringSliceFromArgs(args[1:])
 	slices.SortFunc(vals[0].(Vector), func(a, b Sample) int {
-		return labels.Compare(a.Metric, b.Metric)
-	})
-
-	labels := stringSliceFromArgs(args[1:])
-	// Next, sort by the labels provided as arguments.
-	slices.SortFunc(vals[0].(Vector), func(a, b Sample) int {
-		// Iterate over each given label.
-		for _, label := range labels {
+		for _, label := range lbls {
 			lv1 := a.Metric.Get(label)
 			lv2 := b.Metric.Get(label)
 
-			// If we encounter multiple samples with the same label values, the sorting which was
-			// performed in the first step will act as a "tie breaker".
 			if lv1 == lv2 {
 				continue
 			}
@@ -442,7 +432,8 @@ func funcSortByLabel(vals []parser.Value, args parser.Expressions, enh *EvalNode
 			return +1
 		}
 
-		return 0
+		// If all labels provided as arguments were equal, sort by the full label set. This ensures a consistent ordering.
+		return labels.Compare(a.Metric, b.Metric)
 	})
 
 	return vals[0].(Vector), nil
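This change (and its descending twin in the hunks that follow) folds the tie-breaking into a single comparator: compare the requested labels in order, and only when every requested label is equal fall back to comparing the full label set. Previously the code pre-sorted by the full label set and returned 0 on ties, which relies on the second sort preserving that pre-sorted order; `slices.SortFunc` is not guaranteed to be stable, so doing the fallback inside the comparator is more robust and also saves a whole sort pass. Below is a minimal sketch of the single-pass pattern on a hypothetical toy label type, with plain string comparison standing in for the label-value comparison and `labels.Compare` fallback used in the real code.

```go
package main

import (
	"fmt"
	"slices"
	"strings"
)

// sample is a stand-in for a PromQL sample with a flat label map.
type sample struct {
	labels map[string]string
}

// fullKey plays the role of comparing the complete label set
// (what labels.Compare does for real label sets).
func fullKey(s sample) string {
	keys := make([]string, 0, len(s.labels))
	for k, v := range s.labels {
		keys = append(keys, k+"="+v)
	}
	slices.Sort(keys)
	return strings.Join(keys, ",")
}

// sortByKeys sorts xs by the requested label names in order; only when every
// requested label ties does it fall back to the full label set, keeping the
// ordering deterministic. desc flips the comparison, as the _desc variant does.
func sortByKeys(xs []sample, keys []string, desc bool) {
	slices.SortFunc(xs, func(a, b sample) int {
		for _, k := range keys {
			if c := strings.Compare(a.labels[k], b.labels[k]); c != 0 {
				if desc {
					return -c
				}
				return c
			}
		}
		// All requested labels tied: fall back to the full label set.
		c := strings.Compare(fullKey(a), fullKey(b))
		if desc {
			return -c
		}
		return c
	})
}

func main() {
	xs := []sample{
		{labels: map[string]string{"job": "api", "instance": "b"}},
		{labels: map[string]string{"job": "api", "instance": "a"}},
	}
	sortByKeys(xs, []string{"job"}, false) // "job" ties everywhere; the full label set decides
	fmt.Println(xs[0].labels["instance"], xs[1].labels["instance"]) // a b
}
```

The descending variant below is the same comparator with the signs flipped, ending in `-labels.Compare(a.Metric, b.Metric)` for the fallback.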
@@ -450,22 +441,12 @@ func funcSortByLabel(vals []parser.Value, args parser.Expressions, enh *EvalNode
 
 // === sort_by_label_desc(vector parser.ValueTypeVector, label parser.ValueTypeString...) (Vector, Annotations) ===
 func funcSortByLabelDesc(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	// First, sort by the full label set. This ensures a consistent ordering in case sorting by the
-	// labels provided as arguments is not conclusive.
+	lbls := stringSliceFromArgs(args[1:])
 	slices.SortFunc(vals[0].(Vector), func(a, b Sample) int {
-		return labels.Compare(b.Metric, a.Metric)
-	})
-
-	labels := stringSliceFromArgs(args[1:])
-	// Next, sort by the labels provided as arguments.
-	slices.SortFunc(vals[0].(Vector), func(a, b Sample) int {
-		// Iterate over each given label.
-		for _, label := range labels {
+		for _, label := range lbls {
 			lv1 := a.Metric.Get(label)
 			lv2 := b.Metric.Get(label)
 
-			// If we encounter multiple samples with the same label values, the sorting which was
-			// performed in the first step will act as a "tie breaker".
 			if lv1 == lv2 {
 				continue
 			}
@@ -477,7 +458,8 @@ func funcSortByLabelDesc(vals []parser.Value, args parser.Expressions, enh *Eval
 			return -1
 		}
 
-		return 0
+		// If all labels provided as arguments were equal, sort by the full label set. This ensures a consistent ordering.
+		return -labels.Compare(a.Metric, b.Metric)
 	})
 
 	return vals[0].(Vector), nil