mirror of https://github.com/prometheus/prometheus.git
synced 2025-03-05 20:59:13 -08:00

Merge branch 'prometheus:main' into patch-exemplar_ui

This commit is contained in: fc33c31460
@@ -534,7 +534,7 @@ func main() {
 	_, err := a.Parse(os.Args[1:])
 	if err != nil {
-		fmt.Fprintln(os.Stderr, fmt.Errorf("Error parsing command line arguments: %w", err))
+		fmt.Fprintf(os.Stderr, "Error parsing command line arguments: %s\n", err)
 		a.Usage(os.Args[1:])
 		os.Exit(2)
 	}
 
@@ -548,7 +548,7 @@ func main() {
 	notifs.AddNotification(notifications.StartingUp)
 
 	if err := cfg.setFeatureListOptions(logger); err != nil {
-		fmt.Fprintln(os.Stderr, fmt.Errorf("Error parsing feature list: %w", err))
+		fmt.Fprintf(os.Stderr, "Error parsing feature list: %s\n", err)
 		os.Exit(1)
 	}
 
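Note on the two main() hunks above: both replace fmt.Fprintln over a throwaway fmt.Errorf with a direct fmt.Fprintf. A minimal standalone sketch of the before/after (the error value here is illustrative):

package main

import (
	"errors"
	"fmt"
	"os"
)

func main() {
	err := errors.New("unknown long flag '--foo'") // illustrative parse error
	// Before: allocates an intermediate error value just to print it.
	fmt.Fprintln(os.Stderr, fmt.Errorf("Error parsing command line arguments: %w", err))
	// After: prints the same text directly, and go vet can check the format string.
	fmt.Fprintf(os.Stderr, "Error parsing command line arguments: %s\n", err)
}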
@@ -1742,7 +1742,7 @@ func (s *readyStorage) WALReplayStatus() (tsdb.WALReplayStatus, error) {
 }
 
 // ErrNotReady is returned if the underlying scrape manager is not ready yet.
-var ErrNotReady = errors.New("Scrape manager not ready")
+var ErrNotReady = errors.New("scrape manager not ready")
 
 // ReadyScrapeManager allows a scrape manager to be retrieved. Even if it's set at a later point in time.
 type readyScrapeManager struct {
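The lowercase rename here, like the matching changes in the scrape, remote-write, WAL, and API hunks further down, follows the Go convention that error strings are not capitalized, since they usually appear mid-sentence once wrapped. A small self-contained illustration:

package main

import (
	"errors"
	"fmt"
)

var ErrNotReady = errors.New("scrape manager not ready")

func main() {
	// Wrapped errors read as a single sentence, so the inner string
	// should not start with a capital letter.
	err := fmt.Errorf("failed to watch metadata: %w", ErrNotReady)
	fmt.Println(err) // failed to watch metadata: scrape manager not ready
}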
@@ -36,7 +36,7 @@ import (
 	"github.com/prometheus/client_golang/api"
 	"github.com/prometheus/client_golang/prometheus"
 	"github.com/prometheus/client_golang/prometheus/testutil/promlint"
-	config_util "github.com/prometheus/common/config"
+	"github.com/prometheus/common/expfmt"
 	"github.com/prometheus/common/model"
 	"github.com/prometheus/common/promslog"
 	"github.com/prometheus/common/version"
@@ -45,7 +45,6 @@ import (
 
 	dto "github.com/prometheus/client_model/go"
 	promconfig "github.com/prometheus/common/config"
-	"github.com/prometheus/common/expfmt"
 
 	"github.com/prometheus/prometheus/config"
 	"github.com/prometheus/prometheus/discovery"
@@ -312,12 +311,12 @@ func main() {
 		kingpin.Fatalf("Cannot set base auth in the server URL and use a http.config.file at the same time")
 	}
 	var err error
-	httpConfig, _, err := config_util.LoadHTTPConfigFile(httpConfigFilePath)
+	httpConfig, _, err := promconfig.LoadHTTPConfigFile(httpConfigFilePath)
 	if err != nil {
 		kingpin.Fatalf("Failed to load HTTP config file: %v", err)
 	}
 
-	httpRoundTripper, err = promconfig.NewRoundTripperFromConfig(*httpConfig, "promtool", config_util.WithUserAgent("promtool/"+version.Version))
+	httpRoundTripper, err = promconfig.NewRoundTripperFromConfig(*httpConfig, "promtool", promconfig.WithUserAgent("promtool/"+version.Version))
 	if err != nil {
 		kingpin.Fatalf("Failed to create a new HTTP round tripper: %v", err)
 	}
@@ -702,7 +701,7 @@ func checkConfig(agentMode bool, filename string, checkSyntaxOnly bool) ([]strin
 	return ruleFiles, nil
 }
 
-func checkTLSConfig(tlsConfig config_util.TLSConfig, checkSyntaxOnly bool) error {
+func checkTLSConfig(tlsConfig promconfig.TLSConfig, checkSyntaxOnly bool) error {
 	if len(tlsConfig.CertFile) > 0 && len(tlsConfig.KeyFile) == 0 {
 		return fmt.Errorf("client cert file %q specified without client key file", tlsConfig.CertFile)
 	}
@@ -86,6 +86,7 @@ URL query parameters:
 - `time=<rfc3339 | unix_timestamp>`: Evaluation timestamp. Optional.
 - `timeout=<duration>`: Evaluation timeout. Optional. Defaults to and
   is capped by the value of the `-query.timeout` flag.
+- `limit=<number>`: Maximum number of returned series. Doesn't affect scalars or strings but truncates the number of series for matrices and vectors. Optional. 0 means disabled.
 
 The current server time is used if the `time` parameter is omitted.
 
@@ -154,6 +155,7 @@ URL query parameters:
 - `step=<duration | float>`: Query resolution step width in `duration` format or float number of seconds.
 - `timeout=<duration>`: Evaluation timeout. Optional. Defaults to and
   is capped by the value of the `-query.timeout` flag.
+- `limit=<number>`: Maximum number of returned series. Optional. 0 means disabled.
 
 You can URL-encode these parameters directly in the request body by using the `POST` method and
 `Content-Type: application/x-www-form-urlencoded` header. This is useful when specifying a large
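The `limit` parameter documented in both hunks above applies to /api/v1/query and /api/v1/query_range alike. A minimal client sketch (the server address and PromQL expression are placeholders):

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	params := url.Values{}
	params.Set("query", "up")  // placeholder expression
	params.Set("limit", "100") // truncate vector/matrix results to 100 series
	resp, err := http.Get("http://localhost:9090/api/v1/query?" + params.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	// When truncation happens, the JSON response carries a warning.
	fmt.Println(string(body))
}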

(File diff suppressed because it is too large)

@@ -4,20 +4,11 @@
     {
       "source": {
         "git": {
-          "remote": "https://github.com/grafana/grafonnet-lib.git",
-          "subdir": "grafonnet"
+          "remote": "https://github.com/grafana/grafonnet.git",
+          "subdir": "gen/grafonnet-latest"
         }
       },
       "version": "master"
     },
     {
       "source": {
         "git": {
           "remote": "https://github.com/grafana/jsonnet-libs.git",
           "subdir": "grafana-builder"
         }
       },
-      "version": "master"
+      "version": "main"
     }
   ],
   "legacyImports": false
@@ -691,9 +691,15 @@ func funcLastOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNod
 
 // === mad_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
 func funcMadOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	if len(vals[0].(Matrix)[0].Floats) == 0 {
+	samples := vals[0].(Matrix)[0]
+	var annos annotations.Annotations
+	if len(samples.Floats) == 0 {
 		return enh.Out, nil
 	}
+	if len(samples.Histograms) > 0 {
+		metricName := samples.Metric.Get(labels.MetricName)
+		annos.Add(annotations.NewHistogramIgnoredInMixedRangeInfo(metricName, args[0].PositionRange()))
+	}
 	return aggrOverTime(vals, enh, func(s Series) float64 {
 		values := make(vectorByValueHeap, 0, len(s.Floats))
 		for _, f := range s.Floats {
@@ -705,18 +711,20 @@ func funcMadOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
 			values = append(values, Sample{F: math.Abs(f.F - median)})
 		}
 		return quantile(0.5, values)
-	}), nil
+	}), annos
 }
 
 // === max_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
 func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	if len(vals[0].(Matrix)[0].Floats) == 0 {
-		// TODO(beorn7): The passed values only contain
-		// histograms. max_over_time ignores histograms for now. If
-		// there are only histograms, we have to return without adding
-		// anything to enh.Out.
+	samples := vals[0].(Matrix)[0]
+	var annos annotations.Annotations
+	if len(samples.Floats) == 0 {
 		return enh.Out, nil
 	}
+	if len(samples.Histograms) > 0 {
+		metricName := samples.Metric.Get(labels.MetricName)
+		annos.Add(annotations.NewHistogramIgnoredInMixedRangeInfo(metricName, args[0].PositionRange()))
+	}
 	return aggrOverTime(vals, enh, func(s Series) float64 {
 		maxVal := s.Floats[0].F
 		for _, f := range s.Floats {
@@ -725,18 +733,20 @@ func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
 			}
 		}
 		return maxVal
-	}), nil
+	}), annos
 }
 
 // === min_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
 func funcMinOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	if len(vals[0].(Matrix)[0].Floats) == 0 {
-		// TODO(beorn7): The passed values only contain
-		// histograms. min_over_time ignores histograms for now. If
-		// there are only histograms, we have to return without adding
-		// anything to enh.Out.
+	samples := vals[0].(Matrix)[0]
+	var annos annotations.Annotations
+	if len(samples.Floats) == 0 {
 		return enh.Out, nil
 	}
+	if len(samples.Histograms) > 0 {
+		metricName := samples.Metric.Get(labels.MetricName)
+		annos.Add(annotations.NewHistogramIgnoredInMixedRangeInfo(metricName, args[0].PositionRange()))
+	}
 	return aggrOverTime(vals, enh, func(s Series) float64 {
 		minVal := s.Floats[0].F
 		for _, f := range s.Floats {
@@ -745,7 +755,7 @@ func funcMinOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
 			}
 		}
 		return minVal
-	}), nil
+	}), annos
 }
 
 // === sum_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
@@ -794,10 +804,6 @@ func funcQuantileOverTime(vals []parser.Value, args parser.Expressions, enh *Eva
 	q := vals[0].(Vector)[0].F
 	el := vals[1].(Matrix)[0]
 	if len(el.Floats) == 0 {
-		// TODO(beorn7): The passed values only contain
-		// histograms. quantile_over_time ignores histograms for now. If
-		// there are only histograms, we have to return without adding
-		// anything to enh.Out.
 		return enh.Out, nil
 	}
 
|
@ -805,7 +811,10 @@ func funcQuantileOverTime(vals []parser.Value, args parser.Expressions, enh *Eva
|
|||
if math.IsNaN(q) || q < 0 || q > 1 {
|
||||
annos.Add(annotations.NewInvalidQuantileWarning(q, args[0].PositionRange()))
|
||||
}
|
||||
|
||||
if len(el.Histograms) > 0 {
|
||||
metricName := el.Metric.Get(labels.MetricName)
|
||||
annos.Add(annotations.NewHistogramIgnoredInAggregationInfo(metricName, args[0].PositionRange()))
|
||||
}
|
||||
values := make(vectorByValueHeap, 0, len(el.Floats))
|
||||
for _, f := range el.Floats {
|
||||
values = append(values, Sample{F: f.F})
|
||||
|
@@ -815,13 +824,15 @@ func funcQuantileOverTime(vals []parser.Value, args parser.Expressions, enh *Eva
 
 // === stddev_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
 func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	if len(vals[0].(Matrix)[0].Floats) == 0 {
-		// TODO(beorn7): The passed values only contain
-		// histograms. stddev_over_time ignores histograms for now. If
-		// there are only histograms, we have to return without adding
-		// anything to enh.Out.
+	samples := vals[0].(Matrix)[0]
+	var annos annotations.Annotations
+	if len(samples.Floats) == 0 {
 		return enh.Out, nil
 	}
+	if len(samples.Histograms) > 0 {
+		metricName := samples.Metric.Get(labels.MetricName)
+		annos.Add(annotations.NewHistogramIgnoredInMixedRangeInfo(metricName, args[0].PositionRange()))
+	}
 	return aggrOverTime(vals, enh, func(s Series) float64 {
 		var count float64
 		var mean, cMean float64
@@ -833,18 +844,20 @@ func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalN
 			aux, cAux = kahanSumInc(delta*(f.F-(mean+cMean)), aux, cAux)
 		}
 		return math.Sqrt((aux + cAux) / count)
-	}), nil
+	}), annos
 }
 
 // === stdvar_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
 func funcStdvarOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	if len(vals[0].(Matrix)[0].Floats) == 0 {
-		// TODO(beorn7): The passed values only contain
-		// histograms. stdvar_over_time ignores histograms for now. If
-		// there are only histograms, we have to return without adding
-		// anything to enh.Out.
+	samples := vals[0].(Matrix)[0]
+	var annos annotations.Annotations
+	if len(samples.Floats) == 0 {
 		return enh.Out, nil
 	}
+	if len(samples.Histograms) > 0 {
+		metricName := samples.Metric.Get(labels.MetricName)
+		annos.Add(annotations.NewHistogramIgnoredInMixedRangeInfo(metricName, args[0].PositionRange()))
+	}
 	return aggrOverTime(vals, enh, func(s Series) float64 {
 		var count float64
 		var mean, cMean float64
|
@ -856,7 +869,7 @@ func funcStdvarOverTime(vals []parser.Value, args parser.Expressions, enh *EvalN
|
|||
aux, cAux = kahanSumInc(delta*(f.F-(mean+cMean)), aux, cAux)
|
||||
}
|
||||
return (aux + cAux) / count
|
||||
}), nil
|
||||
}), annos
|
||||
}
|
||||
|
||||
// === absent(Vector parser.ValueTypeVector) (Vector, Annotations) ===
|
||||
|
|
|
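Each *_over_time hunk above repeats the same three steps: bind the series, return early when there are no float samples, and attach an info annotation when histograms are ignored. A hypothetical in-package helper capturing the pattern (a sketch, not code from this commit; it reuses the Series, parser, annotations, and labels identifiers visible in the diff):

// histogramSkipAnnos is a sketch only.
func histogramSkipAnnos(samples Series, args parser.Expressions) (skip bool, annos annotations.Annotations) {
	if len(samples.Floats) == 0 {
		// Only histograms (or nothing) in the range: emit no output sample.
		return true, annos
	}
	if len(samples.Histograms) > 0 {
		// Mixed floats and histograms: compute over the floats, but tell
		// the user that the histograms were ignored.
		metricName := samples.Metric.Get(labels.MetricName)
		annos.Add(annotations.NewHistogramIgnoredInMixedRangeInfo(metricName, args[0].PositionRange()))
	}
	return false, annos
}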
@@ -1419,8 +1419,8 @@ func (ll *LazyLoader) appendTill(ts int64) error {
 
 // WithSamplesTill loads the samples till given timestamp and executes the given function.
 func (ll *LazyLoader) WithSamplesTill(ts time.Time, fn func(error)) {
-	tsMilli := ts.Sub(time.Unix(0, 0).UTC()) / time.Millisecond
-	fn(ll.appendTill(int64(tsMilli)))
+	till := ts.Sub(time.Unix(0, 0).UTC()) / time.Millisecond
+	fn(ll.appendTill(int64(till)))
 }
 
 // QueryEngine returns the LazyLoader's query engine.
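The renamed till variable still computes milliseconds since the Unix epoch by subtraction; a standalone sketch showing it matches the stdlib shortcut available since Go 1.17:

package main

import (
	"fmt"
	"time"
)

func main() {
	ts := time.Now()
	// The subtraction used by WithSamplesTill above...
	till := ts.Sub(time.Unix(0, 0).UTC()) / time.Millisecond
	// ...yields the same value as time.Time.UnixMilli (Go 1.17+).
	fmt.Println(int64(till) == ts.UnixMilli()) // true
}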

promql/promqltest/testdata/functions.test (vendored, 60 lines changed)

@@ -930,6 +930,8 @@ eval instant at 1m avg_over_time(metric[2m])
 clear
 load 10s
   metric 0 8 8 2 3
+  metric_histogram{type="only_histogram"} {{schema:1 sum:2 count:3}}x5
+  metric_histogram{type="mix"} 1 1 1 {{schema:1 sum:2 count:3}} {{schema:1 sum:2 count:3}}
 
 eval instant at 1m stdvar_over_time(metric[2m])
   {} 10.56
@@ -940,6 +942,19 @@ eval instant at 1m stdvar_over_time(metric[2m])
 eval instant at 1m stddev_over_time((metric[2m]))
   {} 3.249615
 
+# Tests for stddev_over_time and stdvar_over_time with histograms.
+eval instant at 1m stddev_over_time(metric_histogram{type="only_histogram"}[2m])
+  #empty
+
+eval_info instant at 1m stddev_over_time(metric_histogram{type="mix"}[2m])
+  {type="mix"} 0
+
+eval instant at 1m stdvar_over_time(metric_histogram{type="only_histogram"}[2m])
+  #empty
+
+eval_info instant at 1m stdvar_over_time(metric_histogram{type="mix"}[2m])
+  {type="mix"} 0
+
 # Tests for stddev_over_time and stdvar_over_time #4927.
 clear
 load 10s
@@ -955,10 +970,18 @@ eval instant at 1m stddev_over_time(metric[1m])
 clear
 load 10s
   metric 4 6 2 1 999 1 2
+  metric_histogram{type="only_histogram"} {{schema:1 sum:2 count:3}}x5
+  metric_histogram{type="mix"} 1 1 1 {{schema:1 sum:2 count:3}} {{schema:1 sum:2 count:3}}
 
 eval instant at 70s mad_over_time(metric[70s])
   {} 1
 
+eval instant at 70s mad_over_time(metric_histogram{type="only_histogram"}[70s])
+  #empty
+
+eval_info instant at 70s mad_over_time(metric_histogram{type="mix"}[70s])
+  {type="mix"} 0
+
 # Tests for quantile_over_time
 clear
 
@@ -966,6 +989,8 @@ load 10s
   data{test="two samples"} 0 1
   data{test="three samples"} 0 1 2
   data{test="uneven samples"} 0 1 4
+  data_histogram{test="only histogram samples"} {{schema:0 sum:1 count:2}}x4
+  data_histogram{test="mix samples"} 0 1 2 {{schema:0 sum:1 count:2}}x2
 
 eval instant at 1m quantile_over_time(0, data[2m])
   {test="two samples"} 0
@@ -1007,6 +1032,12 @@ eval_warn instant at 1m (quantile_over_time(2, (data[2m])))
   {test="three samples"} +Inf
   {test="uneven samples"} +Inf
 
+eval instant at 1m quantile_over_time(0.5, data_histogram{test="only histogram samples"}[2m])
+  #empty
+
+eval_info instant at 1m quantile_over_time(0.5, data_histogram{test="mix samples"}[2m])
+  {test="mix samples"} 1
+
 clear
 
 # Test time-related functions.
@@ -1120,15 +1151,17 @@ load 5m
 
 eval_fail instant at 0m changes({__name__=~'testmetric1|testmetric2'}[5m])
 
-# Tests for *_over_time
 clear
 
+# Tests for *_over_time
 load 10s
   data{type="numbers"} 2 0 3
   data{type="some_nan"} 2 0 NaN
   data{type="some_nan2"} 2 NaN 1
   data{type="some_nan3"} NaN 0 1
   data{type="only_nan"} NaN NaN NaN
+  data_histogram{type="only_histogram"} {{schema:0 sum:1 count:2}} {{schema:0 sum:2 count:3}} {{schema:0 sum:3 count:4}}
+  data_histogram{type="mix_samples"} 0 1 {{schema:0 sum:1 count:2}} {{schema:0 sum:2 count:3}}
 
 eval instant at 1m min_over_time(data[2m])
   {type="numbers"} 0
@@ -1137,6 +1170,12 @@ eval instant at 1m min_over_time(data[2m])
   {type="some_nan3"} 0
   {type="only_nan"} NaN
 
+eval instant at 1m min_over_time(data_histogram{type="only_histogram"}[2m])
+  #empty
+
+eval_info instant at 1m min_over_time(data_histogram{type="mix_samples"}[2m])
+  {type="mix_samples"} 0
+
 eval instant at 1m max_over_time(data[2m])
   {type="numbers"} 3
   {type="some_nan"} 2
@@ -1144,12 +1183,29 @@ eval instant at 1m max_over_time(data[2m])
   {type="some_nan3"} 1
   {type="only_nan"} NaN
 
-eval instant at 1m last_over_time(data[2m])
+eval instant at 1m max_over_time(data_histogram{type="only_histogram"}[2m])
+  #empty
+
+eval_info instant at 1m max_over_time(data_histogram{type="mix_samples"}[2m])
+  {type="mix_samples"} 1
+
+eval instant at 1m last_over_time({__name__=~"data(_histogram)?"}[2m])
   data{type="numbers"} 3
   data{type="some_nan"} NaN
   data{type="some_nan2"} 1
   data{type="some_nan3"} 1
   data{type="only_nan"} NaN
+  data_histogram{type="only_histogram"} {{schema:0 sum:3 count:4}}
+  data_histogram{type="mix_samples"} {{schema:0 sum:2 count:3}}
 
+eval instant at 1m count_over_time({__name__=~"data(_histogram)?"}[2m])
+  {type="numbers"} 3
+  {type="some_nan"} 3
+  {type="some_nan2"} 3
+  {type="some_nan3"} 3
+  {type="only_nan"} 3
+  {type="only_histogram"} 3
+  {type="mix_samples"} 4
+
 clear
 
@@ -1128,6 +1128,39 @@ eval_warn range from 0 to 12m step 6m sum(metric)
 eval_warn range from 0 to 12m step 6m avg(metric)
   {} _ {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}} _
 
+# Test incompatible schemas with additional aggregation operators
+eval range from 0 to 12m step 6m count(metric)
+  {} 2 2 3
+
+eval range from 0 to 12m step 6m group(metric)
+  {} 1 1 1
+
+eval range from 0 to 12m step 6m count(limitk(1, metric))
+  {} 1 1 1
+
+eval range from 0 to 12m step 6m limitk(3, metric)
+  metric{series="1"} _ {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}} {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}}
+  metric{series="2"} {{schema:-53 sum:1 count:1 custom_values:[2] buckets:[1]}} _ {{schema:-53 sum:1 count:1 custom_values:[2] buckets:[1]}}
+  metric{series="3"} {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}} {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}} {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}}
+
+eval range from 0 to 12m step 6m limit_ratio(1, metric)
+  metric{series="1"} _ {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}} {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}}
+  metric{series="2"} {{schema:-53 sum:1 count:1 custom_values:[2] buckets:[1]}} _ {{schema:-53 sum:1 count:1 custom_values:[2] buckets:[1]}}
+  metric{series="3"} {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}} {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}} {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}}
+
+# Test incompatible schemas with and/or
+eval range from 0 to 12m step 6m metric{series="1"} and ignoring(series) metric{series="2"}
+  metric{series="1"} _ _ {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}}
+
+eval range from 0 to 12m step 6m metric{series="1"} or ignoring(series) metric{series="2"}
+  metric{series="1"} _ {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}} {{schema:-53 sum:1 count:1 custom_values:[5 10] buckets:[1]}}
+  metric{series="2"} {{schema:-53 sum:1 count:1 custom_values:[2] buckets:[1]}} _ _
+
+# Test incompatible schemas with arithmetic binary operators
+eval_warn range from 0 to 12m step 6m metric{series="2"} + ignoring (series) metric{series="3"}
+
+eval_warn range from 0 to 12m step 6m metric{series="2"} - ignoring (series) metric{series="3"}
+
 clear
 
 load 1m
@@ -295,12 +295,12 @@ func (t *Target) intervalAndTimeout(defaultInterval, defaultDuration time.Durati
 	intervalLabel := t.labels.Get(model.ScrapeIntervalLabel)
 	interval, err := model.ParseDuration(intervalLabel)
 	if err != nil {
-		return defaultInterval, defaultDuration, fmt.Errorf("Error parsing interval label %q: %w", intervalLabel, err)
+		return defaultInterval, defaultDuration, fmt.Errorf("error parsing interval label %q: %w", intervalLabel, err)
 	}
 	timeoutLabel := t.labels.Get(model.ScrapeTimeoutLabel)
 	timeout, err := model.ParseDuration(timeoutLabel)
 	if err != nil {
-		return defaultInterval, defaultDuration, fmt.Errorf("Error parsing timeout label %q: %w", timeoutLabel, err)
+		return defaultInterval, defaultDuration, fmt.Errorf("error parsing timeout label %q: %w", timeoutLabel, err)
 	}
 
 	return time.Duration(interval), time.Duration(timeout), nil
@@ -38,7 +38,7 @@ type Watchable interface {
 type noopScrapeManager struct{}
 
 func (noop *noopScrapeManager) Get() (*scrape.Manager, error) {
-	return nil, errors.New("Scrape manager not ready")
+	return nil, errors.New("scrape manager not ready")
 }
 
 // MetadataWatcher watches the Scrape Manager for a given WriteMetadataTo.
@@ -2119,7 +2119,7 @@ func compressPayload(tmpbuf *[]byte, inp []byte, enc Compression) (compressed []
 		}
 		return compressed, nil
 	default:
-		return compressed, fmt.Errorf("Unknown compression scheme [%v]", enc)
+		return compressed, fmt.Errorf("unknown compression scheme [%v]", enc)
 	}
 }
 
@@ -679,7 +679,7 @@ func (w *Watcher) readCheckpoint(checkpointDir string, readFn segmentReadFn) err
 	// Ensure we read the whole contents of every segment in the checkpoint dir.
 	segs, err := listSegments(checkpointDir)
 	if err != nil {
-		return fmt.Errorf("Unable to get segments checkpoint dir: %w", err)
+		return fmt.Errorf("unable to get segments checkpoint dir: %w", err)
 	}
 	for _, segRef := range segs {
 		size, err := getSegmentSize(checkpointDir, segRef.index)
@@ -438,6 +438,10 @@ func (api *API) options(*http.Request) apiFuncResult {
 }
 
 func (api *API) query(r *http.Request) (result apiFuncResult) {
+	limit, err := parseLimitParam(r.FormValue("limit"))
+	if err != nil {
+		return invalidParamError(err, "limit")
+	}
 	ts, err := parseTimeParam(r, "time", api.now())
 	if err != nil {
 		return invalidParamError(err, "time")
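parseLimitParam is called here but its body is not part of this diff. A hypothetical sketch consistent with the API tests below (an absent parameter or 0 disables the limit; a negative value is rejected as bad data); the real implementation may differ:

// Sketch only; assumes the errors, fmt, and strconv imports.
func parseLimitParam(limitStr string) (limit int, err error) {
	if limitStr == "" {
		return limit, nil // parameter absent: limit stays 0 (disabled)
	}
	limit, err = strconv.Atoi(limitStr)
	if err != nil {
		return limit, fmt.Errorf("error parsing number: %w", err)
	}
	if limit < 0 {
		return limit, errors.New("limit must be non-negative")
	}
	return limit, nil
}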
@@ -479,6 +483,15 @@ func (api *API) query(r *http.Request) (result apiFuncResult) {
 		return apiFuncResult{nil, returnAPIError(res.Err), res.Warnings, qry.Close}
 	}
 
+	warnings := res.Warnings
+	if limit > 0 {
+		var isTruncated bool
+
+		res, isTruncated = truncateResults(res, limit)
+		if isTruncated {
+			warnings = warnings.Add(errors.New("results truncated due to limit"))
+		}
+	}
 	// Optional stats field in response if parameter "stats" is not empty.
 	sr := api.statsRenderer
 	if sr == nil {
@@ -490,7 +503,7 @@ func (api *API) query(r *http.Request) (result apiFuncResult) {
 		ResultType: res.Value.Type(),
 		Result:     res.Value,
 		Stats:      qs,
-	}, nil, res.Warnings, qry.Close}
+	}, nil, warnings, qry.Close}
 }
 
 func (api *API) formatQuery(r *http.Request) (result apiFuncResult) {
@@ -526,6 +539,10 @@ func extractQueryOpts(r *http.Request) (promql.QueryOpts, error) {
 }
 
 func (api *API) queryRange(r *http.Request) (result apiFuncResult) {
+	limit, err := parseLimitParam(r.FormValue("limit"))
+	if err != nil {
+		return invalidParamError(err, "limit")
+	}
 	start, err := parseTime(r.FormValue("start"))
 	if err != nil {
 		return invalidParamError(err, "start")
@@ -590,6 +607,16 @@ func (api *API) queryRange(r *http.Request) (result apiFuncResult) {
 		return apiFuncResult{nil, returnAPIError(res.Err), res.Warnings, qry.Close}
 	}
 
+	warnings := res.Warnings
+	if limit > 0 {
+		var isTruncated bool
+
+		res, isTruncated = truncateResults(res, limit)
+		if isTruncated {
+			warnings = warnings.Add(errors.New("results truncated due to limit"))
+		}
+	}
+
 	// Optional stats field in response if parameter "stats" is not empty.
 	sr := api.statsRenderer
 	if sr == nil {
@@ -601,7 +628,7 @@ func (api *API) queryRange(r *http.Request) (result apiFuncResult) {
 		ResultType: res.Value.Type(),
 		Result:     res.Value,
 		Stats:      qs,
-	}, nil, res.Warnings, qry.Close}
+	}, nil, warnings, qry.Close}
 }
 
 func (api *API) queryExemplars(r *http.Request) apiFuncResult {
@@ -2016,7 +2043,7 @@ func parseTimeParam(r *http.Request, paramName string, defaultValue time.Time) (
 	}
 	result, err := parseTime(val)
 	if err != nil {
-		return time.Time{}, fmt.Errorf("Invalid time value for '%s': %w", paramName, err)
+		return time.Time{}, fmt.Errorf("invalid time value for '%s': %w", paramName, err)
 	}
 	return result, nil
 }
|
@ -2102,3 +2129,25 @@ func toHintLimit(limit int) int {
|
|||
}
|
||||
return limit
|
||||
}
|
||||
|
||||
// truncateResults truncates result for queryRange() and query().
|
||||
// No truncation for other types(Scalars or Strings).
|
||||
func truncateResults(result *promql.Result, limit int) (*promql.Result, bool) {
|
||||
isTruncated := false
|
||||
|
||||
switch v := result.Value.(type) {
|
||||
case promql.Matrix:
|
||||
if len(v) > limit {
|
||||
result.Value = v[:limit]
|
||||
isTruncated = true
|
||||
}
|
||||
case promql.Vector:
|
||||
if len(v) > limit {
|
||||
result.Value = v[:limit]
|
||||
isTruncated = true
|
||||
}
|
||||
}
|
||||
|
||||
// Return the modified result. Unchanged for other types.
|
||||
return result, isTruncated
|
||||
}
|
||||
|
|
|
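A minimal in-package sketch of how truncateResults behaves (the three empty Samples stand in for real data; assumes the fmt import):

func exampleTruncate() {
	res := &promql.Result{Value: promql.Vector{{}, {}, {}}} // three samples
	res, isTruncated := truncateResults(res, 2)
	fmt.Println(isTruncated)                    // true
	fmt.Println(len(res.Value.(promql.Vector))) // 2
	// Scalars and strings fall through the switch unchanged.
}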
@@ -1164,6 +1164,49 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 			},
 		},
 	},
+	// Only matrix and vector responses are limited/truncated. String and scalar responses aren't truncated.
+	{
+		endpoint: api.query,
+		query: url.Values{
+			"query": []string{"2"},
+			"time":  []string{"123.4"},
+			"limit": []string{"1"},
+		},
+		response: &QueryData{
+			ResultType: parser.ValueTypeScalar,
+			Result: promql.Scalar{
+				V: 2,
+				T: timestamp.FromTime(start.Add(123*time.Second + 400*time.Millisecond)),
+			},
+		},
+		warningsCount: 0,
+	},
+	// When limit = 0, limit is disabled.
+	{
+		endpoint: api.query,
+		query: url.Values{
+			"query": []string{"2"},
+			"time":  []string{"123.4"},
+			"limit": []string{"0"},
+		},
+		response: &QueryData{
+			ResultType: parser.ValueTypeScalar,
+			Result: promql.Scalar{
+				V: 2,
+				T: timestamp.FromTime(start.Add(123*time.Second + 400*time.Millisecond)),
+			},
+		},
+		warningsCount: 0,
+	},
+	{
+		endpoint: api.query,
+		query: url.Values{
+			"query": []string{"2"},
+			"time":  []string{"123.4"},
+			"limit": []string{"-1"},
+		},
+		errType: errorBadData,
+	},
 	{
 		endpoint: api.query,
 		query: url.Values{
@@ -1205,6 +1248,179 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 			},
 		},
 	},
+	{
+		endpoint: api.query,
+		query: url.Values{
+			"query": []string{
+				`label_replace(vector(42), "foo", "bar", "", "") or label_replace(vector(3.1415), "dings", "bums", "", "")`,
+			},
+			"time":  []string{"123.4"},
+			"limit": []string{"2"},
+		},
+		warningsCount: 0,
+		responseAsJSON: `{
+			"resultType": "vector",
+			"result": [
+				{
+					"metric": {
+						"foo": "bar"
+					},
+					"value": [123.4, "42"]
+				},
+				{
+					"metric": {
+						"dings": "bums"
+					},
+					"value": [123.4, "3.1415"]
+				}
+			]
+		}`,
+	},
+	{
+		endpoint: api.query,
+		query: url.Values{
+			"query": []string{
+				`label_replace(vector(42), "foo", "bar", "", "") or label_replace(vector(3.1415), "dings", "bums", "", "")`,
+			},
+			"time":  []string{"123.4"},
+			"limit": []string{"1"},
+		},
+		warningsCount: 1,
+		responseAsJSON: `{
+			"resultType": "vector",
+			"result": [
+				{
+					"metric": {
+						"foo": "bar"
+					},
+					"value": [123.4, "42"]
+				}
+			]
+		}`,
+	},
+	{
+		endpoint: api.query,
+		query: url.Values{
+			"query": []string{
+				`label_replace(vector(42), "foo", "bar", "", "") or label_replace(vector(3.1415), "dings", "bums", "", "")`,
+			},
+			"time":  []string{"123.4"},
+			"limit": []string{"0"},
+		},
+		responseAsJSON: `{
+			"resultType": "vector",
+			"result": [
+				{
+					"metric": {
+						"foo": "bar"
+					},
+					"value": [123.4, "42"]
+				},
+				{
+					"metric": {
+						"dings": "bums"
+					},
+					"value": [123.4, "3.1415"]
+				}
+			]
+		}`,
+		warningsCount: 0,
+	},
+	// limit=0 means no limit.
+	{
+		endpoint: api.queryRange,
+		query: url.Values{
+			"query": []string{
+				`label_replace(vector(42), "foo", "bar", "", "") or label_replace(vector(3.1415), "dings", "bums", "", "")`,
+			},
+			"start": []string{"0"},
+			"end":   []string{"2"},
+			"step":  []string{"1"},
+			"limit": []string{"0"},
+		},
+		response: &QueryData{
+			ResultType: parser.ValueTypeMatrix,
+			Result: promql.Matrix{
+				promql.Series{
+					Metric: labels.FromMap(map[string]string{"dings": "bums"}),
+					Floats: []promql.FPoint{
+						{F: 3.1415, T: timestamp.FromTime(start)},
+						{F: 3.1415, T: timestamp.FromTime(start.Add(1 * time.Second))},
+						{F: 3.1415, T: timestamp.FromTime(start.Add(2 * time.Second))},
+					},
+				},
+				promql.Series{
+					Metric: labels.FromMap(map[string]string{"foo": "bar"}),
+					Floats: []promql.FPoint{
+						{F: 42, T: timestamp.FromTime(start)},
+						{F: 42, T: timestamp.FromTime(start.Add(1 * time.Second))},
+						{F: 42, T: timestamp.FromTime(start.Add(2 * time.Second))},
+					},
+				},
+			},
+		},
+		warningsCount: 0,
+	},
+	{
+		endpoint: api.queryRange,
+		query: url.Values{
+			"query": []string{
+				`label_replace(vector(42), "foo", "bar", "", "") or label_replace(vector(3.1415), "dings", "bums", "", "")`,
+			},
+			"start": []string{"0"},
+			"end":   []string{"2"},
+			"step":  []string{"1"},
+			"limit": []string{"1"},
+		},
+		response: &QueryData{
+			ResultType: parser.ValueTypeMatrix,
+			Result: promql.Matrix{
+				promql.Series{
+					Metric: labels.FromMap(map[string]string{"dings": "bums"}),
+					Floats: []promql.FPoint{
+						{F: 3.1415, T: timestamp.FromTime(start)},
+						{F: 3.1415, T: timestamp.FromTime(start.Add(1 * time.Second))},
+						{F: 3.1415, T: timestamp.FromTime(start.Add(2 * time.Second))},
+					},
+				},
+			},
+		},
+		warningsCount: 1,
+	},
+	{
+		endpoint: api.queryRange,
+		query: url.Values{
+			"query": []string{
+				`label_replace(vector(42), "foo", "bar", "", "") or label_replace(vector(3.1415), "dings", "bums", "", "")`,
+			},
+			"start": []string{"0"},
+			"end":   []string{"2"},
+			"step":  []string{"1"},
+			"limit": []string{"2"},
+		},
+		response: &QueryData{
+			ResultType: parser.ValueTypeMatrix,
+			Result: promql.Matrix{
+				promql.Series{
+					Metric: labels.FromMap(map[string]string{"dings": "bums"}),
+					Floats: []promql.FPoint{
+						{F: 3.1415, T: timestamp.FromTime(start)},
+						{F: 3.1415, T: timestamp.FromTime(start.Add(1 * time.Second))},
+						{F: 3.1415, T: timestamp.FromTime(start.Add(2 * time.Second))},
+					},
+				},
+				promql.Series{
+					Metric: labels.FromMap(map[string]string{"foo": "bar"}),
+					Floats: []promql.FPoint{
+						{F: 42, T: timestamp.FromTime(start)},
+						{F: 42, T: timestamp.FromTime(start.Add(1 * time.Second))},
+						{F: 42, T: timestamp.FromTime(start.Add(2 * time.Second))},
+					},
+				},
+			},
+		},
+		warningsCount: 0,
+	},
 	{
 		endpoint: api.queryRange,
 		query: url.Values{
@@ -1222,7 +1438,6 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 					{F: 1, T: timestamp.FromTime(start.Add(1 * time.Second))},
 					{F: 2, T: timestamp.FromTime(start.Add(2 * time.Second))},
 				},
-				// No Metric returned - use zero value for comparison.
 			},
 		},
 	},
@@ -1235,6 +1450,17 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 		},
 		responseAsJSON: `{"resultType":"vector","result":[]}`,
 	},
+	{
+		endpoint: api.queryRange,
+		query: url.Values{
+			"query": []string{"bottomk(2, notExists)"},
+			"start": []string{"0"},
+			"end":   []string{"2"},
+			"step":  []string{"1"},
+			"limit": []string{"-1"},
+		},
+		errType: errorBadData,
+	},
 	// Test empty matrix result
 	{
 		endpoint: api.queryRange,
@@ -3960,7 +4186,7 @@ func TestParseTimeParam(t *testing.T) {
 			asTime: time.Time{},
 			asError: func() error {
 				_, err := parseTime("baz")
-				return fmt.Errorf("Invalid time value for '%s': %w", "foo", err)
+				return fmt.Errorf("invalid time value for '%s': %w", "foo", err)
 			},
 		},
 	},