PromQL: Make lookback delta exclusive

This excludes samples that are precisely the full lookback delta before the
current evaluation timestamp. This is in line with the new "left open" range
selectors.

Signed-off-by: beorn7 <beorn@grafana.com>

parent: bcf9d77bbb
commit: 1838d56aee
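In effect, the left boundary of the lookback window changes from closed to open: a sample sitting exactly one lookback delta before the evaluation timestamp no longer counts. A minimal before/after sketch of that selection rule (illustrative Go, not code from this commit; all values in milliseconds):

    package main

    import "fmt"

    // usableOld/usableNew give the sample-selection predicate for an instant
    // evaluation at time t with lookback delta d, before and after this commit.
    // ts is the sample timestamp.
    func usableOld(ts, t, d int64) bool { return ts >= t-d && ts <= t } // closed left boundary
    func usableNew(ts, t, d int64) bool { return ts > t-d && ts <= t }  // open left boundary

    func main() {
        const d = 5 * 60 * 1000 // default 5m lookback delta in ms
        // A sample exactly one lookback delta before the eval time:
        fmt.Println(usableOld(0, 300000, d), usableNew(0, 300000, d)) // true false
    }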
cmd/promtool/testdata/unittest.yml (4 lines changed, vendored)

@@ -89,11 +89,11 @@ tests:
       # Ensure lookback delta is respected, when a value is missing.
       - expr: timestamp(test_missing)
-        eval_time: 5m
+        eval_time: 4m59s
         exp_samples:
          - value: 0
       - expr: timestamp(test_missing)
-        eval_time: 5m1s
+        eval_time: 5m
         exp_samples: []

       # Minimal test case to check edge case of a single sample.
promql/engine.go

@@ -499,8 +499,6 @@ func (ng *Engine) newQuery(q storage.Queryable, qs string, opts QueryOpts, start
 	if lookbackDelta <= 0 {
 		lookbackDelta = ng.lookbackDelta
 	}
-	// lookback shall ignore the sample falling on the left bound
-	lookbackDelta -= time.Duration(time.Millisecond.Nanoseconds() * 1)

 	es := &parser.EvalStmt{
 		Start: start,
@@ -884,11 +882,17 @@ func getTimeRangesForSelector(s *parser.EvalStmt, n *parser.VectorSelector, path
 	}

 	if evalRange == 0 {
-		start -= durationMilliseconds(s.LookbackDelta)
+		// Reduce the start by one fewer ms than the lookback delta
+		// because we want to exclude samples that are precisely the
+		// lookback delta before the eval time.
+		start -= durationMilliseconds(s.LookbackDelta) - 1
 	} else {
-		// For all matrix queries we want to ensure that we have (end-start) + range selected
-		// this way we have `range` data before the start time
-		start -= durationMilliseconds(evalRange)
+		// For all matrix queries we want to ensure that we have
+		// (end-start) + range selected this way we have `range` data
+		// before the start time. We subtract one from the range to
+		// exclude samples positioned directly at the lower boundary of
+		// the range.
+		start -= durationMilliseconds(evalRange) - 1
 	}

 	offsetMilliseconds := durationMilliseconds(n.OriginalOffset)
@@ -2013,7 +2017,7 @@ func (ev *evaluator) rangeEvalTimestampFunctionOverVectorSelector(vs *parser.Vec
 	seriesIterators := make([]*storage.MemoizedSeriesIterator, len(vs.Series))
 	for i, s := range vs.Series {
 		it := s.Iterator(nil)
-		seriesIterators[i] = storage.NewMemoizedIterator(it, durationMilliseconds(ev.lookbackDelta))
+		seriesIterators[i] = storage.NewMemoizedIterator(it, durationMilliseconds(ev.lookbackDelta)-1)
 	}

 	return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
@@ -2075,7 +2079,7 @@ func (ev *evaluator) vectorSelectorSingle(it *storage.MemoizedSeriesIterator, no
 	if valueType == chunkenc.ValNone || t > refTime {
 		var ok bool
 		t, v, h, ok = it.PeekPrev()
-		if !ok || t < refTime-durationMilliseconds(ev.lookbackDelta) {
+		if !ok || t <= refTime-durationMilliseconds(ev.lookbackDelta) {
 			return 0, 0, nil, false
 		}
 	}
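The two hunks above are where the exclusivity takes effect at evaluation time: the memoized iterator keeps one millisecond less history, and a peeked-back sample lying exactly lookbackDelta before the reference time is now rejected (`<=` instead of `<`). A standalone sketch of the new staleness check (illustrative, not engine code):

    package main

    import "fmt"

    // tooOld mirrors the updated condition in vectorSelectorSingle: a previous
    // sample at time t (ms) is unusable at refTime once it sits at or beyond
    // the lookback boundary, i.e. the usable window is the left-open
    // interval (refTime-delta, refTime].
    func tooOld(t, refTime, deltaMs int64) bool {
        return t <= refTime-deltaMs
    }

    func main() {
        fmt.Println(tooOld(0, 300000, 300000)) // true: exactly on the boundary, now excluded
        fmt.Println(tooOld(1, 300000, 300000)) // false: strictly inside the window
    }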
promql/engine_test.go

@@ -314,271 +314,271 @@ func TestSelectHintsSetCorrectly(t *testing.T) {
 	{
 		query: "foo", start: 10000,
 		expected: []*storage.SelectHints{
-			{Start: 5000, End: 10000},
+			{Start: 5001, End: 10000},
 		},
 	}, {
 		query: "foo @ 15", start: 10000,
 		expected: []*storage.SelectHints{
-			{Start: 10000, End: 15000},
+			{Start: 10001, End: 15000},
 		},
 	}, {
 		query: "foo @ 1", start: 10000,
 		expected: []*storage.SelectHints{
-			{Start: -4000, End: 1000},
+			{Start: -3999, End: 1000},
 		},
 	}, {
 		query: "foo[2m]", start: 200000,
 		expected: []*storage.SelectHints{
-			{Start: 80000, End: 200000, Range: 120000},
+			{Start: 80001, End: 200000, Range: 120000},
 		},
 	}, {
 		query: "foo[2m] @ 180", start: 200000,
 		expected: []*storage.SelectHints{
-			{Start: 60000, End: 180000, Range: 120000},
+			{Start: 60001, End: 180000, Range: 120000},
 		},
 	}, {
 		query: "foo[2m] @ 300", start: 200000,
 		expected: []*storage.SelectHints{
-			{Start: 180000, End: 300000, Range: 120000},
+			{Start: 180001, End: 300000, Range: 120000},
 		},
 	}, {
 		query: "foo[2m] @ 60", start: 200000,
 		expected: []*storage.SelectHints{
-			{Start: -60000, End: 60000, Range: 120000},
+			{Start: -59999, End: 60000, Range: 120000},
 		},
 	}, {
 		query: "foo[2m] offset 2m", start: 300000,
 		expected: []*storage.SelectHints{
-			{Start: 60000, End: 180000, Range: 120000},
+			{Start: 60001, End: 180000, Range: 120000},
 		},
 	}, {
 		query: "foo[2m] @ 200 offset 2m", start: 300000,
 		expected: []*storage.SelectHints{
-			{Start: -40000, End: 80000, Range: 120000},
+			{Start: -39999, End: 80000, Range: 120000},
 		},
 	}, {
 		query: "foo[2m:1s]", start: 300000,
 		expected: []*storage.SelectHints{
-			{Start: 175000, End: 300000, Step: 1000},
+			{Start: 175001, End: 300000, Step: 1000},
 		},
 	}, {
 		query: "count_over_time(foo[2m:1s])", start: 300000,
 		expected: []*storage.SelectHints{
-			{Start: 175000, End: 300000, Func: "count_over_time", Step: 1000},
+			{Start: 175001, End: 300000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time(foo[2m:1s] @ 300)", start: 200000,
 		expected: []*storage.SelectHints{
-			{Start: 175000, End: 300000, Func: "count_over_time", Step: 1000},
+			{Start: 175001, End: 300000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time(foo[2m:1s] @ 200)", start: 200000,
 		expected: []*storage.SelectHints{
-			{Start: 75000, End: 200000, Func: "count_over_time", Step: 1000},
+			{Start: 75001, End: 200000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time(foo[2m:1s] @ 100)", start: 200000,
 		expected: []*storage.SelectHints{
-			{Start: -25000, End: 100000, Func: "count_over_time", Step: 1000},
+			{Start: -24999, End: 100000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time(foo[2m:1s] offset 10s)", start: 300000,
 		expected: []*storage.SelectHints{
-			{Start: 165000, End: 290000, Func: "count_over_time", Step: 1000},
+			{Start: 165001, End: 290000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time((foo offset 10s)[2m:1s] offset 10s)", start: 300000,
 		expected: []*storage.SelectHints{
-			{Start: 155000, End: 280000, Func: "count_over_time", Step: 1000},
+			{Start: 155001, End: 280000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		// When the @ is on the vector selector, the enclosing subquery parameters
 		// don't affect the hint ranges.
 		query: "count_over_time((foo @ 200 offset 10s)[2m:1s] offset 10s)", start: 300000,
 		expected: []*storage.SelectHints{
-			{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
+			{Start: 185001, End: 190000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		// When the @ is on the vector selector, the enclosing subquery parameters
 		// don't affect the hint ranges.
 		query: "count_over_time((foo @ 200 offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000,
 		expected: []*storage.SelectHints{
-			{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
+			{Start: 185001, End: 190000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time((foo offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000,
 		expected: []*storage.SelectHints{
-			{Start: -45000, End: 80000, Func: "count_over_time", Step: 1000},
+			{Start: -44999, End: 80000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "foo", start: 10000, end: 20000,
 		expected: []*storage.SelectHints{
-			{Start: 5000, End: 20000, Step: 1000},
+			{Start: 5001, End: 20000, Step: 1000},
 		},
 	}, {
 		query: "foo @ 15", start: 10000, end: 20000,
 		expected: []*storage.SelectHints{
-			{Start: 10000, End: 15000, Step: 1000},
+			{Start: 10001, End: 15000, Step: 1000},
 		},
 	}, {
 		query: "foo @ 1", start: 10000, end: 20000,
 		expected: []*storage.SelectHints{
-			{Start: -4000, End: 1000, Step: 1000},
+			{Start: -3999, End: 1000, Step: 1000},
 		},
 	}, {
 		query: "rate(foo[2m] @ 180)", start: 200000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 60000, End: 180000, Range: 120000, Func: "rate", Step: 1000},
+			{Start: 60001, End: 180000, Range: 120000, Func: "rate", Step: 1000},
 		},
 	}, {
 		query: "rate(foo[2m] @ 300)", start: 200000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 180000, End: 300000, Range: 120000, Func: "rate", Step: 1000},
+			{Start: 180001, End: 300000, Range: 120000, Func: "rate", Step: 1000},
 		},
 	}, {
 		query: "rate(foo[2m] @ 60)", start: 200000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: -60000, End: 60000, Range: 120000, Func: "rate", Step: 1000},
+			{Start: -59999, End: 60000, Range: 120000, Func: "rate", Step: 1000},
 		},
 	}, {
 		query: "rate(foo[2m])", start: 200000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 80000, End: 500000, Range: 120000, Func: "rate", Step: 1000},
+			{Start: 80001, End: 500000, Range: 120000, Func: "rate", Step: 1000},
 		},
 	}, {
 		query: "rate(foo[2m] offset 2m)", start: 300000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 60000, End: 380000, Range: 120000, Func: "rate", Step: 1000},
+			{Start: 60001, End: 380000, Range: 120000, Func: "rate", Step: 1000},
 		},
 	}, {
 		query: "rate(foo[2m:1s])", start: 300000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 175000, End: 500000, Func: "rate", Step: 1000},
+			{Start: 175001, End: 500000, Func: "rate", Step: 1000},
 		},
 	}, {
 		query: "count_over_time(foo[2m:1s])", start: 300000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 175000, End: 500000, Func: "count_over_time", Step: 1000},
+			{Start: 175001, End: 500000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time(foo[2m:1s] offset 10s)", start: 300000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 165000, End: 490000, Func: "count_over_time", Step: 1000},
+			{Start: 165001, End: 490000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time(foo[2m:1s] @ 300)", start: 200000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 175000, End: 300000, Func: "count_over_time", Step: 1000},
+			{Start: 175001, End: 300000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time(foo[2m:1s] @ 200)", start: 200000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 75000, End: 200000, Func: "count_over_time", Step: 1000},
+			{Start: 75001, End: 200000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time(foo[2m:1s] @ 100)", start: 200000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: -25000, End: 100000, Func: "count_over_time", Step: 1000},
+			{Start: -24999, End: 100000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time((foo offset 10s)[2m:1s] offset 10s)", start: 300000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 155000, End: 480000, Func: "count_over_time", Step: 1000},
+			{Start: 155001, End: 480000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		// When the @ is on the vector selector, the enclosing subquery parameters
 		// don't affect the hint ranges.
 		query: "count_over_time((foo @ 200 offset 10s)[2m:1s] offset 10s)", start: 300000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
+			{Start: 185001, End: 190000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		// When the @ is on the vector selector, the enclosing subquery parameters
 		// don't affect the hint ranges.
 		query: "count_over_time((foo @ 200 offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 185000, End: 190000, Func: "count_over_time", Step: 1000},
+			{Start: 185001, End: 190000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "count_over_time((foo offset 10s)[2m:1s] @ 100 offset 10s)", start: 300000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: -45000, End: 80000, Func: "count_over_time", Step: 1000},
+			{Start: -44999, End: 80000, Func: "count_over_time", Step: 1000},
 		},
 	}, {
 		query: "sum by (dim1) (foo)", start: 10000,
 		expected: []*storage.SelectHints{
-			{Start: 5000, End: 10000, Func: "sum", By: true, Grouping: []string{"dim1"}},
+			{Start: 5001, End: 10000, Func: "sum", By: true, Grouping: []string{"dim1"}},
 		},
 	}, {
 		query: "sum without (dim1) (foo)", start: 10000,
 		expected: []*storage.SelectHints{
-			{Start: 5000, End: 10000, Func: "sum", Grouping: []string{"dim1"}},
+			{Start: 5001, End: 10000, Func: "sum", Grouping: []string{"dim1"}},
 		},
 	}, {
 		query: "sum by (dim1) (avg_over_time(foo[1s]))", start: 10000,
 		expected: []*storage.SelectHints{
-			{Start: 9000, End: 10000, Func: "avg_over_time", Range: 1000},
+			{Start: 9001, End: 10000, Func: "avg_over_time", Range: 1000},
 		},
 	}, {
 		query: "sum by (dim1) (max by (dim2) (foo))", start: 10000,
 		expected: []*storage.SelectHints{
-			{Start: 5000, End: 10000, Func: "max", By: true, Grouping: []string{"dim2"}},
+			{Start: 5001, End: 10000, Func: "max", By: true, Grouping: []string{"dim2"}},
 		},
 	}, {
 		query: "(max by (dim1) (foo))[5s:1s]", start: 10000,
 		expected: []*storage.SelectHints{
-			{Start: 0, End: 10000, Func: "max", By: true, Grouping: []string{"dim1"}, Step: 1000},
+			{Start: 1, End: 10000, Func: "max", By: true, Grouping: []string{"dim1"}, Step: 1000},
 		},
 	}, {
 		query: "(sum(http_requests{group=~\"p.*\"})+max(http_requests{group=~\"c.*\"}))[20s:5s]", start: 120000,
 		expected: []*storage.SelectHints{
-			{Start: 95000, End: 120000, Func: "sum", By: true, Step: 5000},
-			{Start: 95000, End: 120000, Func: "max", By: true, Step: 5000},
+			{Start: 95001, End: 120000, Func: "sum", By: true, Step: 5000},
+			{Start: 95001, End: 120000, Func: "max", By: true, Step: 5000},
 		},
 	}, {
 		query: "foo @ 50 + bar @ 250 + baz @ 900", start: 100000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 45000, End: 50000, Step: 1000},
-			{Start: 245000, End: 250000, Step: 1000},
-			{Start: 895000, End: 900000, Step: 1000},
+			{Start: 45001, End: 50000, Step: 1000},
+			{Start: 245001, End: 250000, Step: 1000},
+			{Start: 895001, End: 900000, Step: 1000},
 		},
 	}, {
 		query: "foo @ 50 + bar + baz @ 900", start: 100000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 45000, End: 50000, Step: 1000},
-			{Start: 95000, End: 500000, Step: 1000},
-			{Start: 895000, End: 900000, Step: 1000},
+			{Start: 45001, End: 50000, Step: 1000},
+			{Start: 95001, End: 500000, Step: 1000},
+			{Start: 895001, End: 900000, Step: 1000},
 		},
 	}, {
 		query: "rate(foo[2s] @ 50) + bar @ 250 + baz @ 900", start: 100000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 48000, End: 50000, Step: 1000, Func: "rate", Range: 2000},
-			{Start: 245000, End: 250000, Step: 1000},
-			{Start: 895000, End: 900000, Step: 1000},
+			{Start: 48001, End: 50000, Step: 1000, Func: "rate", Range: 2000},
+			{Start: 245001, End: 250000, Step: 1000},
+			{Start: 895001, End: 900000, Step: 1000},
 		},
 	}, {
 		query: "rate(foo[2s:1s] @ 50) + bar + baz", start: 100000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 43000, End: 50000, Step: 1000, Func: "rate"},
-			{Start: 95000, End: 500000, Step: 1000},
-			{Start: 95000, End: 500000, Step: 1000},
+			{Start: 43001, End: 50000, Step: 1000, Func: "rate"},
+			{Start: 95001, End: 500000, Step: 1000},
+			{Start: 95001, End: 500000, Step: 1000},
		},
 	}, {
 		query: "rate(foo[2s:1s] @ 50) + bar + rate(baz[2m:1s] @ 900 offset 2m) ", start: 100000, end: 500000,
 		expected: []*storage.SelectHints{
-			{Start: 43000, End: 50000, Step: 1000, Func: "rate"},
-			{Start: 95000, End: 500000, Step: 1000},
-			{Start: 655000, End: 780000, Step: 1000, Func: "rate"},
+			{Start: 43001, End: 50000, Step: 1000, Func: "rate"},
+			{Start: 95001, End: 500000, Step: 1000},
+			{Start: 655001, End: 780000, Step: 1000, Func: "rate"},
 		},
 	}, { // Hints are based on the innermost subquery timestamp.
 		query: `sum_over_time(sum_over_time(metric{job="1"}[100s])[100s:25s] @ 50)[3s:1s] @ 3000`, start: 100000,
 		expected: []*storage.SelectHints{
-			{Start: -150000, End: 50000, Range: 100000, Func: "sum_over_time", Step: 25000},
+			{Start: -149999, End: 50000, Range: 100000, Func: "sum_over_time", Step: 25000},
 		},
 	}, { // Hints are based on the innermost subquery timestamp.
 		query: `sum_over_time(sum_over_time(metric{job="1"}[100s])[100s:25s] @ 3000)[3s:1s] @ 50`,
 		expected: []*storage.SelectHints{
-			{Start: 2800000, End: 3000000, Range: 100000, Func: "sum_over_time", Step: 25000},
+			{Start: 2800001, End: 3000000, Range: 100000, Func: "sum_over_time", Step: 25000},
 		},
 	},
 } {
@@ -4904,43 +4904,43 @@ metric 0 1 2
 }{
 	{
 		name:          "default lookback delta",
-		ts:            lastDatapointTs.Add(defaultLookbackDelta),
+		ts:            lastDatapointTs.Add(defaultLookbackDelta - time.Millisecond),
 		expectSamples: true,
 	},
 	{
 		name:          "outside default lookback delta",
-		ts:            lastDatapointTs.Add(defaultLookbackDelta + time.Millisecond),
+		ts:            lastDatapointTs.Add(defaultLookbackDelta),
 		expectSamples: false,
 	},
 	{
 		name:           "custom engine lookback delta",
-		ts:             lastDatapointTs.Add(10 * time.Minute),
+		ts:             lastDatapointTs.Add(10*time.Minute - time.Millisecond),
 		engineLookback: 10 * time.Minute,
 		expectSamples:  true,
 	},
 	{
 		name:           "outside custom engine lookback delta",
-		ts:             lastDatapointTs.Add(10*time.Minute + time.Millisecond),
+		ts:             lastDatapointTs.Add(10 * time.Minute),
 		engineLookback: 10 * time.Minute,
 		expectSamples:  false,
 	},
 	{
 		name:           "custom query lookback delta",
-		ts:             lastDatapointTs.Add(20 * time.Minute),
+		ts:             lastDatapointTs.Add(20*time.Minute - time.Millisecond),
 		engineLookback: 10 * time.Minute,
 		queryLookback:  20 * time.Minute,
 		expectSamples:  true,
 	},
 	{
 		name:           "outside custom query lookback delta",
-		ts:             lastDatapointTs.Add(20*time.Minute + time.Millisecond),
+		ts:             lastDatapointTs.Add(20 * time.Minute),
 		engineLookback: 10 * time.Minute,
 		queryLookback:  20 * time.Minute,
 		expectSamples:  false,
 	},
 	{
 		name:           "negative custom query lookback delta",
-		ts:             lastDatapointTs.Add(20 * time.Minute),
+		ts:             lastDatapointTs.Add(20*time.Minute - time.Millisecond),
 		engineLookback: -10 * time.Minute,
 		queryLookback:  20 * time.Minute,
 		expectSamples:  true,
promql/promqltest/test_test.go

@@ -237,7 +237,7 @@ eval instant at 5m sum by (group) (http_requests)
 				load 5m
 					testmetric {{}}

-				eval instant at 5m testmetric
+				eval instant at 0m testmetric
 			`,
 			expectedError: `error in eval testmetric (line 5): unexpected metric {__name__="testmetric"} in result, has value {count:0, sum:0}`,
 		},
promql/promqltest/testdata/aggregators.test (12 lines changed, vendored)

@@ -250,7 +250,7 @@ clear
 load 5m
   http_requests{job="api-server", instance="0", group="production"}	0+10x10
   http_requests{job="api-server", instance="1", group="production"}	0+20x10
-  http_requests{job="api-server", instance="2", group="production"}	NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+  http_requests{job="api-server", instance="2", group="production"}	NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
   http_requests{job="api-server", instance="0", group="canary"}		0+30x10
   http_requests{job="api-server", instance="1", group="canary"}		0+40x10
   http_requests{job="app-server", instance="0", group="production"}	0+50x10

@@ -337,32 +337,32 @@ load 5m
   version{job="app-server", instance="0", group="canary"}	7
   version{job="app-server", instance="1", group="canary"}	7

-eval instant at 5m count_values("version", version)
+eval instant at 1m count_values("version", version)
   {version="6"} 5
   {version="7"} 2
   {version="8"} 2


-eval instant at 5m count_values(((("version"))), version)
+eval instant at 1m count_values(((("version"))), version)
   {version="6"} 5
   {version="7"} 2
   {version="8"} 2


-eval instant at 5m count_values without (instance)("version", version)
+eval instant at 1m count_values without (instance)("version", version)
   {job="api-server", group="production", version="6"} 3
   {job="api-server", group="canary", version="8"} 2
   {job="app-server", group="production", version="6"} 2
   {job="app-server", group="canary", version="7"} 2

 # Overwrite label with output. Don't do this.
-eval instant at 5m count_values without (instance)("job", version)
+eval instant at 1m count_values without (instance)("job", version)
   {job="6", group="production"} 5
   {job="8", group="canary"} 2
   {job="7", group="canary"} 2

 # Overwrite label with output. Don't do this.
-eval instant at 5m count_values by (job, group)("job", version)
+eval instant at 1m count_values by (job, group)("job", version)
   {job="6", group="production"} 5
   {job="8", group="canary"} 2
   {job="7", group="canary"} 2
promql/promqltest/testdata/functions.test (32 lines changed, vendored)

@@ -449,7 +449,7 @@ load 5m
   http_requests{job="api-server", instance="1", group="production"}	0+20x10
   http_requests{job="api-server", instance="0", group="canary"}		0+30x10
   http_requests{job="api-server", instance="1", group="canary"}		0+40x10
-  http_requests{job="api-server", instance="2", group="canary"}		NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+  http_requests{job="api-server", instance="2", group="canary"}		NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
   http_requests{job="app-server", instance="0", group="production"}	0+50x10
   http_requests{job="app-server", instance="1", group="production"}	0+60x10
   http_requests{job="app-server", instance="0", group="canary"}		0+70x10

@@ -484,7 +484,7 @@ load 5m
   http_requests{job="api-server", instance="1", group="production"}	0+20x10
   http_requests{job="api-server", instance="0", group="canary"}		0+30x10
   http_requests{job="api-server", instance="1", group="canary"}		0+40x10
-  http_requests{job="api-server", instance="2", group="canary"}		NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+  http_requests{job="api-server", instance="2", group="canary"}		NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
   http_requests{job="app-server", instance="0", group="production"}	0+50x10
   http_requests{job="app-server", instance="1", group="production"}	0+60x10
   http_requests{job="app-server", instance="0", group="canary"}		0+70x10

@@ -1162,59 +1162,59 @@ load 5m
   exp_root_log{l="x"} 10
   exp_root_log{l="y"} 20

-eval instant at 5m exp(exp_root_log)
+eval instant at 1m exp(exp_root_log)
   {l="x"} 22026.465794806718
   {l="y"} 485165195.4097903

-eval instant at 5m exp(exp_root_log - 10)
+eval instant at 1m exp(exp_root_log - 10)
   {l="y"} 22026.465794806718
   {l="x"} 1

-eval instant at 5m exp(exp_root_log - 20)
+eval instant at 1m exp(exp_root_log - 20)
   {l="x"} 4.5399929762484854e-05
   {l="y"} 1

-eval instant at 5m ln(exp_root_log)
+eval instant at 1m ln(exp_root_log)
   {l="x"} 2.302585092994046
   {l="y"} 2.995732273553991

-eval instant at 5m ln(exp_root_log - 10)
+eval instant at 1m ln(exp_root_log - 10)
   {l="y"} 2.302585092994046
   {l="x"} -Inf

-eval instant at 5m ln(exp_root_log - 20)
+eval instant at 1m ln(exp_root_log - 20)
   {l="y"} -Inf
   {l="x"} NaN

-eval instant at 5m exp(ln(exp_root_log))
+eval instant at 1m exp(ln(exp_root_log))
   {l="y"} 20
   {l="x"} 10

-eval instant at 5m sqrt(exp_root_log)
+eval instant at 1m sqrt(exp_root_log)
   {l="x"} 3.1622776601683795
   {l="y"} 4.47213595499958

-eval instant at 5m log2(exp_root_log)
+eval instant at 1m log2(exp_root_log)
   {l="x"} 3.3219280948873626
   {l="y"} 4.321928094887363

-eval instant at 5m log2(exp_root_log - 10)
+eval instant at 1m log2(exp_root_log - 10)
   {l="y"} 3.3219280948873626
   {l="x"} -Inf

-eval instant at 5m log2(exp_root_log - 20)
+eval instant at 1m log2(exp_root_log - 20)
   {l="x"} NaN
   {l="y"} -Inf

-eval instant at 5m log10(exp_root_log)
+eval instant at 1m log10(exp_root_log)
   {l="x"} 1
   {l="y"} 1.301029995663981

-eval instant at 5m log10(exp_root_log - 10)
+eval instant at 1m log10(exp_root_log - 10)
   {l="y"} 1
   {l="x"} -Inf

-eval instant at 5m log10(exp_root_log - 20)
+eval instant at 1m log10(exp_root_log - 20)
   {l="x"} NaN
   {l="y"} -Inf
promql/promqltest/testdata/native_histograms.test (102 lines changed, vendored)

@@ -2,55 +2,55 @@
 load 5m
   empty_histogram {{}}

-eval instant at 5m empty_histogram
+eval instant at 1m empty_histogram
   {__name__="empty_histogram"} {{}}

-eval instant at 5m histogram_count(empty_histogram)
+eval instant at 1m histogram_count(empty_histogram)
   {} 0

-eval instant at 5m histogram_sum(empty_histogram)
+eval instant at 1m histogram_sum(empty_histogram)
   {} 0

-eval instant at 5m histogram_avg(empty_histogram)
+eval instant at 1m histogram_avg(empty_histogram)
   {} NaN

-eval instant at 5m histogram_fraction(-Inf, +Inf, empty_histogram)
+eval instant at 1m histogram_fraction(-Inf, +Inf, empty_histogram)
   {} NaN

-eval instant at 5m histogram_fraction(0, 8, empty_histogram)
+eval instant at 1m histogram_fraction(0, 8, empty_histogram)
   {} NaN


 clear

 # buckets:[1 2 1] means 1 observation in the 1st bucket, 2 observations in the 2nd and 1 observation in the 3rd (total 4).
 load 5m
   single_histogram {{schema:0 sum:5 count:4 buckets:[1 2 1]}}

 # histogram_count extracts the count property from the histogram.
-eval instant at 5m histogram_count(single_histogram)
+eval instant at 1m histogram_count(single_histogram)
   {} 4

 # histogram_sum extracts the sum property from the histogram.
-eval instant at 5m histogram_sum(single_histogram)
+eval instant at 1m histogram_sum(single_histogram)
   {} 5

 # histogram_avg calculates the average from sum and count properties.
-eval instant at 5m histogram_avg(single_histogram)
+eval instant at 1m histogram_avg(single_histogram)
   {} 1.25

 # We expect half of the values to fall in the range 1 < x <= 2.
-eval instant at 5m histogram_fraction(1, 2, single_histogram)
+eval instant at 1m histogram_fraction(1, 2, single_histogram)
   {} 0.5

 # We expect all values to fall in the range 0 < x <= 8.
-eval instant at 5m histogram_fraction(0, 8, single_histogram)
+eval instant at 1m histogram_fraction(0, 8, single_histogram)
   {} 1

 # Median is 1.5 due to linear estimation of the midpoint of the middle bucket, whose values are within range 1 < x <= 2.
-eval instant at 5m histogram_quantile(0.5, single_histogram)
+eval instant at 1m histogram_quantile(0.5, single_histogram)
   {} 1.5


 clear

 # Repeat the same histogram 10 times.
 load 5m

@@ -88,7 +88,7 @@ eval instant at 50m histogram_fraction(1, 2, multi_histogram)
 eval instant at 50m histogram_quantile(0.5, multi_histogram)
   {} 1.5

-
+clear

 # Accumulate the histogram addition for 10 iterations, offset is a bucket position where offset:0 is always the bucket
 # with an upper limit of 1 and offset:1 is the bucket which follows to the right. Negative offsets represent bucket

@@ -140,7 +140,7 @@ eval instant at 50m rate(incr_histogram[10m])
 eval instant at 50m histogram_quantile(0.5, rate(incr_histogram[10m]))
   {} 1.5

-
+clear

 # Schema represents the histogram resolution, different schema have compatible bucket boundaries, e.g.:
 # 0: 1 2 4 8 16 32 64 (higher resolution)

@@ -166,77 +166,77 @@ eval instant at 5m histogram_avg(low_res_histogram)
 eval instant at 5m histogram_fraction(1, 4, low_res_histogram)
   {} 1


 clear

 # z_bucket:1 means there is one observation in the zero bucket and z_bucket_w:0.5 means the zero bucket has the range
 # 0 < x <= 0.5. Sum and count are expected to represent all observations in the histogram, including those in the zero bucket.
 load 5m
   single_zero_histogram {{schema:0 z_bucket:1 z_bucket_w:0.5 sum:0.25 count:1}}

-eval instant at 5m histogram_count(single_zero_histogram)
+eval instant at 1m histogram_count(single_zero_histogram)
   {} 1

-eval instant at 5m histogram_sum(single_zero_histogram)
+eval instant at 1m histogram_sum(single_zero_histogram)
   {} 0.25

-eval instant at 5m histogram_avg(single_zero_histogram)
+eval instant at 1m histogram_avg(single_zero_histogram)
   {} 0.25

 # When only the zero bucket is populated, or there are negative buckets, the distribution is assumed to be equally
 # distributed around zero; i.e. that there are an equal number of positive and negative observations. Therefore the
 # entire distribution must lie within the full range of the zero bucket, in this case: -0.5 < x <= +0.5.
-eval instant at 5m histogram_fraction(-0.5, 0.5, single_zero_histogram)
+eval instant at 1m histogram_fraction(-0.5, 0.5, single_zero_histogram)
   {} 1

 # Half of the observations are estimated to be zero, as this is the midpoint between -0.5 and +0.5.
-eval instant at 5m histogram_quantile(0.5, single_zero_histogram)
+eval instant at 1m histogram_quantile(0.5, single_zero_histogram)
   {} 0


 clear

 # Let's turn single_histogram upside-down.
 load 5m
   negative_histogram {{schema:0 sum:-5 count:4 n_buckets:[1 2 1]}}

-eval instant at 5m histogram_count(negative_histogram)
+eval instant at 1m histogram_count(negative_histogram)
   {} 4

-eval instant at 5m histogram_sum(negative_histogram)
+eval instant at 1m histogram_sum(negative_histogram)
   {} -5

-eval instant at 5m histogram_avg(negative_histogram)
+eval instant at 1m histogram_avg(negative_histogram)
   {} -1.25

 # We expect half of the values to fall in the range -2 < x <= -1.
-eval instant at 5m histogram_fraction(-2, -1, negative_histogram)
+eval instant at 1m histogram_fraction(-2, -1, negative_histogram)
   {} 0.5

-eval instant at 5m histogram_quantile(0.5, negative_histogram)
+eval instant at 1m histogram_quantile(0.5, negative_histogram)
   {} -1.5


 clear

 # Two histogram samples.
 load 5m
   two_samples_histogram {{schema:0 sum:4 count:4 buckets:[1 2 1]}} {{schema:0 sum:-4 count:4 n_buckets:[1 2 1]}}

 # We expect to see the newest sample.
-eval instant at 10m histogram_count(two_samples_histogram)
+eval instant at 5m histogram_count(two_samples_histogram)
   {} 4

-eval instant at 10m histogram_sum(two_samples_histogram)
+eval instant at 5m histogram_sum(two_samples_histogram)
   {} -4

-eval instant at 10m histogram_avg(two_samples_histogram)
+eval instant at 5m histogram_avg(two_samples_histogram)
   {} -1

-eval instant at 10m histogram_fraction(-2, -1, two_samples_histogram)
+eval instant at 5m histogram_fraction(-2, -1, two_samples_histogram)
   {} 0.5

-eval instant at 10m histogram_quantile(0.5, two_samples_histogram)
+eval instant at 5m histogram_quantile(0.5, two_samples_histogram)
   {} -1.5


 clear

 # Add two histograms with negated data.
 load 5m

@@ -259,6 +259,8 @@ eval instant at 5m histogram_fraction(0, 4, balanced_histogram)
 eval instant at 5m histogram_quantile(0.5, balanced_histogram)
   {} 0.5

+clear
+
 # Add histogram to test sum(last_over_time) regression
 load 5m
   incr_sum_histogram{number="1"} {{schema:0 sum:0 count:0 buckets:[1]}}+{{schema:0 sum:1 count:1 buckets:[1]}}x10

@@ -270,6 +272,8 @@ eval instant at 50m histogram_sum(sum(incr_sum_histogram))
 eval instant at 50m histogram_sum(sum(last_over_time(incr_sum_histogram[5m])))
   {} 30

+clear
+
 # Apply rate function to histogram.
 load 15s
   histogram_rate {{schema:1 count:12 sum:18.4 z_bucket:2 z_bucket_w:0.001 buckets:[1 2 0 1 1] n_buckets:[1 2 0 1 1]}}+{{schema:1 count:9 sum:18.4 z_bucket:1 z_bucket_w:0.001 buckets:[1 1 0 1 1] n_buckets:[1 1 0 1 1]}}x100

@@ -280,6 +284,8 @@ eval instant at 5m rate(histogram_rate[45s])
 eval range from 5m to 5m30s step 30s rate(histogram_rate[45s])
   {} {{schema:1 count:0.6 sum:1.2266666666666652 z_bucket:0.06666666666666667 z_bucket_w:0.001 buckets:[0.06666666666666667 0.06666666666666667 0 0.06666666666666667 0.06666666666666667] n_buckets:[0.06666666666666667 0.06666666666666667 0 0.06666666666666667 0.06666666666666667]}}x1

+clear
+
 # Apply count and sum function to histogram.
 load 10m
   histogram_count_sum_2 {{schema:0 count:24 sum:100 z_bucket:4 z_bucket_w:0.001 buckets:[2 3 0 1 4] n_buckets:[2 3 0 1 4]}}x1

@@ -290,6 +296,8 @@ eval instant at 10m histogram_count(histogram_count_sum_2)
 eval instant at 10m histogram_sum(histogram_count_sum_2)
   {} 100

+clear
+
 # Apply stddev and stdvar function to histogram with {1, 2, 3, 4} (low res).
 load 10m
   histogram_stddev_stdvar_1 {{schema:2 count:4 sum:10 buckets:[1 0 0 0 1 0 0 1 1]}}x1

@@ -300,6 +308,8 @@ eval instant at 10m histogram_stddev(histogram_stddev_stdvar_1)
 eval instant at 10m histogram_stdvar(histogram_stddev_stdvar_1)
   {} 1.163807968526718

+clear
+
 # Apply stddev and stdvar function to histogram with {1, 1, 1, 1} (high res).
 load 10m
   histogram_stddev_stdvar_2 {{schema:8 count:10 sum:10 buckets:[1 2 3 4]}}x1

@@ -310,6 +320,8 @@ eval instant at 10m histogram_stddev(histogram_stddev_stdvar_2)
 eval instant at 10m histogram_stdvar(histogram_stddev_stdvar_2)
   {} 2.3971123370139447e-05

+clear
+
 # Apply stddev and stdvar function to histogram with {-50, -8, 0, 3, 8, 9}.
 load 10m
   histogram_stddev_stdvar_3 {{schema:3 count:7 sum:62 z_bucket:1 buckets:[0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 ] n_buckets:[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 ]}}x1

@@ -320,6 +332,8 @@ eval instant at 10m histogram_stddev(histogram_stddev_stdvar_3)
 eval instant at 10m histogram_stdvar(histogram_stddev_stdvar_3)
   {} 1844.4651144196398

+clear
+
 # Apply stddev and stdvar function to histogram with {-100000, -10000, -1000, -888, -888, -100, -50, -9, -8, -3}.
 load 10m
   histogram_stddev_stdvar_4 {{schema:0 count:10 sum:-112946 z_bucket:0 n_buckets:[0 0 1 1 1 0 1 1 0 0 3 0 0 0 1 0 0 1]}}x1

@@ -330,6 +344,8 @@ eval instant at 10m histogram_stddev(histogram_stddev_stdvar_4)
 eval instant at 10m histogram_stdvar(histogram_stddev_stdvar_4)
   {} 759352122.1939945

+clear
+
 # Apply stddev and stdvar function to histogram with {-10x10}.
 load 10m
   histogram_stddev_stdvar_5 {{schema:0 count:10 sum:-100 z_bucket:0 n_buckets:[0 0 0 0 10]}}x1

@@ -340,6 +356,8 @@ eval instant at 10m histogram_stddev(histogram_stddev_stdvar_5)
 eval instant at 10m histogram_stdvar(histogram_stddev_stdvar_5)
   {} 1.725830020304794

+clear
+
 # Apply stddev and stdvar function to histogram with {-50, -8, 0, 3, 8, 9, NaN}.
 load 10m
   histogram_stddev_stdvar_6 {{schema:3 count:7 sum:NaN z_bucket:1 buckets:[0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 ] n_buckets:[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 ]}}x1

@@ -350,6 +368,8 @@ eval instant at 10m histogram_stddev(histogram_stddev_stdvar_6)
 eval instant at 10m histogram_stdvar(histogram_stddev_stdvar_6)
   {} NaN

+clear
+
 # Apply stddev and stdvar function to histogram with {-50, -8, 0, 3, 8, 9, Inf}.
 load 10m
   histogram_stddev_stdvar_7 {{schema:3 count:7 sum:Inf z_bucket:1 buckets:[0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 ] n_buckets:[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 ]}}x1

@@ -360,6 +380,8 @@ eval instant at 10m histogram_stddev(histogram_stddev_stdvar_7)
 eval instant at 10m histogram_stdvar(histogram_stddev_stdvar_7)
   {} NaN

+clear
+
 # Apply quantile function to histogram with all positive buckets with zero bucket.
 load 10m
   histogram_quantile_1 {{schema:0 count:12 sum:100 z_bucket:2 z_bucket_w:0.001 buckets:[2 3 0 1 4]}}x1

@@ -391,6 +413,8 @@ eval instant at 10m histogram_quantile(0, histogram_quantile_1)
 eval instant at 10m histogram_quantile(-1, histogram_quantile_1)
   {} -Inf

+clear
+
 # Apply quantile function to histogram with all negative buckets with zero bucket.
 load 10m
   histogram_quantile_2 {{schema:0 count:12 sum:100 z_bucket:2 z_bucket_w:0.001 n_buckets:[2 3 0 1 4]}}x1

@@ -419,6 +443,8 @@ eval instant at 10m histogram_quantile(0, histogram_quantile_2)
 eval instant at 10m histogram_quantile(-1, histogram_quantile_2)
   {} -Inf

+clear
+
 # Apply quantile function to histogram with both positive and negative buckets with zero bucket.
 load 10m
   histogram_quantile_3 {{schema:0 count:24 sum:100 z_bucket:4 z_bucket_w:0.001 buckets:[2 3 0 1 4] n_buckets:[2 3 0 1 4]}}x1

@@ -462,6 +488,8 @@ eval instant at 10m histogram_quantile(0, histogram_quantile_3)
 eval instant at 10m histogram_quantile(-1, histogram_quantile_3)
   {} -Inf

+clear
+
 # Apply fraction function to empty histogram.
 load 10m
   histogram_fraction_1 {{}}x1

@@ -469,6 +497,8 @@ load 10m
 eval instant at 10m histogram_fraction(3.1415, 42, histogram_fraction_1)
   {} NaN

+clear
+
 # Apply fraction function to histogram with positive and zero buckets.
 load 10m
   histogram_fraction_2 {{schema:0 count:12 sum:100 z_bucket:2 z_bucket_w:0.001 buckets:[2 3 0 1 4]}}x1

@@ -633,6 +663,8 @@ eval instant at 10m histogram_fraction(NaN, NaN, histogram_fraction_3)
 eval instant at 10m histogram_fraction(-Inf, +Inf, histogram_fraction_3)
   {} 1

+clear
+
 # Apply fraction function to histogram with both positive, negative and zero buckets.
 load 10m
   histogram_fraction_4 {{schema:0 count:24 sum:100 z_bucket:4 z_bucket_w:0.001 buckets:[2 3 0 1 4] n_buckets:[2 3 0 1 4]}}x1
promql/promqltest/testdata/operators.test (66 lines changed, vendored)

@@ -308,65 +308,65 @@ load 5m
   threshold{instance="abc",job="node",target="a@b.com"} 0

 # Copy machine role to node variable.
-eval instant at 5m node_role * on (instance) group_right (role) node_var
+eval instant at 1m node_role * on (instance) group_right (role) node_var
   {instance="abc",job="node",role="prometheus"} 2

-eval instant at 5m node_var * on (instance) group_left (role) node_role
+eval instant at 1m node_var * on (instance) group_left (role) node_role
   {instance="abc",job="node",role="prometheus"} 2

-eval instant at 5m node_var * ignoring (role) group_left (role) node_role
+eval instant at 1m node_var * ignoring (role) group_left (role) node_role
   {instance="abc",job="node",role="prometheus"} 2

-eval instant at 5m node_role * ignoring (role) group_right (role) node_var
+eval instant at 1m node_role * ignoring (role) group_right (role) node_var
   {instance="abc",job="node",role="prometheus"} 2

 # Copy machine role to node variable with instrumentation labels.
-eval instant at 5m node_cpu * ignoring (role, mode) group_left (role) node_role
+eval instant at 1m node_cpu * ignoring (role, mode) group_left (role) node_role
   {instance="abc",job="node",mode="idle",role="prometheus"} 3
   {instance="abc",job="node",mode="user",role="prometheus"} 1

-eval instant at 5m node_cpu * on (instance) group_left (role) node_role
+eval instant at 1m node_cpu * on (instance) group_left (role) node_role
   {instance="abc",job="node",mode="idle",role="prometheus"} 3
   {instance="abc",job="node",mode="user",role="prometheus"} 1


 # Ratio of total.
-eval instant at 5m node_cpu / on (instance) group_left sum by (instance,job)(node_cpu)
+eval instant at 1m node_cpu / on (instance) group_left sum by (instance,job)(node_cpu)
   {instance="abc",job="node",mode="idle"} .75
   {instance="abc",job="node",mode="user"} .25
   {instance="def",job="node",mode="idle"} .80
   {instance="def",job="node",mode="user"} .20

-eval instant at 5m sum by (mode, job)(node_cpu) / on (job) group_left sum by (job)(node_cpu)
+eval instant at 1m sum by (mode, job)(node_cpu) / on (job) group_left sum by (job)(node_cpu)
   {job="node",mode="idle"} 0.7857142857142857
   {job="node",mode="user"} 0.21428571428571427

-eval instant at 5m sum(sum by (mode, job)(node_cpu) / on (job) group_left sum by (job)(node_cpu))
+eval instant at 1m sum(sum by (mode, job)(node_cpu) / on (job) group_left sum by (job)(node_cpu))
   {} 1.0


-eval instant at 5m node_cpu / ignoring (mode) group_left sum without (mode)(node_cpu)
+eval instant at 1m node_cpu / ignoring (mode) group_left sum without (mode)(node_cpu)
   {instance="abc",job="node",mode="idle"} .75
   {instance="abc",job="node",mode="user"} .25
   {instance="def",job="node",mode="idle"} .80
   {instance="def",job="node",mode="user"} .20

-eval instant at 5m node_cpu / ignoring (mode) group_left(dummy) sum without (mode)(node_cpu)
+eval instant at 1m node_cpu / ignoring (mode) group_left(dummy) sum without (mode)(node_cpu)
   {instance="abc",job="node",mode="idle"} .75
   {instance="abc",job="node",mode="user"} .25
   {instance="def",job="node",mode="idle"} .80
   {instance="def",job="node",mode="user"} .20

-eval instant at 5m sum without (instance)(node_cpu) / ignoring (mode) group_left sum without (instance, mode)(node_cpu)
+eval instant at 1m sum without (instance)(node_cpu) / ignoring (mode) group_left sum without (instance, mode)(node_cpu)
   {job="node",mode="idle"} 0.7857142857142857
   {job="node",mode="user"} 0.21428571428571427

-eval instant at 5m sum(sum without (instance)(node_cpu) / ignoring (mode) group_left sum without (instance, mode)(node_cpu))
+eval instant at 1m sum(sum without (instance)(node_cpu) / ignoring (mode) group_left sum without (instance, mode)(node_cpu))
   {} 1.0


 # Copy over label from metric with no matching labels, without having to list cross-job target labels ('job' here).
-eval instant at 5m node_cpu + on(dummy) group_left(foo) random*0
+eval instant at 1m node_cpu + on(dummy) group_left(foo) random*0
   {instance="abc",job="node",mode="idle",foo="bar"} 3
   {instance="abc",job="node",mode="user",foo="bar"} 1
   {instance="def",job="node",mode="idle",foo="bar"} 8

@@ -374,12 +374,12 @@ eval instant at 5m node_cpu + on(dummy) group_left(foo) random*0


 # Use threshold from metric, and copy over target.
-eval instant at 5m node_cpu > on(job, instance) group_left(target) threshold
+eval instant at 1m node_cpu > on(job, instance) group_left(target) threshold
   node_cpu{instance="abc",job="node",mode="idle",target="a@b.com"} 3
   node_cpu{instance="abc",job="node",mode="user",target="a@b.com"} 1

 # Use threshold from metric, and a default (1) if it's not present.
-eval instant at 5m node_cpu > on(job, instance) group_left(target) (threshold or on (job, instance) (sum by (job, instance)(node_cpu) * 0 + 1))
+eval instant at 1m node_cpu > on(job, instance) group_left(target) (threshold or on (job, instance) (sum by (job, instance)(node_cpu) * 0 + 1))
   node_cpu{instance="abc",job="node",mode="idle",target="a@b.com"} 3
   node_cpu{instance="abc",job="node",mode="user",target="a@b.com"} 1
   node_cpu{instance="def",job="node",mode="idle"} 8

@@ -387,37 +387,37 @@ eval instant at 5m node_cpu > on(job, instance) group_left(target) (threshold or


 # Check that binops drop the metric name.
-eval instant at 5m node_cpu + 2
+eval instant at 1m node_cpu + 2
   {instance="abc",job="node",mode="idle"} 5
   {instance="abc",job="node",mode="user"} 3
   {instance="def",job="node",mode="idle"} 10
   {instance="def",job="node",mode="user"} 4

-eval instant at 5m node_cpu - 2
+eval instant at 1m node_cpu - 2
   {instance="abc",job="node",mode="idle"} 1
   {instance="abc",job="node",mode="user"} -1
   {instance="def",job="node",mode="idle"} 6
   {instance="def",job="node",mode="user"} 0

-eval instant at 5m node_cpu / 2
+eval instant at 1m node_cpu / 2
   {instance="abc",job="node",mode="idle"} 1.5
   {instance="abc",job="node",mode="user"} 0.5
   {instance="def",job="node",mode="idle"} 4
   {instance="def",job="node",mode="user"} 1

-eval instant at 5m node_cpu * 2
+eval instant at 1m node_cpu * 2
   {instance="abc",job="node",mode="idle"} 6
   {instance="abc",job="node",mode="user"} 2
   {instance="def",job="node",mode="idle"} 16
   {instance="def",job="node",mode="user"} 4

-eval instant at 5m node_cpu ^ 2
+eval instant at 1m node_cpu ^ 2
   {instance="abc",job="node",mode="idle"} 9
   {instance="abc",job="node",mode="user"} 1
   {instance="def",job="node",mode="idle"} 64
   {instance="def",job="node",mode="user"} 4

-eval instant at 5m node_cpu % 2
+eval instant at 1m node_cpu % 2
   {instance="abc",job="node",mode="idle"} 1
   {instance="abc",job="node",mode="user"} 1
   {instance="def",job="node",mode="idle"} 0

@@ -432,14 +432,14 @@ load 5m
   metricB{baz="meh"} 4

 # On with no labels, for metrics with no common labels.
-eval instant at 5m random + on() metricA
+eval instant at 1m random + on() metricA
   {} 5

 # Ignoring with no labels is the same as no ignoring.
-eval instant at 5m metricA + ignoring() metricB
+eval instant at 1m metricA + ignoring() metricB
   {baz="meh"} 7

-eval instant at 5m metricA + metricB
+eval instant at 1m metricA + metricB
   {baz="meh"} 7

 clear

@@ -457,16 +457,16 @@ load 5m
   test_total{instance="localhost"} 50
   test_smaller{instance="localhost"} 10

-eval instant at 5m test_total > bool test_smaller
+eval instant at 1m test_total > bool test_smaller
   {instance="localhost"} 1

-eval instant at 5m test_total > test_smaller
+eval instant at 1m test_total > test_smaller
   test_total{instance="localhost"} 50

-eval instant at 5m test_total < bool test_smaller
+eval instant at 1m test_total < bool test_smaller
   {instance="localhost"} 0

-eval instant at 5m test_total < test_smaller
+eval instant at 1m test_total < test_smaller

 clear

@@ -476,14 +476,14 @@ load 5m
   trigx{} 20
   trigNaN{} NaN

-eval instant at 5m trigy atan2 trigx
+eval instant at 1m trigy atan2 trigx
   {} 0.4636476090008061

-eval instant at 5m trigy atan2 trigNaN
+eval instant at 1m trigy atan2 trigNaN
   {} NaN

-eval instant at 5m 10 atan2 20
+eval instant at 1m 10 atan2 20
   0.4636476090008061

-eval instant at 5m 10 atan2 NaN
+eval instant at 1m 10 atan2 NaN
   NaN
promql/promqltest/testdata/staleness.test (8 lines changed, vendored)

@@ -14,10 +14,10 @@ eval instant at 40s metric
   {__name__="metric"} 2

 # It goes stale 5 minutes after the last sample.
-eval instant at 330s metric
+eval instant at 329s metric
   {__name__="metric"} 2

-eval instant at 331s metric
+eval instant at 330s metric


 # Range vector ignores stale sample.

@@ -47,7 +47,7 @@ eval instant at 0s metric
 eval instant at 150s metric
   {__name__="metric"} 0

-eval instant at 300s metric
+eval instant at 299s metric
   {__name__="metric"} 0

-eval instant at 301s metric
+eval instant at 300s metric
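These staleness timings follow directly from the left-open window: the first hunk's last sample sits at 30s (inferred from the test's boundary values), so with the default 5m lookback an instant query at time T still sees it only while 30s > T-300s, i.e. up to and including 329s but no longer at 330s. A quick check of that arithmetic (illustrative sketch, sample time assumed as above):

    package main

    import "fmt"

    func main() {
        last := int64(30)      // s: last sample time implied by the test above
        lookback := int64(300) // s: default 5m lookback delta
        for _, t := range []int64{329, 330} {
            visible := last > t-lookback // left-open window (t-lookback, t]
            fmt.Printf("eval at %ds: sample visible = %v\n", t, visible)
        }
    }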
promql/promqltest/testdata/subquery.test (2 lines changed, vendored)

@@ -14,7 +14,7 @@ eval instant at 5m sum_over_time(metric[50s:10s])

 # Series becomes stale at 5m10s (5m after last sample)
 # Hence subquery gets a single sample at 5m10s.
-eval instant at 6m sum_over_time(metric[60s:10s])
+eval instant at 5m59s sum_over_time(metric[60s:10s])
   {} 2

 eval instant at 10s rate(metric[20s:10s])
promql/promqltest/testdata/trig_functions.test (36 lines changed, vendored)

@@ -5,92 +5,92 @@ load 5m
   trig{l="y"} 20
   trig{l="NaN"} NaN

-eval instant at 5m sin(trig)
+eval instant at 1m sin(trig)
   {l="x"} -0.5440211108893699
   {l="y"} 0.9129452507276277
   {l="NaN"} NaN

-eval instant at 5m cos(trig)
+eval instant at 1m cos(trig)
   {l="x"} -0.8390715290764524
   {l="y"} 0.40808206181339196
   {l="NaN"} NaN

-eval instant at 5m tan(trig)
+eval instant at 1m tan(trig)
   {l="x"} 0.6483608274590867
   {l="y"} 2.2371609442247427
   {l="NaN"} NaN

-eval instant at 5m asin(trig - 10.1)
+eval instant at 1m asin(trig - 10.1)
   {l="x"} -0.10016742116155944
   {l="y"} NaN
   {l="NaN"} NaN

-eval instant at 5m acos(trig - 10.1)
+eval instant at 1m acos(trig - 10.1)
   {l="x"} 1.670963747956456
   {l="y"} NaN
   {l="NaN"} NaN

-eval instant at 5m atan(trig)
+eval instant at 1m atan(trig)
   {l="x"} 1.4711276743037345
   {l="y"} 1.5208379310729538
   {l="NaN"} NaN

-eval instant at 5m sinh(trig)
+eval instant at 1m sinh(trig)
   {l="x"} 11013.232920103324
   {l="y"} 2.4258259770489514e+08
   {l="NaN"} NaN

-eval instant at 5m cosh(trig)
+eval instant at 1m cosh(trig)
   {l="x"} 11013.232920103324
   {l="y"} 2.4258259770489514e+08
   {l="NaN"} NaN

-eval instant at 5m tanh(trig)
+eval instant at 1m tanh(trig)
   {l="x"} 0.9999999958776927
   {l="y"} 1
   {l="NaN"} NaN

-eval instant at 5m asinh(trig)
+eval instant at 1m asinh(trig)
   {l="x"} 2.99822295029797
   {l="y"} 3.6895038689889055
   {l="NaN"} NaN

-eval instant at 5m acosh(trig)
+eval instant at 1m acosh(trig)
   {l="x"} 2.993222846126381
   {l="y"} 3.6882538673612966
   {l="NaN"} NaN

-eval instant at 5m atanh(trig - 10.1)
+eval instant at 1m atanh(trig - 10.1)
   {l="x"} -0.10033534773107522
   {l="y"} NaN
   {l="NaN"} NaN

-eval instant at 5m rad(trig)
+eval instant at 1m rad(trig)
   {l="x"} 0.17453292519943295
   {l="y"} 0.3490658503988659
   {l="NaN"} NaN

-eval instant at 5m rad(trig - 10)
+eval instant at 1m rad(trig - 10)
   {l="x"} 0
   {l="y"} 0.17453292519943295
   {l="NaN"} NaN

-eval instant at 5m rad(trig - 20)
+eval instant at 1m rad(trig - 20)
   {l="x"} -0.17453292519943295
   {l="y"} 0
   {l="NaN"} NaN

-eval instant at 5m deg(trig)
+eval instant at 1m deg(trig)
   {l="x"} 572.9577951308232
   {l="y"} 1145.9155902616465
   {l="NaN"} NaN

-eval instant at 5m deg(trig - 10)
+eval instant at 1m deg(trig - 10)
   {l="x"} 0
   {l="y"} 572.9577951308232
   {l="NaN"} NaN

-eval instant at 5m deg(trig - 20)
+eval instant at 1m deg(trig - 20)
   {l="x"} -572.9577951308232
   {l="y"} 0
   {l="NaN"} NaN