Mirror of https://github.com/prometheus/prometheus.git, synced 2024-11-09 23:24:05 -08:00
promql: Add NHCB tests
This adds equivalent NHCB tests to the existing classic histogram tests.

Signed-off-by: beorn7 <beorn@grafana.com>
This commit is contained in:
parent bab098a4c1
commit c39776c5b5
promql/promqltest/testdata/histograms.test (vendored): 185 changed lines
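
For orientation, a minimal sketch of the pattern the added tests follow, in the promqltest format this file uses: load_with_nhcb ingests classic _bucket series and additionally synthesizes the equivalent native histogram with custom buckets (NHCB) under the base metric name, so each query can be evaluated in both forms. The series name example_duration_seconds, its sample values, and the expected 0.2 results are illustrative assumptions, not part of the commit; the actual tests in the diff below use testhistogram*, request_duration_seconds*, and friends.

load_with_nhcb 5m
    example_duration_seconds_bucket{le="0.1"} 0+1x10
    example_duration_seconds_bucket{le="0.2"} 0+2x10
    example_duration_seconds_bucket{le="+Inf"} 0+4x10

# Native form: query the synthesized NHCB directly; no le label is involved.
eval instant at 50m histogram_quantile(0.5, example_duration_seconds)
    {} 0.2

# Classic form: the same quantile estimated from the raw _bucket series.
eval instant at 50m histogram_quantile(0.5, example_duration_seconds_bucket)
    {} 0.2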
@@ -73,22 +73,32 @@ eval instant at 50m histogram_count(testhistogram3)
    {start="positive"} 110
    {start="negative"} 20

# Classic way of accessing the count still works.
eval instant at 50m testhistogram3_count
    testhistogram3_count{start="positive"} 110
    testhistogram3_count{start="negative"} 20

# Test histogram_sum.
eval instant at 50m histogram_sum(testhistogram3)
    {start="positive"} 330
    {start="negative"} 80

# Test histogram_avg.
# Classic way of accessing the sum still works.
eval instant at 50m testhistogram3_sum
    testhistogram3_sum{start="positive"} 330
    testhistogram3_sum{start="negative"} 80

# Test histogram_avg. This has no classic equivalent.
eval instant at 50m histogram_avg(testhistogram3)
    {start="positive"} 3
    {start="negative"} 4

# Test histogram_stddev.
# Test histogram_stddev. This has no classic equivalent.
eval instant at 50m histogram_stddev(testhistogram3)
    {start="positive"} 2.8189265757336734
    {start="negative"} 4.182715937754936

# Test histogram_stdvar.
# Test histogram_stdvar. This has no classic equivalent.
eval instant at 50m histogram_stdvar(testhistogram3)
    {start="positive"} 7.946347039377573
    {start="negative"} 17.495112615949154

@@ -103,137 +113,282 @@ eval instant at 50m histogram_fraction(0, 0.2, rate(testhistogram3[5m]))
    {start="positive"} 0.6363636363636364
    {start="negative"} 0

# Test histogram_quantile.
# In the classic histogram, we can access the corresponding bucket (if
# it exists) and divide by the count to get the same result.

eval instant at 50m testhistogram3_bucket{le=".2"} / ignoring(le) testhistogram3_count
    {start="positive"} 0.6363636363636364

eval instant at 50m rate(testhistogram3_bucket{le=".2"}[5m]) / ignoring(le) rate(testhistogram3_count[5m])
    {start="positive"} 0.6363636363636364

# Test histogram_quantile, native and classic.

eval instant at 50m histogram_quantile(0, testhistogram3)
    {start="positive"} 0
    {start="negative"} -0.25

eval instant at 50m histogram_quantile(0, testhistogram3_bucket)
    {start="positive"} 0
    {start="negative"} -0.25

eval instant at 50m histogram_quantile(0.25, testhistogram3)
    {start="positive"} 0.055
    {start="negative"} -0.225

eval instant at 50m histogram_quantile(0.25, testhistogram3_bucket)
    {start="positive"} 0.055
    {start="negative"} -0.225

eval instant at 50m histogram_quantile(0.5, testhistogram3)
    {start="positive"} 0.125
    {start="negative"} -0.2

eval instant at 50m histogram_quantile(0.5, testhistogram3_bucket)
    {start="positive"} 0.125
    {start="negative"} -0.2

eval instant at 50m histogram_quantile(0.75, testhistogram3)
    {start="positive"} 0.45
    {start="negative"} -0.15

eval instant at 50m histogram_quantile(0.75, testhistogram3_bucket)
    {start="positive"} 0.45
    {start="negative"} -0.15

eval instant at 50m histogram_quantile(1, testhistogram3)
    {start="positive"} 1
    {start="negative"} -0.1

eval instant at 50m histogram_quantile(1, testhistogram3_bucket)
    {start="positive"} 1
    {start="negative"} -0.1

# Quantile too low.

eval_warn instant at 50m histogram_quantile(-0.1, testhistogram)
    {start="positive"} -Inf
    {start="negative"} -Inf

eval_warn instant at 50m histogram_quantile(-0.1, testhistogram_bucket)
    {start="positive"} -Inf
    {start="negative"} -Inf

# Quantile too high.

eval_warn instant at 50m histogram_quantile(1.01, testhistogram)
    {start="positive"} +Inf
    {start="negative"} +Inf

eval_warn instant at 50m histogram_quantile(1.01, testhistogram_bucket)
    {start="positive"} +Inf
    {start="negative"} +Inf

# Quantile invalid.

eval_warn instant at 50m histogram_quantile(NaN, testhistogram)
    {start="positive"} NaN
    {start="negative"} NaN

eval_warn instant at 50m histogram_quantile(NaN, testhistogram_bucket)
    {start="positive"} NaN
    {start="negative"} NaN

# Quantile value in lowest bucket.

eval instant at 50m histogram_quantile(0, testhistogram)
    {start="positive"} 0
    {start="negative"} -0.2

eval instant at 50m histogram_quantile(0, testhistogram_bucket)
    {start="positive"} 0
    {start="negative"} -0.2

# Quantile value in highest bucket.

eval instant at 50m histogram_quantile(1, testhistogram)
    {start="positive"} 1
    {start="negative"} 0.3

eval instant at 50m histogram_quantile(1, testhistogram_bucket)
    {start="positive"} 1
    {start="negative"} 0.3

# Finally some useful quantiles.

eval instant at 50m histogram_quantile(0.2, testhistogram)
    {start="positive"} 0.048
    {start="negative"} -0.2

eval instant at 50m histogram_quantile(0.2, testhistogram_bucket)
    {start="positive"} 0.048
    {start="negative"} -0.2

eval instant at 50m histogram_quantile(0.5, testhistogram)
    {start="positive"} 0.15
    {start="negative"} -0.15

eval instant at 50m histogram_quantile(0.5, testhistogram_bucket)
    {start="positive"} 0.15
    {start="negative"} -0.15

eval instant at 50m histogram_quantile(0.8, testhistogram)
    {start="positive"} 0.72
    {start="negative"} 0.3

eval instant at 50m histogram_quantile(0.8, testhistogram_bucket)
    {start="positive"} 0.72
    {start="negative"} 0.3

# More realistic with rates.

eval instant at 50m histogram_quantile(0.2, rate(testhistogram[5m]))
    {start="positive"} 0.048
    {start="negative"} -0.2

eval instant at 50m histogram_quantile(0.2, rate(testhistogram_bucket[5m]))
    {start="positive"} 0.048
    {start="negative"} -0.2

eval instant at 50m histogram_quantile(0.5, rate(testhistogram[5m]))
    {start="positive"} 0.15
    {start="negative"} -0.15

eval instant at 50m histogram_quantile(0.5, rate(testhistogram_bucket[5m]))
    {start="positive"} 0.15
    {start="negative"} -0.15

eval instant at 50m histogram_quantile(0.8, rate(testhistogram[5m]))
    {start="positive"} 0.72
    {start="negative"} 0.3

eval instant at 50m histogram_quantile(0.8, rate(testhistogram_bucket[5m]))
    {start="positive"} 0.72
    {start="negative"} 0.3

# Want results exactly in the middle of the bucket.

eval instant at 7m histogram_quantile(1./6., testhistogram2)
    {} 1

eval instant at 7m histogram_quantile(1./6., testhistogram2_bucket)
    {} 1

eval instant at 7m histogram_quantile(0.5, testhistogram2)
    {} 3

eval instant at 7m histogram_quantile(0.5, testhistogram2_bucket)
    {} 3

eval instant at 7m histogram_quantile(5./6., testhistogram2)
    {} 5

eval instant at 7m histogram_quantile(5./6., testhistogram2_bucket)
    {} 5

eval instant at 47m histogram_quantile(1./6., rate(testhistogram2[15m]))
    {} 1

eval instant at 47m histogram_quantile(1./6., rate(testhistogram2_bucket[15m]))
    {} 1

eval instant at 47m histogram_quantile(0.5, rate(testhistogram2[15m]))
    {} 3

eval instant at 47m histogram_quantile(0.5, rate(testhistogram2_bucket[15m]))
    {} 3

eval instant at 47m histogram_quantile(5./6., rate(testhistogram2[15m]))
    {} 5

eval instant at 47m histogram_quantile(5./6., rate(testhistogram2_bucket[15m]))
    {} 5

# Aggregated histogram: Everything in one.
# Aggregated histogram: Everything in one. Note how native histograms
# don't require aggregation by le.

eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds[5m])))
    {} 0.075

eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le))
    {} 0.075

eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds[5m])))
    {} 0.1277777777777778

eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le))
    {} 0.1277777777777778

# Aggregated histogram: Everything in one. Now with avg, which does not change anything.

eval instant at 50m histogram_quantile(0.3, avg(rate(request_duration_seconds[5m])))
    {} 0.075

eval instant at 50m histogram_quantile(0.3, avg(rate(request_duration_seconds_bucket[5m])) by (le))
    {} 0.075

eval instant at 50m histogram_quantile(0.5, avg(rate(request_duration_seconds[5m])))
    {} 0.12777777777777778

eval instant at 50m histogram_quantile(0.5, avg(rate(request_duration_seconds_bucket[5m])) by (le))
    {} 0.12777777777777778

# Aggregated histogram: By instance.

eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds[5m])) by (instance))
    {instance="ins1"} 0.075
    {instance="ins2"} 0.075

eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le, instance))
    {instance="ins1"} 0.075
    {instance="ins2"} 0.075

eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds[5m])) by (instance))
    {instance="ins1"} 0.1333333333
    {instance="ins2"} 0.125

eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le, instance))
    {instance="ins1"} 0.1333333333
    {instance="ins2"} 0.125

# Aggregated histogram: By job.

eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds[5m])) by (job))
    {job="job1"} 0.1
    {job="job2"} 0.0642857142857143

eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le, job))
    {job="job1"} 0.1
    {job="job2"} 0.0642857142857143

eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds[5m])) by (job))
    {job="job1"} 0.14
    {job="job2"} 0.1125

eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le, job))
    {job="job1"} 0.14
    {job="job2"} 0.1125

# Aggregated histogram: By job and instance.

eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds[5m])) by (job, instance))
    {instance="ins1", job="job1"} 0.11
    {instance="ins2", job="job1"} 0.09
    {instance="ins1", job="job2"} 0.06
    {instance="ins2", job="job2"} 0.0675

eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le, job, instance))
    {instance="ins1", job="job1"} 0.11
    {instance="ins2", job="job1"} 0.09
    {instance="ins1", job="job2"} 0.06
    {instance="ins2", job="job2"} 0.0675

eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds[5m])) by (job, instance))
    {instance="ins1", job="job1"} 0.15
    {instance="ins2", job="job1"} 0.1333333333333333
    {instance="ins1", job="job2"} 0.1
    {instance="ins2", job="job2"} 0.1166666666666667

eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le, job, instance))
    {instance="ins1", job="job1"} 0.15
    {instance="ins2", job="job1"} 0.1333333333333333

@@ -241,18 +396,32 @@ eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bu
    {instance="ins2", job="job2"} 0.1166666666666667

# The unaggregated histogram for comparison. Same result as the previous one.

eval instant at 50m histogram_quantile(0.3, rate(request_duration_seconds[5m]))
    {instance="ins1", job="job1"} 0.11
    {instance="ins2", job="job1"} 0.09
    {instance="ins1", job="job2"} 0.06
    {instance="ins2", job="job2"} 0.0675

eval instant at 50m histogram_quantile(0.3, rate(request_duration_seconds_bucket[5m]))
    {instance="ins1", job="job1"} 0.11
    {instance="ins2", job="job1"} 0.09
    {instance="ins1", job="job2"} 0.06
    {instance="ins2", job="job2"} 0.0675

eval instant at 50m histogram_quantile(0.5, rate(request_duration_seconds[5m]))
    {instance="ins1", job="job1"} 0.15
    {instance="ins2", job="job1"} 0.13333333333333333
    {instance="ins1", job="job2"} 0.1
    {instance="ins2", job="job2"} 0.11666666666666667

eval instant at 50m histogram_quantile(0.5, rate(request_duration_seconds_bucket[5m]))
    {instance="ins1", job="job1"} 0.15
    {instance="ins2", job="job1"} 0.13333333333333333
    {instance="ins1", job="job2"} 0.1
    {instance="ins2", job="job2"} 0.11666666666666667

# All NHCBs summed into one.
eval instant at 50m sum(request_duration_seconds)
    {} {{schema:-53 count:250 custom_values:[0.1 0.2] buckets:[100 90 60]}}

@@ -303,11 +472,13 @@ load_with_nhcb 5m
eval instant at 50m histogram_quantile(0.2, rate(empty_bucket[5m]))
    {instance="ins1", job="job1"} NaN

# Load a duplicate histogram with a different name to test failure scenario on multiple histograms with the same label set
# Load a duplicate histogram with a different name to test failure scenario on multiple histograms with the same label set.
# https://github.com/prometheus/prometheus/issues/9910
load_with_nhcb 5m
    request_duration_seconds2_bucket{job="job1", instance="ins1", le="0.1"} 0+1x10
    request_duration_seconds2_bucket{job="job1", instance="ins1", le="0.2"} 0+3x10
    request_duration_seconds2_bucket{job="job1", instance="ins1", le="+Inf"} 0+4x10

eval_fail instant at 50m histogram_quantile(0.99, {__name__=~"request_duration_seconds\\d*_bucket$"})
eval_fail instant at 50m histogram_quantile(0.99, {__name__=~"request_duration_seconds\\d*_bucket"})

eval_fail instant at 50m histogram_quantile(0.99, {__name__=~"request_duration_seconds\\d*"})
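
As a closing cross-check (an editor's illustration, not part of the commit): histogram_avg is the sum of observations divided by their count, so the histogram_avg expectations near the top of the diff follow directly from the histogram_sum and histogram_count expectations for testhistogram3. The same division can also be written as an explicit query:

# histogram_avg(v) is equivalent to histogram_sum(v) / histogram_count(v):
#   start="positive": 330 / 110 = 3
#   start="negative":  80 / 20  = 4
eval instant at 50m histogram_sum(testhistogram3) / histogram_count(testhistogram3)
    {start="positive"} 3
    {start="negative"} 4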