[nhcb branch] Add basic unit tests for native histograms with custom buckets converted from classic histograms (#13794)
* modify unit test framework to automatically generate native histograms with custom buckets from classic histogram series
* add very basic tests for classic histogram converted into native histogram with custom bounds
* fix histogram_quantile for native histograms with custom buckets
* make loading with nhcb explicit
* evaluate native histograms with custom buckets on queries with explicit keyword
* use regex replacer
* use temp histogram struct for automatically loading converted nhcb

Signed-off-by: Jeanette Tan <jeanette.tan@grafana.com>
Signed-off-by: George Krajcsovits <krajorama@users.noreply.github.com>
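For orientation, the two new keywords look like this in a test script (a minimal sketch assembled from the syntax introduced below; the series and expected value are illustrative, not taken from the commit):

    load_with_nhcb 5m
        example_bucket{le="0.1"} 0+1x10
        example_bucket{le="+Inf"} 0+4x10

    eval_with_nhcb instant at 50m histogram_quantile(0.9, rate(example_bucket[5m]))
        {} 0.1

load_with_nhcb stores the classic _bucket series as written and additionally an equivalent native histogram with custom buckets under the base metric name; eval_with_nhcb runs the query as written and then once more with the _bucket suffix and le grouping stripped, expecting identical results from the converted histogram.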
This commit is contained in:
  parent 2a4aa085d2
  commit 81862aabd7
@@ -34,6 +34,7 @@ var (
 	ErrHistogramSpansBucketsMismatch = errors.New("histogram spans specify different number of buckets than provided")
 	ErrHistogramCustomBucketsMismatch = errors.New("histogram custom bounds are too few")
 	ErrHistogramCustomBucketsInvalid = errors.New("histogram custom bounds must be in strictly increasing order")
+	ErrHistogramCustomBucketsInfinite = errors.New("histogram custom bounds must be finite")
 	ErrHistogramsIncompatibleSchema = errors.New("cannot apply this operation on histograms with a mix of exponential and custom bucket schemas")
 	ErrHistogramsIncompatibleBounds = errors.New("cannot apply this operation on custom buckets histograms with different custom bounds")
 )
@@ -426,6 +427,9 @@ func checkHistogramCustomBounds(bounds []float64, spans []Span, numBuckets int)
 		}
 		prev = curr
 	}
+	if prev == math.Inf(1) {
+		return fmt.Errorf("last +Inf bound must not be explicitly defined: %w", ErrHistogramCustomBucketsInfinite)
+	}
 
 	var spanBuckets int
 	var totalSpanLength int
@@ -205,12 +205,15 @@ func histogramQuantile(q float64, h *histogram.FloatHistogram) float64 {
 	for it.Next() {
 		bucket = it.At()
+		if bucket.Count == 0 {
+			continue
+		}
 		count += bucket.Count
 		if count >= rank {
 			break
 		}
 	}
-	if bucket.Lower < 0 && bucket.Upper > 0 {
+	if !h.UsesCustomBuckets() && bucket.Lower < 0 && bucket.Upper > 0 {
 		switch {
 		case len(h.NegativeBuckets) == 0 && len(h.PositiveBuckets) > 0:
 			// The result is in the zero bucket and the histogram has only
@@ -221,6 +224,17 @@ func histogramQuantile(q float64, h *histogram.FloatHistogram) float64 {
 			// negative buckets. So we consider 0 to be the upper bound.
 			bucket.Upper = 0
 		}
+	} else if h.UsesCustomBuckets() {
+		if bucket.Lower == math.Inf(-1) {
+			// first bucket, with lower bound -Inf
+			if bucket.Upper <= 0 {
+				return bucket.Upper
+			}
+			bucket.Lower = 0
+		} else if bucket.Upper == math.Inf(1) {
+			// last bucket, with upper bound +Inf
+			return bucket.Lower
+		}
 	}
 	// Due to numerical inaccuracies, we could end up with a higher count
 	// than h.Count. Thus, make sure count is never higher than h.Count.
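The hunk above gives custom-bucket histograms their own boundary handling: an NHCB has no zero bucket, its first bucket conceptually starts at -Inf, and its last bucket ends at +Inf. A standalone Go sketch of just that clamping rule (illustrative names, not Prometheus code):

package main

import (
	"fmt"
	"math"
)

// clampCustomBucket mirrors the new NHCB branch of histogramQuantile: a
// quantile landing in an open-ended edge bucket resolves to the nearest
// finite bound instead of being interpolated. It returns either a final
// value (done == true) or clamped bounds to interpolate within.
func clampCustomBucket(lower, upper float64) (value, lo, hi float64, done bool) {
	switch {
	case math.IsInf(lower, -1) && upper <= 0:
		// First bucket, entirely non-positive: the quantile is its upper bound.
		return upper, 0, 0, true
	case math.IsInf(lower, -1):
		// First bucket with a positive upper bound: clamp the lower bound to 0.
		return 0, 0, upper, false
	case math.IsInf(upper, 1):
		// Last bucket: the quantile is its lower bound.
		return lower, 0, 0, true
	default:
		return 0, lower, upper, false
	}
}

func main() {
	v, _, _, done := clampCustomBucket(0.2, math.Inf(1))
	fmt.Println(v, done) // 0.2 true
}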
promql/test.go (249 lines changed)
@@ -20,6 +20,7 @@ import (
 	"fmt"
 	"io/fs"
 	"math"
+	"sort"
 	"strconv"
 	"strings"
 	"testing"
@@ -44,9 +45,34 @@ var (
 	minNormal = math.Float64frombits(0x0010000000000000) // The smallest positive normal value of type float64.
 
 	patSpace = regexp.MustCompile("[\t ]+")
-	patLoad = regexp.MustCompile(`^load\s+(.+?)$`)
-	patEvalInstant = regexp.MustCompile(`^eval(?:_(fail|ordered))?\s+instant\s+(?:at\s+(.+?))?\s+(.+)$`)
+	patLoad = regexp.MustCompile(`^load(?:_(with_nhcb))?\s+(.+?)$`)
+	patEvalInstant = regexp.MustCompile(`^eval(?:_(with_nhcb))?(?:_(fail|ordered))?\s+instant\s+(?:at\s+(.+?))?\s+(.+)$`)
 	patEvalRange = regexp.MustCompile(`^eval(?:_(fail))?\s+range\s+from\s+(.+)\s+to\s+(.+)\s+step\s+(.+?)\s+(.+)$`)
+	histogramBucketReplacements = []struct {
+		pattern *regexp.Regexp
+		repl    string
+	}{
+		{
+			pattern: regexp.MustCompile(`_bucket\b`),
+			repl:    "",
+		},
+		{
+			pattern: regexp.MustCompile(`\s+by\s+\(le\)`),
+			repl:    "",
+		},
+		{
+			pattern: regexp.MustCompile(`\(le,\s*`),
+			repl:    "(",
+		},
+		{
+			pattern: regexp.MustCompile(`,\s*le,\s*`),
+			repl:    ", ",
+		},
+		{
+			pattern: regexp.MustCompile(`,\s*le\)`),
+			repl:    ")",
+		},
+	}
 )
 
 const (
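To see what this replacement table does, here is a self-contained run of the five patterns over a representative query (the query is illustrative; the patterns are exactly the ones added above):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	replacements := []struct {
		pattern *regexp.Regexp
		repl    string
	}{
		{regexp.MustCompile(`_bucket\b`), ""},
		{regexp.MustCompile(`\s+by\s+\(le\)`), ""},
		{regexp.MustCompile(`\(le,\s*`), "("},
		{regexp.MustCompile(`,\s*le,\s*`), ", "},
		{regexp.MustCompile(`,\s*le\)`), ")"},
	}

	expr := `histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le, job))`
	for _, rep := range replacements {
		expr = rep.pattern.ReplaceAllString(expr, rep.repl)
	}
	// Prints: histogram_quantile(0.3, sum(rate(request_duration_seconds[5m])) by (job))
	fmt.Println(expr)
}

Dropping _bucket and the le label is exactly what turns a classic-histogram query into the equivalent query against the converted native histogram.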
@@ -163,15 +189,18 @@ func raise(line int, format string, v ...interface{}) error {
 
 func parseLoad(lines []string, i int) (int, *loadCmd, error) {
 	if !patLoad.MatchString(lines[i]) {
-		return i, nil, raise(i, "invalid load command. (load <step:duration>)")
+		return i, nil, raise(i, "invalid load command. (load[_with_nhcb] <step:duration>)")
 	}
 	parts := patLoad.FindStringSubmatch(lines[i])
-	gap, err := model.ParseDuration(parts[1])
+	var (
+		withNhcb = parts[1] == "with_nhcb"
+		step     = parts[2]
+	)
+	gap, err := model.ParseDuration(step)
 	if err != nil {
-		return i, nil, raise(i, "invalid step definition %q: %s", parts[1], err)
+		return i, nil, raise(i, "invalid step definition %q: %s", step, err)
 	}
-	cmd := newLoadCmd(time.Duration(gap))
+	cmd := newLoadCmd(time.Duration(gap), withNhcb)
 	for i+1 < len(lines) {
 		i++
 		defLine := lines[i]
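A quick standalone check of the new patLoad capture groups (group 1 is the optional with_nhcb flag, group 2 the step duration):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	patLoad := regexp.MustCompile(`^load(?:_(with_nhcb))?\s+(.+?)$`)
	// Group 1 captures "with_nhcb", group 2 captures "5m".
	fmt.Println(patLoad.FindStringSubmatch("load_with_nhcb 5m"))
	// A plain load still matches; group 1 is empty.
	fmt.Println(patLoad.FindStringSubmatch("load 5m"))
}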
@@ -204,17 +233,19 @@ func (t *test) parseEval(lines []string, i int) (int, *evalCmd, error) {
 	rangeParts := patEvalRange.FindStringSubmatch(lines[i])
 
 	if instantParts == nil && rangeParts == nil {
-		return i, nil, raise(i, "invalid evaluation command. Must be either 'eval[_fail|_ordered] instant [at <offset:duration>] <query>' or 'eval[_fail] range from <from> to <to> step <step> <query>'")
+		return i, nil, raise(i, "invalid evaluation command. Must be either 'eval[_with_nhcb][_fail|_ordered] instant [at <offset:duration>] <query>' or 'eval[_fail] range from <from> to <to> step <step> <query>'")
 	}
 
 	isInstant := instantParts != nil
 
+	var withNhcb bool
 	var mod string
 	var expr string
 
 	if isInstant {
-		mod = instantParts[1]
-		expr = instantParts[3]
+		withNhcb = instantParts[1] == "with_nhcb"
+		mod = instantParts[2]
+		expr = instantParts[4]
 	} else {
 		mod = rangeParts[1]
 		expr = rangeParts[5]
@@ -242,7 +273,7 @@ func (t *test) parseEval(lines []string, i int) (int, *evalCmd, error) {
 	var cmd *evalCmd
 
 	if isInstant {
-		at := instantParts[2]
+		at := instantParts[3]
 		offset, err := model.ParseDuration(at)
 		if err != nil {
 			return i, nil, formatErr("invalid timestamp definition %q: %s", at, err)
@@ -284,6 +315,7 @@ func (t *test) parseEval(lines []string, i int) (int, *evalCmd, error) {
 	case "fail":
 		cmd.fail = true
 	}
+	cmd.withNhcb = withNhcb
 
 	for j := 1; i+1 < len(lines); j++ {
 		i++
@@ -338,7 +370,7 @@ func (t *test) parse(input string) error {
 		switch c := strings.ToLower(patSpace.Split(l, 2)[0]); {
 		case c == "clear":
 			cmd = &clearCmd{}
-		case c == "load":
+		case strings.HasPrefix(c, "load"):
 			i, cmd, err = parseLoad(lines, i)
 		case strings.HasPrefix(c, "eval"):
 			i, cmd, err = t.parseEval(lines, i)
@@ -370,14 +402,16 @@ type loadCmd struct {
 	metrics map[uint64]labels.Labels
 	defs map[uint64][]Sample
 	exemplars map[uint64][]exemplar.Exemplar
+	withNhcb bool
 }
 
-func newLoadCmd(gap time.Duration) *loadCmd {
+func newLoadCmd(gap time.Duration, withNhcb bool) *loadCmd {
 	return &loadCmd{
 		gap:       gap,
 		metrics:   map[uint64]labels.Labels{},
 		defs:      map[uint64][]Sample{},
 		exemplars: map[uint64][]exemplar.Exemplar{},
+		withNhcb:  withNhcb,
 	}
 }
@@ -416,6 +450,167 @@ func (cmd *loadCmd) append(a storage.Appender) error {
 			}
 		}
 	}
+	if cmd.withNhcb {
+		return cmd.appendCustomHistogram(a)
+	}
+	return nil
+}
+
+func getHistogramMetricBase(m labels.Labels, suffix string) (labels.Labels, uint64) {
+	mName := m.Get(labels.MetricName)
+	baseM := labels.NewBuilder(m).
+		Set(labels.MetricName, strings.TrimSuffix(mName, suffix)).
+		Del(labels.BucketLabel).
+		Labels()
+	hash := baseM.Hash()
+	return baseM, hash
+}
+
+type tempHistogramWrapper struct {
+	metric      labels.Labels
+	upperBounds []float64
+	histByTs    map[int64]tempHistogram
+}
+
+func newTempHistogramWrapper() tempHistogramWrapper {
+	return tempHistogramWrapper{
+		upperBounds: []float64{},
+		histByTs:    map[int64]tempHistogram{},
+	}
+}
+
+type tempHistogram struct {
+	bucketCounts map[float64]float64
+	count        float64
+	sum          float64
+}
+
+func newTempHistogram() tempHistogram {
+	return tempHistogram{
+		bucketCounts: map[float64]float64{},
+	}
+}
+
+func processClassicHistogramSeries(m labels.Labels, suffix string, histMap map[uint64]tempHistogramWrapper, smpls []Sample, updateHistWrapper func(*tempHistogramWrapper), updateHist func(*tempHistogram, float64)) {
+	m2, m2hash := getHistogramMetricBase(m, suffix)
+	histWrapper, exists := histMap[m2hash]
+	if !exists {
+		histWrapper = newTempHistogramWrapper()
+	}
+	histWrapper.metric = m2
+	if updateHistWrapper != nil {
+		updateHistWrapper(&histWrapper)
+	}
+	for _, s := range smpls {
+		if s.H != nil {
+			continue
+		}
+		hist, exists := histWrapper.histByTs[s.T]
+		if !exists {
+			hist = newTempHistogram()
+		}
+		updateHist(&hist, s.F)
+		histWrapper.histByTs[s.T] = hist
+	}
+	histMap[m2hash] = histWrapper
+}
+
+func processUpperBoundsAndCreateBaseHistogram(upperBounds0 []float64) ([]float64, *histogram.FloatHistogram) {
+	sort.Float64s(upperBounds0)
+	upperBounds := make([]float64, 0, len(upperBounds0))
+	prevLe := math.Inf(-1)
+	for _, le := range upperBounds0 {
+		if le != prevLe { // deduplicate
+			upperBounds = append(upperBounds, le)
+			prevLe = le
+		}
+	}
+	var customBounds []float64
+	if upperBounds[len(upperBounds)-1] == math.Inf(1) {
+		customBounds = upperBounds[:len(upperBounds)-1]
+	} else {
+		customBounds = upperBounds
+	}
+	return upperBounds, &histogram.FloatHistogram{
+		Count:  0,
+		Sum:    0,
+		Schema: histogram.CustomBucketsSchema,
+		PositiveSpans: []histogram.Span{
+			{Offset: 0, Length: uint32(len(upperBounds))},
+		},
+		PositiveBuckets: make([]float64, len(upperBounds)),
+		CustomValues:    customBounds,
+	}
+}
+
+// If classic histograms are defined, convert them into native histograms with custom
+// bounds and append the defined time series to the storage.
+func (cmd *loadCmd) appendCustomHistogram(a storage.Appender) error {
+	histMap := map[uint64]tempHistogramWrapper{}
+
+	// Go through all the time series to collate classic histogram data
+	// and organise them by timestamp.
+	for hash, smpls := range cmd.defs {
+		m := cmd.metrics[hash]
+		mName := m.Get(labels.MetricName)
+		switch {
+		case strings.HasSuffix(mName, "_bucket") && m.Has(labels.BucketLabel):
+			le, err := strconv.ParseFloat(m.Get(labels.BucketLabel), 64)
+			if err != nil || math.IsNaN(le) {
+				continue
+			}
+			processClassicHistogramSeries(m, "_bucket", histMap, smpls, func(histWrapper *tempHistogramWrapper) {
+				histWrapper.upperBounds = append(histWrapper.upperBounds, le)
+			}, func(hist *tempHistogram, f float64) {
+				hist.bucketCounts[le] = f
+			})
+		case strings.HasSuffix(mName, "_count"):
+			processClassicHistogramSeries(m, "_count", histMap, smpls, nil, func(hist *tempHistogram, f float64) {
+				hist.count = f
+			})
+		case strings.HasSuffix(mName, "_sum"):
+			processClassicHistogramSeries(m, "_sum", histMap, smpls, nil, func(hist *tempHistogram, f float64) {
+				hist.sum = f
+			})
+		}
+	}
+
+	// Convert the collated classic histogram data into native histograms
+	// with custom bounds and append them to the storage.
+	for _, histWrapper := range histMap {
+		upperBounds, fhBase := processUpperBoundsAndCreateBaseHistogram(histWrapper.upperBounds)
+		samples := make([]Sample, 0, len(histWrapper.histByTs))
+		for t, hist := range histWrapper.histByTs {
+			fh := fhBase.Copy()
+			var prevCount, total float64
+			for i, le := range upperBounds {
+				currCount, exists := hist.bucketCounts[le]
+				if !exists {
+					currCount = 0
+				}
+				count := currCount - prevCount
+				fh.PositiveBuckets[i] = count
+				total += count
+				prevCount = currCount
+			}
+			fh.Sum = hist.sum
+			if hist.count != 0 {
+				total = hist.count
+			}
+			fh.Count = total
+			s := Sample{T: t, H: fh.Compact(0)}
+			if err := s.H.Validate(); err != nil {
+				return err
+			}
+			samples = append(samples, s)
+		}
+		sort.Slice(samples, func(i, j int) bool { return samples[i].T < samples[j].T })
+		for _, s := range samples {
+			if err := appendSample(a, s, histWrapper.metric); err != nil {
+				return err
+			}
+		}
+	}
 	return nil
 }
 
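The core of appendCustomHistogram is turning cumulative classic bucket counts (one _bucket series per le bound) into the per-bucket counts a native histogram stores in PositiveBuckets. A standalone sketch of that inner loop with illustrative numbers:

package main

import "fmt"

func main() {
	// Sorted, deduplicated le bounds collected from the _bucket series.
	upperBounds := []float64{0.1, 0.2, 1}
	// Classic histogram samples at one timestamp: cumulative counts per bound.
	cumulative := map[float64]float64{0.1: 1, 0.2: 3, 1: 4}

	perBucket := make([]float64, len(upperBounds))
	var prev, total float64
	for i, le := range upperBounds {
		curr := cumulative[le] // a missing bound defaults to 0
		perBucket[i] = curr - prev
		total += curr - prev
		prev = curr
	}
	fmt.Println(perBucket, total) // [1 2 1] 4
}

The _count sample, when present, overrides the summed total, and the result is validated and appended like any other native histogram sample.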
@@ -443,6 +638,7 @@ type evalCmd struct {
 
 	isRange bool // if false, instant query
 	fail, ordered bool
+	withNhcb bool
 
 	metrics map[uint64]labels.Labels
 	expected map[uint64]entry
@@ -796,6 +992,25 @@ func (t *test) execInstantEval(cmd *evalCmd, engine QueryEngine) error {
 	}
 	queries = append([]atModifierTestCase{{expr: cmd.expr, evalTime: cmd.start}}, queries...)
 	for _, iq := range queries {
+		if err := t.runInstantQuery(iq, cmd, engine); err != nil {
+			return err
+		}
+		if cmd.withNhcb {
+			if !strings.Contains(iq.expr, "_bucket") {
+				return fmt.Errorf("expected _bucket in the expression %q", iq.expr)
+			}
+			for _, rep := range histogramBucketReplacements {
+				iq.expr = rep.pattern.ReplaceAllString(iq.expr, rep.repl)
+			}
+			if err := t.runInstantQuery(iq, cmd, engine); err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+func (t *test) runInstantQuery(iq atModifierTestCase, cmd *evalCmd, engine QueryEngine) error {
 	q, err := engine.NewInstantQuery(t.context, t.storage, nil, iq.expr, iq.evalTime)
 	if err != nil {
 		return err
@@ -804,7 +1019,7 @@ func (t *test) execInstantEval(cmd *evalCmd, engine QueryEngine) error {
 	res := q.Exec(t.context)
 	if res.Err != nil {
 		if cmd.fail {
-			continue
+			return nil
 		}
 		return fmt.Errorf("error evaluating query %q (line %d): %w", iq.expr, cmd.line, res.Err)
 	}
@@ -829,7 +1044,7 @@ func (t *test) execInstantEval(cmd *evalCmd, engine QueryEngine) error {
 	defer q.Close()
 	if cmd.ordered {
 		// Range queries are always sorted by labels, so skip this test case that expects results in a particular order.
-		continue
+		return nil
 	}
 	mat := rangeRes.Value.(Matrix)
 	if err := assertMatrixSorted(mat); err != nil {
@@ -860,8 +1075,6 @@ func (t *test) execInstantEval(cmd *evalCmd, engine QueryEngine) error {
 	if err != nil {
 		return fmt.Errorf("error in %s %s (line %d) range mode: %w", cmd, iq.expr, cmd.line, err)
 	}
-	}
-
 	return nil
 }
 
@@ -975,7 +1188,7 @@ func (ll *LazyLoader) parse(input string) error {
 		if len(l) == 0 {
 			continue
 		}
-		if strings.ToLower(patSpace.Split(l, 2)[0]) == "load" {
+		if strings.HasPrefix(strings.ToLower(patSpace.Split(l, 2)[0]), "load") {
 			_, cmd, err := parseLoad(lines, i)
 			if err != nil {
 				return err
promql/testdata/histograms.test (vendored, 164 lines changed)
@@ -5,7 +5,7 @@
 # server has to cope with it.
 
 # Test histogram.
-load 5m
+load_with_nhcb 5m
 	testhistogram_bucket{le="0.1", start="positive"} 0+5x10
 	testhistogram_bucket{le=".2", start="positive"} 0+7x10
 	testhistogram_bucket{le="1e0", start="positive"} 0+11x10
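(With the new keyword, this single load provides both representations: the classic testhistogram_bucket series exactly as written, plus an equivalent native histogram with custom buckets stored under the trimmed base name testhistogram, which the eval_with_nhcb queries further down rely on.)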
@@ -18,15 +18,34 @@ load 5m
 # Another test histogram, where q(1/6), q(1/2), and q(5/6) are each in
 # the middle of a bucket and should therefore be 1, 3, and 5,
 # respectively.
-load 5m
+load_with_nhcb 5m
 	testhistogram2_bucket{le="0"} 0+0x10
 	testhistogram2_bucket{le="2"} 0+1x10
 	testhistogram2_bucket{le="4"} 0+2x10
 	testhistogram2_bucket{le="6"} 0+3x10
 	testhistogram2_bucket{le="+Inf"} 0+3x10
 
+# Another test histogram, where there are 0 counts where there is
+# an infinite bound, allowing us to calculate standard deviation
+# and variance properly.
+load_with_nhcb 5m
+	testhistogram3_bucket{le="0", start="positive"} 0+0x10
+	testhistogram3_bucket{le="0.1", start="positive"} 0+5x10
+	testhistogram3_bucket{le=".2", start="positive"} 0+7x10
+	testhistogram3_bucket{le="1e0", start="positive"} 0+11x10
+	testhistogram3_bucket{le="+Inf", start="positive"} 0+11x10
+	testhistogram3_sum{start="positive"} 0+33x10
+	testhistogram3_count{start="positive"} 0+11x10
+	testhistogram3_bucket{le="-.25", start="negative"} 0+0x10
+	testhistogram3_bucket{le="-.2", start="negative"} 0+1x10
+	testhistogram3_bucket{le="-0.1", start="negative"} 0+2x10
+	testhistogram3_bucket{le="0.3", start="negative"} 0+2x10
+	testhistogram3_bucket{le="+Inf", start="negative"} 0+2x10
+	testhistogram3_sum{start="negative"} 0+8x10
+	testhistogram3_count{start="negative"} 0+2x10
+
 # Now a more realistic histogram per job and instance to test aggregation.
-load 5m
+load_with_nhcb 5m
 	request_duration_seconds_bucket{job="job1", instance="ins1", le="0.1"} 0+1x10
 	request_duration_seconds_bucket{job="job1", instance="ins1", le="0.2"} 0+3x10
 	request_duration_seconds_bucket{job="job1", instance="ins1", le="+Inf"} 0+4x10
@@ -41,7 +60,7 @@ load 5m
 	request_duration_seconds_bucket{job="job2", instance="ins2", le="+Inf"} 0+9x10
 
 # Different le representations in one histogram.
-load 5m
+load_with_nhcb 5m
 	mixed_bucket{job="job1", instance="ins1", le="0.1"} 0+1x10
 	mixed_bucket{job="job1", instance="ins1", le="0.2"} 0+1x10
 	mixed_bucket{job="job1", instance="ins1", le="2e-1"} 0+1x10
@@ -50,133 +69,186 @@ load 5m
 	mixed_bucket{job="job1", instance="ins2", le="+inf"} 0+0x10
 	mixed_bucket{job="job1", instance="ins2", le="+Inf"} 0+0x10
 
+# Test histogram_count.
+eval instant at 50m histogram_count(testhistogram3)
+	{start="positive"} 110
+	{start="negative"} 20
+
+# Test histogram_sum.
+eval instant at 50m histogram_sum(testhistogram3)
+	{start="positive"} 330
+	{start="negative"} 80
+
+# Test histogram_avg.
+eval instant at 50m histogram_avg(testhistogram3)
+	{start="positive"} 3
+	{start="negative"} 4
+
+# Test histogram_stddev.
+eval instant at 50m histogram_stddev(testhistogram3)
+	{start="positive"} 2.8189265757336734
+	{start="negative"} 4.182715937754936
+
+# Test histogram_stdvar.
+eval instant at 50m histogram_stdvar(testhistogram3)
+	{start="positive"} 7.946347039377573
+	{start="negative"} 17.495112615949154
+
+# Test histogram_fraction.
+
+eval instant at 50m histogram_fraction(0, 0.2, testhistogram3)
+	{start="positive"} 0.6363636363636364
+	{start="negative"} 0
+
+eval instant at 50m histogram_fraction(0, 0.2, rate(testhistogram3[5m]))
+	{start="positive"} 0.6363636363636364
+	{start="negative"} 0
+
+# Test histogram_quantile.
+
+eval_with_nhcb instant at 50m histogram_quantile(0, testhistogram3_bucket)
+	{start="positive"} 0
+	{start="negative"} -0.25
+
+eval_with_nhcb instant at 50m histogram_quantile(0.25, testhistogram3_bucket)
+	{start="positive"} 0.055
+	{start="negative"} -0.225
+
+eval_with_nhcb instant at 50m histogram_quantile(0.5, testhistogram3_bucket)
+	{start="positive"} 0.125
+	{start="negative"} -0.2
+
+eval_with_nhcb instant at 50m histogram_quantile(0.75, testhistogram3_bucket)
+	{start="positive"} 0.45
+	{start="negative"} -0.15
+
+eval_with_nhcb instant at 50m histogram_quantile(1, testhistogram3_bucket)
+	{start="positive"} 1
+	{start="negative"} -0.1
 
 # Quantile too low.
-eval instant at 50m histogram_quantile(-0.1, testhistogram_bucket)
+eval_with_nhcb instant at 50m histogram_quantile(-0.1, testhistogram_bucket)
 	{start="positive"} -Inf
 	{start="negative"} -Inf
 
 # Quantile too high.
-eval instant at 50m histogram_quantile(1.01, testhistogram_bucket)
+eval_with_nhcb instant at 50m histogram_quantile(1.01, testhistogram_bucket)
 	{start="positive"} +Inf
 	{start="negative"} +Inf
 
 # Quantile invalid.
-eval instant at 50m histogram_quantile(NaN, testhistogram_bucket)
+eval_with_nhcb instant at 50m histogram_quantile(NaN, testhistogram_bucket)
 	{start="positive"} NaN
 	{start="negative"} NaN
 
-# Quantile value in lowest bucket, which is positive.
-eval instant at 50m histogram_quantile(0, testhistogram_bucket{start="positive"})
+# Quantile value in lowest bucket.
+eval_with_nhcb instant at 50m histogram_quantile(0, testhistogram_bucket)
 	{start="positive"} 0
-
-# Quantile value in lowest bucket, which is negative.
-eval instant at 50m histogram_quantile(0, testhistogram_bucket{start="negative"})
 	{start="negative"} -0.2
 
 # Quantile value in highest bucket.
-eval instant at 50m histogram_quantile(1, testhistogram_bucket)
+eval_with_nhcb instant at 50m histogram_quantile(1, testhistogram_bucket)
 	{start="positive"} 1
 	{start="negative"} 0.3
 
 # Finally some useful quantiles.
-eval instant at 50m histogram_quantile(0.2, testhistogram_bucket)
+eval_with_nhcb instant at 50m histogram_quantile(0.2, testhistogram_bucket)
 	{start="positive"} 0.048
 	{start="negative"} -0.2
 
-eval instant at 50m histogram_quantile(0.5, testhistogram_bucket)
+eval_with_nhcb instant at 50m histogram_quantile(0.5, testhistogram_bucket)
 	{start="positive"} 0.15
 	{start="negative"} -0.15
 
-eval instant at 50m histogram_quantile(0.8, testhistogram_bucket)
+eval_with_nhcb instant at 50m histogram_quantile(0.8, testhistogram_bucket)
 	{start="positive"} 0.72
 	{start="negative"} 0.3
 
 # More realistic with rates.
-eval instant at 50m histogram_quantile(0.2, rate(testhistogram_bucket[5m]))
+eval_with_nhcb instant at 50m histogram_quantile(0.2, rate(testhistogram_bucket[5m]))
 	{start="positive"} 0.048
 	{start="negative"} -0.2
 
-eval instant at 50m histogram_quantile(0.5, rate(testhistogram_bucket[5m]))
+eval_with_nhcb instant at 50m histogram_quantile(0.5, rate(testhistogram_bucket[5m]))
 	{start="positive"} 0.15
 	{start="negative"} -0.15
 
-eval instant at 50m histogram_quantile(0.8, rate(testhistogram_bucket[5m]))
+eval_with_nhcb instant at 50m histogram_quantile(0.8, rate(testhistogram_bucket[5m]))
 	{start="positive"} 0.72
 	{start="negative"} 0.3
 
 # Want results exactly in the middle of the bucket.
-eval instant at 7m histogram_quantile(1./6., testhistogram2_bucket)
+eval_with_nhcb instant at 7m histogram_quantile(1./6., testhistogram2_bucket)
 	{} 1
 
-eval instant at 7m histogram_quantile(0.5, testhistogram2_bucket)
+eval_with_nhcb instant at 7m histogram_quantile(0.5, testhistogram2_bucket)
 	{} 3
 
-eval instant at 7m histogram_quantile(5./6., testhistogram2_bucket)
+eval_with_nhcb instant at 7m histogram_quantile(5./6., testhistogram2_bucket)
 	{} 5
 
-eval instant at 47m histogram_quantile(1./6., rate(testhistogram2_bucket[15m]))
+eval_with_nhcb instant at 47m histogram_quantile(1./6., rate(testhistogram2_bucket[15m]))
 	{} 1
 
-eval instant at 47m histogram_quantile(0.5, rate(testhistogram2_bucket[15m]))
+eval_with_nhcb instant at 47m histogram_quantile(0.5, rate(testhistogram2_bucket[15m]))
 	{} 3
 
-eval instant at 47m histogram_quantile(5./6., rate(testhistogram2_bucket[15m]))
+eval_with_nhcb instant at 47m histogram_quantile(5./6., rate(testhistogram2_bucket[15m]))
 	{} 5
 
 # Aggregated histogram: Everything in one.
-eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le))
+eval_with_nhcb instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le))
 	{} 0.075
 
-eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le))
+eval_with_nhcb instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le))
 	{} 0.1277777777777778
 
 # Aggregated histogram: Everything in one. Now with avg, which does not change anything.
-eval instant at 50m histogram_quantile(0.3, avg(rate(request_duration_seconds_bucket[5m])) by (le))
+eval_with_nhcb instant at 50m histogram_quantile(0.3, avg(rate(request_duration_seconds_bucket[5m])) by (le))
 	{} 0.075
 
-eval instant at 50m histogram_quantile(0.5, avg(rate(request_duration_seconds_bucket[5m])) by (le))
+eval_with_nhcb instant at 50m histogram_quantile(0.5, avg(rate(request_duration_seconds_bucket[5m])) by (le))
 	{} 0.12777777777777778
 
 # Aggregated histogram: By instance.
-eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le, instance))
+eval_with_nhcb instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le, instance))
 	{instance="ins1"} 0.075
 	{instance="ins2"} 0.075
 
-eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le, instance))
+eval_with_nhcb instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le, instance))
 	{instance="ins1"} 0.1333333333
 	{instance="ins2"} 0.125
 
 # Aggregated histogram: By job.
-eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le, job))
+eval_with_nhcb instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le, job))
 	{job="job1"} 0.1
 	{job="job2"} 0.0642857142857143
 
-eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le, job))
+eval_with_nhcb instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le, job))
 	{job="job1"} 0.14
 	{job="job2"} 0.1125
 
 # Aggregated histogram: By job and instance.
-eval instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le, job, instance))
+eval_with_nhcb instant at 50m histogram_quantile(0.3, sum(rate(request_duration_seconds_bucket[5m])) by (le, job, instance))
 	{instance="ins1", job="job1"} 0.11
 	{instance="ins2", job="job1"} 0.09
 	{instance="ins1", job="job2"} 0.06
 	{instance="ins2", job="job2"} 0.0675
 
-eval instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le, job, instance))
+eval_with_nhcb instant at 50m histogram_quantile(0.5, sum(rate(request_duration_seconds_bucket[5m])) by (le, job, instance))
 	{instance="ins1", job="job1"} 0.15
 	{instance="ins2", job="job1"} 0.1333333333333333
 	{instance="ins1", job="job2"} 0.1
 	{instance="ins2", job="job2"} 0.1166666666666667
 
 # The unaggregated histogram for comparison. Same result as the previous one.
-eval instant at 50m histogram_quantile(0.3, rate(request_duration_seconds_bucket[5m]))
+eval_with_nhcb instant at 50m histogram_quantile(0.3, rate(request_duration_seconds_bucket[5m]))
 	{instance="ins1", job="job1"} 0.11
 	{instance="ins2", job="job1"} 0.09
 	{instance="ins1", job="job2"} 0.06
 	{instance="ins2", job="job2"} 0.0675
 
-eval instant at 50m histogram_quantile(0.5, rate(request_duration_seconds_bucket[5m]))
+eval_with_nhcb instant at 50m histogram_quantile(0.5, rate(request_duration_seconds_bucket[5m]))
 	{instance="ins1", job="job1"} 0.15
 	{instance="ins2", job="job1"} 0.13333333333333333
 	{instance="ins1", job="job2"} 0.1
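Each eval_with_nhcb above therefore runs twice: once verbatim against the classic _bucket series, and once with the expression rewritten by histogramBucketReplacements, so that, for example, histogram_quantile(0.2, testhistogram_bucket) is also evaluated as histogram_quantile(0.2, testhistogram) against the converted native histogram, with both runs checked against the same expected values.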
@@ -209,27 +281,31 @@ eval instant at 50m histogram_quantile(0.5, rate(mixed_bucket[5m]))
 	{instance="ins1", job="job1"} 0.15
 	{instance="ins2", job="job1"} NaN
 
-eval instant at 50m histogram_quantile(0.75, rate(mixed_bucket[5m]))
+eval instant at 50m histogram_quantile(0.5, rate(mixed[5m]))
 	{instance="ins1", job="job1"} 0.2
 	{instance="ins2", job="job1"} NaN
 
-eval instant at 50m histogram_quantile(1, rate(mixed_bucket[5m]))
+eval_with_nhcb instant at 50m histogram_quantile(0.75, rate(mixed_bucket[5m]))
 	{instance="ins1", job="job1"} 0.2
 	{instance="ins2", job="job1"} NaN
 
-load 5m
+eval_with_nhcb instant at 50m histogram_quantile(1, rate(mixed_bucket[5m]))
+	{instance="ins1", job="job1"} 0.2
+	{instance="ins2", job="job1"} NaN
+
+load_with_nhcb 5m
 	empty_bucket{le="0.1", job="job1", instance="ins1"} 0x10
 	empty_bucket{le="0.2", job="job1", instance="ins1"} 0x10
 	empty_bucket{le="+Inf", job="job1", instance="ins1"} 0x10
 
-eval instant at 50m histogram_quantile(0.2, rate(empty_bucket[5m]))
+eval_with_nhcb instant at 50m histogram_quantile(0.2, rate(empty_bucket[5m]))
 	{instance="ins1", job="job1"} NaN
 
 # Load a duplicate histogram with a different name to test failure scenario on multiple histograms with the same label set
 # https://github.com/prometheus/prometheus/issues/9910
-load 5m
+load_with_nhcb 5m
 	request_duration_seconds2_bucket{job="job1", instance="ins1", le="0.1"} 0+1x10
 	request_duration_seconds2_bucket{job="job1", instance="ins1", le="0.2"} 0+3x10
 	request_duration_seconds2_bucket{job="job1", instance="ins1", le="+Inf"} 0+4x10
 
-eval_fail instant at 50m histogram_quantile(0.99, {__name__=~"request_duration.*"})
+eval_with_nhcb_fail instant at 50m histogram_quantile(0.99, {__name__=~"request_duration_seconds\\d*_bucket$"})