Implement offset operator.

This allows changing the time offset for individual instant and range
vectors in a query.

For example, this returns the value of `foo` 5 minutes in the past
relative to the current query evaluation time:

    foo offset 5m

Note that the `offset` modifier always needs to immediately follow the
selector. That is, the following is correct:

    sum(foo offset 5m) // GOOD.

The following, in contrast, is *incorrect*:

    sum(foo) offset 5m // INVALID.

The same works for range vectors. This returns the 5-minute rate that
`foo` had a week ago:

    rate(foo[5m] offset 1w)

This change touches the following components:

* Lexer/parser: additions to correctly parse the new `offset`/`OFFSET`
  keyword.
* AST: vector and matrix nodes now have an additional `offset` field.
  This is used during their evaluation to adjust query and result times
  appropriately.
* Query analyzer: now works on separate sets of ranges and instants per
  offset. Isolating different offsets from each other completely in this
  way keeps the preloading code relatively simple.
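
For illustration, here is a minimal, self-contained sketch of that
per-offset grouping (not the actual Prometheus code; `fingerprint`, the
simplified `preloadTimes`, and `getPreloadTimes` below are stand-ins for
the types and helper shown in the AST/analyzer diff further down):

    package main

    import (
        "fmt"
        "time"
    )

    // fingerprint is a simplified stand-in for clientmodel.Fingerprint.
    type fingerprint uint64

    // preloadTimes mirrors the analyzer struct: instants need single samples
    // per resolution step, ranges need a window reaching back by the given
    // duration (as in "foo[5m]").
    type preloadTimes struct {
        instants map[fingerprint]struct{}
        ranges   map[fingerprint]time.Duration
    }

    // getPreloadTimes returns the preloadTimes for one offset, creating it on
    // first use, so each offset is preloaded independently of the others.
    func getPreloadTimes(m map[time.Duration]preloadTimes, offset time.Duration) preloadTimes {
        if _, ok := m[offset]; !ok {
            m[offset] = preloadTimes{
                instants: map[fingerprint]struct{}{},
                ranges:   map[fingerprint]time.Duration{},
            }
        }
        return m[offset]
    }

    func main() {
        offsets := map[time.Duration]preloadTimes{}

        // foo offset 5m -> an instant shifted back by 5 minutes.
        pt := getPreloadTimes(offsets, 5*time.Minute)
        pt.instants[1] = struct{}{}

        // rate(foo[5m] offset 1w) -> a 5-minute range shifted back by a week.
        pt = getPreloadTimes(offsets, 7*24*time.Hour)
        pt.ranges[1] = 5 * time.Minute

        eval := time.Now()
        for offset, pt := range offsets {
            ts := eval.Add(-offset) // evaluation time shifted by the offset
            for fp, d := range pt.ranges {
                fmt.Printf("preload range   fp=%d [%v, %v]\n", fp, ts.Add(-d), ts)
            }
            for fp := range pt.instants {
                fmt.Printf("preload instant fp=%d at %v\n", fp, ts)
            }
        }
    }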

No storage engine changes were required.

The rules tests no longer probe the query analyzer's internal
implementation details (how many instants and ranges have been
preloaded). That would have become too cumbersome to test with the new
model, and checking the query results should be sufficient.
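
The two offset cases added to the expression tests illustrate the new
style: each entry lists only the expression and its expected output lines
(excerpt mirroring the test table in the diff below, with the unrelated
fields omitted and the variable name `offsetTests` chosen here for
illustration):

    var offsetTests = []struct {
        expr   string
        output []string
    }{
        {
            expr: `http_requests{group="production",job="api-server"} offset 5m`,
            output: []string{
                `http_requests{group="production", instance="0", job="api-server"} => 90 @[%v]`,
                `http_requests{group="production", instance="1", job="api-server"} => 180 @[%v]`,
            },
        },
        {
            expr: `rate(http_requests{group="production",job="api-server"}[10m] offset 5m)`,
            output: []string{
                `{group="production", instance="0", job="api-server"} => 0.03333333333333333 @[%v]`,
                `{group="production", instance="1", job="api-server"} => 0.06666666666666667 @[%v]`,
            },
        },
    }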

This fixes https://github.com/prometheus/prometheus/issues/529
This fixes https://github.com/prometheus/promdash/issues/201
Julius Volz 2015-02-18 02:30:41 +01:00
parent 79a4a6d8e8
commit 72d7b325a1
8 changed files with 963 additions and 926 deletions

@ -215,6 +215,7 @@ type (
// A VectorSelector represents a metric name plus labelset.
VectorSelector struct {
labelMatchers metric.LabelMatchers
offset time.Duration
// The series iterators are populated at query analysis time.
iterators map[clientmodel.Fingerprint]local.SeriesIterator
metrics map[clientmodel.Fingerprint]clientmodel.COWMetric
@ -261,6 +262,7 @@ type (
// Fingerprints are populated from label matchers at query analysis time.
fingerprints clientmodel.Fingerprints
interval time.Duration
offset time.Duration
}
)
@ -561,8 +563,8 @@ func (node *VectorSelector) Eval(timestamp clientmodel.Timestamp) Vector {
//// timer := v.stats.GetTimer(stats.GetValueAtTimeTime).Start()
samples := Vector{}
for fp, it := range node.iterators {
sampleCandidates := it.GetValueAtTime(timestamp)
samplePair := chooseClosestSample(sampleCandidates, timestamp)
sampleCandidates := it.GetValueAtTime(timestamp.Add(-node.offset))
samplePair := chooseClosestSample(sampleCandidates, timestamp.Add(-node.offset))
if samplePair != nil {
samples = append(samples, &Sample{
Metric: node.metrics[fp],
@ -823,8 +825,8 @@ func (node *VectorArithExpr) Eval(timestamp clientmodel.Timestamp) Vector {
// the selector.
func (node *MatrixSelector) Eval(timestamp clientmodel.Timestamp) Matrix {
interval := &metric.Interval{
OldestInclusive: timestamp.Add(-node.interval),
NewestInclusive: timestamp,
OldestInclusive: timestamp.Add(-node.interval - node.offset),
NewestInclusive: timestamp.Add(-node.offset),
}
//// timer := v.stats.GetTimer(stats.GetRangeValuesTime).Start()
@ -835,6 +837,12 @@ func (node *MatrixSelector) Eval(timestamp clientmodel.Timestamp) Matrix {
continue
}
if node.offset != 0 {
for _, sp := range samplePairs {
sp.Timestamp = sp.Timestamp.Add(node.offset)
}
}
sampleStream := SampleStream{
Metric: node.metrics[fp],
Values: samplePairs,
@ -910,9 +918,10 @@ func NewScalarLiteral(value clientmodel.SampleValue) *ScalarLiteral {
// NewVectorSelector returns a (not yet evaluated) VectorSelector with
// the given LabelSet.
func NewVectorSelector(m metric.LabelMatchers) *VectorSelector {
func NewVectorSelector(m metric.LabelMatchers, offset time.Duration) *VectorSelector {
return &VectorSelector{
labelMatchers: m,
offset: offset,
iterators: map[clientmodel.Fingerprint]local.SeriesIterator{},
metrics: map[clientmodel.Fingerprint]clientmodel.COWMetric{},
}
@ -1002,10 +1011,11 @@ func NewArithExpr(opType BinOpType, lhs Node, rhs Node) (Node, error) {
// NewMatrixSelector returns a (not yet evaluated) MatrixSelector with
// the given VectorSelector and Duration.
func NewMatrixSelector(vector *VectorSelector, interval time.Duration) *MatrixSelector {
func NewMatrixSelector(vector *VectorSelector, interval time.Duration, offset time.Duration) *MatrixSelector {
return &MatrixSelector{
labelMatchers: vector.labelMatchers,
interval: interval,
offset: offset,
iterators: map[clientmodel.Fingerprint]local.SeriesIterator{},
metrics: map[clientmodel.Fingerprint]clientmodel.COWMetric{},
}

@ -22,28 +22,27 @@ import (
"github.com/prometheus/prometheus/storage/local"
)
// FullRangeMap maps the fingerprint of a full range to the duration
// of the matrix selector it resulted from.
type FullRangeMap map[clientmodel.Fingerprint]time.Duration
// IntervalRangeMap is a set of fingerprints of interval ranges.
type IntervalRangeMap map[clientmodel.Fingerprint]bool
// preloadTimes tracks which instants or ranges to preload for a set of
// fingerprints. One of these structs is collected for each offset by the query
// analyzer.
type preloadTimes struct {
// Instants require single samples to be loaded along the entire query
// range, with intervals between the samples corresponding to the query
// resolution.
instants map[clientmodel.Fingerprint]struct{}
// Ranges require loading a range of samples at each resolution step,
// stretching backwards from the current evaluation timestamp. The length of
// the range into the past is given by the duration, as in "foo[5m]".
ranges map[clientmodel.Fingerprint]time.Duration
}
// A QueryAnalyzer recursively traverses the AST to look for any nodes
// which will need data from the datastore. Instantiate with
// NewQueryAnalyzer.
type QueryAnalyzer struct {
// Values collected by query analysis.
//
// Full ranges always implicitly span a time range of:
// - start: query interval start - duration
// - end: query interval end
//
// This is because full ranges can only result from matrix selectors (like
// "foo[5m]"), which have said time-spanning behavior during a ranged query.
FullRanges FullRangeMap
// Interval ranges always implicitly span the whole query range.
IntervalRanges IntervalRangeMap
// Tracks one set of times to preload per offset that occurs in the query
// expression.
offsetPreloadTimes map[time.Duration]preloadTimes
// The underlying storage to which the query will be applied. Needed for
// extracting timeseries fingerprint information during query analysis.
storage local.Storage
@ -54,37 +53,49 @@ type QueryAnalyzer struct {
// fingerprint information during query analysis.
func NewQueryAnalyzer(storage local.Storage) *QueryAnalyzer {
return &QueryAnalyzer{
FullRanges: FullRangeMap{},
IntervalRanges: IntervalRangeMap{},
storage: storage,
offsetPreloadTimes: map[time.Duration]preloadTimes{},
storage: storage,
}
}
func (analyzer *QueryAnalyzer) getPreloadTimes(offset time.Duration) preloadTimes {
if _, ok := analyzer.offsetPreloadTimes[offset]; !ok {
analyzer.offsetPreloadTimes[offset] = preloadTimes{
instants: map[clientmodel.Fingerprint]struct{}{},
ranges: map[clientmodel.Fingerprint]time.Duration{},
}
}
return analyzer.offsetPreloadTimes[offset]
}
// Visit implements the Visitor interface.
func (analyzer *QueryAnalyzer) Visit(node Node) {
switch n := node.(type) {
case *VectorSelector:
pt := analyzer.getPreloadTimes(n.offset)
fingerprints := analyzer.storage.GetFingerprintsForLabelMatchers(n.labelMatchers)
n.fingerprints = fingerprints
for _, fp := range fingerprints {
// Only add the fingerprint to IntervalRanges if not yet present in FullRanges.
// Full ranges always contain more points and span more time than interval ranges.
if _, alreadyInFullRanges := analyzer.FullRanges[fp]; !alreadyInFullRanges {
analyzer.IntervalRanges[fp] = true
// Only add the fingerprint to the instants if not yet present in the
// ranges. Ranges always contain more points and span more time than
// instants for the same offset.
if _, alreadyInRanges := pt.ranges[fp]; !alreadyInRanges {
pt.instants[fp] = struct{}{}
}
n.metrics[fp] = analyzer.storage.GetMetricForFingerprint(fp)
}
case *MatrixSelector:
pt := analyzer.getPreloadTimes(n.offset)
fingerprints := analyzer.storage.GetFingerprintsForLabelMatchers(n.labelMatchers)
n.fingerprints = fingerprints
for _, fp := range fingerprints {
if analyzer.FullRanges[fp] < n.interval {
analyzer.FullRanges[fp] = n.interval
// Delete the fingerprint from IntervalRanges. Full ranges always contain
// more points and span more time than interval ranges, so we don't need
// an interval range for the same fingerprint, should we have one.
delete(analyzer.IntervalRanges, fp)
if pt.ranges[fp] < n.interval {
pt.ranges[fp] = n.interval
// Delete the fingerprint from the instants. Ranges always contain more
// points and span more time than instants, so we don't need to track
// an instant for the same fingerprint, should we have one.
delete(pt.instants, fp)
}
n.metrics[fp] = analyzer.storage.GetMetricForFingerprint(fp)
@ -119,28 +130,31 @@ func prepareInstantQuery(node Node, timestamp clientmodel.Timestamp, storage loc
preloadTimer := queryStats.GetTimer(stats.PreloadTime).Start()
p := storage.NewPreloader()
for fp, rangeDuration := range analyzer.FullRanges {
if et := totalTimer.ElapsedTime(); et > *queryTimeout {
preloadTimer.Stop()
p.Close()
return nil, queryTimeoutError{et}
for offset, pt := range analyzer.offsetPreloadTimes {
ts := timestamp.Add(-offset)
for fp, rangeDuration := range pt.ranges {
if et := totalTimer.ElapsedTime(); et > *queryTimeout {
preloadTimer.Stop()
p.Close()
return nil, queryTimeoutError{et}
}
if err := p.PreloadRange(fp, ts.Add(-rangeDuration), ts, *stalenessDelta); err != nil {
preloadTimer.Stop()
p.Close()
return nil, err
}
}
if err := p.PreloadRange(fp, timestamp.Add(-rangeDuration), timestamp, *stalenessDelta); err != nil {
preloadTimer.Stop()
p.Close()
return nil, err
}
}
for fp := range analyzer.IntervalRanges {
if et := totalTimer.ElapsedTime(); et > *queryTimeout {
preloadTimer.Stop()
p.Close()
return nil, queryTimeoutError{et}
}
if err := p.PreloadRange(fp, timestamp, timestamp, *stalenessDelta); err != nil {
preloadTimer.Stop()
p.Close()
return nil, err
for fp := range pt.instants {
if et := totalTimer.ElapsedTime(); et > *queryTimeout {
preloadTimer.Stop()
p.Close()
return nil, queryTimeoutError{et}
}
if err := p.PreloadRange(fp, ts, ts, *stalenessDelta); err != nil {
preloadTimer.Stop()
p.Close()
return nil, err
}
}
}
preloadTimer.Stop()
@ -163,41 +177,45 @@ func prepareRangeQuery(node Node, start clientmodel.Timestamp, end clientmodel.T
preloadTimer := queryStats.GetTimer(stats.PreloadTime).Start()
p := storage.NewPreloader()
for fp, rangeDuration := range analyzer.FullRanges {
if et := totalTimer.ElapsedTime(); et > *queryTimeout {
preloadTimer.Stop()
p.Close()
return nil, queryTimeoutError{et}
}
if err := p.PreloadRange(fp, start.Add(-rangeDuration), end, *stalenessDelta); err != nil {
preloadTimer.Stop()
p.Close()
return nil, err
}
/*
if interval < rangeDuration {
if err := p.GetMetricRange(fp, end, end.Sub(start)+rangeDuration); err != nil {
p.Close()
return nil, err
}
} else {
if err := p.GetMetricRangeAtInterval(fp, start, end, interval, rangeDuration); err != nil {
p.Close()
return nil, err
}
for offset, pt := range analyzer.offsetPreloadTimes {
offsetStart := start.Add(-offset)
offsetEnd := end.Add(-offset)
for fp, rangeDuration := range pt.ranges {
if et := totalTimer.ElapsedTime(); et > *queryTimeout {
preloadTimer.Stop()
p.Close()
return nil, queryTimeoutError{et}
}
*/
}
for fp := range analyzer.IntervalRanges {
if et := totalTimer.ElapsedTime(); et > *queryTimeout {
preloadTimer.Stop()
p.Close()
return nil, queryTimeoutError{et}
if err := p.PreloadRange(fp, offsetStart.Add(-rangeDuration), offsetEnd, *stalenessDelta); err != nil {
preloadTimer.Stop()
p.Close()
return nil, err
}
/*
if interval < rangeDuration {
if err := p.GetMetricRange(fp, offsetEnd, offsetEnd.Sub(offsetStart)+rangeDuration); err != nil {
p.Close()
return nil, err
}
} else {
if err := p.GetMetricRangeAtInterval(fp, offsetStart, offsetEnd, interval, rangeDuration); err != nil {
p.Close()
return nil, err
}
}
*/
}
if err := p.PreloadRange(fp, start, end, *stalenessDelta); err != nil {
preloadTimer.Stop()
p.Close()
return nil, err
for fp := range pt.instants {
if et := totalTimer.ElapsedTime(); et > *queryTimeout {
preloadTimer.Stop()
p.Close()
return nil, queryTimeoutError{et}
}
if err := p.PreloadRange(fp, offsetStart, offsetEnd, *stalenessDelta); err != nil {
preloadTimer.Stop()
p.Close()
return nil, err
}
}
}
preloadTimer.Stop()

@ -110,22 +110,30 @@ func NewArithExpr(opTypeStr string, lhs ast.Node, rhs ast.Node) (ast.Node, error
return expr, nil
}
// NewMatrixSelector is a convenience function to create a new AST matrix selector.
func NewMatrixSelector(vector ast.Node, intervalStr string) (ast.MatrixNode, error) {
switch vector.(type) {
case *ast.VectorSelector:
{
break
}
default:
return nil, fmt.Errorf("intervals are currently only supported for vector selectors")
// NewVectorSelector is a convenience function to create a new AST vector selector.
func NewVectorSelector(m metric.LabelMatchers, offsetStr string) (ast.VectorNode, error) {
offset, err := utility.StringToDuration(offsetStr)
if err != nil {
return nil, err
}
return ast.NewVectorSelector(m, offset), nil
}
// NewMatrixSelector is a convenience function to create a new AST matrix selector.
func NewMatrixSelector(vector ast.Node, intervalStr string, offsetStr string) (ast.MatrixNode, error) {
interval, err := utility.StringToDuration(intervalStr)
if err != nil {
return nil, err
}
vectorSelector := vector.(*ast.VectorSelector)
return ast.NewMatrixSelector(vectorSelector, interval), nil
offset, err := utility.StringToDuration(offsetStr)
if err != nil {
return nil, err
}
vectorSelector, ok := vector.(*ast.VectorSelector)
if !ok {
return nil, fmt.Errorf("intervals are currently only supported for vector selectors")
}
return ast.NewMatrixSelector(vectorSelector, interval, offset), nil
}
func newLabelMatcher(matchTypeStr string, name clientmodel.LabelName, value clientmodel.LabelValue) (*metric.LabelMatcher, error) {

@ -80,6 +80,7 @@ DESCRIPTION|description return DESCRIPTION
PERMANENT|permanent return PERMANENT
BY|by return GROUP_OP
KEEPING_EXTRA|keeping_extra return KEEPING_EXTRA
OFFSET|offset return OFFSET
AVG|SUM|MAX|MIN|COUNT lval.str = lexer.token(); return AGGR_OP
avg|sum|max|min|count lval.str = strings.ToUpper(lexer.token()); return AGGR_OP
\<|>|AND|OR|and|or lval.str = strings.ToUpper(lexer.token()); return CMP_OP

File diff suppressed because it is too large.

@ -42,7 +42,7 @@
%token <str> IDENTIFIER STRING DURATION METRICNAME
%token <num> NUMBER
%token PERMANENT GROUP_OP KEEPING_EXTRA
%token PERMANENT GROUP_OP KEEPING_EXTRA OFFSET
%token <str> AGGR_OP CMP_OP ADDITIVE_OP MULT_OP
%token ALERT IF FOR WITH SUMMARY DESCRIPTION
@ -53,7 +53,7 @@
%type <labelMatchers> label_match_list label_matches
%type <ruleNode> rule_expr func_arg
%type <boolean> qualifier extra_labels_opts
%type <str> for_duration metric_name label_match_type
%type <str> for_duration metric_name label_match_type offset_opts
%right '='
%left CMP_OP
@ -152,17 +152,28 @@ label_match_type : '='
{ $$ = $1 }
;
offset_opts : /* empty */
{ $$ = "0s" }
| OFFSET DURATION
{ $$ = $2 }
;
rule_expr : '(' rule_expr ')'
{ $$ = $2 }
| '{' label_match_list '}'
{ $$ = ast.NewVectorSelector($2) }
| metric_name label_matches
| '{' label_match_list '}' offset_opts
{
var err error
$$, err = NewVectorSelector($2, $4)
if err != nil { yylex.Error(err.Error()); return 1 }
}
| metric_name label_matches offset_opts
{
var err error
m, err := metric.NewLabelMatcher(metric.Equal, clientmodel.MetricNameLabel, clientmodel.LabelValue($1))
if err != nil { yylex.Error(err.Error()); return 1 }
$2 = append($2, m)
$$ = ast.NewVectorSelector($2)
$$, err = NewVectorSelector($2, $3)
if err != nil { yylex.Error(err.Error()); return 1 }
}
| IDENTIFIER '(' func_arg_list ')'
{
@ -176,10 +187,10 @@ rule_expr : '(' rule_expr ')'
$$, err = NewFunctionCall($1, []ast.Node{})
if err != nil { yylex.Error(err.Error()); return 1 }
}
| rule_expr '[' DURATION ']'
| rule_expr '[' DURATION ']' offset_opts
{
var err error
$$, err = NewMatrixSelector($1, $3)
$$, err = NewMatrixSelector($1, $3, $5)
if err != nil { yylex.Error(err.Error()); return 1 }
}
| AGGR_OP '(' rule_expr ')' grouping_opts extra_labels_opts

@ -35,16 +35,17 @@ const NUMBER = 57352
const PERMANENT = 57353
const GROUP_OP = 57354
const KEEPING_EXTRA = 57355
const AGGR_OP = 57356
const CMP_OP = 57357
const ADDITIVE_OP = 57358
const MULT_OP = 57359
const ALERT = 57360
const IF = 57361
const FOR = 57362
const WITH = 57363
const SUMMARY = 57364
const DESCRIPTION = 57365
const OFFSET = 57356
const AGGR_OP = 57357
const CMP_OP = 57358
const ADDITIVE_OP = 57359
const MULT_OP = 57360
const ALERT = 57361
const IF = 57362
const FOR = 57363
const WITH = 57364
const SUMMARY = 57365
const DESCRIPTION = 57366
var yyToknames = []string{
"START_RULES",
@ -57,6 +58,7 @@ var yyToknames = []string{
"PERMANENT",
"GROUP_OP",
"KEEPING_EXTRA",
"OFFSET",
"AGGR_OP",
"CMP_OP",
"ADDITIVE_OP",
@ -75,7 +77,7 @@ const yyEofCode = 1
const yyErrCode = 2
const yyMaxDepth = 200
//line parser.y:250
//line parser.y:261
//line yacctab:1
var yyExca = []int{
@ -87,91 +89,97 @@ var yyExca = []int{
-2, 10,
}
const yyNprod = 50
const yyNprod = 52
const yyPrivate = 57344
var yyTokenNames []string
var yyStates []string
const yyLast = 137
const yyLast = 142
var yyAct = []int{
56, 72, 50, 53, 30, 24, 6, 20, 49, 59,
22, 10, 51, 18, 13, 12, 21, 19, 20, 11,
18, 85, 36, 37, 38, 21, 19, 20, 81, 82,
8, 18, 52, 7, 48, 66, 21, 19, 20, 87,
18, 10, 51, 31, 13, 12, 60, 55, 28, 11,
65, 18, 21, 19, 20, 57, 21, 19, 20, 29,
8, 74, 23, 7, 62, 40, 39, 18, 73, 77,
76, 18, 80, 75, 10, 19, 20, 13, 12, 79,
86, 78, 11, 64, 89, 63, 41, 40, 71, 18,
46, 25, 93, 8, 44, 27, 7, 83, 69, 96,
94, 91, 58, 43, 9, 17, 54, 31, 92, 35,
33, 45, 16, 13, 97, 95, 90, 61, 73, 88,
32, 68, 25, 34, 2, 3, 14, 5, 4, 1,
42, 84, 15, 26, 70, 67, 47,
58, 76, 55, 52, 51, 30, 45, 6, 24, 20,
61, 22, 10, 53, 18, 13, 12, 84, 68, 83,
67, 11, 18, 36, 37, 38, 21, 19, 20, 21,
19, 20, 8, 54, 90, 7, 50, 21, 19, 20,
92, 18, 10, 53, 18, 13, 12, 70, 63, 62,
88, 11, 18, 57, 31, 21, 19, 20, 86, 87,
66, 40, 8, 10, 78, 7, 13, 12, 79, 69,
18, 29, 11, 80, 82, 81, 28, 85, 21, 19,
20, 19, 20, 8, 91, 77, 7, 41, 40, 94,
25, 23, 39, 18, 59, 18, 44, 98, 27, 73,
101, 99, 60, 96, 17, 43, 75, 9, 46, 56,
31, 47, 16, 33, 97, 102, 13, 65, 35, 48,
100, 95, 64, 32, 77, 93, 72, 25, 34, 2,
3, 14, 5, 4, 1, 42, 89, 15, 26, 74,
71, 49,
}
var yyPact = []int{
120, -1000, -1000, 68, 94, -1000, 41, 68, 116, 70,
20, 31, -1000, -1000, -1000, 104, 117, -1000, 101, 68,
68, 68, 37, 60, -1000, 79, -1000, 85, 5, 68,
93, 19, 30, -1000, 83, -22, -10, -17, 59, -1000,
116, -1000, 110, -1000, -1000, -1000, 38, 56, -1000, -1000,
41, -1000, 21, 7, -1000, 115, 74, 62, 68, -1000,
-1000, -1000, -1000, -1000, 35, 95, 68, 52, -1000, 68,
2, -1000, -1000, 73, 1, -1000, 93, 10, -1000, 113,
41, -1000, 112, 109, 80, 100, -1000, -1000, -1000, -1000,
-1000, 30, -1000, 78, 108, 76, 107, -1000,
125, -1000, -1000, 57, 93, -1000, 21, 57, 121, 72,
47, 42, -1000, -1000, -1000, 107, 122, -1000, 110, 57,
57, 57, 62, 60, -1000, 80, 94, 84, 6, 57,
96, 24, 68, -1000, 82, -22, -9, -17, 64, -1000,
121, 94, 115, -1000, -1000, -1000, 109, -1000, 33, -10,
-1000, -1000, 21, -1000, 39, 18, -1000, 120, 74, 79,
57, 94, -1000, -1000, -1000, -1000, -1000, -1000, 36, 98,
57, -11, -1000, 57, 31, -1000, -1000, 25, 13, -1000,
-1000, 96, 10, -1000, 119, 21, -1000, 118, 114, 81,
106, -1000, -1000, -1000, -1000, -1000, 68, -1000, 78, 113,
76, 108, -1000,
}
var yyPgo = []int{
0, 136, 135, 4, 1, 134, 0, 5, 62, 133,
2, 8, 132, 3, 131, 104, 130, 129, 128, 127,
126,
0, 141, 140, 5, 1, 139, 0, 8, 91, 138,
3, 4, 137, 2, 136, 107, 135, 6, 134, 133,
132, 131,
}
var yyR1 = []int{
0, 17, 17, 18, 18, 19, 20, 20, 14, 14,
0, 18, 18, 19, 19, 20, 21, 21, 14, 14,
12, 12, 15, 15, 6, 6, 6, 5, 5, 4,
9, 9, 9, 8, 8, 7, 16, 16, 10, 10,
9, 9, 9, 8, 8, 7, 16, 16, 17, 17,
10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
13, 13, 3, 3, 2, 2, 1, 1, 11, 11,
10, 10, 13, 13, 3, 3, 2, 2, 1, 1,
11, 11,
}
var yyR2 = []int{
0, 2, 2, 0, 2, 1, 5, 11, 0, 2,
0, 1, 1, 1, 0, 3, 2, 1, 3, 3,
0, 2, 3, 1, 3, 3, 1, 1, 3, 3,
2, 4, 3, 4, 6, 6, 3, 3, 3, 1,
0, 1, 0, 4, 1, 3, 1, 3, 1, 1,
0, 2, 3, 1, 3, 3, 1, 1, 0, 2,
3, 4, 3, 4, 3, 5, 6, 6, 3, 3,
3, 1, 0, 1, 0, 4, 1, 3, 1, 3,
1, 1,
}
var yyChk = []int{
-1000, -17, 4, 5, -18, -19, -10, 28, 25, -15,
6, 14, 10, 9, -20, -12, 18, 11, 30, 16,
17, 15, -10, -8, -7, 6, -9, 25, 28, 28,
-3, 12, -15, 6, 6, 8, -10, -10, -10, 29,
27, 26, -16, 24, 15, 26, -8, -1, 29, -11,
-10, 7, -10, -13, 13, 28, -6, 25, 19, 31,
-7, 7, 26, 29, 27, 29, 28, -2, 6, 24,
-5, 26, -4, 6, -10, -11, -3, -10, 29, 27,
-10, 26, 27, 24, -14, 20, -13, 29, 6, -4,
7, 21, 8, -6, 22, 7, 23, 7,
-1000, -18, 4, 5, -19, -20, -10, 29, 26, -15,
6, 15, 10, 9, -21, -12, 19, 11, 31, 17,
18, 16, -10, -8, -7, 6, -9, 26, 29, 29,
-3, 12, -15, 6, 6, 8, -10, -10, -10, 30,
28, 27, -16, 25, 16, -17, 14, 27, -8, -1,
30, -11, -10, 7, -10, -13, 13, 29, -6, 26,
20, 32, -7, -17, 7, 8, 27, 30, 28, 30,
29, -2, 6, 25, -5, 27, -4, 6, -10, -17,
-11, -3, -10, 30, 28, -10, 27, 28, 25, -14,
21, -13, 30, 6, -4, 7, 22, 8, -6, 23,
7, 24, 7,
}
var yyDef = []int{
0, -2, 3, 0, -2, 2, 5, 0, 0, 20,
13, 42, 39, 12, 4, 0, 0, 11, 0, 0,
0, 0, 0, 0, 23, 0, 30, 0, 0, 0,
40, 0, 14, 13, 0, 0, 36, 37, 38, 28,
0, 29, 0, 26, 27, 21, 0, 0, 32, 46,
48, 49, 0, 0, 41, 0, 0, 0, 0, 33,
24, 25, 22, 31, 0, 42, 0, 0, 44, 0,
0, 16, 17, 0, 8, 47, 40, 0, 43, 0,
6, 15, 0, 0, 0, 0, 34, 35, 45, 18,
19, 14, 9, 0, 0, 0, 0, 7,
13, 44, 41, 12, 4, 0, 0, 11, 0, 0,
0, 0, 0, 0, 23, 0, 28, 0, 0, 0,
42, 0, 14, 13, 0, 0, 38, 39, 40, 30,
0, 28, 0, 26, 27, 32, 0, 21, 0, 0,
34, 48, 50, 51, 0, 0, 43, 0, 0, 0,
0, 28, 24, 31, 25, 29, 22, 33, 0, 44,
0, 0, 46, 0, 0, 16, 17, 0, 8, 35,
49, 42, 0, 45, 0, 6, 15, 0, 0, 0,
0, 36, 37, 47, 18, 19, 14, 9, 0, 0,
0, 0, 7,
}
var yyTok1 = []int{
@ -179,21 +187,21 @@ var yyTok1 = []int{
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
28, 29, 3, 3, 27, 3, 3, 3, 3, 3,
29, 30, 3, 3, 28, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 24, 3, 3, 3, 3, 3, 3, 3, 3,
3, 25, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 30, 3, 31, 3, 3, 3, 3, 3, 3,
3, 31, 3, 32, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 25, 3, 26,
3, 3, 3, 26, 3, 27,
}
var yyTok2 = []int{
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
22, 23,
22, 23, 24,
}
var yyTok3 = []int{
0,
@ -559,27 +567,46 @@ yydefault:
case 28:
//line parser.y:156
{
yyVAL.ruleNode = yyS[yypt-1].ruleNode
yyVAL.str = "0s"
}
case 29:
//line parser.y:158
{
yyVAL.ruleNode = ast.NewVectorSelector(yyS[yypt-1].labelMatchers)
yyVAL.str = yyS[yypt-0].str
}
case 30:
//line parser.y:160
//line parser.y:162
{
yyVAL.ruleNode = yyS[yypt-1].ruleNode
}
case 31:
//line parser.y:164
{
var err error
m, err := metric.NewLabelMatcher(metric.Equal, clientmodel.MetricNameLabel, clientmodel.LabelValue(yyS[yypt-1].str))
yyVAL.ruleNode, err = NewVectorSelector(yyS[yypt-2].labelMatchers, yyS[yypt-0].str)
if err != nil {
yylex.Error(err.Error())
return 1
}
yyS[yypt-0].labelMatchers = append(yyS[yypt-0].labelMatchers, m)
yyVAL.ruleNode = ast.NewVectorSelector(yyS[yypt-0].labelMatchers)
}
case 31:
//line parser.y:168
case 32:
//line parser.y:170
{
var err error
m, err := metric.NewLabelMatcher(metric.Equal, clientmodel.MetricNameLabel, clientmodel.LabelValue(yyS[yypt-2].str))
if err != nil {
yylex.Error(err.Error())
return 1
}
yyS[yypt-1].labelMatchers = append(yyS[yypt-1].labelMatchers, m)
yyVAL.ruleNode, err = NewVectorSelector(yyS[yypt-1].labelMatchers, yyS[yypt-0].str)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 33:
//line parser.y:179
{
var err error
yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-3].str, yyS[yypt-1].ruleNodeSlice)
@ -588,8 +615,8 @@ yydefault:
return 1
}
}
case 32:
//line parser.y:174
case 34:
//line parser.y:185
{
var err error
yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-2].str, []ast.Node{})
@ -598,18 +625,18 @@ yydefault:
return 1
}
}
case 33:
//line parser.y:180
case 35:
//line parser.y:191
{
var err error
yyVAL.ruleNode, err = NewMatrixSelector(yyS[yypt-3].ruleNode, yyS[yypt-1].str)
yyVAL.ruleNode, err = NewMatrixSelector(yyS[yypt-4].ruleNode, yyS[yypt-2].str, yyS[yypt-0].str)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 34:
//line parser.y:186
case 36:
//line parser.y:197
{
var err error
yyVAL.ruleNode, err = NewVectorAggregation(yyS[yypt-5].str, yyS[yypt-3].ruleNode, yyS[yypt-1].labelNameSlice, yyS[yypt-0].boolean)
@ -618,8 +645,8 @@ yydefault:
return 1
}
}
case 35:
//line parser.y:192
case 37:
//line parser.y:203
{
var err error
yyVAL.ruleNode, err = NewVectorAggregation(yyS[yypt-5].str, yyS[yypt-1].ruleNode, yyS[yypt-4].labelNameSlice, yyS[yypt-3].boolean)
@ -628,28 +655,8 @@ yydefault:
return 1
}
}
case 36:
//line parser.y:200
{
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 37:
//line parser.y:206
{
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 38:
//line parser.y:212
//line parser.y:211
{
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
@ -659,57 +666,77 @@ yydefault:
}
}
case 39:
//line parser.y:218
//line parser.y:217
{
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 40:
//line parser.y:223
{
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 41:
//line parser.y:229
{
yyVAL.ruleNode = ast.NewScalarLiteral(yyS[yypt-0].num)
}
case 40:
//line parser.y:222
case 42:
//line parser.y:233
{
yyVAL.boolean = false
}
case 41:
//line parser.y:224
case 43:
//line parser.y:235
{
yyVAL.boolean = true
}
case 42:
//line parser.y:228
case 44:
//line parser.y:239
{
yyVAL.labelNameSlice = clientmodel.LabelNames{}
}
case 43:
//line parser.y:230
case 45:
//line parser.y:241
{
yyVAL.labelNameSlice = yyS[yypt-1].labelNameSlice
}
case 44:
//line parser.y:234
case 46:
//line parser.y:245
{
yyVAL.labelNameSlice = clientmodel.LabelNames{clientmodel.LabelName(yyS[yypt-0].str)}
}
case 45:
//line parser.y:236
case 47:
//line parser.y:247
{
yyVAL.labelNameSlice = append(yyVAL.labelNameSlice, clientmodel.LabelName(yyS[yypt-0].str))
}
case 46:
//line parser.y:240
case 48:
//line parser.y:251
{
yyVAL.ruleNodeSlice = []ast.Node{yyS[yypt-0].ruleNode}
}
case 47:
//line parser.y:242
case 49:
//line parser.y:253
{
yyVAL.ruleNodeSlice = append(yyVAL.ruleNodeSlice, yyS[yypt-0].ruleNode)
}
case 48:
//line parser.y:246
case 50:
//line parser.y:257
{
yyVAL.ruleNode = yyS[yypt-0].ruleNode
}
case 49:
//line parser.y:248
case 51:
//line parser.y:259
{
yyVAL.ruleNode = ast.NewStringLiteral(yyS[yypt-0].str)
}

@ -62,42 +62,32 @@ func newTestStorage(t testing.TB) (storage local.Storage, closer test.Closer) {
func TestExpressions(t *testing.T) {
// Labels in expected output need to be alphabetically sorted.
expressionTests := []struct {
expr string
output []string
shouldFail bool
checkOrder bool
fullRanges int
intervalRanges int
expr string
output []string
shouldFail bool
checkOrder bool
}{
{
expr: `SUM(http_requests)`,
output: []string{`{} => 3600 @[%v]`},
fullRanges: 0,
intervalRanges: 8,
expr: `SUM(http_requests)`,
output: []string{`{} => 3600 @[%v]`},
}, {
expr: `SUM(http_requests{instance="0"}) BY(job)`,
output: []string{
`{job="api-server"} => 400 @[%v]`,
`{job="app-server"} => 1200 @[%v]`,
},
fullRanges: 0,
intervalRanges: 4,
}, {
expr: `SUM(http_requests{instance="0"}) BY(job) KEEPING_EXTRA`,
output: []string{
`{instance="0", job="api-server"} => 400 @[%v]`,
`{instance="0", job="app-server"} => 1200 @[%v]`,
},
fullRanges: 0,
intervalRanges: 4,
}, {
expr: `SUM(http_requests) BY (job)`,
output: []string{
`{job="api-server"} => 1000 @[%v]`,
`{job="app-server"} => 2600 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
// Non-existent labels mentioned in BY-clauses shouldn't propagate to output.
expr: `SUM(http_requests) BY (job, nonexistent)`,
@ -105,8 +95,6 @@ func TestExpressions(t *testing.T) {
`{job="api-server"} => 1000 @[%v]`,
`{job="app-server"} => 2600 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `
// Test comment.
@ -116,16 +104,12 @@ func TestExpressions(t *testing.T) {
`{job="api-server"} => 1000 @[%v]`,
`{job="app-server"} => 2600 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `COUNT(http_requests) BY (job)`,
output: []string{
`{job="api-server"} => 4 @[%v]`,
`{job="app-server"} => 4 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `SUM(http_requests) BY (job, group)`,
output: []string{
@ -134,139 +118,103 @@ func TestExpressions(t *testing.T) {
`{group="production", job="api-server"} => 300 @[%v]`,
`{group="production", job="app-server"} => 1100 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `AVG(http_requests) BY (job)`,
output: []string{
`{job="api-server"} => 250 @[%v]`,
`{job="app-server"} => 650 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `MIN(http_requests) BY (job)`,
output: []string{
`{job="api-server"} => 100 @[%v]`,
`{job="app-server"} => 500 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `MAX(http_requests) BY (job)`,
output: []string{
`{job="api-server"} => 400 @[%v]`,
`{job="app-server"} => 800 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `SUM(http_requests) BY (job) - COUNT(http_requests) BY (job)`,
output: []string{
`{job="api-server"} => 996 @[%v]`,
`{job="app-server"} => 2596 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `2 - SUM(http_requests) BY (job)`,
output: []string{
`{job="api-server"} => -998 @[%v]`,
`{job="app-server"} => -2598 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `1000 / SUM(http_requests) BY (job)`,
output: []string{
`{job="api-server"} => 1 @[%v]`,
`{job="app-server"} => 0.38461538461538464 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `SUM(http_requests) BY (job) - 2`,
output: []string{
`{job="api-server"} => 998 @[%v]`,
`{job="app-server"} => 2598 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `SUM(http_requests) BY (job) % 3`,
output: []string{
`{job="api-server"} => 1 @[%v]`,
`{job="app-server"} => 2 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `SUM(http_requests) BY (job) / 0`,
output: []string{
`{job="api-server"} => +Inf @[%v]`,
`{job="app-server"} => +Inf @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `SUM(http_requests) BY (job) > 1000`,
output: []string{
`{job="app-server"} => 2600 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `1000 < SUM(http_requests) BY (job)`,
output: []string{
`{job="app-server"} => 1000 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `SUM(http_requests) BY (job) <= 1000`,
output: []string{
`{job="api-server"} => 1000 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `SUM(http_requests) BY (job) != 1000`,
output: []string{
`{job="app-server"} => 2600 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `SUM(http_requests) BY (job) == 1000`,
output: []string{
`{job="api-server"} => 1000 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `SUM(http_requests) BY (job) + SUM(http_requests) BY (job)`,
output: []string{
`{job="api-server"} => 2000 @[%v]`,
`{job="app-server"} => 5200 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
expr: `http_requests{job="api-server", group="canary"}`,
output: []string{
`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
`http_requests{group="canary", instance="1", job="api-server"} => 400 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
}, {
expr: `http_requests{job="api-server", group="canary"} + rate(http_requests{job="api-server"}[5m]) * 5 * 60`,
output: []string{
`{group="canary", instance="0", job="api-server"} => 330 @[%v]`,
`{group="canary", instance="1", job="api-server"} => 440 @[%v]`,
},
fullRanges: 4,
intervalRanges: 0,
}, {
expr: `rate(http_requests[25m]) * 25 * 60`,
output: []string{
@ -279,8 +227,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="1", job="api-server"} => 100 @[%v]`,
`{group="production", instance="1", job="app-server"} => 300 @[%v]`,
},
fullRanges: 8,
intervalRanges: 0,
}, {
expr: `delta(http_requests[25m], 1)`,
output: []string{
@ -293,8 +239,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="1", job="api-server"} => 100 @[%v]`,
`{group="production", instance="1", job="app-server"} => 300 @[%v]`,
},
fullRanges: 8,
intervalRanges: 0,
}, {
expr: `sort(http_requests)`,
output: []string{
@ -307,9 +251,7 @@ func TestExpressions(t *testing.T) {
`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
},
checkOrder: true,
fullRanges: 0,
intervalRanges: 8,
checkOrder: true,
}, {
expr: `sort_desc(http_requests)`,
output: []string{
@ -322,9 +264,7 @@ func TestExpressions(t *testing.T) {
`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
},
checkOrder: true,
fullRanges: 0,
intervalRanges: 8,
checkOrder: true,
}, {
expr: `topk(3, http_requests)`,
output: []string{
@ -332,18 +272,14 @@ func TestExpressions(t *testing.T) {
`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
},
checkOrder: true,
fullRanges: 0,
intervalRanges: 8,
checkOrder: true,
}, {
expr: `topk(5, http_requests{group="canary",job="app-server"})`,
output: []string{
`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
},
checkOrder: true,
fullRanges: 0,
intervalRanges: 2,
checkOrder: true,
}, {
expr: `bottomk(3, http_requests)`,
output: []string{
@ -351,26 +287,20 @@ func TestExpressions(t *testing.T) {
`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
},
checkOrder: true,
fullRanges: 0,
intervalRanges: 8,
checkOrder: true,
}, {
expr: `bottomk(5, http_requests{group="canary",job="app-server"})`,
output: []string{
`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
},
checkOrder: true,
fullRanges: 0,
intervalRanges: 2,
checkOrder: true,
}, {
// Single-letter label names and values.
expr: `x{y="testvalue"}`,
output: []string{
`x{y="testvalue"} => 100 @[%v]`,
},
fullRanges: 0,
intervalRanges: 1,
}, {
// Lower-cased aggregation operators should work too.
expr: `sum(http_requests) by (job) + min(http_requests) by (job) + max(http_requests) by (job) + avg(http_requests) by (job)`,
@ -378,62 +308,42 @@ func TestExpressions(t *testing.T) {
`{job="app-server"} => 4550 @[%v]`,
`{job="api-server"} => 1750 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
}, {
// Deltas should be adjusted for target interval vs. samples under target interval.
expr: `delta(http_requests{group="canary", instance="1", job="app-server"}[18m])`,
output: []string{`{group="canary", instance="1", job="app-server"} => 288 @[%v]`},
fullRanges: 1,
intervalRanges: 0,
expr: `delta(http_requests{group="canary", instance="1", job="app-server"}[18m])`,
output: []string{`{group="canary", instance="1", job="app-server"} => 288 @[%v]`},
}, {
// Deltas should perform the same operation when 2nd argument is 0.
expr: `delta(http_requests{group="canary", instance="1", job="app-server"}[18m], 0)`,
output: []string{`{group="canary", instance="1", job="app-server"} => 288 @[%v]`},
fullRanges: 1,
intervalRanges: 0,
expr: `delta(http_requests{group="canary", instance="1", job="app-server"}[18m], 0)`,
output: []string{`{group="canary", instance="1", job="app-server"} => 288 @[%v]`},
}, {
// Rates should calculate per-second rates.
expr: `rate(http_requests{group="canary", instance="1", job="app-server"}[60m])`,
output: []string{`{group="canary", instance="1", job="app-server"} => 0.26666666666666666 @[%v]`},
fullRanges: 1,
intervalRanges: 0,
expr: `rate(http_requests{group="canary", instance="1", job="app-server"}[60m])`,
output: []string{`{group="canary", instance="1", job="app-server"} => 0.26666666666666666 @[%v]`},
}, {
// Deriv should return the same as rate in simple cases.
expr: `deriv(http_requests{group="canary", instance="1", job="app-server"}[60m])`,
output: []string{`{group="canary", instance="1", job="app-server"} => 0.26666666666666666 @[%v]`},
fullRanges: 1,
intervalRanges: 0,
expr: `deriv(http_requests{group="canary", instance="1", job="app-server"}[60m])`,
output: []string{`{group="canary", instance="1", job="app-server"} => 0.26666666666666666 @[%v]`},
}, {
// Counter resets at in the middle of range are handled correctly by rate().
expr: `rate(testcounter_reset_middle[60m])`,
output: []string{`{} => 0.03 @[%v]`},
fullRanges: 1,
intervalRanges: 0,
expr: `rate(testcounter_reset_middle[60m])`,
output: []string{`{} => 0.03 @[%v]`},
}, {
// Counter resets at end of range are ignored by rate().
expr: `rate(testcounter_reset_end[5m])`,
output: []string{`{} => 0 @[%v]`},
fullRanges: 1,
intervalRanges: 0,
expr: `rate(testcounter_reset_end[5m])`,
output: []string{`{} => 0 @[%v]`},
}, {
// Deriv should return correct result.
expr: `deriv(testcounter_reset_middle[100m])`,
output: []string{`{} => 0.010606060606060607 @[%v]`},
fullRanges: 1,
intervalRanges: 0,
expr: `deriv(testcounter_reset_middle[100m])`,
output: []string{`{} => 0.010606060606060607 @[%v]`},
}, {
// count_scalar for a non-empty vector should return scalar element count.
expr: `count_scalar(http_requests)`,
output: []string{`scalar: 8 @[%v]`},
fullRanges: 0,
intervalRanges: 8,
expr: `count_scalar(http_requests)`,
output: []string{`scalar: 8 @[%v]`},
}, {
// count_scalar for an empty vector should return scalar 0.
expr: `count_scalar(nonexistent)`,
output: []string{`scalar: 0 @[%v]`},
fullRanges: 0,
intervalRanges: 0,
expr: `count_scalar(nonexistent)`,
output: []string{`scalar: 0 @[%v]`},
}, {
// Empty expressions shouldn't parse.
expr: ``,
@ -454,8 +364,6 @@ func TestExpressions(t *testing.T) {
`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
},
fullRanges: 0,
intervalRanges: 4,
}, {
expr: `http_requests{job=~"server",group!="canary"}`,
output: []string{
@ -464,29 +372,21 @@ func TestExpressions(t *testing.T) {
`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
},
fullRanges: 0,
intervalRanges: 4,
}, {
expr: `http_requests{job!~"api",group!="canary"}`,
output: []string{
`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
}, {
expr: `count_scalar(http_requests{job=~"^server$"})`,
output: []string{`scalar: 0 @[%v]`},
fullRanges: 0,
intervalRanges: 0,
expr: `count_scalar(http_requests{job=~"^server$"})`,
output: []string{`scalar: 0 @[%v]`},
}, {
expr: `http_requests{group="production",job=~"^api"}`,
output: []string{
`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `abs(-1 * http_requests{group="production",job="api-server"})`,
@ -494,8 +394,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 100 @[%v]`,
`{group="production", instance="1", job="api-server"} => 200 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `floor(0.004 * http_requests{group="production",job="api-server"})`,
@ -503,8 +401,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 0 @[%v]`,
`{group="production", instance="1", job="api-server"} => 0 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `ceil(0.004 * http_requests{group="production",job="api-server"})`,
@ -512,8 +408,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 1 @[%v]`,
`{group="production", instance="1", job="api-server"} => 1 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `round(0.004 * http_requests{group="production",job="api-server"})`,
@ -521,8 +415,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 0 @[%v]`,
`{group="production", instance="1", job="api-server"} => 1 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{ // Round should correctly handle negative numbers.
expr: `round(-1 * (0.004 * http_requests{group="production",job="api-server"}))`,
@ -530,8 +422,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 0 @[%v]`,
`{group="production", instance="1", job="api-server"} => -1 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{ // Round should round half up.
expr: `round(0.005 * http_requests{group="production",job="api-server"})`,
@ -539,8 +429,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 1 @[%v]`,
`{group="production", instance="1", job="api-server"} => 1 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `round(-1 * (0.005 * http_requests{group="production",job="api-server"}))`,
@ -548,8 +436,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 0 @[%v]`,
`{group="production", instance="1", job="api-server"} => -1 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `round(1 + 0.005 * http_requests{group="production",job="api-server"})`,
@ -557,8 +443,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 2 @[%v]`,
`{group="production", instance="1", job="api-server"} => 2 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `round(-1 * (1 + 0.005 * http_requests{group="production",job="api-server"}))`,
@ -566,8 +450,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => -1 @[%v]`,
`{group="production", instance="1", job="api-server"} => -2 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{ // Round should accept the number to round nearest to.
expr: `round(0.0005 * http_requests{group="production",job="api-server"}, 0.1)`,
@ -575,8 +457,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 0.1 @[%v]`,
`{group="production", instance="1", job="api-server"} => 0.1 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `round(2.1 + 0.0005 * http_requests{group="production",job="api-server"}, 0.1)`,
@ -584,8 +464,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 2.2 @[%v]`,
`{group="production", instance="1", job="api-server"} => 2.2 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `round(5.2 + 0.0005 * http_requests{group="production",job="api-server"}, 0.1)`,
@ -593,8 +471,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 5.3 @[%v]`,
`{group="production", instance="1", job="api-server"} => 5.3 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{ // Round should work correctly with negative numbers and multiple decimal places.
expr: `round(-1 * (5.2 + 0.0005 * http_requests{group="production",job="api-server"}), 0.1)`,
@ -602,8 +478,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => -5.2 @[%v]`,
`{group="production", instance="1", job="api-server"} => -5.3 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{ // Round should work correctly with big toNearests.
expr: `round(0.025 * http_requests{group="production",job="api-server"}, 5)`,
@ -611,8 +485,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 5 @[%v]`,
`{group="production", instance="1", job="api-server"} => 5 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `round(0.045 * http_requests{group="production",job="api-server"}, 5)`,
@ -620,8 +492,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 5 @[%v]`,
`{group="production", instance="1", job="api-server"} => 10 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `avg_over_time(http_requests{group="production",job="api-server"}[1h])`,
@ -629,8 +499,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 50 @[%v]`,
`{group="production", instance="1", job="api-server"} => 100 @[%v]`,
},
fullRanges: 2,
intervalRanges: 0,
},
{
expr: `count_over_time(http_requests{group="production",job="api-server"}[1h])`,
@ -638,8 +506,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 11 @[%v]`,
`{group="production", instance="1", job="api-server"} => 11 @[%v]`,
},
fullRanges: 2,
intervalRanges: 0,
},
{
expr: `max_over_time(http_requests{group="production",job="api-server"}[1h])`,
@ -647,8 +513,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 100 @[%v]`,
`{group="production", instance="1", job="api-server"} => 200 @[%v]`,
},
fullRanges: 2,
intervalRanges: 0,
},
{
expr: `min_over_time(http_requests{group="production",job="api-server"}[1h])`,
@ -656,8 +520,6 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 0 @[%v]`,
`{group="production", instance="1", job="api-server"} => 0 @[%v]`,
},
fullRanges: 2,
intervalRanges: 0,
},
{
expr: `sum_over_time(http_requests{group="production",job="api-server"}[1h])`,
@ -665,14 +527,10 @@ func TestExpressions(t *testing.T) {
`{group="production", instance="0", job="api-server"} => 550 @[%v]`,
`{group="production", instance="1", job="api-server"} => 1100 @[%v]`,
},
fullRanges: 2,
intervalRanges: 0,
},
{
expr: `time()`,
output: []string{`scalar: 3000 @[%v]`},
fullRanges: 0,
intervalRanges: 0,
expr: `time()`,
output: []string{`scalar: 3000 @[%v]`},
},
{
expr: `drop_common_labels(http_requests{group="production",job="api-server"})`,
@ -680,8 +538,6 @@ func TestExpressions(t *testing.T) {
`http_requests{instance="0"} => 100 @[%v]`,
`http_requests{instance="1"} => 200 @[%v]`,
},
fullRanges: 0,
intervalRanges: 2,
},
{
expr: `{` + string(clientmodel.MetricNameLabel) + `=~".*"}`,
@ -698,8 +554,6 @@ func TestExpressions(t *testing.T) {
`testcounter_reset_middle => 50 @[%v]`,
`x{y="testvalue"} => 100 @[%v]`,
},
fullRanges: 0,
intervalRanges: 11,
},
{
expr: `{job=~"server", job!~"api"}`,
@ -709,8 +563,6 @@ func TestExpressions(t *testing.T) {
`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
},
fullRanges: 0,
intervalRanges: 4,
},
{
// Test alternative "by"-clause order.
@ -719,8 +571,6 @@ func TestExpressions(t *testing.T) {
`{group="canary"} => 700 @[%v]`,
`{group="production"} => 300 @[%v]`,
},
fullRanges: 0,
intervalRanges: 4,
},
{
// Test alternative "by"-clause order with "keeping_extra".
@ -729,8 +579,6 @@ func TestExpressions(t *testing.T) {
`{group="canary", job="api-server"} => 700 @[%v]`,
`{group="production", job="api-server"} => 300 @[%v]`,
},
fullRanges: 0,
intervalRanges: 4,
},
{
// Test both alternative "by"-clause orders in one expression.
@ -740,48 +588,50 @@ func TestExpressions(t *testing.T) {
output: []string{
`{job="api-server"} => 1000 @[%v]`,
},
fullRanges: 0,
intervalRanges: 4,
},
{
expr: `absent(nonexistent)`,
output: []string{
`{} => 1 @[%v]`,
},
fullRanges: 0,
intervalRanges: 0,
},
{
expr: `absent(nonexistent{job="testjob", instance="testinstance", method=~".*"})`,
output: []string{
`{instance="testinstance", job="testjob"} => 1 @[%v]`,
},
fullRanges: 0,
intervalRanges: 0,
},
{
expr: `count_scalar(absent(http_requests))`,
output: []string{
`scalar: 0 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
},
{
expr: `count_scalar(absent(sum(http_requests)))`,
output: []string{
`scalar: 0 @[%v]`,
},
fullRanges: 0,
intervalRanges: 8,
},
{
expr: `absent(sum(nonexistent{job="testjob", instance="testinstance"}))`,
output: []string{
`{} => 1 @[%v]`,
},
fullRanges: 0,
intervalRanges: 0,
},
{
expr: `http_requests{group="production",job="api-server"} offset 5m`,
output: []string{
`http_requests{group="production", instance="0", job="api-server"} => 90 @[%v]`,
`http_requests{group="production", instance="1", job="api-server"} => 180 @[%v]`,
},
},
{
expr: `rate(http_requests{group="production",job="api-server"}[10m] offset 5m)`,
output: []string{
`{group="production", instance="0", job="api-server"} => 0.03333333333333333 @[%v]`,
`{group="production", instance="1", job="api-server"} => 0.06666666666666667 @[%v]`,
},
},
}
@ -834,17 +684,6 @@ func TestExpressions(t *testing.T) {
}
}
analyzer := ast.NewQueryAnalyzer(storage)
ast.Walk(analyzer, testExpr)
if exprTest.fullRanges != len(analyzer.FullRanges) {
t.Errorf("%d. Count of full ranges didn't match: %v vs %v", i, exprTest.fullRanges, len(analyzer.FullRanges))
failed = true
}
if exprTest.intervalRanges != len(analyzer.IntervalRanges) {
t.Errorf("%d. Count of interval ranges didn't match: %v vs %v", i, exprTest.intervalRanges, len(analyzer.IntervalRanges))
failed = true
}
if failed {
t.Errorf("%d. Expression: %v\n%v", i, exprTest.expr, vectorComparisonString(expectedLines, resultLines))
}