From 6043298a9f0af26e6af04986eed1bcc8ecb33787 Mon Sep 17 00:00:00 2001 From: SuperQ Date: Thu, 18 Jul 2024 14:29:33 +0200 Subject: [PATCH] Apply struct alignment to promql package Apply struct alignment with betteralign to the promql packages. Signed-off-by: SuperQ --- promql/engine.go | 65 ++-- promql/parser/ast.go | 48 +-- promql/parser/functions.go | 2 +- promql/parser/lex.go | 32 +- promql/parser/lex_test.go | 650 ++++++++++++++++++------------------- promql/parser/parse.go | 24 +- promql/promqltest/test.go | 41 +-- promql/query_logger.go | 4 +- promql/value.go | 12 +- 9 files changed, 444 insertions(+), 434 deletions(-) diff --git a/promql/engine.go b/promql/engine.go index 25e67db633..a9eec6ad07 100644 --- a/promql/engine.go +++ b/promql/engine.go @@ -188,13 +188,13 @@ type query struct { stats *stats.QueryTimers // Sample stats for the query execution. sampleStats *stats.QuerySamples - // Result matrix for reuse. - matrix Matrix // Cancellation function for the query. cancel func() // The engine against which the query is executed. ng *Engine + // Result matrix for reuse. + matrix Matrix } type QueryOrigin struct{} @@ -287,17 +287,18 @@ type QueryTracker interface { type EngineOpts struct { Logger log.Logger Reg prometheus.Registerer - MaxSamples int - Timeout time.Duration ActiveQueryTracker QueryTracker - // LookbackDelta determines the time since the last sample after which a time - // series is considered stale. - LookbackDelta time.Duration // NoStepSubqueryIntervalFn is the default evaluation interval of // a subquery in milliseconds if no step in range vector was specified `[30m:]`. NoStepSubqueryIntervalFn func(rangeMillis int64) int64 + MaxSamples int + Timeout time.Duration + // LookbackDelta determines the time since the last sample after which a time + // series is considered stale. + LookbackDelta time.Duration + // EnableAtModifier if true enables @ modifier. Disabled otherwise. This // is supposed to be enabled for regular PromQL (as of Prometheus v2.33) // but the option to disable it is still provided here for those using @@ -319,14 +320,14 @@ type EngineOpts struct { // It is connected to a querier. type Engine struct { logger log.Logger - metrics *engineMetrics - timeout time.Duration - maxSamplesPerQuery int activeQueryTracker QueryTracker queryLogger QueryLogger - queryLoggerLock sync.RWMutex - lookbackDelta time.Duration + metrics *engineMetrics noStepSubqueryIntervalFn func(rangeMillis int64) int64 + timeout time.Duration + maxSamplesPerQuery int + lookbackDelta time.Duration + queryLoggerLock sync.RWMutex enableAtModifier bool enableNegativeOffset bool enablePerStepStats bool @@ -1022,16 +1023,17 @@ func (e errWithWarnings) Error() string { return e.err.Error() } type evaluator struct { ctx context.Context + logger log.Logger + samplesStats *stats.QuerySamples + noStepSubqueryIntervalFn func(rangeMillis int64) int64 + startTimestamp int64 // Start time in milliseconds. endTimestamp int64 // End time in milliseconds. interval int64 // Interval in milliseconds. - maxSamples int - currentSamples int - logger log.Logger - lookbackDelta time.Duration - samplesStats *stats.QuerySamples - noStepSubqueryIntervalFn func(rangeMillis int64) int64 + maxSamples int + currentSamples int + lookbackDelta time.Duration } // errorf causes a panic with the input formatted into an error. @@ -1084,23 +1086,24 @@ type EvalSeriesHelper struct { // EvalNodeHelper stores extra information and caches for evaluating a single node across steps. 
type EvalNodeHelper struct { - // Evaluation timestamp. - Ts int64 - // Vector that can be used for output. - Out Vector - // Caches. // funcHistogramQuantile for classic histograms. signatureToMetricWithBuckets map[string]*metricWithBuckets - lb *labels.Builder - lblBuf []byte - lblResultBuf []byte + lb *labels.Builder // For binary vector matching. rightSigs map[string]Sample matchedSigs map[string]map[uint64]struct{} resultMetric map[string]labels.Labels + // Vector that can be used for output. + Out Vector + + lblBuf []byte + lblResultBuf []byte + + // Evaluation timestamp. + Ts int64 } func (enh *EvalNodeHelper) resetBuilder(lbls labels.Labels) { @@ -2773,15 +2776,15 @@ func vectorElemBinop(op parser.ItemType, lhs, rhs float64, hlhs, hrhs *histogram } type groupedAggregation struct { + histogramValue *histogram.FloatHistogram + heap vectorByValueHeap + floatValue float64 + floatMean float64 // Mean, or "compensating value" for Kahan summation. + groupCount int seen bool // Was this output groups seen in the input at this timestamp. hasFloat bool // Has at least 1 float64 sample aggregated. hasHistogram bool // Has at least 1 histogram sample aggregated. - floatValue float64 - histogramValue *histogram.FloatHistogram - floatMean float64 // Mean, or "compensating value" for Kahan summation. - groupCount int groupAggrComplete bool // Used by LIMITK to short-cut series loop when we've reached K elem on every group - heap vectorByValueHeap } // aggregation evaluates sum, avg, count, stdvar, stddev or quantile at one timestep on inputMatrix. diff --git a/promql/parser/ast.go b/promql/parser/ast.go index 830e8a2c5e..00304f94ef 100644 --- a/promql/parser/ast.go +++ b/promql/parser/ast.go @@ -90,23 +90,24 @@ type Expressions []Expr // AggregateExpr represents an aggregation operation on a Vector. type AggregateExpr struct { - Op ItemType // The used aggregation operation. Expr Expr // The Vector expression over which is aggregated. Param Expr // Parameter used by some aggregators. Grouping []string // The labels by which to group the Vector. - Without bool // Whether to drop the given labels rather than keep them. PosRange posrange.PositionRange + Op ItemType // The used aggregation operation. + Without bool // Whether to drop the given labels rather than keep them. } // BinaryExpr represents a binary expression between two child expressions. type BinaryExpr struct { - Op ItemType // The operation of the expression. - LHS, RHS Expr // The operands on the respective sides of the operator. + LHS, RHS Expr // The operands on the respective sides of the operator. // The matching behavior for the operation if both operands are Vectors. // If they are not this field is nil. VectorMatching *VectorMatching + Op ItemType // The operation of the expression. + // If a comparison operator, return 0/1 rather than filtering. ReturnBool bool } @@ -131,8 +132,9 @@ type MatrixSelector struct { // SubqueryExpr represents a subquery. type SubqueryExpr struct { - Expr Expr - Range time.Duration + Expr Expr + Timestamp *int64 + Range time.Duration // OriginalOffset is the actual offset that was set in the query. // This never changes. OriginalOffset time.Duration @@ -140,7 +142,6 @@ type SubqueryExpr struct { // which is calculated using the original offset, at modifier time, // eval time, and subquery offsets in the AST tree. 
Offset time.Duration - Timestamp *int64 StartOrEnd ItemType // Set when @ is used with start() or end() Step time.Duration @@ -170,9 +171,10 @@ type StringLiteral struct { // UnaryExpr represents a unary operation on another expression. // Currently unary operations are only supported for Scalars. type UnaryExpr struct { - Op ItemType Expr Expr + Op ItemType + StartPos posrange.Pos } @@ -191,7 +193,15 @@ func (e *StepInvariantExpr) PositionRange() posrange.PositionRange { // VectorSelector represents a Vector selection. type VectorSelector struct { - Name string + // The unexpanded seriesSet populated at query preparation time. + UnexpandedSeriesSet storage.SeriesSet + Timestamp *int64 + Name string + LabelMatchers []*labels.Matcher + + Series []storage.Series + + PosRange posrange.PositionRange // OriginalOffset is the actual offset that was set in the query. // This never changes. OriginalOffset time.Duration @@ -199,16 +209,8 @@ type VectorSelector struct { // which is calculated using the original offset, at modifier time, // eval time, and subquery offsets in the AST tree. Offset time.Duration - Timestamp *int64 - SkipHistogramBuckets bool // Set when decoding native histogram buckets is not needed for query evaluation. StartOrEnd ItemType // Set when @ is used with start() or end() - LabelMatchers []*labels.Matcher - - // The unexpanded seriesSet populated at query preparation time. - UnexpandedSeriesSet storage.SeriesSet - Series []storage.Series - - PosRange posrange.PositionRange + SkipHistogramBuckets bool // Set when decoding native histogram buckets is not needed for query evaluation. } // TestStmt is an internal helper statement that allows execution @@ -282,17 +284,17 @@ func (vmc VectorMatchCardinality) String() string { // VectorMatching describes how elements from two Vectors in a binary // operation are supposed to be matched. type VectorMatching struct { - // The cardinality of the two Vectors. - Card VectorMatchCardinality // MatchingLabels contains the labels which define equality of a pair of // elements from the Vectors. MatchingLabels []string - // On includes the given label names from matching, - // rather than excluding them. - On bool // Include contains additional labels that should be included in // the result from the side with the lower cardinality. Include []string + // The cardinality of the two Vectors. + Card VectorMatchCardinality + // On includes the given label names from matching, + // rather than excluding them. + On bool } // Visitor allows visiting a Node and its child nodes. The Visit method is diff --git a/promql/parser/functions.go b/promql/parser/functions.go index 99b41321fe..2948fb881a 100644 --- a/promql/parser/functions.go +++ b/promql/parser/functions.go @@ -17,9 +17,9 @@ package parser // used by function nodes. type Function struct { Name string + ReturnType ValueType ArgTypes []ValueType Variadic int - ReturnType ValueType Experimental bool } diff --git a/promql/parser/lex.go b/promql/parser/lex.go index 18abd49ead..9261e6d60f 100644 --- a/promql/parser/lex.go +++ b/promql/parser/lex.go @@ -24,9 +24,9 @@ import ( // Item represents a token or text string returned from the scanner. type Item struct { + Val string // The value of this Item. Typ ItemType // The type of this Item. Pos posrange.Pos // The starting position, in bytes, of this Item in the input string. - Val string // The value of this Item. } // String returns a descriptive string for the Item. @@ -250,25 +250,27 @@ const ( // Lexer holds the state of the scanner. 
type Lexer struct { - input string // The string being scanned. - state stateFn // The next lexing function to enter. - pos posrange.Pos // Current position in the input. - start posrange.Pos // Start position of this Item. - width posrange.Pos // Width of last rune read from input. - lastPos posrange.Pos // Position of most recent Item returned by NextItem. - itemp *Item // Pointer to where the next scanned item should be placed. - scannedItem bool // Set to true every time an item is scanned. + state stateFn // The next lexing function to enter. + itemp *Item // Pointer to where the next scanned item should be placed. + input string // The string being scanned. + pos posrange.Pos // Current position in the input. + start posrange.Pos // Start position of this Item. + width posrange.Pos // Width of last rune read from input. + lastPos posrange.Pos // Position of most recent Item returned by NextItem. + + parenDepth int // Nesting depth of ( ) exprs. + histogramState histogramState // Determines whether or not inside of a histogram description. + stringOpen rune // Quote rune of the string currently being read. + + scannedItem bool // Set to true every time an item is scanned. - parenDepth int // Nesting depth of ( ) exprs. braceOpen bool // Whether a { is opened. bracketOpen bool // Whether a [ is opened. gotColon bool // Whether we got a ':' after [ was opened. - stringOpen rune // Quote rune of the string currently being read. // series description variables for internal PromQL testing framework as well as in promtool rules unit tests. // see https://prometheus.io/docs/prometheus/latest/configuration/unit_testing_rules/#series - seriesDesc bool // Whether we are lexing a series description. - histogramState histogramState // Determines whether or not inside of a histogram description. + seriesDesc bool // Whether we are lexing a series description. } // next returns the next rune in the input. @@ -297,7 +299,7 @@ func (l *Lexer) backup() { // emit passes an Item back to the client. func (l *Lexer) emit(t ItemType) { - *l.itemp = Item{t, l.start, l.input[l.start:l.pos]} + *l.itemp = Item{Typ: t, Pos: l.start, Val: l.input[l.start:l.pos]} l.start = l.pos l.scannedItem = true } @@ -332,7 +334,7 @@ func (l *Lexer) acceptRun(valid string) { // errorf returns an error token and terminates the scan by passing // back a nil pointer that will be the next state, terminating l.NextItem. 
func (l *Lexer) errorf(format string, args ...interface{}) stateFn { - *l.itemp = Item{ERROR, l.start, fmt.Sprintf(format, args...)} + *l.itemp = Item{Typ: ERROR, Pos: l.start, Val: fmt.Sprintf(format, args...)} l.scannedItem = true return nil diff --git a/promql/parser/lex_test.go b/promql/parser/lex_test.go index ac9aa27625..cc6b4c6e4f 100644 --- a/promql/parser/lex_test.go +++ b/promql/parser/lex_test.go @@ -37,40 +37,40 @@ var tests = []struct { tests: []testCase{ { input: ",", - expected: []Item{{COMMA, 0, ","}}, + expected: []Item{{Typ: COMMA, Pos: 0, Val: ","}}, }, { input: "()", - expected: []Item{{LEFT_PAREN, 0, `(`}, {RIGHT_PAREN, 1, `)`}}, + expected: []Item{{Typ: LEFT_PAREN, Pos: 0, Val: `(`}, {Typ: RIGHT_PAREN, Pos: 1, Val: `)`}}, }, { input: "{}", - expected: []Item{{LEFT_BRACE, 0, `{`}, {RIGHT_BRACE, 1, `}`}}, + expected: []Item{{Typ: LEFT_BRACE, Pos: 0, Val: `{`}, {Typ: RIGHT_BRACE, Pos: 1, Val: `}`}}, }, { input: "[5m]", expected: []Item{ - {LEFT_BRACKET, 0, `[`}, - {DURATION, 1, `5m`}, - {RIGHT_BRACKET, 3, `]`}, + {Typ: LEFT_BRACKET, Pos: 0, Val: `[`}, + {Typ: DURATION, Pos: 1, Val: `5m`}, + {Typ: RIGHT_BRACKET, Pos: 3, Val: `]`}, }, }, { input: "[ 5m]", expected: []Item{ - {LEFT_BRACKET, 0, `[`}, - {DURATION, 2, `5m`}, - {RIGHT_BRACKET, 4, `]`}, + {Typ: LEFT_BRACKET, Pos: 0, Val: `[`}, + {Typ: DURATION, Pos: 2, Val: `5m`}, + {Typ: RIGHT_BRACKET, Pos: 4, Val: `]`}, }, }, { input: "[ 5m]", expected: []Item{ - {LEFT_BRACKET, 0, `[`}, - {DURATION, 3, `5m`}, - {RIGHT_BRACKET, 5, `]`}, + {Typ: LEFT_BRACKET, Pos: 0, Val: `[`}, + {Typ: DURATION, Pos: 3, Val: `5m`}, + {Typ: RIGHT_BRACKET, Pos: 5, Val: `]`}, }, }, { input: "[ 5m ]", expected: []Item{ - {LEFT_BRACKET, 0, `[`}, - {DURATION, 3, `5m`}, - {RIGHT_BRACKET, 6, `]`}, + {Typ: LEFT_BRACKET, Pos: 0, Val: `[`}, + {Typ: DURATION, Pos: 3, Val: `5m`}, + {Typ: RIGHT_BRACKET, Pos: 6, Val: `]`}, }, }, { input: "\r\n\r", @@ -83,55 +83,55 @@ var tests = []struct { tests: []testCase{ { input: "1", - expected: []Item{{NUMBER, 0, "1"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "1"}}, }, { input: "4.23", - expected: []Item{{NUMBER, 0, "4.23"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "4.23"}}, }, { input: ".3", - expected: []Item{{NUMBER, 0, ".3"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: ".3"}}, }, { input: "5.", - expected: []Item{{NUMBER, 0, "5."}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "5."}}, }, { input: "NaN", - expected: []Item{{NUMBER, 0, "NaN"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "NaN"}}, }, { input: "nAN", - expected: []Item{{NUMBER, 0, "nAN"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "nAN"}}, }, { input: "NaN 123", - expected: []Item{{NUMBER, 0, "NaN"}, {NUMBER, 4, "123"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "NaN"}, {Typ: NUMBER, Pos: 4, Val: "123"}}, }, { input: "NaN123", - expected: []Item{{IDENTIFIER, 0, "NaN123"}}, + expected: []Item{{Typ: IDENTIFIER, Pos: 0, Val: "NaN123"}}, }, { input: "iNf", - expected: []Item{{NUMBER, 0, "iNf"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "iNf"}}, }, { input: "Inf", - expected: []Item{{NUMBER, 0, "Inf"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "Inf"}}, }, { input: "+Inf", - expected: []Item{{ADD, 0, "+"}, {NUMBER, 1, "Inf"}}, + expected: []Item{{Typ: ADD, Pos: 0, Val: "+"}, {Typ: NUMBER, Pos: 1, Val: "Inf"}}, }, { input: "+Inf 123", - expected: []Item{{ADD, 0, "+"}, {NUMBER, 1, "Inf"}, {NUMBER, 5, "123"}}, + expected: []Item{{Typ: ADD, Pos: 0, Val: "+"}, {Typ: NUMBER, Pos: 1, Val: "Inf"}, {Typ: NUMBER, Pos: 5, Val: "123"}}, 
}, { input: "-Inf", - expected: []Item{{SUB, 0, "-"}, {NUMBER, 1, "Inf"}}, + expected: []Item{{Typ: SUB, Pos: 0, Val: "-"}, {Typ: NUMBER, Pos: 1, Val: "Inf"}}, }, { input: "Infoo", - expected: []Item{{IDENTIFIER, 0, "Infoo"}}, + expected: []Item{{Typ: IDENTIFIER, Pos: 0, Val: "Infoo"}}, }, { input: "-Infoo", - expected: []Item{{SUB, 0, "-"}, {IDENTIFIER, 1, "Infoo"}}, + expected: []Item{{Typ: SUB, Pos: 0, Val: "-"}, {Typ: IDENTIFIER, Pos: 1, Val: "Infoo"}}, }, { input: "-Inf 123", - expected: []Item{{SUB, 0, "-"}, {NUMBER, 1, "Inf"}, {NUMBER, 5, "123"}}, + expected: []Item{{Typ: SUB, Pos: 0, Val: "-"}, {Typ: NUMBER, Pos: 1, Val: "Inf"}, {Typ: NUMBER, Pos: 5, Val: "123"}}, }, { input: "0x123", - expected: []Item{{NUMBER, 0, "0x123"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "0x123"}}, }, { input: "1..2", fail: true, @@ -140,7 +140,7 @@ var tests = []struct { fail: true, }, { input: "00_1_23_4.56_7_8", - expected: []Item{{NUMBER, 0, "00_1_23_4.56_7_8"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "00_1_23_4.56_7_8"}}, }, { input: "00_1_23__4.56_7_8", fail: true, @@ -152,7 +152,7 @@ var tests = []struct { fail: true, }, { input: "0x1_2_34", - expected: []Item{{NUMBER, 0, "0x1_2_34"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "0x1_2_34"}}, }, { input: "0x1_2__34", fail: true, @@ -170,7 +170,7 @@ var tests = []struct { fail: true, }, { input: "1.e2", - expected: []Item{{NUMBER, 0, "1.e2"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "1.e2"}}, }, { input: "1e.2", fail: true, @@ -191,7 +191,7 @@ var tests = []struct { fail: true, }, { input: "1e1_2_34", - expected: []Item{{NUMBER, 0, "1e1_2_34"}}, + expected: []Item{{Typ: NUMBER, Pos: 0, Val: "1e1_2_34"}}, }, { input: "1e_1_2_34", fail: true, @@ -209,7 +209,7 @@ var tests = []struct { fail: true, }, { input: "_1_2", - expected: []Item{{IDENTIFIER, 0, "_1_2"}}, + expected: []Item{{Typ: IDENTIFIER, Pos: 0, Val: "_1_2"}}, }, }, }, @@ -218,22 +218,22 @@ var tests = []struct { tests: []testCase{ { input: "\"test\\tsequence\"", - expected: []Item{{STRING, 0, `"test\tsequence"`}}, + expected: []Item{{Typ: STRING, Pos: 0, Val: `"test\tsequence"`}}, }, { input: "\"test\\\\.expression\"", - expected: []Item{{STRING, 0, `"test\\.expression"`}}, + expected: []Item{{Typ: STRING, Pos: 0, Val: `"test\\.expression"`}}, }, { input: "\"test\\.expression\"", expected: []Item{ - {ERROR, 0, "unknown escape sequence U+002E '.'"}, - {STRING, 0, `"test\.expression"`}, + {Typ: ERROR, Pos: 0, Val: "unknown escape sequence U+002E '.'"}, + {Typ: STRING, Pos: 0, Val: `"test\.expression"`}, }, }, { input: "`test\\.expression`", - expected: []Item{{STRING, 0, "`test\\.expression`"}}, + expected: []Item{{Typ: STRING, Pos: 0, Val: "`test\\.expression`"}}, }, { // See https://github.com/prometheus/prometheus/issues/939. 
@@ -247,19 +247,19 @@ var tests = []struct { tests: []testCase{ { input: "5s", - expected: []Item{{DURATION, 0, "5s"}}, + expected: []Item{{Typ: DURATION, Pos: 0, Val: "5s"}}, }, { input: "123m", - expected: []Item{{DURATION, 0, "123m"}}, + expected: []Item{{Typ: DURATION, Pos: 0, Val: "123m"}}, }, { input: "1h", - expected: []Item{{DURATION, 0, "1h"}}, + expected: []Item{{Typ: DURATION, Pos: 0, Val: "1h"}}, }, { input: "3w", - expected: []Item{{DURATION, 0, "3w"}}, + expected: []Item{{Typ: DURATION, Pos: 0, Val: "3w"}}, }, { input: "1y", - expected: []Item{{DURATION, 0, "1y"}}, + expected: []Item{{Typ: DURATION, Pos: 0, Val: "1y"}}, }, }, }, @@ -268,16 +268,16 @@ var tests = []struct { tests: []testCase{ { input: "abc", - expected: []Item{{IDENTIFIER, 0, "abc"}}, + expected: []Item{{Typ: IDENTIFIER, Pos: 0, Val: "abc"}}, }, { input: "a:bc", - expected: []Item{{METRIC_IDENTIFIER, 0, "a:bc"}}, + expected: []Item{{Typ: METRIC_IDENTIFIER, Pos: 0, Val: "a:bc"}}, }, { input: "abc d", - expected: []Item{{IDENTIFIER, 0, "abc"}, {IDENTIFIER, 4, "d"}}, + expected: []Item{{Typ: IDENTIFIER, Pos: 0, Val: "abc"}, {Typ: IDENTIFIER, Pos: 4, Val: "d"}}, }, { input: ":bc", - expected: []Item{{METRIC_IDENTIFIER, 0, ":bc"}}, + expected: []Item{{Typ: METRIC_IDENTIFIER, Pos: 0, Val: ":bc"}}, }, { input: "0a:bc", fail: true, @@ -289,13 +289,13 @@ var tests = []struct { tests: []testCase{ { input: "# some comment", - expected: []Item{{COMMENT, 0, "# some comment"}}, + expected: []Item{{Typ: COMMENT, Pos: 0, Val: "# some comment"}}, }, { input: "5 # 1+1\n5", expected: []Item{ - {NUMBER, 0, "5"}, - {COMMENT, 2, "# 1+1"}, - {NUMBER, 8, "5"}, + {Typ: NUMBER, Pos: 0, Val: "5"}, + {Typ: COMMENT, Pos: 2, Val: "# 1+1"}, + {Typ: NUMBER, Pos: 8, Val: "5"}, }, }, }, @@ -305,60 +305,60 @@ var tests = []struct { tests: []testCase{ { input: `=`, - expected: []Item{{EQL, 0, `=`}}, + expected: []Item{{Typ: EQL, Pos: 0, Val: `=`}}, }, { // Inside braces equality is a single '=' character but in terms of a token // it should be treated as ASSIGN. 
input: `{=}`, - expected: []Item{{LEFT_BRACE, 0, `{`}, {EQL, 1, `=`}, {RIGHT_BRACE, 2, `}`}}, + expected: []Item{{Typ: LEFT_BRACE, Pos: 0, Val: `{`}, {Typ: EQL, Pos: 1, Val: `=`}, {Typ: RIGHT_BRACE, Pos: 2, Val: `}`}}, }, { input: `==`, - expected: []Item{{EQLC, 0, `==`}}, + expected: []Item{{Typ: EQLC, Pos: 0, Val: `==`}}, }, { input: `!=`, - expected: []Item{{NEQ, 0, `!=`}}, + expected: []Item{{Typ: NEQ, Pos: 0, Val: `!=`}}, }, { input: `<`, - expected: []Item{{LSS, 0, `<`}}, + expected: []Item{{Typ: LSS, Pos: 0, Val: `<`}}, }, { input: `>`, - expected: []Item{{GTR, 0, `>`}}, + expected: []Item{{Typ: GTR, Pos: 0, Val: `>`}}, }, { input: `>=`, - expected: []Item{{GTE, 0, `>=`}}, + expected: []Item{{Typ: GTE, Pos: 0, Val: `>=`}}, }, { input: `<=`, - expected: []Item{{LTE, 0, `<=`}}, + expected: []Item{{Typ: LTE, Pos: 0, Val: `<=`}}, }, { input: `+`, - expected: []Item{{ADD, 0, `+`}}, + expected: []Item{{Typ: ADD, Pos: 0, Val: `+`}}, }, { input: `-`, - expected: []Item{{SUB, 0, `-`}}, + expected: []Item{{Typ: SUB, Pos: 0, Val: `-`}}, }, { input: `*`, - expected: []Item{{MUL, 0, `*`}}, + expected: []Item{{Typ: MUL, Pos: 0, Val: `*`}}, }, { input: `/`, - expected: []Item{{DIV, 0, `/`}}, + expected: []Item{{Typ: DIV, Pos: 0, Val: `/`}}, }, { input: `^`, - expected: []Item{{POW, 0, `^`}}, + expected: []Item{{Typ: POW, Pos: 0, Val: `^`}}, }, { input: `%`, - expected: []Item{{MOD, 0, `%`}}, + expected: []Item{{Typ: MOD, Pos: 0, Val: `%`}}, }, { input: `AND`, - expected: []Item{{LAND, 0, `AND`}}, + expected: []Item{{Typ: LAND, Pos: 0, Val: `AND`}}, }, { input: `or`, - expected: []Item{{LOR, 0, `or`}}, + expected: []Item{{Typ: LOR, Pos: 0, Val: `or`}}, }, { input: `unless`, - expected: []Item{{LUNLESS, 0, `unless`}}, + expected: []Item{{Typ: LUNLESS, Pos: 0, Val: `unless`}}, }, { input: `@`, - expected: []Item{{AT, 0, `@`}}, + expected: []Item{{Typ: AT, Pos: 0, Val: `@`}}, }, }, }, @@ -367,28 +367,28 @@ var tests = []struct { tests: []testCase{ { input: `sum`, - expected: []Item{{SUM, 0, `sum`}}, + expected: []Item{{Typ: SUM, Pos: 0, Val: `sum`}}, }, { input: `AVG`, - expected: []Item{{AVG, 0, `AVG`}}, + expected: []Item{{Typ: AVG, Pos: 0, Val: `AVG`}}, }, { input: `GROUP`, - expected: []Item{{GROUP, 0, `GROUP`}}, + expected: []Item{{Typ: GROUP, Pos: 0, Val: `GROUP`}}, }, { input: `MAX`, - expected: []Item{{MAX, 0, `MAX`}}, + expected: []Item{{Typ: MAX, Pos: 0, Val: `MAX`}}, }, { input: `min`, - expected: []Item{{MIN, 0, `min`}}, + expected: []Item{{Typ: MIN, Pos: 0, Val: `min`}}, }, { input: `count`, - expected: []Item{{COUNT, 0, `count`}}, + expected: []Item{{Typ: COUNT, Pos: 0, Val: `count`}}, }, { input: `stdvar`, - expected: []Item{{STDVAR, 0, `stdvar`}}, + expected: []Item{{Typ: STDVAR, Pos: 0, Val: `stdvar`}}, }, { input: `stddev`, - expected: []Item{{STDDEV, 0, `stddev`}}, + expected: []Item{{Typ: STDDEV, Pos: 0, Val: `stddev`}}, }, }, }, @@ -397,39 +397,39 @@ var tests = []struct { tests: []testCase{ { input: "offset", - expected: []Item{{OFFSET, 0, "offset"}}, + expected: []Item{{Typ: OFFSET, Pos: 0, Val: "offset"}}, }, { input: "by", - expected: []Item{{BY, 0, "by"}}, + expected: []Item{{Typ: BY, Pos: 0, Val: "by"}}, }, { input: "without", - expected: []Item{{WITHOUT, 0, "without"}}, + expected: []Item{{Typ: WITHOUT, Pos: 0, Val: "without"}}, }, { input: "on", - expected: []Item{{ON, 0, "on"}}, + expected: []Item{{Typ: ON, Pos: 0, Val: "on"}}, }, { input: "ignoring", - expected: []Item{{IGNORING, 0, "ignoring"}}, + expected: []Item{{Typ: IGNORING, Pos: 0, Val: "ignoring"}}, }, { input: 
"group_left", - expected: []Item{{GROUP_LEFT, 0, "group_left"}}, + expected: []Item{{Typ: GROUP_LEFT, Pos: 0, Val: "group_left"}}, }, { input: "group_right", - expected: []Item{{GROUP_RIGHT, 0, "group_right"}}, + expected: []Item{{Typ: GROUP_RIGHT, Pos: 0, Val: "group_right"}}, }, { input: "bool", - expected: []Item{{BOOL, 0, "bool"}}, + expected: []Item{{Typ: BOOL, Pos: 0, Val: "bool"}}, }, { input: "atan2", - expected: []Item{{ATAN2, 0, "atan2"}}, + expected: []Item{{Typ: ATAN2, Pos: 0, Val: "atan2"}}, }, }, }, @@ -438,11 +438,11 @@ var tests = []struct { tests: []testCase{ { input: `start`, - expected: []Item{{START, 0, `start`}}, + expected: []Item{{Typ: START, Pos: 0, Val: `start`}}, }, { input: `end`, - expected: []Item{{END, 0, `end`}}, + expected: []Item{{Typ: END, Pos: 0, Val: `end`}}, }, }, }, @@ -461,56 +461,56 @@ var tests = []struct { }, { input: `{foo='bar'}`, expected: []Item{ - {LEFT_BRACE, 0, `{`}, - {IDENTIFIER, 1, `foo`}, - {EQL, 4, `=`}, - {STRING, 5, `'bar'`}, - {RIGHT_BRACE, 10, `}`}, + {Typ: LEFT_BRACE, Pos: 0, Val: `{`}, + {Typ: IDENTIFIER, Pos: 1, Val: `foo`}, + {Typ: EQL, Pos: 4, Val: `=`}, + {Typ: STRING, Pos: 5, Val: `'bar'`}, + {Typ: RIGHT_BRACE, Pos: 10, Val: `}`}, }, }, { input: `{foo="bar"}`, expected: []Item{ - {LEFT_BRACE, 0, `{`}, - {IDENTIFIER, 1, `foo`}, - {EQL, 4, `=`}, - {STRING, 5, `"bar"`}, - {RIGHT_BRACE, 10, `}`}, + {Typ: LEFT_BRACE, Pos: 0, Val: `{`}, + {Typ: IDENTIFIER, Pos: 1, Val: `foo`}, + {Typ: EQL, Pos: 4, Val: `=`}, + {Typ: STRING, Pos: 5, Val: `"bar"`}, + {Typ: RIGHT_BRACE, Pos: 10, Val: `}`}, }, }, { input: `{foo="bar\"bar"}`, expected: []Item{ - {LEFT_BRACE, 0, `{`}, - {IDENTIFIER, 1, `foo`}, - {EQL, 4, `=`}, - {STRING, 5, `"bar\"bar"`}, - {RIGHT_BRACE, 15, `}`}, + {Typ: LEFT_BRACE, Pos: 0, Val: `{`}, + {Typ: IDENTIFIER, Pos: 1, Val: `foo`}, + {Typ: EQL, Pos: 4, Val: `=`}, + {Typ: STRING, Pos: 5, Val: `"bar\"bar"`}, + {Typ: RIGHT_BRACE, Pos: 15, Val: `}`}, }, }, { input: `{NaN != "bar" }`, expected: []Item{ - {LEFT_BRACE, 0, `{`}, - {IDENTIFIER, 1, `NaN`}, - {NEQ, 5, `!=`}, - {STRING, 8, `"bar"`}, - {RIGHT_BRACE, 14, `}`}, + {Typ: LEFT_BRACE, Pos: 0, Val: `{`}, + {Typ: IDENTIFIER, Pos: 1, Val: `NaN`}, + {Typ: NEQ, Pos: 5, Val: `!=`}, + {Typ: STRING, Pos: 8, Val: `"bar"`}, + {Typ: RIGHT_BRACE, Pos: 14, Val: `}`}, }, }, { input: `{alert=~"bar" }`, expected: []Item{ - {LEFT_BRACE, 0, `{`}, - {IDENTIFIER, 1, `alert`}, - {EQL_REGEX, 6, `=~`}, - {STRING, 8, `"bar"`}, - {RIGHT_BRACE, 14, `}`}, + {Typ: LEFT_BRACE, Pos: 0, Val: `{`}, + {Typ: IDENTIFIER, Pos: 1, Val: `alert`}, + {Typ: EQL_REGEX, Pos: 6, Val: `=~`}, + {Typ: STRING, Pos: 8, Val: `"bar"`}, + {Typ: RIGHT_BRACE, Pos: 14, Val: `}`}, }, }, { input: `{on!~"bar"}`, expected: []Item{ - {LEFT_BRACE, 0, `{`}, - {IDENTIFIER, 1, `on`}, - {NEQ_REGEX, 3, `!~`}, - {STRING, 5, `"bar"`}, - {RIGHT_BRACE, 10, `}`}, + {Typ: LEFT_BRACE, Pos: 0, Val: `{`}, + {Typ: IDENTIFIER, Pos: 1, Val: `on`}, + {Typ: NEQ_REGEX, Pos: 3, Val: `!~`}, + {Typ: STRING, Pos: 5, Val: `"bar"`}, + {Typ: RIGHT_BRACE, Pos: 10, Val: `}`}, }, }, { input: `{alert!#"bar"}`, fail: true, @@ -580,91 +580,91 @@ var tests = []struct { { input: `{} {{buckets:[5]}}`, expected: []Item{ - {LEFT_BRACE, 0, `{`}, - {RIGHT_BRACE, 1, `}`}, - {SPACE, 2, ` `}, - {OPEN_HIST, 3, `{{`}, - {BUCKETS_DESC, 5, `buckets`}, - {COLON, 12, `:`}, - {LEFT_BRACKET, 13, `[`}, - {NUMBER, 14, `5`}, - {RIGHT_BRACKET, 15, `]`}, - {CLOSE_HIST, 16, `}}`}, + {Typ: LEFT_BRACE, Pos: 0, Val: `{`}, + {Typ: RIGHT_BRACE, Pos: 1, Val: `}`}, + {Typ: SPACE, Pos: 2, Val: ` `}, + 
{Typ: OPEN_HIST, Pos: 3, Val: `{{`}, + {Typ: BUCKETS_DESC, Pos: 5, Val: `buckets`}, + {Typ: COLON, Pos: 12, Val: `:`}, + {Typ: LEFT_BRACKET, Pos: 13, Val: `[`}, + {Typ: NUMBER, Pos: 14, Val: `5`}, + {Typ: RIGHT_BRACKET, Pos: 15, Val: `]`}, + {Typ: CLOSE_HIST, Pos: 16, Val: `}}`}, }, seriesDesc: true, }, { input: `{} {{buckets: [5 10 7]}}`, expected: []Item{ - {LEFT_BRACE, 0, `{`}, - {RIGHT_BRACE, 1, `}`}, - {SPACE, 2, ` `}, - {OPEN_HIST, 3, `{{`}, - {BUCKETS_DESC, 5, `buckets`}, - {COLON, 12, `:`}, - {SPACE, 13, ` `}, - {LEFT_BRACKET, 14, `[`}, - {NUMBER, 15, `5`}, - {SPACE, 16, ` `}, - {NUMBER, 17, `10`}, - {SPACE, 19, ` `}, - {NUMBER, 20, `7`}, - {RIGHT_BRACKET, 21, `]`}, - {CLOSE_HIST, 22, `}}`}, + {Typ: LEFT_BRACE, Pos: 0, Val: `{`}, + {Typ: RIGHT_BRACE, Pos: 1, Val: `}`}, + {Typ: SPACE, Pos: 2, Val: ` `}, + {Typ: OPEN_HIST, Pos: 3, Val: `{{`}, + {Typ: BUCKETS_DESC, Pos: 5, Val: `buckets`}, + {Typ: COLON, Pos: 12, Val: `:`}, + {Typ: SPACE, Pos: 13, Val: ` `}, + {Typ: LEFT_BRACKET, Pos: 14, Val: `[`}, + {Typ: NUMBER, Pos: 15, Val: `5`}, + {Typ: SPACE, Pos: 16, Val: ` `}, + {Typ: NUMBER, Pos: 17, Val: `10`}, + {Typ: SPACE, Pos: 19, Val: ` `}, + {Typ: NUMBER, Pos: 20, Val: `7`}, + {Typ: RIGHT_BRACKET, Pos: 21, Val: `]`}, + {Typ: CLOSE_HIST, Pos: 22, Val: `}}`}, }, seriesDesc: true, }, { input: `{} {{buckets: [5 10 7] schema:1}}`, expected: []Item{ - {LEFT_BRACE, 0, `{`}, - {RIGHT_BRACE, 1, `}`}, - {SPACE, 2, ` `}, - {OPEN_HIST, 3, `{{`}, - {BUCKETS_DESC, 5, `buckets`}, - {COLON, 12, `:`}, - {SPACE, 13, ` `}, - {LEFT_BRACKET, 14, `[`}, - {NUMBER, 15, `5`}, - {SPACE, 16, ` `}, - {NUMBER, 17, `10`}, - {SPACE, 19, ` `}, - {NUMBER, 20, `7`}, - {RIGHT_BRACKET, 21, `]`}, - {SPACE, 22, ` `}, - {SCHEMA_DESC, 23, `schema`}, - {COLON, 29, `:`}, - {NUMBER, 30, `1`}, - {CLOSE_HIST, 31, `}}`}, + {Typ: LEFT_BRACE, Pos: 0, Val: `{`}, + {Typ: RIGHT_BRACE, Pos: 1, Val: `}`}, + {Typ: SPACE, Pos: 2, Val: ` `}, + {Typ: OPEN_HIST, Pos: 3, Val: `{{`}, + {Typ: BUCKETS_DESC, Pos: 5, Val: `buckets`}, + {Typ: COLON, Pos: 12, Val: `:`}, + {Typ: SPACE, Pos: 13, Val: ` `}, + {Typ: LEFT_BRACKET, Pos: 14, Val: `[`}, + {Typ: NUMBER, Pos: 15, Val: `5`}, + {Typ: SPACE, Pos: 16, Val: ` `}, + {Typ: NUMBER, Pos: 17, Val: `10`}, + {Typ: SPACE, Pos: 19, Val: ` `}, + {Typ: NUMBER, Pos: 20, Val: `7`}, + {Typ: RIGHT_BRACKET, Pos: 21, Val: `]`}, + {Typ: SPACE, Pos: 22, Val: ` `}, + {Typ: SCHEMA_DESC, Pos: 23, Val: `schema`}, + {Typ: COLON, Pos: 29, Val: `:`}, + {Typ: NUMBER, Pos: 30, Val: `1`}, + {Typ: CLOSE_HIST, Pos: 31, Val: `}}`}, }, seriesDesc: true, }, { // Series with sum as -Inf and count as NaN. 
input: `{} {{buckets: [5 10 7] sum:Inf count:NaN}}`, expected: []Item{ - {LEFT_BRACE, 0, `{`}, - {RIGHT_BRACE, 1, `}`}, - {SPACE, 2, ` `}, - {OPEN_HIST, 3, `{{`}, - {BUCKETS_DESC, 5, `buckets`}, - {COLON, 12, `:`}, - {SPACE, 13, ` `}, - {LEFT_BRACKET, 14, `[`}, - {NUMBER, 15, `5`}, - {SPACE, 16, ` `}, - {NUMBER, 17, `10`}, - {SPACE, 19, ` `}, - {NUMBER, 20, `7`}, - {RIGHT_BRACKET, 21, `]`}, - {SPACE, 22, ` `}, - {SUM_DESC, 23, `sum`}, - {COLON, 26, `:`}, - {NUMBER, 27, `Inf`}, - {SPACE, 30, ` `}, - {COUNT_DESC, 31, `count`}, - {COLON, 36, `:`}, - {NUMBER, 37, `NaN`}, - {CLOSE_HIST, 40, `}}`}, + {Typ: LEFT_BRACE, Pos: 0, Val: `{`}, + {Typ: RIGHT_BRACE, Pos: 1, Val: `}`}, + {Typ: SPACE, Pos: 2, Val: ` `}, + {Typ: OPEN_HIST, Pos: 3, Val: `{{`}, + {Typ: BUCKETS_DESC, Pos: 5, Val: `buckets`}, + {Typ: COLON, Pos: 12, Val: `:`}, + {Typ: SPACE, Pos: 13, Val: ` `}, + {Typ: LEFT_BRACKET, Pos: 14, Val: `[`}, + {Typ: NUMBER, Pos: 15, Val: `5`}, + {Typ: SPACE, Pos: 16, Val: ` `}, + {Typ: NUMBER, Pos: 17, Val: `10`}, + {Typ: SPACE, Pos: 19, Val: ` `}, + {Typ: NUMBER, Pos: 20, Val: `7`}, + {Typ: RIGHT_BRACKET, Pos: 21, Val: `]`}, + {Typ: SPACE, Pos: 22, Val: ` `}, + {Typ: SUM_DESC, Pos: 23, Val: `sum`}, + {Typ: COLON, Pos: 26, Val: `:`}, + {Typ: NUMBER, Pos: 27, Val: `Inf`}, + {Typ: SPACE, Pos: 30, Val: ` `}, + {Typ: COUNT_DESC, Pos: 31, Val: `count`}, + {Typ: COLON, Pos: 36, Val: `:`}, + {Typ: NUMBER, Pos: 37, Val: `NaN`}, + {Typ: CLOSE_HIST, Pos: 40, Val: `}}`}, }, seriesDesc: true, }, @@ -676,43 +676,43 @@ var tests = []struct { { input: `{} _ 1 x .3`, expected: []Item{ - {LEFT_BRACE, 0, `{`}, - {RIGHT_BRACE, 1, `}`}, - {SPACE, 2, ` `}, - {BLANK, 3, `_`}, - {SPACE, 4, ` `}, - {NUMBER, 5, `1`}, - {SPACE, 6, ` `}, - {TIMES, 7, `x`}, - {SPACE, 8, ` `}, - {NUMBER, 9, `.3`}, + {Typ: LEFT_BRACE, Pos: 0, Val: `{`}, + {Typ: RIGHT_BRACE, Pos: 1, Val: `}`}, + {Typ: SPACE, Pos: 2, Val: ` `}, + {Typ: BLANK, Pos: 3, Val: `_`}, + {Typ: SPACE, Pos: 4, Val: ` `}, + {Typ: NUMBER, Pos: 5, Val: `1`}, + {Typ: SPACE, Pos: 6, Val: ` `}, + {Typ: TIMES, Pos: 7, Val: `x`}, + {Typ: SPACE, Pos: 8, Val: ` `}, + {Typ: NUMBER, Pos: 9, Val: `.3`}, }, seriesDesc: true, }, { input: `metric +Inf Inf NaN`, expected: []Item{ - {IDENTIFIER, 0, `metric`}, - {SPACE, 6, ` `}, - {ADD, 7, `+`}, - {NUMBER, 8, `Inf`}, - {SPACE, 11, ` `}, - {NUMBER, 12, `Inf`}, - {SPACE, 15, ` `}, - {NUMBER, 16, `NaN`}, + {Typ: IDENTIFIER, Pos: 0, Val: `metric`}, + {Typ: SPACE, Pos: 6, Val: ` `}, + {Typ: ADD, Pos: 7, Val: `+`}, + {Typ: NUMBER, Pos: 8, Val: `Inf`}, + {Typ: SPACE, Pos: 11, Val: ` `}, + {Typ: NUMBER, Pos: 12, Val: `Inf`}, + {Typ: SPACE, Pos: 15, Val: ` `}, + {Typ: NUMBER, Pos: 16, Val: `NaN`}, }, seriesDesc: true, }, { input: `metric 1+1x4`, expected: []Item{ - {IDENTIFIER, 0, `metric`}, - {SPACE, 6, ` `}, - {NUMBER, 7, `1`}, - {ADD, 8, `+`}, - {NUMBER, 9, `1`}, - {TIMES, 10, `x`}, - {NUMBER, 11, `4`}, + {Typ: IDENTIFIER, Pos: 0, Val: `metric`}, + {Typ: SPACE, Pos: 6, Val: ` `}, + {Typ: NUMBER, Pos: 7, Val: `1`}, + {Typ: ADD, Pos: 8, Val: `+`}, + {Typ: NUMBER, Pos: 9, Val: `1`}, + {Typ: TIMES, Pos: 10, Val: `x`}, + {Typ: NUMBER, Pos: 11, Val: `4`}, }, seriesDesc: true, }, @@ -724,152 +724,152 @@ var tests = []struct { { input: `test_name{on!~"bar"}[4m:4s]`, expected: []Item{ - {IDENTIFIER, 0, `test_name`}, - {LEFT_BRACE, 9, `{`}, - {IDENTIFIER, 10, `on`}, - {NEQ_REGEX, 12, `!~`}, - {STRING, 14, `"bar"`}, - {RIGHT_BRACE, 19, `}`}, - {LEFT_BRACKET, 20, `[`}, - {DURATION, 21, `4m`}, - {COLON, 23, `:`}, - {DURATION, 24, `4s`}, - {RIGHT_BRACKET, 26, 
`]`}, + {Typ: IDENTIFIER, Pos: 0, Val: `test_name`}, + {Typ: LEFT_BRACE, Pos: 9, Val: `{`}, + {Typ: IDENTIFIER, Pos: 10, Val: `on`}, + {Typ: NEQ_REGEX, Pos: 12, Val: `!~`}, + {Typ: STRING, Pos: 14, Val: `"bar"`}, + {Typ: RIGHT_BRACE, Pos: 19, Val: `}`}, + {Typ: LEFT_BRACKET, Pos: 20, Val: `[`}, + {Typ: DURATION, Pos: 21, Val: `4m`}, + {Typ: COLON, Pos: 23, Val: `:`}, + {Typ: DURATION, Pos: 24, Val: `4s`}, + {Typ: RIGHT_BRACKET, Pos: 26, Val: `]`}, }, }, { input: `test:name{on!~"bar"}[4m:4s]`, expected: []Item{ - {METRIC_IDENTIFIER, 0, `test:name`}, - {LEFT_BRACE, 9, `{`}, - {IDENTIFIER, 10, `on`}, - {NEQ_REGEX, 12, `!~`}, - {STRING, 14, `"bar"`}, - {RIGHT_BRACE, 19, `}`}, - {LEFT_BRACKET, 20, `[`}, - {DURATION, 21, `4m`}, - {COLON, 23, `:`}, - {DURATION, 24, `4s`}, - {RIGHT_BRACKET, 26, `]`}, + {Typ: METRIC_IDENTIFIER, Pos: 0, Val: `test:name`}, + {Typ: LEFT_BRACE, Pos: 9, Val: `{`}, + {Typ: IDENTIFIER, Pos: 10, Val: `on`}, + {Typ: NEQ_REGEX, Pos: 12, Val: `!~`}, + {Typ: STRING, Pos: 14, Val: `"bar"`}, + {Typ: RIGHT_BRACE, Pos: 19, Val: `}`}, + {Typ: LEFT_BRACKET, Pos: 20, Val: `[`}, + {Typ: DURATION, Pos: 21, Val: `4m`}, + {Typ: COLON, Pos: 23, Val: `:`}, + {Typ: DURATION, Pos: 24, Val: `4s`}, + {Typ: RIGHT_BRACKET, Pos: 26, Val: `]`}, }, }, { input: `test:name{on!~"b:ar"}[4m:4s]`, expected: []Item{ - {METRIC_IDENTIFIER, 0, `test:name`}, - {LEFT_BRACE, 9, `{`}, - {IDENTIFIER, 10, `on`}, - {NEQ_REGEX, 12, `!~`}, - {STRING, 14, `"b:ar"`}, - {RIGHT_BRACE, 20, `}`}, - {LEFT_BRACKET, 21, `[`}, - {DURATION, 22, `4m`}, - {COLON, 24, `:`}, - {DURATION, 25, `4s`}, - {RIGHT_BRACKET, 27, `]`}, + {Typ: METRIC_IDENTIFIER, Pos: 0, Val: `test:name`}, + {Typ: LEFT_BRACE, Pos: 9, Val: `{`}, + {Typ: IDENTIFIER, Pos: 10, Val: `on`}, + {Typ: NEQ_REGEX, Pos: 12, Val: `!~`}, + {Typ: STRING, Pos: 14, Val: `"b:ar"`}, + {Typ: RIGHT_BRACE, Pos: 20, Val: `}`}, + {Typ: LEFT_BRACKET, Pos: 21, Val: `[`}, + {Typ: DURATION, Pos: 22, Val: `4m`}, + {Typ: COLON, Pos: 24, Val: `:`}, + {Typ: DURATION, Pos: 25, Val: `4s`}, + {Typ: RIGHT_BRACKET, Pos: 27, Val: `]`}, }, }, { input: `test:name{on!~"b:ar"}[4m:]`, expected: []Item{ - {METRIC_IDENTIFIER, 0, `test:name`}, - {LEFT_BRACE, 9, `{`}, - {IDENTIFIER, 10, `on`}, - {NEQ_REGEX, 12, `!~`}, - {STRING, 14, `"b:ar"`}, - {RIGHT_BRACE, 20, `}`}, - {LEFT_BRACKET, 21, `[`}, - {DURATION, 22, `4m`}, - {COLON, 24, `:`}, - {RIGHT_BRACKET, 25, `]`}, + {Typ: METRIC_IDENTIFIER, Pos: 0, Val: `test:name`}, + {Typ: LEFT_BRACE, Pos: 9, Val: `{`}, + {Typ: IDENTIFIER, Pos: 10, Val: `on`}, + {Typ: NEQ_REGEX, Pos: 12, Val: `!~`}, + {Typ: STRING, Pos: 14, Val: `"b:ar"`}, + {Typ: RIGHT_BRACE, Pos: 20, Val: `}`}, + {Typ: LEFT_BRACKET, Pos: 21, Val: `[`}, + {Typ: DURATION, Pos: 22, Val: `4m`}, + {Typ: COLON, Pos: 24, Val: `:`}, + {Typ: RIGHT_BRACKET, Pos: 25, Val: `]`}, }, }, { // Nested Subquery. 
input: `min_over_time(rate(foo{bar="baz"}[2s])[5m:])[4m:3s]`, expected: []Item{ - {IDENTIFIER, 0, `min_over_time`}, - {LEFT_PAREN, 13, `(`}, - {IDENTIFIER, 14, `rate`}, - {LEFT_PAREN, 18, `(`}, - {IDENTIFIER, 19, `foo`}, - {LEFT_BRACE, 22, `{`}, - {IDENTIFIER, 23, `bar`}, - {EQL, 26, `=`}, - {STRING, 27, `"baz"`}, - {RIGHT_BRACE, 32, `}`}, - {LEFT_BRACKET, 33, `[`}, - {DURATION, 34, `2s`}, - {RIGHT_BRACKET, 36, `]`}, - {RIGHT_PAREN, 37, `)`}, - {LEFT_BRACKET, 38, `[`}, - {DURATION, 39, `5m`}, - {COLON, 41, `:`}, - {RIGHT_BRACKET, 42, `]`}, - {RIGHT_PAREN, 43, `)`}, - {LEFT_BRACKET, 44, `[`}, - {DURATION, 45, `4m`}, - {COLON, 47, `:`}, - {DURATION, 48, `3s`}, - {RIGHT_BRACKET, 50, `]`}, + {Typ: IDENTIFIER, Pos: 0, Val: `min_over_time`}, + {Typ: LEFT_PAREN, Pos: 13, Val: `(`}, + {Typ: IDENTIFIER, Pos: 14, Val: `rate`}, + {Typ: LEFT_PAREN, Pos: 18, Val: `(`}, + {Typ: IDENTIFIER, Pos: 19, Val: `foo`}, + {Typ: LEFT_BRACE, Pos: 22, Val: `{`}, + {Typ: IDENTIFIER, Pos: 23, Val: `bar`}, + {Typ: EQL, Pos: 26, Val: `=`}, + {Typ: STRING, Pos: 27, Val: `"baz"`}, + {Typ: RIGHT_BRACE, Pos: 32, Val: `}`}, + {Typ: LEFT_BRACKET, Pos: 33, Val: `[`}, + {Typ: DURATION, Pos: 34, Val: `2s`}, + {Typ: RIGHT_BRACKET, Pos: 36, Val: `]`}, + {Typ: RIGHT_PAREN, Pos: 37, Val: `)`}, + {Typ: LEFT_BRACKET, Pos: 38, Val: `[`}, + {Typ: DURATION, Pos: 39, Val: `5m`}, + {Typ: COLON, Pos: 41, Val: `:`}, + {Typ: RIGHT_BRACKET, Pos: 42, Val: `]`}, + {Typ: RIGHT_PAREN, Pos: 43, Val: `)`}, + {Typ: LEFT_BRACKET, Pos: 44, Val: `[`}, + {Typ: DURATION, Pos: 45, Val: `4m`}, + {Typ: COLON, Pos: 47, Val: `:`}, + {Typ: DURATION, Pos: 48, Val: `3s`}, + {Typ: RIGHT_BRACKET, Pos: 50, Val: `]`}, }, }, // Subquery with offset. { input: `test:name{on!~"b:ar"}[4m:4s] offset 10m`, expected: []Item{ - {METRIC_IDENTIFIER, 0, `test:name`}, - {LEFT_BRACE, 9, `{`}, - {IDENTIFIER, 10, `on`}, - {NEQ_REGEX, 12, `!~`}, - {STRING, 14, `"b:ar"`}, - {RIGHT_BRACE, 20, `}`}, - {LEFT_BRACKET, 21, `[`}, - {DURATION, 22, `4m`}, - {COLON, 24, `:`}, - {DURATION, 25, `4s`}, - {RIGHT_BRACKET, 27, `]`}, - {OFFSET, 29, "offset"}, - {DURATION, 36, "10m"}, + {Typ: METRIC_IDENTIFIER, Pos: 0, Val: `test:name`}, + {Typ: LEFT_BRACE, Pos: 9, Val: `{`}, + {Typ: IDENTIFIER, Pos: 10, Val: `on`}, + {Typ: NEQ_REGEX, Pos: 12, Val: `!~`}, + {Typ: STRING, Pos: 14, Val: `"b:ar"`}, + {Typ: RIGHT_BRACE, Pos: 20, Val: `}`}, + {Typ: LEFT_BRACKET, Pos: 21, Val: `[`}, + {Typ: DURATION, Pos: 22, Val: `4m`}, + {Typ: COLON, Pos: 24, Val: `:`}, + {Typ: DURATION, Pos: 25, Val: `4s`}, + {Typ: RIGHT_BRACKET, Pos: 27, Val: `]`}, + {Typ: OFFSET, Pos: 29, Val: "offset"}, + {Typ: DURATION, Pos: 36, Val: "10m"}, }, }, { input: `min_over_time(rate(foo{bar="baz"}[2s])[5m:] offset 6m)[4m:3s]`, expected: []Item{ - {IDENTIFIER, 0, `min_over_time`}, - {LEFT_PAREN, 13, `(`}, - {IDENTIFIER, 14, `rate`}, - {LEFT_PAREN, 18, `(`}, - {IDENTIFIER, 19, `foo`}, - {LEFT_BRACE, 22, `{`}, - {IDENTIFIER, 23, `bar`}, - {EQL, 26, `=`}, - {STRING, 27, `"baz"`}, - {RIGHT_BRACE, 32, `}`}, - {LEFT_BRACKET, 33, `[`}, - {DURATION, 34, `2s`}, - {RIGHT_BRACKET, 36, `]`}, - {RIGHT_PAREN, 37, `)`}, - {LEFT_BRACKET, 38, `[`}, - {DURATION, 39, `5m`}, - {COLON, 41, `:`}, - {RIGHT_BRACKET, 42, `]`}, - {OFFSET, 44, `offset`}, - {DURATION, 51, `6m`}, - {RIGHT_PAREN, 53, `)`}, - {LEFT_BRACKET, 54, `[`}, - {DURATION, 55, `4m`}, - {COLON, 57, `:`}, - {DURATION, 58, `3s`}, - {RIGHT_BRACKET, 60, `]`}, + {Typ: IDENTIFIER, Pos: 0, Val: `min_over_time`}, + {Typ: LEFT_PAREN, Pos: 13, Val: `(`}, + {Typ: IDENTIFIER, Pos: 14, Val: `rate`}, + {Typ: 
LEFT_PAREN, Pos: 18, Val: `(`}, + {Typ: IDENTIFIER, Pos: 19, Val: `foo`}, + {Typ: LEFT_BRACE, Pos: 22, Val: `{`}, + {Typ: IDENTIFIER, Pos: 23, Val: `bar`}, + {Typ: EQL, Pos: 26, Val: `=`}, + {Typ: STRING, Pos: 27, Val: `"baz"`}, + {Typ: RIGHT_BRACE, Pos: 32, Val: `}`}, + {Typ: LEFT_BRACKET, Pos: 33, Val: `[`}, + {Typ: DURATION, Pos: 34, Val: `2s`}, + {Typ: RIGHT_BRACKET, Pos: 36, Val: `]`}, + {Typ: RIGHT_PAREN, Pos: 37, Val: `)`}, + {Typ: LEFT_BRACKET, Pos: 38, Val: `[`}, + {Typ: DURATION, Pos: 39, Val: `5m`}, + {Typ: COLON, Pos: 41, Val: `:`}, + {Typ: RIGHT_BRACKET, Pos: 42, Val: `]`}, + {Typ: OFFSET, Pos: 44, Val: `offset`}, + {Typ: DURATION, Pos: 51, Val: `6m`}, + {Typ: RIGHT_PAREN, Pos: 53, Val: `)`}, + {Typ: LEFT_BRACKET, Pos: 54, Val: `[`}, + {Typ: DURATION, Pos: 55, Val: `4m`}, + {Typ: COLON, Pos: 57, Val: `:`}, + {Typ: DURATION, Pos: 58, Val: `3s`}, + {Typ: RIGHT_BRACKET, Pos: 60, Val: `]`}, }, }, { input: `test:name[ 5m]`, expected: []Item{ - {METRIC_IDENTIFIER, 0, `test:name`}, - {LEFT_BRACKET, 9, `[`}, - {DURATION, 11, `5m`}, - {RIGHT_BRACKET, 13, `]`}, + {Typ: METRIC_IDENTIFIER, Pos: 0, Val: `test:name`}, + {Typ: LEFT_BRACKET, Pos: 9, Val: `[`}, + {Typ: DURATION, Pos: 11, Val: `5m`}, + {Typ: RIGHT_BRACKET, Pos: 13, Val: `]`}, }, }, { @@ -927,7 +927,7 @@ func TestLexer(t *testing.T) { } require.NotEqual(t, ERROR, lastItem.Typ, "%d: input %q, unexpected lexing error at position %d: %s", i, test.input, lastItem.Pos, lastItem) - eofItem := Item{EOF, posrange.Pos(len(test.input)), ""} + eofItem := Item{Typ: EOF, Pos: posrange.Pos(len(test.input)), Val: ""} require.Equal(t, lastItem, eofItem, "%d: input %q", i, test.input) out = out[:len(out)-1] diff --git a/promql/parser/parse.go b/promql/parser/parse.go index 6f73e2427b..af4b5b2dfa 100644 --- a/promql/parser/parse.go +++ b/promql/parser/parse.go @@ -45,22 +45,23 @@ type Parser interface { } type parser struct { - lex Lexer - - inject ItemType - injecting bool + generatedParserResult interface{} // functions contains all functions supported by the parser instance. functions map[string]*Function + parseErrors ParseErrors + lex Lexer + + yyParser yyParserImpl + + inject ItemType + // Everytime an Item is lexed that could be the end // of certain expressions its end position is stored here. lastClosing posrange.Pos - yyParser yyParserImpl - - generatedParserResult interface{} - parseErrors ParseErrors + injecting bool } type Opt func(p *parser) @@ -121,9 +122,10 @@ func (p *parser) Close() { // ParseErr wraps a parsing error with line and position context. type ParseErr struct { + Err error + Query string + PositionRange posrange.PositionRange - Err error - Query string // LineOffset is an additional line offset to be added. Only used inside unit tests. LineOffset int @@ -224,9 +226,9 @@ func ParseMetricSelectors(matchers []string) (m [][]*labels.Matcher, err error) // SequenceValue is an omittable value in a sequence of time series values. 
type SequenceValue struct { + Histogram *histogram.FloatHistogram Value float64 Omitted bool - Histogram *histogram.FloatHistogram } func (v SequenceValue) String() string { diff --git a/promql/promqltest/test.go b/promql/promqltest/test.go index 83137e661b..352ecc6b73 100644 --- a/promql/promqltest/test.go +++ b/promql/promqltest/test.go @@ -152,12 +152,13 @@ func runTest(t testutil.T, input string, engine promql.QueryEngine) error { type test struct { testutil.T - cmds []testCommand + context context.Context storage *teststorage.TestStorage - context context.Context cancelCtx context.CancelFunc + + cmds []testCommand } // newTest returns an initialized empty Test. @@ -407,10 +408,10 @@ func (*evalCmd) testCmd() {} // loadCmd is a command that loads sequences of sample values for specific // metrics into the storage. type loadCmd struct { - gap time.Duration metrics map[uint64]labels.Labels defs map[uint64][]promql.Sample exemplars map[uint64][]exemplar.Exemplar + gap time.Duration withNHCB bool } @@ -476,9 +477,9 @@ func getHistogramMetricBase(m labels.Labels, suffix string) (labels.Labels, uint } type tempHistogramWrapper struct { + histogramByTs map[int64]tempHistogram metric labels.Labels upperBounds []float64 - histogramByTs map[int64]tempHistogram } func newTempHistogramWrapper() tempHistogramWrapper { @@ -639,24 +640,24 @@ func appendSample(a storage.Appender, s promql.Sample, m labels.Labels) error { // evalCmd is a command that evaluates an expression for the given time (range) // and expects a specific result. type evalCmd struct { - expr string - start time.Time - end time.Time - step time.Duration - line int + start time.Time + end time.Time + expectedFailRegexp *regexp.Regexp + + metrics map[uint64]labels.Labels + expected map[uint64]entry + expr string + expectedFailMessage string + step time.Duration + line int isRange bool // if false, instant query fail, warn, ordered bool - expectedFailMessage string - expectedFailRegexp *regexp.Regexp - - metrics map[uint64]labels.Labels - expected map[uint64]entry } type entry struct { - pos int vals []parser.SequenceValue + pos int } func (e entry) String() string { @@ -886,8 +887,8 @@ func (cmd clearCmd) String() string { } type atModifierTestCase struct { - expr string evalTime time.Time + expr string } func atModifierTestCases(exprStr string, evalTime time.Time) ([]atModifierTestCase, error) { @@ -1167,15 +1168,15 @@ func parseNumber(s string) (float64, error) { // LazyLoader lazily loads samples into storage. // This is specifically implemented for unit testing of rules. type LazyLoader struct { + storage storage.Storage + context context.Context loadCmd *loadCmd - storage storage.Storage - SubqueryInterval time.Duration - queryEngine *promql.Engine - context context.Context cancelCtx context.CancelFunc + SubqueryInterval time.Duration + opts LazyLoaderOpts } diff --git a/promql/query_logger.go b/promql/query_logger.go index 7e06ebb97f..0f1d3f290b 100644 --- a/promql/query_logger.go +++ b/promql/query_logger.go @@ -31,10 +31,10 @@ import ( ) type ActiveQueryTracker struct { - mmapedFile []byte - getNextIndex chan int logger log.Logger closer io.Closer + getNextIndex chan int + mmapedFile []byte maxConcurrent int } diff --git a/promql/value.go b/promql/value.go index f129137d80..d4e63c975f 100644 --- a/promql/value.go +++ b/promql/value.go @@ -35,8 +35,8 @@ func (String) Type() parser.ValueType { return parser.ValueTypeString } // String represents a string value. 
type String struct { - T int64 V string + T int64 } func (s String) String() string { @@ -113,8 +113,8 @@ func (p FPoint) MarshalJSON() ([]byte, error) { // HPoint represents a single histogram data point for a given timestamp. // H must never be nil. type HPoint struct { - T int64 H *histogram.FloatHistogram + T int64 } func (p HPoint) String() string { @@ -189,11 +189,11 @@ func totalHPointSize(histograms []HPoint) int { // sample or a histogram sample. If H is nil, it is a float sample. Otherwise, // it is a histogram sample. type Sample struct { - T int64 - F float64 H *histogram.FloatHistogram Metric labels.Labels + T int64 + F float64 } func (s Sample) String() string { @@ -222,8 +222,8 @@ func (s Sample) MarshalJSON() ([]byte, error) { return json.Marshal(f) } h := struct { - M labels.Labels `json:"metric"` H HPoint `json:"histogram"` + M labels.Labels `json:"metric"` }{ M: s.Metric, H: HPoint{T: s.T, H: s.H}, @@ -417,12 +417,12 @@ func (ss *StorageSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator { } type storageSeriesIterator struct { + currH *histogram.FloatHistogram floats []FPoint histograms []HPoint iFloats, iHistograms int currT int64 currF float64 - currH *histogram.FloatHistogram } func newStorageSeriesIterator(series Series) *storageSeriesIterator {
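
Note (illustrative, not part of the patch): betteralign reorders struct fields so larger, alignment-sensitive fields come first, which removes compiler-inserted padding and shrinks the struct. A minimal Go sketch of the effect on a 64-bit platform; the struct and field names below are hypothetical and not taken from this change:

    package main

    import (
    	"fmt"
    	"unsafe"
    )

    // unaligned interleaves 1-byte bools with an 8-byte int64, so the
    // compiler pads each bool out to the next 8-byte boundary: 24 bytes total.
    type unaligned struct {
    	a bool
    	b int64
    	c bool
    }

    // aligned places the 8-byte field first and groups the bools together,
    // leaving only a single 6-byte tail pad: 16 bytes total.
    type aligned struct {
    	b int64
    	a bool
    	c bool
    }

    func main() {
    	fmt.Println(unsafe.Sizeof(unaligned{}), unsafe.Sizeof(aligned{})) // prints: 24 16
    }

The reordering in this patch follows the same principle: pointers, slices, and 8-byte fields are grouped ahead of small ints and bools, so hot structs such as evaluator and EvalNodeHelper occupy fewer bytes per instance without any behavioral change.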