Merge pull request #6744 from slrtbtfs/split_parser

Split promql package into parser and engine
This commit is contained in:
Bartlomiej Plotka 2020-02-25 16:49:19 +00:00 committed by GitHub
commit c4e74e241f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
30 changed files with 972 additions and 902 deletions

View file

@ -31,6 +31,7 @@ import (
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/rules"
"github.com/prometheus/prometheus/storage"
)
@ -339,7 +340,7 @@ Outer:
var expSamples []parsedSample
for _, s := range testCase.ExpSamples {
lb, err := promql.ParseMetric(s.Labels)
lb, err := parser.ParseMetric(s.Labels)
if err != nil {
err = errors.Wrapf(err, "labels %q", s.Labels)
errs = append(errs, errors.Errorf(" expr: %q, time: %s, err: %s", testCase.Expr,

View file

@ -24,7 +24,7 @@ import (
yaml "gopkg.in/yaml.v3"
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/template"
)
@ -156,7 +156,7 @@ func (r *RuleNode) Validate() (nodes []WrappedError) {
err: errors.Errorf("field 'expr' must be set in rule"),
node: &r.Expr,
})
} else if _, err := promql.ParseExpr(r.Expr.Value); err != nil {
} else if _, err := parser.ParseExpr(r.Expr.Value); err != nil {
nodes = append(nodes, WrappedError{
err: errors.Wrapf(err, "could not parse expression"),
node: &r.Expr,

View file

@ -22,6 +22,7 @@ import (
"time"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/util/teststorage"
)
@ -249,7 +250,7 @@ func BenchmarkParser(b *testing.B) {
b.Run(c, func(b *testing.B) {
b.ReportAllocs()
for i := 0; i < b.N; i++ {
ParseExpr(c)
parser.ParseExpr(c)
}
})
}
@ -258,7 +259,7 @@ func BenchmarkParser(b *testing.B) {
b.Run(name, func(b *testing.B) {
b.ReportAllocs()
for i := 0; i < b.N; i++ {
ParseExpr(c)
parser.ParseExpr(c)
}
})
}

File diff suppressed because it is too large Load diff

View file

@ -26,6 +26,7 @@ import (
"github.com/go-kit/kit/log"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/util/testutil"
)
@ -522,7 +523,7 @@ load 10s
cases := []struct {
Query string
Result Value
Result parser.Value
Start time.Time
End time.Time
Interval time.Duration

View file

@ -25,34 +25,28 @@ import (
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/promql/parser"
)
// Function represents a function of the expression language and is
// used by function nodes.
type Function struct {
Name string
ArgTypes []ValueType
Variadic int
ReturnType ValueType
// vals is a list of the evaluated arguments for the function call.
// For range vectors it will be a Matrix with one series, instant vectors a
// Vector, scalars a Vector with one series whose value is the scalar
// value, and nil for strings.
// args are the original arguments to the function, where you can access
// matrixSelectors, vectorSelectors, and StringLiterals.
// enh.out is a pre-allocated empty vector that you may use to accumulate
// output before returning it. The vectors in vals should not be returned.
// Range vector functions need only return a vector with the right value,
// the metric and timestamp are not needed.
// Instant vector functions need only return a vector with the right values and
// metrics; the timestamps are not needed.
// Scalar results should be returned as the value of a sample in a Vector.
Call func(vals []Value, args Expressions, enh *EvalNodeHelper) Vector
}
// FunctionCall is the type of a PromQL function implementation
//
// vals is a list of the evaluated arguments for the function call.
// For range vectors it will be a Matrix with one series, instant vectors a
// Vector, scalars a Vector with one series whose value is the scalar
// value, and nil for strings.
// args are the original arguments to the function, where you can access
// matrixSelectors, vectorSelectors, and StringLiterals.
// enh.out is a pre-allocated empty vector that you may use to accumulate
// output before returning it. The vectors in vals should not be returned.
// Range vector functions need only return a vector with the right value,
// the metric and timestamp are not needed.
// Instant vector functions need only return a vector with the right values and
// metrics; the timestamps are not needed.
// Scalar results should be returned as the value of a sample in a Vector.
type FunctionCall func(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector
// === time() float64 ===
func funcTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return Vector{Sample{Point: Point{
V: float64(enh.ts) / 1000,
}}}
@ -62,9 +56,9 @@ func funcTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// It calculates the rate (allowing for counter resets if isCounter is true),
// extrapolates if the first/last sample is close to the boundary, and returns
// the result as either per-second (if isRate is true) or overall.
func extrapolatedRate(vals []Value, args Expressions, enh *EvalNodeHelper, isCounter bool, isRate bool) Vector {
ms := args[0].(*MatrixSelector)
vs := ms.VectorSelector.(*VectorSelector)
func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper, isCounter bool, isRate bool) Vector {
ms := args[0].(*parser.MatrixSelector)
vs := ms.VectorSelector.(*parser.VectorSelector)
var (
samples = vals[0].(Matrix)[0]
@ -137,32 +131,32 @@ func extrapolatedRate(vals []Value, args Expressions, enh *EvalNodeHelper, isCou
})
}
// === delta(Matrix ValueTypeMatrix) Vector ===
func funcDelta(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === delta(Matrix parser.ValueTypeMatrix) Vector ===
func funcDelta(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return extrapolatedRate(vals, args, enh, false, false)
}
// === rate(node ValueTypeMatrix) Vector ===
func funcRate(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === rate(node parser.ValueTypeMatrix) Vector ===
func funcRate(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return extrapolatedRate(vals, args, enh, true, true)
}
// === increase(node ValueTypeMatrix) Vector ===
func funcIncrease(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === increase(node parser.ValueTypeMatrix) Vector ===
func funcIncrease(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return extrapolatedRate(vals, args, enh, true, false)
}
// === irate(node ValueTypeMatrix) Vector ===
func funcIrate(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === irate(node parser.ValueTypeMatrix) Vector ===
func funcIrate(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return instantValue(vals, enh.out, true)
}
// === idelta(node model.ValMatrix) Vector ===
func funcIdelta(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcIdelta(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return instantValue(vals, enh.out, false)
}
func instantValue(vals []Value, out Vector, isRate bool) Vector {
func instantValue(vals []parser.Value, out Vector, isRate bool) Vector {
samples := vals[0].(Matrix)[0]
// No sense in trying to compute a rate without at least two points. Drop
// this Vector element.
@ -218,7 +212,7 @@ func calcTrendValue(i int, sf, tf, s0, s1, b float64) float64 {
// data. A lower smoothing factor increases the influence of historical data. The trend factor (0 < tf < 1) affects
// how trends in historical data will affect the current data. A higher trend factor increases the influence
// of trends. Algorithm taken from https://en.wikipedia.org/wiki/Exponential_smoothing titled: "Double exponential smoothing".
func funcHoltWinters(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcHoltWinters(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
samples := vals[0].(Matrix)[0]
// The smoothing factor argument.
@ -266,8 +260,8 @@ func funcHoltWinters(vals []Value, args Expressions, enh *EvalNodeHelper) Vector
})
}
// === sort(node ValueTypeVector) Vector ===
func funcSort(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === sort(node parser.ValueTypeVector) Vector ===
func funcSort(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
// NaN should sort to the bottom, so take descending sort with NaN first and
// reverse it.
byValueSorter := vectorByReverseValueHeap(vals[0].(Vector))
@ -275,8 +269,8 @@ func funcSort(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
return Vector(byValueSorter)
}
// === sortDesc(node ValueTypeVector) Vector ===
func funcSortDesc(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === sortDesc(node parser.ValueTypeVector) Vector ===
func funcSortDesc(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
// NaN should sort to the bottom, so take ascending sort with NaN first and
// reverse it.
byValueSorter := vectorByValueHeap(vals[0].(Vector))
@ -284,8 +278,8 @@ func funcSortDesc(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
return Vector(byValueSorter)
}
// === clamp_max(Vector ValueTypeVector, max Scalar) Vector ===
func funcClampMax(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === clamp_max(Vector parser.ValueTypeVector, max Scalar) Vector ===
func funcClampMax(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
vec := vals[0].(Vector)
max := vals[1].(Vector)[0].Point.V
for _, el := range vec {
@ -297,8 +291,8 @@ func funcClampMax(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
return enh.out
}
// === clamp_min(Vector ValueTypeVector, min Scalar) Vector ===
func funcClampMin(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === clamp_min(Vector parser.ValueTypeVector, min Scalar) Vector ===
func funcClampMin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
vec := vals[0].(Vector)
min := vals[1].(Vector)[0].Point.V
for _, el := range vec {
@ -310,8 +304,8 @@ func funcClampMin(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
return enh.out
}
// === round(Vector ValueTypeVector, toNearest=1 Scalar) Vector ===
func funcRound(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === round(Vector parser.ValueTypeVector, toNearest=1 Scalar) Vector ===
func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
vec := vals[0].(Vector)
// round returns a number rounded to toNearest.
// Ties are solved by rounding up.
@ -332,8 +326,8 @@ func funcRound(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
return enh.out
}
// === Scalar(node ValueTypeVector) Scalar ===
func funcScalar(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === Scalar(node parser.ValueTypeVector) Scalar ===
func funcScalar(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
v := vals[0].(Vector)
if len(v) != 1 {
return append(enh.out, Sample{
@ -345,7 +339,7 @@ func funcScalar(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
})
}
func aggrOverTime(vals []Value, enh *EvalNodeHelper, aggrFn func([]Point) float64) Vector {
func aggrOverTime(vals []parser.Value, enh *EvalNodeHelper, aggrFn func([]Point) float64) Vector {
el := vals[0].(Matrix)[0]
return append(enh.out, Sample{
@ -353,8 +347,8 @@ func aggrOverTime(vals []Value, enh *EvalNodeHelper, aggrFn func([]Point) float6
})
}
// === avg_over_time(Matrix ValueTypeMatrix) Vector ===
func funcAvgOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === avg_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcAvgOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return aggrOverTime(vals, enh, func(values []Point) float64 {
var mean, count float64
for _, v := range values {
@ -365,16 +359,16 @@ func funcAvgOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector
})
}
// === count_over_time(Matrix ValueTypeMatrix) Vector ===
func funcCountOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === count_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcCountOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return aggrOverTime(vals, enh, func(values []Point) float64 {
return float64(len(values))
})
}
// === floor(Vector ValueTypeVector) Vector ===
// === max_over_time(Matrix ValueTypeMatrix) Vector ===
func funcMaxOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === floor(Vector parser.ValueTypeVector) Vector ===
// === max_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return aggrOverTime(vals, enh, func(values []Point) float64 {
max := values[0].V
for _, v := range values {
@ -386,8 +380,8 @@ func funcMaxOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector
})
}
// === min_over_time(Matrix ValueTypeMatrix) Vector ===
func funcMinOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === min_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcMinOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return aggrOverTime(vals, enh, func(values []Point) float64 {
min := values[0].V
for _, v := range values {
@ -399,8 +393,8 @@ func funcMinOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector
})
}
// === sum_over_time(Matrix ValueTypeMatrix) Vector ===
func funcSumOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === sum_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcSumOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return aggrOverTime(vals, enh, func(values []Point) float64 {
var sum float64
for _, v := range values {
@ -410,8 +404,8 @@ func funcSumOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector
})
}
// === quantile_over_time(Matrix ValueTypeMatrix) Vector ===
func funcQuantileOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === quantile_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcQuantileOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
q := vals[0].(Vector)[0].V
el := vals[1].(Matrix)[0]
@ -424,8 +418,8 @@ func funcQuantileOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) V
})
}
// === stddev_over_time(Matrix ValueTypeMatrix) Vector ===
func funcStddevOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === stddev_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return aggrOverTime(vals, enh, func(values []Point) float64 {
var aux, count, mean float64
for _, v := range values {
@ -438,8 +432,8 @@ func funcStddevOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vec
})
}
// === stdvar_over_time(Matrix ValueTypeMatrix) Vector ===
func funcStdvarOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === stdvar_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcStdvarOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return aggrOverTime(vals, enh, func(values []Point) float64 {
var aux, count, mean float64
for _, v := range values {
@ -452,8 +446,8 @@ func funcStdvarOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vec
})
}
// === absent(Vector ValueTypeVector) Vector ===
func funcAbsent(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === absent(Vector parser.ValueTypeVector) Vector ===
func funcAbsent(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
if len(vals[0].(Vector)) > 0 {
return enh.out
}
@ -464,19 +458,19 @@ func funcAbsent(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
})
}
// === absent_over_time(Vector ValueTypeMatrix) Vector ===
// === absent_over_time(Vector parser.ValueTypeMatrix) Vector ===
// As this function has a matrix as argument, it does not get all the Series.
// This function will return 1 if the matrix has at least one element.
// Due to engine optimization, this function is only called when this condition is true.
// Then, the engine post-processes the results to get the expected output.
func funcAbsentOverTime(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcAbsentOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return append(enh.out,
Sample{
Point: Point{V: 1},
})
}
func simpleFunc(vals []Value, enh *EvalNodeHelper, f func(float64) float64) Vector {
func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float64) Vector {
for _, el := range vals[0].(Vector) {
enh.out = append(enh.out, Sample{
Metric: enh.dropMetricName(el.Metric),
@ -486,48 +480,48 @@ func simpleFunc(vals []Value, enh *EvalNodeHelper, f func(float64) float64) Vect
return enh.out
}
// === abs(Vector ValueTypeVector) Vector ===
func funcAbs(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === abs(Vector parser.ValueTypeVector) Vector ===
func funcAbs(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return simpleFunc(vals, enh, math.Abs)
}
// === ceil(Vector ValueTypeVector) Vector ===
func funcCeil(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === ceil(Vector parser.ValueTypeVector) Vector ===
func funcCeil(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return simpleFunc(vals, enh, math.Ceil)
}
// === floor(Vector ValueTypeVector) Vector ===
func funcFloor(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === floor(Vector parser.ValueTypeVector) Vector ===
func funcFloor(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return simpleFunc(vals, enh, math.Floor)
}
// === exp(Vector ValueTypeVector) Vector ===
func funcExp(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === exp(Vector parser.ValueTypeVector) Vector ===
func funcExp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return simpleFunc(vals, enh, math.Exp)
}
// === sqrt(Vector VectorNode) Vector ===
func funcSqrt(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcSqrt(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return simpleFunc(vals, enh, math.Sqrt)
}
// === ln(Vector ValueTypeVector) Vector ===
func funcLn(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === ln(Vector parser.ValueTypeVector) Vector ===
func funcLn(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return simpleFunc(vals, enh, math.Log)
}
// === log2(Vector ValueTypeVector) Vector ===
func funcLog2(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === log2(Vector parser.ValueTypeVector) Vector ===
func funcLog2(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return simpleFunc(vals, enh, math.Log2)
}
// === log10(Vector ValueTypeVector) Vector ===
func funcLog10(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === log10(Vector parser.ValueTypeVector) Vector ===
func funcLog10(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return simpleFunc(vals, enh, math.Log10)
}
// === timestamp(Vector ValueTypeVector) Vector ===
func funcTimestamp(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === timestamp(Vector parser.ValueTypeVector) Vector ===
func funcTimestamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
vec := vals[0].(Vector)
for _, el := range vec {
enh.out = append(enh.out, Sample{
@ -563,8 +557,8 @@ func linearRegression(samples []Point, interceptTime int64) (slope, intercept fl
return slope, intercept
}
// === deriv(node ValueTypeMatrix) Vector ===
func funcDeriv(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === deriv(node parser.ValueTypeMatrix) Vector ===
func funcDeriv(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
samples := vals[0].(Matrix)[0]
// No sense in trying to compute a derivative without at least two points.
@ -582,8 +576,8 @@ func funcDeriv(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
})
}
// === predict_linear(node ValueTypeMatrix, k ValueTypeScalar) Vector ===
func funcPredictLinear(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === predict_linear(node parser.ValueTypeMatrix, k parser.ValueTypeScalar) Vector ===
func funcPredictLinear(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
samples := vals[0].(Matrix)[0]
duration := vals[1].(Vector)[0].V
@ -599,8 +593,8 @@ func funcPredictLinear(vals []Value, args Expressions, enh *EvalNodeHelper) Vect
})
}
// === histogram_quantile(k ValueTypeScalar, Vector ValueTypeVector) Vector ===
func funcHistogramQuantile(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === histogram_quantile(k parser.ValueTypeScalar, Vector parser.ValueTypeVector) Vector ===
func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
q := vals[0].(Vector)[0].V
inVec := vals[1].(Vector)
sigf := enh.signatureFunc(false, excludedLabels...)
@ -647,8 +641,8 @@ func funcHistogramQuantile(vals []Value, args Expressions, enh *EvalNodeHelper)
return enh.out
}
// === resets(Matrix ValueTypeMatrix) Vector ===
func funcResets(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === resets(Matrix parser.ValueTypeMatrix) Vector ===
func funcResets(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
samples := vals[0].(Matrix)[0]
resets := 0
@ -666,8 +660,8 @@ func funcResets(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
})
}
// === changes(Matrix ValueTypeMatrix) Vector ===
func funcChanges(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === changes(Matrix parser.ValueTypeMatrix) Vector ===
func funcChanges(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
samples := vals[0].(Matrix)[0]
changes := 0
@ -685,14 +679,14 @@ func funcChanges(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
})
}
// === label_replace(Vector ValueTypeVector, dst_label, replacement, src_labelname, regex ValueTypeString) Vector ===
func funcLabelReplace(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
// === label_replace(Vector parser.ValueTypeVector, dst_label, replacement, src_labelname, regex parser.ValueTypeString) Vector ===
func funcLabelReplace(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
var (
vector = vals[0].(Vector)
dst = args[1].(*StringLiteral).Val
repl = args[2].(*StringLiteral).Val
src = args[3].(*StringLiteral).Val
regexStr = args[4].(*StringLiteral).Val
dst = args[1].(*parser.StringLiteral).Val
repl = args[2].(*parser.StringLiteral).Val
src = args[3].(*parser.StringLiteral).Val
regexStr = args[4].(*parser.StringLiteral).Val
)
if enh.regex == nil {
@ -740,7 +734,7 @@ func funcLabelReplace(vals []Value, args Expressions, enh *EvalNodeHelper) Vecto
}
// === Vector(s Scalar) Vector ===
func funcVector(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcVector(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return append(enh.out,
Sample{
Metric: labels.Labels{},
@ -749,11 +743,11 @@ func funcVector(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
}
// === label_join(vector model.ValVector, dest_labelname, separator, src_labelname...) Vector ===
func funcLabelJoin(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcLabelJoin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
var (
vector = vals[0].(Vector)
dst = args[1].(*StringLiteral).Val
sep = args[2].(*StringLiteral).Val
dst = args[1].(*parser.StringLiteral).Val
sep = args[2].(*parser.StringLiteral).Val
srcLabels = make([]string, len(args)-3)
)
@ -762,7 +756,7 @@ func funcLabelJoin(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
}
for i := 3; i < len(args); i++ {
src := args[i].(*StringLiteral).Val
src := args[i].(*parser.StringLiteral).Val
if !model.LabelName(src).IsValid() {
panic(errors.Errorf("invalid source label name in label_join(): %s", src))
}
@ -807,7 +801,7 @@ func funcLabelJoin(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
}
// Common code for date related functions.
func dateWrapper(vals []Value, enh *EvalNodeHelper, f func(time.Time) float64) Vector {
func dateWrapper(vals []parser.Value, enh *EvalNodeHelper, f func(time.Time) float64) Vector {
if len(vals) == 0 {
return append(enh.out,
Sample{
@ -827,353 +821,103 @@ func dateWrapper(vals []Value, enh *EvalNodeHelper, f func(time.Time) float64) V
}
// === days_in_month(v Vector) Scalar ===
func funcDaysInMonth(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcDaysInMonth(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return dateWrapper(vals, enh, func(t time.Time) float64 {
return float64(32 - time.Date(t.Year(), t.Month(), 32, 0, 0, 0, 0, time.UTC).Day())
})
}
// === day_of_month(v Vector) Scalar ===
func funcDayOfMonth(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcDayOfMonth(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return dateWrapper(vals, enh, func(t time.Time) float64 {
return float64(t.Day())
})
}
// === day_of_week(v Vector) Scalar ===
func funcDayOfWeek(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcDayOfWeek(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return dateWrapper(vals, enh, func(t time.Time) float64 {
return float64(t.Weekday())
})
}
// === hour(v Vector) Scalar ===
func funcHour(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcHour(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return dateWrapper(vals, enh, func(t time.Time) float64 {
return float64(t.Hour())
})
}
// === minute(v Vector) Scalar ===
func funcMinute(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcMinute(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return dateWrapper(vals, enh, func(t time.Time) float64 {
return float64(t.Minute())
})
}
// === month(v Vector) Scalar ===
func funcMonth(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcMonth(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return dateWrapper(vals, enh, func(t time.Time) float64 {
return float64(t.Month())
})
}
// === year(v Vector) Scalar ===
func funcYear(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
func funcYear(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return dateWrapper(vals, enh, func(t time.Time) float64 {
return float64(t.Year())
})
}
// Functions is a list of all functions supported by PromQL, including their types.
var Functions = map[string]*Function{
"abs": {
Name: "abs",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcAbs,
},
"absent": {
Name: "absent",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcAbsent,
},
"absent_over_time": {
Name: "absent_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcAbsentOverTime,
},
"avg_over_time": {
Name: "avg_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcAvgOverTime,
},
"ceil": {
Name: "ceil",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcCeil,
},
"changes": {
Name: "changes",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcChanges,
},
"clamp_max": {
Name: "clamp_max",
ArgTypes: []ValueType{ValueTypeVector, ValueTypeScalar},
ReturnType: ValueTypeVector,
Call: funcClampMax,
},
"clamp_min": {
Name: "clamp_min",
ArgTypes: []ValueType{ValueTypeVector, ValueTypeScalar},
ReturnType: ValueTypeVector,
Call: funcClampMin,
},
"count_over_time": {
Name: "count_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcCountOverTime,
},
"days_in_month": {
Name: "days_in_month",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
Call: funcDaysInMonth,
},
"day_of_month": {
Name: "day_of_month",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
Call: funcDayOfMonth,
},
"day_of_week": {
Name: "day_of_week",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
Call: funcDayOfWeek,
},
"delta": {
Name: "delta",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcDelta,
},
"deriv": {
Name: "deriv",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcDeriv,
},
"exp": {
Name: "exp",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcExp,
},
"floor": {
Name: "floor",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcFloor,
},
"histogram_quantile": {
Name: "histogram_quantile",
ArgTypes: []ValueType{ValueTypeScalar, ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcHistogramQuantile,
},
"holt_winters": {
Name: "holt_winters",
ArgTypes: []ValueType{ValueTypeMatrix, ValueTypeScalar, ValueTypeScalar},
ReturnType: ValueTypeVector,
Call: funcHoltWinters,
},
"hour": {
Name: "hour",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
Call: funcHour,
},
"idelta": {
Name: "idelta",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcIdelta,
},
"increase": {
Name: "increase",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcIncrease,
},
"irate": {
Name: "irate",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcIrate,
},
"label_replace": {
Name: "label_replace",
ArgTypes: []ValueType{ValueTypeVector, ValueTypeString, ValueTypeString, ValueTypeString, ValueTypeString},
ReturnType: ValueTypeVector,
Call: funcLabelReplace,
},
"label_join": {
Name: "label_join",
ArgTypes: []ValueType{ValueTypeVector, ValueTypeString, ValueTypeString, ValueTypeString},
Variadic: -1,
ReturnType: ValueTypeVector,
Call: funcLabelJoin,
},
"ln": {
Name: "ln",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcLn,
},
"log10": {
Name: "log10",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcLog10,
},
"log2": {
Name: "log2",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcLog2,
},
"max_over_time": {
Name: "max_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcMaxOverTime,
},
"min_over_time": {
Name: "min_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcMinOverTime,
},
"minute": {
Name: "minute",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
Call: funcMinute,
},
"month": {
Name: "month",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
Call: funcMonth,
},
"predict_linear": {
Name: "predict_linear",
ArgTypes: []ValueType{ValueTypeMatrix, ValueTypeScalar},
ReturnType: ValueTypeVector,
Call: funcPredictLinear,
},
"quantile_over_time": {
Name: "quantile_over_time",
ArgTypes: []ValueType{ValueTypeScalar, ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcQuantileOverTime,
},
"rate": {
Name: "rate",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcRate,
},
"resets": {
Name: "resets",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcResets,
},
"round": {
Name: "round",
ArgTypes: []ValueType{ValueTypeVector, ValueTypeScalar},
Variadic: 1,
ReturnType: ValueTypeVector,
Call: funcRound,
},
"scalar": {
Name: "scalar",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeScalar,
Call: funcScalar,
},
"sort": {
Name: "sort",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcSort,
},
"sort_desc": {
Name: "sort_desc",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcSortDesc,
},
"sqrt": {
Name: "sqrt",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcSqrt,
},
"stddev_over_time": {
Name: "stddev_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcStddevOverTime,
},
"stdvar_over_time": {
Name: "stdvar_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcStdvarOverTime,
},
"sum_over_time": {
Name: "sum_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
Call: funcSumOverTime,
},
"time": {
Name: "time",
ArgTypes: []ValueType{},
ReturnType: ValueTypeScalar,
Call: funcTime,
},
"timestamp": {
Name: "timestamp",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
Call: funcTimestamp,
},
"vector": {
Name: "vector",
ArgTypes: []ValueType{ValueTypeScalar},
ReturnType: ValueTypeVector,
Call: funcVector,
},
"year": {
Name: "year",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
Call: funcYear,
},
}
// getFunction returns a predefined Function object for the given name.
func getFunction(name string) (*Function, bool) {
function, ok := Functions[name]
return function, ok
var FunctionCalls = map[string]FunctionCall{
"abs": funcAbs,
"absent": funcAbsent,
"absent_over_time": funcAbsentOverTime,
"avg_over_time": funcAvgOverTime,
"ceil": funcCeil,
"changes": funcChanges,
"clamp_max": funcClampMax,
"clamp_min": funcClampMin,
"count_over_time": funcCountOverTime,
"days_in_month": funcDaysInMonth,
"day_of_month": funcDayOfMonth,
"day_of_week": funcDayOfWeek,
"delta": funcDelta,
"deriv": funcDeriv,
"exp": funcExp,
"floor": funcFloor,
"histogram_quantile": funcHistogramQuantile,
"holt_winters": funcHoltWinters,
"hour": funcHour,
"idelta": funcIdelta,
"increase": funcIncrease,
"irate": funcIrate,
"label_replace": funcLabelReplace,
"label_join": funcLabelJoin,
"ln": funcLn,
"log10": funcLog10,
"log2": funcLog2,
"max_over_time": funcMaxOverTime,
"min_over_time": funcMinOverTime,
"minute": funcMinute,
"month": funcMonth,
"predict_linear": funcPredictLinear,
"quantile_over_time": funcQuantileOverTime,
"rate": funcRate,
"resets": funcResets,
"round": funcRound,
"scalar": funcScalar,
"sort": funcSort,
"sort_desc": funcSortDesc,
"sqrt": funcSqrt,
"stddev_over_time": funcStddevOverTime,
"stdvar_over_time": funcStdvarOverTime,
"sum_over_time": funcSumOverTime,
"time": funcTime,
"timestamp": funcTimestamp,
"vector": funcVector,
"year": funcYear,
}
type vectorByValueHeap Vector
@ -1236,15 +980,15 @@ func (s *vectorByReverseValueHeap) Pop() interface{} {
// createLabelsForAbsentFunction returns the labels that are uniquely and exactly matched
// in a given expression. It is used in the absent functions.
func createLabelsForAbsentFunction(expr Expr) labels.Labels {
func createLabelsForAbsentFunction(expr parser.Expr) labels.Labels {
m := labels.Labels{}
var lm []*labels.Matcher
switch n := expr.(type) {
case *VectorSelector:
case *parser.VectorSelector:
lm = n.LabelMatchers
case *MatrixSelector:
lm = n.VectorSelector.(*VectorSelector).LabelMatchers
case *parser.MatrixSelector:
lm = n.VectorSelector.(*parser.VectorSelector).LabelMatchers
default:
return m
}

View file

@ -15,11 +15,13 @@ package promql
import (
"context"
"fmt"
"testing"
"time"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/util/teststorage"
"github.com/prometheus/prometheus/util/testutil"
)
@ -56,3 +58,16 @@ func TestDeriv(t *testing.T) {
testutil.Assert(t, len(vec) == 1, "Expected 1 result, got %d", len(vec))
testutil.Assert(t, vec[0].V == 0.0, "Expected 0.0 as value, got %f", vec[0].V)
}
func TestFunctionList(t *testing.T) {
// Test that Functions and parser.Functions list the same functions.
for i := range FunctionCalls {
_, ok := parser.Functions[i]
testutil.Assert(t, ok, fmt.Sprintf("function %s exists in promql package, but not in parser package", i))
}
for i := range parser.Functions {
_, ok := FunctionCalls[i]
testutil.Assert(t, ok, (fmt.Sprintf("function %s exists in parser package, but not in promql package", i)))
}
}

View file

@ -20,6 +20,7 @@ import (
"io"
"github.com/prometheus/prometheus/pkg/textparse"
"github.com/prometheus/prometheus/promql/parser"
)
// PromQL parser fuzzing instrumentation for use with
@ -83,7 +84,7 @@ func FuzzParseOpenMetric(in []byte) int {
// Fuzz the metric selector parser.
func FuzzParseMetricSelector(in []byte) int {
_, err := ParseMetricSelector(string(in))
_, err := parser.ParseMetricSelector(string(in))
if err == nil {
return fuzzInteresting
}
@ -93,7 +94,7 @@ func FuzzParseMetricSelector(in []byte) int {
// Fuzz the expression parser.
func FuzzParseExpr(in []byte) int {
_, err := ParseExpr(string(in))
_, err := parser.ParseExpr(string(in))
if err == nil {
return fuzzInteresting
}

View file

@ -11,9 +11,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package promql
package parser
import (
"context"
"time"
"github.com/pkg/errors"
@ -49,6 +50,7 @@ type Statement interface {
Node
// stmt ensures that no other type accidentally implements the interface
// nolint:unused
stmt()
}
@ -167,12 +169,25 @@ type VectorSelector struct {
LabelMatchers []*labels.Matcher
// The unexpanded seriesSet populated at query preparation time.
unexpandedSeriesSet storage.SeriesSet
series []storage.Series
UnexpandedSeriesSet storage.SeriesSet
Series []storage.Series
PosRange PositionRange
}
// TestStmt is an internal helper statement that allows execution
// of an arbitrary function during handling. It is used to test the Engine.
type TestStmt func(context.Context) error
func (TestStmt) String() string { return "test statement" }
func (TestStmt) stmt() {}
func (TestStmt) PositionRange() PositionRange {
return PositionRange{
Start: -1,
End: -1,
}
}
func (e *AggregateExpr) Type() ValueType { return ValueTypeVector }
func (e *Call) Type() ValueType { return e.Func.ReturnType }
func (e *MatrixSelector) Type() ValueType { return ValueTypeMatrix }

277
promql/parser/functions.go Normal file
View file

@ -0,0 +1,277 @@
// Copyright 2015 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package parser
// Function represents a function of the expression language and is
// used by function nodes.
type Function struct {
Name string
ArgTypes []ValueType
Variadic int
ReturnType ValueType
}
// Functions is a list of all functions supported by PromQL, including their types.
var Functions = map[string]*Function{
"abs": {
Name: "abs",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"absent": {
Name: "absent",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"absent_over_time": {
Name: "absent_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"avg_over_time": {
Name: "avg_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"ceil": {
Name: "ceil",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"changes": {
Name: "changes",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"clamp_max": {
Name: "clamp_max",
ArgTypes: []ValueType{ValueTypeVector, ValueTypeScalar},
ReturnType: ValueTypeVector,
},
"clamp_min": {
Name: "clamp_min",
ArgTypes: []ValueType{ValueTypeVector, ValueTypeScalar},
ReturnType: ValueTypeVector,
},
"count_over_time": {
Name: "count_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"days_in_month": {
Name: "days_in_month",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
},
"day_of_month": {
Name: "day_of_month",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
},
"day_of_week": {
Name: "day_of_week",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
},
"delta": {
Name: "delta",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"deriv": {
Name: "deriv",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"exp": {
Name: "exp",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"floor": {
Name: "floor",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"histogram_quantile": {
Name: "histogram_quantile",
ArgTypes: []ValueType{ValueTypeScalar, ValueTypeVector},
ReturnType: ValueTypeVector,
},
"holt_winters": {
Name: "holt_winters",
ArgTypes: []ValueType{ValueTypeMatrix, ValueTypeScalar, ValueTypeScalar},
ReturnType: ValueTypeVector,
},
"hour": {
Name: "hour",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
},
"idelta": {
Name: "idelta",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"increase": {
Name: "increase",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"irate": {
Name: "irate",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"label_replace": {
Name: "label_replace",
ArgTypes: []ValueType{ValueTypeVector, ValueTypeString, ValueTypeString, ValueTypeString, ValueTypeString},
ReturnType: ValueTypeVector,
},
"label_join": {
Name: "label_join",
ArgTypes: []ValueType{ValueTypeVector, ValueTypeString, ValueTypeString, ValueTypeString},
Variadic: -1,
ReturnType: ValueTypeVector,
},
"ln": {
Name: "ln",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"log10": {
Name: "log10",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"log2": {
Name: "log2",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"max_over_time": {
Name: "max_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"min_over_time": {
Name: "min_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"minute": {
Name: "minute",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
},
"month": {
Name: "month",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
},
"predict_linear": {
Name: "predict_linear",
ArgTypes: []ValueType{ValueTypeMatrix, ValueTypeScalar},
ReturnType: ValueTypeVector,
},
"quantile_over_time": {
Name: "quantile_over_time",
ArgTypes: []ValueType{ValueTypeScalar, ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"rate": {
Name: "rate",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"resets": {
Name: "resets",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"round": {
Name: "round",
ArgTypes: []ValueType{ValueTypeVector, ValueTypeScalar},
Variadic: 1,
ReturnType: ValueTypeVector,
},
"scalar": {
Name: "scalar",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeScalar,
},
"sort": {
Name: "sort",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"sort_desc": {
Name: "sort_desc",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"sqrt": {
Name: "sqrt",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"stddev_over_time": {
Name: "stddev_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"stdvar_over_time": {
Name: "stdvar_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"sum_over_time": {
Name: "sum_over_time",
ArgTypes: []ValueType{ValueTypeMatrix},
ReturnType: ValueTypeVector,
},
"time": {
Name: "time",
ArgTypes: []ValueType{},
ReturnType: ValueTypeScalar,
},
"timestamp": {
Name: "timestamp",
ArgTypes: []ValueType{ValueTypeVector},
ReturnType: ValueTypeVector,
},
"vector": {
Name: "vector",
ArgTypes: []ValueType{ValueTypeScalar},
ReturnType: ValueTypeVector,
},
"year": {
Name: "year",
ArgTypes: []ValueType{ValueTypeVector},
Variadic: 1,
ReturnType: ValueTypeVector,
},
}
// getFunction returns a predefined Function object for the given name.
func getFunction(name string) (*Function, bool) {
function, ok := Functions[name]
return function, ok
}

View file

@ -12,7 +12,7 @@
// limitations under the License.
%{
package promql
package parser
import (
"math"
@ -33,7 +33,7 @@ import (
label labels.Label
labels labels.Labels
strings []string
series []sequenceValue
series []SequenceValue
uint uint64
float float64
duration time.Duration
@ -577,7 +577,7 @@ series_description: metric series_values
;
series_values : /*empty*/
{ $$ = []sequenceValue{} }
{ $$ = []SequenceValue{} }
| series_values SPACE series_item
{ $$ = append($1, $3...) }
| series_values SPACE
@ -587,28 +587,28 @@ series_values : /*empty*/
;
series_item : BLANK
{ $$ = []sequenceValue{{omitted: true}}}
{ $$ = []SequenceValue{{Omitted: true}}}
| BLANK TIMES uint
{
$$ = []sequenceValue{}
$$ = []SequenceValue{}
for i:=uint64(0); i < $3; i++{
$$ = append($$, sequenceValue{omitted: true})
$$ = append($$, SequenceValue{Omitted: true})
}
}
| series_value
{ $$ = []sequenceValue{{value: $1}}}
{ $$ = []SequenceValue{{Value: $1}}}
| series_value TIMES uint
{
$$ = []sequenceValue{}
$$ = []SequenceValue{}
for i:=uint64(0); i <= $3; i++{
$$ = append($$, sequenceValue{value: $1})
$$ = append($$, SequenceValue{Value: $1})
}
}
| series_value signed_number TIMES uint
{
$$ = []sequenceValue{}
$$ = []SequenceValue{}
for i:=uint64(0); i <= $4; i++{
$$ = append($$, sequenceValue{value: $1})
$$ = append($$, SequenceValue{Value: $1})
$1 += $2
}
}

View file

@ -1,11 +1,11 @@
// Code generated by goyacc -o promql/generated_parser.y.go promql/generated_parser.y. DO NOT EDIT.
// Code generated by goyacc -o generated_parser.y.go generated_parser.y. DO NOT EDIT.
//line promql/generated_parser.y:15
package promql
//line generated_parser.y:15
package parser
import __yyfmt__ "fmt"
//line promql/generated_parser.y:15
//line generated_parser.y:15
import (
"math"
@ -17,7 +17,7 @@ import (
"github.com/prometheus/prometheus/pkg/value"
)
//line promql/generated_parser.y:28
//line generated_parser.y:28
type yySymType struct {
yys int
node Node
@ -27,7 +27,7 @@ type yySymType struct {
label labels.Label
labels labels.Labels
strings []string
series []sequenceValue
series []SequenceValue
uint uint64
float float64
duration time.Duration
@ -183,7 +183,7 @@ const yyEofCode = 1
const yyErrCode = 2
const yyInitialStackSize = 16
//line promql/generated_parser.y:706
//line generated_parser.y:706
//line yacctab:1
var yyExca = [...]int{
@ -786,62 +786,62 @@ yydefault:
case 1:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:164
//line generated_parser.y:164
{
yylex.(*parser).generatedParserResult = yyDollar[2].labels
}
case 3:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:167
//line generated_parser.y:167
{
yylex.(*parser).addParseErrf(PositionRange{}, "no expression found in input")
}
case 4:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:169
//line generated_parser.y:169
{
yylex.(*parser).generatedParserResult = yyDollar[2].node
}
case 5:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:171
//line generated_parser.y:171
{
yylex.(*parser).generatedParserResult = yyDollar[2].node
}
case 7:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:174
//line generated_parser.y:174
{
yylex.(*parser).unexpected("", "")
}
case 19:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:196
//line generated_parser.y:196
{
yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, yyDollar[2].node, yyDollar[3].node)
}
case 20:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:198
//line generated_parser.y:198
{
yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, yyDollar[3].node, yyDollar[2].node)
}
case 21:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:200
//line generated_parser.y:200
{
yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, &AggregateExpr{}, yyDollar[2].node)
}
case 22:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:202
//line generated_parser.y:202
{
yylex.(*parser).unexpected("aggregation", "")
yyVAL.node = nil
}
case 23:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:207
//line generated_parser.y:207
{
yyVAL.node = &AggregateExpr{
Grouping: yyDollar[2].strings,
@ -849,7 +849,7 @@ yydefault:
}
case 24:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:213
//line generated_parser.y:213
{
yyVAL.node = &AggregateExpr{
Grouping: yyDollar[2].strings,
@ -858,97 +858,97 @@ yydefault:
}
case 25:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:226
//line generated_parser.y:226
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 26:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:227
//line generated_parser.y:227
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 27:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:228
//line generated_parser.y:228
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 28:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:229
//line generated_parser.y:229
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 29:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:230
//line generated_parser.y:230
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 30:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:231
//line generated_parser.y:231
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 31:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:232
//line generated_parser.y:232
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 32:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:233
//line generated_parser.y:233
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 33:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:234
//line generated_parser.y:234
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 34:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:235
//line generated_parser.y:235
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 35:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:236
//line generated_parser.y:236
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 36:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:237
//line generated_parser.y:237
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 37:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:238
//line generated_parser.y:238
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 38:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:239
//line generated_parser.y:239
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 39:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:240
//line generated_parser.y:240
{
yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node)
}
case 41:
yyDollar = yyS[yypt-0 : yypt+1]
//line promql/generated_parser.y:248
//line generated_parser.y:248
{
yyVAL.node = &BinaryExpr{
VectorMatching: &VectorMatching{Card: CardOneToOne},
@ -956,7 +956,7 @@ yydefault:
}
case 42:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:253
//line generated_parser.y:253
{
yyVAL.node = &BinaryExpr{
VectorMatching: &VectorMatching{Card: CardOneToOne},
@ -965,14 +965,14 @@ yydefault:
}
case 43:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:261
//line generated_parser.y:261
{
yyVAL.node = yyDollar[1].node
yyVAL.node.(*BinaryExpr).VectorMatching.MatchingLabels = yyDollar[3].strings
}
case 44:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:266
//line generated_parser.y:266
{
yyVAL.node = yyDollar[1].node
yyVAL.node.(*BinaryExpr).VectorMatching.MatchingLabels = yyDollar[3].strings
@ -980,7 +980,7 @@ yydefault:
}
case 47:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:276
//line generated_parser.y:276
{
yyVAL.node = yyDollar[1].node
yyVAL.node.(*BinaryExpr).VectorMatching.Card = CardManyToOne
@ -988,7 +988,7 @@ yydefault:
}
case 48:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:282
//line generated_parser.y:282
{
yyVAL.node = yyDollar[1].node
yyVAL.node.(*BinaryExpr).VectorMatching.Card = CardOneToMany
@ -996,51 +996,51 @@ yydefault:
}
case 49:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:291
//line generated_parser.y:291
{
yyVAL.strings = yyDollar[2].strings
}
case 50:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:293
//line generated_parser.y:293
{
yyVAL.strings = yyDollar[2].strings
}
case 51:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:295
//line generated_parser.y:295
{
yyVAL.strings = []string{}
}
case 52:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:297
//line generated_parser.y:297
{
yylex.(*parser).unexpected("grouping opts", "\"(\"")
yyVAL.strings = nil
}
case 53:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:303
//line generated_parser.y:303
{
yyVAL.strings = append(yyDollar[1].strings, yyDollar[3].item.Val)
}
case 54:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:305
//line generated_parser.y:305
{
yyVAL.strings = []string{yyDollar[1].item.Val}
}
case 55:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:307
//line generated_parser.y:307
{
yylex.(*parser).unexpected("grouping opts", "\",\" or \")\"")
yyVAL.strings = yyDollar[1].strings
}
case 56:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:311
//line generated_parser.y:311
{
if !isLabel(yyDollar[1].item.Val) {
yylex.(*parser).unexpected("grouping opts", "label")
@ -1049,14 +1049,14 @@ yydefault:
}
case 57:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:318
//line generated_parser.y:318
{
yylex.(*parser).unexpected("grouping opts", "label")
yyVAL.item = Item{}
}
case 58:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:326
//line generated_parser.y:326
{
fn, exist := getFunction(yyDollar[1].item.Val)
if !exist {
@ -1073,58 +1073,58 @@ yydefault:
}
case 59:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:343
//line generated_parser.y:343
{
yyVAL.node = yyDollar[2].node
}
case 60:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:345
//line generated_parser.y:345
{
yyVAL.node = Expressions{}
}
case 61:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:349
//line generated_parser.y:349
{
yyVAL.node = append(yyDollar[1].node.(Expressions), yyDollar[3].node.(Expr))
}
case 62:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:351
//line generated_parser.y:351
{
yyVAL.node = Expressions{yyDollar[1].node.(Expr)}
}
case 63:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:353
//line generated_parser.y:353
{
yylex.(*parser).addParseErrf(yyDollar[2].item.PositionRange(), "trailing commas not allowed in function call args")
yyVAL.node = yyDollar[1].node
}
case 64:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:364
//line generated_parser.y:364
{
yyVAL.node = &ParenExpr{Expr: yyDollar[2].node.(Expr), PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[3].item)}
}
case 65:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:372
//line generated_parser.y:372
{
yylex.(*parser).addOffset(yyDollar[1].node, yyDollar[3].duration)
yyVAL.node = yyDollar[1].node
}
case 66:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:377
//line generated_parser.y:377
{
yylex.(*parser).unexpected("offset", "duration")
yyVAL.node = yyDollar[1].node
}
case 67:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:385
//line generated_parser.y:385
{
var errMsg string
vs, ok := yyDollar[1].node.(*VectorSelector)
@ -1147,7 +1147,7 @@ yydefault:
}
case 68:
yyDollar = yyS[yypt-6 : yypt+1]
//line promql/generated_parser.y:408
//line generated_parser.y:408
{
yyVAL.node = &SubqueryExpr{
Expr: yyDollar[1].node.(Expr),
@ -1159,35 +1159,35 @@ yydefault:
}
case 69:
yyDollar = yyS[yypt-6 : yypt+1]
//line promql/generated_parser.y:418
//line generated_parser.y:418
{
yylex.(*parser).unexpected("subquery selector", "\"]\"")
yyVAL.node = yyDollar[1].node
}
case 70:
yyDollar = yyS[yypt-5 : yypt+1]
//line promql/generated_parser.y:420
//line generated_parser.y:420
{
yylex.(*parser).unexpected("subquery selector", "duration or \"]\"")
yyVAL.node = yyDollar[1].node
}
case 71:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:422
//line generated_parser.y:422
{
yylex.(*parser).unexpected("subquery or range", "\":\" or \"]\"")
yyVAL.node = yyDollar[1].node
}
case 72:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:424
//line generated_parser.y:424
{
yylex.(*parser).unexpected("subquery selector", "duration")
yyVAL.node = yyDollar[1].node
}
case 73:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:434
//line generated_parser.y:434
{
if nl, ok := yyDollar[2].node.(*NumberLiteral); ok {
if yyDollar[1].item.Typ == SUB {
@ -1201,7 +1201,7 @@ yydefault:
}
case 74:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:452
//line generated_parser.y:452
{
vs := yyDollar[2].node.(*VectorSelector)
vs.PosRange = mergeRanges(&yyDollar[1].item, vs)
@ -1211,7 +1211,7 @@ yydefault:
}
case 75:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:460
//line generated_parser.y:460
{
vs := &VectorSelector{
Name: yyDollar[1].item.Val,
@ -1223,7 +1223,7 @@ yydefault:
}
case 76:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:470
//line generated_parser.y:470
{
vs := yyDollar[1].node.(*VectorSelector)
yylex.(*parser).assembleVectorSelector(vs)
@ -1231,7 +1231,7 @@ yydefault:
}
case 77:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:478
//line generated_parser.y:478
{
yyVAL.node = &VectorSelector{
LabelMatchers: yyDollar[2].matchers,
@ -1240,7 +1240,7 @@ yydefault:
}
case 78:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:485
//line generated_parser.y:485
{
yyVAL.node = &VectorSelector{
LabelMatchers: yyDollar[2].matchers,
@ -1249,7 +1249,7 @@ yydefault:
}
case 79:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:492
//line generated_parser.y:492
{
yyVAL.node = &VectorSelector{
LabelMatchers: []*labels.Matcher{},
@ -1258,7 +1258,7 @@ yydefault:
}
case 80:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:501
//line generated_parser.y:501
{
if yyDollar[1].matchers != nil {
yyVAL.matchers = append(yyDollar[1].matchers, yyDollar[3].matcher)
@ -1268,130 +1268,130 @@ yydefault:
}
case 81:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:509
//line generated_parser.y:509
{
yyVAL.matchers = []*labels.Matcher{yyDollar[1].matcher}
}
case 82:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:511
//line generated_parser.y:511
{
yylex.(*parser).unexpected("label matching", "\",\" or \"}\"")
yyVAL.matchers = yyDollar[1].matchers
}
case 83:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:515
//line generated_parser.y:515
{
yyVAL.matcher = yylex.(*parser).newLabelMatcher(yyDollar[1].item, yyDollar[2].item, yyDollar[3].item)
}
case 84:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:517
//line generated_parser.y:517
{
yylex.(*parser).unexpected("label matching", "string")
yyVAL.matcher = nil
}
case 85:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:519
//line generated_parser.y:519
{
yylex.(*parser).unexpected("label matching", "label matching operator")
yyVAL.matcher = nil
}
case 86:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:521
//line generated_parser.y:521
{
yylex.(*parser).unexpected("label matching", "identifier or \"}\"")
yyVAL.matcher = nil
}
case 87:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:529
//line generated_parser.y:529
{
yyVAL.labels = append(yyDollar[2].labels, labels.Label{Name: labels.MetricName, Value: yyDollar[1].item.Val})
sort.Sort(yyVAL.labels)
}
case 88:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:531
//line generated_parser.y:531
{
yyVAL.labels = yyDollar[1].labels
}
case 91:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:538
//line generated_parser.y:538
{
yyVAL.labels = labels.New(yyDollar[2].labels...)
}
case 92:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:540
//line generated_parser.y:540
{
yyVAL.labels = labels.New(yyDollar[2].labels...)
}
case 93:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:542
//line generated_parser.y:542
{
yyVAL.labels = labels.New()
}
case 94:
yyDollar = yyS[yypt-0 : yypt+1]
//line promql/generated_parser.y:544
//line generated_parser.y:544
{
yyVAL.labels = labels.New()
}
case 95:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:548
//line generated_parser.y:548
{
yyVAL.labels = append(yyDollar[1].labels, yyDollar[3].label)
}
case 96:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:550
//line generated_parser.y:550
{
yyVAL.labels = []labels.Label{yyDollar[1].label}
}
case 97:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:552
//line generated_parser.y:552
{
yylex.(*parser).unexpected("label set", "\",\" or \"}\"")
yyVAL.labels = yyDollar[1].labels
}
case 98:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:557
//line generated_parser.y:557
{
yyVAL.label = labels.Label{Name: yyDollar[1].item.Val, Value: yylex.(*parser).unquoteString(yyDollar[3].item.Val)}
}
case 99:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:559
//line generated_parser.y:559
{
yylex.(*parser).unexpected("label set", "string")
yyVAL.label = labels.Label{}
}
case 100:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:561
//line generated_parser.y:561
{
yylex.(*parser).unexpected("label set", "\"=\"")
yyVAL.label = labels.Label{}
}
case 101:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:563
//line generated_parser.y:563
{
yylex.(*parser).unexpected("label set", "identifier or \"}\"")
yyVAL.label = labels.Label{}
}
case 102:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:571
//line generated_parser.y:571
{
yylex.(*parser).generatedParserResult = &seriesDescription{
labels: yyDollar[1].labels,
@ -1400,72 +1400,72 @@ yydefault:
}
case 103:
yyDollar = yyS[yypt-0 : yypt+1]
//line promql/generated_parser.y:580
//line generated_parser.y:580
{
yyVAL.series = []sequenceValue{}
yyVAL.series = []SequenceValue{}
}
case 104:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:582
//line generated_parser.y:582
{
yyVAL.series = append(yyDollar[1].series, yyDollar[3].series...)
}
case 105:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:584
//line generated_parser.y:584
{
yyVAL.series = yyDollar[1].series
}
case 106:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:586
//line generated_parser.y:586
{
yylex.(*parser).unexpected("series values", "")
yyVAL.series = nil
}
case 107:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:590
//line generated_parser.y:590
{
yyVAL.series = []sequenceValue{{omitted: true}}
yyVAL.series = []SequenceValue{{Omitted: true}}
}
case 108:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:592
//line generated_parser.y:592
{
yyVAL.series = []sequenceValue{}
yyVAL.series = []SequenceValue{}
for i := uint64(0); i < yyDollar[3].uint; i++ {
yyVAL.series = append(yyVAL.series, sequenceValue{omitted: true})
yyVAL.series = append(yyVAL.series, SequenceValue{Omitted: true})
}
}
case 109:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:599
//line generated_parser.y:599
{
yyVAL.series = []sequenceValue{{value: yyDollar[1].float}}
yyVAL.series = []SequenceValue{{Value: yyDollar[1].float}}
}
case 110:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/generated_parser.y:601
//line generated_parser.y:601
{
yyVAL.series = []sequenceValue{}
yyVAL.series = []SequenceValue{}
for i := uint64(0); i <= yyDollar[3].uint; i++ {
yyVAL.series = append(yyVAL.series, sequenceValue{value: yyDollar[1].float})
yyVAL.series = append(yyVAL.series, SequenceValue{Value: yyDollar[1].float})
}
}
case 111:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/generated_parser.y:608
//line generated_parser.y:608
{
yyVAL.series = []sequenceValue{}
yyVAL.series = []SequenceValue{}
for i := uint64(0); i <= yyDollar[4].uint; i++ {
yyVAL.series = append(yyVAL.series, sequenceValue{value: yyDollar[1].float})
yyVAL.series = append(yyVAL.series, SequenceValue{Value: yyDollar[1].float})
yyDollar[1].float += yyDollar[2].float
}
}
case 112:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:618
//line generated_parser.y:618
{
if yyDollar[1].item.Val != "stale" {
yylex.(*parser).unexpected("series values", "number or \"stale\"")
@ -1474,7 +1474,7 @@ yydefault:
}
case 155:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:649
//line generated_parser.y:649
{
yyVAL.node = &NumberLiteral{
Val: yylex.(*parser).number(yyDollar[1].item.Val),
@ -1483,25 +1483,25 @@ yydefault:
}
case 156:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:657
//line generated_parser.y:657
{
yyVAL.float = yylex.(*parser).number(yyDollar[1].item.Val)
}
case 157:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:659
//line generated_parser.y:659
{
yyVAL.float = yyDollar[2].float
}
case 158:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/generated_parser.y:660
//line generated_parser.y:660
{
yyVAL.float = -yyDollar[2].float
}
case 159:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:664
//line generated_parser.y:664
{
var err error
yyVAL.uint, err = strconv.ParseUint(yyDollar[1].item.Val, 10, 64)
@ -1511,7 +1511,7 @@ yydefault:
}
case 160:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:674
//line generated_parser.y:674
{
var err error
yyVAL.duration, err = parseDuration(yyDollar[1].item.Val)
@ -1521,7 +1521,7 @@ yydefault:
}
case 161:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/generated_parser.y:685
//line generated_parser.y:685
{
yyVAL.node = &StringLiteral{
Val: yylex.(*parser).unquoteString(yyDollar[1].item.Val),
@ -1530,13 +1530,13 @@ yydefault:
}
case 162:
yyDollar = yyS[yypt-0 : yypt+1]
//line promql/generated_parser.y:698
//line generated_parser.y:698
{
yyVAL.duration = 0
}
case 164:
yyDollar = yyS[yypt-0 : yypt+1]
//line promql/generated_parser.y:702
//line generated_parser.y:702
{
yyVAL.strings = nil
}

View file

@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package promql
package parser
import (
"fmt"
@ -36,11 +36,11 @@ func (i Item) String() string {
return i.Val
case i.Typ == IDENTIFIER || i.Typ == METRIC_IDENTIFIER:
return fmt.Sprintf("%q", i.Val)
case i.Typ.isKeyword():
case i.Typ.IsKeyword():
return fmt.Sprintf("<%s>", i.Val)
case i.Typ.isOperator():
case i.Typ.IsOperator():
return fmt.Sprintf("<op:%s>", i.Val)
case i.Typ.isAggregator():
case i.Typ.IsAggregator():
return fmt.Sprintf("<aggr:%s>", i.Val)
case len(i.Val) > 10:
return fmt.Sprintf("%.10q...", i.Val)
@ -50,25 +50,25 @@ func (i Item) String() string {
// isOperator returns true if the Item corresponds to a arithmetic or set operator.
// Returns false otherwise.
func (i ItemType) isOperator() bool { return i > operatorsStart && i < operatorsEnd }
func (i ItemType) IsOperator() bool { return i > operatorsStart && i < operatorsEnd }
// isAggregator returns true if the Item belongs to the aggregator functions.
// Returns false otherwise
func (i ItemType) isAggregator() bool { return i > aggregatorsStart && i < aggregatorsEnd }
func (i ItemType) IsAggregator() bool { return i > aggregatorsStart && i < aggregatorsEnd }
// isAggregator returns true if the Item is an aggregator that takes a parameter.
// Returns false otherwise
func (i ItemType) isAggregatorWithParam() bool {
func (i ItemType) IsAggregatorWithParam() bool {
return i == TOPK || i == BOTTOMK || i == COUNT_VALUES || i == QUANTILE
}
// isKeyword returns true if the Item corresponds to a keyword.
// Returns false otherwise.
func (i ItemType) isKeyword() bool { return i > keywordsStart && i < keywordsEnd }
func (i ItemType) IsKeyword() bool { return i > keywordsStart && i < keywordsEnd }
// isComparisonOperator returns true if the Item corresponds to a comparison operator.
// IsComparisonOperator returns true if the Item corresponds to a comparison operator.
// Returns false otherwise.
func (i ItemType) isComparisonOperator() bool {
func (i ItemType) IsComparisonOperator() bool {
switch i {
case EQL, NEQ, LTE, LSS, GTE, GTR:
return true
@ -78,7 +78,7 @@ func (i ItemType) isComparisonOperator() bool {
}
// isSetOperator returns whether the Item corresponds to a set operator.
func (i ItemType) isSetOperator() bool {
func (i ItemType) IsSetOperator() bool {
switch i {
case LAND, LOR, LUNLESS:
return true

View file

@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package promql
package parser
import (
"testing"

View file

@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package promql
package parser
import (
"fmt"
@ -57,14 +57,14 @@ type ParseErr struct {
Err error
Query string
// An additional line offset to be added. Only used inside unit tests.
lineOffset int
// LineOffset is an additional line offset to be added. Only used inside unit tests.
LineOffset int
}
func (e *ParseErr) Error() string {
pos := int(e.PositionRange.Start)
lastLineBreak := -1
line := e.lineOffset + 1
line := e.LineOffset + 1
var positionStr string
@ -178,26 +178,26 @@ func newParser(input string) *parser {
return p
}
// sequenceValue is an omittable value in a sequence of time series values.
type sequenceValue struct {
value float64
omitted bool
// SequenceValue is an omittable value in a sequence of time series values.
type SequenceValue struct {
Value float64
Omitted bool
}
func (v sequenceValue) String() string {
if v.omitted {
func (v SequenceValue) String() string {
if v.Omitted {
return "_"
}
return fmt.Sprintf("%f", v.value)
return fmt.Sprintf("%f", v.Value)
}
type seriesDescription struct {
labels labels.Labels
values []sequenceValue
values []SequenceValue
}
// parseSeriesDesc parses the description of a time series.
func parseSeriesDesc(input string) (labels labels.Labels, values []sequenceValue, err error) {
// ParseSeriesDesc parses the description of a time series.
func ParseSeriesDesc(input string) (labels labels.Labels, values []SequenceValue, err error) {
p := newParser(input)
p.lex.seriesDesc = true
@ -382,7 +382,7 @@ func (p *parser) newAggregateExpr(op Item, modifier Node, args Node) (ret *Aggre
}
desiredArgs := 1
if ret.Op.isAggregatorWithParam() {
if ret.Op.IsAggregatorWithParam() {
desiredArgs = 2
ret.Param = arguments[0]
@ -416,7 +416,7 @@ func (p *parser) number(val string) float64 {
func (p *parser) expectType(node Node, want ValueType, context string) {
t := p.checkAST(node)
if t != want {
p.addParseErrf(node.PositionRange(), "expected type %s in %s, got %s", documentedType(want), context, documentedType(t))
p.addParseErrf(node.PositionRange(), "expected type %s in %s, got %s", DocumentedType(want), context, DocumentedType(t))
}
}
@ -439,18 +439,18 @@ func (p *parser) checkAST(node Node) (typ ValueType) {
case *EvalStmt:
ty := p.checkAST(n.Expr)
if ty == ValueTypeNone {
p.addParseErrf(n.Expr.PositionRange(), "evaluation statement must have a valid expression type but got %s", documentedType(ty))
p.addParseErrf(n.Expr.PositionRange(), "evaluation statement must have a valid expression type but got %s", DocumentedType(ty))
}
case Expressions:
for _, e := range n {
ty := p.checkAST(e)
if ty == ValueTypeNone {
p.addParseErrf(e.PositionRange(), "expression must have a valid expression type but got %s", documentedType(ty))
p.addParseErrf(e.PositionRange(), "expression must have a valid expression type but got %s", DocumentedType(ty))
}
}
case *AggregateExpr:
if !n.Op.isAggregator() {
if !n.Op.IsAggregator() {
p.addParseErrf(n.PositionRange(), "aggregation operator expected in aggregation expression but got %q", n.Op)
}
p.expectType(n.Expr, ValueTypeVector, "aggregation expression")
@ -476,15 +476,15 @@ func (p *parser) checkAST(node Node) (typ ValueType) {
return
}
if n.ReturnBool && !n.Op.isComparisonOperator() {
if n.ReturnBool && !n.Op.IsComparisonOperator() {
p.addParseErrf(opRange(), "bool modifier can only be used on comparison operators")
}
if n.Op.isComparisonOperator() && !n.ReturnBool && n.RHS.Type() == ValueTypeScalar && n.LHS.Type() == ValueTypeScalar {
if n.Op.IsComparisonOperator() && !n.ReturnBool && n.RHS.Type() == ValueTypeScalar && n.LHS.Type() == ValueTypeScalar {
p.addParseErrf(opRange(), "comparisons between scalars must use BOOL modifier")
}
if n.Op.isSetOperator() && n.VectorMatching.Card == CardOneToOne {
if n.Op.IsSetOperator() && n.VectorMatching.Card == CardOneToOne {
n.VectorMatching.Card = CardManyToMany
}
@ -496,7 +496,7 @@ func (p *parser) checkAST(node Node) (typ ValueType) {
}
}
if !n.Op.isOperator() {
if !n.Op.IsOperator() {
p.addParseErrf(n.PositionRange(), "binary expression does not support operator %q", n.Op)
}
if lt != ValueTypeScalar && lt != ValueTypeVector {
@ -513,7 +513,7 @@ func (p *parser) checkAST(node Node) (typ ValueType) {
n.VectorMatching = nil
} else {
// Both operands are Vectors.
if n.Op.isSetOperator() {
if n.Op.IsSetOperator() {
if n.VectorMatching.Card == CardOneToMany || n.VectorMatching.Card == CardManyToOne {
p.addParseErrf(n.PositionRange(), "no grouping allowed for %q operation", n.Op)
}
@ -523,7 +523,7 @@ func (p *parser) checkAST(node Node) (typ ValueType) {
}
}
if (lt == ValueTypeScalar || rt == ValueTypeScalar) && n.Op.isSetOperator() {
if (lt == ValueTypeScalar || rt == ValueTypeScalar) && n.Op.IsSetOperator() {
p.addParseErrf(n.PositionRange(), "set operator %q not allowed in binary scalar expression", n.Op)
}
@ -557,7 +557,7 @@ func (p *parser) checkAST(node Node) (typ ValueType) {
p.addParseErrf(n.PositionRange(), "only + and - operators allowed for unary expressions")
}
if t := p.checkAST(n.Expr); t != ValueTypeScalar && t != ValueTypeVector {
p.addParseErrf(n.PositionRange(), "unary expression only allowed on expressions of type scalar or instant vector, got %q", documentedType(t))
p.addParseErrf(n.PositionRange(), "unary expression only allowed on expressions of type scalar or instant vector, got %q", DocumentedType(t))
}
case *SubqueryExpr:

View file

@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package promql
package parser
import (
"math"
@ -2525,7 +2525,7 @@ func mustGetFunction(name string) *Function {
var testSeries = []struct {
input string
expectedMetric labels.Labels
expectedValues []sequenceValue
expectedValues []SequenceValue
fail bool
}{
{
@ -2602,12 +2602,12 @@ var testSeries = []struct {
// For these tests only, we use the smallest float64 to signal an omitted value.
const none = math.SmallestNonzeroFloat64
func newSeq(vals ...float64) (res []sequenceValue) {
func newSeq(vals ...float64) (res []SequenceValue) {
for _, v := range vals {
if v == none {
res = append(res, sequenceValue{omitted: true})
res = append(res, SequenceValue{Omitted: true})
} else {
res = append(res, sequenceValue{value: v})
res = append(res, SequenceValue{Value: v})
}
}
return res
@ -2615,7 +2615,7 @@ func newSeq(vals ...float64) (res []sequenceValue) {
func TestParseSeries(t *testing.T) {
for _, test := range testSeries {
metric, vals, err := parseSeriesDesc(test.input)
metric, vals, err := ParseSeriesDesc(test.input)
// Unexpected errors are always caused by a bug.
testutil.Assert(t, err != errUnexpected, "unexpected error occurred")

View file

@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package promql
package parser
import (
"fmt"
@ -72,7 +72,7 @@ func (node *AggregateExpr) String() string {
}
aggrString += "("
if node.Op.isAggregatorWithParam() {
if node.Op.IsAggregatorWithParam() {
aggrString += fmt.Sprintf("%s, ", node.Param)
}
aggrString += fmt.Sprintf("%s)", node.Expr)

View file

@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package promql
package parser
import (
"testing"

45
promql/parser/value.go Normal file
View file

@ -0,0 +1,45 @@
// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package parser
// Value is a generic interface for values resulting from a query evaluation.
type Value interface {
Type() ValueType
String() string
}
// ValueType describes a type of a value.
type ValueType string
// The valid value types.
const (
ValueTypeNone = "none"
ValueTypeVector = "vector"
ValueTypeScalar = "scalar"
ValueTypeMatrix = "matrix"
ValueTypeString = "string"
)
// DocumentedType returns the internal type to the equivalent
// user facing terminology as defined in the documentation.
func DocumentedType(t ValueType) string {
switch t {
case "vector":
return "instant vector"
case "matrix":
return "range vector"
default:
return string(t)
}
}

View file

@ -27,6 +27,7 @@ import (
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/util/teststorage"
"github.com/prometheus/prometheus/util/testutil"
@ -101,8 +102,8 @@ func (t *Test) Storage() storage.Storage {
}
func raise(line int, format string, v ...interface{}) error {
return &ParseErr{
lineOffset: line,
return &parser.ParseErr{
LineOffset: line,
Err: errors.Errorf(format, v...),
}
}
@ -125,10 +126,10 @@ func parseLoad(lines []string, i int) (int, *loadCmd, error) {
i--
break
}
metric, vals, err := parseSeriesDesc(defLine)
metric, vals, err := parser.ParseSeriesDesc(defLine)
if err != nil {
if perr, ok := err.(*ParseErr); ok {
perr.lineOffset = i
if perr, ok := err.(*parser.ParseErr); ok {
perr.LineOffset = i
}
return i, nil, err
}
@ -147,11 +148,11 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
at = parts[2]
expr = parts[3]
)
_, err := ParseExpr(expr)
_, err := parser.ParseExpr(expr)
if err != nil {
if perr, ok := err.(*ParseErr); ok {
perr.lineOffset = i
posOffset := Pos(strings.Index(lines[i], expr))
if perr, ok := err.(*parser.ParseErr); ok {
perr.LineOffset = i
posOffset := parser.Pos(strings.Index(lines[i], expr))
perr.PositionRange.Start += posOffset
perr.PositionRange.End += posOffset
perr.Query = lines[i]
@ -181,13 +182,13 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
break
}
if f, err := parseNumber(defLine); err == nil {
cmd.expect(0, nil, sequenceValue{value: f})
cmd.expect(0, nil, parser.SequenceValue{Value: f})
break
}
metric, vals, err := parseSeriesDesc(defLine)
metric, vals, err := parser.ParseSeriesDesc(defLine)
if err != nil {
if perr, ok := err.(*ParseErr); ok {
perr.lineOffset = i
if perr, ok := err.(*parser.ParseErr); ok {
perr.LineOffset = i
}
return i, nil, err
}
@ -275,16 +276,16 @@ func (cmd loadCmd) String() string {
}
// set a sequence of sample values for the given metric.
func (cmd *loadCmd) set(m labels.Labels, vals ...sequenceValue) {
func (cmd *loadCmd) set(m labels.Labels, vals ...parser.SequenceValue) {
h := m.Hash()
samples := make([]Point, 0, len(vals))
ts := testStartTime
for _, v := range vals {
if !v.omitted {
if !v.Omitted {
samples = append(samples, Point{
T: ts.UnixNano() / int64(time.Millisecond/time.Nanosecond),
V: v.value,
V: v.Value,
})
}
ts = ts.Add(cmd.gap)
@ -322,7 +323,7 @@ type evalCmd struct {
type entry struct {
pos int
vals []sequenceValue
vals []parser.SequenceValue
}
func (e entry) String() string {
@ -346,7 +347,7 @@ func (ev *evalCmd) String() string {
// expect adds a new metric with a sequence of values to the set of expected
// results for the query.
func (ev *evalCmd) expect(pos int, m labels.Labels, vals ...sequenceValue) {
func (ev *evalCmd) expect(pos int, m labels.Labels, vals ...parser.SequenceValue) {
if m == nil {
ev.expected[0] = entry{pos: pos, vals: vals}
return
@ -357,7 +358,7 @@ func (ev *evalCmd) expect(pos int, m labels.Labels, vals ...sequenceValue) {
}
// compareResult compares the result value with the defined expectation.
func (ev *evalCmd) compareResult(result Value) error {
func (ev *evalCmd) compareResult(result parser.Value) error {
switch val := result.(type) {
case Matrix:
return errors.New("received range result on instant evaluation")
@ -373,8 +374,8 @@ func (ev *evalCmd) compareResult(result Value) error {
if ev.ordered && exp.pos != pos+1 {
return errors.Errorf("expected metric %s with %v at position %d but was at %d", v.Metric, exp.vals, exp.pos, pos+1)
}
if !almostEqual(exp.vals[0].value, v.V) {
return errors.Errorf("expected %v for %s but got %v", exp.vals[0].value, v.Metric, v.V)
if !almostEqual(exp.vals[0].Value, v.V) {
return errors.Errorf("expected %v for %s but got %v", exp.vals[0].Value, v.Metric, v.V)
}
seen[fp] = true
@ -390,8 +391,8 @@ func (ev *evalCmd) compareResult(result Value) error {
}
case Scalar:
if !almostEqual(ev.expected[0].vals[0].value, val.V) {
return errors.Errorf("expected Scalar %v but got %v", val.V, ev.expected[0].vals[0].value)
if !almostEqual(ev.expected[0].vals[0].Value, val.V) {
return errors.Errorf("expected Scalar %v but got %v", val.V, ev.expected[0].vals[0].Value)
}
default:

View file

@ -20,34 +20,18 @@ import (
"strings"
"github.com/pkg/errors"
"github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage"
)
// Value is a generic interface for values resulting from a query evaluation.
type Value interface {
Type() ValueType
String() string
}
func (Matrix) Type() ValueType { return ValueTypeMatrix }
func (Vector) Type() ValueType { return ValueTypeVector }
func (Scalar) Type() ValueType { return ValueTypeScalar }
func (String) Type() ValueType { return ValueTypeString }
// ValueType describes a type of a value.
type ValueType string
// The valid value types.
const (
ValueTypeNone = "none"
ValueTypeVector = "vector"
ValueTypeScalar = "scalar"
ValueTypeMatrix = "matrix"
ValueTypeString = "string"
)
func (Matrix) Type() parser.ValueType { return parser.ValueTypeMatrix }
func (Vector) Type() parser.ValueType { return parser.ValueTypeVector }
func (Scalar) Type() parser.ValueType { return parser.ValueTypeScalar }
func (String) Type() parser.ValueType { return parser.ValueTypeString }
// String represents a string value.
type String struct {
@ -206,7 +190,7 @@ func (m Matrix) ContainsSameLabelset() bool {
// if any occurred.
type Result struct {
Err error
Value Value
Value parser.Value
Warnings storage.Warnings
}

View file

@ -34,6 +34,7 @@ import (
"github.com/prometheus/prometheus/pkg/rulefmt"
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/template"
"github.com/prometheus/prometheus/util/strutil"
)
@ -112,7 +113,7 @@ type AlertingRule struct {
// The name of the alert.
name string
// The vector expression from which to generate alerts.
vector promql.Expr
vector parser.Expr
// The duration for which a labelset needs to persist in the expression
// output vector before an alert transitions from Pending to Firing state.
holdDuration time.Duration
@ -144,7 +145,7 @@ type AlertingRule struct {
// NewAlertingRule constructs a new AlertingRule.
func NewAlertingRule(
name string, vec promql.Expr, hold time.Duration,
name string, vec parser.Expr, hold time.Duration,
labels, annotations, externalLabels labels.Labels,
restored bool, logger log.Logger,
) *AlertingRule {
@ -201,7 +202,7 @@ func (r *AlertingRule) Health() RuleHealth {
}
// Query returns the query expression of the alerting rule.
func (r *AlertingRule) Query() promql.Expr {
func (r *AlertingRule) Query() parser.Expr {
return r.vector
}

View file

@ -23,12 +23,13 @@ import (
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/util/teststorage"
"github.com/prometheus/prometheus/util/testutil"
)
func TestAlertingRuleHTMLSnippet(t *testing.T) {
expr, err := promql.ParseExpr(`foo{html="<b>BOLD<b>"}`)
expr, err := parser.ParseExpr(`foo{html="<b>BOLD<b>"}`)
testutil.Ok(t, err)
rule := NewAlertingRule("testrule", expr, 0, labels.FromStrings("html", "<b>BOLD</b>"), labels.FromStrings("html", "<b>BOLD</b>"), nil, false, nil)
@ -54,7 +55,7 @@ func TestAlertingRuleLabelsUpdate(t *testing.T) {
testutil.Ok(t, suite.Run())
expr, err := promql.ParseExpr(`http_requests < 100`)
expr, err := parser.ParseExpr(`http_requests < 100`)
testutil.Ok(t, err)
rule := NewAlertingRule(
@ -156,7 +157,7 @@ func TestAlertingRuleExternalLabelsInTemplate(t *testing.T) {
testutil.Ok(t, suite.Run())
expr, err := promql.ParseExpr(`http_requests < 100`)
expr, err := parser.ParseExpr(`http_requests < 100`)
testutil.Ok(t, err)
ruleWithoutExternalLabels := NewAlertingRule(
@ -248,7 +249,7 @@ func TestAlertingRuleEmptyLabelFromTemplate(t *testing.T) {
testutil.Ok(t, suite.Run())
expr, err := promql.ParseExpr(`http_requests < 100`)
expr, err := parser.ParseExpr(`http_requests < 100`)
testutil.Ok(t, err)
rule := NewAlertingRule(
@ -310,7 +311,7 @@ func TestAlertingRuleDuplicate(t *testing.T) {
now := time.Now()
expr, _ := promql.ParseExpr(`vector(0) or label_replace(vector(0),"test","x","","")`)
expr, _ := parser.ParseExpr(`vector(0) or label_replace(vector(0),"test","x","","")`)
rule := NewAlertingRule(
"foo",
expr,

View file

@ -34,6 +34,7 @@ import (
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/pkg/value"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage"
)
@ -979,7 +980,7 @@ func (m *Manager) LoadGroups(
rules := make([]Rule, 0, len(rg.Rules))
for _, r := range rg.Rules {
expr, err := promql.ParseExpr(r.Expr.Value)
expr, err := parser.ParseExpr(r.Expr.Value)
if err != nil {
return nil, []error{errors.Wrap(err, fn)}
}

View file

@ -33,6 +33,7 @@ import (
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/pkg/value"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/util/teststorage"
"github.com/prometheus/prometheus/util/testutil"
@ -50,7 +51,7 @@ func TestAlertingRule(t *testing.T) {
err = suite.Run()
testutil.Ok(t, err)
expr, err := promql.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
expr, err := parser.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
testutil.Ok(t, err)
rule := NewAlertingRule(
@ -191,7 +192,7 @@ func TestForStateAddSamples(t *testing.T) {
err = suite.Run()
testutil.Ok(t, err)
expr, err := promql.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
expr, err := parser.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
testutil.Ok(t, err)
rule := NewAlertingRule(
@ -352,7 +353,7 @@ func TestForStateRestore(t *testing.T) {
err = suite.Run()
testutil.Ok(t, err)
expr, err := promql.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
expr, err := parser.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
testutil.Ok(t, err)
opts := &ManagerOptions{
@ -528,7 +529,7 @@ func TestStaleness(t *testing.T) {
Logger: log.NewNopLogger(),
}
expr, err := promql.ParseExpr("a + 1")
expr, err := parser.ParseExpr("a + 1")
testutil.Ok(t, err)
rule := NewRecordingRule("a_plus_one", expr, labels.Labels{})
group := NewGroup(GroupOptions{
@ -857,7 +858,7 @@ func TestNotify(t *testing.T) {
ResendDelay: 2 * time.Second,
}
expr, err := promql.ParseExpr("a > 1")
expr, err := parser.ParseExpr("a > 1")
testutil.Ok(t, err)
rule := NewAlertingRule("aTooHigh", expr, 0, labels.Labels{}, labels.Labels{}, nil, true, log.NewNopLogger())
group := NewGroup(GroupOptions{

View file

@ -26,13 +26,14 @@ import (
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/rulefmt"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/util/strutil"
)
// A RecordingRule records its vector expression into new timeseries.
type RecordingRule struct {
name string
vector promql.Expr
vector parser.Expr
labels labels.Labels
// Protects the below.
mtx sync.Mutex
@ -47,7 +48,7 @@ type RecordingRule struct {
}
// NewRecordingRule returns a new recording rule.
func NewRecordingRule(name string, vector promql.Expr, lset labels.Labels) *RecordingRule {
func NewRecordingRule(name string, vector parser.Expr, lset labels.Labels) *RecordingRule {
return &RecordingRule{
name: name,
vector: vector,
@ -62,7 +63,7 @@ func (rule *RecordingRule) Name() string {
}
// Query returns the rule query expression.
func (rule *RecordingRule) Query() promql.Expr {
func (rule *RecordingRule) Query() parser.Expr {
return rule.vector
}

View file

@ -22,6 +22,7 @@ import (
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/util/teststorage"
"github.com/prometheus/prometheus/util/testutil"
)
@ -45,13 +46,13 @@ func TestRuleEval(t *testing.T) {
suite := []struct {
name string
expr promql.Expr
expr parser.Expr
labels labels.Labels
result promql.Vector
}{
{
name: "nolabels",
expr: &promql.NumberLiteral{Val: 1},
expr: &parser.NumberLiteral{Val: 1},
labels: labels.Labels{},
result: promql.Vector{promql.Sample{
Metric: labels.FromStrings("__name__", "nolabels"),
@ -60,7 +61,7 @@ func TestRuleEval(t *testing.T) {
},
{
name: "labels",
expr: &promql.NumberLiteral{Val: 1},
expr: &parser.NumberLiteral{Val: 1},
labels: labels.FromStrings("foo", "bar"),
result: promql.Vector{promql.Sample{
Metric: labels.FromStrings("__name__", "labels", "foo", "bar"),
@ -78,7 +79,7 @@ func TestRuleEval(t *testing.T) {
}
func TestRecordingRuleHTMLSnippet(t *testing.T) {
expr, err := promql.ParseExpr(`foo{html="<b>BOLD<b>"}`)
expr, err := parser.ParseExpr(`foo{html="<b>BOLD<b>"}`)
testutil.Ok(t, err)
rule := NewRecordingRule("testrule", expr, labels.FromStrings("html", "<b>BOLD</b>"))
@ -110,7 +111,7 @@ func TestRuleEvalDuplicate(t *testing.T) {
now := time.Now()
expr, _ := promql.ParseExpr(`vector(0) or label_replace(vector(0),"test","x","","")`)
expr, _ := parser.ParseExpr(`vector(0) or label_replace(vector(0),"test","x","","")`)
rule := NewRecordingRule("foo", expr, labels.FromStrings("test", "test"))
_, err := rule.Eval(ctx, now, EngineQueryFunc(engine, storage), nil)
testutil.NotOk(t, err)

View file

@ -45,6 +45,7 @@ import (
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/prompb"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/rules"
"github.com/prometheus/prometheus/scrape"
"github.com/prometheus/prometheus/storage"
@ -307,8 +308,8 @@ func (api *API) Register(r *route.Router) {
}
type queryData struct {
ResultType promql.ValueType `json:"resultType"`
Result promql.Value `json:"result"`
ResultType parser.ValueType `json:"resultType"`
Result parser.Value `json:"result"`
Stats *stats.QueryStats `json:"stats,omitempty"`
}
@ -535,7 +536,7 @@ func (api *API) series(r *http.Request) apiFuncResult {
var matcherSets [][]*labels.Matcher
for _, s := range r.Form["match[]"] {
matchers, err := promql.ParseMetricSelector(s)
matchers, err := parser.ParseMetricSelector(s)
if err != nil {
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}
@ -755,7 +756,7 @@ func (api *API) targetMetadata(r *http.Request) apiFuncResult {
var err error
if matchTarget != "" {
matchers, err = promql.ParseMetricSelector(matchTarget)
matchers, err = parser.ParseMetricSelector(matchTarget)
if err != nil {
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}
@ -1342,7 +1343,7 @@ func (api *API) deleteSeries(r *http.Request) apiFuncResult {
}
for _, s := range r.Form["match[]"] {
matchers, err := promql.ParseMetricSelector(s)
matchers, err := parser.ParseMetricSelector(s)
if err != nil {
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}

View file

@ -48,6 +48,7 @@ import (
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/prompb"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/rules"
"github.com/prometheus/prometheus/scrape"
"github.com/prometheus/prometheus/storage"
@ -189,11 +190,11 @@ type rulesRetrieverMock struct {
}
func (m rulesRetrieverMock) AlertingRules() []*rules.AlertingRule {
expr1, err := promql.ParseExpr(`absent(test_metric3) != 1`)
expr1, err := parser.ParseExpr(`absent(test_metric3) != 1`)
if err != nil {
m.testing.Fatalf("unable to parse alert expression: %s", err)
}
expr2, err := promql.ParseExpr(`up == 1`)
expr2, err := parser.ParseExpr(`up == 1`)
if err != nil {
m.testing.Fatalf("Unable to parse alert expression: %s", err)
}
@ -251,7 +252,7 @@ func (m rulesRetrieverMock) RuleGroups() []*rules.Group {
r = append(r, alertrule)
}
recordingExpr, err := promql.ParseExpr(`vector(1)`)
recordingExpr, err := parser.ParseExpr(`vector(1)`)
if err != nil {
m.testing.Fatalf("unable to parse alert expression: %s", err)
}
@ -552,7 +553,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
"time": []string{"123.4"},
},
response: &queryData{
ResultType: promql.ValueTypeScalar,
ResultType: parser.ValueTypeScalar,
Result: promql.Scalar{
V: 2,
T: timestamp.FromTime(start.Add(123*time.Second + 400*time.Millisecond)),
@ -566,7 +567,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
"time": []string{"1970-01-01T00:02:03Z"},
},
response: &queryData{
ResultType: promql.ValueTypeScalar,
ResultType: parser.ValueTypeScalar,
Result: promql.Scalar{
V: 0.333,
T: timestamp.FromTime(start.Add(123 * time.Second)),
@ -580,7 +581,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
"time": []string{"1970-01-01T01:02:03+01:00"},
},
response: &queryData{
ResultType: promql.ValueTypeScalar,
ResultType: parser.ValueTypeScalar,
Result: promql.Scalar{
V: 0.333,
T: timestamp.FromTime(start.Add(123 * time.Second)),
@ -593,7 +594,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
"query": []string{"0.333"},
},
response: &queryData{
ResultType: promql.ValueTypeScalar,
ResultType: parser.ValueTypeScalar,
Result: promql.Scalar{
V: 0.333,
T: timestamp.FromTime(api.now()),
@ -609,7 +610,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
"step": []string{"1"},
},
response: &queryData{
ResultType: promql.ValueTypeMatrix,
ResultType: parser.ValueTypeMatrix,
Result: promql.Matrix{
promql.Series{
Points: []promql.Point{
@ -2318,7 +2319,7 @@ func TestRespond(t *testing.T) {
}{
{
response: &queryData{
ResultType: promql.ValueTypeMatrix,
ResultType: parser.ValueTypeMatrix,
Result: promql.Matrix{
promql.Series{
Points: []promql.Point{{V: 1, T: 1000}},
@ -2464,7 +2465,7 @@ func BenchmarkRespond(b *testing.B) {
points = append(points, promql.Point{V: float64(i * 1000000), T: int64(i)})
}
response := &queryData{
ResultType: promql.ValueTypeMatrix,
ResultType: parser.ValueTypeMatrix,
Result: promql.Matrix{
promql.Series{
Points: points,

View file

@ -29,6 +29,7 @@ import (
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/pkg/value"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage"
)
@ -54,7 +55,7 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
var matcherSets [][]*labels.Matcher
for _, s := range req.Form["match[]"] {
matchers, err := promql.ParseMetricSelector(s)
matchers, err := parser.ParseMetricSelector(s)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return