Merge pull request #554 from fabxc/fabxc/or
or operation and vector matching opts implemented
Commit: 0b5dd24fd2
rules/ast/ast.go (268 changed lines)
@@ -244,9 +244,21 @@ type (
		opType BinOpType
		lhs    Node
		rhs    Node
		matchCardinality VectorMatchCardinality
		matchOn          clientmodel.LabelNames
		includeLabels    clientmodel.LabelNames
	}
)

type VectorMatchCardinality int

const (
	MatchOneToOne VectorMatchCardinality = iota
	MatchManyToOne
	MatchOneToMany
	MatchManyToMany
)
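For orientation, these cardinality values correspond to the matching modifiers wired up later in this diff (newVectorMatching in the rules helpers and the rules-level NewArithExpr). The pairings below restate that mapping and are illustrative, not part of the change itself:

	on(instance, job)                            -> MatchOneToOne (the default)
	on(instance) group_left(job)   (GROUP_LEFT)  -> MatchManyToOne, the lhs is the "many" side
	on(instance) group_right(job)  (GROUP_RIGHT) -> MatchOneToMany, the rhs is the "many" side
	and / or                                     -> MatchManyToMany (forced in the rules-level NewArithExpr)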

// ----------------------------------------------------------------------------
// MatrixNode types.

@@ -371,14 +383,21 @@ func (node *ScalarFunctionCall) Eval(timestamp clientmodel.Timestamp) clientmode
	return node.function.callFn(timestamp, node.args).(clientmodel.SampleValue)
}

func (node *VectorAggregation) labelsToGroupingKey(labels clientmodel.Metric) uint64 {
	summer := fnv.New64a()
	for _, label := range node.groupBy {
		summer.Write([]byte(labels[label]))
		summer.Write([]byte{clientmodel.SeparatorByte})
// hashForLabels returns a hash value taken from the label/value pairs of the
// specified labels in the metric.
func hashForLabels(metric clientmodel.Metric, labels clientmodel.LabelNames) uint64 {
	var result uint64
	s := fnv.New64a()

	for _, label := range labels {
		s.Write([]byte(label))
		s.Write([]byte{clientmodel.SeparatorByte})
		s.Write([]byte(metric[label]))
		result ^= s.Sum64()
		s.Reset()
	}

	return summer.Sum64()
	return result
}
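Because each label/value pair is hashed separately and folded in with XOR, hashForLabels does not depend on the order of the supplied label names. A minimal standalone sketch of the same scheme, using a plain map instead of clientmodel.Metric (the 0xff separator here is a stand-in for clientmodel.SeparatorByte, and the names are illustrative):

package main

import (
	"fmt"
	"hash/fnv"
)

// hashLabels mirrors the hashForLabels scheme above: one FNV-1a hash per
// label/value pair, XOR-folded into a single uint64.
func hashLabels(metric map[string]string, labels []string) uint64 {
	var result uint64
	h := fnv.New64a()
	for _, l := range labels {
		h.Write([]byte(l))
		h.Write([]byte{0xff}) // stand-in for clientmodel.SeparatorByte
		h.Write([]byte(metric[l]))
		result ^= h.Sum64()
		h.Reset()
	}
	return result
}

func main() {
	m := map[string]string{"instance": "0", "job": "api-server"}
	// XOR folding makes the result independent of label order.
	fmt.Println(hashLabels(m, []string{"instance", "job"}) ==
		hashLabels(m, []string{"job", "instance"})) // true
}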

// EvalVectorInstant evaluates a VectorNode with an instant query.

@@ -484,7 +503,7 @@ func (node *VectorAggregation) Eval(timestamp clientmodel.Timestamp) Vector {
	vector := node.vector.Eval(timestamp)
	result := map[uint64]*groupedAggregation{}
	for _, sample := range vector {
		groupingKey := node.labelsToGroupingKey(sample.Metric.Metric)
		groupingKey := hashForLabels(sample.Metric.Metric, node.groupBy)
		if groupedResult, ok := result[groupingKey]; ok {
			if node.keepExtraLabels {
				groupedResult.labels = labelIntersection(groupedResult.labels, sample.Metric)
@@ -729,10 +748,6 @@ func evalVectorBinop(opType BinOpType,
			return lhs, true
		}
		return 0, false
	case And:
		return lhs, true
	case Or:
		return lhs, true // TODO: implement OR
	}
	panic("Not all enum values enumerated in switch")
}
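With the per-pair And/Or cases removed here, the logical operators are resolved in evalVectors (added further down) by set membership over matching-label hashes. A rough standalone sketch of that filtering semantics, with samples reduced to hash-to-value maps; this is an illustration, not the PR's code:

// vectorAnd keeps every lhs sample whose matching-label hash also occurs on
// the rhs; vectorOr additionally appends rhs samples with no lhs counterpart.
func vectorAnd(lhs, rhs map[uint64]float64) map[uint64]float64 {
	out := make(map[uint64]float64)
	for h, v := range lhs {
		if _, ok := rhs[h]; ok {
			out[h] = v
		}
	}
	return out
}

func vectorOr(lhs, rhs map[uint64]float64) map[uint64]float64 {
	out := make(map[uint64]float64)
	for h, v := range lhs {
		out[h] = v
	}
	for h, v := range rhs {
		if _, ok := lhs[h]; !ok {
			out[h] = v
		}
	}
	return out
}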
@@ -749,26 +764,38 @@ func labelsEqual(labels1, labels2 clientmodel.Metric) bool {
// Eval implements the VectorNode interface and returns the result of
// the expression.
func (node *VectorArithExpr) Eval(timestamp clientmodel.Timestamp) Vector {
	result := Vector{}
	// Calculate vector-to-vector operation.
	if node.lhs.Type() == VectorType && node.rhs.Type() == VectorType {
		lhs := node.lhs.(VectorNode).Eval(timestamp)
		rhs := node.rhs.(VectorNode).Eval(timestamp)

		return node.evalVectors(timestamp, lhs, rhs)
	}

	// Calculate vector-to-scalar operation.
	var lhs Vector
	var rhs clientmodel.SampleValue
	swap := false

	if node.lhs.Type() == ScalarType && node.rhs.Type() == VectorType {
		lhs := node.lhs.(ScalarNode).Eval(timestamp)
		rhs := node.rhs.(VectorNode).Eval(timestamp)
		for _, rhsSample := range rhs {
			value, keep := evalVectorBinop(node.opType, lhs, rhsSample.Value)
			if keep {
				rhsSample.Value = value
				if node.opType.shouldDropMetric() {
					rhsSample.Metric.Delete(clientmodel.MetricNameLabel)
		lhs = node.rhs.(VectorNode).Eval(timestamp)
		rhs = node.lhs.(ScalarNode).Eval(timestamp)
		swap = true
	} else {
		lhs = node.lhs.(VectorNode).Eval(timestamp)
		rhs = node.rhs.(ScalarNode).Eval(timestamp)
	}
				result = append(result, rhsSample)
			}
		}
		return result
	} else if node.lhs.Type() == VectorType && node.rhs.Type() == ScalarType {
		lhs := node.lhs.(VectorNode).Eval(timestamp)
		rhs := node.rhs.(ScalarNode).Eval(timestamp)

	result := make(Vector, 0, len(lhs))

	for _, lhsSample := range lhs {
		value, keep := evalVectorBinop(node.opType, lhsSample.Value, rhs)
		lv, rv := lhsSample.Value, rhs
		// lhs always contains the vector. If the original position was different
		// swap for calculating the value.
		if swap {
			lv, rv = rv, lv
		}
		value, keep := evalVectorBinop(node.opType, lv, rv)
		if keep {
			lhsSample.Value = value
			if node.opType.shouldDropMetric() {
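In the rewritten vector-to-scalar branch, the vector operand always ends up in lhs and the scalar in rhs; the swap flag records whether the operands originally appeared the other way around, which only matters for non-commutative operators such as subtraction and division. A small illustrative sketch (the names are made up, not from the diff):

// applyScalarOp evaluates <vectorValue op scalar>, unless swapped, in which
// case the scalar was the original left-hand operand (e.g. 10 / some_vector).
func applyScalarOp(op func(a, b float64) float64, vectorValue, scalar float64, swapped bool) float64 {
	l, r := vectorValue, scalar
	if swapped {
		l, r = r, l
	}
	return op(l, r)
}

// Division is not commutative, so the swap matters:
//   applyScalarOp(div, 4, 2, false) == 2    // some_vector / 2
//   applyScalarOp(div, 4, 2, true)  == 0.5  // 2 / some_vector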
@@ -778,26 +805,173 @@ func (node *VectorArithExpr) Eval(timestamp clientmodel.Timestamp) Vector {
			}
		}
		return result
	} else if node.lhs.Type() == VectorType && node.rhs.Type() == VectorType {
		lhs := node.lhs.(VectorNode).Eval(timestamp)
		rhs := node.rhs.(VectorNode).Eval(timestamp)
		for _, lhsSample := range lhs {
			for _, rhsSample := range rhs {
				if labelsEqual(lhsSample.Metric.Metric, rhsSample.Metric.Metric) {
					value, keep := evalVectorBinop(node.opType, lhsSample.Value, rhsSample.Value)
	}

// evalVectors evaluates the binary operation for the given vectors.
func (node *VectorArithExpr) evalVectors(timestamp clientmodel.Timestamp, lhs, rhs Vector) Vector {
	result := make(Vector, 0, len(rhs))
	// The control flow below handles one-to-one or many-to-one matching.
	// For one-to-many, swap sidedness and account for the swap when calculating
	// values.
	if node.matchCardinality == MatchOneToMany {
		lhs, rhs = rhs, lhs
	}
	// All samples from the rhs hashed by the matching label/values.
	rm := make(map[uint64]*Sample)
	// Maps the hash of the label values used for matching to the hashes of the label
	// values of the include labels (if any). It is used to keep track of already
	// inserted samples.
	added := make(map[uint64][]uint64)

	// Add all rhs samples to a map so we can easily find matches later.
	for _, rs := range rhs {
		hash := node.hashForMetric(rs.Metric.Metric)
		// The rhs is guaranteed to be the 'one' side. Having multiple samples
		// with the same hash means that the matching is many-to-many,
		// which is not supported.
		if _, found := rm[hash]; node.matchCardinality != MatchManyToMany && found {
			// Many-to-many matching not allowed.
			// TODO(fabxc): Return a query error here once AST nodes support that.
			return Vector{}
		}
		// In many-to-many matching the entry is simply overwritten. It can thus only
		// be used to check whether any matching rhs entry exists but not retrieve them all.
		rm[hash] = rs
	}

	// For all lhs samples find a respective rhs sample and perform
	// the binary operation.
	for _, ls := range lhs {
		hash := node.hashForMetric(ls.Metric.Metric)
		// Any lhs sample we encounter in an OR operation belongs to the result.
		if node.opType == Or {
			ls.Metric = node.resultMetric(ls, nil)
			result = append(result, ls)
			added[hash] = nil // Ensure matching rhs sample is not added later.
			continue
		}

		rs, found := rm[hash] // Look for a match in the rhs vector.
		if !found {
			continue
		}
		var value clientmodel.SampleValue
		var keep bool

		if node.opType == And {
			value = ls.Value
			keep = true
		} else {
			if _, exists := added[hash]; node.matchCardinality == MatchOneToOne && exists {
				// Many-to-one matching must be explicit.
				// TODO(fabxc): Return a query error here once AST nodes support that.
				return Vector{}
			}
			// Account for potentially swapped sidedness.
			vl, vr := ls.Value, rs.Value
			if node.matchCardinality == MatchOneToMany {
				vl, vr = vr, vl
			}
			value, keep = evalVectorBinop(node.opType, vl, vr)
		}

		if keep {
			lhsSample.Value = value
			if node.opType.shouldDropMetric() {
				lhsSample.Metric.Delete(clientmodel.MetricNameLabel)
			metric := node.resultMetric(ls, rs)
			// Check if the same label set has been added for a many-to-one matching before.
			if node.matchCardinality == MatchManyToOne || node.matchCardinality == MatchOneToMany {
				insHash := hashForLabels(metric.Metric, node.includeLabels)
				if ihs, exists := added[hash]; exists {
					for _, ih := range ihs {
						if ih == insHash {
							// TODO(fabxc): Return a query error here once AST nodes support that.
							return Vector{}
						}
				result = append(result, lhsSample)
					}
					added[hash] = append(ihs, insHash)
				} else {
					added[hash] = []uint64{insHash}
				}
			}
			ns := &Sample{
				Metric:    metric,
				Value:     value,
				Timestamp: timestamp,
			}
			result = append(result, ns)
			added[hash] = added[hash] // Set existence to true.
		}
	}

	// Add all remaining samples in the rhs in an OR operation if they
	// have not been matched up with a lhs sample.
	if node.opType == Or {
		for hash, rs := range rm {
			if _, exists := added[hash]; !exists {
				rs.Metric = node.resultMetric(rs, nil)
				result = append(result, rs)
			}
		}
	}
	return result
}
	panic("Invalid vector arithmetic expression operands")

// resultMetric returns the metric for the given sample(s) based on the vector
// binary operation and the matching options. If a label that has to be included is set on
// both sides an error is returned.
func (node *VectorArithExpr) resultMetric(ls, rs *Sample) clientmodel.COWMetric {
	if len(node.matchOn) == 0 || node.opType == Or || node.opType == And {
		if node.opType.shouldDropMetric() {
			ls.Metric.Delete(clientmodel.MetricNameLabel)
		}
		return ls.Metric
	}

	m := clientmodel.Metric{}
	for _, ln := range node.matchOn {
		m[ln] = ls.Metric.Metric[ln]
	}

	for _, ln := range node.includeLabels {
		// Included labels from the `group_x` modifier are taken from the "many"-side.
		v, ok := ls.Metric.Metric[ln]
		if ok {
			m[ln] = v
		}
	}
	return clientmodel.COWMetric{false, m}
}

// hashForMetric calculates a hash value for the given metric based on the matching
// options for the binary operation.
func (node *VectorArithExpr) hashForMetric(metric clientmodel.Metric) uint64 {
	var labels clientmodel.LabelNames

	if len(node.matchOn) > 0 {
		var match bool
		for _, ln := range node.matchOn {
			if _, match = metric[ln]; !match {
				break
			}
		}
		// If the metric does not contain the labels to match on, build the hash
		// over the whole metric to give it a unique hash.
		if !match {
			labels = make(clientmodel.LabelNames, 0, len(metric))
			for ln := range metric {
				labels = append(labels, ln)
			}
		} else {
			labels = node.matchOn
		}
	} else {
		labels = make(clientmodel.LabelNames, 0, len(metric))
		for ln := range metric {
			if ln != clientmodel.MetricNameLabel {
				labels = append(labels, ln)
			}
		}
	}
	return hashForLabels(metric, labels)
}

// Eval implements the MatrixNode interface and returns the value of

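To make the matching loop above concrete, here is a compact, self-contained walk-through of the many-to-one case on toy data. It simplifies heavily (plain string keys instead of FNV hashes, division as the only operator, no include-label bookkeeping) and is not the PR's implementation; the values mirror the cpu_count/http_requests tests added further down:

package main

import "fmt"

type sample struct {
	labels map[string]string
	value  float64
}

// key reduces a sample to the values of the ON(...) labels.
func key(s sample, on []string) string {
	k := ""
	for _, l := range on {
		k += l + "=" + s.labels[l] + ";"
	}
	return k
}

// divideManyToOne mimics evalVectors for `many / on(...) group_left(...) one`:
// the "one" side (rhs) is indexed by its matching labels, every "many" sample
// (lhs) looks up exactly one partner, and a duplicate key on the "one" side
// would make the match many-to-many, which is rejected.
func divideManyToOne(lhs, rhs []sample, on []string) ([]sample, error) {
	index := make(map[string]sample, len(rhs))
	for _, r := range rhs {
		k := key(r, on)
		if _, dup := index[k]; dup {
			return nil, fmt.Errorf("many-to-many matching not allowed")
		}
		index[k] = r
	}
	var out []sample
	for _, l := range lhs {
		r, ok := index[key(l, on)]
		if !ok {
			continue // unmatched lhs samples drop out
		}
		out = append(out, sample{labels: l.labels, value: l.value / r.value})
	}
	return out, nil
}

func main() {
	lhs := []sample{ // e.g. http_requests{group="production", instance="0"}
		{map[string]string{"instance": "0", "job": "api-server"}, 100},
		{map[string]string{"instance": "0", "job": "app-server"}, 500},
	}
	rhs := []sample{ // e.g. cpu_count{type="smp", instance="0"}
		{map[string]string{"instance": "0"}, 100},
	}
	res, _ := divideManyToOne(lhs, rhs, []string{"instance"})
	for _, s := range res {
		fmt.Println(s.labels["job"], s.value) // api-server 1, app-server 5
	}
}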
@@ -962,7 +1136,7 @@ func nodesHaveTypes(nodes Nodes, exprTypes []ExprType) bool {

// NewArithExpr returns a (not yet evaluated) expression node (of type
// VectorArithExpr or ScalarArithExpr).
func NewArithExpr(opType BinOpType, lhs Node, rhs Node) (Node, error) {
func NewArithExpr(opType BinOpType, lhs Node, rhs Node, matchCard VectorMatchCardinality, matchOn, include clientmodel.LabelNames) (Node, error) {
	if !nodesHaveTypes(Nodes{lhs, rhs}, []ExprType{ScalarType, VectorType}) {
		return nil, errors.New("binary operands must be of vector or scalar type")
	}
@@ -971,6 +1145,15 @@ func NewArithExpr(opType BinOpType, lhs Node, rhs Node) (Node, error) {
		if lhs.Type() == ScalarType || rhs.Type() == ScalarType {
			return nil, errors.New("AND and OR operators may only be used between vectors")
		}
		// Logical operations must never be used with group modifiers.
		if len(include) > 0 {
			return nil, errors.New("AND and OR operators must not have a group modifier")
		}
	}
	if lhs.Type() != VectorType || rhs.Type() != VectorType {
		if matchCard != MatchOneToOne || matchOn != nil || include != nil {
			return nil, errors.New("binary scalar expressions cannot have vector matching options")
		}
	}

	if lhs.Type() == VectorType || rhs.Type() == VectorType {
@@ -978,6 +1161,9 @@ func NewArithExpr(opType BinOpType, lhs Node, rhs Node) (Node, error) {
			opType: opType,
			lhs:    lhs,
			rhs:    rhs,
			matchCardinality: matchCard,
			matchOn:          matchOn,
			includeLabels:    include,
		}, nil
	}

@@ -82,8 +82,40 @@ func NewVectorAggregation(aggrTypeStr string, vector ast.Node, groupBy clientmod
	return ast.NewVectorAggregation(aggrType, vector.(ast.VectorNode), groupBy, keepExtraLabels), nil
}

// vectorMatching combines data used to match samples between vectors.
type vectorMatching struct {
	matchCardinality ast.VectorMatchCardinality
	matchOn          clientmodel.LabelNames
	includeLabels    clientmodel.LabelNames
}

// newVectorMatching is a convenience function to create a new vectorMatching.
func newVectorMatching(card string, matchOn, include clientmodel.LabelNames) (*vectorMatching, error) {
	var matchCardinalities = map[string]ast.VectorMatchCardinality{
		"":            ast.MatchOneToOne,
		"GROUP_LEFT":  ast.MatchManyToOne,
		"GROUP_RIGHT": ast.MatchOneToMany,
	}
	matchCard, ok := matchCardinalities[card]
	if !ok {
		return nil, fmt.Errorf("invalid vector match cardinality %q", card)
	}
	if matchCard != ast.MatchOneToOne && len(include) == 0 {
		return nil, fmt.Errorf("grouped vector matching must provide labels")
	}
	// There must be no overlap between both labelname lists.
	for _, matchLabel := range matchOn {
		for _, incLabel := range include {
			if matchLabel == incLabel {
				return nil, fmt.Errorf("use of label %s in ON and %s clauses not allowed", incLabel, card)
			}
		}
	}
	return &vectorMatching{matchCard, matchOn, include}, nil
}
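An in-package usage sketch for the helper above, assuming the file's existing imports; the label names are illustrative, and the expected results follow directly from the mapping and the checks in newVectorMatching:

func exampleVectorMatching() {
	// on(instance, job) -> one-to-one matching.
	oneToOne, err := newVectorMatching("", clientmodel.LabelNames{"instance", "job"}, nil)

	// on(instance) group_left(job) -> many-to-one, "job" copied from the many side.
	manyToOne, err2 := newVectorMatching("GROUP_LEFT", clientmodel.LabelNames{"instance"}, clientmodel.LabelNames{"job"})

	// Overlapping ON and group label lists are rejected.
	_, err3 := newVectorMatching("GROUP_LEFT", clientmodel.LabelNames{"instance"}, clientmodel.LabelNames{"instance"})

	fmt.Println(oneToOne.matchCardinality == ast.MatchOneToOne, err)    // true <nil>
	fmt.Println(manyToOne.matchCardinality == ast.MatchManyToOne, err2) // true <nil>
	fmt.Println(err3) // use of label instance in ON and GROUP_LEFT clauses not allowed
}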

// NewArithExpr is a convenience function to create a new AST arithmetic expression.
func NewArithExpr(opTypeStr string, lhs ast.Node, rhs ast.Node) (ast.Node, error) {
func NewArithExpr(opTypeStr string, lhs ast.Node, rhs ast.Node, vecMatching *vectorMatching) (ast.Node, error) {
	var opTypes = map[string]ast.BinOpType{
		"+": ast.Add,
		"-": ast.Sub,
@@ -103,7 +135,15 @@ func NewArithExpr(opTypeStr string, lhs ast.Node, rhs ast.Node) (ast.Node, error
	if !ok {
		return nil, fmt.Errorf("invalid binary operator %q", opTypeStr)
	}
	expr, err := ast.NewArithExpr(opType, lhs, rhs)
	var vm vectorMatching
	if vecMatching != nil {
		vm = *vecMatching
		// And/or always do many-to-many matching.
		if opType == ast.And || opType == ast.Or {
			vm.matchCardinality = ast.MatchManyToMany
		}
	}
	expr, err := ast.NewArithExpr(opType, lhs, rhs, vm.matchCardinality, vm.matchOn, vm.includeLabels)
	if err != nil {
		return nil, fmt.Errorf(err.Error())
	}
@ -423,6 +423,63 @@ var testMatrix = ast.Matrix{
|
|||
},
|
||||
Values: getTestValueStream(0, 90, 9, testStartTime),
|
||||
},
|
||||
{
|
||||
Metric: clientmodel.COWMetric{
|
||||
Metric: clientmodel.Metric{
|
||||
clientmodel.MetricNameLabel: "vector_matching_a",
|
||||
"l": "x",
|
||||
},
|
||||
},
|
||||
Values: getTestValueStream(0, 100, 1, testStartTime),
|
||||
},
|
||||
{
|
||||
Metric: clientmodel.COWMetric{
|
||||
Metric: clientmodel.Metric{
|
||||
clientmodel.MetricNameLabel: "vector_matching_a",
|
||||
"l": "y",
|
||||
},
|
||||
},
|
||||
Values: getTestValueStream(0, 100, 2, testStartTime),
|
||||
},
|
||||
{
|
||||
Metric: clientmodel.COWMetric{
|
||||
Metric: clientmodel.Metric{
|
||||
clientmodel.MetricNameLabel: "vector_matching_b",
|
||||
"l": "x",
|
||||
},
|
||||
},
|
||||
Values: getTestValueStream(0, 100, 4, testStartTime),
|
||||
},
|
||||
{
|
||||
Metric: clientmodel.COWMetric{
|
||||
Metric: clientmodel.Metric{
|
||||
clientmodel.MetricNameLabel: "cpu_count",
|
||||
"instance": "0",
|
||||
"type": "numa",
|
||||
},
|
||||
},
|
||||
Values: getTestValueStream(0, 500, 30, testStartTime),
|
||||
},
|
||||
{
|
||||
Metric: clientmodel.COWMetric{
|
||||
Metric: clientmodel.Metric{
|
||||
clientmodel.MetricNameLabel: "cpu_count",
|
||||
"instance": "0",
|
||||
"type": "smp",
|
||||
},
|
||||
},
|
||||
Values: getTestValueStream(0, 200, 10, testStartTime),
|
||||
},
|
||||
{
|
||||
Metric: clientmodel.COWMetric{
|
||||
Metric: clientmodel.Metric{
|
||||
clientmodel.MetricNameLabel: "cpu_count",
|
||||
"instance": "1",
|
||||
"type": "smp",
|
||||
},
|
||||
},
|
||||
Values: getTestValueStream(0, 200, 20, testStartTime),
|
||||
},
|
||||
}
|
||||
|
||||
var testVector = getTestVectorFromTestMatrix(testMatrix)
|
||||
|
|
|
@@ -84,6 +84,9 @@ DESCRIPTION|description return DESCRIPTION

PERMANENT|permanent            return PERMANENT
BY|by                          return GROUP_OP
ON|on                          return MATCH_OP
GROUP_LEFT|GROUP_RIGHT         lval.str = lexer.token(); return MATCH_MOD
group_left|group_right         lval.str = strings.ToUpper(lexer.token()); return MATCH_MOD
KEEPING_EXTRA|keeping_extra    return KEEPING_EXTRA
OFFSET|offset                  return OFFSET
AVG|SUM|MAX|MIN|COUNT          lval.str = lexer.token(); return AGGR_OP
rules/lexer.l.go (2269 changed lines): file diff suppressed because it is too large.
@@ -32,6 +32,7 @@
	labelSet       clientmodel.LabelSet
	labelMatcher   *metric.LabelMatcher
	labelMatchers  metric.LabelMatchers
	vectorMatching *vectorMatching
}

/* We simulate multiple start symbols for closely-related grammars via dummy tokens. See
@@ -42,8 +43,8 @@

%token <str> IDENTIFIER STRING DURATION METRICNAME
%token <num> NUMBER
%token PERMANENT GROUP_OP KEEPING_EXTRA OFFSET
%token <str> AGGR_OP CMP_OP ADDITIVE_OP MULT_OP
%token PERMANENT GROUP_OP KEEPING_EXTRA OFFSET MATCH_OP
%token <str> AGGR_OP CMP_OP ADDITIVE_OP MULT_OP MATCH_MOD
%token ALERT IF FOR WITH SUMMARY DESCRIPTION

%type <ruleNodeSlice> func_arg_list
@@ -51,6 +52,7 @@
%type <labelSet> label_assign label_assign_list rule_labels
%type <labelMatcher> label_match
%type <labelMatchers> label_match_list label_matches
%type <vectorMatching> vector_matching
%type <ruleNode> rule_expr func_arg
%type <boolean> qualifier extra_labels_opts
%type <str> for_duration metric_name label_match_type offset_opts
@@ -207,34 +209,50 @@ rule_expr : '(' rule_expr ')'
	}
	/* Yacc can only attach associativity to terminals, so we
	 * have to list all operators here. */
	| rule_expr ADDITIVE_OP rule_expr
	| rule_expr ADDITIVE_OP vector_matching rule_expr
	{
		var err error
		$$, err = NewArithExpr($2, $1, $3)
		$$, err = NewArithExpr($2, $1, $4, $3)
		if err != nil { yylex.Error(err.Error()); return 1 }
	}
	| rule_expr MULT_OP rule_expr
	| rule_expr MULT_OP vector_matching rule_expr
	{
		var err error
		$$, err = NewArithExpr($2, $1, $3)
		$$, err = NewArithExpr($2, $1, $4, $3)
		if err != nil { yylex.Error(err.Error()); return 1 }
	}
	| rule_expr CMP_OP rule_expr
	| rule_expr CMP_OP vector_matching rule_expr
	{
		var err error
		$$, err = NewArithExpr($2, $1, $3)
		$$, err = NewArithExpr($2, $1, $4, $3)
		if err != nil { yylex.Error(err.Error()); return 1 }
	}
	| NUMBER
	{ $$ = ast.NewScalarLiteral($1)}
	;

extra_labels_opts :
extra_labels_opts : /* empty */
	{ $$ = false }
	| KEEPING_EXTRA
	{ $$ = true }
	;

vector_matching : /* empty */
	{ $$ = nil }
	| MATCH_OP '(' label_list ')'
	{
		var err error
		$$, err = newVectorMatching("", $3, nil)
		if err != nil { yylex.Error(err.Error()); return 1 }
	}
	| MATCH_OP '(' label_list ')' MATCH_MOD '(' label_list ')'
	{
		var err error
		$$, err = newVectorMatching($5, $3, $7)
		if err != nil { yylex.Error(err.Error()); return 1 }
	}
	;

grouping_opts :
	{ $$ = clientmodel.LabelNames{} }
	| GROUP_OP '(' label_list ')'
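Taken together, the MATCH_OP/MATCH_MOD tokens and the vector_matching rule accept the matching clauses exercised by the tests added further down, for example these expressions quoted from the test cases:

	http_requests{group="canary"} and http_requests{instance="0"}
	(http_requests{group="canary"} + 1) or on(instance) (http_requests or cpu_count or vector_matching_a)
	http_requests{group="canary"} / on(instance,job) http_requests{group="production"}
	http_requests{group="production"} / on(instance) group_left(job) cpu_count{type="smp"}
	cpu_count{type="smp"} / on(instance) group_right(job) http_requests{group="production"}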
@ -23,6 +23,7 @@ type yySymType struct {
|
|||
labelSet clientmodel.LabelSet
|
||||
labelMatcher *metric.LabelMatcher
|
||||
labelMatchers metric.LabelMatchers
|
||||
vectorMatching *vectorMatching
|
||||
}
|
||||
|
||||
const START_RULES = 57346
|
||||
|
@ -36,16 +37,18 @@ const PERMANENT = 57353
|
|||
const GROUP_OP = 57354
|
||||
const KEEPING_EXTRA = 57355
|
||||
const OFFSET = 57356
|
||||
const AGGR_OP = 57357
|
||||
const CMP_OP = 57358
|
||||
const ADDITIVE_OP = 57359
|
||||
const MULT_OP = 57360
|
||||
const ALERT = 57361
|
||||
const IF = 57362
|
||||
const FOR = 57363
|
||||
const WITH = 57364
|
||||
const SUMMARY = 57365
|
||||
const DESCRIPTION = 57366
|
||||
const MATCH_OP = 57357
|
||||
const AGGR_OP = 57358
|
||||
const CMP_OP = 57359
|
||||
const ADDITIVE_OP = 57360
|
||||
const MULT_OP = 57361
|
||||
const MATCH_MOD = 57362
|
||||
const ALERT = 57363
|
||||
const IF = 57364
|
||||
const FOR = 57365
|
||||
const WITH = 57366
|
||||
const SUMMARY = 57367
|
||||
const DESCRIPTION = 57368
|
||||
|
||||
var yyToknames = []string{
|
||||
"START_RULES",
|
||||
|
@ -59,10 +62,12 @@ var yyToknames = []string{
|
|||
"GROUP_OP",
|
||||
"KEEPING_EXTRA",
|
||||
"OFFSET",
|
||||
"MATCH_OP",
|
||||
"AGGR_OP",
|
||||
"CMP_OP",
|
||||
"ADDITIVE_OP",
|
||||
"MULT_OP",
|
||||
"MATCH_MOD",
|
||||
"ALERT",
|
||||
"IF",
|
||||
"FOR",
|
||||
|
@ -77,7 +82,7 @@ const yyEofCode = 1
|
|||
const yyErrCode = 2
|
||||
const yyMaxDepth = 200
|
||||
|
||||
//line parser.y:261
|
||||
//line parser.y:279
|
||||
|
||||
//line yacctab:1
|
||||
var yyExca = []int{
|
||||
|
@ -89,97 +94,101 @@ var yyExca = []int{
|
|||
-2, 10,
|
||||
}
|
||||
|
||||
const yyNprod = 52
|
||||
const yyNprod = 55
|
||||
const yyPrivate = 57344
|
||||
|
||||
var yyTokenNames []string
|
||||
var yyStates []string
|
||||
|
||||
const yyLast = 142
|
||||
const yyLast = 155
|
||||
|
||||
var yyAct = []int{
|
||||
|
||||
58, 76, 55, 52, 51, 30, 45, 6, 24, 20,
|
||||
61, 22, 10, 53, 18, 13, 12, 84, 68, 83,
|
||||
67, 11, 18, 36, 37, 38, 21, 19, 20, 21,
|
||||
19, 20, 8, 54, 90, 7, 50, 21, 19, 20,
|
||||
92, 18, 10, 53, 18, 13, 12, 70, 63, 62,
|
||||
88, 11, 18, 57, 31, 21, 19, 20, 86, 87,
|
||||
66, 40, 8, 10, 78, 7, 13, 12, 79, 69,
|
||||
18, 29, 11, 80, 82, 81, 28, 85, 21, 19,
|
||||
20, 19, 20, 8, 91, 77, 7, 41, 40, 94,
|
||||
25, 23, 39, 18, 59, 18, 44, 98, 27, 73,
|
||||
101, 99, 60, 96, 17, 43, 75, 9, 46, 56,
|
||||
31, 47, 16, 33, 97, 102, 13, 65, 35, 48,
|
||||
100, 95, 64, 32, 77, 93, 72, 25, 34, 2,
|
||||
3, 14, 5, 4, 1, 42, 89, 15, 26, 74,
|
||||
71, 49,
|
||||
76, 59, 81, 56, 53, 52, 30, 46, 6, 24,
|
||||
10, 54, 22, 13, 12, 21, 19, 20, 19, 20,
|
||||
11, 62, 21, 19, 20, 20, 18, 90, 96, 111,
|
||||
99, 18, 8, 18, 55, 7, 51, 60, 18, 18,
|
||||
90, 63, 97, 65, 66, 21, 19, 20, 107, 75,
|
||||
68, 67, 10, 54, 64, 13, 12, 21, 19, 20,
|
||||
74, 18, 11, 31, 58, 85, 83, 90, 94, 89,
|
||||
84, 27, 40, 18, 8, 28, 23, 7, 112, 86,
|
||||
88, 87, 29, 91, 21, 19, 20, 82, 73, 10,
|
||||
72, 98, 13, 12, 92, 93, 101, 71, 41, 11,
|
||||
18, 42, 41, 25, 49, 106, 45, 78, 109, 108,
|
||||
80, 8, 103, 36, 7, 61, 44, 17, 105, 37,
|
||||
9, 47, 57, 31, 104, 33, 48, 16, 13, 70,
|
||||
35, 113, 110, 102, 38, 39, 32, 69, 77, 82,
|
||||
100, 25, 34, 2, 3, 14, 5, 4, 1, 43,
|
||||
95, 15, 26, 79, 50,
|
||||
}
|
||||
var yyPact = []int{
|
||||
|
||||
125, -1000, -1000, 57, 93, -1000, 21, 57, 121, 72,
|
||||
47, 42, -1000, -1000, -1000, 107, 122, -1000, 110, 57,
|
||||
57, 57, 62, 60, -1000, 80, 94, 84, 6, 57,
|
||||
96, 24, 68, -1000, 82, -22, -9, -17, 64, -1000,
|
||||
121, 94, 115, -1000, -1000, -1000, 109, -1000, 33, -10,
|
||||
-1000, -1000, 21, -1000, 39, 18, -1000, 120, 74, 79,
|
||||
57, 94, -1000, -1000, -1000, -1000, -1000, -1000, 36, 98,
|
||||
57, -11, -1000, 57, 31, -1000, -1000, 25, 13, -1000,
|
||||
-1000, 96, 10, -1000, 119, 21, -1000, 118, 114, 81,
|
||||
106, -1000, -1000, -1000, -1000, -1000, 68, -1000, 78, 113,
|
||||
76, 108, -1000,
|
||||
139, -1000, -1000, 83, 106, -1000, 67, 83, 135, 43,
|
||||
44, 51, -1000, -1000, -1000, 119, 136, -1000, 122, 104,
|
||||
104, 104, 40, 72, -1000, 89, 107, 97, 4, 83,
|
||||
109, 33, 9, -1000, 93, -13, 83, 23, 83, 83,
|
||||
-1000, 135, 107, 130, -1000, -1000, -1000, 121, -1000, 68,
|
||||
58, -1000, -1000, 67, -1000, 28, 18, -1000, 132, 80,
|
||||
81, 83, 107, 6, 132, -7, 0, -1000, -1000, -1000,
|
||||
-1000, -1000, -1000, 46, 111, 83, 37, -1000, 83, 65,
|
||||
-1000, -1000, 41, 5, -1000, 10, -1000, 109, -2, -1000,
|
||||
134, 67, -1000, 133, 126, 88, 116, 98, -1000, -1000,
|
||||
-1000, -1000, -1000, 9, -1000, 17, 84, 132, 125, -3,
|
||||
52, -1000, 124, -1000,
|
||||
}
|
||||
var yyPgo = []int{
|
||||
|
||||
0, 141, 140, 5, 1, 139, 0, 8, 91, 138,
|
||||
3, 4, 137, 2, 136, 107, 135, 6, 134, 133,
|
||||
132, 131,
|
||||
0, 154, 0, 6, 2, 153, 1, 9, 76, 152,
|
||||
113, 4, 5, 151, 3, 150, 120, 149, 7, 148,
|
||||
147, 146, 145,
|
||||
}
|
||||
var yyR1 = []int{
|
||||
|
||||
0, 18, 18, 19, 19, 20, 21, 21, 14, 14,
|
||||
12, 12, 15, 15, 6, 6, 6, 5, 5, 4,
|
||||
9, 9, 9, 8, 8, 7, 16, 16, 17, 17,
|
||||
10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
|
||||
10, 10, 13, 13, 3, 3, 2, 2, 1, 1,
|
||||
11, 11,
|
||||
0, 19, 19, 20, 20, 21, 22, 22, 15, 15,
|
||||
13, 13, 16, 16, 6, 6, 6, 5, 5, 4,
|
||||
9, 9, 9, 8, 8, 7, 17, 17, 18, 18,
|
||||
11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
|
||||
11, 11, 14, 14, 10, 10, 10, 3, 3, 2,
|
||||
2, 1, 1, 12, 12,
|
||||
}
|
||||
var yyR2 = []int{
|
||||
|
||||
0, 2, 2, 0, 2, 1, 5, 11, 0, 2,
|
||||
0, 1, 1, 1, 0, 3, 2, 1, 3, 3,
|
||||
0, 2, 3, 1, 3, 3, 1, 1, 0, 2,
|
||||
3, 4, 3, 4, 3, 5, 6, 6, 3, 3,
|
||||
3, 1, 0, 1, 0, 4, 1, 3, 1, 3,
|
||||
1, 1,
|
||||
3, 4, 3, 4, 3, 5, 6, 6, 4, 4,
|
||||
4, 1, 0, 1, 0, 4, 8, 0, 4, 1,
|
||||
3, 1, 3, 1, 1,
|
||||
}
|
||||
var yyChk = []int{
|
||||
|
||||
-1000, -18, 4, 5, -19, -20, -10, 29, 26, -15,
|
||||
6, 15, 10, 9, -21, -12, 19, 11, 31, 17,
|
||||
18, 16, -10, -8, -7, 6, -9, 26, 29, 29,
|
||||
-3, 12, -15, 6, 6, 8, -10, -10, -10, 30,
|
||||
28, 27, -16, 25, 16, -17, 14, 27, -8, -1,
|
||||
30, -11, -10, 7, -10, -13, 13, 29, -6, 26,
|
||||
20, 32, -7, -17, 7, 8, 27, 30, 28, 30,
|
||||
29, -2, 6, 25, -5, 27, -4, 6, -10, -17,
|
||||
-11, -3, -10, 30, 28, -10, 27, 28, 25, -14,
|
||||
21, -13, 30, 6, -4, 7, 22, 8, -6, 23,
|
||||
7, 24, 7,
|
||||
-1000, -19, 4, 5, -20, -21, -11, 31, 28, -16,
|
||||
6, 16, 10, 9, -22, -13, 21, 11, 33, 18,
|
||||
19, 17, -11, -8, -7, 6, -9, 28, 31, 31,
|
||||
-3, 12, -16, 6, 6, 8, -10, 15, -10, -10,
|
||||
32, 30, 29, -17, 27, 17, -18, 14, 29, -8,
|
||||
-1, 32, -12, -11, 7, -11, -14, 13, 31, -6,
|
||||
28, 22, 34, -11, 31, -11, -11, -7, -18, 7,
|
||||
8, 29, 32, 30, 32, 31, -2, 6, 27, -5,
|
||||
29, -4, 6, -11, -18, -2, -12, -3, -11, 32,
|
||||
30, -11, 29, 30, 27, -15, 23, 32, -14, 32,
|
||||
6, -4, 7, 24, 8, 20, -6, 31, 25, -2,
|
||||
7, 32, 26, 7,
|
||||
}
|
||||
var yyDef = []int{
|
||||
|
||||
0, -2, 3, 0, -2, 2, 5, 0, 0, 20,
|
||||
13, 44, 41, 12, 4, 0, 0, 11, 0, 0,
|
||||
0, 0, 0, 0, 23, 0, 28, 0, 0, 0,
|
||||
42, 0, 14, 13, 0, 0, 38, 39, 40, 30,
|
||||
0, 28, 0, 26, 27, 32, 0, 21, 0, 0,
|
||||
34, 48, 50, 51, 0, 0, 43, 0, 0, 0,
|
||||
0, 28, 24, 31, 25, 29, 22, 33, 0, 44,
|
||||
0, 0, 46, 0, 0, 16, 17, 0, 8, 35,
|
||||
49, 42, 0, 45, 0, 6, 15, 0, 0, 0,
|
||||
0, 36, 37, 47, 18, 19, 14, 9, 0, 0,
|
||||
0, 0, 7,
|
||||
13, 47, 41, 12, 4, 0, 0, 11, 0, 44,
|
||||
44, 44, 0, 0, 23, 0, 28, 0, 0, 0,
|
||||
42, 0, 14, 13, 0, 0, 0, 0, 0, 0,
|
||||
30, 0, 28, 0, 26, 27, 32, 0, 21, 0,
|
||||
0, 34, 51, 53, 54, 0, 0, 43, 0, 0,
|
||||
0, 0, 28, 38, 0, 39, 40, 24, 31, 25,
|
||||
29, 22, 33, 0, 47, 0, 0, 49, 0, 0,
|
||||
16, 17, 0, 8, 35, 0, 52, 42, 0, 48,
|
||||
0, 6, 15, 0, 0, 0, 0, 45, 36, 37,
|
||||
50, 18, 19, 14, 9, 0, 0, 0, 0, 0,
|
||||
0, 46, 0, 7,
|
||||
}
|
||||
var yyTok1 = []int{
|
||||
|
||||
|
@ -187,21 +196,21 @@ var yyTok1 = []int{
|
|||
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
|
||||
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
|
||||
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
|
||||
29, 30, 3, 3, 28, 3, 3, 3, 3, 3,
|
||||
31, 32, 3, 3, 30, 3, 3, 3, 3, 3,
|
||||
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
|
||||
3, 25, 3, 3, 3, 3, 3, 3, 3, 3,
|
||||
3, 27, 3, 3, 3, 3, 3, 3, 3, 3,
|
||||
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
|
||||
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
|
||||
3, 31, 3, 32, 3, 3, 3, 3, 3, 3,
|
||||
3, 33, 3, 34, 3, 3, 3, 3, 3, 3,
|
||||
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
|
||||
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
|
||||
3, 3, 3, 26, 3, 27,
|
||||
3, 3, 3, 28, 3, 29,
|
||||
}
|
||||
var yyTok2 = []int{
|
||||
|
||||
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
|
||||
12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
|
||||
22, 23, 24,
|
||||
22, 23, 24, 25, 26,
|
||||
}
|
||||
var yyTok3 = []int{
|
||||
0,
|
||||
|
@ -433,12 +442,12 @@ yydefault:
|
|||
switch yynt {
|
||||
|
||||
case 5:
|
||||
//line parser.y:74
|
||||
//line parser.y:76
|
||||
{
|
||||
yylex.(*RulesLexer).parsedExpr = yyS[yypt-0].ruleNode
|
||||
}
|
||||
case 6:
|
||||
//line parser.y:79
|
||||
//line parser.y:81
|
||||
{
|
||||
rule, err := CreateRecordingRule(yyS[yypt-3].str, yyS[yypt-2].labelSet, yyS[yypt-0].ruleNode, yyS[yypt-4].boolean)
|
||||
if err != nil {
|
||||
|
@ -448,7 +457,7 @@ yydefault:
|
|||
yylex.(*RulesLexer).parsedRules = append(yylex.(*RulesLexer).parsedRules, rule)
|
||||
}
|
||||
case 7:
|
||||
//line parser.y:85
|
||||
//line parser.y:87
|
||||
{
|
||||
rule, err := CreateAlertingRule(yyS[yypt-9].str, yyS[yypt-7].ruleNode, yyS[yypt-6].str, yyS[yypt-4].labelSet, yyS[yypt-2].str, yyS[yypt-0].str)
|
||||
if err != nil {
|
||||
|
@ -458,94 +467,94 @@ yydefault:
|
|||
yylex.(*RulesLexer).parsedRules = append(yylex.(*RulesLexer).parsedRules, rule)
|
||||
}
|
||||
case 8:
|
||||
//line parser.y:93
|
||||
//line parser.y:95
|
||||
{
|
||||
yyVAL.str = "0s"
|
||||
}
|
||||
case 9:
|
||||
//line parser.y:95
|
||||
//line parser.y:97
|
||||
{
|
||||
yyVAL.str = yyS[yypt-0].str
|
||||
}
|
||||
case 10:
|
||||
//line parser.y:99
|
||||
//line parser.y:101
|
||||
{
|
||||
yyVAL.boolean = false
|
||||
}
|
||||
case 11:
|
||||
//line parser.y:101
|
||||
//line parser.y:103
|
||||
{
|
||||
yyVAL.boolean = true
|
||||
}
|
||||
case 12:
|
||||
//line parser.y:105
|
||||
{
|
||||
yyVAL.str = yyS[yypt-0].str
|
||||
}
|
||||
case 13:
|
||||
//line parser.y:107
|
||||
{
|
||||
yyVAL.str = yyS[yypt-0].str
|
||||
}
|
||||
case 13:
|
||||
//line parser.y:109
|
||||
{
|
||||
yyVAL.str = yyS[yypt-0].str
|
||||
}
|
||||
case 14:
|
||||
//line parser.y:111
|
||||
//line parser.y:113
|
||||
{
|
||||
yyVAL.labelSet = clientmodel.LabelSet{}
|
||||
}
|
||||
case 15:
|
||||
//line parser.y:113
|
||||
//line parser.y:115
|
||||
{
|
||||
yyVAL.labelSet = yyS[yypt-1].labelSet
|
||||
}
|
||||
case 16:
|
||||
//line parser.y:115
|
||||
//line parser.y:117
|
||||
{
|
||||
yyVAL.labelSet = clientmodel.LabelSet{}
|
||||
}
|
||||
case 17:
|
||||
//line parser.y:118
|
||||
//line parser.y:120
|
||||
{
|
||||
yyVAL.labelSet = yyS[yypt-0].labelSet
|
||||
}
|
||||
case 18:
|
||||
//line parser.y:120
|
||||
//line parser.y:122
|
||||
{
|
||||
for k, v := range yyS[yypt-0].labelSet {
|
||||
yyVAL.labelSet[k] = v
|
||||
}
|
||||
}
|
||||
case 19:
|
||||
//line parser.y:124
|
||||
//line parser.y:126
|
||||
{
|
||||
yyVAL.labelSet = clientmodel.LabelSet{clientmodel.LabelName(yyS[yypt-2].str): clientmodel.LabelValue(yyS[yypt-0].str)}
|
||||
}
|
||||
case 20:
|
||||
//line parser.y:128
|
||||
{
|
||||
yyVAL.labelMatchers = metric.LabelMatchers{}
|
||||
}
|
||||
case 21:
|
||||
//line parser.y:130
|
||||
{
|
||||
yyVAL.labelMatchers = metric.LabelMatchers{}
|
||||
}
|
||||
case 22:
|
||||
case 21:
|
||||
//line parser.y:132
|
||||
{
|
||||
yyVAL.labelMatchers = metric.LabelMatchers{}
|
||||
}
|
||||
case 22:
|
||||
//line parser.y:134
|
||||
{
|
||||
yyVAL.labelMatchers = yyS[yypt-1].labelMatchers
|
||||
}
|
||||
case 23:
|
||||
//line parser.y:136
|
||||
//line parser.y:138
|
||||
{
|
||||
yyVAL.labelMatchers = metric.LabelMatchers{yyS[yypt-0].labelMatcher}
|
||||
}
|
||||
case 24:
|
||||
//line parser.y:138
|
||||
//line parser.y:140
|
||||
{
|
||||
yyVAL.labelMatchers = append(yyVAL.labelMatchers, yyS[yypt-0].labelMatcher)
|
||||
}
|
||||
case 25:
|
||||
//line parser.y:142
|
||||
//line parser.y:144
|
||||
{
|
||||
var err error
|
||||
yyVAL.labelMatcher, err = newLabelMatcher(yyS[yypt-1].str, clientmodel.LabelName(yyS[yypt-2].str), clientmodel.LabelValue(yyS[yypt-0].str))
|
||||
|
@ -555,32 +564,32 @@ yydefault:
|
|||
}
|
||||
}
|
||||
case 26:
|
||||
//line parser.y:150
|
||||
//line parser.y:152
|
||||
{
|
||||
yyVAL.str = "="
|
||||
}
|
||||
case 27:
|
||||
//line parser.y:152
|
||||
//line parser.y:154
|
||||
{
|
||||
yyVAL.str = yyS[yypt-0].str
|
||||
}
|
||||
case 28:
|
||||
//line parser.y:156
|
||||
//line parser.y:158
|
||||
{
|
||||
yyVAL.str = "0s"
|
||||
}
|
||||
case 29:
|
||||
//line parser.y:158
|
||||
//line parser.y:160
|
||||
{
|
||||
yyVAL.str = yyS[yypt-0].str
|
||||
}
|
||||
case 30:
|
||||
//line parser.y:162
|
||||
//line parser.y:164
|
||||
{
|
||||
yyVAL.ruleNode = yyS[yypt-1].ruleNode
|
||||
}
|
||||
case 31:
|
||||
//line parser.y:164
|
||||
//line parser.y:166
|
||||
{
|
||||
var err error
|
||||
yyVAL.ruleNode, err = NewVectorSelector(yyS[yypt-2].labelMatchers, yyS[yypt-0].str)
|
||||
|
@ -590,7 +599,7 @@ yydefault:
|
|||
}
|
||||
}
|
||||
case 32:
|
||||
//line parser.y:170
|
||||
//line parser.y:172
|
||||
{
|
||||
var err error
|
||||
m, err := metric.NewLabelMatcher(metric.Equal, clientmodel.MetricNameLabel, clientmodel.LabelValue(yyS[yypt-2].str))
|
||||
|
@ -606,7 +615,7 @@ yydefault:
|
|||
}
|
||||
}
|
||||
case 33:
|
||||
//line parser.y:179
|
||||
//line parser.y:181
|
||||
{
|
||||
var err error
|
||||
yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-3].str, yyS[yypt-1].ruleNodeSlice)
|
||||
|
@ -616,7 +625,7 @@ yydefault:
|
|||
}
|
||||
}
|
||||
case 34:
|
||||
//line parser.y:185
|
||||
//line parser.y:187
|
||||
{
|
||||
var err error
|
||||
yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-2].str, []ast.Node{})
|
||||
|
@ -626,7 +635,7 @@ yydefault:
|
|||
}
|
||||
}
|
||||
case 35:
|
||||
//line parser.y:191
|
||||
//line parser.y:193
|
||||
{
|
||||
var err error
|
||||
yyVAL.ruleNode, err = NewMatrixSelector(yyS[yypt-4].ruleNode, yyS[yypt-2].str, yyS[yypt-0].str)
|
||||
|
@ -636,7 +645,7 @@ yydefault:
|
|||
}
|
||||
}
|
||||
case 36:
|
||||
//line parser.y:197
|
||||
//line parser.y:199
|
||||
{
|
||||
var err error
|
||||
yyVAL.ruleNode, err = NewVectorAggregation(yyS[yypt-5].str, yyS[yypt-3].ruleNode, yyS[yypt-1].labelNameSlice, yyS[yypt-0].boolean)
|
||||
|
@ -646,7 +655,7 @@ yydefault:
|
|||
}
|
||||
}
|
||||
case 37:
|
||||
//line parser.y:203
|
||||
//line parser.y:205
|
||||
{
|
||||
var err error
|
||||
yyVAL.ruleNode, err = NewVectorAggregation(yyS[yypt-5].str, yyS[yypt-1].ruleNode, yyS[yypt-4].labelNameSlice, yyS[yypt-3].boolean)
|
||||
|
@ -656,87 +665,112 @@ yydefault:
|
|||
}
|
||||
}
|
||||
case 38:
|
||||
//line parser.y:211
|
||||
//line parser.y:213
|
||||
{
|
||||
var err error
|
||||
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
|
||||
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-2].str, yyS[yypt-3].ruleNode, yyS[yypt-0].ruleNode, yyS[yypt-1].vectorMatching)
|
||||
if err != nil {
|
||||
yylex.Error(err.Error())
|
||||
return 1
|
||||
}
|
||||
}
|
||||
case 39:
|
||||
//line parser.y:217
|
||||
//line parser.y:219
|
||||
{
|
||||
var err error
|
||||
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
|
||||
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-2].str, yyS[yypt-3].ruleNode, yyS[yypt-0].ruleNode, yyS[yypt-1].vectorMatching)
|
||||
if err != nil {
|
||||
yylex.Error(err.Error())
|
||||
return 1
|
||||
}
|
||||
}
|
||||
case 40:
|
||||
//line parser.y:223
|
||||
//line parser.y:225
|
||||
{
|
||||
var err error
|
||||
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
|
||||
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-2].str, yyS[yypt-3].ruleNode, yyS[yypt-0].ruleNode, yyS[yypt-1].vectorMatching)
|
||||
if err != nil {
|
||||
yylex.Error(err.Error())
|
||||
return 1
|
||||
}
|
||||
}
|
||||
case 41:
|
||||
//line parser.y:229
|
||||
//line parser.y:231
|
||||
{
|
||||
yyVAL.ruleNode = ast.NewScalarLiteral(yyS[yypt-0].num)
|
||||
}
|
||||
case 42:
|
||||
//line parser.y:233
|
||||
//line parser.y:235
|
||||
{
|
||||
yyVAL.boolean = false
|
||||
}
|
||||
case 43:
|
||||
//line parser.y:235
|
||||
//line parser.y:237
|
||||
{
|
||||
yyVAL.boolean = true
|
||||
}
|
||||
case 44:
|
||||
//line parser.y:239
|
||||
//line parser.y:241
|
||||
{
|
||||
yyVAL.vectorMatching = nil
|
||||
}
|
||||
case 45:
|
||||
//line parser.y:243
|
||||
{
|
||||
var err error
|
||||
yyVAL.vectorMatching, err = newVectorMatching("", yyS[yypt-1].labelNameSlice, nil)
|
||||
if err != nil {
|
||||
yylex.Error(err.Error())
|
||||
return 1
|
||||
}
|
||||
}
|
||||
case 46:
|
||||
//line parser.y:249
|
||||
{
|
||||
var err error
|
||||
yyVAL.vectorMatching, err = newVectorMatching(yyS[yypt-3].str, yyS[yypt-5].labelNameSlice, yyS[yypt-1].labelNameSlice)
|
||||
if err != nil {
|
||||
yylex.Error(err.Error())
|
||||
return 1
|
||||
}
|
||||
}
|
||||
case 47:
|
||||
//line parser.y:257
|
||||
{
|
||||
yyVAL.labelNameSlice = clientmodel.LabelNames{}
|
||||
}
|
||||
case 45:
|
||||
//line parser.y:241
|
||||
case 48:
|
||||
//line parser.y:259
|
||||
{
|
||||
yyVAL.labelNameSlice = yyS[yypt-1].labelNameSlice
|
||||
}
|
||||
case 46:
|
||||
//line parser.y:245
|
||||
case 49:
|
||||
//line parser.y:263
|
||||
{
|
||||
yyVAL.labelNameSlice = clientmodel.LabelNames{clientmodel.LabelName(yyS[yypt-0].str)}
|
||||
}
|
||||
case 47:
|
||||
//line parser.y:247
|
||||
case 50:
|
||||
//line parser.y:265
|
||||
{
|
||||
yyVAL.labelNameSlice = append(yyVAL.labelNameSlice, clientmodel.LabelName(yyS[yypt-0].str))
|
||||
}
|
||||
case 48:
|
||||
//line parser.y:251
|
||||
case 51:
|
||||
//line parser.y:269
|
||||
{
|
||||
yyVAL.ruleNodeSlice = []ast.Node{yyS[yypt-0].ruleNode}
|
||||
}
|
||||
case 49:
|
||||
//line parser.y:253
|
||||
case 52:
|
||||
//line parser.y:271
|
||||
{
|
||||
yyVAL.ruleNodeSlice = append(yyVAL.ruleNodeSlice, yyS[yypt-0].ruleNode)
|
||||
}
|
||||
case 50:
|
||||
//line parser.y:257
|
||||
case 53:
|
||||
//line parser.y:275
|
||||
{
|
||||
yyVAL.ruleNode = yyS[yypt-0].ruleNode
|
||||
}
|
||||
case 51:
|
||||
//line parser.y:259
|
||||
case 54:
|
||||
//line parser.y:277
|
||||
{
|
||||
yyVAL.ruleNode = ast.NewStringLiteral(yyS[yypt-0].str)
|
||||
}
|
||||
|
|
|
@@ -630,6 +630,12 @@ func TestExpressions(t *testing.T) {
			`request_duration_seconds_bucket{instance="ins2", job="job2", le="0.1"} => 40 @[%v]`,
			`request_duration_seconds_bucket{instance="ins2", job="job2", le="0.2"} => 70 @[%v]`,
			`request_duration_seconds_bucket{instance="ins2", job="job2", le="+Inf"} => 90 @[%v]`,
			`vector_matching_a{l="x"} => 10 @[%v]`,
			`vector_matching_a{l="y"} => 20 @[%v]`,
			`vector_matching_b{l="x"} => 40 @[%v]`,
			`cpu_count{instance="1", type="smp"} => 200 @[%v]`,
			`cpu_count{instance="0", type="smp"} => 100 @[%v]`,
			`cpu_count{instance="0", type="numa"} => 300 @[%v]`,
		},
	},
	{
@ -666,6 +672,191 @@ func TestExpressions(t *testing.T) {
|
|||
`{job="api-server"} => 1000 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: `http_requests{group="canary"} and http_requests{instance="0"}`,
|
||||
output: []string{
|
||||
`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
|
||||
`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: `(http_requests{group="canary"} + 1) and http_requests{instance="0"}`,
|
||||
output: []string{
|
||||
`{group="canary", instance="0", job="api-server"} => 301 @[%v]`,
|
||||
`{group="canary", instance="0", job="app-server"} => 701 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: `(http_requests{group="canary"} + 1) and on(instance, job) http_requests{instance="0", group="production"}`,
|
||||
output: []string{
|
||||
`{group="canary", instance="0", job="api-server"} => 301 @[%v]`,
|
||||
`{group="canary", instance="0", job="app-server"} => 701 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: `(http_requests{group="canary"} + 1) and on(instance) http_requests{instance="0", group="production"}`,
|
||||
output: []string{
|
||||
`{group="canary", instance="0", job="api-server"} => 301 @[%v]`,
|
||||
`{group="canary", instance="0", job="app-server"} => 701 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: `http_requests{group="canary"} or http_requests{group="production"}`,
|
||||
output: []string{
|
||||
`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
|
||||
`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
|
||||
`http_requests{group="canary", instance="1", job="api-server"} => 400 @[%v]`,
|
||||
`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
|
||||
`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
|
||||
`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
|
||||
`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
|
||||
`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
// On overlap the rhs samples must be dropped.
|
||||
expr: `(http_requests{group="canary"} + 1) or http_requests{instance="1"}`,
|
||||
output: []string{
|
||||
`{group="canary", instance="0", job="api-server"} => 301 @[%v]`,
|
||||
`{group="canary", instance="0", job="app-server"} => 701 @[%v]`,
|
||||
`{group="canary", instance="1", job="api-server"} => 401 @[%v]`,
|
||||
`{group="canary", instance="1", job="app-server"} => 801 @[%v]`,
|
||||
`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
|
||||
`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
// Matching only on instance excludes everything that has instance=0/1 but includes
|
||||
// entries without the instance label.
|
||||
expr: `(http_requests{group="canary"} + 1) or on(instance) (http_requests or cpu_count or vector_matching_a)`,
|
||||
output: []string{
|
||||
`{group="canary", instance="0", job="api-server"} => 301 @[%v]`,
|
||||
`{group="canary", instance="0", job="app-server"} => 701 @[%v]`,
|
||||
`{group="canary", instance="1", job="api-server"} => 401 @[%v]`,
|
||||
`{group="canary", instance="1", job="app-server"} => 801 @[%v]`,
|
||||
`vector_matching_a{l="x"} => 10 @[%v]`,
|
||||
`vector_matching_a{l="y"} => 20 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: `http_requests{group="canary"} / on(instance,job) http_requests{group="production"}`,
|
||||
output: []string{
|
||||
`{instance="0", job="api-server"} => 3 @[%v]`,
|
||||
`{instance="0", job="app-server"} => 1.4 @[%v]`,
|
||||
`{instance="1", job="api-server"} => 2 @[%v]`,
|
||||
`{instance="1", job="app-server"} => 1.3333333333333333 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
// Include labels must guarantee uniquely identifiable time series.
|
||||
expr: `http_requests{group="production"} / on(instance) group_left(group) cpu_count{type="smp"}`,
|
||||
output: []string{}, // Empty result returned on error (see TODOs).
|
||||
},
|
||||
{
|
||||
// Many-to-many matching is not allowed.
|
||||
expr: `http_requests{group="production"} / on(instance) group_left(job,type) cpu_count`,
|
||||
output: []string{}, // Empty result returned on error (see TODOs).
|
||||
},
|
||||
{
|
||||
// Many-to-one matching must be explicit.
|
||||
expr: `http_requests{group="production"} / on(instance) cpu_count{type="smp"}`,
|
||||
output: []string{}, // Empty result returned on error (see TODOs).
|
||||
},
|
||||
{
|
||||
expr: `http_requests{group="production"} / on(instance) group_left(job) cpu_count{type="smp"}`,
|
||||
output: []string{
|
||||
`{instance="1", job="api-server"} => 1 @[%v]`,
|
||||
`{instance="0", job="app-server"} => 5 @[%v]`,
|
||||
`{instance="1", job="app-server"} => 3 @[%v]`,
|
||||
`{instance="0", job="api-server"} => 1 @[%v]`,
|
||||
},
|
||||
},
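As a quick arithmetic check of the group_left expectation above: at the evaluation time the fixtures give http_requests{group="production"} the values 100, 200, 500 and 600 (by instance/job) and cpu_count{type="smp"} the values 100 for instance 0 and 200 for instance 1, so the per-series divisions are:

	{instance="0", job="api-server"}: 100 / 100 = 1
	{instance="0", job="app-server"}: 500 / 100 = 5
	{instance="1", job="api-server"}: 200 / 200 = 1
	{instance="1", job="app-server"}: 600 / 200 = 3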
|
||||
{
|
||||
// Ensure sidedness of grouping preserves operand sides.
|
||||
expr: `cpu_count{type="smp"} / on(instance) group_right(job) http_requests{group="production"}`,
|
||||
output: []string{
|
||||
`{instance="1", job="app-server"} => 0.3333333333333333 @[%v]`,
|
||||
`{instance="0", job="app-server"} => 0.2 @[%v]`,
|
||||
`{instance="1", job="api-server"} => 1 @[%v]`,
|
||||
`{instance="0", job="api-server"} => 1 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
// Include labels from both sides.
|
||||
expr: `http_requests{group="production"} / on(instance) group_left(job) cpu_count{type="smp"}`,
|
||||
output: []string{
|
||||
`{instance="1", job="api-server"} => 1 @[%v]`,
|
||||
`{instance="0", job="app-server"} => 5 @[%v]`,
|
||||
`{instance="1", job="app-server"} => 3 @[%v]`,
|
||||
`{instance="0", job="api-server"} => 1 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: `http_requests{group="production"} < on(instance,job) http_requests{group="canary"}`,
|
||||
output: []string{
|
||||
`{instance="1", job="app-server"} => 600 @[%v]`,
|
||||
`{instance="0", job="app-server"} => 500 @[%v]`,
|
||||
`{instance="1", job="api-server"} => 200 @[%v]`,
|
||||
`{instance="0", job="api-server"} => 100 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: `http_requests{group="production"} > on(instance,job) http_requests{group="canary"}`,
|
||||
output: []string{},
|
||||
},
|
||||
{
|
||||
expr: `http_requests{group="production"} == on(instance,job) http_requests{group="canary"}`,
|
||||
output: []string{},
|
||||
},
|
||||
{
|
||||
expr: `http_requests > on(instance) group_left(group,job) cpu_count{type="smp"}`,
|
||||
output: []string{
|
||||
`{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
|
||||
`{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
|
||||
`{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
|
||||
`{group="canary", instance="1", job="api-server"} => 400 @[%v]`,
|
||||
`{group="production", instance="0", job="app-server"} => 500 @[%v]`,
|
||||
`{group="production", instance="1", job="app-server"} => 600 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: `http_requests / on(instance) 3`,
|
||||
shouldFail: true,
|
||||
},
|
||||
{
|
||||
expr: `3 / on(instance) http_requests_total`,
|
||||
shouldFail: true,
|
||||
},
|
||||
{
|
||||
expr: `3 / on(instance) 3`,
|
||||
shouldFail: true,
|
||||
},
|
||||
{
|
||||
// Missing label list for grouping mod.
|
||||
expr: `http_requests{group="production"} / on(instance) group_left cpu_count{type="smp"}`,
|
||||
shouldFail: true,
|
||||
},
|
||||
{
|
||||
// No group mod allowed for logical operations.
|
||||
expr: `http_requests{group="production"} or on(instance) group_left(type) cpu_count{type="smp"}`,
|
||||
shouldFail: true,
|
||||
},
|
||||
{
|
||||
// No group mod allowed for logical operations.
|
||||
expr: `http_requests{group="production"} and on(instance) group_left(type) cpu_count{type="smp"}`,
|
||||
shouldFail: true,
|
||||
},
|
||||
{
|
||||
// No duplicate use of label.
|
||||
expr: `http_requests{group="production"} + on(instance) group_left(job,instance) cpu_count{type="smp"}`,
|
||||
shouldFail: true,
|
||||
},
|
||||
{
|
||||
expr: `{l="x"} + on(__name__) {l="y"}`,
|
||||
output: []string{
|
||||
`vector_matching_a => 30 @[%v]`,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: `absent(nonexistent)`,
|
||||
output: []string{
|
||||
|
@ -975,10 +1166,14 @@ func TestExpressions(t *testing.T) {
|
|||
t.Errorf("%d. Test should fail, but didn't", i)
|
||||
}
|
||||
failed := false
|
||||
|
||||
resultStr := ast.EvalToString(testExpr, testEvalTime, ast.Text, storage, stats.NewTimerGroup())
|
||||
resultLines := strings.Split(resultStr, "\n")
|
||||
|
||||
if len(exprTest.output) != len(resultLines) {
|
||||
if len(exprTest.output) == 0 && strings.Trim(resultStr, "\n") == "" {
|
||||
// expected and received empty vector, everything is fine
|
||||
continue
|
||||
} else if len(exprTest.output) != len(resultLines) {
|
||||
t.Errorf("%d. Number of samples in expected and actual output don't match", i)
|
||||
failed = true
|
||||
}
|
||||
|
|