Mirror of https://github.com/prometheus/prometheus.git
promql aggregations: pre-generate mapping from inputs to outputs

So we don't have to re-create it on every time step.

Signed-off-by: Bryan Boreham <bjboreham@gmail.com>
Parent: cb6c4b3092
Commit: 53a3138eeb
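The shape of the change is easier to see outside the engine. Below is a minimal, self-contained Go sketch of the same technique, not the Prometheus code itself: the inputSeries and group types, the string grouping keys, and the plain sum are hypothetical stand-ins for the engine's Matrix, groupedAggregation, and generateGroupingKey. The mapping from input series to output groups is built once, and each time step only indexes into it.

package main

import "fmt"

// Hypothetical, simplified stand-ins for the engine's types: a series is a
// label set plus one float sample per evaluation step.
type inputSeries struct {
    labels  map[string]string
    samples []float64
}

type group struct {
    labels map[string]string
    sum    float64
    seen   bool
}

// groupingKey stands in for generateGroupingKey: it derives a key from the
// labels the aggregation groups by.
func groupingKey(lbls map[string]string, by []string) string {
    key := ""
    for _, name := range by {
        key += name + "=" + lbls[name] + ";"
    }
    return key
}

func main() {
    input := []inputSeries{
        {labels: map[string]string{"job": "api", "instance": "a"}, samples: []float64{1, 2, 3}},
        {labels: map[string]string{"job": "api", "instance": "b"}, samples: []float64{4, 5, 6}},
        {labels: map[string]string{"job": "db", "instance": "c"}, samples: []float64{7, 8, 9}},
    }
    by := []string{"job"}

    // Build the input-to-output mapping once, before the time-step loop.
    groupToResultIndex := make(map[string]int)
    seriesToResult := make([]int, len(input))
    orderedResult := make([]*group, 0, 16)
    for si, s := range input {
        key := groupingKey(s.labels, by)
        index, ok := groupToResultIndex[key]
        if !ok {
            // First series seen for this group: allocate its output slot.
            gl := make(map[string]string, len(by))
            for _, name := range by {
                gl[name] = s.labels[name]
            }
            index = len(orderedResult)
            groupToResultIndex[key] = index
            orderedResult = append(orderedResult, &group{labels: gl})
        }
        seriesToResult[si] = index
    }

    // Each time step only resets and indexes into the precomputed groups;
    // no grouping keys or label sets are rebuilt here.
    for ts := 0; ts < len(input[0].samples); ts++ {
        for _, g := range orderedResult {
            g.sum, g.seen = 0, false
        }
        for si, s := range input {
            g := orderedResult[seriesToResult[si]]
            g.sum += s.samples[ts]
            g.seen = true
        }
        for _, g := range orderedResult {
            if g.seen {
                fmt.Printf("t=%d %v sum=%g\n", ts, g.labels, g.sum)
            }
        }
    }
}

The seen flag mirrors the seen []bool slice introduced in aggregation below: per time step it marks which pre-allocated output groups actually received input, so untouched groups can be skipped when results are emitted.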
@@ -1067,8 +1067,6 @@ func (ev *evaluator) Eval(expr parser.Expr) (v parser.Value, ws annotations.Anno

 // EvalSeriesHelper stores extra information about a series.
 type EvalSeriesHelper struct {
-    // The grouping key used by aggregation.
-    groupingKey uint64
     // Used to map left-hand to right-hand in binary operations.
     signature string
 }
@@ -1316,13 +1314,25 @@ func (ev *evaluator) rangeEvalAgg(aggExpr *parser.AggregateExpr, sortedGrouping
     seriess := make(map[uint64]Series, biggestLen) // Output series by series hash.
     tempNumSamples := ev.currentSamples

-    // Initialise series helpers with the grouping key.
+    // Create a mapping from input series to output groups.
     buf := make([]byte, 0, 1024)
-    seriesHelper := make([]EvalSeriesHelper, len(inputMatrix))
+    groupToResultIndex := make(map[uint64]int)
+    seriesToResult := make([]int, len(inputMatrix))
+    orderedResult := make([]*groupedAggregation, 0, 16)

     for si, series := range inputMatrix {
-        seriesHelper[si].groupingKey, buf = generateGroupingKey(series.Metric, sortedGrouping, aggExpr.Without, buf)
+        var groupingKey uint64
+        groupingKey, buf = generateGroupingKey(series.Metric, sortedGrouping, aggExpr.Without, buf)
+        index, ok := groupToResultIndex[groupingKey]
+        // Add a new group if it doesn't exist.
+        if !ok {
+            m := generateGroupingLabels(enh, series.Metric, aggExpr.Without, sortedGrouping)
+            newAgg := &groupedAggregation{labels: m}
+            index = len(orderedResult)
+            groupToResultIndex[groupingKey] = index
+            orderedResult = append(orderedResult, newAgg)
+        }
+        seriesToResult[si] = index
     }

     for ts := ev.startTimestamp; ts <= ev.endTimestamp; ts += ev.interval {
@@ -1334,7 +1344,7 @@ func (ev *evaluator) rangeEvalAgg(aggExpr *parser.AggregateExpr, sortedGrouping

         // Make the function call.
         enh.Ts = ts
-        result, ws := ev.aggregation(aggExpr, sortedGrouping, param, inputMatrix, seriesHelper, enh, seriess)
+        result, ws := ev.aggregation(aggExpr, param, inputMatrix, seriesToResult, orderedResult, enh, seriess)

         warnings.Merge(ws)

@@ -2698,12 +2708,10 @@ type groupedAggregation struct {

 // aggregation evaluates an aggregation operation on a Vector. The provided grouping labels
 // must be sorted.
-func (ev *evaluator) aggregation(e *parser.AggregateExpr, grouping []string, q float64, inputMatrix Matrix, seriesHelper []EvalSeriesHelper, enh *EvalNodeHelper, seriess map[uint64]Series) (Matrix, annotations.Annotations) {
+func (ev *evaluator) aggregation(e *parser.AggregateExpr, q float64, inputMatrix Matrix, seriesToResult []int, orderedResult []*groupedAggregation, enh *EvalNodeHelper, seriess map[uint64]Series) (Matrix, annotations.Annotations) {
     op := e.Op
-    without := e.Without
     var annos annotations.Annotations
-    result := map[uint64]*groupedAggregation{}
-    orderedResult := []*groupedAggregation{}
+    seen := make([]bool, len(orderedResult)) // Which output groups were seen in the input at this timestamp.
     k := 1
     if op == parser.TOPK || op == parser.BOTTOMK {
         if !convertibleToInt64(q) {
@@ -2743,53 +2751,47 @@ func (ev *evaluator) aggregation(e *parser.AggregateExpr, grouping []string, q f
             ev.error(ErrTooManySamples(env))
         }

-        metric := s.Metric
-        groupingKey := seriesHelper[si].groupingKey
-        group, ok := result[groupingKey]
-        // Add a new group if it doesn't exist.
-        if !ok {
-            m := generateGroupingLabels(enh, metric, without, grouping)
-            newAgg := &groupedAggregation{
-                labels:     m,
+        group := orderedResult[seriesToResult[si]]
+        // Initialize this group if it's the first time we've seen it.
+        if !seen[seriesToResult[si]] {
+            *group = groupedAggregation{
+                labels:     group.labels,
                 floatValue: s.F,
                 floatMean:  s.F,
                 groupCount: 1,
             }
             switch {
             case s.H == nil:
-                newAgg.hasFloat = true
+                group.hasFloat = true
             case op == parser.SUM:
-                newAgg.histogramValue = s.H.Copy()
-                newAgg.hasHistogram = true
+                group.histogramValue = s.H.Copy()
+                group.hasHistogram = true
             case op == parser.AVG:
-                newAgg.histogramMean = s.H.Copy()
-                newAgg.hasHistogram = true
+                group.histogramMean = s.H.Copy()
+                group.hasHistogram = true
             case op == parser.STDVAR || op == parser.STDDEV:
-                newAgg.groupCount = 0
+                group.groupCount = 0
             }

             switch op {
             case parser.STDVAR, parser.STDDEV:
-                newAgg.floatValue = 0
+                group.floatValue = 0
             case parser.TOPK, parser.QUANTILE:
-                newAgg.heap = make(vectorByValueHeap, 1, k)
-                newAgg.heap[0] = Sample{
+                group.heap = make(vectorByValueHeap, 1, k)
+                group.heap[0] = Sample{
                     F:      s.F,
                     Metric: s.Metric,
                 }
             case parser.BOTTOMK:
-                newAgg.reverseHeap = make(vectorByReverseValueHeap, 1, k)
-                newAgg.reverseHeap[0] = Sample{
+                group.reverseHeap = make(vectorByReverseValueHeap, 1, k)
+                group.reverseHeap[0] = Sample{
                     F:      s.F,
                     Metric: s.Metric,
                 }
             case parser.GROUP:
-                newAgg.floatValue = 1
+                group.floatValue = 1
             }
-            result[groupingKey] = newAgg
-            orderedResult = append(orderedResult, newAgg)
+            seen[seriesToResult[si]] = true
             continue
         }

@@ -2950,7 +2952,10 @@ func (ev *evaluator) aggregation(e *parser.AggregateExpr, grouping []string, q f
             seriess[hash] = ss
         }
     }
-    for _, aggr := range orderedResult {
+    for ri, aggr := range orderedResult {
+        if !seen[ri] {
+            continue
+        }
         switch op {
         case parser.AVG:
             if aggr.hasFloat && aggr.hasHistogram {