// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package ast

import (
	"container/heap"
	"fmt"
	"math"
	"sort"
	"time"

	clientmodel "github.com/prometheus/client_golang/model"

	"github.com/prometheus/prometheus/storage/metric"
)

// Function represents a function of the expression language and is
// used by function nodes.
type Function struct {
	name       string
	argTypes   []ExprType
	returnType ExprType
	callFn     func(timestamp clientmodel.Timestamp, args []Node) interface{}
}

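// Note: callFn returns its result as an interface{}; the dynamic type is
// expected to correspond to returnType (a Vector for VECTOR functions, a
// clientmodel.SampleValue for SCALAR functions), as the implementations
// below illustrate.
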
// CheckArgTypes returns a non-nil error if the number or types of
// passed in arg nodes do not match the function's expectations.
func (function *Function) CheckArgTypes(args []Node) error {
	if len(function.argTypes) != len(args) {
		return fmt.Errorf(
			"wrong number of arguments to function %v(): %v expected, %v given",
			function.name, len(function.argTypes), len(args),
		)
	}
	for idx, argType := range function.argTypes {
		invalidType := false
		var expectedType string
		if _, ok := args[idx].(ScalarNode); argType == SCALAR && !ok {
			invalidType = true
			expectedType = "scalar"
		}
		if _, ok := args[idx].(VectorNode); argType == VECTOR && !ok {
			invalidType = true
			expectedType = "vector"
		}
		if _, ok := args[idx].(MatrixNode); argType == MATRIX && !ok {
			invalidType = true
			expectedType = "matrix"
		}
		if _, ok := args[idx].(StringNode); argType == STRING && !ok {
			invalidType = true
			expectedType = "string"
		}

		if invalidType {
			return fmt.Errorf(
				"wrong type for argument %v in function %v(), expected %v",
				idx, function.name, expectedType,
			)
		}
	}
	return nil
}

// === time() clientmodel.SampleValue ===
func timeImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	return clientmodel.SampleValue(timestamp.Unix())
}

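// deltaImpl below computes, per series, the difference between the last and
// first sample in the range. When isCounter is non-zero, every observed
// decrease is treated as a counter reset and added back in: for the sample
// values 1, 2, 0, 3 the accumulated correction is 2 and the raw delta is
// 3 - 1 + 2 = 4. The raw delta is then extrapolated from the actually
// sampled interval to the full requested range to reduce temporal aliasing.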
// === delta(matrix MatrixNode, isCounter ScalarNode) Vector ===
func deltaImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	matrixNode := args[0].(MatrixNode)
	isCounter := args[1].(ScalarNode).Eval(timestamp) > 0
	resultVector := Vector{}

	// If we treat these metrics as counters, we need to fetch all values
	// in the interval to find breaks in the timeseries' monotonicity,
	// i.e. counter resets, and compensate for them rather than report a
	// negative delta.
	var matrixValue Matrix
	if isCounter {
		matrixValue = matrixNode.Eval(timestamp)
	} else {
		matrixValue = matrixNode.EvalBoundaries(timestamp)
	}
	for _, samples := range matrixValue {
		// No sense in trying to compute a delta without at least two points.
		// Drop this vector element.
		if len(samples.Values) < 2 {
			continue
		}

		counterCorrection := clientmodel.SampleValue(0)
		lastValue := clientmodel.SampleValue(0)
		for _, sample := range samples.Values {
			currentValue := sample.Value
			if isCounter && currentValue < lastValue {
				counterCorrection += lastValue - currentValue
			}
			lastValue = currentValue
		}
		resultValue := lastValue - samples.Values[0].Value + counterCorrection

		targetInterval := args[0].(*MatrixSelector).interval
		sampledInterval := samples.Values[len(samples.Values)-1].Timestamp.Sub(samples.Values[0].Timestamp)
		if sampledInterval == 0 {
			// All samples in the range share one timestamp. Cannot compute a
			// delta over a zero-length interval.
			continue
		}
		// Correct for differences in target vs. actual delta interval.
		//
		// Above, we didn't actually calculate the delta for the specified target
		// interval, but for an interval between the first and last found samples
		// under the target interval, which will usually have less time between
		// them. Depending on how many samples are found under a target interval,
		// the delta results are distorted and temporal aliasing occurs (ugly
		// bumps). This effect is corrected for below.
		intervalCorrection := clientmodel.SampleValue(targetInterval) / clientmodel.SampleValue(sampledInterval)
		resultValue *= intervalCorrection

		resultSample := &clientmodel.Sample{
			Metric:    samples.Metric,
			Value:     resultValue,
			Timestamp: timestamp,
		}
		resultVector = append(resultVector, resultSample)
	}
	return resultVector
}

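// rateImpl below is implemented in terms of deltaImpl: the matrix is
// evaluated as a counter (isCounter = 1) and the resulting delta is divided
// by the selector's range in seconds, yielding a per-second average rate.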
// === rate(node MatrixNode) Vector ===
func rateImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	args = append(args, &ScalarLiteral{value: 1})
	vector := deltaImpl(timestamp, args).(Vector)

	// TODO: could be other type of MatrixNode in the future (right now, only
	// MatrixSelector exists). Find a better way of getting the duration of a
	// matrix, such as looking at the samples themselves.
	interval := args[0].(*MatrixSelector).interval
	for i := range vector {
		vector[i].Value /= clientmodel.SampleValue(interval / time.Second)
	}
	return vector
}

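// vectorByValueHeap implements sort.Interface and heap.Interface for a
// Vector, ordering samples by increasing value. As a heap it is a min-heap:
// the sample with the smallest value sits at index 0.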
type vectorByValueHeap Vector

func (s vectorByValueHeap) Len() int {
	return len(s)
}

func (s vectorByValueHeap) Less(i, j int) bool {
	return s[i].Value < s[j].Value
}

func (s vectorByValueHeap) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

func (s *vectorByValueHeap) Push(x interface{}) {
	*s = append(*s, x.(*clientmodel.Sample))
}

func (s *vectorByValueHeap) Pop() interface{} {
	old := *s
	n := len(old)
	el := old[n-1]
	*s = old[0 : n-1]
	return el
}

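// reverseHeap inverts the ordering of the embedded heap.Interface, turning
// a min-heap into a max-heap. It is used by bottomkImpl below.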
type reverseHeap struct {
	heap.Interface
}

func (s reverseHeap) Less(i, j int) bool {
	return s.Interface.Less(j, i)
}

// === sort(node VectorNode) Vector ===
func sortImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	byValueSorter := vectorByValueHeap(args[0].(VectorNode).Eval(timestamp))
	sort.Sort(byValueSorter)
	return Vector(byValueSorter)
}

// === sort_desc(node VectorNode) Vector ===
func sortDescImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	byValueSorter := vectorByValueHeap(args[0].(VectorNode).Eval(timestamp))
	sort.Sort(sort.Reverse(byValueSorter))
	return Vector(byValueSorter)
}

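// topkImpl maintains a bounded min-heap of the k largest samples seen so
// far: once the heap holds k elements, a new sample only displaces the root
// (the smallest of the current top k) if its value is larger. The heap is
// sorted in descending order before being returned.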
// === topk(k ScalarNode, node VectorNode) Vector ===
func topkImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	k := int(args[0].(ScalarNode).Eval(timestamp))
	if k < 1 {
		return Vector{}
	}

	topk := make(vectorByValueHeap, 0, k)
	vector := args[1].(VectorNode).Eval(timestamp)

	for _, el := range vector {
		if len(topk) < k || topk[0].Value < el.Value {
			if len(topk) == k {
				heap.Pop(&topk)
			}
			heap.Push(&topk, el)
		}
	}
	sort.Sort(sort.Reverse(topk))
	return Vector(topk)
}

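// bottomkImpl mirrors topkImpl: wrapping the heap in reverseHeap turns the
// value min-heap into a max-heap, so the root is the largest of the k
// smallest samples kept so far and is displaced by any smaller sample.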
// === bottomk(k ScalarNode, node VectorNode) Vector ===
func bottomkImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	k := int(args[0].(ScalarNode).Eval(timestamp))
	if k < 1 {
		return Vector{}
	}

	bottomk := make(vectorByValueHeap, 0, k)
	bkHeap := reverseHeap{Interface: &bottomk}
	vector := args[1].(VectorNode).Eval(timestamp)

	for _, el := range vector {
		if len(bottomk) < k || bottomk[0].Value > el.Value {
			if len(bottomk) == k {
				heap.Pop(&bkHeap)
			}
			heap.Push(&bkHeap, el)
		}
	}
	sort.Sort(bottomk)
	return Vector(bottomk)
}

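// dropCommonLabelsImpl first collects the label pairs (excluding the metric
// name) that are identical across every sample in the input vector and then
// deletes exactly those labels from each sample's metric.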
// === drop_common_labels(node VectorNode) Vector ===
func dropCommonLabelsImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	vector := args[0].(VectorNode).Eval(timestamp)
	if len(vector) < 1 {
		return Vector{}
	}
	common := clientmodel.LabelSet{}
	for k, v := range vector[0].Metric {
		// TODO(julius): Revisit this when https://github.com/prometheus/prometheus/issues/380
		// is implemented.
		if k == clientmodel.MetricNameLabel {
			continue
		}
		common[k] = v
	}

	for _, el := range vector[1:] {
		for k, v := range common {
			if el.Metric[k] != v {
				// Deletion of map entries while iterating over them is safe.
				// From http://golang.org/ref/spec#For_statements:
				// "If map entries that have not yet been reached are deleted during
				// iteration, the corresponding iteration values will not be produced."
				delete(common, k)
			}
		}
	}

	for _, el := range vector {
		for k := range el.Metric {
			if _, ok := common[k]; ok {
				delete(el.Metric, k)
			}
		}
	}
	return vector
}

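// sampleVectorImpl ignores its arguments and returns a fixed, hard-coded
// vector of samples stamped with the evaluation timestamp, apparently
// intended for testing and demo purposes rather than for real queries.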
// === sampleVector() Vector ===
func sampleVectorImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	return Vector{
		&clientmodel.Sample{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: "http_requests",
				clientmodel.JobLabel:        "api-server",
				"instance":                  "0",
			},
			Value:     10,
			Timestamp: timestamp,
		},
		&clientmodel.Sample{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: "http_requests",
				clientmodel.JobLabel:        "api-server",
				"instance":                  "1",
			},
			Value:     20,
			Timestamp: timestamp,
		},
		&clientmodel.Sample{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: "http_requests",
				clientmodel.JobLabel:        "api-server",
				"instance":                  "2",
			},
			Value:     30,
			Timestamp: timestamp,
		},
		&clientmodel.Sample{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: "http_requests",
				clientmodel.JobLabel:        "api-server",
				"instance":                  "3",
				"group":                     "canary",
			},
			Value:     40,
			Timestamp: timestamp,
		},
		&clientmodel.Sample{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: "http_requests",
				clientmodel.JobLabel:        "api-server",
				"instance":                  "2",
				"group":                     "canary",
			},
			Value:     40,
			Timestamp: timestamp,
		},
		&clientmodel.Sample{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: "http_requests",
				clientmodel.JobLabel:        "api-server",
				"instance":                  "3",
				"group":                     "mytest",
			},
			Value:     40,
			Timestamp: timestamp,
		},
		&clientmodel.Sample{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: "http_requests",
				clientmodel.JobLabel:        "api-server",
				"instance":                  "3",
				"group":                     "mytest",
			},
			Value:     40,
			Timestamp: timestamp,
		},
	}
}

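// scalarImpl converts a single-element vector into a scalar; for an input
// with zero or more than one element it returns NaN.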
// === scalar(node VectorNode) Scalar ===
func scalarImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	v := args[0].(VectorNode).Eval(timestamp)
	if len(v) != 1 {
		return clientmodel.SampleValue(math.NaN())
	}
	return clientmodel.SampleValue(v[0].Value)
}

// === count_scalar(vector VectorNode) clientmodel.SampleValue ===
func countScalarImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	return clientmodel.SampleValue(len(args[0].(VectorNode).Eval(timestamp)))
}

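// aggrOverTime evaluates the matrix argument and collapses each series into
// a single sample at the evaluation timestamp by applying aggrFn to the
// series' values. Series without any values in the range are skipped.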
func aggrOverTime(timestamp clientmodel.Timestamp, args []Node, aggrFn func(metric.Values) clientmodel.SampleValue) interface{} {
	n := args[0].(MatrixNode)
	matrixVal := n.Eval(timestamp)
	resultVector := Vector{}

	for _, el := range matrixVal {
		if len(el.Values) == 0 {
			continue
		}

		resultVector = append(resultVector, &clientmodel.Sample{
			Metric:    el.Metric,
			Value:     aggrFn(el.Values),
			Timestamp: timestamp,
		})
	}
	return resultVector
}

// === avg_over_time(matrix MatrixNode) Vector ===
func avgOverTimeImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	return aggrOverTime(timestamp, args, func(values metric.Values) clientmodel.SampleValue {
		var sum clientmodel.SampleValue
		for _, v := range values {
			sum += v.Value
		}
		return sum / clientmodel.SampleValue(len(values))
	})
}

// === count_over_time(matrix MatrixNode) Vector ===
func countOverTimeImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	return aggrOverTime(timestamp, args, func(values metric.Values) clientmodel.SampleValue {
		return clientmodel.SampleValue(len(values))
	})
}

// === max_over_time(matrix MatrixNode) Vector ===
func maxOverTimeImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	return aggrOverTime(timestamp, args, func(values metric.Values) clientmodel.SampleValue {
		max := math.Inf(-1)
		for _, v := range values {
			max = math.Max(max, float64(v.Value))
		}
		return clientmodel.SampleValue(max)
	})
}

// === min_over_time(matrix MatrixNode) Vector ===
func minOverTimeImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	return aggrOverTime(timestamp, args, func(values metric.Values) clientmodel.SampleValue {
		min := math.Inf(1)
		for _, v := range values {
			min = math.Min(min, float64(v.Value))
		}
		return clientmodel.SampleValue(min)
	})
}

// === sum_over_time(matrix MatrixNode) Vector ===
func sumOverTimeImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	return aggrOverTime(timestamp, args, func(values metric.Values) clientmodel.SampleValue {
		var sum clientmodel.SampleValue
		for _, v := range values {
			sum += v.Value
		}
		return sum
	})
}

// === abs(vector VectorNode) Vector ===
func absImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
	n := args[0].(VectorNode)
	vector := n.Eval(timestamp)
	for _, el := range vector {
		el.Value = clientmodel.SampleValue(math.Abs(float64(el.Value)))
	}
	return vector
}

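// functions is the registry of all built-in functions of the expression
// language, keyed by the name under which they are callable in expressions.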
var functions = map[string]*Function{
	"abs": {
		name:       "abs",
		argTypes:   []ExprType{VECTOR},
		returnType: VECTOR,
		callFn:     absImpl,
	},
	"avg_over_time": {
		name:       "avg_over_time",
		argTypes:   []ExprType{MATRIX},
		returnType: VECTOR,
		callFn:     avgOverTimeImpl,
	},
	"bottomk": {
		name:       "bottomk",
		argTypes:   []ExprType{SCALAR, VECTOR},
		returnType: VECTOR,
		callFn:     bottomkImpl,
	},
	"count_over_time": {
		name:       "count_over_time",
		argTypes:   []ExprType{MATRIX},
		returnType: VECTOR,
		callFn:     countOverTimeImpl,
	},
	"count_scalar": {
		name:       "count_scalar",
		argTypes:   []ExprType{VECTOR},
		returnType: SCALAR,
		callFn:     countScalarImpl,
	},
	"delta": {
		name:       "delta",
		argTypes:   []ExprType{MATRIX, SCALAR},
		returnType: VECTOR,
		callFn:     deltaImpl,
	},
	"drop_common_labels": {
		name:       "drop_common_labels",
		argTypes:   []ExprType{VECTOR},
		returnType: VECTOR,
		callFn:     dropCommonLabelsImpl,
	},
	"max_over_time": {
		name:       "max_over_time",
		argTypes:   []ExprType{MATRIX},
		returnType: VECTOR,
		callFn:     maxOverTimeImpl,
	},
	"min_over_time": {
		name:       "min_over_time",
		argTypes:   []ExprType{MATRIX},
		returnType: VECTOR,
		callFn:     minOverTimeImpl,
	},
	"rate": {
		name:       "rate",
		argTypes:   []ExprType{MATRIX},
		returnType: VECTOR,
		callFn:     rateImpl,
	},
	"sampleVector": {
		name:       "sampleVector",
		argTypes:   []ExprType{},
		returnType: VECTOR,
		callFn:     sampleVectorImpl,
	},
	"scalar": {
		name:       "scalar",
		argTypes:   []ExprType{VECTOR},
		returnType: SCALAR,
		callFn:     scalarImpl,
	},
	"sort": {
		name:       "sort",
		argTypes:   []ExprType{VECTOR},
		returnType: VECTOR,
		callFn:     sortImpl,
	},
	"sort_desc": {
		name:       "sort_desc",
		argTypes:   []ExprType{VECTOR},
		returnType: VECTOR,
		callFn:     sortDescImpl,
	},
	"sum_over_time": {
		name:       "sum_over_time",
		argTypes:   []ExprType{MATRIX},
		returnType: VECTOR,
		callFn:     sumOverTimeImpl,
	},
	"time": {
		name:       "time",
		argTypes:   []ExprType{},
		returnType: SCALAR,
		callFn:     timeImpl,
	},
	"topk": {
		name:       "topk",
		argTypes:   []ExprType{SCALAR, VECTOR},
		returnType: VECTOR,
		callFn:     topkImpl,
	},
}

// GetFunction returns a predefined Function object for the given name.
func GetFunction(name string) (*Function, error) {
	function, ok := functions[name]
	if !ok {
		return nil, fmt.Errorf("couldn't find function %v()", name)
	}
	return function, nil
}
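
// A minimal usage sketch (assuming an already-parsed argument list "args"
// and an evaluation timestamp "ts"; error handling elided):
//
//	fn, err := GetFunction("rate")
//	if err != nil {
//		// unknown function name
//	}
//	if err := fn.CheckArgTypes(args); err != nil {
//		// wrong argument count or types
//	}
//	result := fn.callFn(ts, args) // a Vector, per fn.returnType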