Merge pull request #641 from prometheus/stddev

Add stddev and stdvar aggregation functions.
Julius Volz 2015-04-17 01:36:44 +02:00
commit e681a57d73
12 changed files with 781 additions and 573 deletions

View file

@@ -66,9 +66,10 @@ type Vector []*Sample
 type Matrix []SampleStream

 type groupedAggregation struct {
-	labels     clientmodel.COWMetric
-	value      clientmodel.SampleValue
-	groupCount int
+	labels           clientmodel.COWMetric
+	value            clientmodel.SampleValue
+	valuesSquaredSum clientmodel.SampleValue
+	groupCount       int
 }

 // ----------------------------------------------------------------------------
@@ -128,6 +129,8 @@ const (
 	Min
 	Max
 	Count
+	Stdvar
+	Stddev
 )

 // ----------------------------------------------------------------------------
@@ -468,6 +471,12 @@ func (node *VectorAggregation) groupedAggregationsToVector(aggregations map[uint
 			aggregation.value = aggregation.value / clientmodel.SampleValue(aggregation.groupCount)
 		case Count:
 			aggregation.value = clientmodel.SampleValue(aggregation.groupCount)
+		case Stdvar:
+			avg := float64(aggregation.value) / float64(aggregation.groupCount)
+			aggregation.value = clientmodel.SampleValue(float64(aggregation.valuesSquaredSum)/float64(aggregation.groupCount) - avg*avg)
+		case Stddev:
+			avg := float64(aggregation.value) / float64(aggregation.groupCount)
+			aggregation.value = clientmodel.SampleValue(math.Sqrt(float64(aggregation.valuesSquaredSum)/float64(aggregation.groupCount) - avg*avg))
 		default:
 			// For other aggregations, we already have the right value.
 		}
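The two new cases derive the population variance from the running sums kept on groupedAggregation, via Var(X) = E[X^2] - (E[X])^2, with stddev as its square root. A minimal, self-contained sketch of that finalization step in plain Go (the function and names below are illustrative, not the Prometheus types):

package main

import (
	"fmt"
	"math"
)

// populationMoments mirrors the finalization step above: from a running sum,
// a running sum of squares, and a sample count it derives the population
// variance E[X^2] - (E[X])^2 and the corresponding standard deviation.
func populationMoments(sum, squaredSum float64, count int) (stdvar, stddev float64) {
	avg := sum / float64(count)
	stdvar = squaredSum/float64(count) - avg*avg
	return stdvar, math.Sqrt(stdvar)
}

func main() {
	// For the two values 1 and 3: mean 2, variance 1, stddev 1.
	fmt.Println(populationMoments(1+3, 1*1+3*3, 2))
}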
@@ -509,6 +518,10 @@ func (node *VectorAggregation) Eval(timestamp clientmodel.Timestamp) Vector {
 				}
 			case Count:
 				groupedResult.groupCount++
+			case Stdvar, Stddev:
+				groupedResult.value += sample.Value
+				groupedResult.valuesSquaredSum += sample.Value * sample.Value
+				groupedResult.groupCount++
 			default:
 				panic("Unknown aggregation type")
 			}
@@ -529,9 +542,10 @@ func (node *VectorAggregation) Eval(timestamp clientmodel.Timestamp) Vector {
 				}
 			}
 			result[groupingKey] = &groupedAggregation{
-				labels:     m,
-				value:      sample.Value,
-				groupCount: 1,
+				labels:           m,
+				value:            sample.Value,
+				valuesSquaredSum: sample.Value * sample.Value,
+				groupCount:       1,
 			}
 		}
 	}
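The accumulation side is symmetric: the first sample seen for a grouping key seeds value, valuesSquaredSum, and groupCount, and every further sample is folded into the same three fields before the finalization step runs. A hedged sketch of that grouping flow, using a plain string key and float64 values in place of the real metric, fingerprint, and SampleValue types (the sample values are an assumption, chosen to line up with the stddev-by-instance test expectations further down):

package main

import (
	"fmt"
	"math"
)

// sample stands in for a single input sample: one value plus the label we group by.
type sample struct {
	instance string
	value    float64
}

// acc stands in for groupedAggregation: the three running fields per group.
type acc struct {
	sum, squaredSum float64
	count           int
}

func main() {
	samples := []sample{
		{"0", 100}, {"1", 200}, {"0", 300}, {"1", 400},
		{"0", 500}, {"1", 600}, {"0", 700}, {"1", 800},
	}

	// Accumulate: seed a group on first sight, then fold each further sample
	// into sum, sum of squares, and count, as in the Eval loop above.
	groups := map[string]*acc{}
	for _, s := range samples {
		g, ok := groups[s.instance]
		if !ok {
			g = &acc{}
			groups[s.instance] = g
		}
		g.sum += s.value
		g.squaredSum += s.value * s.value
		g.count++
	}

	// Finalize: population variance and standard deviation per group.
	for instance, g := range groups {
		avg := g.sum / float64(g.count)
		stdvar := g.squaredSum/float64(g.count) - avg*avg
		fmt.Printf("{instance=%q} stdvar=%g stddev=%g\n", instance, stdvar, math.Sqrt(stdvar))
	}
}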

View file

@@ -60,11 +60,13 @@ func (opType BinOpType) String() string {

 func (aggrType AggrType) String() string {
 	aggrTypeMap := map[AggrType]string{
-		Sum:   "SUM",
-		Avg:   "AVG",
-		Min:   "MIN",
-		Max:   "MAX",
-		Count: "COUNT",
+		Sum:    "SUM",
+		Avg:    "AVG",
+		Min:    "MIN",
+		Max:    "MAX",
+		Count:  "COUNT",
+		Stdvar: "STDVAR",
+		Stddev: "STDDEV",
 	}
 	return aggrTypeMap[aggrType]
 }

View file

@@ -78,11 +78,13 @@ func NewVectorAggregation(aggrTypeStr string, vector ast.Node, groupBy clientmod
 		return nil, fmt.Errorf("operand of %v aggregation must be of vector type", aggrTypeStr)
 	}
 	var aggrTypes = map[string]ast.AggrType{
-		"SUM":   ast.Sum,
-		"MAX":   ast.Max,
-		"MIN":   ast.Min,
-		"AVG":   ast.Avg,
-		"COUNT": ast.Count,
+		"SUM":    ast.Sum,
+		"MAX":    ast.Max,
+		"MIN":    ast.Min,
+		"AVG":    ast.Avg,
+		"COUNT":  ast.Count,
+		"STDVAR": ast.Stdvar,
+		"STDDEV": ast.Stddev,
 	}
 	aggrType, ok := aggrTypes[aggrTypeStr]
 	if !ok {

View file

@@ -89,8 +89,8 @@ GROUP_LEFT|GROUP_RIGHT		lval.str = lexer.token(); return MATCH_MOD
 group_left|group_right		lval.str = strings.ToUpper(lexer.token()); return MATCH_MOD
 KEEPING_EXTRA|keeping_extra	return KEEPING_EXTRA
 OFFSET|offset			return OFFSET
-AVG|SUM|MAX|MIN|COUNT			lval.str = lexer.token(); return AGGR_OP
-avg|sum|max|min|count			lval.str = strings.ToUpper(lexer.token()); return AGGR_OP
+AVG|SUM|MAX|MIN|COUNT|STDVAR|STDDEV	lval.str = lexer.token(); return AGGR_OP
+avg|sum|max|min|count|stdvar|stddev	lval.str = strings.ToUpper(lexer.token()); return AGGR_OP
 \<|>|AND|OR|and|or		lval.str = strings.ToUpper(lexer.token()); return CMP_OP
 ==|!=|>=|<=|=~|!~		lval.str = lexer.token(); return CMP_OP
 [+\-]				lval.str = lexer.token(); return ADDITIVE_OP
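With the lexer rule and the parser map extended, both spellings reach the parser as AGGR_OP tokens (the lowercase forms are upper-cased before lookup), so expressions such as stdvar(http_requests), STDDEV(http_requests), or the grouped form stddev by (instance)(http_requests) parse the same way, as exercised by the test cases below.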

File diff suppressed because it is too large

View file

@@ -99,7 +99,7 @@ type ruleManager struct {
 	notificationHandler *notification.NotificationHandler

 	prometheusURL string
-	pathPrefix string
+	pathPrefix    string
 }

 // RuleManagerOptions bundles options for the RuleManager.
@@ -111,7 +111,7 @@ type RuleManagerOptions struct {
 	SampleAppender storage.SampleAppender

 	PrometheusURL string
-	PathPrefix string
+	PathPrefix    string
 }

 // NewRuleManager returns an implementation of RuleManager, ready to be started

View file

@@ -1197,13 +1197,13 @@ func TestExpressions(t *testing.T) {
 			`{group="canary", instance="0", job="api-server"} => NaN @[%v]`,
 		},
 	},
-	{
-		expr: `sqrt(vector_matching_a)`,
-		output: []string{
-			`{l="x"} => 3.1622776601683795 @[%v]`,
-			`{l="y"} => 4.47213595499958 @[%v]`,
-		},
-	},
+	{
+		expr: `sqrt(vector_matching_a)`,
+		output: []string{
+			`{l="x"} => 3.1622776601683795 @[%v]`,
+			`{l="y"} => 4.47213595499958 @[%v]`,
+		},
+	},
 	{
 		expr: `exp(vector_matching_a)`,
 		output: []string{
@@ -1295,6 +1295,32 @@ func TestExpressions(t *testing.T) {
 			`{l="y"} => -Inf @[%v]`,
 		},
 	},
+	{
+		expr: `stddev(http_requests)`,
+		output: []string{
+			`{} => 229.12878474779 @[%v]`,
+		},
+	},
+	{
+		expr: `stddev by (instance)(http_requests)`,
+		output: []string{
+			`{instance="0"} => 223.60679774998 @[%v]`,
+			`{instance="1"} => 223.60679774998 @[%v]`,
+		},
+	},
+	{
+		expr: `stdvar(http_requests)`,
+		output: []string{
+			`{} => 52500 @[%v]`,
+		},
+	},
+	{
+		expr: `stdvar by (instance)(http_requests)`,
+		output: []string{
+			`{instance="0"} => 50000 @[%v]`,
+			`{instance="1"} => 50000 @[%v]`,
+		},
+	},
 }

 storage, closer := newTestStorage(t)
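As a sanity check on these expected values, assuming the http_requests test fixture holds the eight sample values 100, 200, ..., 800: the mean is 450 and the mean of squares is 2040000/8 = 255000, so stdvar = 255000 - 450*450 = 52500 and stddev = sqrt(52500) ≈ 229.1288. Grouped by instance, each group holds four values (100, 300, 500, 700 or 200, 400, 600, 800), giving a variance of 50000 and a standard deviation of sqrt(50000) ≈ 223.6068, matching the outputs above.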

View file

@@ -219,7 +219,7 @@ func NewTemplateExpander(text string, name string, data interface{}, timestamp c
 				return fmt.Sprintf("%.4g%ss", v, prefix)
 			},
 			"pathPrefix": func() string {
-				return pathPrefix;
+				return pathPrefix
 			},
 		},
 	}

View file

@@ -37,13 +37,13 @@ func (msrv *MetricsService) RegisterHandler(pathPrefix string) {
 			Handler: http.HandlerFunc(h),
 		}
 	}
-	http.Handle(pathPrefix + "api/query", prometheus.InstrumentHandler(
-		pathPrefix + "api/query", handler(msrv.Query),
+	http.Handle(pathPrefix+"api/query", prometheus.InstrumentHandler(
+		pathPrefix+"api/query", handler(msrv.Query),
 	))
-	http.Handle(pathPrefix + "api/query_range", prometheus.InstrumentHandler(
-		pathPrefix + "api/query_range", handler(msrv.QueryRange),
+	http.Handle(pathPrefix+"api/query_range", prometheus.InstrumentHandler(
+		pathPrefix+"api/query_range", handler(msrv.QueryRange),
 	))
-	http.Handle(pathPrefix + "api/metrics", prometheus.InstrumentHandler(
-		pathPrefix + "api/metrics", handler(msrv.Metrics),
+	http.Handle(pathPrefix+"api/metrics", prometheus.InstrumentHandler(
+		pathPrefix+"api/metrics", handler(msrv.Metrics),
 	))
 }

View file

@@ -33,7 +33,7 @@ var (

 // ConsolesHandler implements http.Handler.
 type ConsolesHandler struct {
-	Storage local.Storage
+	Storage    local.Storage
 	PathPrefix string
 }

View file

@@ -32,7 +32,7 @@ type PrometheusStatusHandler struct {
 	RuleManager manager.RuleManager
 	TargetPools map[string]*retrieval.TargetPool

-	Birth time.Time
+	Birth      time.Time
 	PathPrefix string
 }

View file

@@ -63,39 +63,39 @@ func (ws WebService) ServeForever(pathPrefix string) error {
 	http.Handle(pathPrefix, prometheus.InstrumentHandler(
 		pathPrefix, ws.StatusHandler,
 	))
-	http.Handle(pathPrefix + "alerts", prometheus.InstrumentHandler(
-		pathPrefix + "alerts", ws.AlertsHandler,
+	http.Handle(pathPrefix+"alerts", prometheus.InstrumentHandler(
+		pathPrefix+"alerts", ws.AlertsHandler,
 	))
-	http.Handle(pathPrefix + "consoles/", prometheus.InstrumentHandler(
-		pathPrefix + "consoles/", http.StripPrefix(pathPrefix + "consoles/", ws.ConsolesHandler),
+	http.Handle(pathPrefix+"consoles/", prometheus.InstrumentHandler(
+		pathPrefix+"consoles/", http.StripPrefix(pathPrefix+"consoles/", ws.ConsolesHandler),
 	))
-	http.Handle(pathPrefix + "graph", prometheus.InstrumentHandler(
-		pathPrefix + "graph", ws.GraphsHandler,
+	http.Handle(pathPrefix+"graph", prometheus.InstrumentHandler(
+		pathPrefix+"graph", ws.GraphsHandler,
 	))
-	http.Handle(pathPrefix + "heap", prometheus.InstrumentHandler(
-		pathPrefix + "heap", http.HandlerFunc(dumpHeap),
+	http.Handle(pathPrefix+"heap", prometheus.InstrumentHandler(
+		pathPrefix+"heap", http.HandlerFunc(dumpHeap),
 	))

 	ws.MetricsHandler.RegisterHandler(pathPrefix)
-	http.Handle(pathPrefix + strings.TrimLeft(*metricsPath, "/"), prometheus.Handler())
+	http.Handle(pathPrefix+strings.TrimLeft(*metricsPath, "/"), prometheus.Handler())

 	if *useLocalAssets {
-		http.Handle(pathPrefix + "static/", prometheus.InstrumentHandler(
-			pathPrefix + "static/", http.StripPrefix(pathPrefix + "static/", http.FileServer(http.Dir("web/static"))),
+		http.Handle(pathPrefix+"static/", prometheus.InstrumentHandler(
+			pathPrefix+"static/", http.StripPrefix(pathPrefix+"static/", http.FileServer(http.Dir("web/static"))),
 		))
 	} else {
-		http.Handle(pathPrefix + "static/", prometheus.InstrumentHandler(
-			pathPrefix + "static/", http.StripPrefix(pathPrefix + "static/", new(blob.Handler)),
+		http.Handle(pathPrefix+"static/", prometheus.InstrumentHandler(
+			pathPrefix+"static/", http.StripPrefix(pathPrefix+"static/", new(blob.Handler)),
 		))
 	}

 	if *userAssetsPath != "" {
-		http.Handle(pathPrefix + "user/", prometheus.InstrumentHandler(
-			pathPrefix + "user/", http.StripPrefix(pathPrefix + "user/", http.FileServer(http.Dir(*userAssetsPath))),
+		http.Handle(pathPrefix+"user/", prometheus.InstrumentHandler(
+			pathPrefix+"user/", http.StripPrefix(pathPrefix+"user/", http.FileServer(http.Dir(*userAssetsPath))),
 		))
 	}

 	if *enableQuit {
-		http.Handle(pathPrefix + "-/quit", http.HandlerFunc(ws.quitHandler))
+		http.Handle(pathPrefix+"-/quit", http.HandlerFunc(ws.quitHandler))
 	}

 	if pathPrefix != "/" {