From e94b0899ee9dd2f7cefb8447806891a9ac90b388 Mon Sep 17 00:00:00 2001 From: Fabian Reinartz Date: Thu, 29 Dec 2016 17:31:14 +0100 Subject: [PATCH] rules: fix tests, remove model types --- pkg/labels/labels.go | 1 + promql/value.go | 6 ++++-- rules/alerting.go | 17 +++++++++-------- rules/alerting_test.go | 4 ++-- rules/manager.go | 14 +++++++------- rules/manager_test.go | 40 ++++++++++++++++++++++------------------ rules/recording.go | 10 ++++++---- rules/recording_test.go | 17 ++++++++--------- 8 files changed, 59 insertions(+), 50 deletions(-) diff --git a/pkg/labels/labels.go b/pkg/labels/labels.go index b4d6f51ba..0c65a4ff4 100644 --- a/pkg/labels/labels.go +++ b/pkg/labels/labels.go @@ -39,6 +39,7 @@ func (ls Labels) String() string { for i, l := range ls { if i > 0 { b.WriteByte(',') + b.WriteByte(' ') } b.WriteString(l.Name) b.WriteByte('=') diff --git a/promql/value.go b/promql/value.go index 59bac5125..317a673c1 100644 --- a/promql/value.go +++ b/promql/value.go @@ -53,7 +53,8 @@ type Scalar struct { } func (s Scalar) String() string { - return fmt.Sprintf("scalar: %v @[%v]", s.V, s.T) + v := strconv.FormatFloat(s.V, 'f', -1, 64) + return fmt.Sprintf("scalar: %v @[%v]", v, s.T) } func (s Scalar) MarshalJSON() ([]byte, error) { @@ -82,7 +83,8 @@ type Point struct { } func (p Point) String() string { - return fmt.Sprintf("%f @[%d]", p.V, p.T) + v := strconv.FormatFloat(p.V, 'f', -1, 64) + return fmt.Sprintf("%v @[%v]", v, p.T) } // MarshalJSON implements json.Marshaler. diff --git a/rules/alerting.go b/rules/alerting.go index 14a3f5a19..ef054d3f0 100644 --- a/rules/alerting.go +++ b/rules/alerting.go @@ -26,6 +26,7 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/pkg/labels" + "github.com/prometheus/prometheus/pkg/timestamp" "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/template" "github.com/prometheus/prometheus/util/strutil" @@ -78,7 +79,7 @@ type Alert struct { Value float64 // The interval during which the condition of this alert held true. // ResolvedAt will be 0 to indicate a still active alert. - ActiveAt, ResolvedAt model.Time + ActiveAt, ResolvedAt time.Time } // An AlertingRule generates alerts from its vector expression. @@ -123,7 +124,7 @@ func (r *AlertingRule) equal(o *AlertingRule) bool { return r.name == o.name && labels.Equal(r.labels, o.labels) } -func (r *AlertingRule) sample(alert *Alert, ts model.Time, set bool) promql.Sample { +func (r *AlertingRule) sample(alert *Alert, ts time.Time, set bool) promql.Sample { lb := labels.NewBuilder(r.labels) for _, l := range alert.Labels { @@ -136,7 +137,7 @@ func (r *AlertingRule) sample(alert *Alert, ts model.Time, set bool) promql.Samp s := promql.Sample{ Metric: lb.Labels(), - Point: promql.Point{T: int64(ts), V: 0}, + Point: promql.Point{T: timestamp.FromTime(ts), V: 0}, } if set { s.V = 1 @@ -150,8 +151,8 @@ const resolvedRetention = 15 * time.Minute // Eval evaluates the rule expression and then creates pending alerts and fires // or removes previously pending alerts accordingly. 
-func (r *AlertingRule) Eval(ctx context.Context, ts model.Time, engine *promql.Engine, externalURLPath string) (promql.Vector, error) { - query, err := engine.NewInstantQuery(r.vector.String(), ts.Time()) +func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, externalURLPath string) (promql.Vector, error) { + query, err := engine.NewInstantQuery(r.vector.String(), ts) if err != nil { return nil, err } @@ -191,7 +192,7 @@ func (r *AlertingRule) Eval(ctx context.Context, ts model.Time, engine *promql.E defs+string(text), "__alert_"+r.Name(), tmplData, - ts, + model.Time(timestamp.FromTime(ts)), engine, externalURLPath, ) @@ -244,7 +245,7 @@ func (r *AlertingRule) Eval(ctx context.Context, ts model.Time, engine *promql.E } // If the alert was previously firing, keep it around for a given // retention time so it is reported as resolved to the AlertManager. - if a.State == StatePending || (a.ResolvedAt != 0 && ts.Sub(a.ResolvedAt) > resolvedRetention) { + if a.State == StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > resolvedRetention) { delete(r.active, fp) } if a.State != StateInactive { @@ -284,7 +285,7 @@ func (r *AlertingRule) State() AlertState { func (r *AlertingRule) ActiveAlerts() []*Alert { var res []*Alert for _, a := range r.currentAlerts() { - if a.ResolvedAt == 0 { + if a.ResolvedAt.IsZero() { res = append(res, a) } } diff --git a/rules/alerting_test.go b/rules/alerting_test.go index 194f316f7..ee5f100ca 100644 --- a/rules/alerting_test.go +++ b/rules/alerting_test.go @@ -16,7 +16,7 @@ package rules import ( "testing" - "github.com/prometheus/common/model" + "github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/promql" ) @@ -25,7 +25,7 @@ func TestAlertingRuleHTMLSnippet(t *testing.T) { if err != nil { t.Fatal(err) } - rule := NewAlertingRule("testrule", expr, 0, model.LabelSet{"html": "BOLD"}, model.LabelSet{"html": "BOLD"}) + rule := NewAlertingRule("testrule", expr, 0, labels.FromStrings("html", "BOLD"), labels.FromStrings("html", "BOLD")) const want = `ALERT testrule IF foo{html="<b>BOLD<b>"} diff --git a/rules/manager.go b/rules/manager.go index d24b4eec2..7ad433d2f 100644 --- a/rules/manager.go +++ b/rules/manager.go @@ -105,7 +105,7 @@ const ( type Rule interface { Name() string // eval evaluates the rule, including any associated recording or alerting actions. - Eval(context.Context, model.Time, *promql.Engine, string) (promql.Vector, error) + Eval(context.Context, time.Time, *promql.Engine, string) (promql.Vector, error) // String returns a human-readable string representation of the rule. String() string // HTMLSnippet returns a human-readable string representation of the rule, @@ -238,7 +238,7 @@ func typeForRule(r Rule) ruleType { // rule dependency. func (g *Group) Eval() { var ( - now = model.Now() + now = time.Now() wg sync.WaitGroup ) @@ -268,7 +268,7 @@ func (g *Group) Eval() { } if ar, ok := rule.(*AlertingRule); ok { - g.sendAlerts(ar, now) + g.sendAlerts(ar) } var ( numOutOfOrder = 0 @@ -304,7 +304,7 @@ func (g *Group) Eval() { } // sendAlerts sends alert notifications for the given rule. 
-func (g *Group) sendAlerts(rule *AlertingRule, timestamp model.Time) error {
+func (g *Group) sendAlerts(rule *AlertingRule) error {
 	var alerts []*notifier.Alert
 
 	for _, alert := range rule.currentAlerts() {
@@ -314,13 +314,13 @@ func (g *Group) sendAlerts(rule *AlertingRule, timestamp model.Time) error {
 		}
 
 		a := &notifier.Alert{
-			StartsAt:     alert.ActiveAt.Add(rule.holdDuration).Time(),
+			StartsAt:     alert.ActiveAt.Add(rule.holdDuration),
 			Labels:       alert.Labels,
 			Annotations:  alert.Annotations,
 			GeneratorURL: g.opts.ExternalURL.String() + strutil.GraphLinkForExpression(rule.vector.String()),
 		}
-		if alert.ResolvedAt != 0 {
-			a.EndsAt = alert.ResolvedAt.Time()
+		if !alert.ResolvedAt.IsZero() {
+			a.EndsAt = alert.ResolvedAt
 		}
 
 		alerts = append(alerts, a)
diff --git a/rules/manager_test.go b/rules/manager_test.go
index d229f8b96..f9c156438 100644
--- a/rules/manager_test.go
+++ b/rules/manager_test.go
@@ -21,6 +21,8 @@ import (
 
 	"github.com/prometheus/common/model"
 
+	"github.com/prometheus/prometheus/pkg/labels"
+	"github.com/prometheus/prometheus/pkg/timestamp"
 	"github.com/prometheus/prometheus/promql"
 )
 
@@ -48,10 +50,12 @@ func TestAlertingRule(t *testing.T) {
 		"HTTPRequestRateLow",
 		expr,
 		time.Minute,
-		model.LabelSet{"severity": "{{\"c\"}}ritical"},
-		model.LabelSet{},
+		labels.FromStrings("severity", "{{\"c\"}}ritical"),
+		nil,
 	)
 
+	baseTime := time.Unix(0, 0)
+
 	var tests = []struct {
 		time   time.Duration
 		result []string
@@ -59,28 +63,28 @@ func TestAlertingRule(t *testing.T) {
 		{
 			time: 0,
 			result: []string{
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
 			},
 		},
 		{
 			time: 5 * time.Minute,
 			result: []string{
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
 			},
 		},
 		{
 			time: 10 * time.Minute,
 			result: []string{
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
 			},
 		},
 		{
 			time: 15 * time.Minute,
 			result: []string{
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
 			},
 		},
 		{
@@ -90,20 +94,20 @@ func TestAlertingRule(t *testing.T) {
 		{
 			time: 25 * time.Minute,
 			result: []string{
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
 			},
 		},
 		{
 			time: 30 * time.Minute,
 			result: []string{
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
-				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
+				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
 			},
 		},
 	}
 
 	for i, test := range tests {
-		evalTime := model.Time(0).Add(test.time)
+		evalTime := baseTime.Add(test.time)
 
 		res, err := rule.Eval(suite.Context(), evalTime, suite.QueryEngine(), "")
 		if err != nil {
@@ -138,17 +142,17 @@ func TestAlertingRule(t *testing.T) {
 		}
 
 		for _, aa := range rule.ActiveAlerts() {
-			if _, ok := aa.Labels[model.MetricNameLabel]; ok {
+			if v := aa.Labels.Get(model.MetricNameLabel); v != "" {
 				t.Fatalf("%s label set on active alert: %s", model.MetricNameLabel, aa.Labels)
 			}
 		}
 	}
 }
 
-func annotateWithTime(lines []string, timestamp model.Time) []string {
+func annotateWithTime(lines []string, ts time.Time) []string {
 	annotatedLines := []string{}
 	for _, line := range lines {
-		annotatedLines = append(annotatedLines, fmt.Sprintf(line, timestamp))
+		annotatedLines = append(annotatedLines, fmt.Sprintf(line, timestamp.FromTime(ts)))
 	}
 	return annotatedLines
 }
diff --git a/rules/recording.go b/rules/recording.go
index 2a3124875..d6cb70acc 100644
--- a/rules/recording.go
+++ b/rules/recording.go
@@ -16,8 +16,8 @@ package rules
 import (
 	"fmt"
 	"html/template"
+	"time"
 
-	"github.com/prometheus/common/model"
 	"golang.org/x/net/context"
 
 	"github.com/prometheus/prometheus/pkg/labels"
@@ -47,8 +47,8 @@ func (rule RecordingRule) Name() string {
 }
 
 // Eval evaluates the rule and then overrides the metric names and labels accordingly.
-func (rule RecordingRule) Eval(ctx context.Context, timestamp model.Time, engine *promql.Engine, _ string) (promql.Vector, error) { - query, err := engine.NewInstantQuery(rule.vector.String(), timestamp.Time()) +func (rule RecordingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, _ string) (promql.Vector, error) { + query, err := engine.NewInstantQuery(rule.vector.String(), ts) if err != nil { return nil, err } @@ -74,7 +74,9 @@ func (rule RecordingRule) Eval(ctx context.Context, timestamp model.Time, engine } // Override the metric name and labels. - for _, sample := range vector { + for i := range vector { + sample := &vector[i] + lb := labels.NewBuilder(sample.Metric) lb.Set(labels.MetricName, rule.name) diff --git a/rules/recording_test.go b/rules/recording_test.go index 279f6897b..17b43763c 100644 --- a/rules/recording_test.go +++ b/rules/recording_test.go @@ -16,24 +16,25 @@ package rules import ( "reflect" "testing" + "time" - "github.com/prometheus/common/model" "golang.org/x/net/context" "github.com/prometheus/prometheus/pkg/labels" + "github.com/prometheus/prometheus/pkg/timestamp" "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/util/testutil" ) func TestRuleEval(t *testing.T) { storage := testutil.NewStorage(t) - defer closer.Close() + defer storage.Close() engine := promql.NewEngine(storage, nil) ctx, cancelCtx := context.WithCancel(context.Background()) defer cancelCtx() - now := model.Now() + now := time.Now() suite := []struct { name string @@ -46,9 +47,8 @@ func TestRuleEval(t *testing.T) { expr: &promql.NumberLiteral{Val: 1}, labels: labels.Labels{}, result: promql.Vector{promql.Sample{ - Value: 1, - Timestamp: now, - Metric: labels.FromStrings("__name__", "nolabels"), + Metric: labels.FromStrings("__name__", "nolabels"), + Point: promql.Point{V: 1, T: timestamp.FromTime(now)}, }}, }, { @@ -56,9 +56,8 @@ func TestRuleEval(t *testing.T) { expr: &promql.NumberLiteral{Val: 1}, labels: labels.FromStrings("foo", "bar"), result: promql.Vector{promql.Sample{ - Value: 1, - Timestamp: now, - Metric: labels.FromStrings("__name__", "labels", "foo", "bar"), + Metric: labels.FromStrings("__name__", "labels", "foo", "bar"), + Point: promql.Point{V: 1, T: timestamp.FromTime(now)}, }}, }, }