rules: fix tests, remove model types

Fabian Reinartz 2016-12-29 17:31:14 +01:00
parent 68dc358496
commit e94b0899ee
8 changed files with 59 additions and 50 deletions


@@ -39,6 +39,7 @@ func (ls Labels) String() string {
for i, l := range ls {
if i > 0 {
b.WriteByte(',')
b.WriteByte(' ')
}
b.WriteString(l.Name)
b.WriteByte('=')
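
Side note (not part of the diff): with the extra WriteByte(' '), a label set now renders with a space after each comma, consistent with the expected strings in the tests further down. A minimal sketch of the effect, assuming the pkg/labels package from this repo:

    package main

    import (
        "fmt"

        "github.com/prometheus/prometheus/pkg/labels"
    )

    func main() {
        ls := labels.FromStrings("job", "app-server", "instance", "0")
        // before this change: {instance="0",job="app-server"}
        // after this change:  {instance="0", job="app-server"}
        fmt.Println(ls)
    }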


@@ -53,7 +53,8 @@ type Scalar struct {
}
func (s Scalar) String() string {
return fmt.Sprintf("scalar: %v @[%v]", s.V, s.T)
v := strconv.FormatFloat(s.V, 'f', -1, 64)
return fmt.Sprintf("scalar: %v @[%v]", v, s.T)
}
func (s Scalar) MarshalJSON() ([]byte, error) {
@@ -82,7 +83,8 @@ type Point struct {
}
func (p Point) String() string {
return fmt.Sprintf("%f @[%d]", p.V, p.T)
v := strconv.FormatFloat(p.V, 'f', -1, 64)
return fmt.Sprintf("%v @[%v]", v, p.T)
}
// MarshalJSON implements json.Marshaler.
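
For context (not part of the commit): in Point.String, %f always printed six decimal places, and in Scalar.String, %v falls back to exponent notation for large or small values. strconv.FormatFloat(v, 'f', -1, 64) instead prints a plain decimal with just enough digits to round-trip the value. An illustrative comparison:

    package main

    import (
        "fmt"
        "strconv"
    )

    func main() {
        fmt.Printf("%f\n", 1.5)                             // 1.500000 (old Point.String)
        fmt.Printf("%v\n", 1e6)                             // 1e+06    (old Scalar.String for large values)
        fmt.Println(strconv.FormatFloat(1.5, 'f', -1, 64))  // 1.5
        fmt.Println(strconv.FormatFloat(1e6, 'f', -1, 64))  // 1000000
    }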


@@ -26,6 +26,7 @@ import (
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/template"
"github.com/prometheus/prometheus/util/strutil"
@@ -78,7 +79,7 @@ type Alert struct {
Value float64
// The interval during which the condition of this alert held true.
// ResolvedAt will be 0 to indicate a still active alert.
ActiveAt, ResolvedAt model.Time
ActiveAt, ResolvedAt time.Time
}
// An AlertingRule generates alerts from its vector expression.
@@ -123,7 +124,7 @@ func (r *AlertingRule) equal(o *AlertingRule) bool {
return r.name == o.name && labels.Equal(r.labels, o.labels)
}
func (r *AlertingRule) sample(alert *Alert, ts model.Time, set bool) promql.Sample {
func (r *AlertingRule) sample(alert *Alert, ts time.Time, set bool) promql.Sample {
lb := labels.NewBuilder(r.labels)
for _, l := range alert.Labels {
@@ -136,7 +137,7 @@ func (r *AlertingRule) sample(alert *Alert, ts model.Time, set bool) promql.Samp
s := promql.Sample{
Metric: lb.Labels(),
Point: promql.Point{T: int64(ts), V: 0},
Point: promql.Point{T: timestamp.FromTime(ts), V: 0},
}
if set {
s.V = 1
@@ -150,8 +151,8 @@ const resolvedRetention = 15 * time.Minute
// Eval evaluates the rule expression and then creates pending alerts and fires
// or removes previously pending alerts accordingly.
func (r *AlertingRule) Eval(ctx context.Context, ts model.Time, engine *promql.Engine, externalURLPath string) (promql.Vector, error) {
query, err := engine.NewInstantQuery(r.vector.String(), ts.Time())
func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, externalURLPath string) (promql.Vector, error) {
query, err := engine.NewInstantQuery(r.vector.String(), ts)
if err != nil {
return nil, err
}
@@ -191,7 +192,7 @@ func (r *AlertingRule) Eval(ctx context.Context, ts model.Time, engine *promql.E
defs+string(text),
"__alert_"+r.Name(),
tmplData,
ts,
model.Time(timestamp.FromTime(ts)),
engine,
externalURLPath,
)
@@ -244,7 +245,7 @@ func (r *AlertingRule) Eval(ctx context.Context, ts model.Time, engine *promql.E
}
// If the alert was previously firing, keep it around for a given
// retention time so it is reported as resolved to the AlertManager.
if a.State == StatePending || (a.ResolvedAt != 0 && ts.Sub(a.ResolvedAt) > resolvedRetention) {
if a.State == StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > resolvedRetention) {
delete(r.active, fp)
}
if a.State != StateInactive {
@@ -284,7 +285,7 @@ func (r *AlertingRule) State() AlertState {
func (r *AlertingRule) ActiveAlerts() []*Alert {
var res []*Alert
for _, a := range r.currentAlerts() {
if a.ResolvedAt == 0 {
if a.ResolvedAt.IsZero() {
res = append(res, a)
}
}
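
For context (not part of the commit): the rules now carry plain time.Time values and convert to PromQL's int64 millisecond timestamps only at the boundary, via pkg/timestamp.FromTime; a zero time.Time takes over the role that ResolvedAt == 0 played with model.Time. A rough usage sketch, assuming FromTime is the usual Unix-millisecond conversion:

    package main

    import (
        "fmt"
        "time"

        "github.com/prometheus/prometheus/pkg/timestamp"
    )

    func main() {
        ts := time.Unix(0, 0).Add(5 * time.Minute)
        fmt.Println(timestamp.FromTime(ts)) // expected: 300000 (ms since the Unix epoch)

        // A zero time.Time now marks a still-active alert instead of ResolvedAt == 0:
        var resolvedAt time.Time
        fmt.Println(resolvedAt.IsZero()) // true
    }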


@@ -16,7 +16,7 @@ package rules
import (
"testing"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/promql"
)
@@ -25,7 +25,7 @@ func TestAlertingRuleHTMLSnippet(t *testing.T) {
if err != nil {
t.Fatal(err)
}
rule := NewAlertingRule("testrule", expr, 0, model.LabelSet{"html": "<b>BOLD</b>"}, model.LabelSet{"html": "<b>BOLD</b>"})
rule := NewAlertingRule("testrule", expr, 0, labels.FromStrings("html", "<b>BOLD</b>"), labels.FromStrings("html", "<b>BOLD</b>"))
const want = `ALERT <a href="/test/prefix/graph?g0.expr=ALERTS%7Balertname%3D%22testrule%22%7D&g0.tab=0">testrule</a>
IF <a href="/test/prefix/graph?g0.expr=foo%7Bhtml%3D%22%3Cb%3EBOLD%3Cb%3E%22%7D&g0.tab=0">foo{html=&#34;&lt;b&gt;BOLD&lt;b&gt;&#34;}</a>


@@ -105,7 +105,7 @@ const (
type Rule interface {
Name() string
// eval evaluates the rule, including any associated recording or alerting actions.
Eval(context.Context, model.Time, *promql.Engine, string) (promql.Vector, error)
Eval(context.Context, time.Time, *promql.Engine, string) (promql.Vector, error)
// String returns a human-readable string representation of the rule.
String() string
// HTMLSnippet returns a human-readable string representation of the rule,
@@ -238,7 +238,7 @@ func typeForRule(r Rule) ruleType {
// rule dependency.
func (g *Group) Eval() {
var (
now = model.Now()
now = time.Now()
wg sync.WaitGroup
)
@@ -268,7 +268,7 @@ func (g *Group) Eval() {
}
if ar, ok := rule.(*AlertingRule); ok {
g.sendAlerts(ar, now)
g.sendAlerts(ar)
}
var (
numOutOfOrder = 0
@@ -304,7 +304,7 @@ func (g *Group) Eval() {
}
// sendAlerts sends alert notifications for the given rule.
func (g *Group) sendAlerts(rule *AlertingRule, timestamp model.Time) error {
func (g *Group) sendAlerts(rule *AlertingRule) error {
var alerts []*notifier.Alert
for _, alert := range rule.currentAlerts() {
@@ -314,13 +314,13 @@ func (g *Group) sendAlerts(rule *AlertingRule, timestamp model.Time) error {
}
a := &notifier.Alert{
StartsAt: alert.ActiveAt.Add(rule.holdDuration).Time(),
StartsAt: alert.ActiveAt.Add(rule.holdDuration),
Labels: alert.Labels,
Annotations: alert.Annotations,
GeneratorURL: g.opts.ExternalURL.String() + strutil.GraphLinkForExpression(rule.vector.String()),
}
if alert.ResolvedAt != 0 {
a.EndsAt = alert.ResolvedAt.Time()
if !alert.ResolvedAt.IsZero() {
a.EndsAt = alert.ResolvedAt
}
alerts = append(alerts, a)


@@ -21,6 +21,8 @@ import (
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/promql"
)
@@ -48,10 +50,12 @@ func TestAlertingRule(t *testing.T) {
"HTTPRequestRateLow",
expr,
time.Minute,
model.LabelSet{"severity": "{{\"c\"}}ritical"},
model.LabelSet{},
labels.FromStrings("severity", "{{\"c\"}}ritical"),
nil,
)
baseTime := time.Unix(0, 0)
var tests = []struct {
time time.Duration
result []string
@@ -59,28 +63,28 @@
{
time: 0,
result: []string{
`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
},
}, {
time: 5 * time.Minute,
result: []string{
`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
},
}, {
time: 10 * time.Minute,
result: []string{
`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
},
},
{
time: 15 * time.Minute,
result: []string{
`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
},
},
{
@@ -90,20 +94,20 @@ func TestAlertingRule(t *testing.T) {
{
time: 25 * time.Minute,
result: []string{
`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
},
},
{
time: 30 * time.Minute,
result: []string{
`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
},
},
}
for i, test := range tests {
evalTime := model.Time(0).Add(test.time)
evalTime := baseTime.Add(test.time)
res, err := rule.Eval(suite.Context(), evalTime, suite.QueryEngine(), "")
if err != nil {
@@ -138,17 +142,17 @@ func TestAlertingRule(t *testing.T) {
}
for _, aa := range rule.ActiveAlerts() {
if _, ok := aa.Labels[model.MetricNameLabel]; ok {
if v := aa.Labels.Get(model.MetricNameLabel); v != "" {
t.Fatalf("%s label set on active alert: %s", model.MetricNameLabel, aa.Labels)
}
}
}
}
func annotateWithTime(lines []string, timestamp model.Time) []string {
func annotateWithTime(lines []string, ts time.Time) []string {
annotatedLines := []string{}
for _, line := range lines {
annotatedLines = append(annotatedLines, fmt.Sprintf(line, timestamp))
annotatedLines = append(annotatedLines, fmt.Sprintf(line, timestamp.FromTime(ts)))
}
return annotatedLines
}
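
For context (not part of the commit): the expected strings gained an explicit __name__="ALERTS" pair because labels.Labels prints the metric name as an ordinary label inside the braces, while the old model.Metric rendered it in front of them. An illustrative sketch of the difference:

    package main

    import (
        "fmt"

        "github.com/prometheus/prometheus/pkg/labels"
    )

    func main() {
        m := labels.FromStrings("__name__", "ALERTS", "alertstate", "pending")
        fmt.Println(m) // {__name__="ALERTS", alertstate="pending"}
        // The old model.Metric form would have read: ALERTS{alertstate="pending"}
    }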


@@ -16,8 +16,8 @@ package rules
import (
"fmt"
"html/template"
"time"
"github.com/prometheus/common/model"
"golang.org/x/net/context"
"github.com/prometheus/prometheus/pkg/labels"
@@ -47,8 +47,8 @@ func (rule RecordingRule) Name() string {
}
// Eval evaluates the rule and then overrides the metric names and labels accordingly.
func (rule RecordingRule) Eval(ctx context.Context, timestamp model.Time, engine *promql.Engine, _ string) (promql.Vector, error) {
query, err := engine.NewInstantQuery(rule.vector.String(), timestamp.Time())
func (rule RecordingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, _ string) (promql.Vector, error) {
query, err := engine.NewInstantQuery(rule.vector.String(), ts)
if err != nil {
return nil, err
}
@@ -74,7 +74,9 @@ func (rule RecordingRule) Eval(ctx context.Context, timestamp model.Time, engine
}
// Override the metric name and labels.
for _, sample := range vector {
for i := range vector {
sample := &vector[i]
lb := labels.NewBuilder(sample.Metric)
lb.Set(labels.MetricName, rule.name)
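
For context (not part of the commit): ranging over the vector by value hands the loop a copy of each promql.Sample, so the metric-name and label overrides would be written to the copy and discarded; indexing the slice and taking a pointer mutates the stored sample. A stripped-down illustration of the pitfall, using a hypothetical sample type rather than the real promql.Sample:

    package main

    import "fmt"

    type sample struct{ name string }

    func main() {
        vector := []sample{{"up"}, {"up"}}

        for _, s := range vector { // s is a copy; the write is lost
            s.name = "job:up:sum"
        }
        fmt.Println(vector) // [{up} {up}]

        for i := range vector { // address the element itself
            s := &vector[i]
            s.name = "job:up:sum"
        }
        fmt.Println(vector) // [{job:up:sum} {job:up:sum}]
    }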


@@ -16,24 +16,25 @@ package rules
import (
"reflect"
"testing"
"time"
"github.com/prometheus/common/model"
"golang.org/x/net/context"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/util/testutil"
)
func TestRuleEval(t *testing.T) {
storage := testutil.NewStorage(t)
defer closer.Close()
defer storage.Close()
engine := promql.NewEngine(storage, nil)
ctx, cancelCtx := context.WithCancel(context.Background())
defer cancelCtx()
now := model.Now()
now := time.Now()
suite := []struct {
name string
@@ -46,9 +47,8 @@ func TestRuleEval(t *testing.T) {
expr: &promql.NumberLiteral{Val: 1},
labels: labels.Labels{},
result: promql.Vector{promql.Sample{
Value: 1,
Timestamp: now,
Metric: labels.FromStrings("__name__", "nolabels"),
Metric: labels.FromStrings("__name__", "nolabels"),
Point: promql.Point{V: 1, T: timestamp.FromTime(now)},
}},
},
{
@@ -56,9 +56,8 @@ func TestRuleEval(t *testing.T) {
expr: &promql.NumberLiteral{Val: 1},
labels: labels.FromStrings("foo", "bar"),
result: promql.Vector{promql.Sample{
Value: 1,
Timestamp: now,
Metric: labels.FromStrings("__name__", "labels", "foo", "bar"),
Metric: labels.FromStrings("__name__", "labels", "foo", "bar"),
Point: promql.Point{V: 1, T: timestamp.FromTime(now)},
}},
},
}