Don't append a 0 when alert is no longer pending/firing

With staleness we no longer need this behaviour.
This commit is contained in:
Brian Brazil 2017-05-19 17:02:25 +01:00
parent cc867dae60
commit dcea3e4773
2 changed files with 5 additions and 18 deletions

View file

@@ -125,7 +125,7 @@ func (r *AlertingRule) equal(o *AlertingRule) bool {
 	return r.name == o.name && labels.Equal(r.labels, o.labels)
 }
 
-func (r *AlertingRule) sample(alert *Alert, ts time.Time, set bool) promql.Sample {
+func (r *AlertingRule) sample(alert *Alert, ts time.Time) promql.Sample {
 	lb := labels.NewBuilder(r.labels)
 
 	for _, l := range alert.Labels {
@@ -138,10 +138,7 @@ func (r *AlertingRule) sample(alert *Alert, ts time.Time, set bool) promql.Sampl
 	s := promql.Sample{
 		Metric: lb.Labels(),
-		Point:  promql.Point{T: timestamp.FromTime(ts), V: 0},
+		Point:  promql.Point{T: timestamp.FromTime(ts), V: 1},
 	}
-	if set {
-		s.V = 1
-	}
 	return s
 }
@@ -241,9 +238,6 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, engine *promql.En
 	// Check if any pending alerts should be removed or fire now. Write out alert timeseries.
 	for fp, a := range r.active {
 		if _, ok := resultFPs[fp]; !ok {
-			if a.State != StateInactive {
-				vec = append(vec, r.sample(a, ts, false))
-			}
 			// If the alert was previously firing, keep it around for a given
 			// retention time so it is reported as resolved to the AlertManager.
 			if a.State == StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > resolvedRetention) {
@ -257,11 +251,10 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, engine *promql.En
} }
if a.State == StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration { if a.State == StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
vec = append(vec, r.sample(a, ts, false))
a.State = StateFiring a.State = StateFiring
} }
vec = append(vec, r.sample(a, ts, true)) vec = append(vec, r.sample(a, ts))
} }
return vec, nil return vec, nil

View file

@@ -75,23 +75,18 @@ func TestAlertingRule(t *testing.T) {
 		}, {
 			time: 5 * time.Minute,
 			result: []string{
-				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
 				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
-				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
 				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
 			},
 		}, {
 			time: 10 * time.Minute,
 			result: []string{
 				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
-				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
 			},
 		},
 		{
 			time: 15 * time.Minute,
-			result: []string{
-				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
-			},
+			result: []string{},
 		},
 		{
 			time: 20 * time.Minute,
@@ -106,7 +101,6 @@ func TestAlertingRule(t *testing.T) {
 		{
 			time: 30 * time.Minute,
 			result: []string{
-				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
 				`{__name__="ALERTS", alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
 			},
 		},