Merge branch 'master' into beorn7/release-2.24

This commit is contained in:
beorn7 2021-01-20 02:09:25 +01:00
commit 94d70e76cd
5 changed files with 67 additions and 184 deletions

View file

@@ -15,6 +15,7 @@ package kubernetes
import (
"context"
"github.com/prometheus/prometheus/util/strutil"
"net"
"strconv"
@@ -199,6 +200,8 @@ func endpointsSourceFromNamespaceAndName(namespace, name string) string {
}
const (
endpointsLabelPrefix = metaLabelPrefix + "endpoints_label_"
endpointsLabelPresentPrefix = metaLabelPrefix + "endpoints_labelpresent_"
endpointsNameLabel = metaLabelPrefix + "endpoints_name"
endpointNodeName = metaLabelPrefix + "endpoint_node_name"
endpointHostname = metaLabelPrefix + "endpoint_hostname"
@@ -218,6 +221,12 @@ func (e *Endpoints) buildEndpoints(eps *apiv1.Endpoints) *targetgroup.Group {
endpointsNameLabel: lv(eps.Name),
}
e.addServiceLabels(eps.Namespace, eps.Name, tg)
// add endpoints labels metadata
for k, v := range eps.Labels {
ln := strutil.SanitizeLabelName(k)
tg.Labels[model.LabelName(endpointsLabelPrefix+ln)] = lv(v)
tg.Labels[model.LabelName(endpointsLabelPresentPrefix+ln)] = presentValue
}
type podEntry struct {
pod *apiv1.Pod

View file

@@ -38,7 +38,7 @@
// Example: @'http://test-alertmanager\..*'
nonNotifyingAlertmanagerRegEx: @'',
grafana: {
grafanaPrometheus: {
prefix: 'Prometheus / ',
tags: ['prometheus-mixin'],
// The default refresh time for all dashboards, default to 60s

View file

@@ -11,7 +11,7 @@ local template = grafana.template;
grafanaDashboards+:: {
'prometheus.json':
g.dashboard(
'%(prefix)sOverview' % $._config.grafana
'%(prefix)sOverview' % $._config.grafanaPrometheus
)
.addMultiTemplate('job', 'prometheus_build_info', 'job')
.addMultiTemplate('instance', 'prometheus_build_info', 'instance')
@@ -99,8 +99,8 @@ local template = grafana.template;
g.stack,
)
) + {
tags: $._config.grafana.tags,
refresh: $._config.grafana.refresh,
tags: $._config.grafanaPrometheus.tags,
refresh: $._config.grafanaPrometheus.refresh,
},
// Remote write specific dashboard.
'prometheus-remote-write.json':
@@ -293,7 +293,7 @@ local template = grafana.template;
));
dashboard.new(
title='%(prefix)sRemote Write' % $._config.grafana,
title='%(prefix)sRemote Write' % $._config.grafanaPrometheus,
editable=true
)
.addTemplate(
@@ -380,8 +380,8 @@ local template = grafana.template;
.addPanel(retriedSamples)
.addPanel(enqueueRetries)
) + {
tags: $._config.grafana.tags,
refresh: $._config.grafana.refresh,
tags: $._config.grafanaPrometheus.tags,
refresh: $._config.grafanaPrometheus.refresh,
},
},
}

View file

@@ -555,10 +555,11 @@ load 10s
err = test.Run()
require.NoError(t, err)
// These test cases should be touching the limit exactly (hence no exceeding).
// Exceeding the limit will be tested by doing -1 to the MaxSamples.
cases := []struct {
Query string
MaxSamples int
Result Result
Start time.Time
End time.Time
Interval time.Duration
@@ -567,209 +568,82 @@ load 10s
{
Query: "1",
MaxSamples: 1,
Result: Result{
nil,
Scalar{V: 1, T: 1000},
nil},
Start: time.Unix(1, 0),
},
{
Query: "1",
MaxSamples: 0,
Result: Result{
ErrTooManySamples(env),
nil,
nil,
},
Start: time.Unix(1, 0),
},
{
Query: "metric",
MaxSamples: 0,
Result: Result{
ErrTooManySamples(env),
nil,
nil,
},
Start: time.Unix(1, 0),
},
{
Start: time.Unix(1, 0),
}, {
Query: "metric",
MaxSamples: 1,
Result: Result{
nil,
Vector{
Sample{Point: Point{V: 1, T: 1000},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(1, 0),
},
{
Start: time.Unix(1, 0),
}, {
Query: "metric[20s]",
MaxSamples: 2,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 1, T: 0}, {V: 2, T: 10000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(10, 0),
},
{
Start: time.Unix(10, 0),
}, {
Query: "rate(metric[20s])",
MaxSamples: 3,
Result: Result{
nil,
Vector{
Sample{
Point: Point{V: 0.1, T: 10000},
Metric: labels.Labels{},
},
},
nil,
},
Start: time.Unix(10, 0),
},
{
Start: time.Unix(10, 0),
}, {
Query: "metric[20s:5s]",
MaxSamples: 3,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 1, T: 0}, {V: 1, T: 5000}, {V: 2, T: 10000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(10, 0),
},
{
Query: "metric[20s]",
MaxSamples: 0,
Result: Result{
ErrTooManySamples(env),
nil,
nil,
},
Start: time.Unix(10, 0),
Start: time.Unix(10, 0),
},
// Range queries.
{
Query: "1",
MaxSamples: 3,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 1, T: 0}, {V: 1, T: 1000}, {V: 1, T: 2000}},
Metric: labels.FromStrings()},
},
nil,
},
Start: time.Unix(0, 0),
End: time.Unix(2, 0),
Interval: time.Second,
},
{
Start: time.Unix(0, 0),
End: time.Unix(2, 0),
Interval: time.Second,
}, {
Query: "1",
MaxSamples: 0,
Result: Result{
ErrTooManySamples(env),
nil,
nil,
},
Start: time.Unix(0, 0),
End: time.Unix(2, 0),
Interval: time.Second,
},
{
MaxSamples: 3,
Start: time.Unix(0, 0),
End: time.Unix(2, 0),
Interval: time.Second,
}, {
Query: "metric",
MaxSamples: 3,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 1, T: 0}, {V: 1, T: 1000}, {V: 1, T: 2000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(0, 0),
End: time.Unix(2, 0),
Interval: time.Second,
},
{
Query: "metric",
MaxSamples: 2,
Result: Result{
ErrTooManySamples(env),
nil,
nil,
},
Start: time.Unix(0, 0),
End: time.Unix(2, 0),
Interval: time.Second,
},
{
Start: time.Unix(0, 0),
End: time.Unix(2, 0),
Interval: time.Second,
}, {
Query: "metric",
MaxSamples: 3,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 1, T: 0}, {V: 1, T: 5000}, {V: 2, T: 10000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(0, 0),
End: time.Unix(10, 0),
Interval: 5 * time.Second,
},
{
Query: "metric",
MaxSamples: 2,
Result: Result{
ErrTooManySamples(env),
nil,
nil,
},
Start: time.Unix(0, 0),
End: time.Unix(10, 0),
Interval: 5 * time.Second,
},
{
Start: time.Unix(0, 0),
End: time.Unix(10, 0),
Interval: 5 * time.Second,
}, {
Query: "rate(bigmetric[1s])",
MaxSamples: 1,
Result: Result{
nil,
Matrix{},
nil,
},
Start: time.Unix(0, 0),
End: time.Unix(10, 0),
Interval: 5 * time.Second,
Start: time.Unix(0, 0),
End: time.Unix(10, 0),
Interval: 5 * time.Second,
},
}
engine := test.QueryEngine()
for _, c := range cases {
var err error
var qry Query
t.Run(c.Query, func(t *testing.T) {
var err error
var qry Query
engine.maxSamplesPerQuery = c.MaxSamples
engine.maxSamplesPerQuery = c.MaxSamples
if c.Interval == 0 {
qry, err = engine.NewInstantQuery(test.Queryable(), c.Query, c.Start)
} else {
qry, err = engine.NewRangeQuery(test.Queryable(), c.Query, c.Start, c.End, c.Interval)
}
require.NoError(t, err)
if c.Interval == 0 {
qry, err = engine.NewInstantQuery(test.Queryable(), c.Query, c.Start)
} else {
qry, err = engine.NewRangeQuery(test.Queryable(), c.Query, c.Start, c.End, c.Interval)
}
require.NoError(t, err)
res := qry.Exec(test.Context())
require.Equal(t, c.Result.Err, res.Err)
require.Equal(t, c.Result.Value, res.Value, "query %q failed", c.Query)
// Within limit.
res := qry.Exec(test.Context())
require.NoError(t, res.Err)
// Exceeding limit.
engine.maxSamplesPerQuery = c.MaxSamples - 1
res = qry.Exec(test.Context())
require.Equal(t, ErrTooManySamples(env), res.Err)
})
}
}

View file

@@ -70,7 +70,7 @@ const (
DefaultWriteBufferSize = 4 * 1024 * 1024 // 4 MiB.
)
// corruptionErr is an error that's returned when corruption is encountered.
// CorruptionErr is an error that's returned when corruption is encountered.
type CorruptionErr struct {
Dir string
FileIndex int