Allow ingested metrics to be relabeled.

The main purpose of this is to allow for blacklisting of expensive
metrics as a tactical option. It could also be used for renaming
metrics and for removing labels from federated metrics.
Brian Brazil 2015-06-12 22:06:30 +01:00
parent 5e50addc87
commit f2f26ca08f
5 changed files with 102 additions and 2 deletions
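
As a rough sketch of what the new hook enables (not part of the commit; the variable and job name are hypothetical, the rule mirrors the one added to the test configuration below, and the regexp, config, and client_golang/model packages are assumed to be imported), a scrape config that blacklists an expensive metric family would parse into something like:

var exampleScrapeConfig = &config.ScrapeConfig{
	JobName: "service-x", // hypothetical job name
	MetricRelabelConfigs: []*config.RelabelConfig{
		{
			SourceLabels: clientmodel.LabelNames{"__name__"},
			Regex:        &config.Regexp{*regexp.MustCompile("expensive_metric.*$")},
			Action:       config.RelabelDrop,
		},
	},
}

The rules are applied to every sample ingested from the job's targets, after the target's base labels have been merged in.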

config/config.go

@@ -204,8 +204,10 @@ type ScrapeConfig struct
	FileSDConfigs []*FileSDConfig `yaml:"file_sd_configs,omitempty"`
	// List of Consul service discovery configurations.
	ConsulSDConfigs []*ConsulSDConfig `yaml:"consul_sd_configs,omitempty"`
	// List of relabel configurations.
	// List of target relabel configurations.
	RelabelConfigs []*RelabelConfig `yaml:"relabel_configs,omitempty"`
	// List of metric relabel configurations.
	MetricRelabelConfigs []*RelabelConfig `yaml:"metric_relabel_configs,omitempty"`
	// Catches all undefined fields and must be empty after parsing.
	XXX map[string]interface{} `yaml:",inline"`

config/config_test.go

@@ -114,6 +114,14 @@ var expectedConf = &Config{
					Action: RelabelDrop,
				},
			},
			MetricRelabelConfigs: []*RelabelConfig{
				{
					SourceLabels: clientmodel.LabelNames{"__name__"},
					Regex: &Regexp{*regexp.MustCompile("expensive_metric.*$")},
					Separator: ";",
					Action: RelabelDrop,
				},
			},
		},
		{
			JobName: "service-y",

config/testdata/conf.good.yml

@@ -71,9 +71,13 @@ scrape_configs:
    regex: (.*)some-[regex]$
    action: drop
  metric_relabel_configs:
  - source_labels: [__name__]
    regex: expensive_metric.*$
    action: drop
- job_name: service-y
  consul_sd_configs:
  - server: 'localhost:1234'
    services: ['nginx', 'cache', 'mysql']
    services: ['nginx', 'cache', 'mysql']

retrieval/target.go

@@ -151,6 +151,8 @@ type Target struct
	scraperStopped chan struct{}
	// Channel to buffer ingested samples.
	ingestedSamples chan clientmodel.Samples
	// Metric relabel configuration.
	metricRelabelConfigs []*config.RelabelConfig

	// Mutex protects the members below.
	sync.RWMutex
@@ -212,6 +214,7 @@ func (t *Target) Update(cfg *config.ScrapeConfig, baseLabels, metaLabels clientm
	if _, ok := t.baseLabels[clientmodel.InstanceLabel]; !ok {
		t.baseLabels[clientmodel.InstanceLabel] = clientmodel.LabelValue(t.InstanceIdentifier())
	}
	t.metricRelabelConfigs = cfg.MetricRelabelConfigs
}

func (t *Target) String() string {
@@ -361,6 +364,16 @@ func (t *Target) scrape(sampleAppender storage.SampleAppender) (err error) {
	for samples := range t.ingestedSamples {
		for _, s := range samples {
			s.Metric.MergeFromLabelSet(baseLabels, clientmodel.ExporterLabelPrefix)
			labels, err := Relabel(clientmodel.LabelSet(s.Metric), t.metricRelabelConfigs...)
			if err != nil {
				log.Errorf("error while relabeling metric %s of instance %s: %s", s.Metric, t.url, err)
				continue
			}
			// Check if the timeseries was dropped.
			if labels == nil {
				continue
			}
			s.Metric = clientmodel.Metric(labels)
			sampleAppender.Append(s)
		}
	}
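
A minimal sketch (not part of the commit) of the drop semantics the loop above relies on, assuming it lives in the retrieval package next to the Relabel helper and that the regexp, config, and client_golang/model packages are imported; the function and the metric name are hypothetical:

func exampleMetricDrop() {
	cfg := &config.RelabelConfig{
		SourceLabels: clientmodel.LabelNames{"__name__"},
		Regex:        &config.Regexp{*regexp.MustCompile("expensive_metric.*$")},
		Separator:    ";",
		Action:       config.RelabelDrop,
	}
	// "expensive_metric_count" is a hypothetical series name.
	labels, err := Relabel(clientmodel.LabelSet{"__name__": "expensive_metric_count"}, cfg)
	if err == nil && labels == nil {
		// Dropped: the scrape loop above sees nil labels and skips the sample
		// instead of appending it; a relabeling error likewise skips only that
		// sample rather than failing the whole scrape.
	}
}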

retrieval/target_test.go

@@ -20,12 +20,14 @@ import (
	"net/http/httptest"
	"net/url"
	"reflect"
	"regexp"
	"strings"
	"testing"
	"time"

	clientmodel "github.com/prometheus/client_golang/model"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/util/httputil"
)
@@ -77,6 +79,77 @@ func TestTargetScrapeWithFullChannel(t *testing.T) {
	}
}

func TestTargetScrapeMetricRelabelConfigs(t *testing.T) {
	server := httptest.NewServer(
		http.HandlerFunc(
			func(w http.ResponseWriter, r *http.Request) {
				w.Header().Set("Content-Type", `text/plain; version=0.0.4`)
				w.Write([]byte("test_metric_drop 0\n"))
				w.Write([]byte("test_metric_relabel 1\n"))
			},
		),
	)
	defer server.Close()
	testTarget := newTestTarget(server.URL, 10*time.Millisecond, clientmodel.LabelSet{})

	testTarget.metricRelabelConfigs = []*config.RelabelConfig{
		{
			SourceLabels: clientmodel.LabelNames{"__name__"},
			Regex: &config.Regexp{*regexp.MustCompile(".*drop.*")},
			Action: config.RelabelDrop,
		},
		{
			SourceLabels: clientmodel.LabelNames{"__name__"},
			Regex: &config.Regexp{*regexp.MustCompile(".*(relabel|up).*")},
			TargetLabel: "foo",
			Replacement: "bar",
			Action: config.RelabelReplace,
		},
	}

	appender := &collectResultAppender{}
	testTarget.scrape(appender)

	// Remove variables part of result.
	for _, sample := range appender.result {
		sample.Timestamp = 0
		sample.Value = 0
	}

	expected := []*clientmodel.Sample{
		{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: "test_metric_relabel",
				"foo": "bar",
				clientmodel.InstanceLabel: clientmodel.LabelValue(testTarget.url.Host),
			},
			Timestamp: 0,
			Value: 0,
		},
		// The metrics about the scrape are not affected.
		{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: scrapeHealthMetricName,
				clientmodel.InstanceLabel: clientmodel.LabelValue(testTarget.url.Host),
			},
			Timestamp: 0,
			Value: 0,
		},
		{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: scrapeDurationMetricName,
				clientmodel.InstanceLabel: clientmodel.LabelValue(testTarget.url.Host),
			},
			Timestamp: 0,
			Value: 0,
		},
	}

	if !appender.result.Equal(expected) {
		t.Fatalf("Expected and actual samples not equal. Expected: %s, actual: %s", expected, appender.result)
	}
}

func TestTargetRecordScrapeHealth(t *testing.T) {
	testTarget := newTestTarget("example.url:80", 0, clientmodel.LabelSet{clientmodel.JobLabel: "testjob"})