Scraping: stop storing discovered labels (#15261)

Instead of storing discovered labels on every target, recompute them on
demand. The `Target` struct now holds the extra data needed for that
recomputation, such as the `ScrapeConfig`.
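
For illustration, an abridged sketch of the reworked struct, with the field
set taken from the `Target` hunk below (unchanged fields elided):

package scrape

import (
	"github.com/prometheus/common/model"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/model/labels"
)

// Target refers to a singular HTTP or HTTPS endpoint.
type Target struct {
	// Any labels that are added to this target and its metrics.
	labels labels.Labels
	// ScrapeConfig used to create this target.
	scrapeConfig *config.ScrapeConfig
	// Target and TargetGroup labels used to create this target.
	tLabels, tgLabels model.LabelSet

	// mtx, lastError, health, and friends are unchanged and elided here.
}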

This moves the load from every Prometheus instance, all of the time, to just
the moment when someone views Service Discovery in the UI.

The way `PopulateLabels` is used changes: you are no longer expected to
call it with a partially populated `labels.Builder`.
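
A minimal sketch of the new call pattern, mirroring the promtool hunk below;
the wrapper function and its parameter names are illustrative only:

package example

import (
	"github.com/prometheus/common/model"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/scrape"
)

func populate(cfg *config.ScrapeConfig, tLabels, tgLabels model.LabelSet) (res, orig labels.Labels, err error) {
	// Start from an empty builder; no pre-population is expected any more.
	lb := labels.NewBuilder(labels.EmptyLabels())

	// Pre-relabeling ("discovered") labels, recomputed only when wanted.
	scrape.PopulateDiscoveredLabels(lb, cfg, tLabels, tgLabels)
	orig = lb.Labels()

	// Relabeled labels; empty res with nil err means the target was dropped.
	res, err = scrape.PopulateLabels(lb, cfg, tLabels, tgLabels)
	return res, orig, err
}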

The signature of `Target.Labels` changes to take a `labels.Builder`
instead of a `ScratchBuilder`, for consistency with `DiscoveredLabels`.
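
Since both methods now take the same builder type, one builder can be reused
across calls, as the web API hunks below do (a fragment; `target` stands for
any *scrape.Target in scope, and both methods reset the builder internally):

b := labels.NewBuilder(labels.EmptyLabels())
public := target.Labels(b)               // public (non-reserved) target labels
discovered := target.DiscoveredLabels(b) // recomputed pre-relabeling labels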

This saves a lot of work when many targets are filtered out during
relabeling. Combine it with `keep_dropped_targets` to avoid ever computing
most labels for dropped targets.
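
For reference, `KeepDroppedTargets` caps how many dropped targets a scrape
pool retains, with 0 meaning no limit (per the Sync hunk below); a
hypothetical setting might look like:

cfg := &config.ScrapeConfig{
	JobName:            "example", // hypothetical job name
	KeepDroppedTargets: 100,       // retain at most 100 dropped targets; 0 = no limit
}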

Signed-off-by: Bryan Boreham <bjboreham@gmail.com>
Bryan Boreham 2024-12-21 13:33:08 +00:00 committed by GitHub
parent 5b5fee08af
commit 7b03796d0f
8 changed files with 162 additions and 135 deletions

@@ -144,7 +144,9 @@ func getSDCheckResult(targetGroups []*targetgroup.Group, scrapeConfig *config.Sc
}
}
res, orig, err := scrape.PopulateLabels(lb, scrapeConfig)
scrape.PopulateDiscoveredLabels(lb, scrapeConfig, target, targetGroup.Labels)
orig := lb.Labels()
res, err := scrape.PopulateLabels(lb, scrapeConfig, target, targetGroup.Labels)
result := sdCheckResult{
DiscoveredLabels: orig,
Labels: res,


@@ -18,6 +18,7 @@ import (
"context"
"errors"
"fmt"
"maps"
"net/http"
"net/http/httptest"
"net/url"
@@ -61,7 +62,7 @@ func init() {
func TestPopulateLabels(t *testing.T) {
cases := []struct {
in labels.Labels
in model.LabelSet
cfg *config.ScrapeConfig
res labels.Labels
resOrig labels.Labels
@@ -69,10 +70,10 @@ func TestPopulateLabels(t *testing.T) {
}{
// Regular population of scrape config options.
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
"custom": "value",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -103,14 +104,14 @@ func TestPopulateLabels(t *testing.T) {
// Pre-define/overwrite scrape config labels.
// Leave out port and expect it to be defaulted to scheme.
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "1.2.3.4",
model.SchemeLabel: "http",
model.MetricsPathLabel: "/custom",
model.JobLabel: "custom-job",
model.ScrapeIntervalLabel: "2s",
model.ScrapeTimeoutLabel: "2s",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -138,10 +139,10 @@ func TestPopulateLabels(t *testing.T) {
},
// Provide instance label. HTTPS port default for IPv6.
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "[::1]",
model.InstanceLabel: "custom-instance",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -170,7 +171,7 @@ func TestPopulateLabels(t *testing.T) {
},
// Address label missing.
{
in: labels.FromStrings("custom", "value"),
in: model.LabelSet{"custom": "value"},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -184,7 +185,7 @@ func TestPopulateLabels(t *testing.T) {
},
// Address label missing, but added in relabelling.
{
in: labels.FromStrings("custom", "host:1234"),
in: model.LabelSet{"custom": "host:1234"},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -222,7 +223,7 @@ func TestPopulateLabels(t *testing.T) {
},
// Address label missing, but added in relabelling.
{
in: labels.FromStrings("custom", "host:1234"),
in: model.LabelSet{"custom": "host:1234"},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -260,10 +261,10 @@ func TestPopulateLabels(t *testing.T) {
},
// Invalid UTF-8 in label.
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
"custom": "\xbd",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -277,10 +278,10 @@ func TestPopulateLabels(t *testing.T) {
},
// Invalid duration in interval label.
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "2notseconds",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -294,10 +295,10 @@ func TestPopulateLabels(t *testing.T) {
},
// Invalid duration in timeout label.
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeTimeoutLabel: "2notseconds",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -311,10 +312,10 @@ func TestPopulateLabels(t *testing.T) {
},
// 0 interval in timeout label.
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "0s",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -328,10 +329,10 @@ func TestPopulateLabels(t *testing.T) {
},
// 0 duration in timeout label.
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeTimeoutLabel: "0s",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -345,11 +346,11 @@ func TestPopulateLabels(t *testing.T) {
},
// Timeout less than interval.
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "1s",
model.ScrapeTimeoutLabel: "2s",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -363,9 +364,9 @@ func TestPopulateLabels(t *testing.T) {
},
// Don't attach default port.
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "1.2.3.4",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -393,9 +394,9 @@ func TestPopulateLabels(t *testing.T) {
},
// verify that the default port is not removed (http).
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "1.2.3.4:80",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "http",
MetricsPath: "/metrics",
@@ -423,9 +424,9 @@ func TestPopulateLabels(t *testing.T) {
},
// verify that the default port is not removed (https).
{
in: labels.FromMap(map[string]string{
in: model.LabelSet{
model.AddressLabel: "1.2.3.4:443",
}),
},
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
@@ -453,17 +454,18 @@ func TestPopulateLabels(t *testing.T) {
},
}
for _, c := range cases {
in := c.in.Copy()
res, orig, err := PopulateLabels(labels.NewBuilder(c.in), c.cfg)
in := maps.Clone(c.in)
lb := labels.NewBuilder(labels.EmptyLabels())
res, err := PopulateLabels(lb, c.cfg, c.in, nil)
if c.err != "" {
require.EqualError(t, err, c.err)
} else {
require.NoError(t, err)
testutil.RequireEqual(t, c.res, res)
PopulateDiscoveredLabels(lb, c.cfg, c.in, nil)
testutil.RequireEqual(t, c.resOrig, lb.Labels())
}
require.Equal(t, c.in, in)
testutil.RequireEqual(t, c.res, res)
testutil.RequireEqual(t, c.resOrig, orig)
require.Equal(t, c.in, in) // Check this wasn't altered by PopulateLabels().
}
}

@@ -450,7 +450,7 @@ func (sp *scrapePool) Sync(tgs []*targetgroup.Group) {
switch {
case nonEmpty:
all = append(all, t)
case !t.discoveredLabels.IsEmpty():
default:
if sp.config.KeepDroppedTargets == 0 || uint(len(sp.droppedTargets)) < sp.config.KeepDroppedTargets {
sp.droppedTargets = append(sp.droppedTargets, t)
}
@@ -553,9 +553,9 @@ func (sp *scrapePool) sync(targets []*Target) {
if _, ok := uniqueLoops[hash]; !ok {
uniqueLoops[hash] = nil
}
// Need to keep the most updated labels information
// for displaying it in the Service Discovery web page.
sp.activeTargets[hash].SetDiscoveredLabels(t.DiscoveredLabels())
// Need to keep the most updated ScrapeConfig for
// displaying labels in the Service Discovery web page.
sp.activeTargets[hash].SetScrapeConfig(sp.config, t.tLabels, t.tgLabels)
}
}

@@ -212,7 +212,8 @@ func TestDroppedTargetsList(t *testing.T) {
sp.Sync(tgs)
require.Len(t, sp.droppedTargets, expectedLength)
require.Equal(t, expectedLength, sp.droppedTargetsCount)
require.Equal(t, expectedLabelSetString, sp.droppedTargets[0].DiscoveredLabels().String())
lb := labels.NewBuilder(labels.EmptyLabels())
require.Equal(t, expectedLabelSetString, sp.droppedTargets[0].DiscoveredLabels(lb).String())
// Check that count is still correct when we don't retain all dropped targets.
sp.config.KeepDroppedTargets = 1
@@ -235,16 +236,19 @@ func TestDiscoveredLabelsUpdate(t *testing.T) {
}
sp.activeTargets = make(map[uint64]*Target)
t1 := &Target{
discoveredLabels: labels.FromStrings("label", "name"),
tLabels: model.LabelSet{"label": "name"},
scrapeConfig: sp.config,
}
sp.activeTargets[t1.hash()] = t1
t2 := &Target{
discoveredLabels: labels.FromStrings("labelNew", "nameNew"),
tLabels: model.LabelSet{"labelNew": "nameNew"},
scrapeConfig: sp.config,
}
sp.sync([]*Target{t2})
require.Equal(t, t2.DiscoveredLabels(), sp.activeTargets[t1.hash()].DiscoveredLabels())
lb := labels.NewBuilder(labels.EmptyLabels())
require.Equal(t, t2.DiscoveredLabels(lb), sp.activeTargets[t1.hash()].DiscoveredLabels(lb))
}
type testLoop struct {
@@ -309,7 +313,8 @@ func TestScrapePoolStop(t *testing.T) {
for i := 0; i < numTargets; i++ {
t := &Target{
labels: labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i)),
labels: labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i)),
scrapeConfig: &config.ScrapeConfig{},
}
l := &testLoop{}
d := time.Duration((i+1)*20) * time.Millisecond
@@ -394,8 +399,8 @@ func TestScrapePoolReload(t *testing.T) {
for i := 0; i < numTargets; i++ {
labels := labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i))
t := &Target{
labels: labels,
discoveredLabels: labels,
labels: labels,
scrapeConfig: &config.ScrapeConfig{},
}
l := &testLoop{}
d := time.Duration((i+1)*20) * time.Millisecond
@@ -2689,6 +2694,7 @@ func TestTargetScraperScrapeOK(t *testing.T) {
model.SchemeLabel, serverURL.Scheme,
model.AddressLabel, serverURL.Host,
),
scrapeConfig: &config.ScrapeConfig{},
},
client: http.DefaultClient,
timeout: configTimeout,
@@ -2739,6 +2745,7 @@ func TestTargetScrapeScrapeCancel(t *testing.T) {
model.SchemeLabel, serverURL.Scheme,
model.AddressLabel, serverURL.Host,
),
scrapeConfig: &config.ScrapeConfig{},
},
client: http.DefaultClient,
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
@@ -2794,6 +2801,7 @@ func TestTargetScrapeScrapeNotFound(t *testing.T) {
model.SchemeLabel, serverURL.Scheme,
model.AddressLabel, serverURL.Host,
),
scrapeConfig: &config.ScrapeConfig{},
},
client: http.DefaultClient,
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
@@ -2837,6 +2845,7 @@ func TestTargetScraperBodySizeLimit(t *testing.T) {
model.SchemeLabel, serverURL.Scheme,
model.AddressLabel, serverURL.Host,
),
scrapeConfig: &config.ScrapeConfig{},
},
client: http.DefaultClient,
bodySizeLimit: bodySizeLimit,
@@ -3107,7 +3116,8 @@ func TestReuseScrapeCache(t *testing.T) {
}
sp, _ = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
t1 = &Target{
discoveredLabels: labels.FromStrings("labelNew", "nameNew", "labelNew1", "nameNew1", "labelNew2", "nameNew2"),
labels: labels.FromStrings("labelNew", "nameNew", "labelNew1", "nameNew1", "labelNew2", "nameNew2"),
scrapeConfig: &config.ScrapeConfig{},
}
proxyURL, _ = url.Parse("http://localhost:2128")
)
@@ -3291,7 +3301,8 @@ func TestReuseCacheRace(t *testing.T) {
buffers = pool.New(1e3, 100e6, 3, func(sz int) interface{} { return make([]byte, 0, sz) })
sp, _ = newScrapePool(cfg, app, 0, nil, buffers, &Options{}, newTestScrapeMetrics(t))
t1 = &Target{
discoveredLabels: labels.FromStrings("labelNew", "nameNew"),
labels: labels.FromStrings("labelNew", "nameNew"),
scrapeConfig: &config.ScrapeConfig{},
}
)
defer sp.stop()
@@ -4475,7 +4486,9 @@ func BenchmarkTargetScraperGzip(b *testing.B) {
model.SchemeLabel, serverURL.Scheme,
model.AddressLabel, serverURL.Host,
),
params: url.Values{"count": []string{strconv.Itoa(scenario.metricsCount)}},
scrapeConfig: &config.ScrapeConfig{
Params: url.Values{"count": []string{strconv.Itoa(scenario.metricsCount)}},
},
},
client: client,
timeout: time.Second,

@@ -45,12 +45,12 @@ const (
// Target refers to a singular HTTP or HTTPS endpoint.
type Target struct {
// Labels before any processing.
discoveredLabels labels.Labels
// Any labels that are added to this target and its metrics.
labels labels.Labels
// Additional URL parameters that are part of the target URL.
params url.Values
// ScrapeConfig used to create this target.
scrapeConfig *config.ScrapeConfig
// Target and TargetGroup labels used to create this target.
tLabels, tgLabels model.LabelSet
mtx sync.RWMutex
lastError error
@@ -61,12 +61,13 @@ type Target struct {
}
// NewTarget creates a reasonably configured target for querying.
func NewTarget(labels, discoveredLabels labels.Labels, params url.Values) *Target {
func NewTarget(labels labels.Labels, scrapeConfig *config.ScrapeConfig, tLabels, tgLabels model.LabelSet) *Target {
return &Target{
labels: labels,
discoveredLabels: discoveredLabels,
params: params,
health: HealthUnknown,
labels: labels,
tLabels: tLabels,
tgLabels: tgLabels,
scrapeConfig: scrapeConfig,
health: HealthUnknown,
}
}
@@ -168,11 +169,11 @@ func (t *Target) offset(interval time.Duration, offsetSeed uint64) time.Duration
}
// Labels returns a copy of the set of all public labels of the target.
func (t *Target) Labels(b *labels.ScratchBuilder) labels.Labels {
b.Reset()
func (t *Target) Labels(b *labels.Builder) labels.Labels {
b.Reset(labels.EmptyLabels())
t.labels.Range(func(l labels.Label) {
if !strings.HasPrefix(l.Name, model.ReservedLabelPrefix) {
b.Add(l.Name, l.Value)
b.Set(l.Name, l.Value)
}
})
return b.Labels()
@@ -188,24 +189,31 @@ func (t *Target) LabelsRange(f func(l labels.Label)) {
}
// DiscoveredLabels returns a copy of the target's labels before any processing.
func (t *Target) DiscoveredLabels() labels.Labels {
func (t *Target) DiscoveredLabels(lb *labels.Builder) labels.Labels {
t.mtx.Lock()
defer t.mtx.Unlock()
return t.discoveredLabels.Copy()
cfg, tLabels, tgLabels := t.scrapeConfig, t.tLabels, t.tgLabels
t.mtx.Unlock()
PopulateDiscoveredLabels(lb, cfg, tLabels, tgLabels)
return lb.Labels()
}
// SetDiscoveredLabels sets new DiscoveredLabels.
func (t *Target) SetDiscoveredLabels(l labels.Labels) {
// SetScrapeConfig sets new ScrapeConfig.
func (t *Target) SetScrapeConfig(scrapeConfig *config.ScrapeConfig, tLabels, tgLabels model.LabelSet) {
t.mtx.Lock()
defer t.mtx.Unlock()
t.discoveredLabels = l
t.scrapeConfig = scrapeConfig
t.tLabels = tLabels
t.tgLabels = tgLabels
}
// URL returns a copy of the target's URL.
func (t *Target) URL() *url.URL {
t.mtx.Lock()
configParams := t.scrapeConfig.Params
t.mtx.Unlock()
params := url.Values{}
for k, v := range t.params {
for k, v := range configParams {
params[k] = make([]string, len(v))
copy(params[k], v)
}
@@ -420,10 +428,19 @@ func (app *maxSchemaAppender) AppendHistogram(ref storage.SeriesRef, lset labels
return ref, nil
}
// PopulateLabels builds a label set from the given label set and scrape configuration.
// It returns a label set before relabeling was applied as the second return value.
// Returns the original discovered label set found before relabelling was applied if the target is dropped during relabeling.
func PopulateLabels(lb *labels.Builder, cfg *config.ScrapeConfig) (res, orig labels.Labels, err error) {
// PopulateDiscoveredLabels sets base labels on lb from target and group labels and scrape configuration, before relabeling.
func PopulateDiscoveredLabels(lb *labels.Builder, cfg *config.ScrapeConfig, tLabels, tgLabels model.LabelSet) {
lb.Reset(labels.EmptyLabels())
for ln, lv := range tLabels {
lb.Set(string(ln), string(lv))
}
for ln, lv := range tgLabels {
if _, ok := tLabels[ln]; !ok {
lb.Set(string(ln), string(lv))
}
}
// Copy labels into the labelset for the target if they are not set already.
scrapeLabels := []labels.Label{
{Name: model.JobLabel, Value: cfg.JobName},
@@ -444,44 +461,49 @@ func PopulateLabels(lb *labels.Builder, cfg *config.ScrapeConfig) (res, orig lab
lb.Set(name, v[0])
}
}
}
preRelabelLabels := lb.Labels()
// PopulateLabels builds labels from target and group labels and scrape configuration,
// performs defined relabeling, checks validity, and adds Prometheus standard labels such as 'instance'.
// A return of empty labels and nil error means the target was dropped by relabeling.
func PopulateLabels(lb *labels.Builder, cfg *config.ScrapeConfig, tLabels, tgLabels model.LabelSet) (res labels.Labels, err error) {
PopulateDiscoveredLabels(lb, cfg, tLabels, tgLabels)
keep := relabel.ProcessBuilder(lb, cfg.RelabelConfigs...)
// Check if the target was dropped.
if !keep {
return labels.EmptyLabels(), preRelabelLabels, nil
return labels.EmptyLabels(), nil
}
if v := lb.Get(model.AddressLabel); v == "" {
return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("no address")
return labels.EmptyLabels(), errors.New("no address")
}
addr := lb.Get(model.AddressLabel)
if err := config.CheckTargetAddress(model.LabelValue(addr)); err != nil {
return labels.EmptyLabels(), labels.EmptyLabels(), err
return labels.EmptyLabels(), err
}
interval := lb.Get(model.ScrapeIntervalLabel)
intervalDuration, err := model.ParseDuration(interval)
if err != nil {
return labels.EmptyLabels(), labels.EmptyLabels(), fmt.Errorf("error parsing scrape interval: %w", err)
return labels.EmptyLabels(), fmt.Errorf("error parsing scrape interval: %w", err)
}
if time.Duration(intervalDuration) == 0 {
return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("scrape interval cannot be 0")
return labels.EmptyLabels(), errors.New("scrape interval cannot be 0")
}
timeout := lb.Get(model.ScrapeTimeoutLabel)
timeoutDuration, err := model.ParseDuration(timeout)
if err != nil {
return labels.EmptyLabels(), labels.EmptyLabels(), fmt.Errorf("error parsing scrape timeout: %w", err)
return labels.EmptyLabels(), fmt.Errorf("error parsing scrape timeout: %w", err)
}
if time.Duration(timeoutDuration) == 0 {
return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("scrape timeout cannot be 0")
return labels.EmptyLabels(), errors.New("scrape timeout cannot be 0")
}
if timeoutDuration > intervalDuration {
return labels.EmptyLabels(), labels.EmptyLabels(), fmt.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
return labels.EmptyLabels(), fmt.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
}
// Meta labels are deleted after relabelling. Other internal labels propagate to
@@ -506,9 +528,9 @@ func PopulateLabels(lb *labels.Builder, cfg *config.ScrapeConfig) (res, orig lab
return nil
})
if err != nil {
return labels.EmptyLabels(), labels.EmptyLabels(), err
return labels.EmptyLabels(), err
}
return res, preRelabelLabels, nil
return res, nil
}
// TargetsFromGroup builds targets based on the given TargetGroup and config.
@@ -516,24 +538,12 @@ func TargetsFromGroup(tg *targetgroup.Group, cfg *config.ScrapeConfig, targets [
targets = targets[:0]
failures := []error{}
for i, tlset := range tg.Targets {
lb.Reset(labels.EmptyLabels())
for ln, lv := range tlset {
lb.Set(string(ln), string(lv))
}
for ln, lv := range tg.Labels {
if _, ok := tlset[ln]; !ok {
lb.Set(string(ln), string(lv))
}
}
lset, origLabels, err := PopulateLabels(lb, cfg)
for i, tLabels := range tg.Targets {
lset, err := PopulateLabels(lb, cfg, tLabels, tg.Labels)
if err != nil {
failures = append(failures, fmt.Errorf("instance %d in group %s: %w", i, tg, err))
}
if !lset.IsEmpty() || !origLabels.IsEmpty() {
targets = append(targets, NewTarget(lset, origLabels, cfg.Params))
} else {
targets = append(targets, NewTarget(lset, cfg, tLabels, tg.Labels))
}
}
return targets, failures

@@ -43,8 +43,8 @@ const (
func TestTargetLabels(t *testing.T) {
target := newTestTarget("example.com:80", 0, labels.FromStrings("job", "some_job", "foo", "bar"))
want := labels.FromStrings(model.JobLabel, "some_job", "foo", "bar")
b := labels.NewScratchBuilder(0)
got := target.Labels(&b)
b := labels.NewBuilder(labels.EmptyLabels())
got := target.Labels(b)
require.Equal(t, want, got)
i := 0
target.LabelsRange(func(l labels.Label) {
@@ -103,9 +103,11 @@ func TestTargetOffset(t *testing.T) {
}
func TestTargetURL(t *testing.T) {
params := url.Values{
"abc": []string{"foo", "bar", "baz"},
"xyz": []string{"hoo"},
scrapeConfig := &config.ScrapeConfig{
Params: url.Values{
"abc": []string{"foo", "bar", "baz"},
"xyz": []string{"hoo"},
},
}
labels := labels.FromMap(map[string]string{
model.AddressLabel: "example.com:1234",
@@ -114,7 +116,7 @@ func TestTargetURL(t *testing.T) {
"__param_abc": "overwrite",
"__param_cde": "huu",
})
target := NewTarget(labels, labels, params)
target := NewTarget(labels, scrapeConfig, nil, nil)
// The reserved labels are concatenated into a full URL. The first value for each
// URL query parameter can be set/modified via labels as well.
@@ -139,7 +141,7 @@ func newTestTarget(targetURL string, _ time.Duration, lbls labels.Labels) *Targe
lb.Set(model.AddressLabel, strings.TrimPrefix(targetURL, "http://"))
lb.Set(model.MetricsPathLabel, "/metrics")
return &Target{labels: lb.Labels()}
return &Target{labels: lb.Labels(), scrapeConfig: &config.ScrapeConfig{}}
}
func TestNewHTTPBearerToken(t *testing.T) {

@@ -1083,12 +1083,12 @@ func (api *API) targets(r *http.Request) apiFuncResult {
showActive := state == "" || state == "any" || state == "active"
showDropped := state == "" || state == "any" || state == "dropped"
res := &TargetDiscovery{}
builder := labels.NewBuilder(labels.EmptyLabels())
if showActive {
targetsActive := api.targetRetriever(r.Context()).TargetsActive()
activeKeys, numTargets := sortKeys(targetsActive)
res.ActiveTargets = make([]*Target, 0, numTargets)
builder := labels.NewScratchBuilder(0)
for _, key := range activeKeys {
if scrapePool != "" && key != scrapePool {
@@ -1104,8 +1104,8 @@ func (api *API) targets(r *http.Request) apiFuncResult {
globalURL, err := getGlobalURL(target.URL(), api.globalURLOptions)
res.ActiveTargets = append(res.ActiveTargets, &Target{
DiscoveredLabels: target.DiscoveredLabels(),
Labels: target.Labels(&builder),
DiscoveredLabels: target.DiscoveredLabels(builder),
Labels: target.Labels(builder),
ScrapePool: key,
ScrapeURL: target.URL().String(),
GlobalURL: globalURL.String(),
@@ -1143,7 +1143,7 @@ func (api *API) targets(r *http.Request) apiFuncResult {
}
for _, target := range targetsDropped[key] {
res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{
DiscoveredLabels: target.DiscoveredLabels(),
DiscoveredLabels: target.DiscoveredLabels(builder),
})
}
}
@@ -1181,7 +1181,7 @@ func (api *API) targetMetadata(r *http.Request) apiFuncResult {
}
}
builder := labels.NewScratchBuilder(0)
builder := labels.NewBuilder(labels.EmptyLabels())
metric := r.FormValue("metric")
res := []metricMetadata{}
for _, tt := range api.targetRetriever(r.Context()).TargetsActive() {
@@ -1189,7 +1189,7 @@ func (api *API) targetMetadata(r *http.Request) apiFuncResult {
if limit >= 0 && len(res) >= limit {
break
}
targetLabels := t.Labels(&builder)
targetLabels := t.Labels(builder)
// Filter targets that don't satisfy the label matchers.
if matchTarget != "" && !matchLabels(targetLabels, matchers) {
continue

@@ -103,12 +103,12 @@ type testTargetRetriever struct {
}
type testTargetParams struct {
Identifier string
Labels labels.Labels
DiscoveredLabels labels.Labels
Params url.Values
Reports []*testReport
Active bool
Identifier string
Labels labels.Labels
targetLabels model.LabelSet
Params url.Values
Reports []*testReport
Active bool
}
type testReport struct {
@@ -124,7 +124,7 @@ func newTestTargetRetriever(targetsInfo []*testTargetParams) *testTargetRetrieve
droppedTargets = make(map[string][]*scrape.Target)
for _, t := range targetsInfo {
nt := scrape.NewTarget(t.Labels, t.DiscoveredLabels, t.Params)
nt := scrape.NewTarget(t.Labels, &config.ScrapeConfig{Params: t.Params}, t.targetLabels, nil)
for _, r := range t.Reports {
nt.Report(r.Start, r.Duration, r.Error)
@@ -1004,10 +1004,9 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
model.ScrapeIntervalLabel: "15s",
model.ScrapeTimeoutLabel: "5s",
}),
DiscoveredLabels: labels.EmptyLabels(),
Params: url.Values{},
Reports: []*testReport{{scrapeStart, 70 * time.Millisecond, nil}},
Active: true,
Params: url.Values{},
Reports: []*testReport{{scrapeStart, 70 * time.Millisecond, nil}},
Active: true,
},
{
Identifier: "blackbox",
@@ -1019,22 +1018,21 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
model.ScrapeIntervalLabel: "20s",
model.ScrapeTimeoutLabel: "10s",
}),
DiscoveredLabels: labels.EmptyLabels(),
Params: url.Values{"target": []string{"example.com"}},
Reports: []*testReport{{scrapeStart, 100 * time.Millisecond, errors.New("failed")}},
Active: true,
Params: url.Values{"target": []string{"example.com"}},
Reports: []*testReport{{scrapeStart, 100 * time.Millisecond, errors.New("failed")}},
Active: true,
},
{
Identifier: "blackbox",
Labels: labels.EmptyLabels(),
DiscoveredLabels: labels.FromMap(map[string]string{
targetLabels: model.LabelSet{
model.SchemeLabel: "http",
model.AddressLabel: "http://dropped.example.com:9115",
model.MetricsPathLabel: "/probe",
model.JobLabel: "blackbox",
model.ScrapeIntervalLabel: "30s",
model.ScrapeTimeoutLabel: "15s",
}),
},
Params: url.Values{},
Active: false,
},
@@ -1507,7 +1505,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
response: &TargetDiscovery{
ActiveTargets: []*Target{
{
DiscoveredLabels: labels.FromStrings(),
DiscoveredLabels: labels.FromStrings("__param_target", "example.com", "__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
Labels: labels.FromStrings("job", "blackbox"),
ScrapePool: "blackbox",
ScrapeURL: "http://localhost:9115/probe?target=example.com",
@@ -1520,7 +1518,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
ScrapeTimeout: "10s",
},
{
DiscoveredLabels: labels.FromStrings(),
DiscoveredLabels: labels.FromStrings("__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
Labels: labels.FromStrings("job", "test"),
ScrapePool: "test",
ScrapeURL: "http://example.com:8080/metrics",
@@ -1556,7 +1554,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
response: &TargetDiscovery{
ActiveTargets: []*Target{
{
DiscoveredLabels: labels.FromStrings(),
DiscoveredLabels: labels.FromStrings("__param_target", "example.com", "__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
Labels: labels.FromStrings("job", "blackbox"),
ScrapePool: "blackbox",
ScrapeURL: "http://localhost:9115/probe?target=example.com",
@@ -1569,7 +1567,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
ScrapeTimeout: "10s",
},
{
DiscoveredLabels: labels.FromStrings(),
DiscoveredLabels: labels.FromStrings("__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
Labels: labels.FromStrings("job", "test"),
ScrapePool: "test",
ScrapeURL: "http://example.com:8080/metrics",
@@ -1605,7 +1603,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
response: &TargetDiscovery{
ActiveTargets: []*Target{
{
DiscoveredLabels: labels.FromStrings(),
DiscoveredLabels: labels.FromStrings("__param_target", "example.com", "__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
Labels: labels.FromStrings("job", "blackbox"),
ScrapePool: "blackbox",
ScrapeURL: "http://localhost:9115/probe?target=example.com",
@@ -1618,7 +1616,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
ScrapeTimeout: "10s",
},
{
DiscoveredLabels: labels.FromStrings(),
DiscoveredLabels: labels.FromStrings("__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
Labels: labels.FromStrings("job", "test"),
ScrapePool: "test",
ScrapeURL: "http://example.com:8080/metrics",