Mirror of https://github.com/prometheus/prometheus.git, synced 2025-03-05 20:59:13 -08:00.
Scraping: stop storing discovered labels (#15261)
Instead of storing discovered labels on every target, recompute them if required. The `Target` struct now holds the extra data needed to recompute them, such as the ScrapeConfig.

This moves the load from every Prometheus instance, all of the time, to just when someone views Service Discovery in the UI.

The way `PopulateLabels` is used changes: you are no longer expected to call it with a part-populated `labels.Builder`. The signature of `Target.Labels` changes to take a `labels.Builder` instead of a `ScratchBuilder`, for consistency with `DiscoveredLabels`.

This will save a lot of work when many targets are filtered out in relabeling. Combine it with `keep_dropped_targets` to avoid ever computing most labels for dropped targets.

Signed-off-by: Bryan Boreham <bjboreham@gmail.com>
This commit is contained in: parent 5b5fee08af, commit 7b03796d0f
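To illustrate the API change before the diff: callers previously pre-populated a `labels.Builder` and made one call that returned both label sets; they now make two calls, and the discovered (pre-relabeling) set is computed only when actually wanted. A minimal sketch of the old and new call patterns, with variable names taken from the promtool hunk below (this is a sketch, not the surrounding function):

	lb := labels.NewBuilder(labels.EmptyLabels())

	// Before: one call returned both the final and the pre-relabeling label sets.
	res, orig, err := scrape.PopulateLabels(lb, scrapeConfig)

	// After: discovered labels are computed separately, and only when needed.
	scrape.PopulateDiscoveredLabels(lb, scrapeConfig, target, targetGroup.Labels)
	orig := lb.Labels()
	res, err := scrape.PopulateLabels(lb, scrapeConfig, target, targetGroup.Labels)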
@@ -144,7 +144,9 @@ func getSDCheckResult(targetGroups []*targetgroup.Group, scrapeConfig *config.Sc
 			}
 		}
 
-		res, orig, err := scrape.PopulateLabels(lb, scrapeConfig)
+		scrape.PopulateDiscoveredLabels(lb, scrapeConfig, target, targetGroup.Labels)
+		orig := lb.Labels()
+		res, err := scrape.PopulateLabels(lb, scrapeConfig, target, targetGroup.Labels)
 		result := sdCheckResult{
 			DiscoveredLabels: orig,
 			Labels:           res,
@@ -18,6 +18,7 @@ import (
 	"context"
 	"errors"
 	"fmt"
+	"maps"
 	"net/http"
 	"net/http/httptest"
 	"net/url"
@@ -61,7 +62,7 @@ func init() {
 
 func TestPopulateLabels(t *testing.T) {
 	cases := []struct {
-		in      labels.Labels
+		in      model.LabelSet
 		cfg     *config.ScrapeConfig
 		res     labels.Labels
 		resOrig labels.Labels
@@ -69,10 +70,10 @@ func TestPopulateLabels(t *testing.T) {
 	}{
 		// Regular population of scrape config options.
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel: "1.2.3.4:1000",
 				"custom":           "value",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -103,14 +104,14 @@ func TestPopulateLabels(t *testing.T) {
 		// Pre-define/overwrite scrape config labels.
 		// Leave out port and expect it to be defaulted to scheme.
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel:        "1.2.3.4",
 				model.SchemeLabel:         "http",
 				model.MetricsPathLabel:    "/custom",
 				model.JobLabel:            "custom-job",
 				model.ScrapeIntervalLabel: "2s",
 				model.ScrapeTimeoutLabel:  "2s",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -138,10 +139,10 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// Provide instance label. HTTPS port default for IPv6.
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel:  "[::1]",
 				model.InstanceLabel: "custom-instance",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -170,7 +171,7 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// Address label missing.
 		{
-			in: labels.FromStrings("custom", "value"),
+			in: model.LabelSet{"custom": "value"},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -184,7 +185,7 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// Address label missing, but added in relabelling.
 		{
-			in: labels.FromStrings("custom", "host:1234"),
+			in: model.LabelSet{"custom": "host:1234"},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -222,7 +223,7 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// Address label missing, but added in relabelling.
 		{
-			in: labels.FromStrings("custom", "host:1234"),
+			in: model.LabelSet{"custom": "host:1234"},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -260,10 +261,10 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// Invalid UTF-8 in label.
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel: "1.2.3.4:1000",
 				"custom":           "\xbd",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -277,10 +278,10 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// Invalid duration in interval label.
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel:        "1.2.3.4:1000",
 				model.ScrapeIntervalLabel: "2notseconds",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -294,10 +295,10 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// Invalid duration in timeout label.
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel:       "1.2.3.4:1000",
 				model.ScrapeTimeoutLabel: "2notseconds",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -311,10 +312,10 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// 0 interval in timeout label.
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel:        "1.2.3.4:1000",
 				model.ScrapeIntervalLabel: "0s",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -328,10 +329,10 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// 0 duration in timeout label.
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel:       "1.2.3.4:1000",
 				model.ScrapeTimeoutLabel: "0s",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -345,11 +346,11 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// Timeout less than interval.
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel:        "1.2.3.4:1000",
 				model.ScrapeIntervalLabel: "1s",
 				model.ScrapeTimeoutLabel:  "2s",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -363,9 +364,9 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// Don't attach default port.
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel: "1.2.3.4",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -393,9 +394,9 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// verify that the default port is not removed (http).
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel: "1.2.3.4:80",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "http",
 				MetricsPath: "/metrics",
@@ -423,9 +424,9 @@ func TestPopulateLabels(t *testing.T) {
 		},
 		// verify that the default port is not removed (https).
 		{
-			in: labels.FromMap(map[string]string{
+			in: model.LabelSet{
 				model.AddressLabel: "1.2.3.4:443",
-			}),
+			},
 			cfg: &config.ScrapeConfig{
 				Scheme:      "https",
 				MetricsPath: "/metrics",
@@ -453,17 +454,18 @@
 		},
 	}
 	for _, c := range cases {
-		in := c.in.Copy()
-		res, orig, err := PopulateLabels(labels.NewBuilder(c.in), c.cfg)
+		in := maps.Clone(c.in)
+		lb := labels.NewBuilder(labels.EmptyLabels())
+		res, err := PopulateLabels(lb, c.cfg, c.in, nil)
 		if c.err != "" {
 			require.EqualError(t, err, c.err)
 		} else {
 			require.NoError(t, err)
+			testutil.RequireEqual(t, c.res, res)
+			PopulateDiscoveredLabels(lb, c.cfg, c.in, nil)
+			testutil.RequireEqual(t, c.resOrig, lb.Labels())
 		}
-		require.Equal(t, c.in, in)
-		testutil.RequireEqual(t, c.res, res)
-		testutil.RequireEqual(t, c.resOrig, orig)
+		require.Equal(t, c.in, in) // Check this wasn't altered by PopulateLabels().
 	}
 }
 
@@ -450,7 +450,7 @@ func (sp *scrapePool) Sync(tgs []*targetgroup.Group) {
 		switch {
 		case nonEmpty:
 			all = append(all, t)
-		case !t.discoveredLabels.IsEmpty():
+		default:
 			if sp.config.KeepDroppedTargets == 0 || uint(len(sp.droppedTargets)) < sp.config.KeepDroppedTargets {
 				sp.droppedTargets = append(sp.droppedTargets, t)
 			}
@@ -553,9 +553,9 @@ func (sp *scrapePool) sync(targets []*Target) {
 			if _, ok := uniqueLoops[hash]; !ok {
 				uniqueLoops[hash] = nil
 			}
-			// Need to keep the most updated labels information
-			// for displaying it in the Service Discovery web page.
-			sp.activeTargets[hash].SetDiscoveredLabels(t.DiscoveredLabels())
+			// Need to keep the most updated ScrapeConfig for
+			// displaying labels in the Service Discovery web page.
+			sp.activeTargets[hash].SetScrapeConfig(sp.config, t.tLabels, t.tgLabels)
 		}
 	}
 
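The `keep_dropped_targets` interplay mentioned in the commit message is visible in the Sync hunk above: a dropped target is now cheap to retain, since it carries only a ScrapeConfig pointer and the raw target/group label sets, and retention is bounded. A minimal restatement of that bound (per the condition in the hunk, 0 means unlimited retention; `sp.droppedTargetsCount`, which appears in the tests below, tracks the total regardless of retention):

	// Retain at most KeepDroppedTargets dropped targets (0 means no limit).
	if sp.config.KeepDroppedTargets == 0 || uint(len(sp.droppedTargets)) < sp.config.KeepDroppedTargets {
		sp.droppedTargets = append(sp.droppedTargets, t)
	}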
@@ -212,7 +212,8 @@ func TestDroppedTargetsList(t *testing.T) {
 	sp.Sync(tgs)
 	require.Len(t, sp.droppedTargets, expectedLength)
 	require.Equal(t, expectedLength, sp.droppedTargetsCount)
-	require.Equal(t, expectedLabelSetString, sp.droppedTargets[0].DiscoveredLabels().String())
+	lb := labels.NewBuilder(labels.EmptyLabels())
+	require.Equal(t, expectedLabelSetString, sp.droppedTargets[0].DiscoveredLabels(lb).String())
 
 	// Check that count is still correct when we don't retain all dropped targets.
 	sp.config.KeepDroppedTargets = 1
@@ -235,16 +236,19 @@ func TestDiscoveredLabelsUpdate(t *testing.T) {
 	}
 	sp.activeTargets = make(map[uint64]*Target)
 	t1 := &Target{
-		discoveredLabels: labels.FromStrings("label", "name"),
+		tLabels:      model.LabelSet{"label": "name"},
+		scrapeConfig: sp.config,
 	}
 	sp.activeTargets[t1.hash()] = t1
 
 	t2 := &Target{
-		discoveredLabels: labels.FromStrings("labelNew", "nameNew"),
+		tLabels:      model.LabelSet{"labelNew": "nameNew"},
+		scrapeConfig: sp.config,
 	}
 	sp.sync([]*Target{t2})
 
-	require.Equal(t, t2.DiscoveredLabels(), sp.activeTargets[t1.hash()].DiscoveredLabels())
+	lb := labels.NewBuilder(labels.EmptyLabels())
+	require.Equal(t, t2.DiscoveredLabels(lb), sp.activeTargets[t1.hash()].DiscoveredLabels(lb))
 }
 
 type testLoop struct {
@@ -309,7 +313,8 @@ func TestScrapePoolStop(t *testing.T) {
 
 	for i := 0; i < numTargets; i++ {
 		t := &Target{
 			labels: labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i)),
+			scrapeConfig: &config.ScrapeConfig{},
 		}
 		l := &testLoop{}
 		d := time.Duration((i+1)*20) * time.Millisecond
@@ -394,8 +399,8 @@ func TestScrapePoolReload(t *testing.T) {
 	for i := 0; i < numTargets; i++ {
 		labels := labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i))
 		t := &Target{
 			labels: labels,
-			discoveredLabels: labels,
+			scrapeConfig: &config.ScrapeConfig{},
 		}
 		l := &testLoop{}
 		d := time.Duration((i+1)*20) * time.Millisecond
@@ -2689,6 +2694,7 @@ func TestTargetScraperScrapeOK(t *testing.T) {
 				model.SchemeLabel, serverURL.Scheme,
 				model.AddressLabel, serverURL.Host,
 			),
+			scrapeConfig: &config.ScrapeConfig{},
 		},
 		client:  http.DefaultClient,
 		timeout: configTimeout,
@@ -2739,6 +2745,7 @@ func TestTargetScrapeScrapeCancel(t *testing.T) {
 				model.SchemeLabel, serverURL.Scheme,
 				model.AddressLabel, serverURL.Host,
 			),
+			scrapeConfig: &config.ScrapeConfig{},
 		},
 		client:       http.DefaultClient,
 		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
@@ -2794,6 +2801,7 @@ func TestTargetScrapeScrapeNotFound(t *testing.T) {
 				model.SchemeLabel, serverURL.Scheme,
 				model.AddressLabel, serverURL.Host,
 			),
+			scrapeConfig: &config.ScrapeConfig{},
 		},
 		client:       http.DefaultClient,
 		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
@@ -2837,6 +2845,7 @@ func TestTargetScraperBodySizeLimit(t *testing.T) {
 				model.SchemeLabel, serverURL.Scheme,
 				model.AddressLabel, serverURL.Host,
 			),
+			scrapeConfig: &config.ScrapeConfig{},
 		},
 		client:        http.DefaultClient,
 		bodySizeLimit: bodySizeLimit,
@@ -3107,7 +3116,8 @@ func TestReuseScrapeCache(t *testing.T) {
 		}
 		sp, _ = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
 		t1    = &Target{
-			discoveredLabels: labels.FromStrings("labelNew", "nameNew", "labelNew1", "nameNew1", "labelNew2", "nameNew2"),
+			labels:       labels.FromStrings("labelNew", "nameNew", "labelNew1", "nameNew1", "labelNew2", "nameNew2"),
+			scrapeConfig: &config.ScrapeConfig{},
 		}
 		proxyURL, _ = url.Parse("http://localhost:2128")
 	)
@@ -3291,7 +3301,8 @@ func TestReuseCacheRace(t *testing.T) {
 		buffers = pool.New(1e3, 100e6, 3, func(sz int) interface{} { return make([]byte, 0, sz) })
 		sp, _   = newScrapePool(cfg, app, 0, nil, buffers, &Options{}, newTestScrapeMetrics(t))
 		t1      = &Target{
-			discoveredLabels: labels.FromStrings("labelNew", "nameNew"),
+			labels:       labels.FromStrings("labelNew", "nameNew"),
+			scrapeConfig: &config.ScrapeConfig{},
 		}
 	)
 	defer sp.stop()
@@ -4475,7 +4486,9 @@ func BenchmarkTargetScraperGzip(b *testing.B) {
 				model.SchemeLabel, serverURL.Scheme,
 				model.AddressLabel, serverURL.Host,
 			),
-			params: url.Values{"count": []string{strconv.Itoa(scenario.metricsCount)}},
+			scrapeConfig: &config.ScrapeConfig{
+				Params: url.Values{"count": []string{strconv.Itoa(scenario.metricsCount)}},
+			},
 		},
 		client:  client,
 		timeout: time.Second,
scrape/target.go (110 changed lines)
@@ -45,12 +45,12 @@ const (
 
 // Target refers to a singular HTTP or HTTPS endpoint.
 type Target struct {
-	// Labels before any processing.
-	discoveredLabels labels.Labels
 	// Any labels that are added to this target and its metrics.
 	labels labels.Labels
-	// Additional URL parameters that are part of the target URL.
-	params url.Values
+	// ScrapeConfig used to create this target.
+	scrapeConfig *config.ScrapeConfig
+	// Target and TargetGroup labels used to create this target.
+	tLabels, tgLabels model.LabelSet
 
 	mtx       sync.RWMutex
 	lastError error
@@ -61,12 +61,13 @@ type Target struct {
 }
 
 // NewTarget creates a reasonably configured target for querying.
-func NewTarget(labels, discoveredLabels labels.Labels, params url.Values) *Target {
+func NewTarget(labels labels.Labels, scrapeConfig *config.ScrapeConfig, tLabels, tgLabels model.LabelSet) *Target {
 	return &Target{
 		labels: labels,
-		discoveredLabels: discoveredLabels,
-		params:           params,
-		health:           HealthUnknown,
+		tLabels:      tLabels,
+		tgLabels:     tgLabels,
+		scrapeConfig: scrapeConfig,
+		health:       HealthUnknown,
 	}
 }
 
@@ -168,11 +169,11 @@ func (t *Target) offset(interval time.Duration, offsetSeed uint64) time.Duration
 }
 
 // Labels returns a copy of the set of all public labels of the target.
-func (t *Target) Labels(b *labels.ScratchBuilder) labels.Labels {
-	b.Reset()
+func (t *Target) Labels(b *labels.Builder) labels.Labels {
+	b.Reset(labels.EmptyLabels())
 	t.labels.Range(func(l labels.Label) {
 		if !strings.HasPrefix(l.Name, model.ReservedLabelPrefix) {
-			b.Add(l.Name, l.Value)
+			b.Set(l.Name, l.Value)
 		}
 	})
 	return b.Labels()
@@ -188,24 +189,31 @@ func (t *Target) LabelsRange(f func(l labels.Label)) {
 }
 
 // DiscoveredLabels returns a copy of the target's labels before any processing.
-func (t *Target) DiscoveredLabels() labels.Labels {
+func (t *Target) DiscoveredLabels(lb *labels.Builder) labels.Labels {
 	t.mtx.Lock()
-	defer t.mtx.Unlock()
-	return t.discoveredLabels.Copy()
+	cfg, tLabels, tgLabels := t.scrapeConfig, t.tLabels, t.tgLabels
+	t.mtx.Unlock()
+	PopulateDiscoveredLabels(lb, cfg, tLabels, tgLabels)
+	return lb.Labels()
 }
 
-// SetDiscoveredLabels sets new DiscoveredLabels.
-func (t *Target) SetDiscoveredLabels(l labels.Labels) {
+// SetScrapeConfig sets new ScrapeConfig.
+func (t *Target) SetScrapeConfig(scrapeConfig *config.ScrapeConfig, tLabels, tgLabels model.LabelSet) {
 	t.mtx.Lock()
 	defer t.mtx.Unlock()
-	t.discoveredLabels = l
+	t.scrapeConfig = scrapeConfig
+	t.tLabels = tLabels
+	t.tgLabels = tgLabels
 }
 
 // URL returns a copy of the target's URL.
 func (t *Target) URL() *url.URL {
+	t.mtx.Lock()
+	configParams := t.scrapeConfig.Params
+	t.mtx.Unlock()
 	params := url.Values{}
 
-	for k, v := range t.params {
+	for k, v := range configParams {
 		params[k] = make([]string, len(v))
 		copy(params[k], v)
 	}
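Note how `DiscoveredLabels` now takes a caller-supplied builder and recomputes the label set from the retained ScrapeConfig and target/group label sets, instead of returning a stored copy. A caller-side sketch (the loop and the `droppedTargets` slice are hypothetical; the method signature is taken from this diff):

	lb := labels.NewBuilder(labels.EmptyLabels())
	for _, t := range droppedTargets { // hypothetical slice of *scrape.Target
		// Recomputed on demand from scrapeConfig + tLabels/tgLabels; nothing stored per target.
		fmt.Println(t.DiscoveredLabels(lb).String())
	}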
@@ -420,10 +428,19 @@ func (app *maxSchemaAppender) AppendHistogram(ref storage.SeriesRef, lset labels
 	return ref, nil
 }
 
-// PopulateLabels builds a label set from the given label set and scrape configuration.
-// It returns a label set before relabeling was applied as the second return value.
-// Returns the original discovered label set found before relabelling was applied if the target is dropped during relabeling.
-func PopulateLabels(lb *labels.Builder, cfg *config.ScrapeConfig) (res, orig labels.Labels, err error) {
+// PopulateDiscoveredLabels sets base labels on lb from target and group labels and scrape configuration, before relabeling.
+func PopulateDiscoveredLabels(lb *labels.Builder, cfg *config.ScrapeConfig, tLabels, tgLabels model.LabelSet) {
+	lb.Reset(labels.EmptyLabels())
+
+	for ln, lv := range tLabels {
+		lb.Set(string(ln), string(lv))
+	}
+	for ln, lv := range tgLabels {
+		if _, ok := tLabels[ln]; !ok {
+			lb.Set(string(ln), string(lv))
+		}
+	}
 
 	// Copy labels into the labelset for the target if they are not set already.
 	scrapeLabels := []labels.Label{
 		{Name: model.JobLabel, Value: cfg.JobName},
@@ -444,44 +461,49 @@ func PopulateLabels(lb *labels.Builder, cfg *config.ScrapeConfig) (res, orig lab
 			lb.Set(name, v[0])
 		}
 	}
+}
 
-	preRelabelLabels := lb.Labels()
+// PopulateLabels builds labels from target and group labels and scrape configuration,
+// performs defined relabeling, checks validity, and adds Prometheus standard labels such as 'instance'.
+// A return of empty labels and nil error means the target was dropped by relabeling.
+func PopulateLabels(lb *labels.Builder, cfg *config.ScrapeConfig, tLabels, tgLabels model.LabelSet) (res labels.Labels, err error) {
+	PopulateDiscoveredLabels(lb, cfg, tLabels, tgLabels)
 	keep := relabel.ProcessBuilder(lb, cfg.RelabelConfigs...)
 
 	// Check if the target was dropped.
 	if !keep {
-		return labels.EmptyLabels(), preRelabelLabels, nil
+		return labels.EmptyLabels(), nil
 	}
 	if v := lb.Get(model.AddressLabel); v == "" {
-		return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("no address")
+		return labels.EmptyLabels(), errors.New("no address")
 	}
 
 	addr := lb.Get(model.AddressLabel)
 
 	if err := config.CheckTargetAddress(model.LabelValue(addr)); err != nil {
-		return labels.EmptyLabels(), labels.EmptyLabels(), err
+		return labels.EmptyLabels(), err
 	}
 
 	interval := lb.Get(model.ScrapeIntervalLabel)
 	intervalDuration, err := model.ParseDuration(interval)
 	if err != nil {
-		return labels.EmptyLabels(), labels.EmptyLabels(), fmt.Errorf("error parsing scrape interval: %w", err)
+		return labels.EmptyLabels(), fmt.Errorf("error parsing scrape interval: %w", err)
 	}
 	if time.Duration(intervalDuration) == 0 {
-		return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("scrape interval cannot be 0")
+		return labels.EmptyLabels(), errors.New("scrape interval cannot be 0")
 	}
 
 	timeout := lb.Get(model.ScrapeTimeoutLabel)
 	timeoutDuration, err := model.ParseDuration(timeout)
 	if err != nil {
-		return labels.EmptyLabels(), labels.EmptyLabels(), fmt.Errorf("error parsing scrape timeout: %w", err)
+		return labels.EmptyLabels(), fmt.Errorf("error parsing scrape timeout: %w", err)
 	}
 	if time.Duration(timeoutDuration) == 0 {
-		return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("scrape timeout cannot be 0")
+		return labels.EmptyLabels(), errors.New("scrape timeout cannot be 0")
 	}
 
 	if timeoutDuration > intervalDuration {
-		return labels.EmptyLabels(), labels.EmptyLabels(), fmt.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
+		return labels.EmptyLabels(), fmt.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
 	}
 
 	// Meta labels are deleted after relabelling. Other internal labels propagate to
@@ -506,9 +528,9 @@ func PopulateLabels(lb *labels.Builder, cfg *config.ScrapeConfig) (res, orig lab
 		return nil
 	})
 	if err != nil {
-		return labels.EmptyLabels(), labels.EmptyLabels(), err
+		return labels.EmptyLabels(), err
 	}
-	return res, preRelabelLabels, nil
+	return res, nil
 }
 
 // TargetsFromGroup builds targets based on the given TargetGroup and config.
@@ -516,24 +538,12 @@ func TargetsFromGroup(tg *targetgroup.Group, cfg *config.ScrapeConfig, targets [
 	targets = targets[:0]
 	failures := []error{}
 
-	for i, tlset := range tg.Targets {
-		lb.Reset(labels.EmptyLabels())
-
-		for ln, lv := range tlset {
-			lb.Set(string(ln), string(lv))
-		}
-		for ln, lv := range tg.Labels {
-			if _, ok := tlset[ln]; !ok {
-				lb.Set(string(ln), string(lv))
-			}
-		}
-
-		lset, origLabels, err := PopulateLabels(lb, cfg)
+	for i, tLabels := range tg.Targets {
+		lset, err := PopulateLabels(lb, cfg, tLabels, tg.Labels)
 		if err != nil {
 			failures = append(failures, fmt.Errorf("instance %d in group %s: %w", i, tg, err))
-		}
-		if !lset.IsEmpty() || !origLabels.IsEmpty() {
-			targets = append(targets, NewTarget(lset, origLabels, cfg.Params))
+		} else {
+			targets = append(targets, NewTarget(lset, cfg, tLabels, tg.Labels))
 		}
 	}
 	return targets, failures
@@ -43,8 +43,8 @@ const (
 func TestTargetLabels(t *testing.T) {
 	target := newTestTarget("example.com:80", 0, labels.FromStrings("job", "some_job", "foo", "bar"))
 	want := labels.FromStrings(model.JobLabel, "some_job", "foo", "bar")
-	b := labels.NewScratchBuilder(0)
-	got := target.Labels(&b)
+	b := labels.NewBuilder(labels.EmptyLabels())
+	got := target.Labels(b)
 	require.Equal(t, want, got)
 	i := 0
 	target.LabelsRange(func(l labels.Label) {
@@ -103,9 +103,11 @@ func TestTargetOffset(t *testing.T) {
 }
 
 func TestTargetURL(t *testing.T) {
-	params := url.Values{
-		"abc": []string{"foo", "bar", "baz"},
-		"xyz": []string{"hoo"},
+	scrapeConfig := &config.ScrapeConfig{
+		Params: url.Values{
+			"abc": []string{"foo", "bar", "baz"},
+			"xyz": []string{"hoo"},
+		},
 	}
 	labels := labels.FromMap(map[string]string{
 		model.AddressLabel: "example.com:1234",
@@ -114,7 +116,7 @@ func TestTargetURL(t *testing.T) {
 		"__param_abc": "overwrite",
 		"__param_cde": "huu",
 	})
-	target := NewTarget(labels, labels, params)
+	target := NewTarget(labels, scrapeConfig, nil, nil)
 
 	// The reserved labels are concatenated into a full URL. The first value for each
 	// URL query parameter can be set/modified via labels as well.
@@ -139,7 +141,7 @@ func newTestTarget(targetURL string, _ time.Duration, lbls labels.Labels) *Targe
 	lb.Set(model.AddressLabel, strings.TrimPrefix(targetURL, "http://"))
 	lb.Set(model.MetricsPathLabel, "/metrics")
 
-	return &Target{labels: lb.Labels()}
+	return &Target{labels: lb.Labels(), scrapeConfig: &config.ScrapeConfig{}}
 }
 
 func TestNewHTTPBearerToken(t *testing.T) {
@@ -1083,12 +1083,12 @@ func (api *API) targets(r *http.Request) apiFuncResult {
 	showActive := state == "" || state == "any" || state == "active"
 	showDropped := state == "" || state == "any" || state == "dropped"
 	res := &TargetDiscovery{}
+	builder := labels.NewBuilder(labels.EmptyLabels())
 
 	if showActive {
 		targetsActive := api.targetRetriever(r.Context()).TargetsActive()
 		activeKeys, numTargets := sortKeys(targetsActive)
 		res.ActiveTargets = make([]*Target, 0, numTargets)
-		builder := labels.NewScratchBuilder(0)
 
 		for _, key := range activeKeys {
 			if scrapePool != "" && key != scrapePool {
@@ -1104,8 +1104,8 @@ func (api *API) targets(r *http.Request) apiFuncResult {
 			globalURL, err := getGlobalURL(target.URL(), api.globalURLOptions)
 
 			res.ActiveTargets = append(res.ActiveTargets, &Target{
-				DiscoveredLabels: target.DiscoveredLabels(),
-				Labels:           target.Labels(&builder),
+				DiscoveredLabels: target.DiscoveredLabels(builder),
+				Labels:           target.Labels(builder),
 				ScrapePool:       key,
 				ScrapeURL:        target.URL().String(),
 				GlobalURL:        globalURL.String(),
@@ -1143,7 +1143,7 @@ func (api *API) targets(r *http.Request) apiFuncResult {
 			}
 			for _, target := range targetsDropped[key] {
 				res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{
-					DiscoveredLabels: target.DiscoveredLabels(),
+					DiscoveredLabels: target.DiscoveredLabels(builder),
 				})
 			}
 		}
@@ -1181,7 +1181,7 @@ func (api *API) targetMetadata(r *http.Request) apiFuncResult {
 		}
 	}
 
-	builder := labels.NewScratchBuilder(0)
+	builder := labels.NewBuilder(labels.EmptyLabels())
 	metric := r.FormValue("metric")
 	res := []metricMetadata{}
 	for _, tt := range api.targetRetriever(r.Context()).TargetsActive() {
@@ -1189,7 +1189,7 @@ func (api *API) targetMetadata(r *http.Request) apiFuncResult {
 		if limit >= 0 && len(res) >= limit {
 			break
 		}
-		targetLabels := t.Labels(&builder)
+		targetLabels := t.Labels(builder)
 		// Filter targets that don't satisfy the label matchers.
 		if matchTarget != "" && !matchLabels(targetLabels, matchers) {
 			continue
@@ -103,12 +103,12 @@ type testTargetRetriever struct {
 }
 
 type testTargetParams struct {
 	Identifier string
 	Labels     labels.Labels
-	DiscoveredLabels labels.Labels
+	targetLabels model.LabelSet
 	Params  url.Values
 	Reports []*testReport
 	Active  bool
 }
 
 type testReport struct {
@@ -124,7 +124,7 @@ func newTestTargetRetriever(targetsInfo []*testTargetParams) *testTargetRetrieve
 	droppedTargets = make(map[string][]*scrape.Target)
 
 	for _, t := range targetsInfo {
-		nt := scrape.NewTarget(t.Labels, t.DiscoveredLabels, t.Params)
+		nt := scrape.NewTarget(t.Labels, &config.ScrapeConfig{Params: t.Params}, t.targetLabels, nil)
 
 		for _, r := range t.Reports {
 			nt.Report(r.Start, r.Duration, r.Error)
@@ -1004,10 +1004,9 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
 				model.ScrapeIntervalLabel: "15s",
 				model.ScrapeTimeoutLabel:  "5s",
 			}),
-			DiscoveredLabels: labels.EmptyLabels(),
-			Params:           url.Values{},
-			Reports:          []*testReport{{scrapeStart, 70 * time.Millisecond, nil}},
-			Active:           true,
+			Params:  url.Values{},
+			Reports: []*testReport{{scrapeStart, 70 * time.Millisecond, nil}},
+			Active:  true,
 		},
 		{
 			Identifier: "blackbox",
@@ -1019,22 +1018,21 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
 				model.ScrapeIntervalLabel: "20s",
 				model.ScrapeTimeoutLabel:  "10s",
 			}),
-			DiscoveredLabels: labels.EmptyLabels(),
-			Params:           url.Values{"target": []string{"example.com"}},
-			Reports:          []*testReport{{scrapeStart, 100 * time.Millisecond, errors.New("failed")}},
-			Active:           true,
+			Params:  url.Values{"target": []string{"example.com"}},
+			Reports: []*testReport{{scrapeStart, 100 * time.Millisecond, errors.New("failed")}},
+			Active:  true,
 		},
 		{
 			Identifier: "blackbox",
 			Labels:     labels.EmptyLabels(),
-			DiscoveredLabels: labels.FromMap(map[string]string{
+			targetLabels: model.LabelSet{
 				model.SchemeLabel:         "http",
 				model.AddressLabel:        "http://dropped.example.com:9115",
 				model.MetricsPathLabel:    "/probe",
 				model.JobLabel:            "blackbox",
 				model.ScrapeIntervalLabel: "30s",
 				model.ScrapeTimeoutLabel:  "15s",
-			}),
+			},
 			Params: url.Values{},
 			Active: false,
 		},
@@ -1507,7 +1505,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 			response: &TargetDiscovery{
 				ActiveTargets: []*Target{
 					{
-						DiscoveredLabels: labels.FromStrings(),
+						DiscoveredLabels: labels.FromStrings("__param_target", "example.com", "__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
 						Labels:           labels.FromStrings("job", "blackbox"),
 						ScrapePool:       "blackbox",
 						ScrapeURL:        "http://localhost:9115/probe?target=example.com",
@@ -1520,7 +1518,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 						ScrapeTimeout: "10s",
 					},
 					{
-						DiscoveredLabels: labels.FromStrings(),
+						DiscoveredLabels: labels.FromStrings("__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
 						Labels:           labels.FromStrings("job", "test"),
 						ScrapePool:       "test",
 						ScrapeURL:        "http://example.com:8080/metrics",
@@ -1556,7 +1554,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 			response: &TargetDiscovery{
 				ActiveTargets: []*Target{
 					{
-						DiscoveredLabels: labels.FromStrings(),
+						DiscoveredLabels: labels.FromStrings("__param_target", "example.com", "__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
 						Labels:           labels.FromStrings("job", "blackbox"),
 						ScrapePool:       "blackbox",
 						ScrapeURL:        "http://localhost:9115/probe?target=example.com",
@@ -1569,7 +1567,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 						ScrapeTimeout: "10s",
 					},
 					{
-						DiscoveredLabels: labels.FromStrings(),
+						DiscoveredLabels: labels.FromStrings("__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
 						Labels:           labels.FromStrings("job", "test"),
 						ScrapePool:       "test",
 						ScrapeURL:        "http://example.com:8080/metrics",
@@ -1605,7 +1603,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 			response: &TargetDiscovery{
 				ActiveTargets: []*Target{
 					{
-						DiscoveredLabels: labels.FromStrings(),
+						DiscoveredLabels: labels.FromStrings("__param_target", "example.com", "__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
 						Labels:           labels.FromStrings("job", "blackbox"),
 						ScrapePool:       "blackbox",
 						ScrapeURL:        "http://localhost:9115/probe?target=example.com",
@@ -1618,7 +1616,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 						ScrapeTimeout: "10s",
 					},
 					{
-						DiscoveredLabels: labels.FromStrings(),
+						DiscoveredLabels: labels.FromStrings("__scrape_interval__", "0s", "__scrape_timeout__", "0s"),
 						Labels:           labels.FromStrings("job", "test"),
 						ScrapePool:       "test",
 						ScrapeURL:        "http://example.com:8080/metrics",