Configure Scrape Interval and Timeout Via Relabeling (#8911)

* Configure scrape interval and timeout with labels

Signed-off-by: Levi Harrison <git@leviharrison.dev>
Levi Harrison 2021-08-31 11:37:32 -04:00 committed by GitHub
parent 6a31b28ca9
commit 70f597b033
15 changed files with 578 additions and 134 deletions


@@ -2172,6 +2172,9 @@ it was not set during relabeling. The `__scheme__` and `__metrics_path__` labels
 are set to the scheme and metrics path of the target respectively. The `__param_<name>`
 label is set to the value of the first passed URL parameter called `<name>`.
+The `__scrape_interval__` and `__scrape_timeout__` labels are set to the target's
+interval and timeout. This is **experimental** and could change in the future.
 Additional labels prefixed with `__meta_` may be available during the
 relabeling phase. They are set by the service discovery mechanism that provided
 the target and vary between mechanisms.
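Because `__scrape_interval__` and `__scrape_timeout__` are ordinary labels at relabeling time, the existing relabel machinery is all that is needed to override them per target. Below is a minimal, self-contained sketch (not part of this change) of a rule that bumps a 30s interval to 1m; the import paths are the `pkg/` ones used at the time of this commit, and the concrete durations and address are made up for illustration:

```go
package main

import (
	"fmt"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/prometheus/prometheus/pkg/relabel"
)

func main() {
	// A target as it enters relabeling: the two new labels already carry the
	// values from the scrape config (assumed here to be 30s / 10s).
	lset := labels.FromMap(map[string]string{
		model.AddressLabel:        "db1.example.com:9104", // hypothetical target
		model.ScrapeIntervalLabel: "30s",
		model.ScrapeTimeoutLabel:  "10s",
	})

	// Illustrative rule: scrape such targets every 1m instead of every 30s.
	rule := &relabel.Config{
		SourceLabels: model.LabelNames{model.ScrapeIntervalLabel},
		Regex:        relabel.MustNewRegexp("30s"),
		Replacement:  "1m",
		TargetLabel:  model.ScrapeIntervalLabel,
		Action:       relabel.Replace,
	}

	out := relabel.Process(lset, rule)
	fmt.Println(out.Get(model.ScrapeIntervalLabel)) // "1m"
}
```

The new `TestTargetScrapeIntervalAndTimeoutRelabel` test further down drives the same mechanism end-to-end through a scrape pool.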


@@ -502,7 +502,9 @@ $ curl http://localhost:9090/api/v1/targets
 "lastError": "",
 "lastScrape": "2017-01-17T15:07:44.723715405+01:00",
 "lastScrapeDuration": 0.050688943,
-"health": "up"
+"health": "up",
+"scrapeInterval": "1m",
+"scrapeTimeout": "10s"
 }
 ],
 "droppedTargets": [
@@ -511,6 +513,8 @@ $ curl http://localhost:9090/api/v1/targets
 "__address__": "127.0.0.1:9100",
 "__metrics_path__": "/metrics",
 "__scheme__": "http",
+"__scrape_interval__": "1m",
+"__scrape_timeout__": "10s",
 "job": "node"
 },
 }
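Consumers of the targets endpoint receive the two new fields as plain duration strings. A small, hypothetical client is sketched below; the server address is an assumption, and `scrapeUrl` is an existing field of the response that is not shown in the excerpt above:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// Only the fields needed for this sketch are modelled.
type activeTarget struct {
	ScrapeURL      string `json:"scrapeUrl"`
	Health         string `json:"health"`
	ScrapeInterval string `json:"scrapeInterval"` // new in this change
	ScrapeTimeout  string `json:"scrapeTimeout"`  // new in this change
}

type targetsResponse struct {
	Data struct {
		ActiveTargets []activeTarget `json:"activeTargets"`
	} `json:"data"`
}

func main() {
	resp, err := http.Get("http://localhost:9090/api/v1/targets") // assumed address
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var tr targetsResponse
	if err := json.NewDecoder(resp.Body).Decode(&tr); err != nil {
		panic(err)
	}
	for _, t := range tr.Data.ActiveTargets {
		fmt.Printf("%s: health=%s interval=%s timeout=%s\n",
			t.ScrapeURL, t.Health, t.ScrapeInterval, t.ScrapeTimeout)
	}
}
```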


@@ -47,6 +47,8 @@ func TestPopulateLabels(t *testing.T) {
 Scheme: "https",
 MetricsPath: "/metrics",
 JobName: "job",
+ScrapeInterval: model.Duration(time.Second),
+ScrapeTimeout: model.Duration(time.Second),
 },
 res: labels.FromMap(map[string]string{
 model.AddressLabel: "1.2.3.4:1000",
@@ -54,6 +56,8 @@ func TestPopulateLabels(t *testing.T) {
 model.SchemeLabel: "https",
 model.MetricsPathLabel: "/metrics",
 model.JobLabel: "job",
+model.ScrapeIntervalLabel: "1s",
+model.ScrapeTimeoutLabel: "1s",
 "custom": "value",
 }),
 resOrig: labels.FromMap(map[string]string{
@@ -62,6 +66,8 @@ func TestPopulateLabels(t *testing.T) {
 model.MetricsPathLabel: "/metrics",
 model.JobLabel: "job",
 "custom": "value",
+model.ScrapeIntervalLabel: "1s",
+model.ScrapeTimeoutLabel: "1s",
 }),
 },
 // Pre-define/overwrite scrape config labels.
@@ -72,11 +78,15 @@ func TestPopulateLabels(t *testing.T) {
 model.SchemeLabel: "http",
 model.MetricsPathLabel: "/custom",
 model.JobLabel: "custom-job",
+model.ScrapeIntervalLabel: "2s",
+model.ScrapeTimeoutLabel: "2s",
 }),
 cfg: &config.ScrapeConfig{
 Scheme: "https",
 MetricsPath: "/metrics",
 JobName: "job",
+ScrapeInterval: model.Duration(time.Second),
+ScrapeTimeout: model.Duration(time.Second),
 },
 res: labels.FromMap(map[string]string{
 model.AddressLabel: "1.2.3.4:80",
@@ -84,12 +94,16 @@ func TestPopulateLabels(t *testing.T) {
 model.SchemeLabel: "http",
 model.MetricsPathLabel: "/custom",
 model.JobLabel: "custom-job",
+model.ScrapeIntervalLabel: "2s",
+model.ScrapeTimeoutLabel: "2s",
 }),
 resOrig: labels.FromMap(map[string]string{
 model.AddressLabel: "1.2.3.4",
 model.SchemeLabel: "http",
 model.MetricsPathLabel: "/custom",
 model.JobLabel: "custom-job",
+model.ScrapeIntervalLabel: "2s",
+model.ScrapeTimeoutLabel: "2s",
 }),
 },
 // Provide instance label. HTTPS port default for IPv6.
@@ -102,6 +116,8 @@ func TestPopulateLabels(t *testing.T) {
 Scheme: "https",
 MetricsPath: "/metrics",
 JobName: "job",
+ScrapeInterval: model.Duration(time.Second),
+ScrapeTimeout: model.Duration(time.Second),
 },
 res: labels.FromMap(map[string]string{
 model.AddressLabel: "[::1]:443",
@@ -109,6 +125,8 @@ func TestPopulateLabels(t *testing.T) {
 model.SchemeLabel: "https",
 model.MetricsPathLabel: "/metrics",
 model.JobLabel: "job",
+model.ScrapeIntervalLabel: "1s",
+model.ScrapeTimeoutLabel: "1s",
 }),
 resOrig: labels.FromMap(map[string]string{
 model.AddressLabel: "[::1]",
@@ -116,6 +134,8 @@ func TestPopulateLabels(t *testing.T) {
 model.SchemeLabel: "https",
 model.MetricsPathLabel: "/metrics",
 model.JobLabel: "job",
+model.ScrapeIntervalLabel: "1s",
+model.ScrapeTimeoutLabel: "1s",
 }),
 },
 // Address label missing.
@@ -125,6 +145,8 @@ func TestPopulateLabels(t *testing.T) {
 Scheme: "https",
 MetricsPath: "/metrics",
 JobName: "job",
+ScrapeInterval: model.Duration(time.Second),
+ScrapeTimeout: model.Duration(time.Second),
 },
 res: nil,
 resOrig: nil,
@@ -137,6 +159,8 @@ func TestPopulateLabels(t *testing.T) {
 Scheme: "https",
 MetricsPath: "/metrics",
 JobName: "job",
+ScrapeInterval: model.Duration(time.Second),
+ScrapeTimeout: model.Duration(time.Second),
 RelabelConfigs: []*relabel.Config{
 {
 Action: relabel.Replace,
@@ -153,12 +177,16 @@ func TestPopulateLabels(t *testing.T) {
 model.SchemeLabel: "https",
 model.MetricsPathLabel: "/metrics",
 model.JobLabel: "job",
+model.ScrapeIntervalLabel: "1s",
+model.ScrapeTimeoutLabel: "1s",
 "custom": "host:1234",
 }),
 resOrig: labels.FromMap(map[string]string{
 model.SchemeLabel: "https",
 model.MetricsPathLabel: "/metrics",
 model.JobLabel: "job",
+model.ScrapeIntervalLabel: "1s",
+model.ScrapeTimeoutLabel: "1s",
 "custom": "host:1234",
 }),
 },
@@ -169,6 +197,8 @@ func TestPopulateLabels(t *testing.T) {
 Scheme: "https",
 MetricsPath: "/metrics",
 JobName: "job",
+ScrapeInterval: model.Duration(time.Second),
+ScrapeTimeout: model.Duration(time.Second),
 RelabelConfigs: []*relabel.Config{
 {
 Action: relabel.Replace,
@@ -185,12 +215,16 @@ func TestPopulateLabels(t *testing.T) {
 model.SchemeLabel: "https",
 model.MetricsPathLabel: "/metrics",
 model.JobLabel: "job",
+model.ScrapeIntervalLabel: "1s",
+model.ScrapeTimeoutLabel: "1s",
 "custom": "host:1234",
 }),
 resOrig: labels.FromMap(map[string]string{
 model.SchemeLabel: "https",
 model.MetricsPathLabel: "/metrics",
 model.JobLabel: "job",
+model.ScrapeIntervalLabel: "1s",
+model.ScrapeTimeoutLabel: "1s",
 "custom": "host:1234",
 }),
 },
@@ -204,11 +238,99 @@ func TestPopulateLabels(t *testing.T) {
 Scheme: "https",
 MetricsPath: "/metrics",
 JobName: "job",
+ScrapeInterval: model.Duration(time.Second),
+ScrapeTimeout: model.Duration(time.Second),
 },
 res: nil,
 resOrig: nil,
 err: "invalid label value for \"custom\": \"\\xbd\"",
 },
// Invalid duration in interval label.
{
in: labels.FromMap(map[string]string{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "2notseconds",
}),
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
err: "error parsing scrape interval: not a valid duration string: \"2notseconds\"",
},
// Invalid duration in timeout label.
{
in: labels.FromMap(map[string]string{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeTimeoutLabel: "2notseconds",
}),
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
err: "error parsing scrape timeout: not a valid duration string: \"2notseconds\"",
},
// 0 duration in interval label.
{
in: labels.FromMap(map[string]string{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "0s",
}),
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
err: "scrape interval cannot be 0",
},
// 0 duration in timeout label.
{
in: labels.FromMap(map[string]string{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeTimeoutLabel: "0s",
}),
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
err: "scrape timeout cannot be 0",
},
// Timeout greater than interval.
{
in: labels.FromMap(map[string]string{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "1s",
model.ScrapeTimeoutLabel: "2s",
}),
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
err: "scrape timeout cannot be greater than scrape interval (\"2s\" > \"1s\")",
},
 }
 for _, c := range cases {
 in := c.in.Copy()


@@ -253,6 +253,8 @@ type scrapeLoopOptions struct {
 labelLimits *labelLimits
 honorLabels bool
 honorTimestamps bool
+interval time.Duration
+timeout time.Duration
 mrc []*relabel.Config
 cache *scrapeCache
 }
@@ -307,6 +309,8 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed
 jitterSeed,
 opts.honorTimestamps,
 opts.labelLimits,
+opts.interval,
+opts.timeout,
 )
 }
@@ -414,6 +418,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 } else {
 cache = newScrapeCache()
 }
 var (
 t = sp.activeTargets[fp]
 s = &targetScraper{Target: t, client: sp.client, timeout: timeout, bodySizeLimit: bodySizeLimit}
@@ -426,6 +431,8 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 honorTimestamps: honorTimestamps,
 mrc: mrc,
 cache: cache,
+interval: interval,
+timeout: timeout,
 })
 )
 wg.Add(1)
@@ -435,7 +442,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 wg.Done()
 newLoop.setForcedError(forcedErr)
-newLoop.run(interval, timeout, nil)
+newLoop.run(nil)
 }(oldLoop, newLoop)
 sp.loops[fp] = newLoop
@@ -509,6 +516,12 @@ func (sp *scrapePool) sync(targets []*Target) {
 hash := t.hash()
 if _, ok := sp.activeTargets[hash]; !ok {
+// The scrape interval and timeout labels are set to the config's values initially,
+// so whether changed via relabeling or not, they'll exist and hold the correct values
+// for every target.
+var err error
+interval, timeout, err = t.intervalAndTimeout(interval, timeout)
 s := &targetScraper{Target: t, client: sp.client, timeout: timeout, bodySizeLimit: bodySizeLimit}
 l := sp.newLoop(scrapeLoopOptions{
 target: t,
@@ -518,7 +531,12 @@ func (sp *scrapePool) sync(targets []*Target) {
 honorLabels: honorLabels,
 honorTimestamps: honorTimestamps,
 mrc: mrc,
+interval: interval,
+timeout: timeout,
 })
+if err != nil {
+l.setForcedError(err)
+}
 sp.activeTargets[hash] = t
 sp.loops[hash] = l
@@ -560,7 +578,7 @@ func (sp *scrapePool) sync(targets []*Target) {
 }
 for _, l := range uniqueLoops {
 if l != nil {
-go l.run(interval, timeout, nil)
+go l.run(nil)
 }
 }
 // Wait for all potentially stopped scrapers to terminate.
@@ -772,7 +790,7 @@ func (s *targetScraper) scrape(ctx context.Context, w io.Writer) (string, error)
 // A loop can run and be stopped again. It must not be reused after it was stopped.
 type loop interface {
-run(interval, timeout time.Duration, errc chan<- error)
+run(errc chan<- error)
 setForcedError(err error)
 stop()
 getCache() *scrapeCache
@@ -797,6 +815,8 @@ type scrapeLoop struct {
 forcedErr error
 forcedErrMtx sync.Mutex
 labelLimits *labelLimits
+interval time.Duration
+timeout time.Duration
 appender func(ctx context.Context) storage.Appender
 sampleMutator labelsMutator
@@ -1065,6 +1085,8 @@ func newScrapeLoop(ctx context.Context,
 jitterSeed uint64,
 honorTimestamps bool,
 labelLimits *labelLimits,
+interval time.Duration,
+timeout time.Duration,
 ) *scrapeLoop {
 if l == nil {
 l = log.NewNopLogger()
@@ -1088,15 +1110,17 @@ func newScrapeLoop(ctx context.Context,
 parentCtx: ctx,
 honorTimestamps: honorTimestamps,
 labelLimits: labelLimits,
+interval: interval,
+timeout: timeout,
 }
 sl.ctx, sl.cancel = context.WithCancel(ctx)
 return sl
 }
-func (sl *scrapeLoop) run(interval, timeout time.Duration, errc chan<- error) {
+func (sl *scrapeLoop) run(errc chan<- error) {
 select {
-case <-time.After(sl.scraper.offset(interval, sl.jitterSeed)):
+case <-time.After(sl.scraper.offset(sl.interval, sl.jitterSeed)):
 // Continue after a scraping offset.
 case <-sl.ctx.Done():
 close(sl.stopped)
@@ -1106,7 +1130,7 @@ func (sl *scrapeLoop) run(interval, timeout time.Duration, errc chan<- error) {
 var last time.Time
 alignedScrapeTime := time.Now().Round(0)
-ticker := time.NewTicker(interval)
+ticker := time.NewTicker(sl.interval)
 defer ticker.Stop()
 mainLoop:
@@ -1126,11 +1150,11 @@ mainLoop:
 // Calling Round ensures the time used is the wall clock, as otherwise .Sub
 // and .Add on time.Time behave differently (see time package docs).
 scrapeTime := time.Now().Round(0)
-if AlignScrapeTimestamps && interval > 100*scrapeTimestampTolerance {
+if AlignScrapeTimestamps && sl.interval > 100*scrapeTimestampTolerance {
 // For some reason, a tick might have been skipped, in which case we
 // would call alignedScrapeTime.Add(interval) multiple times.
-for scrapeTime.Sub(alignedScrapeTime) >= interval {
-alignedScrapeTime = alignedScrapeTime.Add(interval)
+for scrapeTime.Sub(alignedScrapeTime) >= sl.interval {
+alignedScrapeTime = alignedScrapeTime.Add(sl.interval)
 }
 // Align the scrape time if we are in the tolerance boundaries.
 if scrapeTime.Sub(alignedScrapeTime) <= scrapeTimestampTolerance {
@@ -1138,7 +1162,7 @@ mainLoop:
 }
 }
-last = sl.scrapeAndReport(interval, timeout, last, scrapeTime, errc)
+last = sl.scrapeAndReport(sl.interval, sl.timeout, last, scrapeTime, errc)
 select {
 case <-sl.parentCtx.Done():
@@ -1153,7 +1177,7 @@ mainLoop:
 close(sl.stopped)
 if !sl.disabledEndOfRunStalenessMarkers {
-sl.endOfRunStaleness(last, ticker, interval)
+sl.endOfRunStaleness(last, ticker, sl.interval)
 }
 }


@@ -93,7 +93,7 @@ func TestDroppedTargetsList(t *testing.T) {
 },
 }
 sp, _ = newScrapePool(cfg, app, 0, nil)
-expectedLabelSetString = "{__address__=\"127.0.0.1:9090\", job=\"dropMe\"}"
+expectedLabelSetString = "{__address__=\"127.0.0.1:9090\", __scrape_interval__=\"0s\", __scrape_timeout__=\"0s\", job=\"dropMe\"}"
 expectedLength = 1
 )
 sp.Sync(tgs)
@@ -146,14 +146,16 @@ type testLoop struct {
 forcedErr error
 forcedErrMtx sync.Mutex
 runOnce bool
+interval time.Duration
+timeout time.Duration
 }
-func (l *testLoop) run(interval, timeout time.Duration, errc chan<- error) {
+func (l *testLoop) run(errc chan<- error) {
 if l.runOnce {
 panic("loop must be started only once")
 }
 l.runOnce = true
-l.startFunc(interval, timeout, errc)
+l.startFunc(l.interval, l.timeout, errc)
 }
 func (l *testLoop) disableEndOfRunStalenessMarkers() {
@@ -250,7 +252,7 @@ func TestScrapePoolReload(t *testing.T) {
 // On starting to run, new loops created on reload check whether their preceding
 // equivalents have been stopped.
 newLoop := func(opts scrapeLoopOptions) loop {
-l := &testLoop{}
+l := &testLoop{interval: time.Duration(reloadCfg.ScrapeInterval), timeout: time.Duration(reloadCfg.ScrapeTimeout)}
 l.startFunc = func(interval, timeout time.Duration, errc chan<- error) {
 require.Equal(t, 3*time.Second, interval, "Unexpected scrape interval")
 require.Equal(t, 2*time.Second, timeout, "Unexpected scrape timeout")
@@ -276,8 +278,10 @@
 // one terminated.
 for i := 0; i < numTargets; i++ {
+labels := labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i))
 t := &Target{
-labels: labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i)),
+labels: labels,
+discoveredLabels: labels,
 }
 l := &testLoop{}
 l.stopFunc = func() {
@@ -342,7 +346,7 @@ func TestScrapePoolTargetLimit(t *testing.T) {
 activeTargets: map[uint64]*Target{},
 loops: map[uint64]loop{},
 newLoop: newLoop,
-logger: nil,
+logger: log.NewNopLogger(),
 client: http.DefaultClient,
 }
@@ -488,8 +492,8 @@ func TestScrapePoolAppender(t *testing.T) {
 }
 func TestScrapePoolRaces(t *testing.T) {
-interval, _ := model.ParseDuration("500ms")
-timeout, _ := model.ParseDuration("1s")
+interval, _ := model.ParseDuration("1s")
+timeout, _ := model.ParseDuration("500ms")
 newConfig := func() *config.ScrapeConfig {
 return &config.ScrapeConfig{ScrapeInterval: interval, ScrapeTimeout: timeout}
 }
@@ -583,6 +587,8 @@ func TestScrapeLoopStopBeforeRun(t *testing.T) {
 nil, nil, 0,
 true,
 nil,
+1,
+0,
 )
 // The scrape pool synchronizes on stopping scrape loops. However, new scrape
@@ -611,7 +617,7 @@ func TestScrapeLoopStopBeforeRun(t *testing.T) {
 runDone := make(chan struct{})
 go func() {
-sl.run(1, 0, nil)
+sl.run(nil)
 close(runDone)
 }()
@@ -648,6 +654,8 @@ func TestScrapeLoopStop(t *testing.T) {
 0,
 true,
 nil,
+10*time.Millisecond,
+time.Hour,
 )
 // Terminate loop after 2 scrapes.
@@ -664,7 +672,7 @@ func TestScrapeLoopStop(t *testing.T) {
 }
 go func() {
-sl.run(10*time.Millisecond, time.Hour, nil)
+sl.run(nil)
 signal <- struct{}{}
 }()
@@ -716,6 +724,8 @@ func TestScrapeLoopRun(t *testing.T) {
 0,
 true,
 nil,
+time.Second,
+time.Hour,
 )
 // The loop must terminate during the initial offset if the context
@@ -723,7 +733,7 @@ func TestScrapeLoopRun(t *testing.T) {
 scraper.offsetDur = time.Hour
 go func() {
-sl.run(time.Second, time.Hour, errc)
+sl.run(errc)
 signal <- struct{}{}
 }()
@@ -764,10 +774,12 @@ func TestScrapeLoopRun(t *testing.T) {
 0,
 true,
 nil,
+time.Second,
+100*time.Millisecond,
 )
 go func() {
-sl.run(time.Second, 100*time.Millisecond, errc)
+sl.run(errc)
 signal <- struct{}{}
 }()
@@ -816,6 +828,8 @@ func TestScrapeLoopForcedErr(t *testing.T) {
 0,
 true,
 nil,
+time.Second,
+time.Hour,
 )
 forcedErr := fmt.Errorf("forced err")
@@ -827,7 +841,7 @@ func TestScrapeLoopForcedErr(t *testing.T) {
 }
 go func() {
-sl.run(time.Second, time.Hour, errc)
+sl.run(errc)
 signal <- struct{}{}
 }()
@@ -867,6 +881,8 @@ func TestScrapeLoopMetadata(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 defer cancel()
@@ -917,6 +933,8 @@ func TestScrapeLoopSeriesAdded(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 defer cancel()
@@ -956,6 +974,8 @@ func TestScrapeLoopRunCreatesStaleMarkersOnFailedScrape(t *testing.T) {
 0,
 true,
 nil,
+10*time.Millisecond,
+time.Hour,
 )
 // Succeed once, several failures, then stop.
 numScrapes := 0
@@ -973,7 +993,7 @@ func TestScrapeLoopRunCreatesStaleMarkersOnFailedScrape(t *testing.T) {
 }
 go func() {
-sl.run(10*time.Millisecond, time.Hour, nil)
+sl.run(nil)
 signal <- struct{}{}
 }()
@@ -1011,6 +1031,8 @@ func TestScrapeLoopRunCreatesStaleMarkersOnParseFailure(t *testing.T) {
 0,
 true,
 nil,
+10*time.Millisecond,
+time.Hour,
 )
 // Succeed once, several failures, then stop.
@@ -1030,7 +1052,7 @@ func TestScrapeLoopRunCreatesStaleMarkersOnParseFailure(t *testing.T) {
 }
 go func() {
-sl.run(10*time.Millisecond, time.Hour, nil)
+sl.run(nil)
 signal <- struct{}{}
 }()
@@ -1070,6 +1092,8 @@ func TestScrapeLoopCache(t *testing.T) {
 0,
 true,
 nil,
+10*time.Millisecond,
+time.Hour,
 )
 numScrapes := 0
@@ -1106,7 +1130,7 @@ func TestScrapeLoopCache(t *testing.T) {
 }
 go func() {
-sl.run(10*time.Millisecond, time.Hour, nil)
+sl.run(nil)
 signal <- struct{}{}
 }()
@@ -1145,6 +1169,8 @@ func TestScrapeLoopCacheMemoryExhaustionProtection(t *testing.T) {
 0,
 true,
 nil,
+10*time.Millisecond,
+time.Hour,
 )
 numScrapes := 0
@@ -1164,7 +1190,7 @@ func TestScrapeLoopCacheMemoryExhaustionProtection(t *testing.T) {
 }
 go func() {
-sl.run(10*time.Millisecond, time.Hour, nil)
+sl.run(nil)
 signal <- struct{}{}
 }()
@@ -1252,6 +1278,8 @@ func TestScrapeLoopAppend(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 now := time.Now()
@@ -1294,6 +1322,8 @@ func TestScrapeLoopAppendCacheEntryButErrNotFound(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 fakeRef := uint64(1)
@@ -1344,6 +1374,8 @@ func TestScrapeLoopAppendSampleLimit(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 // Get the value of the Counter before performing the append.
@@ -1414,6 +1446,8 @@ func TestScrapeLoop_ChangingMetricString(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 now := time.Now()
@@ -1455,6 +1489,8 @@ func TestScrapeLoopAppendStaleness(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 now := time.Now()
@@ -1499,6 +1535,8 @@ func TestScrapeLoopAppendNoStalenessIfTimestamp(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 now := time.Now()
@@ -1601,6 +1639,8 @@ metric_total{n="2"} 2 # {t="2"} 2.0 20000
 0,
 true,
 nil,
+0,
+0,
 )
 now := time.Now()
@@ -1659,6 +1699,8 @@ func TestScrapeLoopAppendExemplarSeries(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 now := time.Now()
@@ -1704,6 +1746,8 @@ func TestScrapeLoopRunReportsTargetDownOnScrapeError(t *testing.T) {
 0,
 true,
 nil,
+10*time.Millisecond,
+time.Hour,
 )
 scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error {
@@ -1711,7 +1755,7 @@ func TestScrapeLoopRunReportsTargetDownOnScrapeError(t *testing.T) {
 return errors.New("scrape failed")
 }
-sl.run(10*time.Millisecond, time.Hour, nil)
+sl.run(nil)
 require.Equal(t, 0.0, appender.result[0].v, "bad 'up' value")
 }
@@ -1733,6 +1777,8 @@ func TestScrapeLoopRunReportsTargetDownOnInvalidUTF8(t *testing.T) {
 0,
 true,
 nil,
+10*time.Millisecond,
+time.Hour,
 )
 scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error {
@@ -1741,7 +1787,7 @@ func TestScrapeLoopRunReportsTargetDownOnInvalidUTF8(t *testing.T) {
 return nil
 }
-sl.run(10*time.Millisecond, time.Hour, nil)
+sl.run(nil)
 require.Equal(t, 0.0, appender.result[0].v, "bad 'up' value")
 }
@@ -1775,6 +1821,8 @@ func TestScrapeLoopAppendGracefullyIfAmendOrOutOfOrderOrOutOfBounds(t *testing.T
 0,
 true,
 nil,
+0,
+0,
 )
 now := time.Unix(1, 0)
@@ -1813,6 +1861,8 @@ func TestScrapeLoopOutOfBoundsTimeError(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 now := time.Now().Add(20 * time.Minute)
@@ -2064,6 +2114,8 @@ func TestScrapeLoop_RespectTimestamps(t *testing.T) {
 nil, 0,
 true,
 nil,
+0,
+0,
 )
 now := time.Now()
@@ -2098,6 +2150,8 @@ func TestScrapeLoop_DiscardTimestamps(t *testing.T) {
 nil, 0,
 false,
 nil,
+0,
+0,
 )
 now := time.Now()
@@ -2131,6 +2185,8 @@ func TestScrapeLoopDiscardDuplicateLabels(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 defer cancel()
@@ -2182,6 +2238,8 @@ func TestScrapeLoopDiscardUnnamedMetrics(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 defer cancel()
@@ -2400,6 +2458,8 @@ func TestScrapeAddFast(t *testing.T) {
 0,
 true,
 nil,
+0,
+0,
 )
 defer cancel()
@@ -2484,6 +2544,8 @@ func TestScrapeReportSingleAppender(t *testing.T) {
 0,
 true,
 nil,
+10*time.Millisecond,
+time.Hour,
 )
 numScrapes := 0
@@ -2498,7 +2560,7 @@ func TestScrapeReportSingleAppender(t *testing.T) {
 }
 go func() {
-sl.run(10*time.Millisecond, time.Hour, nil)
+sl.run(nil)
 signal <- struct{}{}
 }()
@@ -2613,6 +2675,8 @@ func TestScrapeLoopLabelLimit(t *testing.T) {
 0,
 true,
 &test.labelLimits,
+0,
+0,
 )
 slApp := sl.appender(context.Background())
@@ -2627,3 +2691,40 @@
 }
 }
 }
func TestTargetScrapeIntervalAndTimeoutRelabel(t *testing.T) {
interval, _ := model.ParseDuration("2s")
timeout, _ := model.ParseDuration("500ms")
config := &config.ScrapeConfig{
ScrapeInterval: interval,
ScrapeTimeout: timeout,
RelabelConfigs: []*relabel.Config{
{
SourceLabels: model.LabelNames{model.ScrapeIntervalLabel},
Regex: relabel.MustNewRegexp("2s"),
Replacement: "3s",
TargetLabel: model.ScrapeIntervalLabel,
Action: relabel.Replace,
},
{
SourceLabels: model.LabelNames{model.ScrapeTimeoutLabel},
Regex: relabel.MustNewRegexp("500ms"),
Replacement: "750ms",
TargetLabel: model.ScrapeTimeoutLabel,
Action: relabel.Replace,
},
},
}
sp, _ := newScrapePool(config, &nopAppendable{}, 0, nil)
tgts := []*targetgroup.Group{
{
Targets: []model.LabelSet{{model.AddressLabel: "127.0.0.1:9090"}},
},
}
sp.Sync(tgts)
defer sp.stop()
require.Equal(t, "3s", sp.ActiveTargets()[0].labels.Get(model.ScrapeIntervalLabel))
require.Equal(t, "750ms", sp.ActiveTargets()[0].labels.Get(model.ScrapeTimeoutLabel))
}


@@ -143,8 +143,18 @@ func (t *Target) SetMetadataStore(s MetricMetadataStore) {
 // hash returns an identifying hash for the target.
 func (t *Target) hash() uint64 {
 h := fnv.New64a()
+// We must build a label set without the scrape interval and timeout
+// labels because those aren't defining attributes of a target
+// and can be changed without qualifying its parent as a new target,
+// therefore they should not affect its unique hash.
+l := t.labels.Map()
+delete(l, model.ScrapeIntervalLabel)
+delete(l, model.ScrapeTimeoutLabel)
+lset := labels.FromMap(l)
 //nolint: errcheck
-h.Write([]byte(fmt.Sprintf("%016d", t.labels.Hash())))
+h.Write([]byte(fmt.Sprintf("%016d", lset.Hash())))
 //nolint: errcheck
 h.Write([]byte(t.URL().String()))
@@ -273,6 +283,31 @@ func (t *Target) Health() TargetHealth {
 return t.health
 }
+// intervalAndTimeout returns the interval and timeout derived from
+// the target's labels.
+func (t *Target) intervalAndTimeout(defaultInterval, defaultDuration time.Duration) (time.Duration, time.Duration, error) {
+t.mtx.RLock()
+defer t.mtx.RUnlock()
+intervalLabel := t.labels.Get(model.ScrapeIntervalLabel)
+interval, err := model.ParseDuration(intervalLabel)
+if err != nil {
+return defaultInterval, defaultDuration, errors.Errorf("Error parsing interval label %q: %v", intervalLabel, err)
+}
+timeoutLabel := t.labels.Get(model.ScrapeTimeoutLabel)
+timeout, err := model.ParseDuration(timeoutLabel)
+if err != nil {
+return defaultInterval, defaultDuration, errors.Errorf("Error parsing timeout label %q: %v", timeoutLabel, err)
+}
+return time.Duration(interval), time.Duration(timeout), nil
+}
+// GetValue gets a label value from the entire label set.
+func (t *Target) GetValue(name string) string {
+return t.labels.Get(name)
+}
 // Targets is a sortable list of targets.
 type Targets []*Target
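Note the fallback contract of `intervalAndTimeout` above: on a parse error it still returns the defaults it was given, so callers keep usable timings and report the error separately (the `sync()` change earlier does exactly that via `setForcedError`). A hypothetical test in the same package — the name and values are mine, not part of this commit — pins that behaviour down:

```go
package scrape

import (
	"testing"
	"time"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/stretchr/testify/require"
)

func TestIntervalAndTimeoutFallback(t *testing.T) {
	// Target whose interval label was relabeled to something unparsable.
	target := &Target{labels: labels.FromMap(map[string]string{
		model.ScrapeIntervalLabel: "often", // not a valid duration
		model.ScrapeTimeoutLabel:  "10s",
	})}

	interval, timeout, err := target.intervalAndTimeout(time.Minute, 10*time.Second)

	require.Error(t, err)
	// The defaults passed in are handed back so the scrape loop stays usable.
	require.Equal(t, time.Minute, interval)
	require.Equal(t, 10*time.Second, timeout)
}
```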
@@ -329,6 +364,8 @@ func populateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig lab
 // Copy labels into the labelset for the target if they are not set already.
 scrapeLabels := []labels.Label{
 {Name: model.JobLabel, Value: cfg.JobName},
+{Name: model.ScrapeIntervalLabel, Value: cfg.ScrapeInterval.String()},
+{Name: model.ScrapeTimeoutLabel, Value: cfg.ScrapeTimeout.String()},
 {Name: model.MetricsPathLabel, Value: cfg.MetricsPath},
 {Name: model.SchemeLabel, Value: cfg.Scheme},
 }
@@ -390,6 +427,34 @@ func populateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig lab
 return nil, nil, err
 }
+var interval string
+var intervalDuration model.Duration
+if interval = lset.Get(model.ScrapeIntervalLabel); interval != cfg.ScrapeInterval.String() {
+intervalDuration, err = model.ParseDuration(interval)
+if err != nil {
+return nil, nil, errors.Errorf("error parsing scrape interval: %v", err)
+}
+if time.Duration(intervalDuration) == 0 {
+return nil, nil, errors.New("scrape interval cannot be 0")
+}
+}
+var timeout string
+var timeoutDuration model.Duration
+if timeout = lset.Get(model.ScrapeTimeoutLabel); timeout != cfg.ScrapeTimeout.String() {
+timeoutDuration, err = model.ParseDuration(timeout)
+if err != nil {
+return nil, nil, errors.Errorf("error parsing scrape timeout: %v", err)
+}
+if time.Duration(timeoutDuration) == 0 {
+return nil, nil, errors.New("scrape timeout cannot be 0")
+}
+}
+if timeoutDuration > intervalDuration {
+return nil, nil, errors.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
+}
 // Meta labels are deleted after relabelling. Other internal labels propagate to
 // the target which decides whether they will be part of their label set.
 for _, l := range lset {


@@ -382,3 +382,29 @@ func TestTargetsFromGroup(t *testing.T) {
 t.Fatalf("Expected error %s, got %s", expectedError, failures[0])
 }
 }
func TestTargetHash(t *testing.T) {
target1 := &Target{
labels: labels.Labels{
{Name: model.AddressLabel, Value: "localhost"},
{Name: model.SchemeLabel, Value: "http"},
{Name: model.MetricsPathLabel, Value: "/metrics"},
{Name: model.ScrapeIntervalLabel, Value: "15s"},
{Name: model.ScrapeTimeoutLabel, Value: "500ms"},
},
}
hash1 := target1.hash()
target2 := &Target{
labels: labels.Labels{
{Name: model.AddressLabel, Value: "localhost"},
{Name: model.SchemeLabel, Value: "http"},
{Name: model.MetricsPathLabel, Value: "/metrics"},
{Name: model.ScrapeIntervalLabel, Value: "14s"},
{Name: model.ScrapeTimeoutLabel, Value: "600ms"},
},
}
hash2 := target2.hash()
require.Equal(t, hash1, hash2, "Scrape interval and timeout labels should not affect the hash.")
}


@@ -760,6 +760,9 @@ type Target struct {
 LastScrape time.Time `json:"lastScrape"`
 LastScrapeDuration float64 `json:"lastScrapeDuration"`
 Health scrape.TargetHealth `json:"health"`
+ScrapeInterval string `json:"scrapeInterval"`
+ScrapeTimeout string `json:"scrapeTimeout"`
 }
 // DroppedTarget has the information for one target that was dropped during relabelling.
@@ -899,6 +902,8 @@ func (api *API) targets(r *http.Request) apiFuncResult {
 LastScrape: target.LastScrape(),
 LastScrapeDuration: target.LastScrapeDuration().Seconds(),
 Health: target.Health(),
+ScrapeInterval: target.GetValue(model.ScrapeIntervalLabel),
+ScrapeTimeout: target.GetValue(model.ScrapeTimeoutLabel),
 })
 }
 }


@@ -538,6 +538,8 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
 model.AddressLabel: "example.com:8080",
 model.MetricsPathLabel: "/metrics",
 model.JobLabel: "test",
+model.ScrapeIntervalLabel: "15s",
+model.ScrapeTimeoutLabel: "5s",
 }),
 DiscoveredLabels: nil,
 Params: url.Values{},
@@ -551,6 +553,8 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
 model.AddressLabel: "localhost:9115",
 model.MetricsPathLabel: "/probe",
 model.JobLabel: "blackbox",
+model.ScrapeIntervalLabel: "20s",
+model.ScrapeTimeoutLabel: "10s",
 }),
 DiscoveredLabels: nil,
 Params: url.Values{"target": []string{"example.com"}},
@@ -565,6 +569,8 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
 model.AddressLabel: "http://dropped.example.com:9115",
 model.MetricsPathLabel: "/probe",
 model.JobLabel: "blackbox",
+model.ScrapeIntervalLabel: "30s",
+model.ScrapeTimeoutLabel: "15s",
 }),
 Params: url.Values{},
 Active: false,
@@ -951,6 +957,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 LastError: "failed: missing port in address",
 LastScrape: scrapeStart,
 LastScrapeDuration: 0.1,
+ScrapeInterval: "20s",
+ScrapeTimeout: "10s",
 },
 {
 DiscoveredLabels: map[string]string{},
@@ -964,6 +972,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 LastError: "",
 LastScrape: scrapeStart,
 LastScrapeDuration: 0.07,
+ScrapeInterval: "15s",
+ScrapeTimeout: "5s",
 },
 },
 DroppedTargets: []*DroppedTarget{
@@ -973,6 +983,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 "__metrics_path__": "/probe",
 "__scheme__": "http",
 "job": "blackbox",
+"__scrape_interval__": "30s",
+"__scrape_timeout__": "15s",
 },
 },
 },
@@ -997,6 +1009,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 LastError: "failed: missing port in address",
 LastScrape: scrapeStart,
 LastScrapeDuration: 0.1,
+ScrapeInterval: "20s",
+ScrapeTimeout: "10s",
 },
 {
 DiscoveredLabels: map[string]string{},
@@ -1010,6 +1024,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 LastError: "",
 LastScrape: scrapeStart,
 LastScrapeDuration: 0.07,
+ScrapeInterval: "15s",
+ScrapeTimeout: "5s",
 },
 },
 DroppedTargets: []*DroppedTarget{
@@ -1019,6 +1035,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 "__metrics_path__": "/probe",
 "__scheme__": "http",
 "job": "blackbox",
+"__scrape_interval__": "30s",
+"__scrape_timeout__": "15s",
 },
 },
 },
@@ -1043,6 +1061,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 LastError: "failed: missing port in address",
 LastScrape: scrapeStart,
 LastScrapeDuration: 0.1,
+ScrapeInterval: "20s",
+ScrapeTimeout: "10s",
 },
 {
 DiscoveredLabels: map[string]string{},
@@ -1056,6 +1076,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 LastError: "",
 LastScrape: scrapeStart,
 LastScrapeDuration: 0.07,
+ScrapeInterval: "15s",
+ScrapeTimeout: "5s",
 },
 },
 DroppedTargets: []*DroppedTarget{},
@@ -1075,6 +1097,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
 "__metrics_path__": "/probe",
 "__scheme__": "http",
 "job": "blackbox",
+"__scrape_interval__": "30s",
+"__scrape_timeout__": "15s",
 },
 },
 },


@@ -25,6 +25,9 @@ describe('ScrapePoolList', () => {
 const div = document.createElement('div');
 div.id = `series-labels-${pool}-${idx}`;
 document.body.appendChild(div);
+const div2 = document.createElement('div');
+div2.id = `scrape-duration-${pool}-${idx}`;
+document.body.appendChild(div2);
 });
 });
 mock = fetchMock.mockResponse(JSON.stringify(sampleApiResponse));


@@ -57,6 +57,9 @@ describe('ScrapePoolPanel', () => {
 const div = document.createElement('div');
 div.id = `series-labels-prometheus-0`;
 document.body.appendChild(div);
+const div2 = document.createElement('div');
+div2.id = `scrape-duration-prometheus-0`;
+document.body.appendChild(div2);
 const scrapePoolPanel = mount(<ScrapePoolPanel {...props} />);
 const btn = scrapePoolPanel.find(Button);


@@ -5,9 +5,10 @@ import styles from './ScrapePoolPanel.module.css';
 import { Target } from './target';
 import EndpointLink from './EndpointLink';
 import TargetLabels from './TargetLabels';
+import TargetScrapeDuration from './TargetScrapeDuration';
 import { now } from 'moment';
 import { ToggleMoreLess } from '../../components/ToggleMoreLess';
-import { formatRelative, humanizeDuration } from '../../utils';
+import { formatRelative } from '../../utils';
 interface PanelProps {
 scrapePool: string;
@@ -54,6 +55,8 @@ const ScrapePoolPanel: FC<PanelProps> = ({ scrapePool, targetGroup, expanded, to
 lastScrape,
 lastScrapeDuration,
 health,
+scrapeInterval,
+scrapeTimeout,
 } = target;
 const color = getColor(health);
@@ -69,7 +72,15 @@ const ScrapePoolPanel: FC<PanelProps> = ({ scrapePool, targetGroup, expanded, to
 <TargetLabels discoveredLabels={discoveredLabels} labels={labels} scrapePool={scrapePool} idx={idx} />
 </td>
 <td className={styles['last-scrape']}>{formatRelative(lastScrape, now())}</td>
-<td className={styles['scrape-duration']}>{humanizeDuration(lastScrapeDuration * 1000)}</td>
+<td className={styles['scrape-duration']}>
+<TargetScrapeDuration
+duration={lastScrapeDuration}
+scrapePool={scrapePool}
+idx={idx}
+interval={scrapeInterval}
+timeout={scrapeTimeout}
+/>
+</td>
 <td className={styles.errors}>{lastError ? <span className="text-danger">{lastError}</span> : null}</td>
 </tr>
 );


@@ -0,0 +1,41 @@
import React, { FC, Fragment, useState } from 'react';
import { Tooltip } from 'reactstrap';
import 'css.escape';
import { humanizeDuration } from '../../utils';
export interface TargetScrapeDurationProps {
duration: number;
interval: string;
timeout: string;
idx: number;
scrapePool: string;
}
const TargetScrapeDuration: FC<TargetScrapeDurationProps> = ({ duration, interval, timeout, idx, scrapePool }) => {
const [scrapeTooltipOpen, setScrapeTooltipOpen] = useState<boolean>(false);
const id = `scrape-duration-${scrapePool}-${idx}`;
return (
<>
<div id={id} className="scrape-duration-container">
{humanizeDuration(duration * 1000)}
</div>
<Tooltip
isOpen={scrapeTooltipOpen}
toggle={() => setScrapeTooltipOpen(!scrapeTooltipOpen)}
target={CSS.escape(id)}
style={{ maxWidth: 'none', textAlign: 'left' }}
>
<Fragment>
<span>Interval: {interval}</span>
<br />
</Fragment>
<Fragment>
<span>Timeout: {timeout}</span>
</Fragment>
</Tooltip>
</>
);
};
export default TargetScrapeDuration;


@@ -25,6 +25,8 @@ export const targetGroups: ScrapePools = Object.freeze({
 lastScrape: '2019-11-04T11:52:14.759299-07:00',
 lastScrapeDuration: 36560147,
 health: 'up',
+scrapeInterval: '15s',
+scrapeTimeout: '500ms',
 },
 {
 discoveredLabels: {
@@ -45,6 +47,8 @@ export const targetGroups: ScrapePools = Object.freeze({
 lastScrape: '2019-11-04T11:52:24.731096-07:00',
 lastScrapeDuration: 49448763,
 health: 'up',
+scrapeInterval: '15s',
+scrapeTimeout: '500ms',
 },
 {
 discoveredLabels: {
@@ -65,6 +69,8 @@ export const targetGroups: ScrapePools = Object.freeze({
 lastScrape: '2019-11-04T11:52:13.516654-07:00',
 lastScrapeDuration: 120916592,
 health: 'down',
+scrapeInterval: '15s',
+scrapeTimeout: '500ms',
 },
 ],
 },
@@ -89,6 +95,8 @@ export const targetGroups: ScrapePools = Object.freeze({
 lastScrape: '2019-11-04T11:52:14.145703-07:00',
 lastScrapeDuration: 3842307,
 health: 'up',
+scrapeInterval: '15s',
+scrapeTimeout: '500ms',
 },
 ],
 },
@@ -113,6 +121,8 @@ export const targetGroups: ScrapePools = Object.freeze({
 lastScrape: '2019-11-04T11:52:18.479731-07:00',
 lastScrapeDuration: 4050976,
 health: 'up',
+scrapeInterval: '15s',
+scrapeTimeout: '500ms',
 },
 ],
 },


@@ -12,6 +12,8 @@ export interface Target {
 lastScrape: string;
 lastScrapeDuration: number;
 health: string;
+scrapeInterval: string;
+scrapeTimeout: string;
 }
 export interface DroppedTarget {