scrape: Rewrite scrape loop options as a struct (#5314)
Signed-off-by: Julien Pivotto <roidelapluie@inuits.eu>
commit 04ce817c49
parent 027d2ece14
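The refactor collapses the scrape-loop constructor's positional parameter list (target, scraper, sample limit, honor-labels flag, relabel configs) into a single scrapeLoopOptions struct, so future per-loop settings can be added without touching every call site. The sketch below is a minimal, self-contained illustration of that options-struct pattern under assumed names (readerOptions, newReader); it is not Prometheus code.

package main

import "fmt"

// readerOptions plays the role of scrapeLoopOptions: it bundles every
// constructor input into one struct so call sites name only what they set.
type readerOptions struct {
	url         string
	limit       int
	honorLabels bool
}

// newReader stands in for sp.newLoop: it accepts the options struct
// instead of a growing list of positional parameters.
func newReader(opts readerOptions) string {
	return fmt.Sprintf("url=%s limit=%d honorLabels=%v", opts.url, opts.limit, opts.honorLabels)
}

func main() {
	// Callers set only the fields they care about; Go zero values cover the
	// rest, much as the tests below pass only target (and sometimes limit).
	fmt.Println(newReader(readerOptions{url: "http://example.org/metrics"}))
	fmt.Println(newReader(readerOptions{url: "http://example.org/metrics", limit: 100}))
}

The same benefit shows up in the test hunks below: the old signature forced callers to spell out nil/0/false placeholders, while the struct form lets them pass just the fields under test.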
@@ -274,7 +274,7 @@ scrape_configs:
 	)
 
 	scrapeManager := NewManager(nil, nil)
-	newLoop := func(_ *Target, s scraper, _ int, _ bool, _ []*relabel.Config) loop {
+	newLoop := func(scrapeLoopOptions) loop {
 		ch <- struct{}{}
 		return noopLoop()
 	}
@@ -158,7 +158,15 @@ type scrapePool struct {
 	cancel     context.CancelFunc
 
 	// Constructor for new scrape loops. This is settable for testing convenience.
-	newLoop func(*Target, scraper, int, bool, []*relabel.Config) loop
+	newLoop func(scrapeLoopOptions) loop
+}
+
+type scrapeLoopOptions struct {
+	target      *Target
+	scraper     scraper
+	limit       int
+	honorLabels bool
+	mrc         []*relabel.Config
 }
 
 const maxAheadTime = 10 * time.Minute
@@ -189,24 +197,26 @@ func newScrapePool(cfg *config.ScrapeConfig, app Appendable, logger log.Logger)
 		loops:          map[uint64]loop{},
 		logger:         logger,
 	}
-	sp.newLoop = func(t *Target, s scraper, limit int, honor bool, mrc []*relabel.Config) loop {
+	sp.newLoop = func(opts scrapeLoopOptions) loop {
 		// Update the targets retrieval function for metadata to a new scrape cache.
 		cache := newScrapeCache()
-		t.setMetadataStore(cache)
+		opts.target.setMetadataStore(cache)
 
 		return newScrapeLoop(
 			ctx,
-			s,
-			log.With(logger, "target", t),
+			opts.scraper,
+			log.With(logger, "target", opts.target),
 			buffers,
-			func(l labels.Labels) labels.Labels { return mutateSampleLabels(l, t, honor, mrc) },
-			func(l labels.Labels) labels.Labels { return mutateReportSampleLabels(l, t) },
+			func(l labels.Labels) labels.Labels {
+				return mutateSampleLabels(l, opts.target, opts.honorLabels, opts.mrc)
+			},
+			func(l labels.Labels) labels.Labels { return mutateReportSampleLabels(l, opts.target) },
 			func() storage.Appender {
 				app, err := app.Appender()
 				if err != nil {
 					panic(err)
 				}
-				return appender(app, limit)
+				return appender(app, opts.limit)
 			},
 			cache,
 		)
@@ -285,7 +295,13 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 		var (
 			t       = sp.activeTargets[fp]
 			s       = &targetScraper{Target: t, client: sp.client, timeout: timeout}
-			newLoop = sp.newLoop(t, s, limit, honor, mrc)
+			newLoop = sp.newLoop(scrapeLoopOptions{
+				target:      t,
+				scraper:     s,
+				limit:       limit,
+				honorLabels: honor,
+				mrc:         mrc,
+			})
 		)
 		wg.Add(1)
 
@@ -360,7 +376,13 @@ func (sp *scrapePool) sync(targets []*Target) {
 
 		if _, ok := sp.activeTargets[hash]; !ok {
 			s := &targetScraper{Target: t, client: sp.client, timeout: timeout}
-			l := sp.newLoop(t, s, limit, honor, mrc)
+			l := sp.newLoop(scrapeLoopOptions{
+				target:      t,
+				scraper:     s,
+				limit:       limit,
+				honorLabels: honor,
+				mrc:         mrc,
+			})
 
 			sp.activeTargets[hash] = t
 			sp.loops[hash] = l
@@ -219,7 +219,7 @@ func TestScrapePoolReload(t *testing.T) {
 	}
 	// On starting to run, new loops created on reload check whether their preceding
 	// equivalents have been stopped.
-	newLoop := func(_ *Target, s scraper, _ int, _ bool, _ []*relabel.Config) loop {
+	newLoop := func(opts scrapeLoopOptions) loop {
 		l := &testLoop{}
 		l.startFunc = func(interval, timeout time.Duration, errc chan<- error) {
 			if interval != 3*time.Second {
@@ -229,8 +229,8 @@ func TestScrapePoolReload(t *testing.T) {
 				t.Errorf("Expected scrape timeout %d but got %d", 2*time.Second, timeout)
 			}
 			mtx.Lock()
-			if !stopped[s.(*targetScraper).hash()] {
-				t.Errorf("Scrape loop for %v not stopped yet", s.(*targetScraper))
+			if !stopped[opts.scraper.(*targetScraper).hash()] {
+				t.Errorf("Scrape loop for %v not stopped yet", opts.scraper.(*targetScraper))
 			}
 			mtx.Unlock()
 		}
@@ -307,7 +307,9 @@ func TestScrapePoolAppender(t *testing.T) {
 	app := &nopAppendable{}
 	sp, _ := newScrapePool(cfg, app, nil)
 
-	loop := sp.newLoop(&Target{}, nil, 0, false, nil)
+	loop := sp.newLoop(scrapeLoopOptions{
+		target: &Target{},
+	})
 	appl, ok := loop.(*scrapeLoop)
 	if !ok {
 		t.Fatalf("Expected scrapeLoop but got %T", loop)
@@ -322,7 +324,10 @@ func TestScrapePoolAppender(t *testing.T) {
 		t.Fatalf("Expected base appender but got %T", tl.Appender)
 	}
 
-	loop = sp.newLoop(&Target{}, nil, 100, false, nil)
+	loop = sp.newLoop(scrapeLoopOptions{
+		target: &Target{},
+		limit:  100,
+	})
 	appl, ok = loop.(*scrapeLoop)
 	if !ok {
 		t.Fatalf("Expected scrapeLoop but got %T", loop)