From 5f50d974c92bd55199961bdc514f92fbd5ac2fd1 Mon Sep 17 00:00:00 2001 From: Bryan Boreham Date: Fri, 24 Nov 2023 19:46:26 +0000 Subject: [PATCH] scraping: reset symbol table periodically Signed-off-by: Bryan Boreham --- scrape/manager_test.go | 11 ++++++----- scrape/scrape.go | 42 ++++++++++++++++++++++++++++++------------ scrape/scrape_test.go | 14 +++++++++----- 3 files changed, 45 insertions(+), 22 deletions(-) diff --git a/scrape/manager_test.go b/scrape/manager_test.go index 7b7a92916..f90fd0ce6 100644 --- a/scrape/manager_test.go +++ b/scrape/manager_test.go @@ -523,11 +523,12 @@ scrape_configs: loops: map[uint64]loop{ 1: noopLoop(), }, - newLoop: newLoop, - logger: nil, - config: cfg1.ScrapeConfigs[0], - client: http.DefaultClient, - metrics: scrapeManager.metrics, + newLoop: newLoop, + logger: nil, + config: cfg1.ScrapeConfigs[0], + client: http.DefaultClient, + metrics: scrapeManager.metrics, + symbolTable: labels.NewSymbolTable(), } scrapeManager.scrapePools = map[string]*scrapePool{ "job1": sp, diff --git a/scrape/scrape.go b/scrape/scrape.go index ce581d71b..aa2d5538b 100644 --- a/scrape/scrape.go +++ b/scrape/scrape.go @@ -73,7 +73,9 @@ type scrapePool struct { client *http.Client loops map[uint64]loop - symbolTable *labels.SymbolTable + symbolTable *labels.SymbolTable + lastSymbolTableCheck time.Time + initialSymbolTableLen int targetMtx sync.Mutex // activeTargets and loops must always be synchronized to have the same @@ -132,17 +134,18 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed ctx, cancel := context.WithCancel(context.Background()) sp := &scrapePool{ - cancel: cancel, - appendable: app, - config: cfg, - client: client, - activeTargets: map[uint64]*Target{}, - loops: map[uint64]loop{}, - symbolTable: labels.NewSymbolTable(), // TODO: clean this out from time to time. - logger: logger, - metrics: metrics, - httpOpts: options.HTTPClientOptions, - noDefaultPort: options.NoDefaultPort, + cancel: cancel, + appendable: app, + config: cfg, + client: client, + activeTargets: map[uint64]*Target{}, + loops: map[uint64]loop{}, + symbolTable: labels.NewSymbolTable(), + lastSymbolTableCheck: time.Now(), + logger: logger, + metrics: metrics, + httpOpts: options.HTTPClientOptions, + noDefaultPort: options.NoDefaultPort, } sp.newLoop = func(opts scrapeLoopOptions) loop { // Update the targets retrieval function for metadata to a new scrape cache. @@ -352,6 +355,21 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error { sp.metrics.targetReloadIntervalLength.WithLabelValues(interval.String()).Observe( time.Since(start).Seconds(), ) + + // Here we take steps to clear out the symbol table if it has grown a lot. + // After waiting some time for things to settle, we take the size of the symbol-table. + // If, after some more time, the table has grown to twice that size, we start a new one. 
+ const minTimeToCleanSymbolTable = 5 * time.Minute + if time.Since(sp.lastSymbolTableCheck) > minTimeToCleanSymbolTable { + if sp.initialSymbolTableLen == 0 { + sp.initialSymbolTableLen = sp.symbolTable.Len() + } else if sp.symbolTable.Len() > 2*sp.initialSymbolTableLen { + sp.symbolTable = labels.NewSymbolTable() + sp.initialSymbolTableLen = 0 + } + sp.lastSymbolTableCheck = time.Now() + } + return nil } diff --git a/scrape/scrape_test.go b/scrape/scrape_test.go index 67f22f149..bcaeb460e 100644 --- a/scrape/scrape_test.go +++ b/scrape/scrape_test.go @@ -279,6 +279,7 @@ func TestScrapePoolReload(t *testing.T) { logger: nil, client: http.DefaultClient, metrics: newTestScrapeMetrics(t), + symbolTable: labels.NewSymbolTable(), } // Reloading a scrape pool with a new scrape configuration must stop all scrape @@ -357,10 +358,11 @@ func TestScrapePoolReloadPreserveRelabeledIntervalTimeout(t *testing.T) { loops: map[uint64]loop{ 1: noopLoop(), }, - newLoop: newLoop, - logger: nil, - client: http.DefaultClient, - metrics: newTestScrapeMetrics(t), + newLoop: newLoop, + logger: nil, + client: http.DefaultClient, + metrics: newTestScrapeMetrics(t), + symbolTable: labels.NewSymbolTable(), } err := sp.reload(reloadCfg) @@ -391,6 +393,7 @@ func TestScrapePoolTargetLimit(t *testing.T) { logger: log.NewNopLogger(), client: http.DefaultClient, metrics: newTestScrapeMetrics(t), + symbolTable: labels.NewSymbolTable(), } tgs := []*targetgroup.Group{} @@ -623,6 +626,7 @@ func TestScrapePoolScrapeLoopsStarted(t *testing.T) { logger: nil, client: http.DefaultClient, metrics: newTestScrapeMetrics(t), + symbolTable: labels.NewSymbolTable(), } tgs := []*targetgroup.Group{ @@ -660,7 +664,7 @@ func newBasicScrapeLoop(t testing.TB, ctx context.Context, scraper scraper, app nopMutator, app, nil, - nil, + labels.NewSymbolTable(), 0, true, false,
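
Not part of the patch, but for reviewers: a minimal, self-contained sketch of the reset heuristic the scrape.go hunk adds, assuming only the labels.SymbolTable API already used in the diff (NewSymbolTable, Len). The symbolTableRecycler type, its field names, and the constructor are hypothetical, chosen for illustration; in the patch itself the equivalent state lives directly on scrapePool and the check runs at the end of reload().

```go
package main

import (
	"fmt"
	"time"

	"github.com/prometheus/prometheus/model/labels"
)

// symbolTableRecycler mirrors the heuristic added to scrapePool.reload:
// after an initial settling period, remember the table's size; if it later
// grows past twice that baseline, swap in a fresh table so symbols that are
// no longer referenced can be garbage collected.
type symbolTableRecycler struct {
	table      *labels.SymbolTable
	lastCheck  time.Time
	initialLen int
	minAge     time.Duration // e.g. the patch's 5-minute minTimeToCleanSymbolTable
}

func newSymbolTableRecycler(minAge time.Duration) *symbolTableRecycler {
	return &symbolTableRecycler{
		table:     labels.NewSymbolTable(),
		lastCheck: time.Now(),
		minAge:    minAge,
	}
}

// maybeReset performs the same check the patch runs on each config reload.
func (r *symbolTableRecycler) maybeReset() {
	if time.Since(r.lastCheck) <= r.minAge {
		return // too soon since the last check; let things settle first
	}
	switch {
	case r.initialLen == 0:
		// First check after the settling period: record the baseline size.
		r.initialLen = r.table.Len()
	case r.table.Len() > 2*r.initialLen:
		// The table has more than doubled since the baseline: start a new
		// one and re-arm the baseline measurement.
		r.table = labels.NewSymbolTable()
		r.initialLen = 0
	}
	r.lastCheck = time.Now()
}

func main() {
	r := newSymbolTableRecycler(5 * time.Minute)
	r.maybeReset() // no-op until minAge has elapsed
	fmt.Println("interned symbols:", r.table.Len())
}
```

Note that the check is driven by reload() rather than a timer, so a pool whose configuration never reloads keeps its table; the 5-minute floor and the 2x growth factor keep frequent reloads from discarding a table that has not clearly outgrown its baseline.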