Merge pull request #15563 from GiedriusS/fix_setting_field

scrape: fix nil panic after scrape loop reload
Bryan Boreham, 2024-12-10 15:33:56 +00:00 (committed via GitHub)
commit c69aeabf1d
2 changed files with 42 additions and 0 deletions
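
restartLoops rebuilds every target's scrape loop after a configuration reload, but the scraper struct literal it recreates (the targetScraper, judging by its fields) was missing its metrics field, so the first scrape after a reload that touched those metrics dereferenced a nil pointer. The one-line change below copies sp.metrics into the rebuilt scraper; the accompanying regression test reloads a live scrape pool and lets it scrape once more.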

scrape/scrape.go

@@ -361,6 +361,7 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
 				bodySizeLimit:        bodySizeLimit,
 				acceptHeader:         acceptHeader(sp.config.ScrapeProtocols, validationScheme),
 				acceptEncodingHeader: acceptEncodingHeader(enableCompression),
+				metrics:              sp.metrics,
 			}
 			newLoop = sp.newLoop(scrapeLoopOptions{
 				target: t,
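
A minimal, self-contained sketch of the failure mode, using hypothetical names (scraper, scrapeMetrics, inc are illustrative only, not Prometheus's actual types): a struct rebuilt without its shared metrics pointer panics the first time that pointer is dereferenced.

package main

import "fmt"

// Hypothetical stand-ins for the pool's shared metrics and a per-target scraper.
type scrapeMetrics struct{ exceededBodySizeLimit int }

func (m *scrapeMetrics) inc() { m.exceededBodySizeLimit++ } // a nil m panics here

type scraper struct{ metrics *scrapeMetrics }

// scrape mimics an error path that touches the metrics field.
func (s *scraper) scrape() { s.metrics.inc() }

func main() {
	shared := &scrapeMetrics{}

	// After the fix: the pool's metrics are copied into the rebuilt scraper.
	fixed := &scraper{metrics: shared}
	fixed.scrape()
	fmt.Println("fixed scraper incremented:", shared.exceededBodySizeLimit)

	// Before the fix: the scraper rebuilt on reload left metrics nil,
	// so its first scrape dereferences a nil pointer.
	rebuilt := &scraper{}
	defer func() { fmt.Println("recovered from:", recover()) }()
	rebuilt.scrape()
}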

scrape/scrape_test.go

@@ -4831,3 +4831,44 @@ func newScrapableServer(scrapeText string) (s *httptest.Server, scrapedTwice cha
 		}
 	})), scrapedTwice
 }
+
+// Regression test for the panic fixed in https://github.com/prometheus/prometheus/pull/15523.
+func TestScrapePoolScrapeAfterReload(t *testing.T) {
+	h := httptest.NewServer(http.HandlerFunc(
+		func(w http.ResponseWriter, r *http.Request) {
+			w.Write([]byte{0x42, 0x42})
+		},
+	))
+	t.Cleanup(h.Close)
+
+	cfg := &config.ScrapeConfig{
+		BodySizeLimit:     1,
+		JobName:           "test",
+		Scheme:            "http",
+		ScrapeInterval:    model.Duration(100 * time.Millisecond),
+		ScrapeTimeout:     model.Duration(100 * time.Millisecond),
+		EnableCompression: false,
+		ServiceDiscoveryConfigs: discovery.Configs{
+			&discovery.StaticConfig{
+				{
+					Targets: []model.LabelSet{{model.AddressLabel: model.LabelValue(h.URL)}},
+				},
+			},
+		},
+	}
+
+	p, err := newScrapePool(cfg, &nopAppendable{}, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
+	require.NoError(t, err)
+	t.Cleanup(p.stop)
+
+	p.Sync([]*targetgroup.Group{
+		{
+			Targets: []model.LabelSet{{model.AddressLabel: model.LabelValue(strings.TrimPrefix(h.URL, "http://"))}},
+			Source:  "test",
+		},
+	})
+
+	require.NoError(t, p.reload(cfg))
+
+	<-time.After(1 * time.Second)
+}
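
How the test exercises the fix: the handler always returns two bytes while BodySizeLimit is 1, so every scrape trips the body-size-limit error path, which (judging by the fields the fix touches) is one of the code paths that reads the scraper's metrics. p.reload(cfg) drives the pool through restartLoops, and the one-second wait gives the restarted 100 ms loops time to scrape at least once. Without the metrics line above, that first post-reload scrape panics on a nil dereference; with it, the test runs clean. To run it in isolation: go test ./scrape -run TestScrapePoolScrapeAfterReload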