commit 997910db21
Darshan Chaudhary, 2025-03-05 22:37:36 +08:00 (committed by GitHub)
4 changed files with 78 additions and 0 deletions

CHANGELOG.md

@@ -1,6 +1,7 @@
 # Changelog
 ## unreleased
+* [ENHANCEMENT] Scraping: add warning if targets relabel to same labels. This is enabled under the feature-flag `warn-if-targets-relabelled-to-same-labels`. #9589
 ## 3.2.1 / 2025-02-25

cmd/prometheus/main.go

@@ -264,6 +264,9 @@ func (c *flagConfig) setFeatureListOptions(logger *slog.Logger) error {
 			config.DefaultConfig.GlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
 			config.DefaultGlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
 			logger.Info("Experimental created timestamp zero ingestion enabled. Changed default scrape_protocols to prefer PrometheusProto format.", "global.scrape_protocols", fmt.Sprintf("%v", config.DefaultGlobalConfig.ScrapeProtocols))
+		case "warn-if-targets-relabelled-to-same-labels":
+			c.scrape.EnableWarnIfTargetsRelabelledToSameLabels = true
+			logger.Info("Enabled warning if targets relabelled to same labels")
 		case "delayed-compaction":
 			c.tsdb.EnableDelayedCompaction = true
 			logger.Info("Experimental delayed compaction is enabled.")
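The case above only flips a field on the scrape options; the scrape manager acts on it during reload(), as shown in the next file. As a rough sketch of how a caller could wire this up, modelled on the test added at the end of this commit (the nil arguments and the fresh registry are taken from that test, not from the real command-line wiring, so treat this as illustrative only):

```go
package main

import (
	"os"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/common/promslog"
	"github.com/prometheus/prometheus/scrape"
)

func main() {
	logger := promslog.New(&promslog.Config{Writer: os.Stderr})

	// Field name taken from this diff; the NewManager arguments mirror the test below.
	opts := scrape.Options{EnableWarnIfTargetsRelabelledToSameLabels: true}

	m, err := scrape.NewManager(&opts, logger, nil, nil, prometheus.NewRegistry())
	if err != nil {
		logger.Error("creating scrape manager", "err", err)
		os.Exit(1)
	}
	_ = m // subsequent calls to reload() will warn about targets relabelled to identical label sets
}
```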

scrape/manager.go

@@ -90,6 +90,9 @@ type Options struct {
 	// Optional HTTP client options to use when scraping.
 	HTTPClientOptions []config_util.HTTPClientOption
+
+	// Option to warn if targets relabelled to same labels
+	EnableWarnIfTargetsRelabelledToSameLabels bool
 
 	// private option for testability.
 	skipOffsetting bool
 }
@@ -205,6 +208,40 @@ func (m *Manager) reload() {
 	}
 	m.mtxScrape.Unlock()
 	wg.Wait()
+
+	if m.opts.EnableWarnIfTargetsRelabelledToSameLabels {
+		m.warnIfTargetsRelabelledToSameLabels()
+	}
+}
+
+func (m *Manager) warnIfTargetsRelabelledToSameLabels() {
+	m.mtxScrape.Lock()
+	defer m.mtxScrape.Unlock()
+
+	totalTargets := 0
+	for _, scrapePool := range m.scrapePools {
+		totalTargets += len(scrapePool.activeTargets)
+	}
+
+	activeTargets := make(map[string]*Target, totalTargets)
+	buf := [1024]byte{}
+	builder := labels.NewBuilder(labels.EmptyLabels())
+	for _, scrapePool := range m.scrapePools {
+		for _, target := range scrapePool.activeTargets {
+			lStr := string(target.labels.Bytes(buf[:]))
+			t, ok := activeTargets[lStr]
+			if !ok {
+				activeTargets[lStr] = target
+				continue
+			}
+			m.logger.Warn(
+				"Found targets with same labels after relabelling",
+				"target_one", t.DiscoveredLabels(builder).Get(model.AddressLabel),
+				"target_two", target.DiscoveredLabels(builder).Get(model.AddressLabel),
+				"labels", target.labels.String(),
+			)
+		}
+	}
 }
 
 // setOffsetSeed calculates a global offsetSeed per server relying on extra label set.
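Stripped of the Manager plumbing, the detection above is a plain hash-map dedup: serialize each target's final label set into a stable key and warn when a second target produces the same key. A self-contained sketch of that pattern, with string maps standing in for Prometheus's labels.Labels and Target types (all names below are illustrative, not from the codebase):

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// labelKey builds a canonical string for a label set so that two targets with
// the same labels (in any order) map to the same key, much like a sorted labels serialization.
func labelKey(lbls map[string]string) string {
	names := make([]string, 0, len(lbls))
	for name := range lbls {
		names = append(names, name)
	}
	sort.Strings(names)

	var b strings.Builder
	for _, name := range names {
		fmt.Fprintf(&b, "%s=%q,", name, lbls[name])
	}
	return b.String()
}

func main() {
	// Three "targets" after relabelling; the first two collide.
	targets := []map[string]string{
		{"job": "node", "instance": "host-a:9100"},
		{"instance": "host-a:9100", "job": "node"},
		{"job": "node", "instance": "host-b:9100"},
	}

	seen := make(map[string]int, len(targets))
	for i, lbls := range targets {
		key := labelKey(lbls)
		if first, ok := seen[key]; ok {
			fmt.Printf("targets %d and %d have the same labels after relabelling: %s\n", first, i, key)
			continue
		}
		seen[key] = i
	}
}
```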

scrape/manager_test.go

@@ -622,6 +622,43 @@ func TestManagerTargetsUpdates(t *testing.T) {
 	}
 }
 
+func TestManagerDuplicateAfterRelabellingWarning(t *testing.T) {
+	var buf bytes.Buffer
+	writer := &buf
+	logger := promslog.New(&promslog.Config{Writer: writer})
+	opts := Options{EnableWarnIfTargetsRelabelledToSameLabels: true}
+	testRegistry := prometheus.NewRegistry()
+	m, err := NewManager(&opts, logger, nil, nil, testRegistry)
+	require.NoError(t, err)
+
+	m.scrapePools = map[string]*scrapePool{}
+	sp := &scrapePool{
+		activeTargets: map[uint64]*Target{},
+	}
+	targetScrapeCfg := &config.ScrapeConfig{
+		Scheme:         "https",
+		MetricsPath:    "/metrics",
+		JobName:        "job",
+		ScrapeInterval: model.Duration(time.Second),
+		ScrapeTimeout:  model.Duration(time.Second),
+	}
+	sp.activeTargets[uint64(0)] = &Target{
+		scrapeConfig: targetScrapeCfg,
+		tLabels:      map[model.LabelName]model.LabelValue{model.AddressLabel: "foo"},
+	}
+	sp.activeTargets[uint64(1)] = &Target{
+		scrapeConfig: targetScrapeCfg,
+		tLabels:      map[model.LabelName]model.LabelValue{model.AddressLabel: "bar"},
+	}
+	m.scrapePools["default"] = sp
+
+	m.reload()
+
+	require.Contains(t, buf.String(), "Found targets with same labels after relabelling")
+	require.Contains(t, buf.String(), "foo")
+	require.Contains(t, buf.String(), "bar")
+}
+
 func TestSetOffsetSeed(t *testing.T) {
 	getConfig := func(prometheus string) *config.Config {
 		cfgText := `
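One detail of the test worth highlighting is how the warning is asserted: the logger writes into an in-memory bytes.Buffer, so the test can match on the captured output. The same capture pattern with the standard library's log/slog, used here as a rough stand-in for promslog (which wraps slog):

```go
package main

import (
	"bytes"
	"fmt"
	"log/slog"
	"strings"
)

func main() {
	// Route structured log output into a buffer instead of stderr.
	var buf bytes.Buffer
	logger := slog.New(slog.NewTextHandler(&buf, nil))

	logger.Warn("Found targets with same labels after relabelling",
		"target_one", "foo", "target_two", "bar")

	// In a test this would be require.Contains(t, buf.String(), ...).
	fmt.Println(strings.Contains(buf.String(), "Found targets with same labels after relabelling")) // true
	fmt.Println(strings.Contains(buf.String(), "foo"), strings.Contains(buf.String(), "bar"))       // true true
}
```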