add warning under a feature flag

Signed-off-by: darshanime <deathbullet@gmail.com>
This commit is contained in:
darshanime 2025-01-03 15:35:53 +05:30
parent db7a60f162
commit 354a993f3b
4 changed files with 34 additions and 45 deletions

View file

@ -1,6 +1,7 @@
# Changelog # Changelog
## unreleased ## unreleased
* [ENHANCEMENT] Scraping: add warning if targets relabel to same labels. This is enabled under the feature-flag `warn-if-targets-relabelled-to-same-labels`. #9589
## 3.1.0 / 2025-01-02 ## 3.1.0 / 2025-01-02

View file

@ -264,6 +264,9 @@ func (c *flagConfig) setFeatureListOptions(logger *slog.Logger) error {
config.DefaultConfig.GlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols config.DefaultConfig.GlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
config.DefaultGlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols config.DefaultGlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
logger.Info("Experimental created timestamp zero ingestion enabled. Changed default scrape_protocols to prefer PrometheusProto format.", "global.scrape_protocols", fmt.Sprintf("%v", config.DefaultGlobalConfig.ScrapeProtocols)) logger.Info("Experimental created timestamp zero ingestion enabled. Changed default scrape_protocols to prefer PrometheusProto format.", "global.scrape_protocols", fmt.Sprintf("%v", config.DefaultGlobalConfig.ScrapeProtocols))
case "warn-if-targets-relabelled-to-same-labels":
c.scrape.EnableWarnIfTargetsRelabelledToSameLabels = true
logger.Info("Enabled warning if targets relabelled to same labels")
case "delayed-compaction": case "delayed-compaction":
c.tsdb.EnableDelayedCompaction = true c.tsdb.EnableDelayedCompaction = true
logger.Info("Experimental delayed compaction is enabled.") logger.Info("Experimental delayed compaction is enabled.")

View file

@ -90,6 +90,9 @@ type Options struct {
// Optional HTTP client options to use when scraping. // Optional HTTP client options to use when scraping.
HTTPClientOptions []config_util.HTTPClientOption HTTPClientOptions []config_util.HTTPClientOption
// Option to warn if targets relabelled to same labels
EnableWarnIfTargetsRelabelledToSameLabels bool
// private option for testability. // private option for testability.
skipOffsetting bool skipOffsetting bool
} }
@ -206,7 +209,9 @@ func (m *Manager) reload() {
m.mtxScrape.Unlock() m.mtxScrape.Unlock()
wg.Wait() wg.Wait()
m.warnIfTargetsRelabelledToSameLabels() if m.opts.EnableWarnIfTargetsRelabelledToSameLabels {
m.warnIfTargetsRelabelledToSameLabels()
}
} }
func (m *Manager) warnIfTargetsRelabelledToSameLabels() { func (m *Manager) warnIfTargetsRelabelledToSameLabels() {
@ -220,6 +225,7 @@ func (m *Manager) warnIfTargetsRelabelledToSameLabels() {
activeTargets := make(map[string]*Target, totalTargets) activeTargets := make(map[string]*Target, totalTargets)
buf := [1024]byte{} buf := [1024]byte{}
builder := labels.NewBuilder(labels.EmptyLabels())
for _, scrapePool := range m.scrapePools { for _, scrapePool := range m.scrapePools {
for _, target := range scrapePool.activeTargets { for _, target := range scrapePool.activeTargets {
lStr := string(target.labels.Bytes(buf[:])) lStr := string(target.labels.Bytes(buf[:]))
@ -228,10 +234,10 @@ func (m *Manager) warnIfTargetsRelabelledToSameLabels() {
activeTargets[lStr] = target activeTargets[lStr] = target
continue continue
} }
level.Warn(m.logger).Log( m.logger.Warn(
"msg", "Found targets with same labels after relabelling", "Found targets with same labels after relabelling",
"target_one", t.DiscoveredLabels().Get(model.AddressLabel), "target_one", t.DiscoveredLabels(builder).Get(model.AddressLabel),
"target_two", target.DiscoveredLabels().Get(model.AddressLabel), "target_two", target.DiscoveredLabels(builder).Get(model.AddressLabel),
"labels", target.labels.String(), "labels", target.labels.String(),
) )
} }

View file

@ -622,62 +622,41 @@ func TestManagerTargetsUpdates(t *testing.T) {
} }
} }
// BenchmarkManagerReload measures the cost of Manager.reload with a large
// set of active targets (10k in one scrape pool), which exercises the
// duplicate-label scan performed during reload.
func BenchmarkManagerReload(b *testing.B) {
	registry := prometheus.NewRegistry()
	options := Options{}
	manager, err := NewManager(&options, nil, nil, registry)
	require.NoError(b, err)

	const targetCount = 10000
	pool := &scrapePool{
		activeTargets: make(map[uint64]*Target, targetCount),
	}
	for idx := 0; idx < targetCount; idx++ {
		addr := fmt.Sprintf("foo-%d", idx)
		pool.activeTargets[uint64(idx)] = &Target{
			discoveredLabels: labels.FromStrings("__address__", addr),
			labels:           labels.FromStrings("label_key", addr),
		}
	}
	manager.scrapePools = map[string]*scrapePool{"default": pool}

	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		manager.reload()
	}
}
func TestManagerDuplicateAfterRelabellingWarning(t *testing.T) { func TestManagerDuplicateAfterRelabellingWarning(t *testing.T) {
var output []interface{} var buf bytes.Buffer
logger := log.Logger(log.LoggerFunc(func(keyvals ...interface{}) error { writer := &buf
output = keyvals logger := promslog.New(&promslog.Config{Writer: writer})
return nil
}))
opts := Options{} opts := Options{EnableWarnIfTargetsRelabelledToSameLabels: true}
testRegistry := prometheus.NewRegistry() testRegistry := prometheus.NewRegistry()
m, err := NewManager(&opts, logger, nil, testRegistry) m, err := NewManager(&opts, logger, nil, nil, testRegistry)
require.NoError(t, err) require.NoError(t, err)
m.scrapePools = map[string]*scrapePool{} m.scrapePools = map[string]*scrapePool{}
sp := &scrapePool{ sp := &scrapePool{
activeTargets: map[uint64]*Target{}, activeTargets: map[uint64]*Target{},
} }
targetScrapeCfg := &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
}
sp.activeTargets[uint64(0)] = &Target{ sp.activeTargets[uint64(0)] = &Target{
discoveredLabels: labels.FromStrings("__address__", "foo"), scrapeConfig: targetScrapeCfg,
labels: labels.FromStrings("label_key", "label_value"), tLabels: map[model.LabelName]model.LabelValue{model.AddressLabel: "foo"},
} }
sp.activeTargets[uint64(1)] = &Target{ sp.activeTargets[uint64(1)] = &Target{
discoveredLabels: labels.FromStrings("__address__", "bar"), scrapeConfig: targetScrapeCfg,
labels: labels.FromStrings("label_key", "label_value"), tLabels: map[model.LabelName]model.LabelValue{model.AddressLabel: "bar"},
} }
m.scrapePools["default"] = sp m.scrapePools["default"] = sp
m.reload() m.reload()
require.Contains(t, output, "Found targets with same labels after relabelling") require.Contains(t, buf.String(), "Found targets with same labels after relabelling")
require.Contains(t, output, "foo") require.Contains(t, buf.String(), "foo")
require.Contains(t, output, "bar") require.Contains(t, buf.String(), "bar")
require.Contains(t, output, `{label_key="label_value"}`)
} }
func TestSetOffsetSeed(t *testing.T) { func TestSetOffsetSeed(t *testing.T) {