add warn under feature flag

Signed-off-by: darshanime <deathbullet@gmail.com>
This commit is contained in:
darshanime 2025-01-03 15:35:53 +05:30
parent db7a60f162
commit 354a993f3b
4 changed files with 34 additions and 45 deletions

View file

@@ -1,6 +1,7 @@
# Changelog
## unreleased
* [ENHANCEMENT] Scraping: add a warning if multiple targets have identical labels after relabelling. This is enabled under the feature flag `warn-if-targets-relabelled-to-same-labels`. #9589
## 3.1.0 / 2025-01-02

View file

@@ -264,6 +264,9 @@ func (c *flagConfig) setFeatureListOptions(logger *slog.Logger) error {
config.DefaultConfig.GlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
config.DefaultGlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
logger.Info("Experimental created timestamp zero ingestion enabled. Changed default scrape_protocols to prefer PrometheusProto format.", "global.scrape_protocols", fmt.Sprintf("%v", config.DefaultGlobalConfig.ScrapeProtocols))
case "warn-if-targets-relabelled-to-same-labels":
c.scrape.EnableWarnIfTargetsRelabelledToSameLabels = true
logger.Info("Enabled warning if targets relabelled to same labels")
case "delayed-compaction":
c.tsdb.EnableDelayedCompaction = true
logger.Info("Experimental delayed compaction is enabled.")

View file

@@ -90,6 +90,9 @@ type Options struct {
// Optional HTTP client options to use when scraping.
HTTPClientOptions []config_util.HTTPClientOption
// Option to warn if targets relabelled to same labels
EnableWarnIfTargetsRelabelledToSameLabels bool
// private option for testability.
skipOffsetting bool
}
@@ -206,7 +209,9 @@ func (m *Manager) reload() {
m.mtxScrape.Unlock()
wg.Wait()
m.warnIfTargetsRelabelledToSameLabels()
if m.opts.EnableWarnIfTargetsRelabelledToSameLabels {
m.warnIfTargetsRelabelledToSameLabels()
}
}
func (m *Manager) warnIfTargetsRelabelledToSameLabels() {
@@ -220,6 +225,7 @@ func (m *Manager) warnIfTargetsRelabelledToSameLabels() {
activeTargets := make(map[string]*Target, totalTargets)
buf := [1024]byte{}
builder := labels.NewBuilder(labels.EmptyLabels())
for _, scrapePool := range m.scrapePools {
for _, target := range scrapePool.activeTargets {
lStr := string(target.labels.Bytes(buf[:]))
@@ -228,10 +234,10 @@ func (m *Manager) warnIfTargetsRelabelledToSameLabels() {
activeTargets[lStr] = target
continue
}
level.Warn(m.logger).Log(
"msg", "Found targets with same labels after relabelling",
"target_one", t.DiscoveredLabels().Get(model.AddressLabel),
"target_two", target.DiscoveredLabels().Get(model.AddressLabel),
m.logger.Warn(
"Found targets with same labels after relabelling",
"target_one", t.DiscoveredLabels(builder).Get(model.AddressLabel),
"target_two", target.DiscoveredLabels(builder).Get(model.AddressLabel),
"labels", target.labels.String(),
)
}

View file

@@ -622,62 +622,41 @@ func TestManagerTargetsUpdates(t *testing.T) {
}
}
func BenchmarkManagerReload(b *testing.B) {
opts := Options{}
testRegistry := prometheus.NewRegistry()
m, err := NewManager(&opts, nil, nil, testRegistry)
require.NoError(b, err)
m.scrapePools = map[string]*scrapePool{}
sp := &scrapePool{
activeTargets: map[uint64]*Target{},
}
for i := 0; i < 10000; i++ {
sp.activeTargets[uint64(i)] = &Target{
discoveredLabels: labels.FromStrings("__address__", fmt.Sprintf("foo-%d", i)),
labels: labels.FromStrings("label_key", fmt.Sprintf("foo-%d", i)),
}
}
m.scrapePools["default"] = sp
b.ResetTimer()
for i := 0; i < b.N; i++ {
m.reload()
}
}
func TestManagerDuplicateAfterRelabellingWarning(t *testing.T) {
var output []interface{}
logger := log.Logger(log.LoggerFunc(func(keyvals ...interface{}) error {
output = keyvals
return nil
}))
var buf bytes.Buffer
writer := &buf
logger := promslog.New(&promslog.Config{Writer: writer})
opts := Options{}
opts := Options{EnableWarnIfTargetsRelabelledToSameLabels: true}
testRegistry := prometheus.NewRegistry()
m, err := NewManager(&opts, logger, nil, testRegistry)
m, err := NewManager(&opts, logger, nil, nil, testRegistry)
require.NoError(t, err)
m.scrapePools = map[string]*scrapePool{}
sp := &scrapePool{
activeTargets: map[uint64]*Target{},
}
targetScrapeCfg := &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
}
sp.activeTargets[uint64(0)] = &Target{
discoveredLabels: labels.FromStrings("__address__", "foo"),
labels: labels.FromStrings("label_key", "label_value"),
scrapeConfig: targetScrapeCfg,
tLabels: map[model.LabelName]model.LabelValue{model.AddressLabel: "foo"},
}
sp.activeTargets[uint64(1)] = &Target{
discoveredLabels: labels.FromStrings("__address__", "bar"),
labels: labels.FromStrings("label_key", "label_value"),
scrapeConfig: targetScrapeCfg,
tLabels: map[model.LabelName]model.LabelValue{model.AddressLabel: "bar"},
}
m.scrapePools["default"] = sp
m.reload()
require.Contains(t, output, "Found targets with same labels after relabelling")
require.Contains(t, output, "foo")
require.Contains(t, output, "bar")
require.Contains(t, output, `{label_key="label_value"}`)
require.Contains(t, buf.String(), "Found targets with same labels after relabelling")
require.Contains(t, buf.String(), "foo")
require.Contains(t, buf.String(), "bar")
}
func TestSetOffsetSeed(t *testing.T) {