From 5868e36d91ed27a40561c47ac5482c0d2844dcf1 Mon Sep 17 00:00:00 2001 From: Matt Hughes Date: Thu, 20 Feb 2025 11:22:23 +0000 Subject: [PATCH] scraper: fix UTF-8 scraping header always sent with PrometheusText1.0.0 The `Accept` header should not include `escaping=allow-utf-8` unless explicitly requested. Conveniently, there was already a test covering this header's value, it just required updating so it also asserts that this value in the header is not set in the cases we don't expect it to be set. I also converted those tests into table tests to help make failures clearer. Issue: https://github.com/prometheus/prometheus/issues/15857 Signed-off-by: Matt Hughes --- config/config.go | 2 +- scrape/scrape_test.go | 49 +++++++++++++++++++++++++++++++++++-------- 2 files changed, 41 insertions(+), 10 deletions(-) diff --git a/config/config.go b/config/config.go index 73282ac429..465affe082 100644 --- a/config/config.go +++ b/config/config.go @@ -523,7 +523,7 @@ var ( ScrapeProtocolsHeaders = map[ScrapeProtocol]string{ PrometheusProto: "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited", PrometheusText0_0_4: "text/plain;version=0.0.4", - PrometheusText1_0_0: "text/plain;version=1.0.0;escaping=allow-utf-8", + PrometheusText1_0_0: "text/plain;version=1.0.0", OpenMetricsText0_0_1: "application/openmetrics-text;version=0.0.1", OpenMetricsText1_0_0: "application/openmetrics-text;version=1.0.0", } diff --git a/scrape/scrape_test.go b/scrape/scrape_test.go index b30dbfa1b9..5d79650828 100644 --- a/scrape/scrape_test.go +++ b/scrape/scrape_test.go @@ -2889,6 +2889,8 @@ func TestTargetScraperScrapeOK(t *testing.T) { accept := r.Header.Get("Accept") if allowUTF8 { require.Containsf(t, accept, "escaping=allow-utf-8", "Expected Accept header to allow utf8, got %q", accept) + } else { + require.NotContainsf(t, accept, "escaping=allow-utf-8", "Expected Accept header to not allow utf8, got %q", accept) } if protobufParsing { require.True(t, 
strings.HasPrefix(accept, "application/vnd.google.protobuf;"), @@ -2924,7 +2926,7 @@ func TestTargetScraperScrapeOK(t *testing.T) { panic(err) } - runTest := func(acceptHeader string) { + runTest := func(t *testing.T, acceptHeader string) { ts := &targetScraper{ Target: &Target{ labels: labels.FromStrings( @@ -2951,14 +2953,43 @@ func TestTargetScraperScrapeOK(t *testing.T) { require.Equal(t, "metric_a 1\nmetric_b 2\n", buf.String()) } - runTest(acceptHeader(config.DefaultScrapeProtocols, model.LegacyValidation)) - protobufParsing = true - runTest(acceptHeader(config.DefaultProtoFirstScrapeProtocols, model.LegacyValidation)) - protobufParsing = false - allowUTF8 = true - runTest(acceptHeader(config.DefaultScrapeProtocols, model.UTF8Validation)) - protobufParsing = true - runTest(acceptHeader(config.DefaultProtoFirstScrapeProtocols, model.UTF8Validation)) + for _, tc := range []struct { + scrapeProtocols []config.ScrapeProtocol + scheme model.ValidationScheme + protobufParsing bool + allowUTF8 bool + }{ + { + scrapeProtocols: config.DefaultScrapeProtocols, + scheme: model.LegacyValidation, + protobufParsing: false, + allowUTF8: false, + }, + { + scrapeProtocols: config.DefaultProtoFirstScrapeProtocols, + scheme: model.LegacyValidation, + protobufParsing: true, + allowUTF8: false, + }, + { + scrapeProtocols: config.DefaultScrapeProtocols, + scheme: model.UTF8Validation, + protobufParsing: false, + allowUTF8: true, + }, + { + scrapeProtocols: config.DefaultProtoFirstScrapeProtocols, + scheme: model.UTF8Validation, + protobufParsing: true, + allowUTF8: true, + }, + } { + t.Run(fmt.Sprintf("%+v", tc), func(t *testing.T) { + protobufParsing = tc.protobufParsing + allowUTF8 = tc.allowUTF8 + runTest(t, acceptHeader(tc.scrapeProtocols, tc.scheme)) + }) + } } func TestTargetScrapeScrapeCancel(t *testing.T) {