Merge pull request #16061 from matthewhughes-uw/fix-escaping-unconditionally-set

scraper: fix UTF-8 scraping header always sent with PrometheusText1.0.0
This commit is contained in:
Owen Williams 2025-02-20 10:52:12 -05:00 committed by GitHub
commit 23af3463e5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 41 additions and 10 deletions

View file

@@ -523,7 +523,7 @@ var (
 	ScrapeProtocolsHeaders = map[ScrapeProtocol]string{
 		PrometheusProto:      "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited",
 		PrometheusText0_0_4:  "text/plain;version=0.0.4",
-		PrometheusText1_0_0:  "text/plain;version=1.0.0;escaping=allow-utf-8",
+		PrometheusText1_0_0:  "text/plain;version=1.0.0",
 		OpenMetricsText0_0_1: "application/openmetrics-text;version=0.0.1",
 		OpenMetricsText1_0_0: "application/openmetrics-text;version=1.0.0",
 	}

View file

@@ -2889,6 +2889,8 @@ func TestTargetScraperScrapeOK(t *testing.T) {
 		accept := r.Header.Get("Accept")
 		if allowUTF8 {
 			require.Containsf(t, accept, "escaping=allow-utf-8", "Expected Accept header to allow utf8, got %q", accept)
+		} else {
+			require.NotContainsf(t, accept, "escaping=allow-utf-8", "Expected Accept header to not allow utf8, got %q", accept)
 		}
 		if protobufParsing {
 			require.True(t, strings.HasPrefix(accept, "application/vnd.google.protobuf;"),
@@ -2924,7 +2926,7 @@ func TestTargetScraperScrapeOK(t *testing.T) {
 		panic(err)
 	}
-	runTest := func(acceptHeader string) {
+	runTest := func(t *testing.T, acceptHeader string) {
 		ts := &targetScraper{
 			Target: &Target{
 				labels: labels.FromStrings(
@@ -2951,14 +2953,43 @@ func TestTargetScraperScrapeOK(t *testing.T) {
 		require.Equal(t, "metric_a 1\nmetric_b 2\n", buf.String())
 	}
-	runTest(acceptHeader(config.DefaultScrapeProtocols, model.LegacyValidation))
-	protobufParsing = true
-	runTest(acceptHeader(config.DefaultProtoFirstScrapeProtocols, model.LegacyValidation))
-	protobufParsing = false
-	allowUTF8 = true
-	runTest(acceptHeader(config.DefaultScrapeProtocols, model.UTF8Validation))
-	protobufParsing = true
-	runTest(acceptHeader(config.DefaultProtoFirstScrapeProtocols, model.UTF8Validation))
+	for _, tc := range []struct {
+		scrapeProtocols []config.ScrapeProtocol
+		scheme          model.ValidationScheme
+		protobufParsing bool
+		allowUTF8       bool
+	}{
+		{
+			scrapeProtocols: config.DefaultScrapeProtocols,
+			scheme:          model.LegacyValidation,
+			protobufParsing: false,
+			allowUTF8:       false,
+		},
+		{
+			scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
+			scheme:          model.LegacyValidation,
+			protobufParsing: true,
+			allowUTF8:       false,
+		},
+		{
+			scrapeProtocols: config.DefaultScrapeProtocols,
+			scheme:          model.UTF8Validation,
+			protobufParsing: false,
+			allowUTF8:       true,
+		},
+		{
+			scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
+			scheme:          model.UTF8Validation,
+			protobufParsing: true,
+			allowUTF8:       true,
+		},
+	} {
+		t.Run(fmt.Sprintf("%+v", tc), func(t *testing.T) {
+			protobufParsing = tc.protobufParsing
+			allowUTF8 = tc.allowUTF8
+			runTest(t, acceptHeader(tc.scrapeProtocols, tc.scheme))
+		})
+	}
 }

 func TestTargetScrapeScrapeCancel(t *testing.T) {