some content negotiation!

Owen Williams 2024-01-04 11:01:25 -05:00
parent 1d8e6e8b6e
commit f7159917a2
16 changed files with 1325 additions and 132 deletions


@@ -215,6 +215,10 @@ func (c *flagConfig) setFeatureListOptions(logger log.Logger) error {
 		config.DefaultConfig.GlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
 		config.DefaultGlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
 		level.Info(logger).Log("msg", "Experimental created timestamp zero ingestion enabled. Changed default scrape_protocols to prefer PrometheusProto format.", "global.scrape_protocols", fmt.Sprintf("%v", config.DefaultGlobalConfig.ScrapeProtocols))
+	case "utf8-names":
+		config.DefaultConfig.GlobalConfig.AllowUTF8Names = true
+		config.DefaultGlobalConfig.AllowUTF8Names = true
+		model.NameValidationScheme = model.UTF8Validation
 	case "":
 		continue
 	case "promql-at-modifier", "promql-negative-offset":
@@ -429,7 +433,7 @@ func main() {
 	a.Flag("scrape.discovery-reload-interval", "Interval used by scrape manager to throttle target groups updates.").
 		Hidden().Default("5s").SetValue(&cfg.scrape.DiscoveryReloadInterval)
-	a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: agent, exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-at-modifier, promql-negative-offset, promql-per-step-stats, promql-experimental-functions, remote-write-receiver (DEPRECATED), extra-scrape-metrics, new-service-discovery-manager, auto-gomaxprocs, no-default-scrape-port, native-histograms, otlp-write-receiver. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details.").
+	a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: agent, exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-at-modifier, promql-negative-offset, promql-per-step-stats, promql-experimental-functions, remote-write-receiver (DEPRECATED), extra-scrape-metrics, new-service-discovery-manager, auto-gomaxprocs, no-default-scrape-port, native-histograms, otlp-write-receiver, utf8-names. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details.").
 		Default("").StringsVar(&cfg.featureList)
 	promlogflag.AddFlags(a, &cfg.promlogConfig)
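Note: besides changing the two config defaults, the new case only flips the package-global validation scheme in prometheus/common. A minimal sketch of what that toggle means for callers, assuming the forked prometheus/common pinned in go.mod below (where model.NameValidationScheme, model.UTF8Validation, and model.LegacyValidation exist and IsValidMetricName consults the scheme):

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	name := model.LabelValue("http.server.duration") // dotted name, only legal under UTF-8 validation

	model.NameValidationScheme = model.LegacyValidation
	fmt.Println(model.IsValidMetricName(name)) // false: dots fail the legacy [a-zA-Z_:][a-zA-Z0-9_:]* rule

	model.NameValidationScheme = model.UTF8Validation
	fmt.Println(model.IsValidMetricName(name)) // true: any non-empty, valid UTF-8 string is accepted
}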


@@ -102,6 +102,7 @@ func PushMetrics(url *url.URL, roundTripper http.RoundTripper, headers map[strin
 }
 
 func parseAndPushMetrics(client *remote.Client, data []byte, labels map[string]string) bool {
+	// XXXXX ok we need communication between this call (it should escape)
 	metricsData, err := fmtutil.MetricTextToWriteRequest(bytes.NewReader(data), labels)
 	if err != nil {
 		fmt.Fprintln(os.Stderr, " FAILED:", err)
@@ -116,6 +117,7 @@ func parseAndPushMetrics(client *remote.Client, data []byte, labels map[string]s
 	// Encode the request body into snappy encoding.
 	compressed := snappy.Encode(nil, raw)
+	// XXXXXXXXX and this call to store (which sets the content headers)
 	err = client.Store(context.Background(), compressed, 0)
 	if err != nil {
 		fmt.Fprintln(os.Stderr, " FAILED:", err)


@@ -421,6 +421,8 @@ type GlobalConfig struct {
 	// Keep no more than this many dropped targets per job.
 	// 0 means no limit.
 	KeepDroppedTargets uint `yaml:"keep_dropped_targets,omitempty"`
+	// Allow UTF8 Metric and Label Names
+	AllowUTF8Names bool `yaml:"utf8_names,omitempty"`
 }
 
 // ScrapeProtocol represents supported protocol for scraping metrics.
@@ -446,6 +448,7 @@ var (
 	PrometheusText0_0_4  ScrapeProtocol = "PrometheusText0.0.4"
 	OpenMetricsText0_0_1 ScrapeProtocol = "OpenMetricsText0.0.1"
 	OpenMetricsText1_0_0 ScrapeProtocol = "OpenMetricsText1.0.0"
+	UTF8NamesHeader      string         = "validchars=utf8"
 
 	ScrapeProtocolsHeaders = map[ScrapeProtocol]string{
 		PrometheusProto: "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited",
@@ -616,6 +619,8 @@ type ScrapeConfig struct {
 	// Keep no more than this many dropped targets per job.
 	// 0 means no limit.
 	KeepDroppedTargets uint `yaml:"keep_dropped_targets,omitempty"`
+	// Allow UTF8 Metric and Label Names
+	AllowUTF8Names bool `yaml:"utf8_names,omitempty"`
 
 	// We cannot do proper Go type embedding below as the parser will then parse
 	// values arbitrarily into the overflow maps of further-down types.

go.mod (4 changed lines)

@@ -212,3 +212,7 @@ exclude (
 	github.com/grpc-ecosystem/grpc-gateway v1.14.7
 	google.golang.org/api v0.30.0
 )
+
+replace github.com/prometheus/common => /home/owilliams/src/third_party/common
+
+replace github.com/prometheus/client_golang => /home/owilliams/src/grafana/client_golang

go.sum (1194 changed lines)

File diff suppressed because it is too large.


@@ -85,6 +85,10 @@ func New(b []byte, contentType string, parseClassicHistograms bool) (Parser, err
 		return NewPromParser(b), nil
 	}
+	// XXXX looks like this could be a place to decide if UTF8 is ok?? or is this
+	// all about we need a global option ---- yeah I think prometheus is either
+	// utf8 on and some preferred escaping, or it's utf8 off. not per scrape target.
+	// In which case, nothing needs to change here.
 	mediaType, _, err := mime.ParseMediaType(contentType)
 	if err != nil {
 		return NewPromParser(b), err
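For reference on the comment above: this is how a validchars parameter on a scraped Content-Type surfaces through mime.ParseMediaType. The header value matches the one used in the scrape tests below; nothing in this hunk consumes the parameter yet:

package main

import (
	"fmt"
	"mime"
)

func main() {
	contentType := "text/plain; version=1.0.0; validchars=utf8"

	mediaType, params, err := mime.ParseMediaType(contentType)
	if err != nil {
		panic(err)
	}
	fmt.Println(mediaType)            // text/plain
	fmt.Println(params["validchars"]) // utf8
}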


@@ -26,7 +26,6 @@ import (
 	"github.com/prometheus/common/model"
-	"github.com/prometheus/common/model"
 	"github.com/prometheus/prometheus/model/exemplar"
 	"github.com/prometheus/prometheus/model/histogram"
 	"github.com/prometheus/prometheus/model/labels"


@@ -301,10 +301,10 @@ foo_total 17.0 1520879607.789 # {id="counter-test"} 5`
 }
 
 func TestUTF8OpenMetricsParse(t *testing.T) {
-	// model.NameValidationScheme = model.UTF8Validation
-	// defer func(){
-	// 	model.NameValidationScheme = model.LegacyValidation
-	// }()
+	model.NameValidationScheme = model.UTF8Validation
+	defer func(){
+		model.NameValidationScheme = model.LegacyValidation
+	}()
 	input := `# HELP "go.gc_duration_seconds" A summary of the GC invocation durations.
 # TYPE "go.gc_duration_seconds" summary
@@ -324,7 +324,7 @@ func TestUTF8OpenMetricsParse(t *testing.T) {
 		m       string
 		t       *int64
 		v       float64
-		typ     MetricType
+		typ     model.MetricType
 		help    string
 		unit    string
 		comment string
@@ -335,7 +335,7 @@ func TestUTF8OpenMetricsParse(t *testing.T) {
 		help: "A summary of the GC invocation durations.",
 	}, {
 		m:   "go.gc_duration_seconds",
-		typ: MetricTypeSummary,
+		typ: model.MetricTypeSummary,
 	}, {
 		m:    "go.gc_duration_seconds",
 		unit: "seconds",


@@ -28,7 +28,6 @@ import (
 	"github.com/prometheus/common/model"
-	"github.com/prometheus/common/model"
 	"github.com/prometheus/prometheus/model/exemplar"
 	"github.com/prometheus/prometheus/model/histogram"
 	"github.com/prometheus/prometheus/model/labels"


@@ -219,10 +219,10 @@ testmetric{label="\"bar\""} 1`
 }
 
 func TestUTF8PromParse(t *testing.T) {
-	// model.NameValidationScheme = model.UTF8Validation
-	// defer func() {
-	// 	model.NameValidationScheme = model.LegacyValidation
-	// }()
+	model.NameValidationScheme = model.UTF8Validation
+	defer func() {
+		model.NameValidationScheme = model.LegacyValidation
+	}()
 	input := `# HELP "go.gc_duration_seconds" A summary of the GC invocation durations.
 # TYPE "go.gc_duration_seconds" summary
@@ -242,7 +242,7 @@ func TestUTF8PromParse(t *testing.T) {
 		m       string
 		t       *int64
 		v       float64
-		typ     MetricType
+		typ     model.MetricType
 		help    string
 		comment string
 	}{
@@ -251,7 +251,7 @@
 		help: "A summary of the GC invocation durations.",
 	}, {
 		m:   "go.gc_duration_seconds",
-		typ: MetricTypeSummary,
+		typ: model.MetricTypeSummary,
 	}, {
 		m: `{"go.gc_duration_seconds",quantile="0"}`,
 		v: 4.9351e-05,
@@ -597,7 +597,7 @@ func BenchmarkParse(b *testing.B) {
 			for i := 0; i < b.N; i += promtestdataSampleCount {
 				decSamples := make(model.Vector, 0, 50)
 				sdec := expfmt.SampleDecoder{
-					Dec: expfmt.NewDecoder(bytes.NewReader(buf), expfmt.FmtText),
+					Dec: expfmt.NewDecoder(bytes.NewReader(buf), expfmt.FmtText_1_0_0),
 					Opts: &expfmt.DecodeOptions{
 						Timestamp: model.TimeFromUnixNano(0),
 					},


@@ -66,6 +66,9 @@ func (ReadRequest_ResponseType) EnumDescriptor() ([]byte, []int) {
 type WriteRequest struct {
 	Timeseries []TimeSeries     `protobuf:"bytes,1,rep,name=timeseries,proto3" json:"timeseries"`
 	Metadata   []MetricMetadata `protobuf:"bytes,3,rep,name=metadata,proto3" json:"metadata"`
+	// If true, metric names and labels can use the full UTF-8 character set.
+	// Otherwise, apply the legacy naming convention.
+	Utf8Names            bool     `protobuf:"varint,4,opt,name=utf8_names,json=utf8Names,proto3" json:"utf8_names,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized     []byte   `json:"-"`
 	XXX_sizecache        int32    `json:"-"`
@@ -118,6 +121,13 @@ func (m *WriteRequest) GetMetadata() []MetricMetadata {
 	return nil
 }
+
+func (m *WriteRequest) GetUtf8Names() bool {
+	if m != nil {
+		return m.Utf8Names
+	}
+	return false
+}
 
 // ReadRequest represents a remote read request.
 type ReadRequest struct {
 	Queries []*Query `protobuf:"bytes,1,rep,name=queries,proto3" json:"queries,omitempty"`
@@ -420,38 +430,40 @@ func init() {
 func init() { proto.RegisterFile("remote.proto", fileDescriptor_eefc82927d57d89b) }
 
 var fileDescriptor_eefc82927d57d89b = []byte{
-	// 496 bytes of a gzipped FileDescriptorProto
+	// 517 bytes of a gzipped FileDescriptorProto
	[regenerated gzipped descriptor bytes elided]
 }
 
 func (m *WriteRequest) Marshal() (dAtA []byte, err error) {
@@ -478,6 +490,16 @@ func (m *WriteRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
 		i -= len(m.XXX_unrecognized)
 		copy(dAtA[i:], m.XXX_unrecognized)
 	}
+	if m.Utf8Names {
+		i--
+		if m.Utf8Names {
+			dAtA[i] = 1
+		} else {
+			dAtA[i] = 0
+		}
+		i--
+		dAtA[i] = 0x20
+	}
 	if len(m.Metadata) > 0 {
 		for iNdEx := len(m.Metadata) - 1; iNdEx >= 0; iNdEx-- {
 			{
@@ -788,6 +810,9 @@ func (m *WriteRequest) Size() (n int) {
 			n += 1 + l + sovRemote(uint64(l))
 		}
 	}
+	if m.Utf8Names {
+		n += 2
+	}
 	if m.XXX_unrecognized != nil {
 		n += len(m.XXX_unrecognized)
 	}
@@ -1007,6 +1032,26 @@ func (m *WriteRequest) Unmarshal(dAtA []byte) error {
 				return err
 			}
 			iNdEx = postIndex
+		case 4:
+			if wireType != 0 {
+				return fmt.Errorf("proto: wrong wireType = %d for field Utf8Names", wireType)
+			}
+			var v int
+			for shift := uint(0); ; shift += 7 {
+				if shift >= 64 {
+					return ErrIntOverflowRemote
+				}
+				if iNdEx >= l {
+					return io.ErrUnexpectedEOF
+				}
+				b := dAtA[iNdEx]
+				iNdEx++
+				v |= int(b&0x7F) << shift
+				if b < 0x80 {
+					break
+				}
+			}
+			m.Utf8Names = bool(v != 0)
 		default:
 			iNdEx = preIndex
 			skippy, err := skipRemote(dAtA[iNdEx:])
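The 0x20 written by the marshal hunk above is just the protobuf tag byte for the new field; a quick sketch of the arithmetic:

package main

import "fmt"

func main() {
	const fieldNumber = 4 // utf8_names
	const wireType = 0    // varint, used for bool
	tag := byte(fieldNumber<<3 | wireType)
	fmt.Printf("0x%02x\n", tag) // 0x20, the byte MarshalToSizedBuffer emits before the bool value
}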


@@ -25,6 +25,9 @@ message WriteRequest {
   // We reserve it to avoid any compatibility issues.
   reserved 2;
   repeated prometheus.MetricMetadata metadata = 3 [(gogoproto.nullable) = false];
+  // If true, metric names and labels can use the full UTF-8 character set.
+  // Otherwise, apply the legacy naming convention.
+  bool utf8_names = 4;
 }
 
 // ReadRequest represents a remote read request.
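For a sense of how a sender would use the new field, a minimal sketch; Utf8Names exists only with the prompb regenerated in this commit, and the metric name is an arbitrary example:

package main

import (
	"fmt"

	"github.com/golang/snappy"
	"github.com/prometheus/prometheus/prompb"
)

func main() {
	wr := &prompb.WriteRequest{
		Utf8Names: true, // tell the receiver that names may use the full UTF-8 range
		Timeseries: []prompb.TimeSeries{{
			Labels:  []prompb.Label{{Name: "__name__", Value: "http.server.duration"}},
			Samples: []prompb.Sample{{Value: 1, Timestamp: 0}},
		}},
	}

	raw, err := wr.Marshal()
	if err != nil {
		panic(err)
	}
	compressed := snappy.Encode(nil, raw) // same framing parseAndPushMetrics uses
	fmt.Println(len(compressed), "bytes")
}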


@@ -302,7 +302,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 			client:               sp.client,
 			timeout:              timeout,
 			bodySizeLimit:        bodySizeLimit,
-			acceptHeader:         acceptHeader(cfg.ScrapeProtocols),
+			acceptHeader:         acceptHeader(cfg.ScrapeProtocols, cfg.AllowUTF8Names),
 			acceptEncodingHeader: acceptEncodingHeader(enableCompression),
 		}
 		newLoop = sp.newLoop(scrapeLoopOptions{
@@ -429,7 +429,7 @@ func (sp *scrapePool) sync(targets []*Target) {
 			client:               sp.client,
 			timeout:              timeout,
 			bodySizeLimit:        bodySizeLimit,
-			acceptHeader:         acceptHeader(sp.config.ScrapeProtocols),
+			acceptHeader:         acceptHeader(sp.config.ScrapeProtocols, sp.config.AllowUTF8Names),
 			acceptEncodingHeader: acceptEncodingHeader(enableCompression),
 			metrics:              sp.metrics,
 		}
@@ -667,11 +667,16 @@ var errBodySizeLimit = errors.New("body size limit exceeded")
 // acceptHeader transforms preference from the options into specific header values as
 // https://www.rfc-editor.org/rfc/rfc9110.html#name-accept defines.
 // No validation is here, we expect scrape protocols to be validated already.
-func acceptHeader(sps []config.ScrapeProtocol) string {
+func acceptHeader(sps []config.ScrapeProtocol, allowUTF8Names bool) string {
 	var vals []string
 	weight := len(config.ScrapeProtocolsHeaders) + 1
 	for _, sp := range sps {
-		vals = append(vals, fmt.Sprintf("%s;q=0.%d", config.ScrapeProtocolsHeaders[sp], weight))
+		val := config.ScrapeProtocolsHeaders[sp]
+		if allowUTF8Names {
+			val += ";" + config.UTF8NamesHeader
+		}
+		val += fmt.Sprintf(";q=0.%d", weight)
+		vals = append(vals, val)
 		weight--
 	}
 	// Default match anything.
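For reference, a self-contained sketch of the Accept header this produces. The protocol map and the trailing */* entry are simplified stand-ins for config.ScrapeProtocolsHeaders and the real default weighting, but the media-type strings match what Prometheus sends:

package main

import (
	"fmt"
	"strings"
)

const utf8NamesHeader = "validchars=utf8" // mirrors config.UTF8NamesHeader above

var scrapeProtocolsHeaders = map[string]string{
	"OpenMetricsText1.0.0": "application/openmetrics-text;version=1.0.0",
	"PrometheusText0.0.4":  "text/plain;version=0.0.4",
}

func acceptHeader(sps []string, allowUTF8Names bool) string {
	var vals []string
	weight := len(scrapeProtocolsHeaders) + 1
	for _, sp := range sps {
		val := scrapeProtocolsHeaders[sp]
		if allowUTF8Names {
			val += ";" + utf8NamesHeader
		}
		val += fmt.Sprintf(";q=0.%d", weight)
		vals = append(vals, val)
		weight--
	}
	vals = append(vals, "*/*;q=0.1") // default: match anything
	return strings.Join(vals, ",")
}

func main() {
	fmt.Println(acceptHeader([]string{"OpenMetricsText1.0.0", "PrometheusText0.0.4"}, true))
	// application/openmetrics-text;version=1.0.0;validchars=utf8;q=0.3,text/plain;version=0.0.4;validchars=utf8;q=0.2,*/*;q=0.1
}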


@@ -2336,12 +2336,15 @@ func TestTargetScraperScrapeOK(t *testing.T) {
 	server := httptest.NewServer(
 		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-			if protobufParsing {
 			accept := r.Header.Get("Accept")
+			if protobufParsing {
 				if !strings.HasPrefix(accept, "application/vnd.google.protobuf;") {
 					t.Errorf("Expected Accept header to prefer application/vnd.google.protobuf, got %q", accept)
 				}
 			}
+			if strings.Contains(accept, "validchars=utf8") {
+				t.Errorf("Expected Accept header not to allow utf8, got %q", accept)
+			}
 
 			timeout := r.Header.Get("X-Prometheus-Scrape-Timeout-Seconds")
 			if timeout != expectedTimeout {
@@ -2381,9 +2384,68 @@
 		require.Equal(t, "metric_a 1\nmetric_b 2\n", buf.String())
 	}
 
-	runTest(acceptHeader(config.DefaultScrapeProtocols))
+	runTest(acceptHeader(config.DefaultScrapeProtocols, false))
 	protobufParsing = true
-	runTest(acceptHeader(config.DefaultProtoFirstScrapeProtocols))
+	runTest(acceptHeader(config.DefaultProtoFirstScrapeProtocols, false))
+}
+
+func TestUTF8TargetScraperScrapeOK(t *testing.T) {
+	const (
+		configTimeout   = 1500 * time.Millisecond
+		expectedTimeout = "1.5"
+	)
+
+	server := httptest.NewServer(
+		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			accept := r.Header.Get("Accept")
+			if !strings.Contains(accept, "validchars=utf8") {
+				t.Errorf("Expected Accept header to allow utf8, got %q", accept)
+			}
+
+			timeout := r.Header.Get("X-Prometheus-Scrape-Timeout-Seconds")
+			if timeout != expectedTimeout {
+				t.Errorf("Expected scrape timeout header %q, got %q", expectedTimeout, timeout)
+			}
+
+			w.Header().Set("Content-Type", `text/plain; version=1.0.0; validchars=utf8`)
+			w.Write([]byte(`{"metric.a"} 1
+{"metric.b"} 2
+`))
+		}),
+	)
+	defer server.Close()
+
+	serverURL, err := url.Parse(server.URL)
+	if err != nil {
+		panic(err)
+	}
+
+	runTest := func(acceptHeader string) {
+		ts := &targetScraper{
+			Target: &Target{
+				labels: labels.FromStrings(
+					model.SchemeLabel, serverURL.Scheme,
+					model.AddressLabel, serverURL.Host,
+				),
+			},
+			client:       http.DefaultClient,
+			timeout:      configTimeout,
+			acceptHeader: acceptHeader,
+		}
+		var buf bytes.Buffer
+
+		resp, err := ts.scrape(context.Background())
+		require.NoError(t, err)
+		contentType, err := ts.readResponse(context.Background(), resp, &buf)
+		require.NoError(t, err)
+		require.Equal(t, "text/plain; version=1.0.0; validchars=utf8", contentType)
+		require.Equal(t, `{"metric.a"} 1
+{"metric.b"} 2
+`, buf.String())
+	}
+
+	runTest(acceptHeader(config.DefaultScrapeProtocols, true))
+	runTest(acceptHeader(config.DefaultProtoFirstScrapeProtocols, true))
 }
 
 func TestTargetScrapeScrapeCancel(t *testing.T) {
@@ -2409,7 +2471,7 @@ func TestTargetScrapeScrapeCancel(t *testing.T) {
 			),
 		},
 		client:       http.DefaultClient,
-		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols),
+		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, false),
 	}
 
 	ctx, cancel := context.WithCancel(context.Background())
@@ -2464,7 +2526,7 @@ func TestTargetScrapeScrapeNotFound(t *testing.T) {
 			),
 		},
 		client:       http.DefaultClient,
-		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols),
+		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, false),
 	}
 
 	resp, err := ts.scrape(context.Background())
@@ -2508,7 +2570,7 @@ func TestTargetScraperBodySizeLimit(t *testing.T) {
 		},
 		client:        http.DefaultClient,
 		bodySizeLimit: bodySizeLimit,
-		acceptHeader:  acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols),
+		acceptHeader:  acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, false),
 		metrics:       newTestScrapeMetrics(t),
 	}
 	var buf bytes.Buffer


@@ -274,6 +274,7 @@ func (c *Client) Read(ctx context.Context, query *prompb.Query) (*prompb.QueryRe
 	c.readQueries.Inc()
 	defer c.readQueries.Dec()
+	// XXXXXX based on content negotiation we may need to escape the query. (If the other side is old)
 	req := &prompb.ReadRequest{
 		// TODO: Support batching multiple queries into one read request,
 		// as the protobuf interface allows for it.


@@ -58,6 +58,10 @@ func MetricTextToWriteRequest(input io.Reader, labels map[string]string) (*promp
 func MetricFamiliesToWriteRequest(mf map[string]*dto.MetricFamily, extraLabels map[string]string) (*prompb.WriteRequest, error) {
 	wr := &prompb.WriteRequest{}
+	if model.NameValidationScheme == model.UTF8Validation {
+		wr.Utf8Names = true
+	}
+
 	// build metric list
 	sortedMetricNames := make([]string, 0, len(mf))
 	for metric := range mf {
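A quick end-to-end sketch of how the flag reaches a push, assuming the patched prometheus/common and prompb pinned by the replace directives in go.mod above:

package main

import (
	"fmt"
	"strings"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/util/fmtutil"
)

func main() {
	model.NameValidationScheme = model.UTF8Validation // e.g. set by the utf8-names feature flag

	text := "# TYPE demo_pushes counter\ndemo_pushes 5\n"
	wr, err := fmtutil.MetricTextToWriteRequest(strings.NewReader(text), map[string]string{"job": "demo"})
	if err != nil {
		panic(err)
	}
	fmt.Println(wr.Utf8Names) // true, set by MetricFamiliesToWriteRequest above
}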