scrape: provide a fallback format (#15136)
scrape: Remove implicit fallback to the Prometheus text format

Remove implicit fallback to the Prometheus text format in case of
invalid/missing Content-Type and fail the scrape instead. Add ability to
specify a `fallback_scrape_protocol` in the scrape config.

---------

Signed-off-by: alexgreenbank <alex.greenbank@grafana.com>
Signed-off-by: Alex Greenbank <alex.greenbank@grafana.com>
Co-authored-by: Björn Rabenstein <beorn@grafana.com>
This commit is contained in:
parent 5505c83a4d
commit 421a3c22ea
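The new option is set per scrape job. As a quick orientation before the per-file diff, a minimal configuration sketch (job name and target are placeholders; the option name and value come from this change):

```yaml
scrape_configs:
  - job_name: legacy-exporter   # hypothetical job name
    # Parse responses as Prometheus text 0.0.4 whenever the target sends a
    # blank, unparseable, or unsupported Content-Type header.
    fallback_scrape_protocol: PrometheusText0.0.4
    static_configs:
      - targets: ['localhost:9100']   # hypothetical target
```

Without `fallback_scrape_protocol`, such a response now fails the scrape instead of being silently parsed as Prometheus text.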
CHANGELOG.md

@@ -2,6 +2,7 @@
 ## unreleased
 
+* [CHANGE] Scraping: Remove implicit fallback to the Prometheus text format in case of invalid/missing Content-Type and fail the scrape instead. Add ability to specify a `fallback_scrape_protocol` in the scrape config. #15136
 * [BUGFIX] PromQL: Fix stddev+stdvar aggregations to always ignore native histograms. #14941
 * [BUGFIX] PromQL: Fix stddev+stdvar aggregations to treat Infinity consistently. #14941
config/config.go

@@ -17,6 +17,7 @@ import (
 	"errors"
 	"fmt"
 	"log/slog"
+	"mime"
 	"net/url"
 	"os"
 	"path/filepath"
@@ -473,9 +474,22 @@ func (s ScrapeProtocol) Validate() error {
 	return nil
 }
 
+// HeaderMediaType returns the MIME mediaType for a particular ScrapeProtocol.
+func (s ScrapeProtocol) HeaderMediaType() string {
+	if _, ok := ScrapeProtocolsHeaders[s]; !ok {
+		return ""
+	}
+	mediaType, _, err := mime.ParseMediaType(ScrapeProtocolsHeaders[s])
+	if err != nil {
+		return ""
+	}
+	return mediaType
+}
+
 var (
 	PrometheusProto      ScrapeProtocol = "PrometheusProto"
 	PrometheusText0_0_4  ScrapeProtocol = "PrometheusText0.0.4"
+	PrometheusText1_0_0  ScrapeProtocol = "PrometheusText1.0.0"
 	OpenMetricsText0_0_1 ScrapeProtocol = "OpenMetricsText0.0.1"
 	OpenMetricsText1_0_0 ScrapeProtocol = "OpenMetricsText1.0.0"
 	UTF8NamesHeader      string         = model.EscapingKey + "=" + model.AllowUTF8
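The `HeaderMediaType` helper added above is what converts a configured fallback protocol into the bare media type (no MIME parameters) that the parser layer expects. A minimal sketch of its behavior; the identifiers used are exported by the config package after this change, only the program itself is illustrative:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/config"
)

func main() {
	// HeaderMediaType parses the protocol's Accept-header value and keeps
	// only the media type, e.g. "text/plain;version=0.0.4" -> "text/plain".
	for _, p := range []config.ScrapeProtocol{
		config.PrometheusProto,         // "application/vnd.google.protobuf"
		config.PrometheusText1_0_0,     // "text/plain"
		config.OpenMetricsText1_0_0,    // "application/openmetrics-text"
		config.ScrapeProtocol("bogus"), // unknown protocols map to ""
	} {
		fmt.Printf("%s -> %q\n", p, p.HeaderMediaType())
	}
}
```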
@@ -483,6 +497,7 @@ var (
 	ScrapeProtocolsHeaders = map[ScrapeProtocol]string{
 		PrometheusProto:      "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited",
 		PrometheusText0_0_4:  "text/plain;version=0.0.4",
+		PrometheusText1_0_0:  "text/plain;version=1.0.0;escaping=allow-utf-8",
 		OpenMetricsText0_0_1: "application/openmetrics-text;version=0.0.1",
 		OpenMetricsText1_0_0: "application/openmetrics-text;version=1.0.0",
 	}
@@ -492,6 +507,7 @@ var (
 	DefaultScrapeProtocols = []ScrapeProtocol{
 		OpenMetricsText1_0_0,
 		OpenMetricsText0_0_1,
+		PrometheusText1_0_0,
 		PrometheusText0_0_4,
 	}
@@ -503,6 +519,7 @@ var (
 		PrometheusProto,
 		OpenMetricsText1_0_0,
 		OpenMetricsText0_0_1,
+		PrometheusText1_0_0,
 		PrometheusText0_0_4,
 	}
 )
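Both preference lists now advertise the new text/plain 1.0.0 protocol during content negotiation. Ordering matters: a scrape request lists every acceptable protocol with descending preference. The helper below is a hypothetical illustration of that mapping (the real header construction lives in the scrape package and differs in detail):

```go
package main

import (
	"fmt"
	"strings"

	"github.com/prometheus/prometheus/config"
)

// acceptHeaderSketch is illustrative only: it renders a preference-ordered
// protocol list as an Accept header, giving earlier entries higher q-values.
func acceptHeaderSketch(prefs []config.ScrapeProtocol) string {
	parts := make([]string, 0, len(prefs)+1)
	weight := len(prefs) + 1
	for _, p := range prefs {
		parts = append(parts, fmt.Sprintf("%s;q=0.%d", config.ScrapeProtocolsHeaders[p], weight))
		weight--
	}
	parts = append(parts, fmt.Sprintf("*/*;q=0.%d", weight)) // catch-all, lowest priority
	return strings.Join(parts, ",")
}

func main() {
	fmt.Println(acceptHeaderSketch(config.DefaultScrapeProtocols))
}
```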
@@ -629,8 +646,15 @@ type ScrapeConfig struct {
 	// The protocols to negotiate during a scrape. It tells clients what
 	// protocol are accepted by Prometheus and with what preference (most wanted is first).
 	// Supported values (case sensitive): PrometheusProto, OpenMetricsText0.0.1,
-	// OpenMetricsText1.0.0, PrometheusText0.0.4.
+	// OpenMetricsText1.0.0, PrometheusText1.0.0, PrometheusText0.0.4.
 	ScrapeProtocols []ScrapeProtocol `yaml:"scrape_protocols,omitempty"`
+	// The fallback protocol to use if the Content-Type provided by the target
+	// is not provided, blank, or not one of the expected values.
+	// Supported values (case sensitive): PrometheusProto, OpenMetricsText0.0.1,
+	// OpenMetricsText1.0.0, PrometheusText1.0.0, PrometheusText0.0.4.
+	ScrapeFallbackProtocol ScrapeProtocol `yaml:"fallback_scrape_protocol,omitempty"`
+	// Whether to scrape a classic histogram that is also exposed as a native histogram.
+	ScrapeClassicHistograms bool `yaml:"scrape_classic_histograms,omitempty"`
 	// Whether to scrape a classic histogram, even if it is also exposed as a native histogram.
 	AlwaysScrapeClassicHistograms bool `yaml:"always_scrape_classic_histograms,omitempty"`
 	// File to which scrape failures are logged.
@@ -780,6 +804,12 @@ func (c *ScrapeConfig) Validate(globalConfig GlobalConfig) error {
 		return fmt.Errorf("%w for scrape config with job name %q", err, c.JobName)
 	}
 
+	if c.ScrapeFallbackProtocol != "" {
+		if err := c.ScrapeFallbackProtocol.Validate(); err != nil {
+			return fmt.Errorf("invalid fallback_scrape_protocol for scrape config with job name %q: %w", c.JobName, err)
+		}
+	}
+
 	switch globalConfig.MetricNameValidationScheme {
 	case LegacyValidationConfig:
 	case "", UTF8ValidationConfig:
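With the Validate hook above, a bad fallback protocol is rejected when the configuration is loaded rather than surfacing at scrape time. A sketch of how the error appears when constructing a `ScrapeConfig` directly (the job name and the deliberately lowercased value are illustrative):

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/config"
)

func main() {
	cfg := &config.ScrapeConfig{
		JobName: "node", // illustrative
		// Protocol names are case sensitive; this lowercased value is invalid.
		ScrapeFallbackProtocol: config.ScrapeProtocol("prometheusproto"),
	}
	// Validate fills defaults from the global config, then checks the
	// fallback protocol against the supported names.
	fmt.Println(cfg.Validate(config.DefaultGlobalConfig))
	// Expected, per the tests below: invalid fallback_scrape_protocol for
	// scrape config with job name "node": unknown scrape protocol prometheusproto, ...
}
```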
config/config_test.go

@@ -206,19 +206,20 @@ var expectedConf = &Config{
 	{
 		JobName: "prometheus",
 
 		HonorLabels:           true,
 		HonorTimestamps:       true,
 		ScrapeInterval:        model.Duration(15 * time.Second),
 		ScrapeTimeout:         DefaultGlobalConfig.ScrapeTimeout,
 		EnableCompression:     true,
 		BodySizeLimit:         globBodySizeLimit,
 		SampleLimit:           globSampleLimit,
 		TargetLimit:           globTargetLimit,
 		LabelLimit:            globLabelLimit,
 		LabelNameLengthLimit:  globLabelNameLengthLimit,
 		LabelValueLengthLimit: globLabelValueLengthLimit,
 		ScrapeProtocols:       DefaultGlobalConfig.ScrapeProtocols,
-		ScrapeFailureLogFile:  "testdata/fail_prom.log",
+		ScrapeFallbackProtocol: PrometheusText0_0_4,
+		ScrapeFailureLogFile:   "testdata/fail_prom.log",
 
 		MetricsPath: DefaultScrapeConfig.MetricsPath,
 		Scheme:      DefaultScrapeConfig.Scheme,
@@ -2086,12 +2087,20 @@ var expectedErrors = []struct {
 	},
 	{
 		filename: "scrape_config_files_scrape_protocols.bad.yml",
-		errMsg:   `parsing YAML file testdata/scrape_config_files_scrape_protocols.bad.yml: scrape_protocols: unknown scrape protocol prometheusproto, supported: [OpenMetricsText0.0.1 OpenMetricsText1.0.0 PrometheusProto PrometheusText0.0.4] for scrape config with job name "node"`,
+		errMsg:   `parsing YAML file testdata/scrape_config_files_scrape_protocols.bad.yml: scrape_protocols: unknown scrape protocol prometheusproto, supported: [OpenMetricsText0.0.1 OpenMetricsText1.0.0 PrometheusProto PrometheusText0.0.4 PrometheusText1.0.0] for scrape config with job name "node"`,
 	},
 	{
 		filename: "scrape_config_files_scrape_protocols2.bad.yml",
 		errMsg:   `parsing YAML file testdata/scrape_config_files_scrape_protocols2.bad.yml: duplicated protocol in scrape_protocols, got [OpenMetricsText1.0.0 PrometheusProto OpenMetricsText1.0.0] for scrape config with job name "node"`,
 	},
+	{
+		filename: "scrape_config_files_fallback_scrape_protocol1.bad.yml",
+		errMsg:   `parsing YAML file testdata/scrape_config_files_fallback_scrape_protocol1.bad.yml: invalid fallback_scrape_protocol for scrape config with job name "node": unknown scrape protocol prometheusproto, supported: [OpenMetricsText0.0.1 OpenMetricsText1.0.0 PrometheusProto PrometheusText0.0.4 PrometheusText1.0.0]`,
+	},
+	{
+		filename: "scrape_config_files_fallback_scrape_protocol2.bad.yml",
+		errMsg:   `unmarshal errors`,
+	},
 }
 
 func TestBadConfigs(t *testing.T) {
@@ -2412,3 +2421,54 @@ func TestScrapeConfigNameValidationSettings(t *testing.T) {
 		})
 	}
 }
+
+func TestScrapeProtocolHeader(t *testing.T) {
+	tests := []struct {
+		name          string
+		proto         ScrapeProtocol
+		expectedValue string
+	}{
+		{
+			name:          "blank",
+			proto:         ScrapeProtocol(""),
+			expectedValue: "",
+		},
+		{
+			name:          "invalid",
+			proto:         ScrapeProtocol("invalid"),
+			expectedValue: "",
+		},
+		{
+			name:          "prometheus protobuf",
+			proto:         PrometheusProto,
+			expectedValue: "application/vnd.google.protobuf",
+		},
+		{
+			name:          "prometheus text 0.0.4",
+			proto:         PrometheusText0_0_4,
+			expectedValue: "text/plain",
+		},
+		{
+			name:          "prometheus text 1.0.0",
+			proto:         PrometheusText1_0_0,
+			expectedValue: "text/plain",
+		},
+		{
+			name:          "openmetrics 0.0.1",
+			proto:         OpenMetricsText0_0_1,
+			expectedValue: "application/openmetrics-text",
+		},
+		{
+			name:          "openmetrics 1.0.0",
+			proto:         OpenMetricsText1_0_0,
+			expectedValue: "application/openmetrics-text",
+		},
+	}
+	for _, tc := range tests {
+		t.Run(tc.name, func(t *testing.T) {
+			mediaType := tc.proto.HeaderMediaType()
+
+			require.Equal(t, tc.expectedValue, mediaType)
+		})
+	}
+}
config/testdata/conf.good.yml (vendored, +2 lines)
@@ -74,6 +74,8 @@ scrape_configs:
     # metrics_path defaults to '/metrics'
     # scheme defaults to 'http'.
 
+    fallback_scrape_protocol: PrometheusText0.0.4
+
     scrape_failure_log_file: fail_prom.log
     file_sd_configs:
       - files:
config/testdata/scrape_config_files_fallback_scrape_protocol1.bad.yml (vendored, new file, +5 lines)
@@ -0,0 +1,5 @@
+scrape_configs:
+  - job_name: node
+    fallback_scrape_protocol: "prometheusproto"
+    static_configs:
+      - targets: ['localhost:8080']
config/testdata/scrape_config_files_fallback_scrape_protocol2.bad.yml (vendored, new file, +5 lines)
@@ -0,0 +1,5 @@
+scrape_configs:
+  - job_name: node
+    fallback_scrape_protocol: ["OpenMetricsText1.0.0", "PrometheusText0.0.4"]
+    static_configs:
+      - targets: ['localhost:8080']
docs/configuration/configuration.md

@@ -212,9 +212,15 @@ job_name: <job_name>
 
 # The protocols to negotiate during a scrape with the client.
 # Supported values (case sensitive): PrometheusProto, OpenMetricsText0.0.1,
-# OpenMetricsText1.0.0, PrometheusText0.0.4.
+# OpenMetricsText1.0.0, PrometheusText0.0.4, PrometheusText1.0.0.
 [ scrape_protocols: [<string>, ...] | default = <global_config.scrape_protocols> ]
 
+# Fallback protocol to use if a scrape returns blank, unparseable, or otherwise
+# invalid Content-Type.
+# Supported values (case sensitive): PrometheusProto, OpenMetricsText0.0.1,
+# OpenMetricsText1.0.0, PrometheusText0.0.4, PrometheusText1.0.0.
+[ fallback_scrape_protocol: <string> ]
+
 # Whether to scrape a classic histogram, even if it is also exposed as a native
 # histogram (has no effect without --enable-feature=native-histograms).
 [ always_scrape_classic_histograms: <boolean> | default = false ]
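Combining both knobs: `scrape_protocols` controls what Prometheus asks for, while `fallback_scrape_protocol` controls what it assumes when the reply's Content-Type is unusable. A configuration sketch with placeholder names:

```yaml
scrape_configs:
  - job_name: mixed-fleet   # hypothetical job name
    # Negotiate protobuf first, then OpenMetrics, then plain text.
    scrape_protocols: [PrometheusProto, OpenMetricsText1.0.0, PrometheusText0.0.4]
    # If a target replies with a blank or invalid Content-Type, parse the
    # body as OpenMetrics 1.0.0 instead of failing the scrape.
    fallback_scrape_protocol: OpenMetricsText1.0.0
    static_configs:
      - targets: ['10.0.0.5:9100']   # hypothetical target
```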
model/textparse/interface.go

@@ -14,6 +14,8 @@
 package textparse
 
 import (
+	"errors"
+	"fmt"
 	"mime"
 
 	"github.com/prometheus/common/model"
@@ -78,28 +80,65 @@ type Parser interface {
 	Next() (Entry, error)
 }
 
-// New returns a new parser of the byte slice.
-//
-// This function always returns a valid parser, but might additionally
-// return an error if the content type cannot be parsed.
-func New(b []byte, contentType string, parseClassicHistograms, skipOMCTSeries bool, st *labels.SymbolTable) (Parser, error) {
+// extractMediaType returns the mediaType of a required parser. It tries first to
+// extract a valid and supported mediaType from contentType. If that fails,
+// the provided fallbackType (possibly an empty string) is returned, together with
+// an error. fallbackType is used as-is without further validation.
+func extractMediaType(contentType, fallbackType string) (string, error) {
 	if contentType == "" {
-		return NewPromParser(b, st), nil
+		if fallbackType == "" {
+			return "", errors.New("non-compliant scrape target sending blank Content-Type and no fallback_scrape_protocol specified for target")
+		}
+		return fallbackType, fmt.Errorf("non-compliant scrape target sending blank Content-Type, using fallback_scrape_protocol %q", fallbackType)
 	}
 
+	// We have a contentType, parse it.
 	mediaType, _, err := mime.ParseMediaType(contentType)
 	if err != nil {
-		return NewPromParser(b, st), err
+		if fallbackType == "" {
+			retErr := fmt.Errorf("cannot parse Content-Type %q and no fallback_scrape_protocol for target", contentType)
+			return "", errors.Join(retErr, err)
+		}
+		retErr := fmt.Errorf("could not parse received Content-Type %q, using fallback_scrape_protocol %q", contentType, fallbackType)
+		return fallbackType, errors.Join(retErr, err)
 	}
+
+	// We have a valid media type, either we recognise it and can use it
+	// or we have to error.
+	switch mediaType {
+	case "application/openmetrics-text", "application/vnd.google.protobuf", "text/plain":
+		return mediaType, nil
+	}
+	// We're here because we have no recognised mediaType.
+	if fallbackType == "" {
+		return "", fmt.Errorf("received unsupported Content-Type %q and no fallback_scrape_protocol specified for target", contentType)
+	}
+	return fallbackType, fmt.Errorf("received unsupported Content-Type %q, using fallback_scrape_protocol %q", contentType, fallbackType)
+}
+
+// New returns a new parser of the byte slice.
+//
+// This function no longer guarantees to return a valid parser.
+//
+// It only returns a valid parser if the supplied contentType and fallbackType allow.
+// An error may also be returned if fallbackType had to be used or there was some
+// other error parsing the supplied Content-Type.
+// If the returned parser is nil then the scrape must fail.
+func New(b []byte, contentType, fallbackType string, parseClassicHistograms, skipOMCTSeries bool, st *labels.SymbolTable) (Parser, error) {
+	mediaType, err := extractMediaType(contentType, fallbackType)
+	// err may be nil or something we want to warn about.
 	switch mediaType {
 	case "application/openmetrics-text":
 		return NewOpenMetricsParser(b, st, func(o *openMetricsParserOptions) {
 			o.SkipCTSeries = skipOMCTSeries
-		}), nil
+		}), err
 	case "application/vnd.google.protobuf":
-		return NewProtobufParser(b, parseClassicHistograms, st), nil
+		return NewProtobufParser(b, parseClassicHistograms, st), err
+	case "text/plain":
+		return NewPromParser(b, st), err
 	default:
-		return NewPromParser(b, st), nil
+		return nil, err
 	}
 }
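Callers of `textparse.New` now have to handle a nil parser. A minimal sketch of the new contract (the payload is illustrative; the signature is the one introduced above):

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/textparse"
)

func main() {
	payload := []byte("metric_a 1\n") // illustrative scrape body

	// Blank Content-Type but a "text/plain" fallback: a usable parser is
	// returned together with a warning-level error noting the fallback.
	p, err := textparse.New(payload, "", "text/plain", false, false, labels.NewSymbolTable())
	fmt.Println(p != nil, err)

	// Blank Content-Type and no fallback: the parser is nil and the
	// caller (the scrape loop) must fail the scrape.
	p, err = textparse.New(payload, "", "", false, false, labels.NewSymbolTable())
	fmt.Println(p == nil, err)
}
```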
model/textparse/interface_test.go

@@ -22,6 +22,7 @@ import (
 	"github.com/prometheus/common/model"
 	"github.com/stretchr/testify/require"
 
+	"github.com/prometheus/prometheus/config"
 	"github.com/prometheus/prometheus/model/exemplar"
 	"github.com/prometheus/prometheus/model/histogram"
 	"github.com/prometheus/prometheus/model/labels"
@@ -31,6 +32,10 @@ import (
 func TestNewParser(t *testing.T) {
 	t.Parallel()
 
+	requireNilParser := func(t *testing.T, p Parser) {
+		require.Nil(t, p)
+	}
+
 	requirePromParser := func(t *testing.T, p Parser) {
 		require.NotNil(t, p)
 		_, ok := p.(*PromParser)
@@ -43,34 +48,83 @@ func TestNewParser(t *testing.T) {
 		require.True(t, ok)
 	}
 
+	requireProtobufParser := func(t *testing.T, p Parser) {
+		require.NotNil(t, p)
+		_, ok := p.(*ProtobufParser)
+		require.True(t, ok)
+	}
+
 	for name, tt := range map[string]*struct {
 		contentType            string
-		validateParser func(*testing.T, Parser)
-		err            string
+		fallbackScrapeProtocol config.ScrapeProtocol
+		validateParser         func(*testing.T, Parser)
+		err                    string
 	}{
 		"empty-string": {
-			validateParser: requirePromParser,
+			validateParser: requireNilParser,
+			err:            "non-compliant scrape target sending blank Content-Type and no fallback_scrape_protocol specified for target",
+		},
+		"empty-string-fallback-text-plain": {
+			validateParser:         requirePromParser,
+			fallbackScrapeProtocol: config.PrometheusText0_0_4,
+			err:                    "non-compliant scrape target sending blank Content-Type, using fallback_scrape_protocol \"text/plain\"",
 		},
 		"invalid-content-type-1": {
 			contentType:    "invalid/",
-			validateParser: requirePromParser,
+			validateParser: requireNilParser,
 			err:            "expected token after slash",
 		},
+		"invalid-content-type-1-fallback-text-plain": {
+			contentType:            "invalid/",
+			validateParser:         requirePromParser,
+			fallbackScrapeProtocol: config.PrometheusText0_0_4,
+			err:                    "expected token after slash",
+		},
+		"invalid-content-type-1-fallback-openmetrics": {
+			contentType:            "invalid/",
+			validateParser:         requireOpenMetricsParser,
+			fallbackScrapeProtocol: config.OpenMetricsText0_0_1,
+			err:                    "expected token after slash",
+		},
+		"invalid-content-type-1-fallback-protobuf": {
+			contentType:            "invalid/",
+			validateParser:         requireProtobufParser,
+			fallbackScrapeProtocol: config.PrometheusProto,
+			err:                    "expected token after slash",
+		},
 		"invalid-content-type-2": {
 			contentType:    "invalid/invalid/invalid",
-			validateParser: requirePromParser,
+			validateParser: requireNilParser,
 			err:            "unexpected content after media subtype",
 		},
+		"invalid-content-type-2-fallback-text-plain": {
+			contentType:            "invalid/invalid/invalid",
+			validateParser:         requirePromParser,
+			fallbackScrapeProtocol: config.PrometheusText1_0_0,
+			err:                    "unexpected content after media subtype",
+		},
 		"invalid-content-type-3": {
 			contentType:    "/",
-			validateParser: requirePromParser,
+			validateParser: requireNilParser,
 			err:            "no media type",
 		},
+		"invalid-content-type-3-fallback-text-plain": {
+			contentType:            "/",
+			validateParser:         requirePromParser,
+			fallbackScrapeProtocol: config.PrometheusText1_0_0,
+			err:                    "no media type",
+		},
 		"invalid-content-type-4": {
 			contentType:    "application/openmetrics-text; charset=UTF-8; charset=utf-8",
-			validateParser: requirePromParser,
+			validateParser: requireNilParser,
 			err:            "duplicate parameter name",
 		},
+		"invalid-content-type-4-fallback-open-metrics": {
+			contentType:            "application/openmetrics-text; charset=UTF-8; charset=utf-8",
+			validateParser:         requireOpenMetricsParser,
+			fallbackScrapeProtocol: config.OpenMetricsText1_0_0,
+			err:                    "duplicate parameter name",
+		},
 		"openmetrics": {
 			contentType:    "application/openmetrics-text",
 			validateParser: requireOpenMetricsParser,
@@ -87,20 +141,33 @@ func TestNewParser(t *testing.T) {
 			contentType:    "text/plain",
 			validateParser: requirePromParser,
 		},
+		"protobuf": {
+			contentType:    "application/vnd.google.protobuf",
+			validateParser: requireProtobufParser,
+		},
 		"plain-text-with-version": {
 			contentType:    "text/plain; version=0.0.4",
 			validateParser: requirePromParser,
 		},
 		"some-other-valid-content-type": {
 			contentType:    "text/html",
-			validateParser: requirePromParser,
+			validateParser: requireNilParser,
+			err:            "received unsupported Content-Type \"text/html\" and no fallback_scrape_protocol specified for target",
+		},
+		"some-other-valid-content-type-fallback-text-plain": {
+			contentType:            "text/html",
+			validateParser:         requirePromParser,
+			fallbackScrapeProtocol: config.PrometheusText0_0_4,
+			err:                    "received unsupported Content-Type \"text/html\", using fallback_scrape_protocol \"text/plain\"",
 		},
 	} {
 		t.Run(name, func(t *testing.T) {
 			tt := tt // Copy to local variable before going parallel.
 			t.Parallel()
 
-			p, err := New([]byte{}, tt.contentType, false, false, labels.NewSymbolTable())
+			fallbackProtoMediaType := tt.fallbackScrapeProtocol.HeaderMediaType()
+
+			p, err := New([]byte{}, tt.contentType, fallbackProtoMediaType, false, false, labels.NewSymbolTable())
 			tt.validateParser(t, p)
 			if tt.err == "" {
 				require.NoError(t, err)
promql/fuzz.go

@@ -61,8 +61,8 @@ const (
 var symbolTable = labels.NewSymbolTable()
 
 func fuzzParseMetricWithContentType(in []byte, contentType string) int {
-	p, warning := textparse.New(in, contentType, false, false, symbolTable)
-	if warning != nil {
+	p, warning := textparse.New(in, contentType, "", false, false, symbolTable)
+	if p == nil || warning != nil {
 		// An invalid content type is being passed, which should not happen
 		// in this context.
 		panic(warning)
@@ -91,7 +91,7 @@ func fuzzParseMetricWithContentType(in []byte, contentType string) int {
 // Note that this is not the parser for the text-based exposition-format; that
 // lives in github.com/prometheus/client_golang/text.
 func FuzzParseMetric(in []byte) int {
-	return fuzzParseMetricWithContentType(in, "")
+	return fuzzParseMetricWithContentType(in, "text/plain")
 }
 
 func FuzzParseOpenMetric(in []byte) int {
scrape/scrape.go

@@ -114,6 +114,7 @@ type scrapeLoopOptions struct {
 	timeout                 time.Duration
 	alwaysScrapeClassicHist bool
 	validationScheme        model.ValidationScheme
+	fallbackScrapeProtocol  string
 
 	mrc   []*relabel.Config
 	cache *scrapeCache
@@ -189,6 +190,7 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
 			metrics,
 			options.skipOffsetting,
 			opts.validationScheme,
+			opts.fallbackScrapeProtocol,
 		)
 	}
 	sp.metrics.targetScrapePoolTargetLimit.WithLabelValues(sp.config.JobName).Set(float64(sp.config.TargetLimit))
@@ -325,6 +327,7 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
 		enableCompression        = sp.config.EnableCompression
 		trackTimestampsStaleness = sp.config.TrackTimestampsStaleness
 		mrc                      = sp.config.MetricRelabelConfigs
+		fallbackScrapeProtocol   = sp.config.ScrapeFallbackProtocol.HeaderMediaType()
 	)
 
 	validationScheme := model.UTF8Validation
@@ -371,6 +374,7 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
 				interval:               interval,
 				timeout:                timeout,
 				validationScheme:       validationScheme,
+				fallbackScrapeProtocol: fallbackScrapeProtocol,
 			})
 		)
 		if err != nil {
@@ -480,6 +484,7 @@ func (sp *scrapePool) sync(targets []*Target) {
 		enableCompression        = sp.config.EnableCompression
 		trackTimestampsStaleness = sp.config.TrackTimestampsStaleness
 		mrc                      = sp.config.MetricRelabelConfigs
+		fallbackScrapeProtocol   = sp.config.ScrapeFallbackProtocol.HeaderMediaType()
 		alwaysScrapeClassicHist  = sp.config.AlwaysScrapeClassicHistograms
 	)
@@ -523,6 +528,7 @@ func (sp *scrapePool) sync(targets []*Target) {
 				timeout:                 timeout,
 				alwaysScrapeClassicHist: alwaysScrapeClassicHist,
 				validationScheme:        validationScheme,
+				fallbackScrapeProtocol:  fallbackScrapeProtocol,
 			})
 			if err != nil {
 				l.setForcedError(err)
@@ -885,6 +891,7 @@ type scrapeLoop struct {
 	timeout                 time.Duration
 	alwaysScrapeClassicHist bool
 	validationScheme        model.ValidationScheme
+	fallbackScrapeProtocol  string
 
 	// Feature flagged options.
 	enableNativeHistogramIngestion bool
@@ -1193,6 +1200,7 @@ func newScrapeLoop(ctx context.Context,
 	metrics *scrapeMetrics,
 	skipOffsetting bool,
 	validationScheme model.ValidationScheme,
+	fallbackScrapeProtocol string,
 ) *scrapeLoop {
 	if l == nil {
 		l = promslog.NewNopLogger()
@@ -1245,6 +1253,7 @@ func newScrapeLoop(ctx context.Context,
 		metrics:                metrics,
 		skipOffsetting:         skipOffsetting,
 		validationScheme:       validationScheme,
+		fallbackScrapeProtocol: fallbackScrapeProtocol,
 	}
 	sl.ctx, sl.cancel = context.WithCancel(ctx)
@@ -1537,11 +1546,21 @@ type appendErrors struct {
 }
 
 func (sl *scrapeLoop) append(app storage.Appender, b []byte, contentType string, ts time.Time) (total, added, seriesAdded int, err error) {
-	p, err := textparse.New(b, contentType, sl.alwaysScrapeClassicHist, sl.enableCTZeroIngestion, sl.symbolTable)
+	p, err := textparse.New(b, contentType, sl.fallbackScrapeProtocol, sl.alwaysScrapeClassicHist, sl.enableCTZeroIngestion, sl.symbolTable)
+	if p == nil {
+		sl.l.Error(
+			"Failed to determine correct type of scrape target.",
+			"content_type", contentType,
+			"fallback_media_type", sl.fallbackScrapeProtocol,
+			"err", err,
+		)
+		return
+	}
 	if err != nil {
 		sl.l.Debug(
-			"Invalid content type on scrape, using prometheus parser as fallback.",
+			"Invalid content type on scrape, using fallback setting.",
 			"content_type", contentType,
+			"fallback_media_type", sl.fallbackScrapeProtocol,
 			"err", err,
 		)
 	}
scrape/scrape_test.go

@@ -690,6 +690,7 @@ func newBasicScrapeLoop(t testing.TB, ctx context.Context, scraper scraper, app
 		newTestScrapeMetrics(t),
 		false,
 		model.LegacyValidation,
+		"text/plain",
 	)
 }
@@ -833,6 +834,7 @@ func TestScrapeLoopRun(t *testing.T) {
 		scrapeMetrics,
 		false,
 		model.LegacyValidation,
+		"text/plain",
 	)
 
 	// The loop must terminate during the initial offset if the context
@@ -978,6 +980,7 @@ func TestScrapeLoopMetadata(t *testing.T) {
 		scrapeMetrics,
 		false,
 		model.LegacyValidation,
+		"text/plain",
 	)
 	defer cancel()
@@ -1526,7 +1529,8 @@ func TestScrapeLoopAppendCacheEntryButErrNotFound(t *testing.T) {
 	fakeRef := storage.SeriesRef(1)
 	expValue := float64(1)
 	metric := []byte(`metric{n="1"} 1`)
-	p, warning := textparse.New(metric, "", false, false, labels.NewSymbolTable())
+	p, warning := textparse.New(metric, "text/plain", "", false, false, labels.NewSymbolTable())
+	require.NotNil(t, p)
 	require.NoError(t, warning)
 
 	var lset labels.Labels