Merge pull request #12019 from roidelapluie/scrape-config-files: Add include scrape configs

Repository: https://github.com/prometheus/prometheus.git
Commit: c4da9cd92f
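
Summary: this change adds a new top-level scrape_config_files setting that accepts a list of file globs; scrape configs are read from all matching files and appended to those defined in the main configuration file. Per-job validation (the nil check plus scrape interval/timeout inference) moves into a new ScrapeConfig.Validate method, and a new Config.GetScrapeConfigs accessor expands the globs, validates every scrape config, and rejects duplicate job names across all files. Callers in cmd/prometheus, promtool, and the scrape manager switch from reading cfg.ScrapeConfigs directly to calling GetScrapeConfigs.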
cmd/prometheus/main.go

@@ -468,6 +468,14 @@ func main() {
 			level.Error(logger).Log("msg", fmt.Sprintf("Error loading config (--config.file=%s)", cfg.configFile), "file", absPath, "err", err)
 			os.Exit(2)
 		}
+		if _, err := cfgFile.GetScrapeConfigs(); err != nil {
+			absPath, pathErr := filepath.Abs(cfg.configFile)
+			if pathErr != nil {
+				absPath = cfg.configFile
+			}
+			level.Error(logger).Log("msg", fmt.Sprintf("Error loading scrape config files from config (--config.file=%q)", cfg.configFile), "file", absPath, "err", err)
+			os.Exit(2)
+		}
 		if cfg.tsdb.EnableExemplarStorage {
 			if cfgFile.StorageConfig.ExemplarsConfig == nil {
 				cfgFile.StorageConfig.ExemplarsConfig = &config.DefaultExemplarsConfig
@@ -730,7 +738,11 @@ func main() {
 		name: "scrape_sd",
 		reloader: func(cfg *config.Config) error {
 			c := make(map[string]discovery.Configs)
-			for _, v := range cfg.ScrapeConfigs {
+			scfgs, err := cfg.GetScrapeConfigs()
+			if err != nil {
+				return err
+			}
+			for _, v := range scfgs {
 				c[v.JobName] = v.ServiceDiscoveryConfigs
 			}
 			return discoveryManagerScrape.ApplyConfig(c)
cmd/promtool/main.go

@@ -463,7 +463,18 @@ func checkConfig(agentMode bool, filename string, checkSyntaxOnly bool) ([]strin
 		}
 	}
 
-	for _, scfg := range cfg.ScrapeConfigs {
+	var scfgs []*config.ScrapeConfig
+	if checkSyntaxOnly {
+		scfgs = cfg.ScrapeConfigs
+	} else {
+		var err error
+		scfgs, err = cfg.GetScrapeConfigs()
+		if err != nil {
+			return nil, fmt.Errorf("error loading scrape configs: %w", err)
+		}
+	}
+
+	for _, scfg := range scfgs {
 		if !checkSyntaxOnly && scfg.HTTPClientConfig.Authorization != nil {
 			if err := checkFileExists(scfg.HTTPClientConfig.Authorization.CredentialsFile); err != nil {
 				return nil, fmt.Errorf("error checking authorization credentials or bearer token file %q: %w", scfg.HTTPClientConfig.Authorization.CredentialsFile, err)
cmd/promtool/sd.go

@@ -47,9 +47,15 @@ func CheckSD(sdConfigFiles, sdJobName string, sdTimeout time.Duration, noDefault
 	}
 
 	var scrapeConfig *config.ScrapeConfig
+	scfgs, err := cfg.GetScrapeConfigs()
+	if err != nil {
+		fmt.Fprintln(os.Stderr, "Cannot load scrape configs", err)
+		return failureExitCode
+	}
+
 	jobs := []string{}
 	jobMatched := false
-	for _, v := range cfg.ScrapeConfigs {
+	for _, v := range scfgs {
 		jobs = append(jobs, v.JobName)
 		if v.JobName == sdJobName {
 			jobMatched = true
config/config.go (132 lines changed)
@@ -216,12 +216,13 @@ var (
 
 // Config is the top-level configuration for Prometheus's config files.
 type Config struct {
 	GlobalConfig   GlobalConfig    `yaml:"global"`
 	AlertingConfig AlertingConfig  `yaml:"alerting,omitempty"`
 	RuleFiles      []string        `yaml:"rule_files,omitempty"`
-	ScrapeConfigs  []*ScrapeConfig `yaml:"scrape_configs,omitempty"`
-	StorageConfig  StorageConfig   `yaml:"storage,omitempty"`
-	TracingConfig  TracingConfig   `yaml:"tracing,omitempty"`
+	ScrapeConfigFiles []string        `yaml:"scrape_config_files,omitempty"`
+	ScrapeConfigs     []*ScrapeConfig `yaml:"scrape_configs,omitempty"`
+	StorageConfig     StorageConfig   `yaml:"storage,omitempty"`
+	TracingConfig     TracingConfig   `yaml:"tracing,omitempty"`
 
 	RemoteWriteConfigs []*RemoteWriteConfig `yaml:"remote_write,omitempty"`
 	RemoteReadConfigs  []*RemoteReadConfig  `yaml:"remote_read,omitempty"`
@@ -235,6 +236,9 @@ func (c *Config) SetDirectory(dir string) {
 	for i, file := range c.RuleFiles {
 		c.RuleFiles[i] = config.JoinDir(dir, file)
 	}
+	for i, file := range c.ScrapeConfigFiles {
+		c.ScrapeConfigFiles[i] = config.JoinDir(dir, file)
+	}
 	for _, c := range c.ScrapeConfigs {
 		c.SetDirectory(dir)
 	}
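Editorial note: because SetDirectory joins every scrape_config_files entry onto the config file's directory, relative globs are resolved relative to the main configuration file rather than the process working directory.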
@@ -254,6 +258,58 @@ func (c Config) String() string {
 	return string(b)
 }
 
+// GetScrapeConfigs returns the scrape configurations.
+func (c *Config) GetScrapeConfigs() ([]*ScrapeConfig, error) {
+	scfgs := make([]*ScrapeConfig, len(c.ScrapeConfigs))
+
+	jobNames := map[string]string{}
+	for i, scfg := range c.ScrapeConfigs {
+		// We do these checks for library users that would not call Validate in
+		// Unmarshal.
+		if err := scfg.Validate(c.GlobalConfig.ScrapeInterval, c.GlobalConfig.ScrapeTimeout); err != nil {
+			return nil, err
+		}
+
+		if _, ok := jobNames[scfg.JobName]; ok {
+			return nil, fmt.Errorf("found multiple scrape configs with job name %q", scfg.JobName)
+		}
+		jobNames[scfg.JobName] = "main config file"
+		scfgs[i] = scfg
+	}
+	for _, pat := range c.ScrapeConfigFiles {
+		fs, err := filepath.Glob(pat)
+		if err != nil {
+			// The only error can be a bad pattern.
+			return nil, fmt.Errorf("error retrieving scrape config files for %q: %w", pat, err)
+		}
+		for _, filename := range fs {
+			cfg := ScrapeConfigs{}
+			content, err := os.ReadFile(filename)
+			if err != nil {
+				return nil, fileErr(filename, err)
+			}
+			err = yaml.UnmarshalStrict(content, &cfg)
+			if err != nil {
+				return nil, fileErr(filename, err)
+			}
+			for _, scfg := range cfg.ScrapeConfigs {
+				if err := scfg.Validate(c.GlobalConfig.ScrapeInterval, c.GlobalConfig.ScrapeTimeout); err != nil {
+					return nil, fileErr(filename, err)
+				}
+
+				if f, ok := jobNames[scfg.JobName]; ok {
+					return nil, fileErr(filename, fmt.Errorf("found multiple scrape configs with job name %q, first found in %s", scfg.JobName, f))
+				}
+				jobNames[scfg.JobName] = fmt.Sprintf("%q", filePath(filename))
+
+				scfg.SetDirectory(filepath.Dir(filename))
+				scfgs = append(scfgs, scfg)
+			}
+		}
+	}
+	return scfgs, nil
+}
+
 // UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
 	*c = DefaultConfig
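Aside (editorial, not part of the diff): a minimal sketch of how a library consumer might use the new accessor end to end. It assumes this commit's config package and the go-kit logger that the tests below already use; prometheus.yml is a placeholder path.

	package main

	import (
		"fmt"
		"os"

		"github.com/go-kit/log"
		"github.com/prometheus/prometheus/config"
	)

	func main() {
		// LoadFile parses the main configuration file.
		cfg, err := config.LoadFile("prometheus.yml", false, false, log.NewNopLogger())
		if err != nil {
			fmt.Fprintln(os.Stderr, "error loading config:", err)
			os.Exit(1)
		}
		// GetScrapeConfigs expands every scrape_config_files glob, validates
		// each scrape config, and rejects duplicate job names across files.
		scfgs, err := cfg.GetScrapeConfigs()
		if err != nil {
			fmt.Fprintln(os.Stderr, "error loading scrape configs:", err)
			os.Exit(1)
		}
		for _, sc := range scfgs {
			fmt.Println(sc.JobName)
		}
	}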
@@ -276,26 +332,18 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
 			return fmt.Errorf("invalid rule file path %q", rf)
 		}
 	}
+
+	for _, sf := range c.ScrapeConfigFiles {
+		if !patRulePath.MatchString(sf) {
+			return fmt.Errorf("invalid scrape config file path %q", sf)
+		}
+	}
+
 	// Do global overrides and validate unique names.
 	jobNames := map[string]struct{}{}
 	for _, scfg := range c.ScrapeConfigs {
-		if scfg == nil {
-			return errors.New("empty or null scrape config section")
-		}
-		// First set the correct scrape interval, then check that the timeout
-		// (inferred or explicit) is not greater than that.
-		if scfg.ScrapeInterval == 0 {
-			scfg.ScrapeInterval = c.GlobalConfig.ScrapeInterval
-		}
-		if scfg.ScrapeTimeout > scfg.ScrapeInterval {
-			return fmt.Errorf("scrape timeout greater than scrape interval for scrape config with job name %q", scfg.JobName)
-		}
-		if scfg.ScrapeTimeout == 0 {
-			if c.GlobalConfig.ScrapeTimeout > scfg.ScrapeInterval {
-				scfg.ScrapeTimeout = scfg.ScrapeInterval
-			} else {
-				scfg.ScrapeTimeout = c.GlobalConfig.ScrapeTimeout
-			}
+		if err := scfg.Validate(c.GlobalConfig.ScrapeInterval, c.GlobalConfig.ScrapeTimeout); err != nil {
+			return err
 		}
 
 		if _, ok := jobNames[scfg.JobName]; ok {
|
@ -401,6 +449,10 @@ func (c *GlobalConfig) isZero() bool {
|
||||||
c.QueryLogFile == ""
|
c.QueryLogFile == ""
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type ScrapeConfigs struct {
|
||||||
|
ScrapeConfigs []*ScrapeConfig `yaml:"scrape_configs,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
// ScrapeConfig configures a scraping unit for Prometheus.
|
// ScrapeConfig configures a scraping unit for Prometheus.
|
||||||
type ScrapeConfig struct {
|
type ScrapeConfig struct {
|
||||||
// The job name to which the job label is set by default.
|
// The job name to which the job label is set by default.
|
||||||
|
@@ -494,6 +546,28 @@ func (c *ScrapeConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 	return nil
 }
 
+func (c *ScrapeConfig) Validate(defaultInterval, defaultTimeout model.Duration) error {
+	if c == nil {
+		return errors.New("empty or null scrape config section")
+	}
+	// First set the correct scrape interval, then check that the timeout
+	// (inferred or explicit) is not greater than that.
+	if c.ScrapeInterval == 0 {
+		c.ScrapeInterval = defaultInterval
+	}
+	if c.ScrapeTimeout > c.ScrapeInterval {
+		return fmt.Errorf("scrape timeout greater than scrape interval for scrape config with job name %q", c.JobName)
+	}
+	if c.ScrapeTimeout == 0 {
+		if defaultTimeout > c.ScrapeInterval {
+			c.ScrapeTimeout = c.ScrapeInterval
+		} else {
+			c.ScrapeTimeout = defaultTimeout
+		}
+	}
+	return nil
+}
+
 // MarshalYAML implements the yaml.Marshaler interface.
 func (c *ScrapeConfig) MarshalYAML() (interface{}, error) {
 	return discovery.MarshalYAMLWithInlineConfigs(c)
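Aside (editorial, not part of the diff): a small worked example of the Validate semantics above, under the same assumptions as the earlier sketch. A job that sets only a 5s interval against 60s/10s global defaults keeps its interval, and the timeout is clamped down to the interval because the global 10s default would exceed it.

	package main

	import (
		"fmt"
		"time"

		"github.com/prometheus/common/model"
		"github.com/prometheus/prometheus/config"
	)

	func main() {
		// Job sets only a 5s interval; global defaults are 60s/10s.
		sc := &config.ScrapeConfig{JobName: "demo", ScrapeInterval: model.Duration(5 * time.Second)}
		if err := sc.Validate(model.Duration(60*time.Second), model.Duration(10*time.Second)); err != nil {
			fmt.Println(err)
			return
		}
		// Prints "5s 5s": the interval is kept, and since the default timeout
		// (10s) exceeds the interval, the timeout is clamped to the interval.
		fmt.Println(sc.ScrapeInterval, sc.ScrapeTimeout)
	}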
@@ -936,3 +1010,15 @@ func (c *RemoteReadConfig) UnmarshalYAML(unmarshal func(interface{}) error) erro
 	// Thus we just do its validation here.
 	return c.HTTPClientConfig.Validate()
 }
+
+func filePath(filename string) string {
+	absPath, err := filepath.Abs(filename)
+	if err != nil {
+		return filename
+	}
+	return absPath
+}
+
+func fileErr(filename string, err error) error {
+	return fmt.Errorf("%q: %w", filePath(filename), err)
+}
config/config_test.go

@@ -1693,6 +1693,10 @@ var expectedErrors = []struct {
 		filename: "ovhcloud_bad_service.bad.yml",
 		errMsg:   "unknown service: fakeservice",
 	},
+	{
+		filename: "scrape_config_files_glob.bad.yml",
+		errMsg:   `parsing YAML file testdata/scrape_config_files_glob.bad.yml: invalid scrape config file path "scrape_configs/*/*"`,
+	},
 }
 
 func TestBadConfigs(t *testing.T) {
@@ -1779,6 +1783,156 @@ func TestEmptyGlobalBlock(t *testing.T) {
 	require.Equal(t, exp, *c)
 }
 
+func TestGetScrapeConfigs(t *testing.T) {
+	sc := func(jobName string, scrapeInterval, scrapeTimeout model.Duration) *ScrapeConfig {
+		return &ScrapeConfig{
+			JobName:          jobName,
+			HonorTimestamps:  true,
+			ScrapeInterval:   scrapeInterval,
+			ScrapeTimeout:    scrapeTimeout,
+			MetricsPath:      "/metrics",
+			Scheme:           "http",
+			HTTPClientConfig: config.DefaultHTTPClientConfig,
+			ServiceDiscoveryConfigs: discovery.Configs{
+				discovery.StaticConfig{
+					{
+						Targets: []model.LabelSet{
+							{
+								model.AddressLabel: "localhost:8080",
+							},
+						},
+						Source: "0",
+					},
+				},
+			},
+		}
+	}
+
+	testCases := []struct {
+		name           string
+		configFile     string
+		expectedResult []*ScrapeConfig
+		expectedError  string
+	}{
+		{
+			name:           "An included config file should be a valid global config.",
+			configFile:     "testdata/scrape_config_files.good.yml",
+			expectedResult: []*ScrapeConfig{sc("prometheus", model.Duration(60*time.Second), model.Duration(10*time.Second))},
+		},
+		{
+			name:           "A global config that only includes a scrape config file.",
+			configFile:     "testdata/scrape_config_files_only.good.yml",
+			expectedResult: []*ScrapeConfig{sc("prometheus", model.Duration(60*time.Second), model.Duration(10*time.Second))},
+		},
+		{
+			name:       "A global config that combines scrape config files and scrape configs.",
+			configFile: "testdata/scrape_config_files_combined.good.yml",
+			expectedResult: []*ScrapeConfig{
+				sc("node", model.Duration(60*time.Second), model.Duration(10*time.Second)),
+				sc("prometheus", model.Duration(60*time.Second), model.Duration(10*time.Second)),
+				sc("alertmanager", model.Duration(60*time.Second), model.Duration(10*time.Second)),
+			},
+		},
+		{
+			name:       "A global config that includes a scrape config file with globs",
+			configFile: "testdata/scrape_config_files_glob.good.yml",
+			expectedResult: []*ScrapeConfig{
+				{
+					JobName: "prometheus",
+
+					HonorTimestamps: true,
+					ScrapeInterval:  model.Duration(60 * time.Second),
+					ScrapeTimeout:   DefaultGlobalConfig.ScrapeTimeout,
+
+					MetricsPath: DefaultScrapeConfig.MetricsPath,
+					Scheme:      DefaultScrapeConfig.Scheme,
+
+					HTTPClientConfig: config.HTTPClientConfig{
+						TLSConfig: config.TLSConfig{
+							CertFile: filepath.FromSlash("testdata/scrape_configs/valid_cert_file"),
+							KeyFile:  filepath.FromSlash("testdata/scrape_configs/valid_key_file"),
+						},
+						FollowRedirects: true,
+						EnableHTTP2:     true,
+					},
+
+					ServiceDiscoveryConfigs: discovery.Configs{
+						discovery.StaticConfig{
+							{
+								Targets: []model.LabelSet{
+									{model.AddressLabel: "localhost:8080"},
+								},
+								Source: "0",
+							},
+						},
+					},
+				},
+				{
+					JobName: "node",
+
+					HonorTimestamps: true,
+					ScrapeInterval:  model.Duration(15 * time.Second),
+					ScrapeTimeout:   DefaultGlobalConfig.ScrapeTimeout,
+					HTTPClientConfig: config.HTTPClientConfig{
+						TLSConfig: config.TLSConfig{
+							CertFile: filepath.FromSlash("testdata/valid_cert_file"),
+							KeyFile:  filepath.FromSlash("testdata/valid_key_file"),
+						},
+						FollowRedirects: true,
+						EnableHTTP2:     true,
+					},
+
+					MetricsPath: DefaultScrapeConfig.MetricsPath,
+					Scheme:      DefaultScrapeConfig.Scheme,
+
+					ServiceDiscoveryConfigs: discovery.Configs{
+						&vultr.SDConfig{
+							HTTPClientConfig: config.HTTPClientConfig{
+								Authorization: &config.Authorization{
+									Type:        "Bearer",
+									Credentials: "abcdef",
+								},
+								FollowRedirects: true,
+								EnableHTTP2:     true,
+							},
+							Port:            80,
+							RefreshInterval: model.Duration(60 * time.Second),
+						},
+					},
+				},
+			},
+		},
+		{
+			name:          "A global config that includes the same scrape config file twice.",
+			configFile:    "testdata/scrape_config_files_double_import.bad.yml",
+			expectedError: `found multiple scrape configs with job name "prometheus"`,
+		},
+		{
+			name:          "A global config that includes a scrape config identical to a scrape config in the main file.",
+			configFile:    "testdata/scrape_config_files_duplicate.bad.yml",
+			expectedError: `found multiple scrape configs with job name "prometheus"`,
+		},
+		{
+			name:          "A global config that includes a scrape config file with errors.",
+			configFile:    "testdata/scrape_config_files_global.bad.yml",
+			expectedError: `scrape timeout greater than scrape interval for scrape config with job name "prometheus"`,
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			c, err := LoadFile(tc.configFile, false, false, log.NewNopLogger())
+			require.NoError(t, err)
+
+			scfgs, err := c.GetScrapeConfigs()
+			if len(tc.expectedError) > 0 {
+				require.ErrorContains(t, err, tc.expectedError)
+			}
+			require.Equal(t, tc.expectedResult, scfgs)
+		})
+	}
+}
+
 func kubernetesSDHostURL() config.URL {
 	tURL, _ := url.Parse("https://localhost:1234")
 	return config.URL{URL: tURL}
config/testdata/scrape_config_files.bad.yml (new file, 6 lines)

@@ -0,0 +1,6 @@
+scrape_configs:
+  - job_name: prometheus
+    scrape_interval: 10s
+    scrape_timeout: 20s
+    static_configs:
+      - targets: ['localhost:8080']
config/testdata/scrape_config_files.good.yml (new file, 4 lines)

@@ -0,0 +1,4 @@
+scrape_configs:
+  - job_name: prometheus
+    static_configs:
+      - targets: ['localhost:8080']
config/testdata/scrape_config_files2.good.yml (new file, 4 lines)

@@ -0,0 +1,4 @@
+scrape_configs:
+  - job_name: alertmanager
+    static_configs:
+      - targets: ['localhost:8080']
config/testdata/scrape_config_files_combined.good.yml (new file, 7 lines)

@@ -0,0 +1,7 @@
+scrape_config_files:
+  - scrape_config_files.good.yml
+  - scrape_config_files2.good.yml
+scrape_configs:
+  - job_name: node
+    static_configs:
+      - targets: ['localhost:8080']
config/testdata/scrape_config_files_double_import.bad.yml (new file, 3 lines)

@@ -0,0 +1,3 @@
+scrape_config_files:
+  - scrape_config_files.good.yml
+  - scrape_config_files.good.yml
config/testdata/scrape_config_files_duplicate.bad.yml (new file, 6 lines)

@@ -0,0 +1,6 @@
+scrape_config_files:
+  - scrape_config_files.good.yml
+scrape_configs:
+  - job_name: prometheus
+    static_configs:
+      - targets: ['localhost:8080']
config/testdata/scrape_config_files_glob.bad.yml (new file, 6 lines)

@@ -0,0 +1,6 @@
+scrape_config_files:
+  - scrape_configs/*/*
+scrape_configs:
+  - job_name: node
+    static_configs:
+      - targets: ['localhost:8080']
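Editorial note: this file is rejected at unmarshal time, not at glob-expansion time — scrape_config_files entries are checked against the same patRulePath pattern as rule_files (see the UnmarshalYAML hunk above), which appears to permit at most one '*', and only in the final path segment, so scrape_configs/*/* is an invalid path.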
config/testdata/scrape_config_files_glob.good.yml (new file, 2 lines)

@@ -0,0 +1,2 @@
+scrape_config_files:
+  - scrape_configs/*.yml
config/testdata/scrape_config_files_global.bad.yml (new file, 2 lines)

@@ -0,0 +1,2 @@
+scrape_config_files:
+  - scrape_config_files.bad.yml
config/testdata/scrape_config_files_global_duplicate.bad.yml (new file, 11 lines)

@@ -0,0 +1,11 @@
+global:
+  scrape_interval: 15s
+
+scrape_config_files:
+  - scrape_config_files.good.yml
+  - scrape_config_files.good.yml
+
+scrape_configs:
+  - job_name: prometheus
+    static_configs:
+      - targets: ['localhost:8080']
config/testdata/scrape_config_files_only.good.yml (new file, 2 lines)

@@ -0,0 +1,2 @@
+scrape_config_files:
+  - scrape_config_files.good.yml
config/testdata/scrape_configs/scrape_config_files1.good.yml (new file, 7 lines)

@@ -0,0 +1,7 @@
+scrape_configs:
+  - job_name: prometheus
+    static_configs:
+      - targets: ['localhost:8080']
+    tls_config:
+      cert_file: valid_cert_file
+      key_file: valid_key_file
config/testdata/scrape_configs/scrape_config_files2.good.yml (new file, 9 lines)

@@ -0,0 +1,9 @@
+scrape_configs:
+  - job_name: node
+    scrape_interval: 15s
+    tls_config:
+      cert_file: ../valid_cert_file
+      key_file: ../valid_key_file
+    vultr_sd_configs:
+      - authorization:
+          credentials: abcdef
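Editorial note: the relative paths here (../valid_cert_file) are the point of this fixture — GetScrapeConfigs calls scfg.SetDirectory(filepath.Dir(filename)) on every included scrape config, so paths in an included file resolve relative to that file; TestGetScrapeConfigs above accordingly expects testdata/valid_cert_file for this job.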
docs/configuration.md

@@ -78,6 +78,11 @@ global:
 rule_files:
   [ - <filepath_glob> ... ]
 
+# Scrape config files specifies a list of globs. Scrape configs are read from
+# all matching files and appended to the list of scrape configs.
+scrape_config_files:
+  [ - <filepath_glob> ... ]
+
 # A list of scrape configurations.
 scrape_configs:
   [ - <scrape_config> ... ]
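Aside (editorial): a concrete example of the documented syntax, mirroring the testdata above rather than introducing anything new. Job names must be unique across the main file and every included file.

	# Hypothetical main config combining inline and included scrape configs.
	scrape_config_files:
	  - scrape_config_files.good.yml   # fixed path, relative to this file
	  - scrape_configs/*.yml           # glob, expanded with filepath.Glob
	scrape_configs:
	  - job_name: node
	    static_configs:
	      - targets: ['localhost:8080']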
scrape/manager.go

@@ -270,8 +270,13 @@ func (m *Manager) ApplyConfig(cfg *config.Config) error {
 	m.mtxScrape.Lock()
 	defer m.mtxScrape.Unlock()
 
+	scfgs, err := cfg.GetScrapeConfigs()
+	if err != nil {
+		return err
+	}
+
 	c := make(map[string]*config.ScrapeConfig)
-	for _, scfg := range cfg.ScrapeConfigs {
+	for _, scfg := range scfgs {
 		c[scfg.JobName] = scfg
 	}
 	m.scrapeConfigs = c