promtool: add --ignore-unknown-fields
Add an --ignore-unknown-fields flag that ignores unknown fields in rule
group files. Many tools in the ecosystem like to extend the rule group
file structure, but promtool currently refuses to read such files if
anything extra is present. The purpose of this flag is to let us use
"vanilla" promtool instead of rolling our own.
Some examples of tools/code:
https://github.com/grafana/mimir/blob/main/pkg/mimirtool/rules/rwrulefmt/rulefmt.go
8898eb3cc5/pkg/rules/rules.go (L18-L25)
Signed-off-by: Giedrius Statkevičius <giedrius.statkevicius@vinted.com>
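For context, a minimal sketch of what the flag changes at the library level. It uses the two-argument rulefmt.ParseFile introduced in this commit; the file name extended-rules.yml and the wrapper program are invented for the example:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/rulefmt"
)

func main() {
	// Strict parsing (the previous behaviour): unknown fields such as a
	// vendor-specific "ownership" block are reported as errors.
	if _, errs := rulefmt.ParseFile("extended-rules.yml", false); len(errs) > 0 {
		fmt.Println("strict parse failed:", errs)
	}

	// With ignoreUnknownFields=true (what --ignore-unknown-fields passes
	// down), the same file parses and only real rule fields are validated.
	rgs, errs := rulefmt.ParseFile("extended-rules.yml", true)
	if len(errs) == 0 {
		fmt.Println("parsed", len(rgs.Groups), "groups")
	}
}
```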
This commit is contained in:
parent
5df6ea3042
commit
92218ecb9b
@@ -127,6 +127,7 @@ func main() {
 checkConfigLintFatal := checkConfigCmd.Flag(
 "lint-fatal",
 "Make lint errors exit with exit code 3.").Default("false").Bool()
+checkConfigIgnoreUnknownFields := checkConfigCmd.Flag("ignore-unknown-fields", "Ignore unknown fields in the rule groups read by the config files. This is useful when you want to extend rule files with custom metadata. Ensure that those fields are removed before loading them into the Prometheus server as it performs strict checks by default.").Default("false").Bool()

 checkWebConfigCmd := checkCmd.Command("web-config", "Check if the web config files are valid or not.")
 webConfigFiles := checkWebConfigCmd.Arg(

@@ -154,6 +155,7 @@ func main() {
 checkRulesLintFatal := checkRulesCmd.Flag(
 "lint-fatal",
 "Make lint errors exit with exit code 3.").Default("false").Bool()
+checkRulesIgnoreUnknownFields := checkRulesCmd.Flag("ignore-unknown-fields", "Ignore unknown fields in the rule files. This is useful when you want to extend rule files with custom metadata. Ensure that those fields are removed before loading them into the Prometheus server as it performs strict checks by default.").Default("false").Bool()

 checkMetricsCmd := checkCmd.Command("metrics", checkMetricsUsage)
 checkMetricsExtended := checkCmd.Flag("extended", "Print extended information related to the cardinality of the metrics.").Bool()

@@ -227,6 +229,7 @@ func main() {
 ).Required().ExistingFiles()
 testRulesDebug := testRulesCmd.Flag("debug", "Enable unit test debugging.").Default("false").Bool()
 testRulesDiff := testRulesCmd.Flag("diff", "[Experimental] Print colored differential output between expected & received output.").Default("false").Bool()
+testRulesIgnoreUnknownFields := testRulesCmd.Flag("ignore-unknown-fields", "Ignore unknown fields in the test files. This is useful when you want to extend rule files with custom metadata. Ensure that those fields are removed before loading them into the Prometheus server as it performs strict checks by default.").Default("false").Bool()

 defaultDBPath := "data/"
 tsdbCmd := app.Command("tsdb", "Run tsdb commands.")

@@ -348,7 +351,7 @@ func main() {
 os.Exit(CheckSD(*sdConfigFile, *sdJobName, *sdTimeout, prometheus.DefaultRegisterer))

 case checkConfigCmd.FullCommand():
-os.Exit(CheckConfig(*agentMode, *checkConfigSyntaxOnly, newConfigLintConfig(*checkConfigLint, *checkConfigLintFatal, model.Duration(*checkLookbackDelta)), *configFiles...))
+os.Exit(CheckConfig(*agentMode, *checkConfigSyntaxOnly, newConfigLintConfig(*checkConfigLint, *checkConfigLintFatal, *checkConfigIgnoreUnknownFields, model.Duration(*checkLookbackDelta)), *configFiles...))

 case checkServerHealthCmd.FullCommand():
 os.Exit(checkErr(CheckServerStatus(serverURL, checkHealth, httpRoundTripper)))

@@ -360,7 +363,7 @@ func main() {
 os.Exit(CheckWebConfig(*webConfigFiles...))

 case checkRulesCmd.FullCommand():
-os.Exit(CheckRules(newRulesLintConfig(*checkRulesLint, *checkRulesLintFatal), *ruleFiles...))
+os.Exit(CheckRules(newRulesLintConfig(*checkRulesLint, *checkRulesLintFatal, *checkRulesIgnoreUnknownFields), *ruleFiles...))

 case checkMetricsCmd.FullCommand():
 os.Exit(CheckMetrics(*checkMetricsExtended))

@@ -402,6 +405,7 @@ func main() {
 *testRulesRun,
 *testRulesDiff,
 *testRulesDebug,
+*testRulesIgnoreUnknownFields,
 *testRulesFiles...),
 )

@@ -458,12 +462,14 @@ type rulesLintConfig struct {
 all bool
 duplicateRules bool
 fatal bool
+ignoreUnknownFields bool
 }

-func newRulesLintConfig(stringVal string, fatal bool) rulesLintConfig {
+func newRulesLintConfig(stringVal string, fatal, ignoreUnknownFields bool) rulesLintConfig {
 items := strings.Split(stringVal, ",")
 ls := rulesLintConfig{
 fatal: fatal,
+ignoreUnknownFields: ignoreUnknownFields,
 }
 for _, setting := range items {
 switch setting {

@@ -489,7 +495,7 @@ type configLintConfig struct {
 lookbackDelta model.Duration
 }

-func newConfigLintConfig(optionsStr string, fatal bool, lookbackDelta model.Duration) configLintConfig {
+func newConfigLintConfig(optionsStr string, fatal, ignoreUnknownFields bool, lookbackDelta model.Duration) configLintConfig {
 c := configLintConfig{
 rulesLintConfig: rulesLintConfig{
 fatal: fatal,

@@ -518,7 +524,7 @@ func newConfigLintConfig(optionsStr string, fatal bool, lookbackDelta model.Dura
 }

 if len(rulesOptions) > 0 {
-c.rulesLintConfig = newRulesLintConfig(strings.Join(rulesOptions, ","), fatal)
+c.rulesLintConfig = newRulesLintConfig(strings.Join(rulesOptions, ","), fatal, ignoreUnknownFields)
 }

 return c

@@ -839,7 +845,7 @@ func checkRulesFromStdin(ls rulesLintConfig) (bool, bool) {
 fmt.Fprintln(os.Stderr, " FAILED:", err)
 return true, true
 }
-rgs, errs := rulefmt.Parse(data)
+rgs, errs := rulefmt.Parse(data, ls.ignoreUnknownFields)
 if errs != nil {
 failed = true
 fmt.Fprintln(os.Stderr, " FAILED:")

@@ -873,7 +879,7 @@ func checkRules(files []string, ls rulesLintConfig) (bool, bool) {
 hasErrors := false
 for _, f := range files {
 fmt.Println("Checking", f)
-rgs, errs := rulefmt.ParseFile(f)
+rgs, errs := rulefmt.ParseFile(f, ls.ignoreUnknownFields)
 if errs != nil {
 failed = true
 fmt.Fprintln(os.Stderr, " FAILED:")
@@ -185,7 +185,7 @@ func TestCheckDuplicates(t *testing.T) {
 c := test
 t.Run(c.name, func(t *testing.T) {
 t.Parallel()
-rgs, err := rulefmt.ParseFile(c.ruleFile)
+rgs, err := rulefmt.ParseFile(c.ruleFile, false)
 require.Empty(t, err)
 dups := checkDuplicates(rgs.Groups)
 require.Equal(t, c.expectedDups, dups)

@@ -194,7 +194,7 @@ func TestCheckDuplicates(t *testing.T) {
 }

 func BenchmarkCheckDuplicates(b *testing.B) {
-rgs, err := rulefmt.ParseFile("./testdata/rules_large.yml")
+rgs, err := rulefmt.ParseFile("./testdata/rules_large.yml", false)
 require.Empty(b, err)
 b.ResetTimer()

@@ -508,7 +508,7 @@ func TestCheckRules(t *testing.T) {
 defer func(v *os.File) { os.Stdin = v }(os.Stdin)
 os.Stdin = r

-exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false))
+exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false))
 require.Equal(t, successExitCode, exitCode, "")
 })

@@ -530,7 +530,7 @@ func TestCheckRules(t *testing.T) {
 defer func(v *os.File) { os.Stdin = v }(os.Stdin)
 os.Stdin = r

-exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false))
+exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false))
 require.Equal(t, failureExitCode, exitCode, "")
 })

@@ -552,7 +552,7 @@ func TestCheckRules(t *testing.T) {
 defer func(v *os.File) { os.Stdin = v }(os.Stdin)
 os.Stdin = r

-exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, true))
+exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, true, false))
 require.Equal(t, lintErrExitCode, exitCode, "")
 })
 }

@@ -560,19 +560,19 @@ func TestCheckRules(t *testing.T) {
 func TestCheckRulesWithRuleFiles(t *testing.T) {
 t.Run("rules-good", func(t *testing.T) {
 t.Parallel()
-exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false), "./testdata/rules.yml")
+exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false), "./testdata/rules.yml")
 require.Equal(t, successExitCode, exitCode, "")
 })

 t.Run("rules-bad", func(t *testing.T) {
 t.Parallel()
-exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false), "./testdata/rules-bad.yml")
+exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, false, false), "./testdata/rules-bad.yml")
 require.Equal(t, failureExitCode, exitCode, "")
 })

 t.Run("rules-lint-fatal", func(t *testing.T) {
 t.Parallel()
-exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, true), "./testdata/prometheus-rules.lint.yml")
+exitCode := CheckRules(newRulesLintConfig(lintOptionDuplicateRules, true, false), "./testdata/prometheus-rules.lint.yml")
 require.Equal(t, lintErrExitCode, exitCode, "")
 })
 }

@@ -601,20 +601,20 @@ func TestCheckScrapeConfigs(t *testing.T) {
 } {
 t.Run(tc.name, func(t *testing.T) {
 // Non-fatal linting.
-code := CheckConfig(false, false, newConfigLintConfig(lintOptionTooLongScrapeInterval, false, tc.lookbackDelta), "./testdata/prometheus-config.lint.too_long_scrape_interval.yml")
+code := CheckConfig(false, false, newConfigLintConfig(lintOptionTooLongScrapeInterval, false, false, tc.lookbackDelta), "./testdata/prometheus-config.lint.too_long_scrape_interval.yml")
 require.Equal(t, successExitCode, code, "Non-fatal linting should return success")
 // Fatal linting.
-code = CheckConfig(false, false, newConfigLintConfig(lintOptionTooLongScrapeInterval, true, tc.lookbackDelta), "./testdata/prometheus-config.lint.too_long_scrape_interval.yml")
+code = CheckConfig(false, false, newConfigLintConfig(lintOptionTooLongScrapeInterval, true, false, tc.lookbackDelta), "./testdata/prometheus-config.lint.too_long_scrape_interval.yml")
 if tc.expectError {
 require.Equal(t, lintErrExitCode, code, "Fatal linting should return error")
 } else {
 require.Equal(t, successExitCode, code, "Fatal linting should return success when there are no problems")
 }
 // Check syntax only, no linting.
-code = CheckConfig(false, true, newConfigLintConfig(lintOptionTooLongScrapeInterval, true, tc.lookbackDelta), "./testdata/prometheus-config.lint.too_long_scrape_interval.yml")
+code = CheckConfig(false, true, newConfigLintConfig(lintOptionTooLongScrapeInterval, true, false, tc.lookbackDelta), "./testdata/prometheus-config.lint.too_long_scrape_interval.yml")
 require.Equal(t, successExitCode, code, "Fatal linting should return success when checking syntax only")
 // Lint option "none" should disable linting.
-code = CheckConfig(false, false, newConfigLintConfig(lintOptionNone+","+lintOptionTooLongScrapeInterval, true, tc.lookbackDelta), "./testdata/prometheus-config.lint.too_long_scrape_interval.yml")
+code = CheckConfig(false, false, newConfigLintConfig(lintOptionNone+","+lintOptionTooLongScrapeInterval, true, false, tc.lookbackDelta), "./testdata/prometheus-config.lint.too_long_scrape_interval.yml")
 require.Equal(t, successExitCode, code, `Fatal linting should return success when lint option "none" is specified`)
 })
 }
@@ -69,7 +69,7 @@ func newRuleImporter(logger *slog.Logger, config ruleImporterConfig, apiClient q

 // loadGroups parses groups from a list of recording rule files.
 func (importer *ruleImporter) loadGroups(_ context.Context, filenames []string) (errs []error) {
-groups, errs := importer.ruleManager.LoadGroups(importer.config.evalInterval, labels.Labels{}, "", nil, filenames...)
+groups, errs := importer.ruleManager.LoadGroups(importer.config.evalInterval, labels.Labels{}, "", nil, false, filenames...)
 if errs != nil {
 return errs
 }
cmd/promtool/testdata/rules_extrafields.yml (vendored, new file, 33 lines)
@@ -0,0 +1,33 @@
+# This is the rules file. It has an extra "ownership"
+# field in the second group. promtool should ignore this field
+# and not return an error with --ignore-unknown-fields.
+
+groups:
+  - name: alerts
+    namespace: "foobar"
+    rules:
+      - alert: InstanceDown
+        expr: up == 0
+        for: 5m
+        labels:
+          severity: page
+        annotations:
+          summary: "Instance {{ $labels.instance }} down"
+          description: "{{ $labels.instance }} of job {{ $labels.job }} has been down for more than 5 minutes."
+      - alert: AlwaysFiring
+        expr: 1
+
+  - name: rules
+    ownership:
+      service: "test"
+    rules:
+      - record: job:test:count_over_time1m
+        expr: sum without(instance) (count_over_time(test[1m]))
+
+      # A recording rule that doesn't depend on input series.
+      - record: fixed_data
+        expr: 1
+
+      # Subquery with default resolution test.
+      - record: suquery_interval_test
+        expr: count_over_time(up[5m:])
cmd/promtool/testdata/rules_run_extrafields.yml (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
+# Minimal test case to see that --ignore-unknown-fields
+# is working as expected. It should not return an error
+# when any extra fields are present in the rules file.
+rule_files:
+  - rules_extrafields.yml
+
+evaluation_interval: 1m
+
+
+tests:
+  - name: extra ownership field test
+    input_series:
+      - series: test
+        values: 1
+
+    promql_expr_test:
+      - expr: test
+        eval_time: 0
+        exp_samples:
+          - value: 1
+            labels: test
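As a sketch of how this fixture is exercised (it mirrors the "Test all with extra fields" case added to the unit tests further below), the updated RulesUnitTest helper takes ignoreUnknownFields just before the file list. RulesUnitTest is unexported from cmd/promtool, so the test function name here is hypothetical and the snippet would have to live inside that package:

```go
// Hypothetical addition to a _test.go file in cmd/promtool (package main).
package main

import (
	"testing"

	"github.com/prometheus/prometheus/promql/promqltest"
	"github.com/stretchr/testify/require"
)

func TestRulesRunExtraFields(t *testing.T) {
	got := RulesUnitTest(
		promqltest.LazyLoaderOpts{},            // query engine options
		nil,                                    // run: no test-group name filter
		false,                                  // diffFlag
		false,                                  // debug
		true,                                   // ignoreUnknownFields
		"./testdata/rules_run_extrafields.yml", // the fixture above
	)
	require.Equal(t, 0, got) // 0 = success: the extra "ownership" field is ignored
}
```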
@@ -46,11 +46,11 @@ import (

 // RulesUnitTest does unit testing of rules based on the unit testing files provided.
 // More info about the file format can be found in the docs.
-func RulesUnitTest(queryOpts promqltest.LazyLoaderOpts, runStrings []string, diffFlag, debug bool, files ...string) int {
+func RulesUnitTest(queryOpts promqltest.LazyLoaderOpts, runStrings []string, diffFlag, debug, ignoreUnknownFields bool, files ...string) int {
-return RulesUnitTestResult(io.Discard, queryOpts, runStrings, diffFlag, debug, files...)
+return RulesUnitTestResult(io.Discard, queryOpts, runStrings, diffFlag, debug, ignoreUnknownFields, files...)
 }

-func RulesUnitTestResult(results io.Writer, queryOpts promqltest.LazyLoaderOpts, runStrings []string, diffFlag, debug bool, files ...string) int {
+func RulesUnitTestResult(results io.Writer, queryOpts promqltest.LazyLoaderOpts, runStrings []string, diffFlag, debug, ignoreUnknownFields bool, files ...string) int {
 failed := false
 junit := &junitxml.JUnitXML{}

@@ -60,7 +60,7 @@ func RulesUnitTestResult(results io.Writer, queryOpts promqltest.LazyLoaderOpts,
 }

 for _, f := range files {
-if errs := ruleUnitTest(f, queryOpts, run, diffFlag, debug, junit.Suite(f)); errs != nil {
+if errs := ruleUnitTest(f, queryOpts, run, diffFlag, debug, ignoreUnknownFields, junit.Suite(f)); errs != nil {
 fmt.Fprintln(os.Stderr, " FAILED:")
 for _, e := range errs {
 fmt.Fprintln(os.Stderr, e.Error())

@@ -82,7 +82,7 @@ func RulesUnitTestResult(results io.Writer, queryOpts promqltest.LazyLoaderOpts,
 return successExitCode
 }

-func ruleUnitTest(filename string, queryOpts promqltest.LazyLoaderOpts, run *regexp.Regexp, diffFlag, debug bool, ts *junitxml.TestSuite) []error {
+func ruleUnitTest(filename string, queryOpts promqltest.LazyLoaderOpts, run *regexp.Regexp, diffFlag, debug, ignoreUnknownFields bool, ts *junitxml.TestSuite) []error {
 b, err := os.ReadFile(filename)
 if err != nil {
 ts.Abort(err)

@@ -131,7 +131,7 @@ func ruleUnitTest(filename string, queryOpts promqltest.LazyLoaderOpts, run *reg
 if t.Interval == 0 {
 t.Interval = unitTestInp.EvaluationInterval
 }
-ers := t.test(testname, evalInterval, groupOrderMap, queryOpts, diffFlag, debug, unitTestInp.RuleFiles...)
+ers := t.test(testname, evalInterval, groupOrderMap, queryOpts, diffFlag, debug, ignoreUnknownFields, unitTestInp.RuleFiles...)
 if ers != nil {
 for _, e := range ers {
 tc.Fail(e.Error())

@@ -198,7 +198,7 @@ type testGroup struct {
 }

 // test performs the unit tests.
-func (tg *testGroup) test(testname string, evalInterval time.Duration, groupOrderMap map[string]int, queryOpts promqltest.LazyLoaderOpts, diffFlag, debug bool, ruleFiles ...string) (outErr []error) {
+func (tg *testGroup) test(testname string, evalInterval time.Duration, groupOrderMap map[string]int, queryOpts promqltest.LazyLoaderOpts, diffFlag, debug, ignoreUnknownFields bool, ruleFiles ...string) (outErr []error) {
 if debug {
 testStart := time.Now()
 fmt.Printf("DEBUG: Starting test %s\n", testname)

@@ -228,7 +228,7 @@ func (tg *testGroup) test(testname string, evalInterval time.Duration, groupOrde
 Logger: promslog.NewNopLogger(),
 }
 m := rules.NewManager(opts)
-groupsMap, ers := m.LoadGroups(time.Duration(tg.Interval), tg.ExternalLabels, tg.ExternalURL, nil, ruleFiles...)
+groupsMap, ers := m.LoadGroups(time.Duration(tg.Interval), tg.ExternalLabels, tg.ExternalURL, nil, ignoreUnknownFields, ruleFiles...)
 if ers != nil {
 return ers
 }
@@ -143,7 +143,7 @@ func TestRulesUnitTest(t *testing.T) {
 }
 t.Run(tt.name, func(t *testing.T) {
 t.Parallel()
-if got := RulesUnitTest(tt.queryOpts, nil, false, false, tt.args.files...); got != tt.want {
+if got := RulesUnitTest(tt.queryOpts, nil, false, false, false, tt.args.files...); got != tt.want {
 t.Errorf("RulesUnitTest() = %v, want %v", got, tt.want)
 }
 })

@@ -151,7 +151,7 @@ func TestRulesUnitTest(t *testing.T) {
 t.Run("Junit xml output ", func(t *testing.T) {
 t.Parallel()
 var buf bytes.Buffer
-if got := RulesUnitTestResult(&buf, promqltest.LazyLoaderOpts{}, nil, false, false, reuseFiles...); got != 1 {
+if got := RulesUnitTestResult(&buf, promqltest.LazyLoaderOpts{}, nil, false, false, false, reuseFiles...); got != 1 {
 t.Errorf("RulesUnitTestResults() = %v, want 1", got)
 }
 var test junitxml.JUnitXML

@@ -198,6 +198,7 @@ func TestRulesUnitTestRun(t *testing.T) {
 args args
 queryOpts promqltest.LazyLoaderOpts
 want int
+ignoreUnknownFields bool
 }{
 {
 name: "Test all without run arg",

@@ -231,11 +232,19 @@ func TestRulesUnitTestRun(t *testing.T) {
 },
 want: 1,
 },
+{
+name: "Test all with extra fields",
+args: args{
+files: []string{"./testdata/rules_run_extrafields.yml"},
+},
+ignoreUnknownFields: true,
+want: 0,
+},
 }
 for _, tt := range tests {
 t.Run(tt.name, func(t *testing.T) {
 t.Parallel()
-got := RulesUnitTest(tt.queryOpts, tt.args.run, false, false, tt.args.files...)
+got := RulesUnitTest(tt.queryOpts, tt.args.run, false, false, tt.ignoreUnknownFields, tt.args.files...)
 require.Equal(t, tt.want, got)
 })
 }
@@ -105,6 +105,7 @@ Check if the config files are valid or not.
 | <code class="text-nowrap">--syntax-only</code> | Only check the config file syntax, ignoring file and content validation referenced in the config | |
 | <code class="text-nowrap">--lint</code> | Linting checks to apply to the rules/scrape configs specified in the config. Available options are: all, duplicate-rules, none, too-long-scrape-interval. Use --lint=none to disable linting | `duplicate-rules` |
 | <code class="text-nowrap">--lint-fatal</code> | Make lint errors exit with exit code 3. | `false` |
+| <code class="text-nowrap">--ignore-unknown-fields</code> | Ignore unknown fields in the rule groups read by the config files. This is useful when you want to extend rule files with custom metadata. Ensure that those fields are removed before loading them into the Prometheus server as it performs strict checks by default. | `false` |
 | <code class="text-nowrap">--agent</code> | Check config file for Prometheus in Agent mode. | |

@@ -178,6 +179,7 @@ Check if the rule files are valid or not.
 | --- | --- | --- |
 | <code class="text-nowrap">--lint</code> | Linting checks to apply. Available options are: all, duplicate-rules, none. Use --lint=none to disable linting | `duplicate-rules` |
 | <code class="text-nowrap">--lint-fatal</code> | Make lint errors exit with exit code 3. | `false` |
+| <code class="text-nowrap">--ignore-unknown-fields</code> | Ignore unknown fields in the rule files. This is useful when you want to extend rule files with custom metadata. Ensure that those fields are removed before loading them into the Prometheus server as it performs strict checks by default. | `false` |

@@ -465,6 +467,7 @@ Unit tests for rules.
 | <code class="text-nowrap">--run</code> <code class="text-nowrap">...<code class="text-nowrap"> | If set, will only run test groups whose names match the regular expression. Can be specified multiple times. | |
 | <code class="text-nowrap">--debug</code> | Enable unit test debugging. | `false` |
 | <code class="text-nowrap">--diff</code> | [Experimental] Print colored differential output between expected & received output. | `false` |
+| <code class="text-nowrap">--ignore-unknown-fields</code> | Ignore unknown fields in the test files. This is useful when you want to extend rule files with custom metadata. Ensure that those fields are removed before loading them into the Prometheus server as it performs strict checks by default. | `false` |
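Taken together, the documentation rows above mean `promtool check config`, `promtool check rules`, and `promtool test rules` all accept the same flag. A hedged illustration of driving the binary from Go; it assumes a promtool built from this commit is on PATH and that rules.yml carries extra vendor-specific fields:

```go
package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Equivalent to running: promtool check rules --ignore-unknown-fields rules.yml
	cmd := exec.Command("promtool", "check", "rules", "--ignore-unknown-fields", "rules.yml")
	out, err := cmd.CombinedOutput()
	fmt.Print(string(out))
	if err != nil {
		// Without the flag, unknown fields would already fail the check here;
		// with it, only genuine rule problems produce a non-zero exit.
		fmt.Println("check failed:", err)
	}
}
```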
@@ -314,7 +314,7 @@ func testTemplateParsing(rl *RuleNode) (errs []error) {
 }

 // Parse parses and validates a set of rules.
-func Parse(content []byte) (*RuleGroups, []error) {
+func Parse(content []byte, ignoreUnknownFields bool) (*RuleGroups, []error) {
 var (
 groups RuleGroups
 node ruleGroups

@@ -322,7 +322,9 @@ func Parse(content []byte) (*RuleGroups, []error) {
 )

 decoder := yaml.NewDecoder(bytes.NewReader(content))
+if !ignoreUnknownFields {
 decoder.KnownFields(true)
+}
 err := decoder.Decode(&groups)
 // Ignore io.EOF which happens with empty input.
 if err != nil && !errors.Is(err, io.EOF) {

@@ -341,12 +343,12 @@ func Parse(content []byte) (*RuleGroups, []error) {
 }

 // ParseFile reads and parses rules from a file.
-func ParseFile(file string) (*RuleGroups, []error) {
+func ParseFile(file string, ignoreUnknownFields bool) (*RuleGroups, []error) {
 b, err := os.ReadFile(file)
 if err != nil {
 return nil, []error{fmt.Errorf("%s: %w", file, err)}
 }
-rgs, errs := Parse(b)
+rgs, errs := Parse(b, ignoreUnknownFields)
 for i := range errs {
 errs[i] = fmt.Errorf("%s: %w", file, errs[i])
 }
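The mechanism behind the flag is yaml.v3's KnownFields switch shown in the hunk above: with KnownFields(true) the decoder rejects any YAML field that has no destination in the target struct, while the default decoder silently drops it. A standalone sketch of that behaviour (the Item struct and the document are invented for the example):

```go
package main

import (
	"fmt"
	"strings"

	"gopkg.in/yaml.v3"
)

type Item struct {
	Name string `yaml:"name"`
}

func main() {
	doc := "name: rules\nownership:\n  service: test\n"

	// Strict: the unknown "ownership" field is an error, mirroring promtool's default.
	strict := yaml.NewDecoder(strings.NewReader(doc))
	strict.KnownFields(true)
	var a Item
	fmt.Println("strict:", strict.Decode(&a)) // reports that field ownership is not found in type main.Item

	// Lenient: unknown fields are ignored, mirroring --ignore-unknown-fields.
	lenient := yaml.NewDecoder(strings.NewReader(doc))
	var b Item
	fmt.Println("lenient:", lenient.Decode(&b), b.Name) // <nil> rules
}
```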
@@ -24,7 +24,7 @@ import (
 )

 func TestParseFileSuccess(t *testing.T) {
-_, errs := ParseFile("testdata/test.yaml")
+_, errs := ParseFile("testdata/test.yaml", false)
 require.Empty(t, errs, "unexpected errors parsing file")
 }

@@ -84,7 +84,7 @@ func TestParseFileFailure(t *testing.T) {
 }

 for _, c := range table {
-_, errs := ParseFile(filepath.Join("testdata", c.filename))
+_, errs := ParseFile(filepath.Join("testdata", c.filename), false)
 require.NotEmpty(t, errs, "Expected error parsing %s but got none", c.filename)
 require.ErrorContainsf(t, errs[0], c.errMsg, "Expected error for %s.", c.filename)
 }

@@ -179,7 +179,7 @@ groups:
 }

 for _, tst := range tests {
-rgs, errs := Parse([]byte(tst.ruleString))
+rgs, errs := Parse([]byte(tst.ruleString), false)
 require.NotNil(t, rgs, "Rule parsing, rule=\n"+tst.ruleString)
 passed := (tst.shouldPass && len(errs) == 0) || (!tst.shouldPass && len(errs) > 0)
 require.True(t, passed, "Rule validation failed, rule=\n"+tst.ruleString)

@@ -206,7 +206,7 @@ groups:
 annotations:
 summary: "Instance {{ $labels.instance }} up"
 `
-_, errs := Parse([]byte(group))
+_, errs := Parse([]byte(group), false)
 require.Len(t, errs, 2, "Expected two errors")
 var err00 *Error
 require.ErrorAs(t, errs[0], &err00)
@@ -207,7 +207,7 @@ func (m *Manager) Update(interval time.Duration, files []string, externalLabels
 default:
 }

-groups, errs := m.LoadGroups(interval, externalLabels, externalURL, groupEvalIterationFunc, files...)
+groups, errs := m.LoadGroups(interval, externalLabels, externalURL, groupEvalIterationFunc, false, files...)

 if errs != nil {
 for _, e := range errs {

@@ -276,7 +276,7 @@ func (m *Manager) Update(interval time.Duration, files []string, externalLabels

 // GroupLoader is responsible for loading rule groups from arbitrary sources and parsing them.
 type GroupLoader interface {
-Load(identifier string) (*rulefmt.RuleGroups, []error)
+Load(identifier string, ignoreUnknownFields bool) (*rulefmt.RuleGroups, []error)
 Parse(query string) (parser.Expr, error)
 }

@@ -284,22 +284,22 @@ type GroupLoader interface {
 // and parser.ParseExpr.
 type FileLoader struct{}

-func (FileLoader) Load(identifier string) (*rulefmt.RuleGroups, []error) {
+func (FileLoader) Load(identifier string, ignoreUnknownFields bool) (*rulefmt.RuleGroups, []error) {
-return rulefmt.ParseFile(identifier)
+return rulefmt.ParseFile(identifier, ignoreUnknownFields)
 }

 func (FileLoader) Parse(query string) (parser.Expr, error) { return parser.ParseExpr(query) }

 // LoadGroups reads groups from a list of files.
 func (m *Manager) LoadGroups(
-interval time.Duration, externalLabels labels.Labels, externalURL string, groupEvalIterationFunc GroupEvalIterationFunc, filenames ...string,
+interval time.Duration, externalLabels labels.Labels, externalURL string, groupEvalIterationFunc GroupEvalIterationFunc, ignoreUnknownFields bool, filenames ...string,
 ) (map[string]*Group, []error) {
 groups := make(map[string]*Group)

 shouldRestore := !m.restored

 for _, fn := range filenames {
-rgs, errs := m.opts.GroupLoader.Load(fn)
+rgs, errs := m.opts.GroupLoader.Load(fn, ignoreUnknownFields)
 if errs != nil {
 return nil, errs
 }
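Because GroupLoader now threads ignoreUnknownFields through Load, any custom loader outside this repository has to accept the extra argument and can honour it by forwarding to rulefmt.Parse. A minimal sketch of such a loader; memLoader, its package name, and the in-memory map are invented for the example:

```go
package myrules

import (
	"fmt"

	"github.com/prometheus/prometheus/model/rulefmt"
	"github.com/prometheus/prometheus/promql/parser"
)

// memLoader is a hypothetical rules.GroupLoader that serves rule group
// documents from an in-memory map instead of the filesystem; it honours
// ignoreUnknownFields by passing it straight to rulefmt.Parse.
type memLoader struct {
	docs map[string][]byte // identifier -> raw YAML rule group document
}

func (l memLoader) Load(identifier string, ignoreUnknownFields bool) (*rulefmt.RuleGroups, []error) {
	content, ok := l.docs[identifier]
	if !ok {
		return nil, []error{fmt.Errorf("unknown identifier %q", identifier)}
	}
	return rulefmt.Parse(content, ignoreUnknownFields)
}

func (l memLoader) Parse(query string) (parser.Expr, error) {
	return parser.ParseExpr(query)
}
```

A Manager whose options set GroupLoader to such a loader then receives the setting through the new bool parameter on LoadGroups, exactly as FileLoader does in the hunk above.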
@@ -808,7 +808,7 @@ func TestUpdate(t *testing.T) {
 }

 // Groups will be recreated if updated.
-rgs, errs := rulefmt.ParseFile("fixtures/rules.yaml")
+rgs, errs := rulefmt.ParseFile("fixtures/rules.yaml", false)
 require.Empty(t, errs, "file parsing failures")

 tmpFile, err := os.CreateTemp("", "rules.test.*.yaml")

@@ -1532,7 +1532,7 @@ func TestManager_LoadGroups_ShouldCheckWhetherEachRuleHasDependentsAndDependenci
 })

 t.Run("load a mix of dependent and independent rules", func(t *testing.T) {
-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, []string{"fixtures/rules_multiple.yaml"}...)
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, []string{"fixtures/rules_multiple.yaml"}...)
 require.Empty(t, errs)
 require.Len(t, groups, 1)

@@ -1567,7 +1567,7 @@ func TestManager_LoadGroups_ShouldCheckWhetherEachRuleHasDependentsAndDependenci
 })

 t.Run("load only independent rules", func(t *testing.T) {
-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, []string{"fixtures/rules_multiple_independent.yaml"}...)
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, []string{"fixtures/rules_multiple_independent.yaml"}...)
 require.Empty(t, errs)
 require.Len(t, groups, 1)

@@ -1975,7 +1975,7 @@ func TestAsyncRuleEvaluation(t *testing.T) {
 t.Cleanup(cancel)

 ruleManager := NewManager(optsFactory(storage, &maxInflight, &inflightQueries, 0))
-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, []string{"fixtures/rules_multiple.yaml"}...)
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, []string{"fixtures/rules_multiple.yaml"}...)
 require.Empty(t, errs)
 require.Len(t, groups, 1)

@@ -2021,7 +2021,7 @@ func TestAsyncRuleEvaluation(t *testing.T) {
 opts.RuleConcurrencyController = nil
 ruleManager := NewManager(opts)

-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, []string{"fixtures/rules_multiple.yaml"}...)
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, []string{"fixtures/rules_multiple.yaml"}...)
 require.Empty(t, errs)
 require.Len(t, groups, 1)

@@ -2059,7 +2059,7 @@ func TestAsyncRuleEvaluation(t *testing.T) {
 opts.RuleConcurrencyController = nil
 ruleManager := NewManager(opts)

-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, []string{"fixtures/rules_multiple_independent.yaml"}...)
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, []string{"fixtures/rules_multiple_independent.yaml"}...)
 require.Empty(t, errs)
 require.Len(t, groups, 1)

@@ -2103,7 +2103,7 @@ func TestAsyncRuleEvaluation(t *testing.T) {
 opts.RuleConcurrencyController = nil
 ruleManager := NewManager(opts)

-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, []string{"fixtures/rules_multiple_independent.yaml"}...)
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, []string{"fixtures/rules_multiple_independent.yaml"}...)
 require.Empty(t, errs)
 require.Len(t, groups, 1)

@@ -2150,7 +2150,7 @@ func TestAsyncRuleEvaluation(t *testing.T) {
 opts.RuleConcurrencyController = nil
 ruleManager := NewManager(opts)

-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, []string{"fixtures/rules_indeterminates.yaml"}...)
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, []string{"fixtures/rules_indeterminates.yaml"}...)
 require.Empty(t, errs)
 require.Len(t, groups, 1)

@@ -2189,7 +2189,7 @@ func TestAsyncRuleEvaluation(t *testing.T) {
 opts.RuleConcurrencyController = nil
 ruleManager := NewManager(opts)

-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, []string{"fixtures/rules_multiple_dependents_on_base.yaml"}...)
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, []string{"fixtures/rules_multiple_dependents_on_base.yaml"}...)
 require.Empty(t, errs)
 require.Len(t, groups, 1)
 var group *Group

@@ -2235,7 +2235,7 @@ func TestAsyncRuleEvaluation(t *testing.T) {
 opts.RuleConcurrencyController = nil
 ruleManager := NewManager(opts)

-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, []string{"fixtures/rules_chain.yaml"}...)
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, []string{"fixtures/rules_chain.yaml"}...)
 require.Empty(t, errs)
 require.Len(t, groups, 1)
 var group *Group

@@ -2279,7 +2279,7 @@ func TestBoundedRuleEvalConcurrency(t *testing.T) {

 ruleManager := NewManager(optsFactory(storage, &maxInflight, &inflightQueries, maxConcurrency))

-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, files...)
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, files...)
 require.Empty(t, errs)
 require.Len(t, groups, groupCount)

@@ -2521,7 +2521,7 @@ func TestRuleDependencyController_AnalyseRules(t *testing.T) {
 QueryFunc: func(ctx context.Context, q string, ts time.Time) (promql.Vector, error) { return nil, nil },
 })

-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, tc.ruleFile)
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, tc.ruleFile)
 require.Empty(t, errs)
 require.Len(t, groups, 1)

@@ -2550,7 +2550,7 @@ func BenchmarkRuleDependencyController_AnalyseRules(b *testing.B) {
 QueryFunc: func(ctx context.Context, q string, ts time.Time) (promql.Vector, error) { return nil, nil },
 })

-groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, "fixtures/rules_multiple.yaml")
+groups, errs := ruleManager.LoadGroups(time.Second, labels.EmptyLabels(), "", nil, false, "fixtures/rules_multiple.yaml")
 require.Empty(b, errs)
 require.Len(b, groups, 1)