diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index f6af7b21d1..a3535c61d8 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -27,7 +27,10 @@ updates:
     directory: "/"
     schedule:
       interval: "monthly"
-    open-pull-requests-limit: 0
+  - package-ecosystem: "github-actions"
+    directory: "/scripts"
+    schedule:
+      interval: "monthly"
   - package-ecosystem: "docker"
     directory: "/"
     schedule:
diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml
index c6ac3d74ef..3f557a0895 100644
--- a/.github/workflows/lock.yml
+++ b/.github/workflows/lock.yml
@@ -16,7 +16,7 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository_owner == 'prometheus'
     steps:
-      - uses: dessant/lock-threads@v4
+      - uses: dessant/lock-threads@be8aa5be94131386884a6da4189effda9b14aa21 # v4.0.1
         with:
           process-only: 'issues'
           issue-inactive-days: '180'
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 057342dedb..4a9d4521a1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,27 @@
 # Changelog
 
+## 2.47.0 / 2023-09-06
+
+This release adds an experimental OpenTelemetry (OTLP) Ingestion feature,
+and also a new setting, `keep_dropped_targets`, to limit the number of dropped
+targets held in memory. This defaults to 0, meaning 'no limit', so we encourage
+users running large Prometheus servers to try setting a limit such as 100.
+
+* [FEATURE] Web: Add OpenTelemetry (OTLP) Ingestion endpoint. #12571 #12643
+* [FEATURE] Scraping: Optionally limit detail on dropped targets, to save memory. #12647
+* [ENHANCEMENT] TSDB: Write head chunks to disk in the background to reduce blocking. #11818
+* [ENHANCEMENT] PromQL: Speed up aggregate and function queries. #12682
+* [ENHANCEMENT] PromQL: More efficient evaluation of queries with `timestamp()`. #12579
+* [ENHANCEMENT] API: Faster streaming of Labels to JSON. #12598
+* [ENHANCEMENT] Agent: Memory pooling optimisation. #12651
+* [ENHANCEMENT] TSDB: Prevent storage space leaks due to terminated snapshots on shutdown. #12664
+* [ENHANCEMENT] Histograms: Refactoring and optimisations. #12352 #12584 #12596 #12711 #12054
+* [ENHANCEMENT] Histograms: Add `histogram_stdvar` and `histogram_stddev` functions. #12614
+* [ENHANCEMENT] Remote-write: Add `http.resend_count` tracing attribute. #12676
+* [ENHANCEMENT] TSDB: Support native histograms in snapshot on shutdown. #12722
+* [BUGFIX] TSDB/Agent: Ensure that new series get written to the WAL on rollback. #12592
+* [BUGFIX] Scraping: Fix infinite loop on exemplar in protobuf format. #12737
+
 ## 2.46.0 / 2023-07-25
 
 * [FEATURE] Promtool: Add PromQL format and label matcher set/delete commands to promtool. #11411
diff --git a/VERSION b/VERSION
index cc8758303e..bb13a7e354 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-2.46.0
+2.47.0
diff --git a/cmd/prometheus/main.go b/cmd/prometheus/main.go
index cab65626aa..43b781f62c 100644
--- a/cmd/prometheus/main.go
+++ b/cmd/prometheus/main.go
@@ -1378,17 +1378,17 @@ func (s *readyStorage) StartTime() (int64, error) {
 }
 
 // Querier implements the Storage interface.
-func (s *readyStorage) Querier(ctx context.Context, mint, maxt int64) (storage.Querier, error) {
+func (s *readyStorage) Querier(mint, maxt int64) (storage.Querier, error) {
 	if x := s.get(); x != nil {
-		return x.Querier(ctx, mint, maxt)
+		return x.Querier(mint, maxt)
 	}
 	return nil, tsdb.ErrNotReady
 }
 
 // ChunkQuerier implements the Storage interface.
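The `readyStorage` hunk above reflects an interface change that runs through this whole release: `storage.Queryable.Querier` no longer takes a `context.Context`; the context is instead passed to the per-call methods such as `Select`. A minimal sketch of how a caller adapts, assuming the post-change `storage` interfaces shown in this diff (the helper function is illustrative):

```go
package example

import (
	"context"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/storage"
)

// countSeries demonstrates the new calling convention: Querier takes
// only the time range, while the context travels with each call.
func countSeries(ctx context.Context, queryable storage.Queryable, mint, maxt int64) (int, error) {
	q, err := queryable.Querier(mint, maxt) // was: Querier(ctx, mint, maxt)
	if err != nil {
		return 0, err
	}
	defer q.Close()

	matchAll := labels.MustNewMatcher(labels.MatchRegexp, "", ".*")
	ss := q.Select(ctx, false, nil, matchAll) // ctx is now passed per call
	n := 0
	for ss.Next() {
		n++
	}
	return n, ss.Err()
}
```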
-func (s *readyStorage) ChunkQuerier(ctx context.Context, mint, maxt int64) (storage.ChunkQuerier, error) {
+func (s *readyStorage) ChunkQuerier(mint, maxt int64) (storage.ChunkQuerier, error) {
 	if x := s.get(); x != nil {
-		return x.ChunkQuerier(ctx, mint, maxt)
+		return x.ChunkQuerier(mint, maxt)
 	}
 	return nil, tsdb.ErrNotReady
 }
@@ -1461,11 +1461,11 @@ func (s *readyStorage) CleanTombstones() error {
 }
 
 // Delete implements the api_v1.TSDBAdminStats and api_v2.TSDBAdmin interfaces.
-func (s *readyStorage) Delete(mint, maxt int64, ms ...*labels.Matcher) error {
+func (s *readyStorage) Delete(ctx context.Context, mint, maxt int64, ms ...*labels.Matcher) error {
 	if x := s.get(); x != nil {
 		switch db := x.(type) {
 		case *tsdb.DB:
-			return db.Delete(mint, maxt, ms...)
+			return db.Delete(ctx, mint, maxt, ms...)
 		case *agent.DB:
 			return agent.ErrUnsupported
 		default:
diff --git a/cmd/promtool/backfill_test.go b/cmd/promtool/backfill_test.go
index e6f7cad31b..b77dc7826d 100644
--- a/cmd/promtool/backfill_test.go
+++ b/cmd/promtool/backfill_test.go
@@ -45,7 +45,7 @@ func sortSamples(samples []backfillSample) {
 }
 
 func queryAllSeries(t testing.TB, q storage.Querier, expectedMinTime, expectedMaxTime int64) []backfillSample { // nolint:revive
-	ss := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))
+	ss := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))
 	samples := []backfillSample{}
 	for ss.Next() {
 		series := ss.At()
@@ -67,7 +67,7 @@ func testBlocks(t *testing.T, db *tsdb.DB, expectedMinTime, expectedMaxTime, exp
 		require.Equal(t, block.MinTime()/expectedBlockDuration, (block.MaxTime()-1)/expectedBlockDuration, "block %d contains data outside of one aligned block duration", i)
 	}
 
-	q, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64)
+	q, err := db.Querier(math.MinInt64, math.MaxInt64)
 	require.NoError(t, err)
 	defer func() {
 		require.NoError(t, q.Close())
diff --git a/cmd/promtool/main.go b/cmd/promtool/main.go
index da4b8dc797..21b1439a85 100644
--- a/cmd/promtool/main.go
+++ b/cmd/promtool/main.go
@@ -86,6 +86,8 @@ func main() {
 		httpConfigFilePath string
 	)
 
+	ctx := context.Background()
+
 	app := kingpin.New(filepath.Base(os.Args[0]), "Tooling for the Prometheus monitoring system.").UsageWriter(os.Stdout)
 	app.Version(version.Print("promtool"))
 	app.HelpFlag.Short('h')
@@ -370,13 +372,13 @@ func main() {
 		os.Exit(checkErr(benchmarkWrite(*benchWriteOutPath, *benchSamplesFile, *benchWriteNumMetrics, *benchWriteNumScrapes)))
 
 	case tsdbAnalyzeCmd.FullCommand():
-		os.Exit(checkErr(analyzeBlock(*analyzePath, *analyzeBlockID, *analyzeLimit, *analyzeRunExtended)))
+		os.Exit(checkErr(analyzeBlock(ctx, *analyzePath, *analyzeBlockID, *analyzeLimit, *analyzeRunExtended)))
 
 	case tsdbListCmd.FullCommand():
 		os.Exit(checkErr(listBlocks(*listPath, *listHumanReadable)))
 
 	case tsdbDumpCmd.FullCommand():
-		os.Exit(checkErr(dumpSamples(*dumpPath, *dumpMinTime, *dumpMaxTime, *dumpMatch)))
+		os.Exit(checkErr(dumpSamples(ctx, *dumpPath, *dumpMinTime, *dumpMaxTime, *dumpMatch)))
 	// TODO(aSquare14): Work on adding support for custom block size.
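The promtool and tsdb hunks that follow thread the same context into the TSDB index reader: `LabelNames`, `SortedLabelValues` and `Postings` all gain a `ctx` argument. Note also the `AllPostingsKey` rewrite further below: Go only lets a multi-valued call be forwarded directly when it supplies the callee's entire argument list, so once `Postings` grew a context parameter the key pair has to be unpacked first. A sketch under those assumed signatures (the helper is illustrative, not promtool's actual code):

```go
package example

import (
	"context"

	"github.com/prometheus/prometheus/tsdb"
	"github.com/prometheus/prometheus/tsdb/index"
)

// summarizeIndex walks a block index with the context-aware reader.
func summarizeIndex(ctx context.Context, ir tsdb.IndexReader) (names []string, entries int, err error) {
	if names, err = ir.LabelNames(ctx); err != nil {
		return nil, 0, err
	}
	n, v := index.AllPostingsKey() // was: ir.Postings(index.AllPostingsKey())
	p, err := ir.Postings(ctx, n, v)
	if err != nil {
		return nil, 0, err
	}
	for p.Next() {
		entries++
	}
	return names, entries, p.Err()
}
```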
case openMetricsImportCmd.FullCommand(): os.Exit(backfillOpenMetrics(*importFilePath, *importDBPath, *importHumanReadable, *importQuiet, *maxBlockDuration)) diff --git a/cmd/promtool/rules_test.go b/cmd/promtool/rules_test.go index 213b7d2a01..bfea7c937d 100644 --- a/cmd/promtool/rules_test.go +++ b/cmd/promtool/rules_test.go @@ -124,10 +124,10 @@ func TestBackfillRuleIntegration(t *testing.T) { blocks := db.Blocks() require.Equal(t, (i+1)*tt.expectedBlockCount, len(blocks)) - q, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) - selectedSeries := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*")) + selectedSeries := q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*")) var seriesCount, samplesCount int for selectedSeries.Next() { seriesCount++ @@ -248,11 +248,11 @@ func TestBackfillLabels(t *testing.T) { db, err := tsdb.Open(tmpDir, nil, nil, opts, nil) require.NoError(t, err) - q, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) t.Run("correct-labels", func(t *testing.T) { - selectedSeries := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*")) + selectedSeries := q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*")) for selectedSeries.Next() { series := selectedSeries.At() expectedLabels := labels.FromStrings("__name__", "rulename", "name1", "value-from-rule") diff --git a/cmd/promtool/tsdb.go b/cmd/promtool/tsdb.go index 820cd4687c..bda047e360 100644 --- a/cmd/promtool/tsdb.go +++ b/cmd/promtool/tsdb.go @@ -413,7 +413,7 @@ func openBlock(path, blockID string) (*tsdb.DBReadOnly, tsdb.BlockReader, error) return db, b, nil } -func analyzeBlock(path, blockID string, limit int, runExtended bool) error { +func analyzeBlock(ctx context.Context, path, blockID string, limit int, runExtended bool) error { db, block, err := openBlock(path, blockID) if err != nil { return err @@ -433,7 +433,7 @@ func analyzeBlock(path, blockID string, limit int, runExtended bool) error { } defer ir.Close() - allLabelNames, err := ir.LabelNames() + allLabelNames, err := ir.LabelNames(ctx) if err != nil { return err } @@ -460,7 +460,7 @@ func analyzeBlock(path, blockID string, limit int, runExtended bool) error { labelpairsUncovered := map[string]uint64{} labelpairsCount := map[string]uint64{} entries := 0 - p, err := ir.Postings("", "") // The special all key. + p, err := ir.Postings(ctx, "", "") // The special all key. 
if err != nil { return err } @@ -512,7 +512,7 @@ func analyzeBlock(path, blockID string, limit int, runExtended bool) error { postingInfos = postingInfos[:0] for _, n := range allLabelNames { - values, err := ir.SortedLabelValues(n) + values, err := ir.SortedLabelValues(ctx, n) if err != nil { return err } @@ -528,7 +528,7 @@ func analyzeBlock(path, blockID string, limit int, runExtended bool) error { postingInfos = postingInfos[:0] for _, n := range allLabelNames { - lv, err := ir.SortedLabelValues(n) + lv, err := ir.SortedLabelValues(ctx, n) if err != nil { return err } @@ -538,12 +538,12 @@ func analyzeBlock(path, blockID string, limit int, runExtended bool) error { printInfo(postingInfos) postingInfos = postingInfos[:0] - lv, err := ir.SortedLabelValues("__name__") + lv, err := ir.SortedLabelValues(ctx, "__name__") if err != nil { return err } for _, n := range lv { - postings, err := ir.Postings("__name__", n) + postings, err := ir.Postings(ctx, "__name__", n) if err != nil { return err } @@ -560,14 +560,15 @@ func analyzeBlock(path, blockID string, limit int, runExtended bool) error { printInfo(postingInfos) if runExtended { - return analyzeCompaction(block, ir) + return analyzeCompaction(ctx, block, ir) } return nil } -func analyzeCompaction(block tsdb.BlockReader, indexr tsdb.IndexReader) (err error) { - postingsr, err := indexr.Postings(index.AllPostingsKey()) +func analyzeCompaction(ctx context.Context, block tsdb.BlockReader, indexr tsdb.IndexReader) (err error) { + n, v := index.AllPostingsKey() + postingsr, err := indexr.Postings(ctx, n, v) if err != nil { return err } @@ -619,7 +620,7 @@ func analyzeCompaction(block tsdb.BlockReader, indexr tsdb.IndexReader) (err err return nil } -func dumpSamples(path string, mint, maxt int64, match string) (err error) { +func dumpSamples(ctx context.Context, path string, mint, maxt int64, match string) (err error) { db, err := tsdb.OpenDBReadOnly(path, nil) if err != nil { return err @@ -627,7 +628,7 @@ func dumpSamples(path string, mint, maxt int64, match string) (err error) { defer func() { err = tsdb_errors.NewMulti(err, db.Close()).Err() }() - q, err := db.Querier(context.TODO(), mint, maxt) + q, err := db.Querier(mint, maxt) if err != nil { return err } @@ -637,7 +638,7 @@ func dumpSamples(path string, mint, maxt int64, match string) (err error) { if err != nil { return err } - ss := q.Select(false, nil, matchers...) + ss := q.Select(ctx, false, nil, matchers...) 
for ss.Next() { series := ss.At() @@ -661,7 +662,7 @@ func dumpSamples(path string, mint, maxt int64, match string) (err error) { } if ws := ss.Warnings(); len(ws) > 0 { - return tsdb_errors.NewMulti(ws...).Err() + return tsdb_errors.NewMulti(ws.AsErrors()...).Err() } if ss.Err() != nil { diff --git a/config/config_test.go b/config/config_test.go index d3288cc90d..b03b2ebe3d 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -1745,6 +1745,14 @@ var expectedErrors = []struct { filename: "ec2_filters_empty_values.bad.yml", errMsg: `EC2 SD configuration filter values cannot be empty`, }, + { + filename: "ec2_token_file.bad.yml", + errMsg: `at most one of bearer_token & bearer_token_file must be configured`, + }, + { + filename: "lightsail_token_file.bad.yml", + errMsg: `at most one of bearer_token & bearer_token_file must be configured`, + }, { filename: "section_key_dup.bad.yml", errMsg: "field scrape_configs already set in type config.plain", @@ -1769,6 +1777,10 @@ var expectedErrors = []struct { filename: "azure_authentication_method.bad.yml", errMsg: "unknown authentication_type \"invalid\". Supported types are \"OAuth\" or \"ManagedIdentity\"", }, + { + filename: "azure_bearertoken_basicauth.bad.yml", + errMsg: "at most one of basic_auth, oauth2, bearer_token & bearer_token_file must be configured", + }, { filename: "empty_scrape_config.bad.yml", errMsg: "empty or null scrape config section", @@ -1821,6 +1833,10 @@ var expectedErrors = []struct { filename: "puppetdb_no_scheme.bad.yml", errMsg: "URL scheme must be 'http' or 'https'", }, + { + filename: "puppetdb_token_file.bad.yml", + errMsg: "at most one of bearer_token & bearer_token_file must be configured", + }, { filename: "hetzner_role.bad.yml", errMsg: "unknown role", @@ -1857,6 +1873,10 @@ var expectedErrors = []struct { filename: "http_url_no_host.bad.yml", errMsg: "host is missing in URL", }, + { + filename: "http_token_file.bad.yml", + errMsg: "at most one of bearer_token & bearer_token_file must be configured", + }, { filename: "http_url_bad_scheme.bad.yml", errMsg: "URL scheme must be 'http' or 'https'", @@ -1885,6 +1905,10 @@ var expectedErrors = []struct { filename: "uyuni_no_server.bad.yml", errMsg: "Uyuni SD configuration requires server host", }, + { + filename: "uyuni_token_file.bad.yml", + errMsg: "at most one of bearer_token & bearer_token_file must be configured", + }, { filename: "ionos_datacenter.bad.yml", errMsg: "datacenter id can't be empty", diff --git a/config/testdata/azure_bearertoken_basicauth.bad.yml b/config/testdata/azure_bearertoken_basicauth.bad.yml new file mode 100644 index 0000000000..1c22ce2e8e --- /dev/null +++ b/config/testdata/azure_bearertoken_basicauth.bad.yml @@ -0,0 +1,11 @@ +scrape_configs: + - job_name: prometheus + azure_sd_configs: + - subscription_id: 11AAAA11-A11A-111A-A111-1111A1111A11 + tenant_id: BBBB222B-B2B2-2B22-B222-2BB2222BB2B2 + client_id: 333333CC-3C33-3333-CCC3-33C3CCCCC33C + client_secret: mysecret + bearer_token: 1234 + basic_auth: + username: user + password: password diff --git a/config/testdata/ec2_token_file.bad.yml b/config/testdata/ec2_token_file.bad.yml new file mode 100644 index 0000000000..cd413ee090 --- /dev/null +++ b/config/testdata/ec2_token_file.bad.yml @@ -0,0 +1,6 @@ +scrape_configs: + - job_name: foo + ec2_sd_configs: + - region: us-east-1 + bearer_token: foo + bearer_token_file: foo diff --git a/config/testdata/http_token_file.bad.yml b/config/testdata/http_token_file.bad.yml new file mode 100644 index 0000000000..b3d7a35c9e --- /dev/null 
+++ b/config/testdata/http_token_file.bad.yml @@ -0,0 +1,6 @@ +scrape_configs: + - job_name: foo + http_sd_configs: + - url: http://foo + bearer_token: foo + bearer_token_file: foo diff --git a/config/testdata/lightsail_token_file.bad.yml b/config/testdata/lightsail_token_file.bad.yml new file mode 100644 index 0000000000..1f1ca64efb --- /dev/null +++ b/config/testdata/lightsail_token_file.bad.yml @@ -0,0 +1,6 @@ +scrape_configs: + - job_name: foo + lightsail_sd_configs: + - region: us-east-1 + bearer_token: foo + bearer_token_file: foo diff --git a/config/testdata/puppetdb_token_file.bad.yml b/config/testdata/puppetdb_token_file.bad.yml new file mode 100644 index 0000000000..7775f2d19d --- /dev/null +++ b/config/testdata/puppetdb_token_file.bad.yml @@ -0,0 +1,7 @@ +scrape_configs: +- job_name: puppetdb + puppetdb_sd_configs: + - url: http://puppet + query: 'resources { type = "Package" and title = "httpd" }' + bearer_token: foo + bearer_token_file: foo diff --git a/config/testdata/uyuni_token_file.bad.yml b/config/testdata/uyuni_token_file.bad.yml new file mode 100644 index 0000000000..5a061dadf5 --- /dev/null +++ b/config/testdata/uyuni_token_file.bad.yml @@ -0,0 +1,8 @@ +scrape_configs: + - job_name: uyuni + uyuni_sd_configs: + - server: "server" + username: "username" + password: "password" + bearer_token: foo + bearer_token_file: foo diff --git a/discovery/aws/ec2.go b/discovery/aws/ec2.go index 86d76627e1..64c8fdce63 100644 --- a/discovery/aws/ec2.go +++ b/discovery/aws/ec2.go @@ -129,7 +129,7 @@ func (c *EC2SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { return errors.New("EC2 SD configuration filter values cannot be empty") } } - return nil + return c.HTTPClientConfig.Validate() } // EC2Discovery periodically performs EC2-SD requests. It implements diff --git a/discovery/aws/lightsail.go b/discovery/aws/lightsail.go index e671769ca3..c0198d6a77 100644 --- a/discovery/aws/lightsail.go +++ b/discovery/aws/lightsail.go @@ -109,7 +109,7 @@ func (c *LightsailSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) err } c.Region = region } - return nil + return c.HTTPClientConfig.Validate() } // LightsailDiscovery periodically performs Lightsail-SD requests. It implements diff --git a/discovery/azure/azure.go b/discovery/azure/azure.go index 098fbb4c5f..61dfc4b249 100644 --- a/discovery/azure/azure.go +++ b/discovery/azure/azure.go @@ -144,7 +144,7 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { return fmt.Errorf("unknown authentication_type %q. 
Supported types are %q or %q", c.AuthenticationMethod, authMethodOAuth, authMethodManagedIdentity) } - return nil + return c.HTTPClientConfig.Validate() } type Discovery struct { diff --git a/discovery/http/http.go b/discovery/http/http.go index ec958c6148..2980d7efda 100644 --- a/discovery/http/http.go +++ b/discovery/http/http.go @@ -99,7 +99,7 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { if parsedURL.Host == "" { return fmt.Errorf("host is missing in URL") } - return nil + return c.HTTPClientConfig.Validate() } const httpSDURLLabel = model.MetaLabelPrefix + "url" diff --git a/discovery/puppetdb/puppetdb.go b/discovery/puppetdb/puppetdb.go index f22a2e22b5..9484a0aa63 100644 --- a/discovery/puppetdb/puppetdb.go +++ b/discovery/puppetdb/puppetdb.go @@ -115,7 +115,7 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { if c.Query == "" { return fmt.Errorf("query missing") } - return nil + return c.HTTPClientConfig.Validate() } // Discovery provides service discovery functionality based diff --git a/discovery/uyuni/uyuni.go b/discovery/uyuni/uyuni.go index e37acbf98a..bc33d28cba 100644 --- a/discovery/uyuni/uyuni.go +++ b/discovery/uyuni/uyuni.go @@ -146,7 +146,7 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { if c.Password == "" { return errors.New("Uyuni SD configuration requires a password") } - return nil + return c.HTTPClientConfig.Validate() } func login(rpcclient *xmlrpc.Client, user, pass string, duration int) (string, error) { diff --git a/go.mod b/go.mod index 98f9109965..2ff42f59b5 100644 --- a/go.mod +++ b/go.mod @@ -69,7 +69,7 @@ require ( go.uber.org/automaxprocs v1.5.2 go.uber.org/goleak v1.2.1 go.uber.org/multierr v1.11.0 - golang.org/x/net v0.12.0 + golang.org/x/net v0.13.0 golang.org/x/oauth2 v0.10.0 golang.org/x/sync v0.3.0 golang.org/x/sys v0.10.0 @@ -81,9 +81,9 @@ require ( google.golang.org/protobuf v1.31.0 gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v3 v3.0.1 - k8s.io/api v0.27.3 - k8s.io/apimachinery v0.27.3 - k8s.io/client-go v0.27.3 + k8s.io/api v0.28.1 + k8s.io/apimachinery v0.28.1 + k8s.io/client-go v0.28.1 k8s.io/klog v1.0.0 k8s.io/klog/v2 v2.100.1 ) @@ -94,6 +94,7 @@ require ( github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect github.com/dustin/go-humanize v1.0.0 // indirect + github.com/google/gnostic-models v0.6.8 // indirect github.com/google/s2a-go v0.1.4 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect @@ -124,7 +125,7 @@ require ( github.com/docker/go-connections v0.4.0 // indirect github.com/docker/go-units v0.5.0 // indirect github.com/emicklei/go-restful/v3 v3.10.2 // indirect - github.com/evanphx/json-patch v4.12.0+incompatible // indirect + github.com/evanphx/json-patch v5.6.0+incompatible // indirect github.com/fatih/color v1.15.0 // indirect github.com/felixge/httpsnoop v1.0.3 // indirect github.com/ghodss/yaml v1.0.0 // indirect @@ -144,7 +145,6 @@ require ( github.com/golang/glog v1.1.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.3 // indirect - github.com/google/gnostic v0.6.9 // indirect github.com/google/go-cmp v0.5.9 // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/google/gofuzz v1.2.0 // indirect @@ -196,7 +196,7 @@ require ( gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gotest.tools/v3 
v3.0.3 // indirect - k8s.io/kube-openapi v0.0.0-20230525220651-2546d827e515 // indirect + k8s.io/kube-openapi v0.0.0-20230717233707-2695361300d9 // indirect k8s.io/utils v0.0.0-20230711102312-30195339c3c7 // indirect sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect sigs.k8s.io/structured-merge-diff/v4 v4.3.0 // indirect diff --git a/go.sum b/go.sum index fa4bc0d6a6..86a1c2097d 100644 --- a/go.sum +++ b/go.sum @@ -74,7 +74,6 @@ github.com/DmitriyVTitov/size v1.5.0/go.mod h1:le6rNI4CoLQV1b9gzp1+3d7hMAD/uu2Qc github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= @@ -114,13 +113,11 @@ github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+Ce github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -134,7 +131,6 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= -github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= @@ -173,7 +169,6 @@ github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKoh github.com/docker/go-connections 
v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= @@ -190,15 +185,14 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/go-control-plane v0.11.1 h1:wSUXTlLfiAQRWs2F+p+EKOY9rUyis1MyGqJ2DIk5HpM= github.com/envoyproxy/go-control-plane v0.11.1/go.mod h1:uhMcXKCQMEJHiAb0w+YGefQLaTEw+YhGluxZkrTmD0g= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v1.0.2 h1:QkIBuU5k+x7/QXPvPPnWXWlCdaBFApVqftFV6k087DA= github.com/envoyproxy/protoc-gen-validate v1.0.2/go.mod h1:GpiZQP3dDbg4JouG/NNS7QWXpgx6x8QiMKdmN72jogE= -github.com/evanphx/json-patch v4.12.0+incompatible h1:4onqiflcdA9EOZ4RxV643DvftH5pOlLGNtQ5lPWQu84= -github.com/evanphx/json-patch v4.12.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/evanphx/json-patch v5.6.0+incompatible h1:jBYDEEiFBPxA0v50tFdvOzQQTCvpL6mnFh5mB2/l16U= +github.com/evanphx/json-patch v5.6.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= @@ -206,7 +200,6 @@ github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk= github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/flowstack/go-jsonschema v0.1.1/go.mod h1:yL7fNggx1o8rm9RlgXv7hTBWxdBM0rVwpMwimd3F3N0= github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= @@ -276,7 +269,7 @@ github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPr github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I= github.com/go-sql-driver/mysql 
v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I= +github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= github.com/go-zookeeper/zk v1.0.3 h1:7M2kwOsc//9VeeFiPtf+uSJlVpU66x9Ba5+8XK7/TDg= github.com/go-zookeeper/zk v1.0.3/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw= github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= @@ -354,8 +347,8 @@ github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEW github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4= -github.com/google/gnostic v0.6.9 h1:ZK/5VhkoX835RikCHpSUJV9a+S3e1zLh59YnyWeBW+0= -github.com/google/gnostic v0.6.9/go.mod h1:Nm8234We1lq6iB9OmlgNv3nH91XLLVZHCDayfA3xq+E= +github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= +github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -525,7 +518,6 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxv github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= @@ -622,9 +614,9 @@ github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:v github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo/v2 v2.9.1 h1:zie5Ly042PD3bsCvsSOPvRnFwyo3rKe64TJlD6nu0mk= +github.com/onsi/ginkgo/v2 v2.9.4 h1:xR7vG4IXt5RWx6FfIjyAtsoMAtnc3C/rFXBBd2AjZwE= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/onsi/gomega v1.27.4 h1:Z2AnStgsdSayCMDiCU42qIz+HLqEPcgiOCXjAU/w+8E= +github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE= github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= @@ -735,13 +727,11 @@ github.com/smartystreets/assertions 
v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1 github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= @@ -777,9 +767,6 @@ github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3k github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= -github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= -github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= -github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc= github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= @@ -936,18 +923,16 @@ golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81R golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod 
h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50= -golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= +golang.org/x/net v0.13.0 h1:Nvo8UFsZ8X3BhAC9699Z1j7XQ3rsZnUUm7jfBEk1ueY= +golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -1023,7 +1008,6 @@ golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1181,7 +1165,6 @@ google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1m google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20230717213848-3f92550aa753 h1:+VoAg+OKmWaommL56xmZSE2sUK8A7m6SUO7X89F2tbw= google.golang.org/genproto v0.0.0-20230717213848-3f92550aa753/go.mod h1:iqkVr8IRpZ53gx1dEnWlCUIEwDWqWARWrbzpasaTNYM= google.golang.org/genproto/googleapis/api v0.0.0-20230717213848-3f92550aa753 h1:lCbbUxUDD+DiXx9Q6F/ttL0aAu7N2pz8XnmMm8ZW4NE= @@ -1207,7 +1190,6 @@ google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/grpc v1.56.2 h1:fVRFRnXvU+x6C4IlHZewvJOVHoOv1TUuQyoRsYnB4bI= google.golang.org/grpc v1.56.2/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= @@ -1223,7 +1205,6 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf 
v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= @@ -1271,14 +1252,14 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -k8s.io/api v0.27.3 h1:yR6oQXXnUEBWEWcvPWS0jQL575KoAboQPfJAuKNrw5Y= -k8s.io/api v0.27.3/go.mod h1:C4BNvZnQOF7JA/0Xed2S+aUyJSfTGkGFxLXz9MnpIpg= -k8s.io/apimachinery v0.27.3 h1:Ubye8oBufD04l9QnNtW05idcOe9Z3GQN8+7PqmuVcUM= -k8s.io/apimachinery v0.27.3/go.mod h1:XNfZ6xklnMCOGGFNqXG7bUrQCoR04dh/E7FprV6pb+E= -k8s.io/client-go v0.27.3 h1:7dnEGHZEJld3lYwxvLl7WoehK6lAq7GvgjxpA3nv1E8= -k8s.io/client-go v0.27.3/go.mod h1:2MBEKuTo6V1lbKy3z1euEGnhPfGZLKTS9tiJ2xodM48= -k8s.io/kube-openapi v0.0.0-20230525220651-2546d827e515 h1:OmK1d0WrkD3IPfkskvroRykOulHVHf0s0ZIFRjyt+UI= -k8s.io/kube-openapi v0.0.0-20230525220651-2546d827e515/go.mod h1:kzo02I3kQ4BTtEfVLaPbjvCkX97YqGve33wzlb3fofQ= +k8s.io/api v0.28.1 h1:i+0O8k2NPBCPYaMB+uCkseEbawEt/eFaiRqUx8aB108= +k8s.io/api v0.28.1/go.mod h1:uBYwID+66wiL28Kn2tBjBYQdEU0Xk0z5qF8bIBqk/Dg= +k8s.io/apimachinery v0.28.1 h1:EJD40og3GizBSV3mkIoXQBsws32okPOy+MkRyzh6nPY= +k8s.io/apimachinery v0.28.1/go.mod h1:X0xh/chESs2hP9koe+SdIAcXWcQ+RM5hy0ZynB+yEvw= +k8s.io/client-go v0.28.1 h1:pRhMzB8HyLfVwpngWKE8hDcXRqifh1ga2Z/PU9SXVK8= +k8s.io/client-go v0.28.1/go.mod h1:pEZA3FqOsVkCc07pFVzK076R+P/eXqsgx5zuuRWukNE= +k8s.io/kube-openapi v0.0.0-20230717233707-2695361300d9 h1:LyMgNKD2P8Wn1iAwQU5OhxCKlKJy0sHc+PcDwFB24dQ= +k8s.io/kube-openapi v0.0.0-20230717233707-2695361300d9/go.mod h1:wZK2AVp1uHCp4VamDVgBP2COHZjqD1T68Rf0CM3YjSM= k8s.io/utils v0.0.0-20230711102312-30195339c3c7 h1:ZgnF1KZsYxWIifwSNZFZgNtWE89WI5yiP5WwlfDoIyc= k8s.io/utils v0.0.0-20230711102312-30195339c3c7/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= diff --git a/promql/bench_test.go b/promql/bench_test.go index a4bde6c416..8e443b5a6a 100644 --- a/promql/bench_test.go +++ b/promql/bench_test.go @@ -28,6 +28,8 @@ import ( ) func setupRangeQueryTestData(stor *teststorage.TestStorage, _ *Engine, interval, numIntervals int) error { + ctx := context.Background() + metrics := []labels.Labels{} metrics = append(metrics, labels.FromStrings("__name__", "a_one")) metrics = append(metrics, labels.FromStrings("__name__", "b_one")) @@ -67,7 +69,7 @@ func setupRangeQueryTestData(stor *teststorage.TestStorage, _ *Engine, interval, } } stor.DB.ForceHeadMMap() // Ensure we have at most one head chunk for every series. 
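The benchmark setup below adapts to another signature from this change set: `(*tsdb.DB).Compact` now takes a context, as the following hunk shows. A minimal sketch, assuming the post-change `tsdb` API:

```go
package example

import (
	"context"

	"github.com/prometheus/prometheus/tsdb"
)

// compactNow mmaps the head chunks to disk and triggers a compaction
// that can be cancelled through ctx.
func compactNow(ctx context.Context, db *tsdb.DB) error {
	db.ForceHeadMMap()     // ensure at most one head chunk per series
	return db.Compact(ctx) // was: db.Compact()
}
```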
- stor.DB.Compact() + stor.DB.Compact(ctx) return nil } @@ -154,7 +156,8 @@ func rangeQueryCases() []benchCase { expr: "sum by (le)(h_X)", }, { - expr: "count_values('value', h_X)", + expr: "count_values('value', h_X)", + steps: 100, }, { expr: "topk(1, a_X)", @@ -214,7 +217,6 @@ func rangeQueryCases() []benchCase { tmp = append(tmp, c) } else { tmp = append(tmp, benchCase{expr: c.expr, steps: 1}) - tmp = append(tmp, benchCase{expr: c.expr, steps: 10}) tmp = append(tmp, benchCase{expr: c.expr, steps: 100}) tmp = append(tmp, benchCase{expr: c.expr, steps: 1000}) } diff --git a/promql/engine.go b/promql/engine.go index 3f50dc3b07..8954b98597 100644 --- a/promql/engine.go +++ b/promql/engine.go @@ -44,6 +44,7 @@ import ( "github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/tsdb/chunkenc" + "github.com/prometheus/prometheus/util/annotations" "github.com/prometheus/prometheus/util/stats" "github.com/prometheus/prometheus/util/zeropool" ) @@ -574,7 +575,7 @@ func (ng *Engine) newTestQuery(f func(context.Context) error) Query { // // At this point per query only one EvalStmt is evaluated. Alert and record // statements are not handled by the Engine. -func (ng *Engine) exec(ctx context.Context, q *query) (v parser.Value, ws storage.Warnings, err error) { +func (ng *Engine) exec(ctx context.Context, q *query) (v parser.Value, ws annotations.Annotations, err error) { ng.metrics.currentQueries.Inc() defer func() { ng.metrics.currentQueries.Dec() @@ -667,17 +668,17 @@ func durationMilliseconds(d time.Duration) int64 { } // execEvalStmt evaluates the expression of an evaluation statement for the given time range. -func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *parser.EvalStmt) (parser.Value, storage.Warnings, error) { +func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *parser.EvalStmt) (parser.Value, annotations.Annotations, error) { prepareSpanTimer, ctxPrepare := query.stats.GetSpanTimer(ctx, stats.QueryPreparationTime, ng.metrics.queryPrepareTime) mint, maxt := ng.findMinMaxTime(s) - querier, err := query.queryable.Querier(ctxPrepare, mint, maxt) + querier, err := query.queryable.Querier(mint, maxt) if err != nil { prepareSpanTimer.Finish() return nil, nil, err } defer querier.Close() - ng.populateSeries(querier, s) + ng.populateSeries(ctxPrepare, querier, s) prepareSpanTimer.Finish() // Modify the offset of vector and matrix selectors for the @ modifier @@ -891,7 +892,7 @@ func (ng *Engine) getLastSubqueryInterval(path []parser.Node) time.Duration { return interval } -func (ng *Engine) populateSeries(querier storage.Querier, s *parser.EvalStmt) { +func (ng *Engine) populateSeries(ctx context.Context, querier storage.Querier, s *parser.EvalStmt) { // Whenever a MatrixSelector is evaluated, evalRange is set to the corresponding range. // The evaluation of the VectorSelector inside then evaluates the given range and unsets // the variable. @@ -914,7 +915,7 @@ func (ng *Engine) populateSeries(querier storage.Querier, s *parser.EvalStmt) { } evalRange = 0 hints.By, hints.Grouping = extractGroupsFromPath(path) - n.UnexpandedSeriesSet = querier.Select(false, hints, n.LabelMatchers...) + n.UnexpandedSeriesSet = querier.Select(ctx, false, hints, n.LabelMatchers...) 
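The engine.go hunks above begin the wholesale replacement of `storage.Warnings` (a plain error slice) with `annotations.Annotations` from the new `util/annotations` package: accumulation switches from `append` to `Merge`. A minimal sketch of the pattern, assuming the Merge semantics this diff relies on (a nil set can be merged into in place):

```go
package example

import (
	"github.com/prometheus/prometheus/util/annotations"
)

// combine mirrors the new accumulation idiom used throughout the
// evaluator: Merge folds one annotation set into another, replacing
// the old `warnings = append(warnings, ws...)` pattern.
func combine(batches ...annotations.Annotations) annotations.Annotations {
	var warnings annotations.Annotations
	for _, b := range batches {
		warnings.Merge(b)
	}
	return warnings
}
```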
case *parser.MatrixSelector: evalRange = n.Range @@ -953,7 +954,7 @@ func extractGroupsFromPath(p []parser.Node) (bool, []string) { return false, nil } -func checkAndExpandSeriesSet(ctx context.Context, expr parser.Expr) (storage.Warnings, error) { +func checkAndExpandSeriesSet(ctx context.Context, expr parser.Expr) (annotations.Annotations, error) { switch e := expr.(type) { case *parser.MatrixSelector: return checkAndExpandSeriesSet(ctx, e.VectorSelector) @@ -968,7 +969,7 @@ func checkAndExpandSeriesSet(ctx context.Context, expr parser.Expr) (storage.War return nil, nil } -func expandSeriesSet(ctx context.Context, it storage.SeriesSet) (res []storage.Series, ws storage.Warnings, err error) { +func expandSeriesSet(ctx context.Context, it storage.SeriesSet) (res []storage.Series, ws annotations.Annotations, err error) { for it.Next() { select { case <-ctx.Done(): @@ -982,7 +983,7 @@ func expandSeriesSet(ctx context.Context, it storage.SeriesSet) (res []storage.S type errWithWarnings struct { err error - warnings storage.Warnings + warnings annotations.Annotations } func (e errWithWarnings) Error() string { return e.err.Error() } @@ -1017,7 +1018,7 @@ func (ev *evaluator) error(err error) { } // recover is the handler that turns panics into returns from the top level of evaluation. -func (ev *evaluator) recover(expr parser.Expr, ws *storage.Warnings, errp *error) { +func (ev *evaluator) recover(expr parser.Expr, ws *annotations.Annotations, errp *error) { e := recover() if e == nil { return @@ -1033,7 +1034,7 @@ func (ev *evaluator) recover(expr parser.Expr, ws *storage.Warnings, errp *error *errp = fmt.Errorf("unexpected error: %w", err) case errWithWarnings: *errp = err.err - *ws = append(*ws, err.warnings...) + ws.Merge(err.warnings) case error: *errp = err default: @@ -1041,7 +1042,7 @@ func (ev *evaluator) recover(expr parser.Expr, ws *storage.Warnings, errp *error } } -func (ev *evaluator) Eval(expr parser.Expr) (v parser.Value, ws storage.Warnings, err error) { +func (ev *evaluator) Eval(expr parser.Expr) (v parser.Value, ws annotations.Annotations, err error) { defer ev.recover(expr, &ws, &err) v, ws = ev.eval(expr) @@ -1110,19 +1111,19 @@ func (enh *EvalNodeHelper) DropMetricName(l labels.Labels) labels.Labels { // function call results. // The prepSeries function (if provided) can be used to prepare the helper // for each series, then passed to each call funcCall. -func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper), funcCall func([]parser.Value, [][]EvalSeriesHelper, *EvalNodeHelper) (Vector, storage.Warnings), exprs ...parser.Expr) (Matrix, storage.Warnings) { +func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper), funcCall func([]parser.Value, [][]EvalSeriesHelper, *EvalNodeHelper) (Vector, annotations.Annotations), exprs ...parser.Expr) (Matrix, annotations.Annotations) { numSteps := int((ev.endTimestamp-ev.startTimestamp)/ev.interval) + 1 matrixes := make([]Matrix, len(exprs)) origMatrixes := make([]Matrix, len(exprs)) originalNumSamples := ev.currentSamples - var warnings storage.Warnings + var warnings annotations.Annotations for i, e := range exprs { // Functions will take string arguments from the expressions, not the values. if e != nil && e.Type() != parser.ValueTypeString { // ev.currentSamples will be updated to the correct value within the ev.eval call. val, ws := ev.eval(e) - warnings = append(warnings, ws...) 
+ warnings.Merge(ws) matrixes[i] = val.(Matrix) // Keep a copy of the original point slices so that they @@ -1234,7 +1235,7 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper) enh.Ts = ts result, ws := funcCall(args, bufHelpers, enh) enh.Out = result[:0] // Reuse result vector. - warnings = append(warnings, ws...) + warnings.Merge(ws) ev.currentSamples += len(result) // When we reset currentSamples to tempNumSamples during the next iteration of the loop it also @@ -1311,7 +1312,7 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper) // evalSubquery evaluates given SubqueryExpr and returns an equivalent // evaluated MatrixSelector in its place. Note that the Name and LabelMatchers are not set. -func (ev *evaluator) evalSubquery(subq *parser.SubqueryExpr) (*parser.MatrixSelector, int, storage.Warnings) { +func (ev *evaluator) evalSubquery(subq *parser.SubqueryExpr) (*parser.MatrixSelector, int, annotations.Annotations) { samplesStats := ev.samplesStats // Avoid double counting samples when running a subquery, those samples will be counted in later stage. ev.samplesStats = ev.samplesStats.NewChild() @@ -1344,7 +1345,7 @@ func (ev *evaluator) evalSubquery(subq *parser.SubqueryExpr) (*parser.MatrixSele } // eval evaluates the given expression as the given AST expression node requires. -func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { +func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotations) { // This is the top-level evaluation method. // Thus, we check for timeout/cancellation here. if err := contextDone(ev.ctx, "expression evaluation"); err != nil { @@ -1373,17 +1374,17 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { param := unwrapStepInvariantExpr(e.Param) unwrapParenExpr(¶m) if s, ok := param.(*parser.StringLiteral); ok { - return ev.rangeEval(initSeries, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { - return ev.aggregation(e.Op, sortedGrouping, e.Without, s.Val, v[0].(Vector), sh[0], enh), nil + return ev.rangeEval(initSeries, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return ev.aggregation(e, sortedGrouping, s.Val, v[0].(Vector), sh[0], enh) }, e.Expr) } - return ev.rangeEval(initSeries, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.rangeEval(initSeries, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { var param float64 if e.Param != nil { param = v[0].(Vector)[0].F } - return ev.aggregation(e.Op, sortedGrouping, e.Without, param, v[1].(Vector), sh[1], enh), nil + return ev.aggregation(e, sortedGrouping, param, v[1].(Vector), sh[1], enh) }, e.Param, e.Expr) case *parser.Call: @@ -1405,7 +1406,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { var ( matrixArgIndex int matrixArg bool - warnings storage.Warnings + warnings annotations.Annotations ) for i := range e.Args { unwrapParenExpr(&e.Args[i]) @@ -1423,7 +1424,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { // Replacing parser.SubqueryExpr with parser.MatrixSelector. val, totalSamples, ws := ev.evalSubquery(subq) e.Args[i] = val - warnings = append(warnings, ws...) + warnings.Merge(ws) defer func() { // subquery result takes space in the memory. Get rid of that at the end. 
val.VectorSelector.(*parser.VectorSelector).Series = nil @@ -1434,8 +1435,9 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { } if !matrixArg { // Does not have a matrix argument. - return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { - return call(v, e.Args, enh), warnings + return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + vec, annos := call(v, e.Args, enh) + return vec, warnings.Merge(annos) }, e.Args...) } @@ -1449,7 +1451,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { otherArgs[i] = val.(Matrix) otherInArgs[i] = Vector{Sample{}} inArgs[i] = otherInArgs[i] - warnings = append(warnings, ws...) + warnings.Merge(ws) } } @@ -1460,7 +1462,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { selVS := sel.VectorSelector.(*parser.VectorSelector) ws, err := checkAndExpandSeriesSet(ev.ctx, sel) - warnings = append(warnings, ws...) + warnings.Merge(ws) if err != nil { ev.error(errWithWarnings{fmt.Errorf("expanding series: %w", err), warnings}) } @@ -1523,8 +1525,10 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { inMatrix[0].Histograms = histograms enh.Ts = ts // Make the function call. - outVec := call(inArgs, e.Args, enh) + outVec, annos := call(inArgs, e.Args, enh) + warnings.Merge(annos) ev.samplesStats.IncrementSamplesAtStep(step, int64(len(floats)+len(histograms))) + enh.Out = outVec[:0] if len(outVec) > 0 { if outVec[0].H == nil { @@ -1627,7 +1631,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { case *parser.BinaryExpr: switch lt, rt := e.LHS.Type(), e.RHS.Type(); { case lt == parser.ValueTypeScalar && rt == parser.ValueTypeScalar: - return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { val := scalarBinop(e.Op, v[0].(Vector)[0].F, v[1].(Vector)[0].F) return append(enh.Out, Sample{F: val}), nil }, e.LHS, e.RHS) @@ -1640,36 +1644,36 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { } switch e.Op { case parser.LAND: - return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return ev.VectorAnd(v[0].(Vector), v[1].(Vector), e.VectorMatching, sh[0], sh[1], enh), nil }, e.LHS, e.RHS) case parser.LOR: - return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return ev.VectorOr(v[0].(Vector), v[1].(Vector), e.VectorMatching, sh[0], sh[1], enh), nil }, e.LHS, e.RHS) case parser.LUNLESS: - return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return ev.VectorUnless(v[0].(Vector), v[1].(Vector), 
e.VectorMatching, sh[0], sh[1], enh), nil }, e.LHS, e.RHS) default: - return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.rangeEval(initSignatures, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return ev.VectorBinop(e.Op, v[0].(Vector), v[1].(Vector), e.VectorMatching, e.ReturnBool, sh[0], sh[1], enh), nil }, e.LHS, e.RHS) } case lt == parser.ValueTypeVector && rt == parser.ValueTypeScalar: - return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return ev.VectorscalarBinop(e.Op, v[0].(Vector), Scalar{V: v[1].(Vector)[0].F}, false, e.ReturnBool, enh), nil }, e.LHS, e.RHS) case lt == parser.ValueTypeScalar && rt == parser.ValueTypeVector: - return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return ev.VectorscalarBinop(e.Op, v[1].(Vector), Scalar{V: v[0].(Vector)[0].F}, true, e.ReturnBool, enh), nil }, e.LHS, e.RHS) } case *parser.NumberLiteral: - return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return append(enh.Out, Sample{F: e.Val, Metric: labels.EmptyLabels()}), nil }) @@ -1835,7 +1839,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) { panic(fmt.Errorf("unhandled expression of type: %T", expr)) } -func (ev *evaluator) rangeEvalTimestampFunctionOverVectorSelector(vs *parser.VectorSelector, call FunctionCall, e *parser.Call) (parser.Value, storage.Warnings) { +func (ev *evaluator) rangeEvalTimestampFunctionOverVectorSelector(vs *parser.VectorSelector, call FunctionCall, e *parser.Call) (parser.Value, annotations.Annotations) { ws, err := checkAndExpandSeriesSet(ev.ctx, vs) if err != nil { ev.error(errWithWarnings{fmt.Errorf("expanding series: %w", err), ws}) @@ -1847,7 +1851,7 @@ func (ev *evaluator) rangeEvalTimestampFunctionOverVectorSelector(vs *parser.Vec seriesIterators[i] = storage.NewMemoizedIterator(it, durationMilliseconds(ev.lookbackDelta)) } - return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) { + return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { if vs.Timestamp != nil { // This is a special case for "timestamp()" when the @ modifier is used, to ensure that // we return a point for each time step in this case. @@ -1875,7 +1879,8 @@ func (ev *evaluator) rangeEvalTimestampFunctionOverVectorSelector(vs *parser.Vec } } ev.samplesStats.UpdatePeak(ev.currentSamples) - return call([]parser.Value{vec}, e.Args, enh), ws + vec, annos := call([]parser.Value{vec}, e.Args, enh) + return vec, ws.Merge(annos) }) } @@ -1946,7 +1951,7 @@ func putHPointSlice(p []HPoint) { } // matrixSelector evaluates a *parser.MatrixSelector expression. 
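Further down, `aggregation` is reworked to take the whole `*parser.AggregateExpr` instead of its individual fields, which is what lets it attach source positions to the new warnings (`NewMixedFloatsHistogramsWarning`, `NewInvalidQuantileWarning`). A self-contained sketch of raising such a position-carrying warning, assuming the constructors shown in this diff:

```go
package example

import (
	"github.com/prometheus/prometheus/promql/parser"
	"github.com/prometheus/prometheus/util/annotations"
)

// checkQuantileParam parses a PromQL expression and, if it is an
// aggregation whose quantile parameter is out of range, records a
// warning pointing at the parameter's position in the query string.
func checkQuantileParam(input string, q float64) (annotations.Annotations, error) {
	expr, err := parser.ParseExpr(input)
	if err != nil {
		return nil, err
	}
	var annos annotations.Annotations
	if agg, ok := expr.(*parser.AggregateExpr); ok && agg.Param != nil && (q < 0 || q > 1) {
		annos.Add(annotations.NewInvalidQuantileWarning(q, agg.Param.PositionRange()))
	}
	return annos, nil
}
```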
-func (ev *evaluator) matrixSelector(node *parser.MatrixSelector) (Matrix, storage.Warnings) { +func (ev *evaluator) matrixSelector(node *parser.MatrixSelector) (Matrix, annotations.Annotations) { var ( vs = node.VectorSelector.(*parser.VectorSelector) @@ -2526,7 +2531,10 @@ type groupedAggregation struct { // aggregation evaluates an aggregation operation on a Vector. The provided grouping labels // must be sorted. -func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without bool, param interface{}, vec Vector, seriesHelper []EvalSeriesHelper, enh *EvalNodeHelper) Vector { +func (ev *evaluator) aggregation(e *parser.AggregateExpr, grouping []string, param interface{}, vec Vector, seriesHelper []EvalSeriesHelper, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + op := e.Op + without := e.Without + annos := annotations.Annotations{} result := map[uint64]*groupedAggregation{} orderedResult := []*groupedAggregation{} var k int64 @@ -2537,7 +2545,7 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without } k = int64(f) if k < 1 { - return Vector{} + return Vector{}, annos } } var q float64 @@ -2790,7 +2798,8 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without case parser.AVG: if aggr.hasFloat && aggr.hasHistogram { // We cannot aggregate histogram sample with a float64 sample. - // TODO(zenador): Issue warning when plumbing is in place. + metricName := aggr.labels.Get(labels.MetricName) + annos.Add(annotations.NewMixedFloatsHistogramsWarning(metricName, e.Expr.PositionRange())) continue } if aggr.hasHistogram { @@ -2835,12 +2844,16 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without continue // Bypass default append. case parser.QUANTILE: + if math.IsNaN(q) || q < 0 || q > 1 { + annos.Add(annotations.NewInvalidQuantileWarning(q, e.Param.PositionRange())) + } aggr.floatValue = quantile(q, aggr.heap) case parser.SUM: if aggr.hasFloat && aggr.hasHistogram { // We cannot aggregate histogram sample with a float64 sample. - // TODO(zenador): Issue warning when plumbing is in place. 
+ metricName := aggr.labels.Get(labels.MetricName) + annos.Add(annotations.NewMixedFloatsHistogramsWarning(metricName, e.Expr.PositionRange())) continue } if aggr.hasHistogram { @@ -2856,7 +2869,7 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without H: aggr.histogramValue, }) } - return enh.Out + return enh.Out, annos } // groupingKey builds and returns the grouping key for the given metric and diff --git a/promql/engine_test.go b/promql/engine_test.go index 20bd3ca638..d628bd128f 100644 --- a/promql/engine_test.go +++ b/promql/engine_test.go @@ -33,8 +33,10 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/timestamp" "github.com/prometheus/prometheus/promql/parser" + "github.com/prometheus/prometheus/promql/parser/posrange" "github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/tsdb/tsdbutil" + "github.com/prometheus/prometheus/util/annotations" "github.com/prometheus/prometheus/util/stats" "github.com/prometheus/prometheus/util/teststorage" ) @@ -195,15 +197,15 @@ type errQuerier struct { err error } -func (q *errQuerier) Select(bool, *storage.SelectHints, ...*labels.Matcher) storage.SeriesSet { +func (q *errQuerier) Select(context.Context, bool, *storage.SelectHints, ...*labels.Matcher) storage.SeriesSet { return errSeriesSet{err: q.err} } -func (*errQuerier) LabelValues(string, ...*labels.Matcher) ([]string, storage.Warnings, error) { +func (*errQuerier) LabelValues(context.Context, string, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, nil } -func (*errQuerier) LabelNames(...*labels.Matcher) ([]string, storage.Warnings, error) { +func (*errQuerier) LabelNames(context.Context, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, nil } func (*errQuerier) Close() error { return nil } @@ -213,10 +215,10 @@ type errSeriesSet struct { err error } -func (errSeriesSet) Next() bool { return false } -func (errSeriesSet) At() storage.Series { return nil } -func (e errSeriesSet) Err() error { return e.err } -func (e errSeriesSet) Warnings() storage.Warnings { return nil } +func (errSeriesSet) Next() bool { return false } +func (errSeriesSet) At() storage.Series { return nil } +func (e errSeriesSet) Err() error { return e.err } +func (e errSeriesSet) Warnings() annotations.Annotations { return nil } func TestQueryError(t *testing.T) { opts := EngineOpts{ @@ -227,7 +229,7 @@ func TestQueryError(t *testing.T) { } engine := NewEngine(opts) errStorage := ErrStorage{errors.New("storage error")} - queryable := storage.QueryableFunc(func(ctx context.Context, mint, maxt int64) (storage.Querier, error) { + queryable := storage.QueryableFunc(func(mint, maxt int64) (storage.Querier, error) { return &errQuerier{err: errStorage}, nil }) ctx, cancelCtx := context.WithCancel(context.Background()) @@ -252,7 +254,7 @@ type noopHintRecordingQueryable struct { hints []*storage.SelectHints } -func (h *noopHintRecordingQueryable) Querier(context.Context, int64, int64) (storage.Querier, error) { +func (h *noopHintRecordingQueryable) Querier(int64, int64) (storage.Querier, error) { return &hintRecordingQuerier{Querier: &errQuerier{}, h: h}, nil } @@ -262,9 +264,9 @@ type hintRecordingQuerier struct { h *noopHintRecordingQueryable } -func (h *hintRecordingQuerier) Select(sortSeries bool, hints *storage.SelectHints, matchers ...*labels.Matcher) storage.SeriesSet { +func (h *hintRecordingQuerier) Select(ctx context.Context, sortSeries bool, hints 
*storage.SelectHints, matchers ...*labels.Matcher) storage.SeriesSet { h.h.hints = append(h.h.hints, hints) - return h.Querier.Select(sortSeries, hints, matchers...) + return h.Querier.Select(ctx, sortSeries, hints, matchers...) } func TestSelectHintsSetCorrectly(t *testing.T) { @@ -1676,9 +1678,9 @@ func TestRecoverEvaluatorError(t *testing.T) { func TestRecoverEvaluatorErrorWithWarnings(t *testing.T) { ev := &evaluator{logger: log.NewNopLogger()} var err error - var ws storage.Warnings + var ws annotations.Annotations - warnings := storage.Warnings{errors.New("custom warning")} + warnings := annotations.New().Add(errors.New("custom warning")) e := errWithWarnings{ err: errors.New("custom error"), warnings: warnings, @@ -2147,7 +2149,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { expected: &parser.StepInvariantExpr{ Expr: &parser.NumberLiteral{ Val: 123.4567, - PosRange: parser.PositionRange{Start: 0, End: 8}, + PosRange: posrange.PositionRange{Start: 0, End: 8}, }, }, }, @@ -2156,7 +2158,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { expected: &parser.StepInvariantExpr{ Expr: &parser.StringLiteral{ Val: "foo", - PosRange: parser.PositionRange{Start: 0, End: 5}, + PosRange: posrange.PositionRange{Start: 0, End: 5}, }, }, }, @@ -2169,7 +2171,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -2179,7 +2181,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 6, End: 9, }, @@ -2196,7 +2198,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -2207,7 +2209,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 6, End: 14, }, @@ -2227,7 +2229,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 8, }, @@ -2238,7 +2240,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 11, End: 19, }, @@ -2256,7 +2258,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 4, }, @@ -2276,7 +2278,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { parser.MustLabelMatcher(labels.MatchEqual, "a", "b"), parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 11, }, @@ -2295,13 +2297,13 @@ 
func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 13, End: 24, }, }, Grouping: []string{"foo"}, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 25, }, @@ -2317,14 +2319,14 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 13, End: 29, }, Timestamp: makeInt64Pointer(10000), }, Grouping: []string{"foo"}, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 30, }, @@ -2344,13 +2346,13 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric1"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 4, End: 21, }, Timestamp: makeInt64Pointer(10000), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 22, }, @@ -2362,13 +2364,13 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric2"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 29, End: 46, }, Timestamp: makeInt64Pointer(20000), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 25, End: 47, }, @@ -2388,7 +2390,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 11, }, @@ -2405,7 +2407,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 29, End: 40, }, @@ -2415,19 +2417,19 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { EndPos: 49, }, }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 24, End: 50, }, }, Param: &parser.NumberLiteral{ Val: 5, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 21, End: 22, }, }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 16, End: 51, }, @@ -2440,7 +2442,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { expected: &parser.Call{ Func: parser.MustGetFunction("time"), Args: parser.Expressions{}, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 6, }, @@ -2455,7 +2457,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"), parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 14, }, @@ -2475,7 +2477,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"), parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 14, }, @@ 
-2500,13 +2502,13 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"), parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 4, End: 23, }, Timestamp: makeInt64Pointer(20000), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 24, }, @@ -2537,7 +2539,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { parser.MustLabelMatcher(labels.MatchEqual, "bar", "baz"), parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 33, }, @@ -2546,7 +2548,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { EndPos: 37, }, }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 14, End: 38, }, @@ -2556,7 +2558,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { EndPos: 56, }, }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 57, }, @@ -2576,7 +2578,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 27, }, @@ -2598,7 +2600,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 11, }, @@ -2626,7 +2628,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 1, End: 4, }, @@ -2639,14 +2641,14 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { parser.MustLabelMatcher(labels.MatchEqual, "__name__", "bar"), }, Timestamp: makeInt64Pointer(1234000), - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 7, End: 27, }, }, }, }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 28, }, @@ -2677,18 +2679,18 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "metric"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 8, End: 19, }, Timestamp: makeInt64Pointer(10000), }}, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 4, End: 20, }, }}, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 21, }, @@ -2710,13 +2712,13 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric1"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 8, End: 25, }, Timestamp: makeInt64Pointer(10000), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 4, End: 26, }, @@ -2728,19 +2730,19 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric2"), }, - PosRange: parser.PositionRange{ + PosRange: 
posrange.PositionRange{ Start: 33, End: 50, }, Timestamp: makeInt64Pointer(20000), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 29, End: 52, }, }, }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 52, }, @@ -2755,7 +2757,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 13, }, @@ -2772,7 +2774,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "foo"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 11, }, @@ -2792,7 +2794,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 4, }, @@ -2813,7 +2815,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "test"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 4, }, @@ -2832,7 +2834,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 11, }, @@ -2854,7 +2856,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 11, }, @@ -2884,7 +2886,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { LabelMatchers: []*labels.Matcher{ parser.MustLabelMatcher(labels.MatchEqual, "__name__", "some_metric"), }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 6, End: 17, }, @@ -2895,20 +2897,20 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { Op: parser.MUL, LHS: &parser.NumberLiteral{ Val: 3, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 21, End: 22, }, }, RHS: &parser.NumberLiteral{ Val: 1024, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 25, End: 29, }, }, }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 20, End: 30, }, @@ -2916,7 +2918,7 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { }, }, }, - PosRange: parser.PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 31, }, diff --git a/promql/functions.go b/promql/functions.go index 5c39d6bd8a..b1245f5a13 100644 --- a/promql/functions.go +++ b/promql/functions.go @@ -28,6 +28,8 @@ import ( "github.com/prometheus/prometheus/model/histogram" "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/promql/parser" + "github.com/prometheus/prometheus/promql/parser/posrange" + "github.com/prometheus/prometheus/util/annotations" ) // FunctionCall is the type of a PromQL function implementation @@ -51,20 +53,20 @@ import ( // metrics, the timestamp are not needed. // // Scalar results should be returned as the value of a sample in a Vector. 
-type FunctionCall func(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector +type FunctionCall func(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) // === time() float64 === -func funcTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +func funcTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return Vector{Sample{ F: float64(enh.Ts) / 1000, - }} + }}, nil } // extrapolatedRate is a utility function for rate/increase/delta. // It calculates the rate (allowing for counter resets if isCounter is true), // extrapolates if the first/last sample is close to the boundary, and returns // the result as either per-second (if isRate is true) or overall. -func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper, isCounter, isRate bool) Vector { +func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper, isCounter, isRate bool) (Vector, annotations.Annotations) { ms := args[0].(*parser.MatrixSelector) vs := ms.VectorSelector.(*parser.VectorSelector) var ( @@ -75,14 +77,19 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod resultHistogram *histogram.FloatHistogram firstT, lastT int64 numSamplesMinusOne int + annos = annotations.Annotations{} ) // We need either at least two Histograms and no Floats, or at least two // Floats and no Histograms to calculate a rate. Otherwise, drop this // Vector element. + metricName := samples.Metric.Get(labels.MetricName) if len(samples.Histograms) > 0 && len(samples.Floats) > 0 { - // Mix of histograms and floats. TODO(beorn7): Communicate this failure reason. - return enh.Out + return enh.Out, annos.Add(annotations.NewMixedFloatsHistogramsWarning(metricName, args[0].PositionRange())) + } + + if isCounter && !strings.HasSuffix(metricName, "_total") && !strings.HasSuffix(metricName, "_sum") && !strings.HasSuffix(metricName, "_count") { + annos.Add(annotations.NewPossibleNonCounterInfo(metricName, args[0].PositionRange())) } switch { @@ -90,11 +97,11 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod numSamplesMinusOne = len(samples.Histograms) - 1 firstT = samples.Histograms[0].T lastT = samples.Histograms[numSamplesMinusOne].T - resultHistogram = histogramRate(samples.Histograms, isCounter) + var newAnnos annotations.Annotations + resultHistogram, newAnnos = histogramRate(samples.Histograms, isCounter, metricName, args[0].PositionRange()) if resultHistogram == nil { // The histograms are not compatible with each other. - // TODO(beorn7): Communicate this failure reason. - return enh.Out + return enh.Out, annos.Merge(newAnnos) } case len(samples.Floats) > 1: numSamplesMinusOne = len(samples.Floats) - 1 @@ -113,8 +120,8 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod prevValue = currPoint.F } default: - // Not enough samples. TODO(beorn7): Communicate this failure reason. - return enh.Out + // TODO: add RangeTooShortWarning + return enh.Out, annos } // Duration between first/last samples and boundary of range. @@ -165,17 +172,18 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod resultHistogram.Mul(factor) } - return append(enh.Out, Sample{F: resultFloat, H: resultHistogram}) + return append(enh.Out, Sample{F: resultFloat, H: resultHistogram}), annos } // histogramRate is a helper function for extrapolatedRate. 
It requires // points[0] to be a histogram. It returns nil if any other Point in points is -// not a histogram. -func histogramRate(points []HPoint, isCounter bool) *histogram.FloatHistogram { +// not a histogram, and a warning wrapped in an annotation in that case. +// Otherwise, it returns the calculated histogram and an empty annotation. +func histogramRate(points []HPoint, isCounter bool, metricName string, pos posrange.PositionRange) (*histogram.FloatHistogram, annotations.Annotations) { prev := points[0].H last := points[len(points)-1].H if last == nil { - return nil // Range contains a mix of histograms and floats. + return nil, annotations.New().Add(annotations.NewMixedFloatsHistogramsWarning(metricName, pos)) } minSchema := prev.Schema if last.Schema < minSchema { @@ -190,7 +198,7 @@ func histogramRate(points []HPoint, isCounter bool) *histogram.FloatHistogram { for _, currPoint := range points[1 : len(points)-1] { curr := currPoint.H if curr == nil { - return nil // Range contains a mix of histograms and floats. + return nil, annotations.New().Add(annotations.NewMixedFloatsHistogramsWarning(metricName, pos)) } // TODO(trevorwhitney): Check if isCounter is consistent with curr.CounterResetHint. if !isCounter { @@ -216,40 +224,41 @@ func histogramRate(points []HPoint, isCounter bool) *histogram.FloatHistogram { } h.CounterResetHint = histogram.GaugeType - return h.Compact(0) + return h.Compact(0), nil } -// === delta(Matrix parser.ValueTypeMatrix) Vector === -func funcDelta(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === delta(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcDelta(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return extrapolatedRate(vals, args, enh, false, false) } -// === rate(node parser.ValueTypeMatrix) Vector === -func funcRate(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === rate(node parser.ValueTypeMatrix) (Vector, Annotations) === +func funcRate(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return extrapolatedRate(vals, args, enh, true, true) } -// === increase(node parser.ValueTypeMatrix) Vector === -func funcIncrease(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === increase(node parser.ValueTypeMatrix) (Vector, Annotations) === +func funcIncrease(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return extrapolatedRate(vals, args, enh, true, false) } -// === irate(node parser.ValueTypeMatrix) Vector === -func funcIrate(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === irate(node parser.ValueTypeMatrix) (Vector, Annotations) === +func funcIrate(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return instantValue(vals, enh.Out, true) } -// === idelta(node model.ValMatrix) Vector === -func funcIdelta(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === idelta(node model.ValMatrix) (Vector, Annotations) === +func funcIdelta(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return instantValue(vals, enh.Out, false) } -func instantValue(vals []parser.Value, out Vector, isRate bool) Vector { +func instantValue(vals []parser.Value, out Vector, isRate bool) (Vector, annotations.Annotations) { samples := 
vals[0].(Matrix)[0] // No sense in trying to compute a rate without at least two points. Drop // this Vector element. + // TODO: add RangeTooShortWarning if len(samples.Floats) < 2 { - return out + return out, nil } lastSample := samples.Floats[len(samples.Floats)-1] @@ -266,7 +275,7 @@ func instantValue(vals []parser.Value, out Vector, isRate bool) Vector { sampledInterval := lastSample.T - previousSample.T if sampledInterval == 0 { // Avoid dividing by 0. - return out + return out, nil } if isRate { @@ -274,7 +283,7 @@ func instantValue(vals []parser.Value, out Vector, isRate bool) Vector { resultValue /= float64(sampledInterval) / 1000 } - return append(out, Sample{F: resultValue}) + return append(out, Sample{F: resultValue}), nil } // Calculate the trend value at the given index i in raw data d. @@ -299,7 +308,7 @@ func calcTrendValue(i int, tf, s0, s1, b float64) float64 { // data. A lower smoothing factor increases the influence of historical data. The trend factor (0 < tf < 1) affects // how trends in historical data will affect the current data. A higher trend factor increases the influence. // of trends. Algorithm taken from https://en.wikipedia.org/wiki/Exponential_smoothing titled: "Double exponential smoothing". -func funcHoltWinters(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +func funcHoltWinters(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { samples := vals[0].(Matrix)[0] // The smoothing factor argument. @@ -320,7 +329,7 @@ func funcHoltWinters(vals []parser.Value, args parser.Expressions, enh *EvalNode // Can't do the smoothing operation with less than two points. if l < 2 { - return enh.Out + return enh.Out, nil } var s0, s1, b float64 @@ -342,34 +351,34 @@ func funcHoltWinters(vals []parser.Value, args parser.Expressions, enh *EvalNode s0, s1 = s1, x+y } - return append(enh.Out, Sample{F: s1}) + return append(enh.Out, Sample{F: s1}), nil } -// === sort(node parser.ValueTypeVector) Vector === -func funcSort(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === sort(node parser.ValueTypeVector) (Vector, Annotations) === +func funcSort(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { // NaN should sort to the bottom, so take descending sort with NaN first and // reverse it. byValueSorter := vectorByReverseValueHeap(vals[0].(Vector)) sort.Sort(sort.Reverse(byValueSorter)) - return Vector(byValueSorter) + return Vector(byValueSorter), nil } -// === sortDesc(node parser.ValueTypeVector) Vector === -func funcSortDesc(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === sortDesc(node parser.ValueTypeVector) (Vector, Annotations) === +func funcSortDesc(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { // NaN should sort to the bottom, so take ascending sort with NaN first and // reverse it. 
byValueSorter := vectorByValueHeap(vals[0].(Vector)) sort.Sort(sort.Reverse(byValueSorter)) - return Vector(byValueSorter) + return Vector(byValueSorter), nil } -// === clamp(Vector parser.ValueTypeVector, min, max Scalar) Vector === -func funcClamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === clamp(Vector parser.ValueTypeVector, min, max Scalar) (Vector, Annotations) === +func funcClamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { vec := vals[0].(Vector) min := vals[1].(Vector)[0].F max := vals[2].(Vector)[0].F if max < min { - return enh.Out + return enh.Out, nil } for _, el := range vec { enh.Out = append(enh.Out, Sample{ @@ -377,11 +386,11 @@ func funcClamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper F: math.Max(min, math.Min(max, el.F)), }) } - return enh.Out + return enh.Out, nil } -// === clamp_max(Vector parser.ValueTypeVector, max Scalar) Vector === -func funcClampMax(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === clamp_max(Vector parser.ValueTypeVector, max Scalar) (Vector, Annotations) === +func funcClampMax(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { vec := vals[0].(Vector) max := vals[1].(Vector)[0].F for _, el := range vec { @@ -390,11 +399,11 @@ func funcClampMax(vals []parser.Value, args parser.Expressions, enh *EvalNodeHel F: math.Min(max, el.F), }) } - return enh.Out + return enh.Out, nil } -// === clamp_min(Vector parser.ValueTypeVector, min Scalar) Vector === -func funcClampMin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === clamp_min(Vector parser.ValueTypeVector, min Scalar) (Vector, Annotations) === +func funcClampMin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { vec := vals[0].(Vector) min := vals[1].(Vector)[0].F for _, el := range vec { @@ -403,11 +412,11 @@ func funcClampMin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHel F: math.Max(min, el.F), }) } - return enh.Out + return enh.Out, nil } -// === round(Vector parser.ValueTypeVector, toNearest=1 Scalar) Vector === -func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === round(Vector parser.ValueTypeVector, toNearest=1 Scalar) (Vector, Annotations) === +func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { vec := vals[0].(Vector) // round returns a number rounded to toNearest. // Ties are solved by rounding up. 
@@ -425,16 +434,16 @@ func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper F: f, }) } - return enh.Out + return enh.Out, nil } // === Scalar(node parser.ValueTypeVector) Scalar === -func funcScalar(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +func funcScalar(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { v := vals[0].(Vector) if len(v) != 1 { - return append(enh.Out, Sample{F: math.NaN()}) + return append(enh.Out, Sample{F: math.NaN()}), nil } - return append(enh.Out, Sample{F: v[0].F}) + return append(enh.Out, Sample{F: v[0].F}), nil } func aggrOverTime(vals []parser.Value, enh *EvalNodeHelper, aggrFn func(Series) float64) Vector { @@ -449,13 +458,14 @@ func aggrHistOverTime(vals []parser.Value, enh *EvalNodeHelper, aggrFn func(Seri return append(enh.Out, Sample{H: aggrFn(el)}) } -// === avg_over_time(Matrix parser.ValueTypeMatrix) Vector === -func funcAvgOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - if len(vals[0].(Matrix)[0].Floats) > 0 && len(vals[0].(Matrix)[0].Histograms) > 0 { - // TODO(zenador): Add warning for mixed floats and histograms. - return enh.Out +// === avg_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcAvgOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + firstSeries := vals[0].(Matrix)[0] + if len(firstSeries.Floats) > 0 && len(firstSeries.Histograms) > 0 { + metricName := firstSeries.Metric.Get(labels.MetricName) + return enh.Out, annotations.New().Add(annotations.NewMixedFloatsHistogramsWarning(metricName, args[0].PositionRange())) } - if len(vals[0].(Matrix)[0].Floats) == 0 { + if len(firstSeries.Floats) == 0 { // The passed values only contain histograms. 
return aggrHistOverTime(vals, enh, func(s Series) *histogram.FloatHistogram { count := 1 @@ -475,7 +485,7 @@ func funcAvgOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode } } return mean - }) + }), nil } return aggrOverTime(vals, enh, func(s Series) float64 { var mean, count, c float64 @@ -505,18 +515,18 @@ func funcAvgOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode return mean } return mean + c - }) + }), nil } -// === count_over_time(Matrix parser.ValueTypeMatrix) Vector === -func funcCountOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === count_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcCountOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return aggrOverTime(vals, enh, func(s Series) float64 { return float64(len(s.Floats) + len(s.Histograms)) - }) + }), nil } -// === last_over_time(Matrix parser.ValueTypeMatrix) Vector === -func funcLastOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === last_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcLastOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { el := vals[0].(Matrix)[0] var f FPoint @@ -533,22 +543,22 @@ func funcLastOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNod return append(enh.Out, Sample{ Metric: el.Metric, F: f.F, - }) + }), nil } return append(enh.Out, Sample{ Metric: el.Metric, H: h.H, - }) + }), nil } -// === max_over_time(Matrix parser.ValueTypeMatrix) Vector === -func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === max_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { if len(vals[0].(Matrix)[0].Floats) == 0 { // TODO(beorn7): The passed values only contain // histograms. max_over_time ignores histograms for now. If // there are only histograms, we have to return without adding // anything to enh.Out. - return enh.Out + return enh.Out, nil } return aggrOverTime(vals, enh, func(s Series) float64 { max := s.Floats[0].F @@ -558,17 +568,17 @@ func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode } } return max - }) + }), nil } -// === min_over_time(Matrix parser.ValueTypeMatrix) Vector === -func funcMinOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === min_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcMinOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { if len(vals[0].(Matrix)[0].Floats) == 0 { // TODO(beorn7): The passed values only contain // histograms. min_over_time ignores histograms for now. If // there are only histograms, we have to return without adding // anything to enh.Out.
- return enh.Out + return enh.Out, nil } return aggrOverTime(vals, enh, func(s Series) float64 { min := s.Floats[0].F @@ -578,16 +588,17 @@ func funcMinOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode } } return min - }) + }), nil } -// === sum_over_time(Matrix parser.ValueTypeMatrix) Vector === -func funcSumOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - if len(vals[0].(Matrix)[0].Floats) > 0 && len(vals[0].(Matrix)[0].Histograms) > 0 { - // TODO(zenador): Add warning for mixed floats and histograms. - return enh.Out +// === sum_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcSumOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + firstSeries := vals[0].(Matrix)[0] + if len(firstSeries.Floats) > 0 && len(firstSeries.Histograms) > 0 { + metricName := firstSeries.Metric.Get(labels.MetricName) + return enh.Out, annotations.New().Add(annotations.NewMixedFloatsHistogramsWarning(metricName, args[0].PositionRange())) } - if len(vals[0].(Matrix)[0].Floats) == 0 { + if len(firstSeries.Floats) == 0 { // The passed values only contain histograms. return aggrHistOverTime(vals, enh, func(s Series) *histogram.FloatHistogram { sum := s.Histograms[0].H.Copy() @@ -601,7 +612,7 @@ func funcSumOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode } } return sum - }) + }), nil } return aggrOverTime(vals, enh, func(s Series) float64 { var sum, c float64 @@ -612,11 +623,11 @@ func funcSumOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode return sum } return sum + c - }) + }), nil } -// === quantile_over_time(Matrix parser.ValueTypeMatrix) Vector === -func funcQuantileOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === quantile_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcQuantileOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { q := vals[0].(Vector)[0].F el := vals[1].(Matrix)[0] if len(el.Floats) == 0 { @@ -624,24 +635,29 @@ func funcQuantileOverTime(vals []parser.Value, args parser.Expressions, enh *Eva // histograms. quantile_over_time ignores histograms for now. If // there are only histograms, we have to return without adding // anything to enh.Out. - return enh.Out + return enh.Out, nil + } + + annos := annotations.Annotations{} + if math.IsNaN(q) || q < 0 || q > 1 { + annos.Add(annotations.NewInvalidQuantileWarning(q, args[0].PositionRange())) } values := make(vectorByValueHeap, 0, len(el.Floats)) for _, f := range el.Floats { values = append(values, Sample{F: f.F}) } - return append(enh.Out, Sample{F: quantile(q, values)}) + return append(enh.Out, Sample{F: quantile(q, values)}), annos } -// === stddev_over_time(Matrix parser.ValueTypeMatrix) Vector === -func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === stddev_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { if len(vals[0].(Matrix)[0].Floats) == 0 { // TODO(beorn7): The passed values only contain // histograms. stddev_over_time ignores histograms for now. If // there are only histograms, we have to return without adding // anything to enh.Out. 
- return enh.Out + return enh.Out, nil } return aggrOverTime(vals, enh, func(s Series) float64 { var count float64 @@ -654,17 +670,17 @@ func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalN aux, cAux = kahanSumInc(delta*(f.F-(mean+cMean)), aux, cAux) } return math.Sqrt((aux + cAux) / count) - }) + }), nil } -// === stdvar_over_time(Matrix parser.ValueTypeMatrix) Vector === -func funcStdvarOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === stdvar_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcStdvarOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { if len(vals[0].(Matrix)[0].Floats) == 0 { // TODO(beorn7): The passed values only contain // histograms. stdvar_over_time ignores histograms for now. If // there are only histograms, we have to return without adding // anything to enh.Out. - return enh.Out + return enh.Out, nil } return aggrOverTime(vals, enh, func(s Series) float64 { var count float64 @@ -677,35 +693,35 @@ func funcStdvarOverTime(vals []parser.Value, args parser.Expressions, enh *EvalN aux, cAux = kahanSumInc(delta*(f.F-(mean+cMean)), aux, cAux) } return (aux + cAux) / count - }) + }), nil } -// === absent(Vector parser.ValueTypeVector) Vector === -func funcAbsent(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === absent(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcAbsent(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { if len(vals[0].(Vector)) > 0 { - return enh.Out + return enh.Out, nil } return append(enh.Out, Sample{ Metric: createLabelsForAbsentFunction(args[0]), F: 1, - }) + }), nil } -// === absent_over_time(Vector parser.ValueTypeMatrix) Vector === +// === absent_over_time(Vector parser.ValueTypeMatrix) (Vector, Annotations) === // As this function has a matrix as argument, it does not get all the Series. // This function will return 1 if the matrix has at least one element. // Due to engine optimization, this function is only called when this condition is true. // Then, the engine post-processes the results to get the expected output. 
-func funcAbsentOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return append(enh.Out, Sample{F: 1}) +func funcAbsentOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return append(enh.Out, Sample{F: 1}), nil } -// === present_over_time(Vector parser.ValueTypeMatrix) Vector === -func funcPresentOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === present_over_time(Vector parser.ValueTypeMatrix) (Vector, Annotations) === +func funcPresentOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return aggrOverTime(vals, enh, func(s Series) float64 { return 1 - }) + }), nil } func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float64) Vector { @@ -720,127 +736,127 @@ func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float6 return enh.Out } -// === abs(Vector parser.ValueTypeVector) Vector === -func funcAbs(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Abs) +// === abs(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcAbs(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Abs), nil } -// === ceil(Vector parser.ValueTypeVector) Vector === -func funcCeil(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Ceil) +// === ceil(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcCeil(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Ceil), nil } -// === floor(Vector parser.ValueTypeVector) Vector === -func funcFloor(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Floor) +// === floor(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcFloor(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Floor), nil } -// === exp(Vector parser.ValueTypeVector) Vector === -func funcExp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Exp) +// === exp(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcExp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Exp), nil } -// === sqrt(Vector VectorNode) Vector === -func funcSqrt(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Sqrt) +// === sqrt(Vector VectorNode) (Vector, Annotations) === +func funcSqrt(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Sqrt), nil } -// === ln(Vector parser.ValueTypeVector) Vector === -func funcLn(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Log) +// === ln(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcLn(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Log), nil } -// === log2(Vector parser.ValueTypeVector) Vector 
=== -func funcLog2(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Log2) +// === log2(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcLog2(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Log2), nil } -// === log10(Vector parser.ValueTypeVector) Vector === -func funcLog10(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Log10) +// === log10(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcLog10(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Log10), nil } -// === sin(Vector parser.ValueTypeVector) Vector === -func funcSin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Sin) +// === sin(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcSin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Sin), nil } -// === cos(Vector parser.ValueTypeVector) Vector === -func funcCos(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Cos) +// === cos(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcCos(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Cos), nil } -// === tan(Vector parser.ValueTypeVector) Vector === -func funcTan(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Tan) +// === tan(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcTan(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Tan), nil } -// == asin(Vector parser.ValueTypeVector) Vector === -func funcAsin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Asin) +// == asin(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcAsin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Asin), nil } -// == acos(Vector parser.ValueTypeVector) Vector === -func funcAcos(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Acos) +// == acos(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcAcos(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Acos), nil } -// == atan(Vector parser.ValueTypeVector) Vector === -func funcAtan(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Atan) +// == atan(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcAtan(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Atan), nil } -// == sinh(Vector parser.ValueTypeVector) Vector === -func funcSinh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Sinh) +// == sinh(Vector 
parser.ValueTypeVector) (Vector, Annotations) === +func funcSinh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Sinh), nil } -// == cosh(Vector parser.ValueTypeVector) Vector === -func funcCosh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Cosh) +// == cosh(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcCosh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Cosh), nil } -// == tanh(Vector parser.ValueTypeVector) Vector === -func funcTanh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Tanh) +// == tanh(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcTanh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Tanh), nil } -// == asinh(Vector parser.ValueTypeVector) Vector === -func funcAsinh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Asinh) +// == asinh(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcAsinh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Asinh), nil } -// == acosh(Vector parser.ValueTypeVector) Vector === -func funcAcosh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Acosh) +// == acosh(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcAcosh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Acosh), nil } -// == atanh(Vector parser.ValueTypeVector) Vector === -func funcAtanh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return simpleFunc(vals, enh, math.Atanh) +// == atanh(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcAtanh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return simpleFunc(vals, enh, math.Atanh), nil } -// === rad(Vector parser.ValueTypeVector) Vector === -func funcRad(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === rad(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcRad(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return simpleFunc(vals, enh, func(v float64) float64 { return v * math.Pi / 180 - }) + }), nil } -// === deg(Vector parser.ValueTypeVector) Vector === -func funcDeg(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === deg(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcDeg(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return simpleFunc(vals, enh, func(v float64) float64 { return v * 180 / math.Pi - }) + }), nil } // === pi() Scalar === -func funcPi(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { - return Vector{Sample{F: math.Pi}} +func funcPi(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return Vector{Sample{F: math.Pi}}, nil } -// === sgn(Vector parser.ValueTypeVector) 
Vector === -func funcSgn(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === sgn(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcSgn(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return simpleFunc(vals, enh, func(v float64) float64 { switch { case v < 0: @@ -850,11 +866,11 @@ func funcSgn(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) default: return v } - }) + }), nil } -// === timestamp(Vector parser.ValueTypeVector) Vector === -func funcTimestamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === timestamp(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcTimestamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { vec := vals[0].(Vector) for _, el := range vec { enh.Out = append(enh.Out, Sample{ @@ -862,7 +878,7 @@ func funcTimestamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHe F: float64(el.T) / 1000, }) } - return enh.Out + return enh.Out, nil } func kahanSum(samples []float64) float64 { @@ -931,39 +947,39 @@ func linearRegression(samples []FPoint, interceptTime int64) (slope, intercept f return slope, intercept } -// === deriv(node parser.ValueTypeMatrix) Vector === -func funcDeriv(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === deriv(node parser.ValueTypeMatrix) (Vector, Annotations) === +func funcDeriv(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { samples := vals[0].(Matrix)[0] // No sense in trying to compute a derivative without at least two points. // Drop this Vector element. if len(samples.Floats) < 2 { - return enh.Out + return enh.Out, nil } // We pass in an arbitrary timestamp that is near the values in use // to avoid floating point accuracy issues, see // https://github.com/prometheus/prometheus/issues/2674 slope, _ := linearRegression(samples.Floats, samples.Floats[0].T) - return append(enh.Out, Sample{F: slope}) + return append(enh.Out, Sample{F: slope}), nil } -// === predict_linear(node parser.ValueTypeMatrix, k parser.ValueTypeScalar) Vector === -func funcPredictLinear(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === predict_linear(node parser.ValueTypeMatrix, k parser.ValueTypeScalar) (Vector, Annotations) === +func funcPredictLinear(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { samples := vals[0].(Matrix)[0] duration := vals[1].(Vector)[0].F // No sense in trying to predict anything without at least two points. // Drop this Vector element. 
if len(samples.Floats) < 2 { - return enh.Out + return enh.Out, nil } slope, intercept := linearRegression(samples.Floats, enh.Ts) - return append(enh.Out, Sample{F: slope*duration + intercept}) + return append(enh.Out, Sample{F: slope*duration + intercept}), nil } -// === histogram_count(Vector parser.ValueTypeVector) Vector === -func funcHistogramCount(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === histogram_count(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcHistogramCount(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { inVec := vals[0].(Vector) for _, sample := range inVec { @@ -976,11 +992,11 @@ func funcHistogramCount(vals []parser.Value, args parser.Expressions, enh *EvalN F: sample.H.Count, }) } - return enh.Out + return enh.Out, nil } -// === histogram_sum(Vector parser.ValueTypeVector) Vector === -func funcHistogramSum(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === histogram_sum(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcHistogramSum(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { inVec := vals[0].(Vector) for _, sample := range inVec { @@ -993,11 +1009,11 @@ func funcHistogramSum(vals []parser.Value, args parser.Expressions, enh *EvalNod F: sample.H.Sum, }) } - return enh.Out + return enh.Out, nil } -// === histogram_stddev(Vector parser.ValueTypeVector) Vector === -func funcHistogramStdDev(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === histogram_stddev(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcHistogramStdDev(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { inVec := vals[0].(Vector) for _, sample := range inVec { @@ -1026,11 +1042,11 @@ func funcHistogramStdDev(vals []parser.Value, args parser.Expressions, enh *Eval F: math.Sqrt(variance), }) } - return enh.Out + return enh.Out, nil } -// === histogram_stdvar(Vector parser.ValueTypeVector) Vector === -func funcHistogramStdVar(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === histogram_stdvar(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcHistogramStdVar(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { inVec := vals[0].(Vector) for _, sample := range inVec { @@ -1059,11 +1075,11 @@ func funcHistogramStdVar(vals []parser.Value, args parser.Expressions, enh *Eval F: variance, }) } - return enh.Out + return enh.Out, nil } -// === histogram_fraction(lower, upper parser.ValueTypeScalar, Vector parser.ValueTypeVector) Vector === -func funcHistogramFraction(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === histogram_fraction(lower, upper parser.ValueTypeScalar, Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcHistogramFraction(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { lower := vals[0].(Vector)[0].F upper := vals[1].(Vector)[0].F inVec := vals[2].(Vector) @@ -1078,13 +1094,18 @@ func funcHistogramFraction(vals []parser.Value, args parser.Expressions, enh *Ev F: histogramFraction(lower, upper, sample.H), }) } - return enh.Out + return enh.Out, nil } -// === histogram_quantile(k parser.ValueTypeScalar, Vector parser.ValueTypeVector) Vector === -func funcHistogramQuantile(vals 
[]parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === histogram_quantile(k parser.ValueTypeScalar, Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { q := vals[0].(Vector)[0].F inVec := vals[1].(Vector) + annos := annotations.Annotations{} + + if math.IsNaN(q) || q < 0 || q > 1 { + annos.Add(annotations.NewInvalidQuantileWarning(q, args[0].PositionRange())) + } if enh.signatureToMetricWithBuckets == nil { enh.signatureToMetricWithBuckets = map[string]*metricWithBuckets{} @@ -1108,8 +1129,7 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev sample.Metric.Get(model.BucketLabel), 64, ) if err != nil { - // Oops, no bucket label or malformed label value. Skip. - // TODO(beorn7): Issue a warning somehow. + annos.Add(annotations.NewBadBucketLabelWarning(sample.Metric.Get(labels.MetricName), sample.Metric.Get(model.BucketLabel), args[1].PositionRange())) continue } enh.lblBuf = sample.Metric.BytesWithoutLabels(enh.lblBuf, labels.BucketLabel) @@ -1135,7 +1155,7 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev // At this data point, we have conventional histogram // buckets and a native histogram with the same name and // labels. Do not evaluate anything. - // TODO(beorn7): Issue a warning somehow. + annos.Add(annotations.NewMixedClassicNativeHistogramsWarning(sample.Metric.Get(labels.MetricName), args[1].PositionRange())) delete(enh.signatureToMetricWithBuckets, string(enh.lblBuf)) continue } @@ -1155,11 +1175,11 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev } } - return enh.Out + return enh.Out, annos } -// === resets(Matrix parser.ValueTypeMatrix) Vector === -func funcResets(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === resets(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcResets(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { floats := vals[0].(Matrix)[0].Floats histograms := vals[0].(Matrix)[0].Histograms resets := 0 @@ -1186,17 +1206,17 @@ func funcResets(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelpe } } - return append(enh.Out, Sample{F: float64(resets)}) + return append(enh.Out, Sample{F: float64(resets)}), nil } -// === changes(Matrix parser.ValueTypeMatrix) Vector === -func funcChanges(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === changes(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcChanges(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { floats := vals[0].(Matrix)[0].Floats changes := 0 if len(floats) == 0 { // TODO(beorn7): Only histogram values, still need to add support. 
- return enh.Out + return enh.Out, nil } prev := floats[0].F @@ -1208,11 +1228,11 @@ func funcChanges(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelp prev = current } - return append(enh.Out, Sample{F: float64(changes)}) + return append(enh.Out, Sample{F: float64(changes)}), nil } -// === label_replace(Vector parser.ValueTypeVector, dst_label, replacement, src_labelname, regex parser.ValueTypeString) Vector === -func funcLabelReplace(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === label_replace(Vector parser.ValueTypeVector, dst_label, replacement, src_labelname, regex parser.ValueTypeString) (Vector, Annotations) === +func funcLabelReplace(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { var ( vector = vals[0].(Vector) dst = stringFromArg(args[1]) @@ -1263,20 +1283,20 @@ func funcLabelReplace(vals []parser.Value, args parser.Expressions, enh *EvalNod H: el.H, }) } - return enh.Out + return enh.Out, nil } -// === Vector(s Scalar) Vector === -func funcVector(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === Vector(s Scalar) (Vector, Annotations) === +func funcVector(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return append(enh.Out, Sample{ Metric: labels.Labels{}, F: vals[0].(Vector)[0].F, - }) + }), nil } -// === label_join(vector model.ValVector, dest_labelname, separator, src_labelname...) Vector === -func funcLabelJoin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +// === label_join(vector model.ValVector, dest_labelname, separator, src_labelname...) (Vector, Annotations) === +func funcLabelJoin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { var ( vector = vals[0].(Vector) dst = stringFromArg(args[1]) @@ -1331,7 +1351,7 @@ func funcLabelJoin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHe H: el.H, }) } - return enh.Out + return enh.Out, nil } // Common code for date related functions. 
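The pattern above repeats for every function in this file: each funcXxx now returns the pair (Vector, annotations.Annotations), with nil for the many functions that have nothing to report and a populated set for those, like histogram_quantile, that can detect bad input. The date helpers below follow the same convention. A minimal, self-contained sketch of that calling convention (the Vector, Sample, and Annotations types here are simplified stand-ins for the real Prometheus ones, not the actual API):

```go
package main

import "fmt"

// Simplified stand-ins for the PromQL types touched in this diff.
type Sample struct{ F float64 }
type Vector []Sample

// Annotations models the new warning set: a map keyed by message, so
// repeated warnings are naturally deduplicated.
type Annotations map[string]error

// Add records a warning, allocating the map lazily so the common
// "no annotations" path stays a cheap nil return.
func (a *Annotations) Add(err error) {
	if *a == nil {
		*a = Annotations{}
	}
	(*a)[err.Error()] = err
}

// Most functions have nothing to warn about and return nil annotations.
func funcScale(in Vector, k float64) (Vector, Annotations) {
	out := make(Vector, 0, len(in))
	for _, s := range in {
		out = append(out, Sample{F: k * s.F})
	}
	return out, nil
}

// Validating functions accumulate warnings instead of silently skipping
// bad input, mirroring what funcHistogramQuantile does above.
func funcQuantileLike(q float64, in Vector) (Vector, Annotations) {
	var annos Annotations
	if q < 0 || q > 1 {
		annos.Add(fmt.Errorf("quantile value should be between 0 and 1, got %g", q))
	}
	return in, annos
}

func main() {
	_, annos := funcQuantileLike(1.5, Vector{{F: 0.2}})
	for _, err := range annos {
		fmt.Println("warning:", err)
	}
}
```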
@@ -1355,59 +1375,59 @@ func dateWrapper(vals []parser.Value, enh *EvalNodeHelper, f func(time.Time) flo } // === days_in_month(v Vector) Scalar === -func funcDaysInMonth(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +func funcDaysInMonth(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return dateWrapper(vals, enh, func(t time.Time) float64 { return float64(32 - time.Date(t.Year(), t.Month(), 32, 0, 0, 0, 0, time.UTC).Day()) - }) + }), nil } // === day_of_month(v Vector) Scalar === -func funcDayOfMonth(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +func funcDayOfMonth(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return dateWrapper(vals, enh, func(t time.Time) float64 { return float64(t.Day()) - }) + }), nil } // === day_of_week(v Vector) Scalar === -func funcDayOfWeek(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +func funcDayOfWeek(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return dateWrapper(vals, enh, func(t time.Time) float64 { return float64(t.Weekday()) - }) + }), nil } // === day_of_year(v Vector) Scalar === -func funcDayOfYear(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +func funcDayOfYear(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return dateWrapper(vals, enh, func(t time.Time) float64 { return float64(t.YearDay()) - }) + }), nil } // === hour(v Vector) Scalar === -func funcHour(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +func funcHour(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return dateWrapper(vals, enh, func(t time.Time) float64 { return float64(t.Hour()) - }) + }), nil } // === minute(v Vector) Scalar === -func funcMinute(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +func funcMinute(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return dateWrapper(vals, enh, func(t time.Time) float64 { return float64(t.Minute()) - }) + }), nil } // === month(v Vector) Scalar === -func funcMonth(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +func funcMonth(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return dateWrapper(vals, enh, func(t time.Time) float64 { return float64(t.Month()) - }) + }), nil } // === year(v Vector) Scalar === -func funcYear(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { +func funcYear(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { return dateWrapper(vals, enh, func(t time.Time) float64 { return float64(t.Year()) - }) + }), nil } // FunctionCalls is a list of all functions supported by PromQL, including their types. diff --git a/promql/parser/ast.go b/promql/parser/ast.go index 86f1394998..58136266fd 100644 --- a/promql/parser/ast.go +++ b/promql/parser/ast.go @@ -20,6 +20,8 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" + + "github.com/prometheus/prometheus/promql/parser/posrange" ) // Node is a generic interface for all nodes in an AST. 
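From here on, the diff is the mechanical half of the change: PositionRange and Pos move out of package parser into the new promql/parser/posrange package, presumably so that code outside the parser (such as the annotation constructors used above, which take a PositionRange per warning) can name query positions without importing the parser itself. The types are tiny; reconstructed from the definitions this diff removes from ast.go and lex.go, the new package looks roughly like:

```go
// Package posrange: a sketch reconstructed from the type definitions
// removed from promql/parser/ast.go and lex.go in this diff. The real
// package also gains a StartPosInput helper, used further down to
// replace ParseErr.Error's inline line/column arithmetic.
package posrange

// Pos is the position in a string.
// Negative numbers indicate undefined positions.
type Pos int

// PositionRange describes a position in the input string of the parser.
type PositionRange struct {
	Start Pos
	End   Pos
}
```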
@@ -45,7 +47,7 @@ type Node interface { Pretty(level int) string // PositionRange returns the position of the AST Node in the query string. - PositionRange() PositionRange + PositionRange() posrange.PositionRange } // Statement is a generic interface for all statements. @@ -94,7 +96,7 @@ type AggregateExpr struct { Param Expr // Parameter used by some aggregators. Grouping []string // The labels by which to group the Vector. Without bool // Whether to drop the given labels rather than keep them. - PosRange PositionRange + PosRange posrange.PositionRange } // BinaryExpr represents a binary expression between two child expressions. @@ -115,7 +117,7 @@ type Call struct { Func *Function // The function that was called. Args Expressions // Arguments used in the call. - PosRange PositionRange + PosRange posrange.PositionRange } // MatrixSelector represents a Matrix selection. @@ -125,7 +127,7 @@ type MatrixSelector struct { VectorSelector Expr Range time.Duration - EndPos Pos + EndPos posrange.Pos } // SubqueryExpr represents a subquery. @@ -143,27 +145,27 @@ type SubqueryExpr struct { StartOrEnd ItemType // Set when @ is used with start() or end() Step time.Duration - EndPos Pos + EndPos posrange.Pos } // NumberLiteral represents a number. type NumberLiteral struct { Val float64 - PosRange PositionRange + PosRange posrange.PositionRange } // ParenExpr wraps an expression so it cannot be disassembled as a consequence // of operator precedence. type ParenExpr struct { Expr Expr - PosRange PositionRange + PosRange posrange.PositionRange } // StringLiteral represents a string. type StringLiteral struct { Val string - PosRange PositionRange + PosRange posrange.PositionRange } // UnaryExpr represents a unary operation on another expression. @@ -172,7 +174,7 @@ type UnaryExpr struct { Op ItemType Expr Expr - StartPos Pos + StartPos posrange.Pos } // StepInvariantExpr represents a query which evaluates to the same result @@ -184,7 +186,9 @@ type StepInvariantExpr struct { func (e *StepInvariantExpr) String() string { return e.Expr.String() } -func (e *StepInvariantExpr) PositionRange() PositionRange { return e.Expr.PositionRange() } +func (e *StepInvariantExpr) PositionRange() posrange.PositionRange { + return e.Expr.PositionRange() +} // VectorSelector represents a Vector selection. type VectorSelector struct { @@ -204,7 +208,7 @@ type VectorSelector struct { UnexpandedSeriesSet storage.SeriesSet Series []storage.Series - PosRange PositionRange + PosRange posrange.PositionRange } // TestStmt is an internal helper statement that allows execution @@ -215,8 +219,8 @@ func (TestStmt) String() string { return "test statement" } func (TestStmt) PromQLStmt() {} func (t TestStmt) Pretty(int) string { return t.String() } -func (TestStmt) PositionRange() PositionRange { - return PositionRange{ +func (TestStmt) PositionRange() posrange.PositionRange { + return posrange.PositionRange{ Start: -1, End: -1, } @@ -405,17 +409,11 @@ func Children(node Node) []Node { } } -// PositionRange describes a position in the input string of the parser. -type PositionRange struct { - Start Pos - End Pos -} - // mergeRanges is a helper function to merge the PositionRanges of two Nodes. // Note that the arguments must be in the same order as they // occur in the input string. 
-func mergeRanges(first, last Node) PositionRange { - return PositionRange{ +func mergeRanges(first, last Node) posrange.PositionRange { + return posrange.PositionRange{ Start: first.PositionRange().Start, End: last.PositionRange().End, } @@ -423,33 +421,33 @@ func mergeRanges(first, last Node) PositionRange { // Item implements the Node interface. // This makes it possible to call mergeRanges on them. -func (i *Item) PositionRange() PositionRange { - return PositionRange{ +func (i *Item) PositionRange() posrange.PositionRange { + return posrange.PositionRange{ Start: i.Pos, - End: i.Pos + Pos(len(i.Val)), + End: i.Pos + posrange.Pos(len(i.Val)), } } -func (e *AggregateExpr) PositionRange() PositionRange { +func (e *AggregateExpr) PositionRange() posrange.PositionRange { return e.PosRange } -func (e *BinaryExpr) PositionRange() PositionRange { +func (e *BinaryExpr) PositionRange() posrange.PositionRange { return mergeRanges(e.LHS, e.RHS) } -func (e *Call) PositionRange() PositionRange { +func (e *Call) PositionRange() posrange.PositionRange { return e.PosRange } -func (e *EvalStmt) PositionRange() PositionRange { +func (e *EvalStmt) PositionRange() posrange.PositionRange { return e.Expr.PositionRange() } -func (e Expressions) PositionRange() PositionRange { +func (e Expressions) PositionRange() posrange.PositionRange { if len(e) == 0 { // Position undefined. - return PositionRange{ + return posrange.PositionRange{ Start: -1, End: -1, } @@ -457,39 +455,39 @@ func (e Expressions) PositionRange() PositionRange { return mergeRanges(e[0], e[len(e)-1]) } -func (e *MatrixSelector) PositionRange() PositionRange { - return PositionRange{ +func (e *MatrixSelector) PositionRange() posrange.PositionRange { + return posrange.PositionRange{ Start: e.VectorSelector.PositionRange().Start, End: e.EndPos, } } -func (e *SubqueryExpr) PositionRange() PositionRange { - return PositionRange{ +func (e *SubqueryExpr) PositionRange() posrange.PositionRange { + return posrange.PositionRange{ Start: e.Expr.PositionRange().Start, End: e.EndPos, } } -func (e *NumberLiteral) PositionRange() PositionRange { +func (e *NumberLiteral) PositionRange() posrange.PositionRange { return e.PosRange } -func (e *ParenExpr) PositionRange() PositionRange { +func (e *ParenExpr) PositionRange() posrange.PositionRange { return e.PosRange } -func (e *StringLiteral) PositionRange() PositionRange { +func (e *StringLiteral) PositionRange() posrange.PositionRange { return e.PosRange } -func (e *UnaryExpr) PositionRange() PositionRange { - return PositionRange{ +func (e *UnaryExpr) PositionRange() posrange.PositionRange { + return posrange.PositionRange{ Start: e.StartPos, End: e.Expr.PositionRange().End, } } -func (e *VectorSelector) PositionRange() PositionRange { +func (e *VectorSelector) PositionRange() posrange.PositionRange { return e.PosRange } diff --git a/promql/parser/generated_parser.y b/promql/parser/generated_parser.y index f7951db2b0..676fd9fb5b 100644 --- a/promql/parser/generated_parser.y +++ b/promql/parser/generated_parser.y @@ -22,6 +22,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/value" "github.com/prometheus/prometheus/model/histogram" + "github.com/prometheus/prometheus/promql/parser/posrange" ) %} @@ -199,7 +200,7 @@ start : { yylex.(*parser).generatedParserResult = $2 } | START_SERIES_DESCRIPTION series_description | START_EXPRESSION /* empty */ EOF - { yylex.(*parser).addParseErrf(PositionRange{}, "no expression found in input")} + { 
yylex.(*parser).addParseErrf(posrange.PositionRange{}, "no expression found in input")} | START_EXPRESSION expr { yylex.(*parser).generatedParserResult = $2 } | START_METRIC_SELECTOR vector_selector @@ -371,7 +372,7 @@ function_call : IDENTIFIER function_call_body $$ = &Call{ Func: fn, Args: $2.(Expressions), - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: $1.Pos, End: yylex.(*parser).lastClosing, }, diff --git a/promql/parser/generated_parser.y.go b/promql/parser/generated_parser.y.go index d7fc9081b0..77a403be35 100644 --- a/promql/parser/generated_parser.y.go +++ b/promql/parser/generated_parser.y.go @@ -15,9 +15,10 @@ import ( "github.com/prometheus/prometheus/model/histogram" "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/value" + "github.com/prometheus/prometheus/promql/parser/posrange" ) -//line promql/parser/generated_parser.y:29 +//line promql/parser/generated_parser.y:30 type yySymType struct { yys int node Node @@ -229,7 +230,7 @@ const yyEofCode = 1 const yyErrCode = 2 const yyInitialStackSize = 16 -//line promql/parser/generated_parser.y:915 +//line promql/parser/generated_parser.y:916 //line yacctab:1 var yyExca = [...]int16{ @@ -994,62 +995,62 @@ yydefault: case 1: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:199 +//line promql/parser/generated_parser.y:200 { yylex.(*parser).generatedParserResult = yyDollar[2].labels } case 3: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:202 +//line promql/parser/generated_parser.y:203 { - yylex.(*parser).addParseErrf(PositionRange{}, "no expression found in input") + yylex.(*parser).addParseErrf(posrange.PositionRange{}, "no expression found in input") } case 4: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:204 +//line promql/parser/generated_parser.y:205 { yylex.(*parser).generatedParserResult = yyDollar[2].node } case 5: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:206 +//line promql/parser/generated_parser.y:207 { yylex.(*parser).generatedParserResult = yyDollar[2].node } case 7: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:209 +//line promql/parser/generated_parser.y:210 { yylex.(*parser).unexpected("", "") } case 20: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:232 +//line promql/parser/generated_parser.y:233 { yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, yyDollar[2].node, yyDollar[3].node) } case 21: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:234 +//line promql/parser/generated_parser.y:235 { yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, yyDollar[3].node, yyDollar[2].node) } case 22: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:236 +//line promql/parser/generated_parser.y:237 { yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, &AggregateExpr{}, yyDollar[2].node) } case 23: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:238 +//line promql/parser/generated_parser.y:239 { yylex.(*parser).unexpected("aggregation", "") yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, &AggregateExpr{}, Expressions{}) } case 24: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:246 +//line promql/parser/generated_parser.y:247 { yyVAL.node = &AggregateExpr{ Grouping: yyDollar[2].strings, @@ -1057,7 +1058,7 @@ yydefault: } case 25: yyDollar = yyS[yypt-2 : yypt+1] 
-//line promql/parser/generated_parser.y:252 +//line promql/parser/generated_parser.y:253 { yyVAL.node = &AggregateExpr{ Grouping: yyDollar[2].strings, @@ -1066,103 +1067,103 @@ yydefault: } case 26: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:265 +//line promql/parser/generated_parser.y:266 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 27: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:266 +//line promql/parser/generated_parser.y:267 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 28: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:267 +//line promql/parser/generated_parser.y:268 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 29: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:268 +//line promql/parser/generated_parser.y:269 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 30: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:269 +//line promql/parser/generated_parser.y:270 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 31: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:270 +//line promql/parser/generated_parser.y:271 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 32: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:271 +//line promql/parser/generated_parser.y:272 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 33: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:272 +//line promql/parser/generated_parser.y:273 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 34: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:273 +//line promql/parser/generated_parser.y:274 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 35: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:274 +//line promql/parser/generated_parser.y:275 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 36: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:275 +//line promql/parser/generated_parser.y:276 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 37: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:276 +//line promql/parser/generated_parser.y:277 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 38: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:277 +//line promql/parser/generated_parser.y:278 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 39: yyDollar = yyS[yypt-4 : 
yypt+1] -//line promql/parser/generated_parser.y:278 +//line promql/parser/generated_parser.y:279 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 40: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:279 +//line promql/parser/generated_parser.y:280 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 41: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:280 +//line promql/parser/generated_parser.y:281 { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } case 43: yyDollar = yyS[yypt-0 : yypt+1] -//line promql/parser/generated_parser.y:288 +//line promql/parser/generated_parser.y:289 { yyVAL.node = &BinaryExpr{ VectorMatching: &VectorMatching{Card: CardOneToOne}, @@ -1170,7 +1171,7 @@ yydefault: } case 44: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:293 +//line promql/parser/generated_parser.y:294 { yyVAL.node = &BinaryExpr{ VectorMatching: &VectorMatching{Card: CardOneToOne}, @@ -1179,14 +1180,14 @@ yydefault: } case 45: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:301 +//line promql/parser/generated_parser.y:302 { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.MatchingLabels = yyDollar[3].strings } case 46: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:306 +//line promql/parser/generated_parser.y:307 { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.MatchingLabels = yyDollar[3].strings @@ -1194,7 +1195,7 @@ yydefault: } case 49: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:316 +//line promql/parser/generated_parser.y:317 { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.Card = CardManyToOne @@ -1202,7 +1203,7 @@ yydefault: } case 50: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:322 +//line promql/parser/generated_parser.y:323 { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.Card = CardOneToMany @@ -1210,51 +1211,51 @@ yydefault: } case 51: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:331 +//line promql/parser/generated_parser.y:332 { yyVAL.strings = yyDollar[2].strings } case 52: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:333 +//line promql/parser/generated_parser.y:334 { yyVAL.strings = yyDollar[2].strings } case 53: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:335 +//line promql/parser/generated_parser.y:336 { yyVAL.strings = []string{} } case 54: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:337 +//line promql/parser/generated_parser.y:338 { yylex.(*parser).unexpected("grouping opts", "\"(\"") yyVAL.strings = nil } case 55: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:343 +//line promql/parser/generated_parser.y:344 { yyVAL.strings = append(yyDollar[1].strings, yyDollar[3].item.Val) } case 56: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:345 +//line promql/parser/generated_parser.y:346 { yyVAL.strings = []string{yyDollar[1].item.Val} } case 57: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:347 +//line promql/parser/generated_parser.y:348 { yylex.(*parser).unexpected("grouping opts", "\",\" or 
\")\"") yyVAL.strings = yyDollar[1].strings } case 58: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:351 +//line promql/parser/generated_parser.y:352 { if !isLabel(yyDollar[1].item.Val) { yylex.(*parser).unexpected("grouping opts", "label") @@ -1263,14 +1264,14 @@ yydefault: } case 59: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:358 +//line promql/parser/generated_parser.y:359 { yylex.(*parser).unexpected("grouping opts", "label") yyVAL.item = Item{} } case 60: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:366 +//line promql/parser/generated_parser.y:367 { fn, exist := getFunction(yyDollar[1].item.Val, yylex.(*parser).functions) if !exist { @@ -1279,7 +1280,7 @@ yydefault: yyVAL.node = &Call{ Func: fn, Args: yyDollar[2].node.(Expressions), - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: yyDollar[1].item.Pos, End: yylex.(*parser).lastClosing, }, @@ -1287,86 +1288,86 @@ yydefault: } case 61: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:383 +//line promql/parser/generated_parser.y:384 { yyVAL.node = yyDollar[2].node } case 62: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:385 +//line promql/parser/generated_parser.y:386 { yyVAL.node = Expressions{} } case 63: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:389 +//line promql/parser/generated_parser.y:390 { yyVAL.node = append(yyDollar[1].node.(Expressions), yyDollar[3].node.(Expr)) } case 64: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:391 +//line promql/parser/generated_parser.y:392 { yyVAL.node = Expressions{yyDollar[1].node.(Expr)} } case 65: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:393 +//line promql/parser/generated_parser.y:394 { yylex.(*parser).addParseErrf(yyDollar[2].item.PositionRange(), "trailing commas not allowed in function call args") yyVAL.node = yyDollar[1].node } case 66: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:404 +//line promql/parser/generated_parser.y:405 { yyVAL.node = &ParenExpr{Expr: yyDollar[2].node.(Expr), PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[3].item)} } case 67: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:412 +//line promql/parser/generated_parser.y:413 { yylex.(*parser).addOffset(yyDollar[1].node, yyDollar[3].duration) yyVAL.node = yyDollar[1].node } case 68: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:417 +//line promql/parser/generated_parser.y:418 { yylex.(*parser).addOffset(yyDollar[1].node, -yyDollar[4].duration) yyVAL.node = yyDollar[1].node } case 69: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:422 +//line promql/parser/generated_parser.y:423 { yylex.(*parser).unexpected("offset", "duration") yyVAL.node = yyDollar[1].node } case 70: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:429 +//line promql/parser/generated_parser.y:430 { yylex.(*parser).setTimestamp(yyDollar[1].node, yyDollar[3].float) yyVAL.node = yyDollar[1].node } case 71: yyDollar = yyS[yypt-5 : yypt+1] -//line promql/parser/generated_parser.y:434 +//line promql/parser/generated_parser.y:435 { yylex.(*parser).setAtModifierPreprocessor(yyDollar[1].node, yyDollar[3].item) yyVAL.node = yyDollar[1].node } case 72: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:439 +//line promql/parser/generated_parser.y:440 { 
yylex.(*parser).unexpected("@", "timestamp") yyVAL.node = yyDollar[1].node } case 75: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:449 +//line promql/parser/generated_parser.y:450 { var errMsg string vs, ok := yyDollar[1].node.(*VectorSelector) @@ -1391,7 +1392,7 @@ yydefault: } case 76: yyDollar = yyS[yypt-6 : yypt+1] -//line promql/parser/generated_parser.y:474 +//line promql/parser/generated_parser.y:475 { yyVAL.node = &SubqueryExpr{ Expr: yyDollar[1].node.(Expr), @@ -1403,35 +1404,35 @@ yydefault: } case 77: yyDollar = yyS[yypt-6 : yypt+1] -//line promql/parser/generated_parser.y:484 +//line promql/parser/generated_parser.y:485 { yylex.(*parser).unexpected("subquery selector", "\"]\"") yyVAL.node = yyDollar[1].node } case 78: yyDollar = yyS[yypt-5 : yypt+1] -//line promql/parser/generated_parser.y:486 +//line promql/parser/generated_parser.y:487 { yylex.(*parser).unexpected("subquery selector", "duration or \"]\"") yyVAL.node = yyDollar[1].node } case 79: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:488 +//line promql/parser/generated_parser.y:489 { yylex.(*parser).unexpected("subquery or range", "\":\" or \"]\"") yyVAL.node = yyDollar[1].node } case 80: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:490 +//line promql/parser/generated_parser.y:491 { yylex.(*parser).unexpected("subquery selector", "duration") yyVAL.node = yyDollar[1].node } case 81: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:500 +//line promql/parser/generated_parser.y:501 { if nl, ok := yyDollar[2].node.(*NumberLiteral); ok { if yyDollar[1].item.Typ == SUB { @@ -1445,7 +1446,7 @@ yydefault: } case 82: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:518 +//line promql/parser/generated_parser.y:519 { vs := yyDollar[2].node.(*VectorSelector) vs.PosRange = mergeRanges(&yyDollar[1].item, vs) @@ -1455,7 +1456,7 @@ yydefault: } case 83: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:526 +//line promql/parser/generated_parser.y:527 { vs := &VectorSelector{ Name: yyDollar[1].item.Val, @@ -1467,7 +1468,7 @@ yydefault: } case 84: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:536 +//line promql/parser/generated_parser.y:537 { vs := yyDollar[1].node.(*VectorSelector) yylex.(*parser).assembleVectorSelector(vs) @@ -1475,7 +1476,7 @@ yydefault: } case 85: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:544 +//line promql/parser/generated_parser.y:545 { yyVAL.node = &VectorSelector{ LabelMatchers: yyDollar[2].matchers, @@ -1484,7 +1485,7 @@ yydefault: } case 86: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:551 +//line promql/parser/generated_parser.y:552 { yyVAL.node = &VectorSelector{ LabelMatchers: yyDollar[2].matchers, @@ -1493,7 +1494,7 @@ yydefault: } case 87: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:558 +//line promql/parser/generated_parser.y:559 { yyVAL.node = &VectorSelector{ LabelMatchers: []*labels.Matcher{}, @@ -1502,7 +1503,7 @@ yydefault: } case 88: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:567 +//line promql/parser/generated_parser.y:568 { if yyDollar[1].matchers != nil { yyVAL.matchers = append(yyDollar[1].matchers, yyDollar[3].matcher) @@ -1512,47 +1513,47 @@ yydefault: } case 89: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:575 +//line promql/parser/generated_parser.y:576 { yyVAL.matchers 
= []*labels.Matcher{yyDollar[1].matcher} } case 90: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:577 +//line promql/parser/generated_parser.y:578 { yylex.(*parser).unexpected("label matching", "\",\" or \"}\"") yyVAL.matchers = yyDollar[1].matchers } case 91: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:581 +//line promql/parser/generated_parser.y:582 { yyVAL.matcher = yylex.(*parser).newLabelMatcher(yyDollar[1].item, yyDollar[2].item, yyDollar[3].item) } case 92: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:583 +//line promql/parser/generated_parser.y:584 { yylex.(*parser).unexpected("label matching", "string") yyVAL.matcher = nil } case 93: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:585 +//line promql/parser/generated_parser.y:586 { yylex.(*parser).unexpected("label matching", "label matching operator") yyVAL.matcher = nil } case 94: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:587 +//line promql/parser/generated_parser.y:588 { yylex.(*parser).unexpected("label matching", "identifier or \"}\"") yyVAL.matcher = nil } case 95: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:595 +//line promql/parser/generated_parser.y:596 { b := labels.NewBuilder(yyDollar[2].labels) b.Set(labels.MetricName, yyDollar[1].item.Val) @@ -1560,83 +1561,83 @@ yydefault: } case 96: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:597 +//line promql/parser/generated_parser.y:598 { yyVAL.labels = yyDollar[1].labels } case 119: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:604 +//line promql/parser/generated_parser.y:605 { yyVAL.labels = labels.New(yyDollar[2].lblList...) } case 120: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:606 +//line promql/parser/generated_parser.y:607 { yyVAL.labels = labels.New(yyDollar[2].lblList...) 
} case 121: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:608 +//line promql/parser/generated_parser.y:609 { yyVAL.labels = labels.New() } case 122: yyDollar = yyS[yypt-0 : yypt+1] -//line promql/parser/generated_parser.y:610 +//line promql/parser/generated_parser.y:611 { yyVAL.labels = labels.New() } case 123: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:614 +//line promql/parser/generated_parser.y:615 { yyVAL.lblList = append(yyDollar[1].lblList, yyDollar[3].label) } case 124: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:616 +//line promql/parser/generated_parser.y:617 { yyVAL.lblList = []labels.Label{yyDollar[1].label} } case 125: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:618 +//line promql/parser/generated_parser.y:619 { yylex.(*parser).unexpected("label set", "\",\" or \"}\"") yyVAL.lblList = yyDollar[1].lblList } case 126: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:623 +//line promql/parser/generated_parser.y:624 { yyVAL.label = labels.Label{Name: yyDollar[1].item.Val, Value: yylex.(*parser).unquoteString(yyDollar[3].item.Val)} } case 127: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:625 +//line promql/parser/generated_parser.y:626 { yylex.(*parser).unexpected("label set", "string") yyVAL.label = labels.Label{} } case 128: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:627 +//line promql/parser/generated_parser.y:628 { yylex.(*parser).unexpected("label set", "\"=\"") yyVAL.label = labels.Label{} } case 129: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:629 +//line promql/parser/generated_parser.y:630 { yylex.(*parser).unexpected("label set", "identifier or \"}\"") yyVAL.label = labels.Label{} } case 130: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:640 +//line promql/parser/generated_parser.y:641 { yylex.(*parser).generatedParserResult = &seriesDescription{ labels: yyDollar[1].labels, @@ -1645,38 +1646,38 @@ yydefault: } case 131: yyDollar = yyS[yypt-0 : yypt+1] -//line promql/parser/generated_parser.y:649 +//line promql/parser/generated_parser.y:650 { yyVAL.series = []SequenceValue{} } case 132: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:651 +//line promql/parser/generated_parser.y:652 { yyVAL.series = append(yyDollar[1].series, yyDollar[3].series...) 
} case 133: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:653 +//line promql/parser/generated_parser.y:654 { yyVAL.series = yyDollar[1].series } case 134: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:655 +//line promql/parser/generated_parser.y:656 { yylex.(*parser).unexpected("series values", "") yyVAL.series = nil } case 135: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:659 +//line promql/parser/generated_parser.y:660 { yyVAL.series = []SequenceValue{{Omitted: true}} } case 136: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:661 +//line promql/parser/generated_parser.y:662 { yyVAL.series = []SequenceValue{} for i := uint64(0); i < yyDollar[3].uint; i++ { @@ -1685,13 +1686,13 @@ yydefault: } case 137: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:668 +//line promql/parser/generated_parser.y:669 { yyVAL.series = []SequenceValue{{Value: yyDollar[1].float}} } case 138: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:670 +//line promql/parser/generated_parser.y:671 { yyVAL.series = []SequenceValue{} // Add an additional value for time 0, which we ignore in tests. @@ -1701,7 +1702,7 @@ yydefault: } case 139: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:678 +//line promql/parser/generated_parser.y:679 { yyVAL.series = []SequenceValue{} // Add an additional value for time 0, which we ignore in tests. @@ -1712,13 +1713,13 @@ yydefault: } case 140: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:688 +//line promql/parser/generated_parser.y:689 { yyVAL.series = []SequenceValue{{Histogram: yyDollar[1].histogram}} } case 141: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:692 +//line promql/parser/generated_parser.y:693 { yyVAL.series = []SequenceValue{} // Add an additional value for time 0, which we ignore in tests. 
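The long run of //line edits in generated_parser.y.go, above and below, is pure churn: adding a single import line to the header of generated_parser.y shifts every subsequent grammar position by one, and goyacc regenerates the directives to match. A //line directive only retargets where the Go toolchain attributes the code that follows it; a tiny standalone illustration (the file name and line number are taken from this diff, but the function is hypothetical):

```go
package main

import "fmt"

// goyacc emits //line directives so that errors and panics in the
// generated parser point back at the .y grammar rather than at the
// generated .go file. The directive must start in column 1; it makes
// the toolchain report the next line as if it lived in the named file.

//line promql/parser/generated_parser.y:200
func generatedAction() string { return "no expression found in input" }

func main() {
	fmt.Println(generatedAction())
}
```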
@@ -1729,7 +1730,7 @@ yydefault: } case 142: yyDollar = yyS[yypt-5 : yypt+1] -//line promql/parser/generated_parser.y:701 +//line promql/parser/generated_parser.y:702 { val, err := yylex.(*parser).histogramsIncreaseSeries(yyDollar[1].histogram, yyDollar[3].histogram, yyDollar[5].uint) if err != nil { @@ -1739,7 +1740,7 @@ yydefault: } case 143: yyDollar = yyS[yypt-5 : yypt+1] -//line promql/parser/generated_parser.y:709 +//line promql/parser/generated_parser.y:710 { val, err := yylex.(*parser).histogramsDecreaseSeries(yyDollar[1].histogram, yyDollar[3].histogram, yyDollar[5].uint) if err != nil { @@ -1749,7 +1750,7 @@ yydefault: } case 144: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:719 +//line promql/parser/generated_parser.y:720 { if yyDollar[1].item.Val != "stale" { yylex.(*parser).unexpected("series values", "number or \"stale\"") @@ -1758,138 +1759,138 @@ yydefault: } case 147: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:731 +//line promql/parser/generated_parser.y:732 { yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&yyDollar[2].descriptors) } case 148: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:735 +//line promql/parser/generated_parser.y:736 { yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&yyDollar[2].descriptors) } case 149: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:739 +//line promql/parser/generated_parser.y:740 { m := yylex.(*parser).newMap() yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&m) } case 150: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:744 +//line promql/parser/generated_parser.y:745 { m := yylex.(*parser).newMap() yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&m) } case 151: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:752 +//line promql/parser/generated_parser.y:753 { yyVAL.descriptors = *(yylex.(*parser).mergeMaps(&yyDollar[1].descriptors, &yyDollar[3].descriptors)) } case 152: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:756 +//line promql/parser/generated_parser.y:757 { yyVAL.descriptors = yyDollar[1].descriptors } case 153: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:759 +//line promql/parser/generated_parser.y:760 { yylex.(*parser).unexpected("histogram description", "histogram description key, e.g. 
buckets:[5 10 7]") } case 154: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:766 +//line promql/parser/generated_parser.y:767 { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["schema"] = yyDollar[3].int } case 155: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:771 +//line promql/parser/generated_parser.y:772 { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["sum"] = yyDollar[3].float } case 156: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:776 +//line promql/parser/generated_parser.y:777 { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["count"] = yyDollar[3].float } case 157: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:781 +//line promql/parser/generated_parser.y:782 { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["z_bucket"] = yyDollar[3].float } case 158: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:786 +//line promql/parser/generated_parser.y:787 { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["z_bucket_w"] = yyDollar[3].float } case 159: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:791 +//line promql/parser/generated_parser.y:792 { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["buckets"] = yyDollar[3].bucket_set } case 160: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:796 +//line promql/parser/generated_parser.y:797 { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["offset"] = yyDollar[3].int } case 161: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:801 +//line promql/parser/generated_parser.y:802 { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["n_buckets"] = yyDollar[3].bucket_set } case 162: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:806 +//line promql/parser/generated_parser.y:807 { yyVAL.descriptors = yylex.(*parser).newMap() yyVAL.descriptors["n_offset"] = yyDollar[3].int } case 163: yyDollar = yyS[yypt-4 : yypt+1] -//line promql/parser/generated_parser.y:813 +//line promql/parser/generated_parser.y:814 { yyVAL.bucket_set = yyDollar[2].bucket_set } case 164: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:817 +//line promql/parser/generated_parser.y:818 { yyVAL.bucket_set = yyDollar[2].bucket_set } case 165: yyDollar = yyS[yypt-3 : yypt+1] -//line promql/parser/generated_parser.y:823 +//line promql/parser/generated_parser.y:824 { yyVAL.bucket_set = append(yyDollar[1].bucket_set, yyDollar[3].float) } case 166: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:827 +//line promql/parser/generated_parser.y:828 { yyVAL.bucket_set = []float64{yyDollar[1].float} } case 213: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:852 +//line promql/parser/generated_parser.y:853 { yyVAL.node = &NumberLiteral{ Val: yylex.(*parser).number(yyDollar[1].item.Val), @@ -1898,25 +1899,25 @@ yydefault: } case 214: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:860 +//line promql/parser/generated_parser.y:861 { yyVAL.float = yylex.(*parser).number(yyDollar[1].item.Val) } case 215: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:862 +//line promql/parser/generated_parser.y:863 { yyVAL.float = yyDollar[2].float } case 216: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:863 +//line 
promql/parser/generated_parser.y:864 { yyVAL.float = -yyDollar[2].float } case 219: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:869 +//line promql/parser/generated_parser.y:870 { var err error yyVAL.uint, err = strconv.ParseUint(yyDollar[1].item.Val, 10, 64) @@ -1926,19 +1927,19 @@ yydefault: } case 220: yyDollar = yyS[yypt-2 : yypt+1] -//line promql/parser/generated_parser.y:878 +//line promql/parser/generated_parser.y:879 { yyVAL.int = -int64(yyDollar[2].uint) } case 221: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:879 +//line promql/parser/generated_parser.y:880 { yyVAL.int = int64(yyDollar[1].uint) } case 222: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:883 +//line promql/parser/generated_parser.y:884 { var err error yyVAL.duration, err = parseDuration(yyDollar[1].item.Val) @@ -1948,7 +1949,7 @@ yydefault: } case 223: yyDollar = yyS[yypt-1 : yypt+1] -//line promql/parser/generated_parser.y:894 +//line promql/parser/generated_parser.y:895 { yyVAL.node = &StringLiteral{ Val: yylex.(*parser).unquoteString(yyDollar[1].item.Val), @@ -1957,13 +1958,13 @@ yydefault: } case 224: yyDollar = yyS[yypt-0 : yypt+1] -//line promql/parser/generated_parser.y:907 +//line promql/parser/generated_parser.y:908 { yyVAL.duration = 0 } case 226: yyDollar = yyS[yypt-0 : yypt+1] -//line promql/parser/generated_parser.y:911 +//line promql/parser/generated_parser.y:912 { yyVAL.strings = nil } diff --git a/promql/parser/lex.go b/promql/parser/lex.go index 4f5e735cb6..c8bfcc2e1e 100644 --- a/promql/parser/lex.go +++ b/promql/parser/lex.go @@ -19,13 +19,15 @@ import ( "strings" "unicode" "unicode/utf8" + + "github.com/prometheus/prometheus/promql/parser/posrange" ) // Item represents a token or text string returned from the scanner. type Item struct { - Typ ItemType // The type of this Item. - Pos Pos // The starting position, in bytes, of this Item in the input string. - Val string // The value of this Item. + Typ ItemType // The type of this Item. + Pos posrange.Pos // The starting position, in bytes, of this Item in the input string. + Val string // The value of this Item. } // String returns a descriptive string for the Item. @@ -234,10 +236,6 @@ const eof = -1 // stateFn represents the state of the scanner as a function that returns the next state. type stateFn func(*Lexer) stateFn -// Pos is the position in a string. -// Negative numbers indicate undefined positions. -type Pos int - type histogramState int const ( @@ -250,14 +248,14 @@ const ( // Lexer holds the state of the scanner. type Lexer struct { - input string // The string being scanned. - state stateFn // The next lexing function to enter. - pos Pos // Current position in the input. - start Pos // Start position of this Item. - width Pos // Width of last rune read from input. - lastPos Pos // Position of most recent Item returned by NextItem. - itemp *Item // Pointer to where the next scanned item should be placed. - scannedItem bool // Set to true every time an item is scanned. + input string // The string being scanned. + state stateFn // The next lexing function to enter. + pos posrange.Pos // Current position in the input. + start posrange.Pos // Start position of this Item. + width posrange.Pos // Width of last rune read from input. + lastPos posrange.Pos // Position of most recent Item returned by NextItem. + itemp *Item // Pointer to where the next scanned item should be placed. + scannedItem bool // Set to true every time an item is scanned. 
parenDepth int // Nesting depth of ( ) exprs. braceOpen bool // Whether a { is opened. @@ -278,7 +276,7 @@ func (l *Lexer) next() rune { return eof } r, w := utf8.DecodeRuneInString(l.input[l.pos:]) - l.width = Pos(w) + l.width = posrange.Pos(w) l.pos += l.width return r } @@ -827,7 +825,7 @@ func lexSpace(l *Lexer) stateFn { // lexLineComment scans a line comment. Left comment marker is known to be present. func lexLineComment(l *Lexer) stateFn { - l.pos += Pos(len(lineComment)) + l.pos += posrange.Pos(len(lineComment)) for r := l.next(); !isEndOfLine(r) && r != eof; { r = l.next() } diff --git a/promql/parser/lex_test.go b/promql/parser/lex_test.go index 237b71ec85..23c9dfbee0 100644 --- a/promql/parser/lex_test.go +++ b/promql/parser/lex_test.go @@ -17,6 +17,8 @@ import ( "testing" "github.com/stretchr/testify/require" + + "github.com/prometheus/prometheus/promql/parser/posrange" ) type testCase struct { @@ -824,7 +826,7 @@ func TestLexer(t *testing.T) { require.Fail(t, "unexpected lexing error at position %d: %s", lastItem.Pos, lastItem) } - eofItem := Item{EOF, Pos(len(test.input)), ""} + eofItem := Item{EOF, posrange.Pos(len(test.input)), ""} require.Equal(t, lastItem, eofItem, "%d: input %q", i, test.input) out = out[:len(out)-1] diff --git a/promql/parser/parse.go b/promql/parser/parse.go index 05ff22f863..34217697a6 100644 --- a/promql/parser/parse.go +++ b/promql/parser/parse.go @@ -29,6 +29,7 @@ import ( "github.com/prometheus/prometheus/model/histogram" "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/timestamp" + "github.com/prometheus/prometheus/promql/parser/posrange" "github.com/prometheus/prometheus/util/strutil" ) @@ -54,7 +55,7 @@ type parser struct { // Everytime an Item is lexed that could be the end // of certain expressions its end position is stored here. - lastClosing Pos + lastClosing posrange.Pos yyParser yyParserImpl @@ -121,7 +122,7 @@ func (p *parser) Close() { // ParseErr wraps a parsing error with line and position context. type ParseErr struct { - PositionRange PositionRange + PositionRange posrange.PositionRange Err error Query string @@ -130,27 +131,7 @@ type ParseErr struct { } func (e *ParseErr) Error() string { - pos := int(e.PositionRange.Start) - lastLineBreak := -1 - line := e.LineOffset + 1 - - var positionStr string - - if pos < 0 || pos > len(e.Query) { - positionStr = "invalid position:" - } else { - - for i, c := range e.Query[:pos] { - if c == '\n' { - lastLineBreak = i - line++ - } - } - - col := pos - lastLineBreak - positionStr = fmt.Sprintf("%d:%d:", line, col) - } - return fmt.Sprintf("%s parse error: %s", positionStr, e.Err) + return fmt.Sprintf("%s: parse error: %s", e.PositionRange.StartPosInput(e.Query, e.LineOffset), e.Err) } type ParseErrors []ParseErr @@ -275,12 +256,12 @@ func ParseSeriesDesc(input string) (labels labels.Labels, values []SequenceValue } // addParseErrf formats the error and appends it to the list of parsing errors. -func (p *parser) addParseErrf(positionRange PositionRange, format string, args ...interface{}) { +func (p *parser) addParseErrf(positionRange posrange.PositionRange, format string, args ...interface{}) { p.addParseErr(positionRange, fmt.Errorf(format, args...)) } // addParseErr appends the provided error to the list of parsing errors. 
-func (p *parser) addParseErr(positionRange PositionRange, err error) { +func (p *parser) addParseErr(positionRange posrange.PositionRange, err error) { perr := ParseErr{ PositionRange: positionRange, Err: err, @@ -366,9 +347,9 @@ func (p *parser) Lex(lval *yySymType) int { switch typ { case ERROR: - pos := PositionRange{ + pos := posrange.PositionRange{ Start: p.lex.start, - End: Pos(len(p.lex.input)), + End: posrange.Pos(len(p.lex.input)), } p.addParseErr(pos, errors.New(p.yyParser.lval.item.Val)) @@ -378,7 +359,7 @@ func (p *parser) Lex(lval *yySymType) int { lval.item.Typ = EOF p.InjectItem(0) case RIGHT_BRACE, RIGHT_PAREN, RIGHT_BRACKET, DURATION, NUMBER: - p.lastClosing = lval.item.Pos + Pos(len(lval.item.Val)) + p.lastClosing = lval.item.Pos + posrange.Pos(len(lval.item.Val)) } return int(typ) @@ -436,7 +417,7 @@ func (p *parser) newAggregateExpr(op Item, modifier, args Node) (ret *AggregateE ret = modifier.(*AggregateExpr) arguments := args.(Expressions) - ret.PosRange = PositionRange{ + ret.PosRange = posrange.PositionRange{ Start: op.Pos, End: p.lastClosing, } @@ -477,7 +458,7 @@ func (p *parser) newMap() (ret map[string]interface{}) { func (p *parser) mergeMaps(left, right *map[string]interface{}) (ret *map[string]interface{}) { for key, value := range *right { if _, ok := (*left)[key]; ok { - p.addParseErrf(PositionRange{}, "duplicate key \"%s\" in histogram", key) + p.addParseErrf(posrange.PositionRange{}, "duplicate key \"%s\" in histogram", key) continue } (*left)[key] = value @@ -677,7 +658,7 @@ func (p *parser) checkAST(node Node) (typ ValueType) { // opRange returns the PositionRange of the operator part of the BinaryExpr. // This is made a function instead of a variable, so it is lazily evaluated on demand. - opRange := func() (r PositionRange) { + opRange := func() (r posrange.PositionRange) { // Remove whitespace at the beginning and end of the range. for r.Start = n.LHS.PositionRange().End; isSpace(rune(p.lex.input[r.Start])); r.Start++ { // nolint:revive } @@ -881,7 +862,7 @@ func (p *parser) newLabelMatcher(label, operator, value Item) *labels.Matcher { // addOffset is used to set the offset in the generated parser. 
func (p *parser) addOffset(e Node, offset time.Duration) { var orgoffsetp *time.Duration - var endPosp *Pos + var endPosp *posrange.Pos switch s := e.(type) { case *VectorSelector: @@ -921,7 +902,7 @@ func (p *parser) setTimestamp(e Node, ts float64) { p.addParseErrf(e.PositionRange(), "timestamp out of bounds for @ modifier: %f", ts) } var timestampp **int64 - var endPosp *Pos + var endPosp *posrange.Pos timestampp, _, endPosp, ok := p.getAtModifierVars(e) if !ok { @@ -950,11 +931,11 @@ func (p *parser) setAtModifierPreprocessor(e Node, op Item) { *endPosp = p.lastClosing } -func (p *parser) getAtModifierVars(e Node) (**int64, *ItemType, *Pos, bool) { +func (p *parser) getAtModifierVars(e Node) (**int64, *ItemType, *posrange.Pos, bool) { var ( timestampp **int64 preprocp *ItemType - endPosp *Pos + endPosp *posrange.Pos ) switch s := e.(type) { case *VectorSelector: diff --git a/promql/parser/parse_test.go b/promql/parser/parse_test.go index 41543cc6b9..5624a67613 100644 --- a/promql/parser/parse_test.go +++ b/promql/parser/parse_test.go @@ -26,6 +26,8 @@ import ( "github.com/prometheus/prometheus/model/histogram" "github.com/prometheus/prometheus/model/labels" + + "github.com/prometheus/prometheus/promql/parser/posrange" ) var testExpr = []struct { @@ -39,84 +41,84 @@ var testExpr = []struct { input: "1", expected: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, }, { input: "+Inf", expected: &NumberLiteral{ Val: math.Inf(1), - PosRange: PositionRange{Start: 0, End: 4}, + PosRange: posrange.PositionRange{Start: 0, End: 4}, }, }, { input: "-Inf", expected: &NumberLiteral{ Val: math.Inf(-1), - PosRange: PositionRange{Start: 0, End: 4}, + PosRange: posrange.PositionRange{Start: 0, End: 4}, }, }, { input: ".5", expected: &NumberLiteral{ Val: 0.5, - PosRange: PositionRange{Start: 0, End: 2}, + PosRange: posrange.PositionRange{Start: 0, End: 2}, }, }, { input: "5.", expected: &NumberLiteral{ Val: 5, - PosRange: PositionRange{Start: 0, End: 2}, + PosRange: posrange.PositionRange{Start: 0, End: 2}, }, }, { input: "123.4567", expected: &NumberLiteral{ Val: 123.4567, - PosRange: PositionRange{Start: 0, End: 8}, + PosRange: posrange.PositionRange{Start: 0, End: 8}, }, }, { input: "5e-3", expected: &NumberLiteral{ Val: 0.005, - PosRange: PositionRange{Start: 0, End: 4}, + PosRange: posrange.PositionRange{Start: 0, End: 4}, }, }, { input: "5e3", expected: &NumberLiteral{ Val: 5000, - PosRange: PositionRange{Start: 0, End: 3}, + PosRange: posrange.PositionRange{Start: 0, End: 3}, }, }, { input: "0xc", expected: &NumberLiteral{ Val: 12, - PosRange: PositionRange{Start: 0, End: 3}, + PosRange: posrange.PositionRange{Start: 0, End: 3}, }, }, { input: "0755", expected: &NumberLiteral{ Val: 493, - PosRange: PositionRange{Start: 0, End: 4}, + PosRange: posrange.PositionRange{Start: 0, End: 4}, }, }, { input: "+5.5e-3", expected: &NumberLiteral{ Val: 0.0055, - PosRange: PositionRange{Start: 0, End: 7}, + PosRange: posrange.PositionRange{Start: 0, End: 7}, }, }, { input: "-0755", expected: &NumberLiteral{ Val: -493, - PosRange: PositionRange{Start: 0, End: 5}, + PosRange: posrange.PositionRange{Start: 0, End: 5}, }, }, { @@ -125,11 +127,11 @@ var testExpr = []struct { Op: ADD, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 4, End: 5}, + PosRange: posrange.PositionRange{Start: 4, End: 5}, }, 
}, }, @@ -139,11 +141,11 @@ var testExpr = []struct { Op: SUB, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 4, End: 5}, + PosRange: posrange.PositionRange{Start: 4, End: 5}, }, }, }, @@ -153,11 +155,11 @@ var testExpr = []struct { Op: MUL, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 4, End: 5}, + PosRange: posrange.PositionRange{Start: 4, End: 5}, }, }, }, @@ -167,11 +169,11 @@ var testExpr = []struct { Op: MOD, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 4, End: 5}, + PosRange: posrange.PositionRange{Start: 4, End: 5}, }, }, }, @@ -181,11 +183,11 @@ var testExpr = []struct { Op: DIV, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 4, End: 5}, + PosRange: posrange.PositionRange{Start: 4, End: 5}, }, }, }, @@ -195,11 +197,11 @@ var testExpr = []struct { Op: EQLC, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 10, End: 11}, + PosRange: posrange.PositionRange{Start: 10, End: 11}, }, ReturnBool: true, }, @@ -210,11 +212,11 @@ var testExpr = []struct { Op: NEQ, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 10, End: 11}, + PosRange: posrange.PositionRange{Start: 10, End: 11}, }, ReturnBool: true, }, @@ -225,11 +227,11 @@ var testExpr = []struct { Op: GTR, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 9, End: 10}, + PosRange: posrange.PositionRange{Start: 9, End: 10}, }, ReturnBool: true, }, @@ -240,11 +242,11 @@ var testExpr = []struct { Op: GTE, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 10, End: 11}, + PosRange: posrange.PositionRange{Start: 10, End: 11}, }, ReturnBool: true, }, @@ -255,11 +257,11 @@ var testExpr = []struct { Op: LSS, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 9, End: 10}, + PosRange: posrange.PositionRange{Start: 9, End: 10}, }, ReturnBool: true, }, @@ -270,11 +272,11 @@ var testExpr = []struct { Op: LTE, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 10, End: 11}, + PosRange: posrange.PositionRange{Start: 10, End: 11}, }, ReturnBool: true, }, @@ -287,11 +289,11 @@ var testExpr = []struct { Op: POW, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 1, End: 2}, + PosRange: posrange.PositionRange{Start: 1, End: 2}, }, 
RHS: &NumberLiteral{ Val: 2, - PosRange: PositionRange{Start: 3, End: 4}, + PosRange: posrange.PositionRange{Start: 3, End: 4}, }, }, }, @@ -302,11 +304,11 @@ var testExpr = []struct { Op: MUL, LHS: &NumberLiteral{ Val: -1, - PosRange: PositionRange{Start: 0, End: 2}, + PosRange: posrange.PositionRange{Start: 0, End: 2}, }, RHS: &NumberLiteral{ Val: 2, - PosRange: PositionRange{Start: 3, End: 4}, + PosRange: posrange.PositionRange{Start: 3, End: 4}, }, }, }, @@ -316,11 +318,11 @@ var testExpr = []struct { Op: ADD, LHS: &NumberLiteral{ Val: -1, - PosRange: PositionRange{Start: 0, End: 2}, + PosRange: posrange.PositionRange{Start: 0, End: 2}, }, RHS: &NumberLiteral{ Val: 2, - PosRange: PositionRange{Start: 3, End: 4}, + PosRange: posrange.PositionRange{Start: 3, End: 4}, }, }, }, @@ -332,11 +334,11 @@ var testExpr = []struct { Op: POW, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 1, End: 2}, + PosRange: posrange.PositionRange{Start: 1, End: 2}, }, RHS: &NumberLiteral{ Val: -2, - PosRange: PositionRange{Start: 3, End: 5}, + PosRange: posrange.PositionRange{Start: 3, End: 5}, }, }, }, @@ -347,17 +349,17 @@ var testExpr = []struct { Op: ADD, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 2}, + PosRange: posrange.PositionRange{Start: 0, End: 2}, }, RHS: &BinaryExpr{ Op: MUL, LHS: &NumberLiteral{ Val: -2, - PosRange: PositionRange{Start: 5, End: 7}, + PosRange: posrange.PositionRange{Start: 5, End: 7}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 10, End: 11}, + PosRange: posrange.PositionRange{Start: 10, End: 11}, }, }, }, @@ -368,27 +370,27 @@ var testExpr = []struct { Op: ADD, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &BinaryExpr{ Op: DIV, LHS: &NumberLiteral{ Val: 2, - PosRange: PositionRange{Start: 4, End: 5}, + PosRange: posrange.PositionRange{Start: 4, End: 5}, }, RHS: &ParenExpr{ Expr: &BinaryExpr{ Op: MUL, LHS: &NumberLiteral{ Val: 3, - PosRange: PositionRange{Start: 7, End: 8}, + PosRange: posrange.PositionRange{Start: 7, End: 8}, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 9, End: 10}, + PosRange: posrange.PositionRange{Start: 9, End: 10}, }, }, - PosRange: PositionRange{Start: 6, End: 11}, + PosRange: posrange.PositionRange{Start: 6, End: 11}, }, }, }, @@ -400,23 +402,23 @@ var testExpr = []struct { ReturnBool: true, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 0, End: 1}, + PosRange: posrange.PositionRange{Start: 0, End: 1}, }, RHS: &BinaryExpr{ Op: SUB, LHS: &NumberLiteral{ Val: 2, - PosRange: PositionRange{Start: 9, End: 10}, + PosRange: posrange.PositionRange{Start: 9, End: 10}, }, RHS: &BinaryExpr{ Op: MUL, LHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 13, End: 14}, + PosRange: posrange.PositionRange{Start: 13, End: 14}, }, RHS: &NumberLiteral{ Val: 2, - PosRange: PositionRange{Start: 17, End: 18}, + PosRange: posrange.PositionRange{Start: 17, End: 18}, }, }, }, @@ -431,7 +433,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 1, End: 12, }, @@ -447,7 +449,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 1, End: 12, }, @@ -463,7 +465,7 @@ var testExpr = 
[]struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 2, End: 13, }, @@ -606,7 +608,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -616,7 +618,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 6, End: 9, }, @@ -633,7 +635,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -643,7 +645,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "sum"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 6, End: 9, }, @@ -660,14 +662,14 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 7, End: 8}, + PosRange: posrange.PositionRange{Start: 7, End: 8}, }, }, }, @@ -680,14 +682,14 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, }, RHS: &NumberLiteral{ Val: 1, - PosRange: PositionRange{Start: 12, End: 13}, + PosRange: posrange.PositionRange{Start: 12, End: 13}, }, ReturnBool: true, }, @@ -698,14 +700,14 @@ var testExpr = []struct { Op: DIV, LHS: &NumberLiteral{ Val: 2.5, - PosRange: PositionRange{Start: 0, End: 3}, + PosRange: posrange.PositionRange{Start: 0, End: 3}, }, RHS: &VectorSelector{ Name: "bar", LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 6, End: 9, }, @@ -721,7 +723,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -731,7 +733,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 8, End: 11, }, @@ -748,7 +750,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -758,7 +760,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 7, End: 10, }, @@ -775,7 +777,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -785,7 +787,7 @@ var testExpr = []struct { 
LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 11, End: 14, }, @@ -805,7 +807,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -815,7 +817,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 6, End: 9, }, @@ -829,7 +831,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bla"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 13, End: 16, }, @@ -839,7 +841,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "blub"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 21, End: 25, }, @@ -863,7 +865,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -873,7 +875,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 8, End: 11, }, @@ -885,7 +887,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "baz"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 22, }, @@ -897,7 +899,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "qux"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 26, End: 29, }, @@ -915,7 +917,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -927,7 +929,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bla"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 14, End: 17, }, @@ -937,7 +939,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "blub"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 51, End: 55, }, @@ -965,7 +967,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -975,7 +977,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 20, End: 23, }, @@ -996,7 +998,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1006,7 +1008,7 @@ var testExpr = []struct { LabelMatchers: 
[]*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 31, End: 34, }, @@ -1027,7 +1029,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1037,7 +1039,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 22, End: 25, }, @@ -1058,7 +1060,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1068,7 +1070,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 13, End: 16, }, @@ -1089,7 +1091,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1099,7 +1101,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 28, End: 31, }, @@ -1119,7 +1121,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1129,7 +1131,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 22, }, @@ -1149,7 +1151,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1159,7 +1161,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "baz"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 22, }, @@ -1180,7 +1182,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1190,7 +1192,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 36, End: 39, }, @@ -1212,7 +1214,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1222,7 +1224,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 43, End: 46, }, @@ -1243,7 +1245,7 @@ var testExpr = []struct { 
LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1253,7 +1255,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 42, End: 45, }, @@ -1274,7 +1276,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1284,7 +1286,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 41, End: 44, }, @@ -1306,7 +1308,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1316,7 +1318,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 47, End: 50, }, @@ -1431,7 +1433,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1444,7 +1446,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "min"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1458,7 +1460,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 13, }, @@ -1472,7 +1474,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 14, }, @@ -1486,7 +1488,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 16, }, @@ -1500,7 +1502,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 17, }, @@ -1514,7 +1516,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 16, }, @@ -1528,7 +1530,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 10, }, @@ -1542,7 +1544,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 8, }, @@ -1556,7 +1558,7 @@ var testExpr = []struct { 
LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 8, }, @@ -1570,7 +1572,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 10, }, @@ -1584,7 +1586,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 12, }, @@ -1598,7 +1600,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 12, }, @@ -1612,7 +1614,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 9, }, @@ -1626,7 +1628,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 10, }, @@ -1640,7 +1642,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 9, }, @@ -1654,7 +1656,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 12, }, @@ -1693,7 +1695,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "a", "bc"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo:bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 15, }, @@ -1707,7 +1709,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "NaN", "bc"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 13, }, @@ -1721,7 +1723,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "bar", "}"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 12, }, @@ -1738,7 +1740,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchNotRegexp, "bar", "baz"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 48, }, @@ -1755,7 +1757,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchNotRegexp, "bar", "baz"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 49, }, @@ -1877,7 +1879,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "test"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 4, }, @@ -1894,7 +1896,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "test"), }, - PosRange: PositionRange{ + PosRange: 
posrange.PositionRange{ Start: 0, End: 4, }, @@ -1911,7 +1913,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -1929,7 +1931,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "test"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 4, }, @@ -1947,7 +1949,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "test"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 4, }, @@ -1965,7 +1967,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "test"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 4, }, @@ -1984,7 +1986,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "a", "b"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "test"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 11, }, @@ -2003,7 +2005,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "a", "b"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "test"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 11, }, @@ -2111,13 +2113,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 13, End: 24, }, }, Grouping: []string{"foo"}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 25, }, @@ -2132,13 +2134,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 13, End: 24, }, }, Grouping: []string{"foo"}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 25, }, @@ -2153,13 +2155,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 13, End: 24, }, }, Grouping: []string{"foo"}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 25, }, @@ -2175,13 +2177,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 30, }, }, Grouping: []string{"foo"}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 31, }, @@ -2197,13 +2199,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 5, End: 16, }, }, Grouping: []string{"foo"}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 31, }, @@ -2218,12 +2220,12 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 7, End: 18, }, 
}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 19, }, @@ -2238,13 +2240,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 16, End: 27, }, }, Grouping: []string{"foo"}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 28, }, @@ -2259,13 +2261,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 10, End: 21, }, }, Grouping: []string{}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 22, }, @@ -2280,13 +2282,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 18, End: 29, }, }, Grouping: []string{"foo", "bar"}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 30, }, @@ -2301,13 +2303,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 14, End: 25, }, }, Grouping: []string{"foo"}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 26, }, @@ -2322,19 +2324,19 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 8, End: 19, }, }, Param: &NumberLiteral{ Val: 5, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 5, End: 6, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 20, }, @@ -2349,19 +2351,19 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 22, End: 33, }, }, Param: &StringLiteral{ Val: "value", - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 13, End: 20, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 34, }, @@ -2378,13 +2380,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 53, End: 64, }, }, Grouping: []string{"and", "by", "avg", "count", "alert", "annotations"}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 65, }, @@ -2476,7 +2478,7 @@ var testExpr = []struct { expected: &Call{ Func: MustGetFunction("time"), Args: Expressions{}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 6, }, @@ -2493,13 +2495,13 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchNotEqual, "foo", "bar"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 6, End: 29, }, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 30, }, @@ -2516,7 +2518,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, 
model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 5, End: 16, }, @@ -2525,7 +2527,7 @@ var testExpr = []struct { EndPos: 20, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 21, }, @@ -2541,13 +2543,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 6, End: 17, }, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 18, }, @@ -2563,20 +2565,20 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 6, End: 17, }, }, &NumberLiteral{ Val: 5, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 20, }, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 21, }, @@ -2668,12 +2670,12 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "sum"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 4, End: 7, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 8, }, @@ -2688,7 +2690,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "a"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 1, }, @@ -2698,7 +2700,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "sum"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 4, End: 7, }, @@ -2711,42 +2713,42 @@ var testExpr = []struct { input: `"double-quoted string \" with escaped quote"`, expected: &StringLiteral{ Val: "double-quoted string \" with escaped quote", - PosRange: PositionRange{Start: 0, End: 44}, + PosRange: posrange.PositionRange{Start: 0, End: 44}, }, }, { input: `'single-quoted string \' with escaped quote'`, expected: &StringLiteral{ Val: "single-quoted string ' with escaped quote", - PosRange: PositionRange{Start: 0, End: 44}, + PosRange: posrange.PositionRange{Start: 0, End: 44}, }, }, { input: "`backtick-quoted string`", expected: &StringLiteral{ Val: "backtick-quoted string", - PosRange: PositionRange{Start: 0, End: 24}, + PosRange: posrange.PositionRange{Start: 0, End: 24}, }, }, { input: `"\a\b\f\n\r\t\v\\\" - \xFF\377\u1234\U00010111\U0001011111☺"`, expected: &StringLiteral{ Val: "\a\b\f\n\r\t\v\\\" - \xFF\377\u1234\U00010111\U0001011111☺", - PosRange: PositionRange{Start: 0, End: 62}, + PosRange: posrange.PositionRange{Start: 0, End: 62}, }, }, { input: `'\a\b\f\n\r\t\v\\\' - \xFF\377\u1234\U00010111\U0001011111☺'`, expected: &StringLiteral{ Val: "\a\b\f\n\r\t\v\\' - \xFF\377\u1234\U00010111\U0001011111☺", - PosRange: PositionRange{Start: 0, End: 62}, + PosRange: posrange.PositionRange{Start: 0, End: 62}, }, }, { input: "`" + `\a\b\f\n\r\t\v\\\"\' - \xFF\377\u1234\U00010111\U0001011111☺` + "`", expected: &StringLiteral{ Val: `\a\b\f\n\r\t\v\\\"\' - \xFF\377\u1234\U00010111\U0001011111☺`, - PosRange: PositionRange{Start: 0, End: 64}, + PosRange: posrange.PositionRange{Start: 0, End: 64}, }, }, { @@ -2779,7 +2781,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "bar", "baz"), 
MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 14, }, @@ -2798,7 +2800,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "bar", "baz"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 14, }, @@ -2816,7 +2818,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -2841,7 +2843,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "bar", "baz"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 33, }, @@ -2850,7 +2852,7 @@ var testExpr = []struct { EndPos: 37, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 14, End: 38, }, @@ -2861,7 +2863,7 @@ var testExpr = []struct { EndPos: 45, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 46, }, @@ -2884,7 +2886,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "bar", "baz"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 33, }, @@ -2893,7 +2895,7 @@ var testExpr = []struct { EndPos: 37, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 14, End: 38, }, @@ -2902,7 +2904,7 @@ var testExpr = []struct { EndPos: 43, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 44, }, @@ -2929,7 +2931,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "bar", "baz"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 33, }, @@ -2938,7 +2940,7 @@ var testExpr = []struct { EndPos: 37, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 14, End: 38, }, @@ -2948,7 +2950,7 @@ var testExpr = []struct { EndPos: 53, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 54, }, @@ -2975,7 +2977,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "bar", "baz"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 33, }, @@ -2984,7 +2986,7 @@ var testExpr = []struct { EndPos: 37, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 14, End: 38, }, @@ -2994,7 +2996,7 @@ var testExpr = []struct { EndPos: 56, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 57, }, @@ -3021,7 +3023,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "bar", "baz"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 33, }, @@ -3030,7 +3032,7 @@ var testExpr = []struct { EndPos: 37, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 14, End: 38, }, @@ -3040,7 +3042,7 @@ var testExpr = []struct { EndPos: 56, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 57, }, @@ -3061,13 +3063,13 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, 
model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 53, End: 64, }, }, Grouping: []string{"and", "by", "avg", "count", "alert", "annotations"}, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 65, }, @@ -3085,7 +3087,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 21, }, @@ -3104,7 +3106,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 17, }, @@ -3123,7 +3125,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 27, }, @@ -3143,7 +3145,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 27, }, @@ -3163,7 +3165,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "some_metric"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 11, }, @@ -3189,7 +3191,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 1, End: 4, }, @@ -3200,13 +3202,13 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "nm", "val"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 7, End: 20, }, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 21, }, @@ -3229,7 +3231,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 1, End: 4, }, @@ -3240,13 +3242,13 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "nm", "val"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 7, End: 20, }, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 21, }, @@ -3270,7 +3272,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 1, End: 4, }, @@ -3282,13 +3284,13 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, Timestamp: makeInt64Pointer(1234000), - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 7, End: 27, }, }, }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 28, }, @@ -3327,7 +3329,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 13, }, @@ -3341,7 +3343,7 @@ var testExpr = []struct { LabelMatchers: 
[]*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 11, }, @@ -3356,7 +3358,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "test"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 4, }, @@ -3374,7 +3376,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "test"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 4, }, @@ -3391,7 +3393,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -3410,7 +3412,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -3439,7 +3441,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "start"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 5, }, @@ -3452,7 +3454,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "end"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -3466,7 +3468,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "end", "foo"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "start"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 16, }, @@ -3480,7 +3482,7 @@ var testExpr = []struct { MustLabelMatcher(labels.MatchEqual, "start", "foo"), MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "end"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 16, }, @@ -3495,7 +3497,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -3505,7 +3507,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 21, End: 24, }, @@ -3526,7 +3528,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 0, End: 3, }, @@ -3536,7 +3538,7 @@ var testExpr = []struct { LabelMatchers: []*labels.Matcher{ MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "bar"), }, - PosRange: PositionRange{ + PosRange: posrange.PositionRange{ Start: 19, End: 22, }, @@ -3604,7 +3606,7 @@ func TestParseExpressions(t *testing.T) { for _, e := range errorList { require.True(t, 0 <= e.PositionRange.Start, "parse error has negative position\nExpression '%s'\nError: %v", test.input, e) require.True(t, e.PositionRange.Start <= e.PositionRange.End, "parse error has negative length\nExpression '%s'\nError: %v", test.input, e) - require.True(t, e.PositionRange.End <= Pos(len(test.input)), "parse error is not contained in input\nExpression '%s'\nError: %v", 
test.input, e)
+				require.True(t, e.PositionRange.End <= posrange.Pos(len(test.input)), "parse error is not contained in input\nExpression '%s'\nError: %v", test.input, e)
 			}
 		}
 	})
diff --git a/promql/parser/posrange/posrange.go b/promql/parser/posrange/posrange.go
new file mode 100644
index 0000000000..531fd8a30c
--- /dev/null
+++ b/promql/parser/posrange/posrange.go
@@ -0,0 +1,54 @@
+// Copyright 2023 The Prometheus Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package posrange is used to report a position in query strings for
+// error and warning messages.
+package posrange
+
+import "fmt"
+
+// Pos is the position in a string.
+// Negative numbers indicate undefined positions.
+type Pos int
+
+// PositionRange describes a position in the input string of the parser.
+type PositionRange struct {
+	Start Pos
+	End   Pos
+}
+
+// StartPosInput uses the query string to convert the PositionRange into a
+// line:col string, or returns a descriptive placeholder when that is not
+// possible (an empty query or an out-of-range position). When converting a
+// ParseErr to a string, lineOffset is an additional line offset to add; it
+// is only used inside unit tests.
+func (p PositionRange) StartPosInput(query string, lineOffset int) string {
+	if query == "" {
+		return "unknown position"
+	}
+	pos := int(p.Start)
+	if pos < 0 || pos > len(query) {
+		return "invalid position"
+	}
+
+	lastLineBreak := -1
+	line := lineOffset + 1
+	for i, c := range query[:pos] {
+		if c == '\n' {
+			lastLineBreak = i
+			line++
+		}
+	}
+	col := pos - lastLineBreak
+	return fmt.Sprintf("%d:%d", line, col)
+}
diff --git a/promql/test.go b/promql/test.go
index f0fd7dab34..f6a31ee431 100644
--- a/promql/test.go
+++ b/promql/test.go
@@ -34,6 +34,7 @@ import (
 	"github.com/prometheus/prometheus/model/labels"
 	"github.com/prometheus/prometheus/model/timestamp"
 	"github.com/prometheus/prometheus/promql/parser"
+	"github.com/prometheus/prometheus/promql/parser/posrange"
 	"github.com/prometheus/prometheus/storage"
 	"github.com/prometheus/prometheus/util/teststorage"
 	"github.com/prometheus/prometheus/util/testutil"
@@ -197,7 +198,7 @@ func (t *test) parseEval(lines []string, i int) (int, *evalCmd, error) {
 	if err != nil {
 		parser.EnrichParseError(err, func(parseErr *parser.ParseErr) {
 			parseErr.LineOffset = i
-			posOffset := parser.Pos(strings.Index(lines[i], expr))
+			posOffset := posrange.Pos(strings.Index(lines[i], expr))
 			parseErr.PositionRange.Start += posOffset
 			parseErr.PositionRange.End += posOffset
 			parseErr.Query = lines[i]
diff --git a/promql/test_test.go b/promql/test_test.go
index cc1df62d09..ee2a0e264b 100644
--- a/promql/test_test.go
+++ b/promql/test_test.go
@@ -123,7 +123,7 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
 			// Check the series.
 			queryable := suite.Queryable()
-			querier, err := queryable.Querier(suite.Context(), math.MinInt64, math.MaxInt64)
+			querier, err := queryable.Querier(math.MinInt64, math.MaxInt64)
 			require.NoError(t, err)
 			for _, s := range tc.series {
 				var matchers []*labels.Matcher
@@ -134,7 +134,7 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
 				})

 				// Get the series for the matcher.
-				ss := querier.Select(false, nil, matchers...)
+				ss := querier.Select(suite.Context(), false, nil, matchers...)
 				require.True(t, ss.Next())
 				storageSeries := ss.At()
 				require.False(t, ss.Next(), "Expecting only 1 series")
diff --git a/promql/value.go b/promql/value.go
index 1b2a9d221d..68e37f37ee 100644
--- a/promql/value.go
+++ b/promql/value.go
@@ -24,8 +24,8 @@ import (
 	"github.com/prometheus/prometheus/model/histogram"
 	"github.com/prometheus/prometheus/model/labels"
 	"github.com/prometheus/prometheus/promql/parser"
-	"github.com/prometheus/prometheus/storage"
 	"github.com/prometheus/prometheus/tsdb/chunkenc"
+	"github.com/prometheus/prometheus/util/annotations"
 )

 func (Matrix) Type() parser.ValueType { return parser.ValueTypeMatrix }
@@ -303,7 +303,7 @@ func (m Matrix) ContainsSameLabelset() bool {
 type Result struct {
 	Err      error
 	Value    parser.Value
-	Warnings storage.Warnings
+	Warnings annotations.Annotations
 }

 // Vector returns a Vector if the result value is one. An error is returned if
diff --git a/rules/alerting.go b/rules/alerting.go
index e517753a8e..efee35d644 100644
--- a/rules/alerting.go
+++ b/rules/alerting.go
@@ -261,7 +261,7 @@ func (r *AlertingRule) forStateSample(alert *Alert, ts time.Time, v float64) pro
 }

 // QueryforStateSeries returns the series for ALERTS_FOR_STATE.
-func (r *AlertingRule) QueryforStateSeries(alert *Alert, q storage.Querier) (storage.Series, error) {
+func (r *AlertingRule) QueryforStateSeries(ctx context.Context, alert *Alert, q storage.Querier) (storage.Series, error) {
 	smpl := r.forStateSample(alert, time.Now(), 0)
 	var matchers []*labels.Matcher
 	smpl.Metric.Range(func(l labels.Label) {
@@ -271,7 +271,7 @@ func (r *AlertingRule) QueryforStateSeries(alert *Alert, q storage.Querier) (sto
 		}
 		matchers = append(matchers, mt)
 	})
-	sset := q.Select(false, nil, matchers...)
+	sset := q.Select(ctx, false, nil, matchers...)

 	var s storage.Series
 	for sset.Next() {
diff --git a/rules/alerting_test.go b/rules/alerting_test.go
index cf5e31bf69..19d804a816 100644
--- a/rules/alerting_test.go
+++ b/rules/alerting_test.go
@@ -659,7 +659,7 @@ func TestQueryForStateSeries(t *testing.T) {
 			ValidUntil: time.Time{},
 		}

-		series, err := rule.QueryforStateSeries(alert, querier)
+		series, err := rule.QueryforStateSeries(context.Background(), alert, querier)

 		require.Equal(t, tst.expectedSeries, series)
 		require.Equal(t, tst.expectedError, err)
diff --git a/rules/manager.go b/rules/manager.go
index a506c7bf36..4ec2e90827 100644
--- a/rules/manager.go
+++ b/rules/manager.go
@@ -844,7 +844,7 @@ func (g *Group) RestoreForState(ts time.Time) {
 	// We allow restoration only if alerts were active before after certain time.
 	mint := ts.Add(-g.opts.OutageTolerance)
 	mintMS := int64(model.TimeFromUnixNano(mint.UnixNano()))
-	q, err := g.opts.Queryable.Querier(g.opts.Context, mintMS, maxtMS)
+	q, err := g.opts.Queryable.Querier(mintMS, maxtMS)
 	if err != nil {
 		level.Error(g.logger).Log("msg", "Failed to get Querier", "err", err)
 		return
@@ -873,7 +873,7 @@ func (g *Group) RestoreForState(ts time.Time) {
 		alertRule.ForEachActiveAlert(func(a *Alert) {
 			var s storage.Series

-			s, err := alertRule.QueryforStateSeries(a, q)
+			s, err := alertRule.QueryforStateSeries(g.opts.Context, a, q)
 			if err != nil {
 				// Querier Warnings are ignored. We do not care unless we have an error.
 				level.Error(g.logger).Log(
diff --git a/rules/manager_test.go b/rules/manager_test.go
index b538231914..ed4b52b1c1 100644
--- a/rules/manager_test.go
+++ b/rules/manager_test.go
@@ -572,14 +572,14 @@ func TestStaleness(t *testing.T) {
 	group.Eval(ctx, time.Unix(1, 0).Add(evalDelay))
 	group.Eval(ctx, time.Unix(2, 0).Add(evalDelay))

-	querier, err := st.Querier(context.Background(), 0, 2000)
+	querier, err := st.Querier(0, 2000)
 	require.NoError(t, err)
 	defer querier.Close()

 	matcher, err := labels.NewMatcher(labels.MatchEqual, model.MetricNameLabel, "a_plus_one")
 	require.NoError(t, err)

-	set := querier.Select(false, nil, matcher)
+	set := querier.Select(ctx, false, nil, matcher)
 	samples, err := readSeriesSet(set)
 	require.NoError(t, err)
@@ -696,14 +696,14 @@ func TestDeletedRuleMarkedStale(t *testing.T) {
 	newGroup.Eval(context.Background(), time.Unix(0, 0))

-	querier, err := st.Querier(context.Background(), 0, 2000)
+	querier, err := st.Querier(0, 2000)
 	require.NoError(t, err)
 	defer querier.Close()

 	matcher, err := labels.NewMatcher(labels.MatchEqual, "l1", "v1")
 	require.NoError(t, err)

-	set := querier.Select(false, nil, matcher)
+	set := querier.Select(context.Background(), false, nil, matcher)
 	samples, err := readSeriesSet(set)
 	require.NoError(t, err)
@@ -1359,14 +1359,14 @@ func TestMetricsStalenessOnManagerShutdown(t *testing.T) {
 func countStaleNaN(t *testing.T, st storage.Storage) int {
 	var c int
-	querier, err := st.Querier(context.Background(), 0, time.Now().Unix()*1000)
+	querier, err := st.Querier(0, time.Now().Unix()*1000)
 	require.NoError(t, err)
 	defer querier.Close()

 	matcher, err := labels.NewMatcher(labels.MatchEqual, model.MetricNameLabel, "test_2")
 	require.NoError(t, err)

-	set := querier.Select(false, nil, matcher)
+	set := querier.Select(context.Background(), false, nil, matcher)
 	samples, err := readSeriesSet(set)
 	require.NoError(t, err)
@@ -1848,9 +1848,9 @@ func TestNativeHistogramsInRecordingRules(t *testing.T) {
 	group.Eval(context.Background(), ts.Add(10*time.Second))

-	q, err := db.Querier(context.Background(), ts.UnixMilli(), ts.Add(20*time.Second).UnixMilli())
+	q, err := db.Querier(ts.UnixMilli(), ts.Add(20*time.Second).UnixMilli())
 	require.NoError(t, err)
-	ss := q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "__name__", "sum:histogram_metric"))
+	ss := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "__name__", "sum:histogram_metric"))
 	require.True(t, ss.Next())
 	s := ss.At()
 	require.False(t, ss.Next())
diff --git a/scrape/scrape_test.go b/scrape/scrape_test.go
index 4c80d1d988..3b7d6a7abb 100644
--- a/scrape/scrape_test.go
+++ b/scrape/scrape_test.go
@@ -2925,9 +2925,9 @@ func TestScrapeLoopDiscardDuplicateLabels(t *testing.T) {
 	require.Error(t, err)
 	require.NoError(t, slApp.Rollback())

-	q, err := s.Querier(ctx, time.Time{}.UnixNano(), 0)
+	q, err := s.Querier(time.Time{}.UnixNano(), 0)
 	require.NoError(t, err)
-	series := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*"))
+	series := q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*"))
 	require.Equal(t, false, series.Next(), "series found in tsdb")
 	require.NoError(t, series.Err())
@@ -2937,9 +2937,9 @@ func TestScrapeLoopDiscardDuplicateLabels(t *testing.T) {
 	require.NoError(t, err)
 	require.NoError(t, slApp.Commit())

-	q, err = s.Querier(ctx, time.Time{}.UnixNano(), 0)
+	q, err = s.Querier(time.Time{}.UnixNano(), 0)
 	require.NoError(t, err)
-	series = q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "le", "500"))
+	series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "le", "500"))
 	require.Equal(t, true, series.Next(), "series not found in tsdb")
 	require.NoError(t, series.Err())
 	require.Equal(t, false, series.Next(), "more than one series found in tsdb")
@@ -2984,9 +2984,9 @@ func TestScrapeLoopDiscardUnnamedMetrics(t *testing.T) {
 	require.NoError(t, slApp.Rollback())
 	require.Equal(t, errNameLabelMandatory, err)

-	q, err := s.Querier(ctx, time.Time{}.UnixNano(), 0)
+	q, err := s.Querier(time.Time{}.UnixNano(), 0)
 	require.NoError(t, err)
-	series := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*"))
+	series := q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*"))
 	require.Equal(t, false, series.Next(), "series found in tsdb")
 	require.NoError(t, series.Err())
 }
@@ -3346,9 +3346,9 @@ func TestScrapeReportSingleAppender(t *testing.T) {
 	start := time.Now()
 	for time.Since(start) < 3*time.Second {
-		q, err := s.Querier(ctx, time.Time{}.UnixNano(), time.Now().UnixNano())
+		q, err := s.Querier(time.Time{}.UnixNano(), time.Now().UnixNano())
 		require.NoError(t, err)
-		series := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".+"))
+		series := q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".+"))

 		c := 0
 		for series.Next() {
@@ -3418,10 +3418,10 @@ func TestScrapeReportLimit(t *testing.T) {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()

-	q, err := s.Querier(ctx, time.Time{}.UnixNano(), time.Now().UnixNano())
+	q, err := s.Querier(time.Time{}.UnixNano(), time.Now().UnixNano())
 	require.NoError(t, err)
 	defer q.Close()
-	series := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", "up"))
+	series := q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", "up"))

 	var found bool
 	for series.Next() {
diff --git a/scripts/golangci-lint.yml b/scripts/golangci-lint.yml
index a619a7325d..8ace97bde1 100644
--- a/scripts/golangci-lint.yml
+++ b/scripts/golangci-lint.yml
@@ -18,15 +18,15 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - name: Checkout repository
-       uses: actions/checkout@v3
+       uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
      - name: install Go
-       uses: actions/setup-go@v3
+       uses: actions/setup-go@6edd4406fa81c3da01a34fa6f6343087c207a568 # v3.5.0
        with:
          go-version: 1.20.x
      - name: Install snmp_exporter/generator dependencies
        run: sudo apt-get update && sudo apt-get -y install libsnmp-dev
        if: github.repository == 'prometheus/snmp_exporter'
      - name: Lint
-       uses: golangci/golangci-lint-action@v3.4.0
+       uses: golangci/golangci-lint-action@3a919529898de77ec3da873e3063ca4b10e7f5cc # v3.7.0
        with:
          version: v1.54.2
diff --git a/storage/buffer.go b/storage/buffer.go
index b1b5f81484..6ff7b8a1ba 100644
--- a/storage/buffer.go
+++ b/storage/buffer.go
@@ -42,7 +42,6 @@ func NewBuffer(delta int64) *BufferedSeriesIterator {
 // NewBufferIterator returns a new iterator that buffers the values within the
 // time range of the current element and the duration of delta before.
 func NewBufferIterator(it chunkenc.Iterator, delta int64) *BufferedSeriesIterator {
-	// TODO(codesome): based on encoding, allocate different buffer.
 	bit := &BufferedSeriesIterator{
 		buf:   newSampleRing(delta, 0, chunkenc.ValNone),
 		delta: delta,
diff --git a/storage/fanout.go b/storage/fanout.go
index a9db4f6280..33257046f2 100644
--- a/storage/fanout.go
+++ b/storage/fanout.go
@@ -72,15 +72,15 @@ func (f *fanout) StartTime() (int64, error) {
 	return firstTime, nil
 }

-func (f *fanout) Querier(ctx context.Context, mint, maxt int64) (Querier, error) {
-	primary, err := f.primary.Querier(ctx, mint, maxt)
+func (f *fanout) Querier(mint, maxt int64) (Querier, error) {
+	primary, err := f.primary.Querier(mint, maxt)
 	if err != nil {
 		return nil, err
 	}

 	secondaries := make([]Querier, 0, len(f.secondaries))
 	for _, storage := range f.secondaries {
-		querier, err := storage.Querier(ctx, mint, maxt)
+		querier, err := storage.Querier(mint, maxt)
 		if err != nil {
 			// Close already open Queriers, append potential errors to returned error.
 			errs := tsdb_errors.NewMulti(err, primary.Close())
@@ -94,15 +94,15 @@ func (f *fanout) Querier(ctx context.Context, mint, maxt int64) (Querier, error)
 	return NewMergeQuerier([]Querier{primary}, secondaries, ChainedSeriesMerge), nil
 }

-func (f *fanout) ChunkQuerier(ctx context.Context, mint, maxt int64) (ChunkQuerier, error) {
-	primary, err := f.primary.ChunkQuerier(ctx, mint, maxt)
+func (f *fanout) ChunkQuerier(mint, maxt int64) (ChunkQuerier, error) {
+	primary, err := f.primary.ChunkQuerier(mint, maxt)
 	if err != nil {
 		return nil, err
 	}

 	secondaries := make([]ChunkQuerier, 0, len(f.secondaries))
 	for _, storage := range f.secondaries {
-		querier, err := storage.ChunkQuerier(ctx, mint, maxt)
+		querier, err := storage.ChunkQuerier(mint, maxt)
 		if err != nil {
 			// Close already open Queriers, append potential errors to returned error.
errs := tsdb_errors.NewMulti(err, primary.Close()) diff --git a/storage/fanout_test.go b/storage/fanout_test.go index b4490636df..0f9363d7a7 100644 --- a/storage/fanout_test.go +++ b/storage/fanout_test.go @@ -24,6 +24,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/tsdb/chunkenc" + "github.com/prometheus/prometheus/util/annotations" "github.com/prometheus/prometheus/util/teststorage" ) @@ -75,14 +76,14 @@ func TestFanout_SelectSorted(t *testing.T) { fanoutStorage := storage.NewFanout(nil, priStorage, remoteStorage1, remoteStorage2) t.Run("querier", func(t *testing.T) { - querier, err := fanoutStorage.Querier(context.Background(), 0, 8000) + querier, err := fanoutStorage.Querier(0, 8000) require.NoError(t, err) defer querier.Close() matcher, err := labels.NewMatcher(labels.MatchEqual, model.MetricNameLabel, "a") require.NoError(t, err) - seriesSet := querier.Select(true, nil, matcher) + seriesSet := querier.Select(ctx, true, nil, matcher) result := make(map[int64]float64) var labelsResult labels.Labels @@ -102,14 +103,14 @@ func TestFanout_SelectSorted(t *testing.T) { require.Equal(t, inputTotalSize, len(result)) }) t.Run("chunk querier", func(t *testing.T) { - querier, err := fanoutStorage.ChunkQuerier(ctx, 0, 8000) + querier, err := fanoutStorage.ChunkQuerier(0, 8000) require.NoError(t, err) defer querier.Close() matcher, err := labels.NewMatcher(labels.MatchEqual, model.MetricNameLabel, "a") require.NoError(t, err) - seriesSet := storage.NewSeriesSetFromChunkSeriesSet(querier.Select(true, nil, matcher)) + seriesSet := storage.NewSeriesSetFromChunkSeriesSet(querier.Select(ctx, true, nil, matcher)) result := make(map[int64]float64) var labelsResult labels.Labels @@ -159,12 +160,12 @@ func TestFanoutErrors(t *testing.T) { fanoutStorage := storage.NewFanout(nil, tc.primary, tc.secondary) t.Run("samples", func(t *testing.T) { - querier, err := fanoutStorage.Querier(context.Background(), 0, 8000) + querier, err := fanoutStorage.Querier(0, 8000) require.NoError(t, err) defer querier.Close() matcher := labels.MustNewMatcher(labels.MatchEqual, "a", "b") - ss := querier.Select(true, nil, matcher) + ss := querier.Select(context.Background(), true, nil, matcher) // Exhaust. for ss.Next() { @@ -178,18 +179,19 @@ func TestFanoutErrors(t *testing.T) { if tc.warning != nil { require.Greater(t, len(ss.Warnings()), 0, "warnings expected") - require.Error(t, ss.Warnings()[0]) - require.Equal(t, tc.warning.Error(), ss.Warnings()[0].Error()) + w := ss.Warnings() + require.Error(t, w.AsErrors()[0]) + require.Equal(t, tc.warning.Error(), w.AsStrings("", 0)[0]) } }) t.Run("chunks", func(t *testing.T) { t.Skip("enable once TestStorage and TSDB implements ChunkQuerier") - querier, err := fanoutStorage.ChunkQuerier(context.Background(), 0, 8000) + querier, err := fanoutStorage.ChunkQuerier(0, 8000) require.NoError(t, err) defer querier.Close() matcher := labels.MustNewMatcher(labels.MatchEqual, "a", "b") - ss := querier.Select(true, nil, matcher) + ss := querier.Select(context.Background(), true, nil, matcher) // Exhaust. 
for ss.Next() { @@ -203,8 +205,9 @@ func TestFanoutErrors(t *testing.T) { if tc.warning != nil { require.Greater(t, len(ss.Warnings()), 0, "warnings expected") - require.Error(t, ss.Warnings()[0]) - require.Equal(t, tc.warning.Error(), ss.Warnings()[0].Error()) + w := ss.Warnings() + require.Error(t, w.AsErrors()[0]) + require.Equal(t, tc.warning.Error(), w.AsStrings("", 0)[0]) } }) } @@ -216,33 +219,33 @@ type errStorage struct{} type errQuerier struct{} -func (errStorage) Querier(_ context.Context, _, _ int64) (storage.Querier, error) { +func (errStorage) Querier(_, _ int64) (storage.Querier, error) { return errQuerier{}, nil } type errChunkQuerier struct{ errQuerier } -func (errStorage) ChunkQuerier(_ context.Context, _, _ int64) (storage.ChunkQuerier, error) { +func (errStorage) ChunkQuerier(_, _ int64) (storage.ChunkQuerier, error) { return errChunkQuerier{}, nil } func (errStorage) Appender(_ context.Context) storage.Appender { return nil } func (errStorage) StartTime() (int64, error) { return 0, nil } func (errStorage) Close() error { return nil } -func (errQuerier) Select(bool, *storage.SelectHints, ...*labels.Matcher) storage.SeriesSet { +func (errQuerier) Select(context.Context, bool, *storage.SelectHints, ...*labels.Matcher) storage.SeriesSet { return storage.ErrSeriesSet(errSelect) } -func (errQuerier) LabelValues(string, ...*labels.Matcher) ([]string, storage.Warnings, error) { +func (errQuerier) LabelValues(context.Context, string, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, errors.New("label values error") } -func (errQuerier) LabelNames(...*labels.Matcher) ([]string, storage.Warnings, error) { +func (errQuerier) LabelNames(context.Context, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, errors.New("label names error") } func (errQuerier) Close() error { return nil } -func (errChunkQuerier) Select(bool, *storage.SelectHints, ...*labels.Matcher) storage.ChunkSeriesSet { +func (errChunkQuerier) Select(context.Context, bool, *storage.SelectHints, ...*labels.Matcher) storage.ChunkSeriesSet { return storage.ErrChunkSeriesSet(errSelect) } diff --git a/storage/generic.go b/storage/generic.go index 6762f32a1a..e5f4b4d03a 100644 --- a/storage/generic.go +++ b/storage/generic.go @@ -17,19 +17,22 @@ package storage import ( + "context" + "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/util/annotations" ) type genericQuerier interface { LabelQuerier - Select(bool, *SelectHints, ...*labels.Matcher) genericSeriesSet + Select(context.Context, bool, *SelectHints, ...*labels.Matcher) genericSeriesSet } type genericSeriesSet interface { Next() bool At() Labels Err() error - Warnings() Warnings + Warnings() annotations.Annotations } type genericSeriesMergeFunc func(...Labels) Labels @@ -58,11 +61,11 @@ type genericQuerierAdapter struct { cq ChunkQuerier } -func (q *genericQuerierAdapter) Select(sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) genericSeriesSet { +func (q *genericQuerierAdapter) Select(ctx context.Context, sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) genericSeriesSet { if q.q != nil { - return &genericSeriesSetAdapter{q.q.Select(sortSeries, hints, matchers...)} + return &genericSeriesSetAdapter{q.q.Select(ctx, sortSeries, hints, matchers...)} } - return &genericChunkSeriesSetAdapter{q.cq.Select(sortSeries, hints, matchers...)} + return &genericChunkSeriesSetAdapter{q.cq.Select(ctx, sortSeries, hints, matchers...)} } func 
newGenericQuerierFrom(q Querier) genericQuerier { @@ -85,8 +88,8 @@ func (a *seriesSetAdapter) At() Series { return a.genericSeriesSet.At().(Series) } -func (q *querierAdapter) Select(sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) SeriesSet { - return &seriesSetAdapter{q.genericQuerier.Select(sortSeries, hints, matchers...)} +func (q *querierAdapter) Select(ctx context.Context, sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) SeriesSet { + return &seriesSetAdapter{q.genericQuerier.Select(ctx, sortSeries, hints, matchers...)} } type chunkQuerierAdapter struct { @@ -101,8 +104,8 @@ func (a *chunkSeriesSetAdapter) At() ChunkSeries { return a.genericSeriesSet.At().(ChunkSeries) } -func (q *chunkQuerierAdapter) Select(sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) ChunkSeriesSet { - return &chunkSeriesSetAdapter{q.genericQuerier.Select(sortSeries, hints, matchers...)} +func (q *chunkQuerierAdapter) Select(ctx context.Context, sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) ChunkSeriesSet { + return &chunkSeriesSetAdapter{q.genericQuerier.Select(ctx, sortSeries, hints, matchers...)} } type seriesMergerAdapter struct { @@ -137,4 +140,4 @@ func (noopGenericSeriesSet) At() Labels { return nil } func (noopGenericSeriesSet) Err() error { return nil } -func (noopGenericSeriesSet) Warnings() Warnings { return nil } +func (noopGenericSeriesSet) Warnings() annotations.Annotations { return nil } diff --git a/storage/interface.go b/storage/interface.go index 74ddc5acad..5a2f5f4e58 100644 --- a/storage/interface.go +++ b/storage/interface.go @@ -24,6 +24,7 @@ import ( "github.com/prometheus/prometheus/model/metadata" "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunks" + "github.com/prometheus/prometheus/util/annotations" ) // The errors exposed. @@ -91,7 +92,7 @@ type ExemplarStorage interface { // Use it when you need to have access to all samples without chunk encoding abstraction, e.g. PromQL. type Queryable interface { // Querier returns a new Querier on the storage. - Querier(ctx context.Context, mint, maxt int64) (Querier, error) + Querier(mint, maxt int64) (Querier, error) } // A MockQueryable is used for testing purposes so that a mock Querier can be used. @@ -99,7 +100,7 @@ type MockQueryable struct { MockQuerier Querier } -func (q *MockQueryable) Querier(context.Context, int64, int64) (Querier, error) { +func (q *MockQueryable) Querier(int64, int64) (Querier, error) { return q.MockQuerier, nil } @@ -110,7 +111,7 @@ type Querier interface { // Select returns a set of series that matches the given label matchers. // Caller can specify if it requires returned series to be sorted. Prefer not requiring sorting for better performance. // It allows passing hints that can help in optimising select, but it's up to implementation how this is used if used at all. - Select(sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) SeriesSet + Select(ctx context.Context, sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) SeriesSet } // MockQuerier is used for test purposes to mock the selected series that is returned. 
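To make the interface change above concrete: a minimal caller-side sketch, assuming the usual context, storage, and labels imports (queryUp, db, and the matcher are illustrative only, not part of this change):

func queryUp(ctx context.Context, db storage.Queryable, mint, maxt int64) error {
	// The Querier is now constructed without a context...
	q, err := db.Querier(mint, maxt) // previously: db.Querier(ctx, mint, maxt)
	if err != nil {
		return err
	}
	defer q.Close()
	// ...and the context is instead passed per call to Select (and to LabelValues/LabelNames).
	ss := q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "__name__", "up"))
	for ss.Next() {
		_ = ss.At() // consume each series
	}
	return ss.Err()
}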
@@ -118,11 +119,11 @@ type MockQuerier struct { SelectMockFunction func(sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) SeriesSet } -func (q *MockQuerier) LabelValues(string, ...*labels.Matcher) ([]string, Warnings, error) { +func (q *MockQuerier) LabelValues(context.Context, string, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, nil } -func (q *MockQuerier) LabelNames(...*labels.Matcher) ([]string, Warnings, error) { +func (q *MockQuerier) LabelNames(context.Context, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, nil } @@ -130,7 +131,7 @@ func (q *MockQuerier) Close() error { return nil } -func (q *MockQuerier) Select(sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) SeriesSet { +func (q *MockQuerier) Select(_ context.Context, sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) SeriesSet { return q.SelectMockFunction(sortSeries, hints, matchers...) } @@ -138,7 +139,7 @@ func (q *MockQuerier) Select(sortSeries bool, hints *SelectHints, matchers ...*l // Use it when you need to have access to samples in encoded format. type ChunkQueryable interface { // ChunkQuerier returns a new ChunkQuerier on the storage. - ChunkQuerier(ctx context.Context, mint, maxt int64) (ChunkQuerier, error) + ChunkQuerier(mint, maxt int64) (ChunkQuerier, error) } // ChunkQuerier provides querying access over time series data of a fixed time range. @@ -148,7 +149,7 @@ type ChunkQuerier interface { // Select returns a set of series that matches the given label matchers. // Caller can specify if it requires returned series to be sorted. Prefer not requiring sorting for better performance. // It allows passing hints that can help in optimising select, but it's up to implementation how this is used if used at all. - Select(sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) ChunkSeriesSet + Select(ctx context.Context, sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) ChunkSeriesSet } // LabelQuerier provides querying access over labels. @@ -157,12 +158,12 @@ type LabelQuerier interface { // It is not safe to use the strings beyond the lifetime of the querier. // If matchers are specified the returned result set is reduced // to label values of metrics matching the matchers. - LabelValues(name string, matchers ...*labels.Matcher) ([]string, Warnings, error) + LabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) // LabelNames returns all the unique label names present in the block in sorted order. // If matchers are specified the returned result set is reduced // to label names of metrics matching the matchers. - LabelNames(matchers ...*labels.Matcher) ([]string, Warnings, error) + LabelNames(ctx context.Context, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) // Close releases the resources of the Querier. Close() error @@ -205,11 +206,11 @@ type SelectHints struct { // TODO(bwplotka): Move to promql/engine_test.go? // QueryableFunc is an adapter to allow the use of ordinary functions as // Queryables. It follows the idea of http.HandlerFunc. -type QueryableFunc func(ctx context.Context, mint, maxt int64) (Querier, error) +type QueryableFunc func(mint, maxt int64) (Querier, error) // Querier calls f() with the given parameters. 
-func (f QueryableFunc) Querier(ctx context.Context, mint, maxt int64) (Querier, error) { - return f(ctx, mint, maxt) +func (f QueryableFunc) Querier(mint, maxt int64) (Querier, error) { + return f(mint, maxt) } // Appender provides batched appends against a storage. @@ -310,7 +311,7 @@ type SeriesSet interface { Err() error // A collection of warnings for the whole set. // Warnings could be returned even if iteration has not failed with an error. - Warnings() Warnings + Warnings() annotations.Annotations } var emptySeriesSet = errSeriesSet{} @@ -324,10 +325,10 @@ type testSeriesSet struct { series Series } -func (s testSeriesSet) Next() bool { return true } -func (s testSeriesSet) At() Series { return s.series } -func (s testSeriesSet) Err() error { return nil } -func (s testSeriesSet) Warnings() Warnings { return nil } +func (s testSeriesSet) Next() bool { return true } +func (s testSeriesSet) At() Series { return s.series } +func (s testSeriesSet) Err() error { return nil } +func (s testSeriesSet) Warnings() annotations.Annotations { return nil } // TestSeriesSet returns a mock series set. func TestSeriesSet(series Series) SeriesSet { @@ -338,10 +339,10 @@ type errSeriesSet struct { err error } -func (s errSeriesSet) Next() bool { return false } -func (s errSeriesSet) At() Series { return nil } -func (s errSeriesSet) Err() error { return s.err } -func (s errSeriesSet) Warnings() Warnings { return nil } +func (s errSeriesSet) Next() bool { return false } +func (s errSeriesSet) At() Series { return nil } +func (s errSeriesSet) Err() error { return s.err } +func (s errSeriesSet) Warnings() annotations.Annotations { return nil } // ErrSeriesSet returns a series set that wraps an error. func ErrSeriesSet(err error) SeriesSet { @@ -359,10 +360,10 @@ type errChunkSeriesSet struct { err error } -func (s errChunkSeriesSet) Next() bool { return false } -func (s errChunkSeriesSet) At() ChunkSeries { return nil } -func (s errChunkSeriesSet) Err() error { return s.err } -func (s errChunkSeriesSet) Warnings() Warnings { return nil } +func (s errChunkSeriesSet) Next() bool { return false } +func (s errChunkSeriesSet) At() ChunkSeries { return nil } +func (s errChunkSeriesSet) Err() error { return s.err } +func (s errChunkSeriesSet) Warnings() annotations.Annotations { return nil } // ErrChunkSeriesSet returns a chunk series set that wraps an error. func ErrChunkSeriesSet(err error) ChunkSeriesSet { @@ -408,7 +409,7 @@ type ChunkSeriesSet interface { Err() error // A collection of warnings for the whole set. // Warnings could be returned even if iteration has not failed with an error. - Warnings() Warnings + Warnings() annotations.Annotations } // ChunkSeries exposes a single time series and allows iterating over chunks. @@ -442,5 +443,3 @@ type ChunkIterable interface { // chunks of the series, sorted by min time. Iterator(chunks.Iterator) chunks.Iterator } - -type Warnings []error diff --git a/storage/lazy.go b/storage/lazy.go index 62f76cb6ac..fab974c286 100644 --- a/storage/lazy.go +++ b/storage/lazy.go @@ -13,6 +13,10 @@ package storage +import ( + "github.com/prometheus/prometheus/util/annotations" +) + // lazyGenericSeriesSet is a wrapped series set that is initialised on first call to Next(). 
type lazyGenericSeriesSet struct { init func() (genericSeriesSet, bool) @@ -43,25 +47,25 @@ func (c *lazyGenericSeriesSet) At() Labels { return nil } -func (c *lazyGenericSeriesSet) Warnings() Warnings { +func (c *lazyGenericSeriesSet) Warnings() annotations.Annotations { if c.set != nil { return c.set.Warnings() } return nil } -type warningsOnlySeriesSet Warnings +type warningsOnlySeriesSet annotations.Annotations -func (warningsOnlySeriesSet) Next() bool { return false } -func (warningsOnlySeriesSet) Err() error { return nil } -func (warningsOnlySeriesSet) At() Labels { return nil } -func (c warningsOnlySeriesSet) Warnings() Warnings { return Warnings(c) } +func (warningsOnlySeriesSet) Next() bool { return false } +func (warningsOnlySeriesSet) Err() error { return nil } +func (warningsOnlySeriesSet) At() Labels { return nil } +func (c warningsOnlySeriesSet) Warnings() annotations.Annotations { return annotations.Annotations(c) } type errorOnlySeriesSet struct { err error } -func (errorOnlySeriesSet) Next() bool { return false } -func (errorOnlySeriesSet) At() Labels { return nil } -func (s errorOnlySeriesSet) Err() error { return s.err } -func (errorOnlySeriesSet) Warnings() Warnings { return nil } +func (errorOnlySeriesSet) Next() bool { return false } +func (errorOnlySeriesSet) At() Labels { return nil } +func (s errorOnlySeriesSet) Err() error { return s.err } +func (errorOnlySeriesSet) Warnings() annotations.Annotations { return nil } diff --git a/storage/merge.go b/storage/merge.go index a196b0bc0d..e979ad04ab 100644 --- a/storage/merge.go +++ b/storage/merge.go @@ -16,6 +16,7 @@ package storage import ( "bytes" "container/heap" + "context" "fmt" "math" "sync" @@ -27,6 +28,7 @@ import ( "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunks" tsdb_errors "github.com/prometheus/prometheus/tsdb/errors" + "github.com/prometheus/prometheus/util/annotations" ) type mergeGenericQuerier struct { @@ -97,19 +99,19 @@ func NewMergeChunkQuerier(primaries, secondaries []ChunkQuerier, mergeFn Vertica } // Select returns a set of series that matches the given label matchers. -func (q *mergeGenericQuerier) Select(sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) genericSeriesSet { +func (q *mergeGenericQuerier) Select(ctx context.Context, sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) genericSeriesSet { if len(q.queriers) == 0 { return noopGenericSeriesSet{} } if len(q.queriers) == 1 { - return q.queriers[0].Select(sortSeries, hints, matchers...) + return q.queriers[0].Select(ctx, sortSeries, hints, matchers...) } seriesSets := make([]genericSeriesSet, 0, len(q.queriers)) if !q.concurrentSelect { for _, querier := range q.queriers { // We need to sort for merge to work. - seriesSets = append(seriesSets, querier.Select(true, hints, matchers...)) + seriesSets = append(seriesSets, querier.Select(ctx, true, hints, matchers...)) } return &lazyGenericSeriesSet{init: func() (genericSeriesSet, bool) { s := newGenericMergeSeriesSet(seriesSets, q.mergeFn) @@ -128,7 +130,7 @@ func (q *mergeGenericQuerier) Select(sortSeries bool, hints *SelectHints, matche defer wg.Done() // We need to sort for NewMergeSeriesSet to work. - seriesSetChan <- qr.Select(true, hints, matchers...) + seriesSetChan <- qr.Select(ctx, true, hints, matchers...) }(querier) } go func() { @@ -157,8 +159,8 @@ func (l labelGenericQueriers) SplitByHalf() (labelGenericQueriers, labelGenericQ // LabelValues returns all potential values for a label name. 
// If matchers are specified the returned result set is reduced // to label values of metrics matching the matchers. -func (q *mergeGenericQuerier) LabelValues(name string, matchers ...*labels.Matcher) ([]string, Warnings, error) { - res, ws, err := q.lvals(q.queriers, name, matchers...) +func (q *mergeGenericQuerier) LabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) { + res, ws, err := q.lvals(ctx, q.queriers, name, matchers...) if err != nil { return nil, nil, fmt.Errorf("LabelValues() from merge generic querier for label %s: %w", name, err) } @@ -166,23 +168,23 @@ func (q *mergeGenericQuerier) LabelValues(name string, matchers ...*labels.Match } // lvals performs merge sort for LabelValues from multiple queriers. -func (q *mergeGenericQuerier) lvals(lq labelGenericQueriers, n string, matchers ...*labels.Matcher) ([]string, Warnings, error) { +func (q *mergeGenericQuerier) lvals(ctx context.Context, lq labelGenericQueriers, n string, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) { if lq.Len() == 0 { return nil, nil, nil } if lq.Len() == 1 { - return lq.Get(0).LabelValues(n, matchers...) + return lq.Get(0).LabelValues(ctx, n, matchers...) } a, b := lq.SplitByHalf() - var ws Warnings - s1, w, err := q.lvals(a, n, matchers...) - ws = append(ws, w...) + var ws annotations.Annotations + s1, w, err := q.lvals(ctx, a, n, matchers...) + ws.Merge(w) if err != nil { return nil, ws, err } - s2, ws, err := q.lvals(b, n, matchers...) - ws = append(ws, w...) + s2, ws, err := q.lvals(ctx, b, n, matchers...) + ws.Merge(w) if err != nil { return nil, ws, err } @@ -217,16 +219,16 @@ func mergeStrings(a, b []string) []string { } // LabelNames returns all the unique label names present in all queriers in sorted order. -func (q *mergeGenericQuerier) LabelNames(matchers ...*labels.Matcher) ([]string, Warnings, error) { +func (q *mergeGenericQuerier) LabelNames(ctx context.Context, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) { var ( labelNamesMap = make(map[string]struct{}) - warnings Warnings + warnings annotations.Annotations ) for _, querier := range q.queriers { - names, wrn, err := querier.LabelNames(matchers...) + names, wrn, err := querier.LabelNames(ctx, matchers...) if wrn != nil { // TODO(bwplotka): We could potentially wrap warnings. - warnings = append(warnings, wrn...) + warnings.Merge(wrn) } if err != nil { return nil, nil, fmt.Errorf("LabelNames() from merge generic querier: %w", err) @@ -381,10 +383,10 @@ func (c *genericMergeSeriesSet) Err() error { return nil } -func (c *genericMergeSeriesSet) Warnings() Warnings { - var ws Warnings +func (c *genericMergeSeriesSet) Warnings() annotations.Annotations { + var ws annotations.Annotations for _, set := range c.sets { - ws = append(ws, set.Warnings()...) + ws.Merge(set.Warnings()) } return ws } diff --git a/storage/merge_test.go b/storage/merge_test.go index 29d0fd753c..d28a264492 100644 --- a/storage/merge_test.go +++ b/storage/merge_test.go @@ -14,6 +14,7 @@ package storage import ( + "context" "errors" "fmt" "math" @@ -27,6 +28,7 @@ import ( "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/tsdbutil" + "github.com/prometheus/prometheus/util/annotations" ) func TestMergeQuerierWithChainMerger(t *testing.T) { @@ -187,7 +189,7 @@ func TestMergeQuerierWithChainMerger(t *testing.T) { } qs = append(qs, tc.extraQueriers...) 
- mergedQuerier := NewMergeQuerier([]Querier{p}, qs, ChainedSeriesMerge).Select(false, nil) + mergedQuerier := NewMergeQuerier([]Querier{p}, qs, ChainedSeriesMerge).Select(context.Background(), false, nil) // Get all merged series upfront to make sure there are no incorrectly retained shared // buffers causing bugs. @@ -363,7 +365,7 @@ func TestMergeChunkQuerierWithNoVerticalChunkSeriesMerger(t *testing.T) { } qs = append(qs, tc.extraQueriers...) - merged := NewMergeChunkQuerier([]ChunkQuerier{p}, qs, NewCompactingChunkSeriesMerger(nil)).Select(false, nil) + merged := NewMergeChunkQuerier([]ChunkQuerier{p}, qs, NewCompactingChunkSeriesMerger(nil)).Select(context.Background(), false, nil) for merged.Next() { require.True(t, tc.expected.Next(), "Expected Next() to be true") actualSeries := merged.At() @@ -737,7 +739,7 @@ func (a seriesByLabel) Len() int { return len(a) } func (a seriesByLabel) Swap(i, j int) { a[i], a[j] = a[j], a[i] } func (a seriesByLabel) Less(i, j int) bool { return labels.Compare(a[i].Labels(), a[j].Labels()) < 0 } -func (m *mockQuerier) Select(sortSeries bool, _ *SelectHints, _ ...*labels.Matcher) SeriesSet { +func (m *mockQuerier) Select(_ context.Context, sortSeries bool, _ *SelectHints, _ ...*labels.Matcher) SeriesSet { cpy := make([]Series, len(m.toReturn)) copy(cpy, m.toReturn) if sortSeries { @@ -761,7 +763,7 @@ func (a chunkSeriesByLabel) Less(i, j int) bool { return labels.Compare(a[i].Labels(), a[j].Labels()) < 0 } -func (m *mockChunkQurier) Select(sortSeries bool, _ *SelectHints, _ ...*labels.Matcher) ChunkSeriesSet { +func (m *mockChunkQurier) Select(_ context.Context, sortSeries bool, _ *SelectHints, _ ...*labels.Matcher) ChunkSeriesSet { cpy := make([]ChunkSeries, len(m.toReturn)) copy(cpy, m.toReturn) if sortSeries { @@ -792,7 +794,7 @@ func (m *mockSeriesSet) At() Series { return m.series[m.idx] } func (m *mockSeriesSet) Err() error { return nil } -func (m *mockSeriesSet) Warnings() Warnings { return nil } +func (m *mockSeriesSet) Warnings() annotations.Annotations { return nil } type mockChunkSeriesSet struct { idx int @@ -815,7 +817,7 @@ func (m *mockChunkSeriesSet) At() ChunkSeries { return m.series[m.idx] } func (m *mockChunkSeriesSet) Err() error { return nil } -func (m *mockChunkSeriesSet) Warnings() Warnings { return nil } +func (m *mockChunkSeriesSet) Warnings() annotations.Annotations { return nil } func TestChainSampleIterator(t *testing.T) { for _, tc := range []struct { @@ -989,7 +991,7 @@ type mockGenericQuerier struct { sortedSeriesRequested []bool resp []string - warnings Warnings + warnings annotations.Annotations err error } @@ -998,14 +1000,14 @@ type labelNameRequest struct { matchers []*labels.Matcher } -func (m *mockGenericQuerier) Select(b bool, _ *SelectHints, _ ...*labels.Matcher) genericSeriesSet { +func (m *mockGenericQuerier) Select(_ context.Context, b bool, _ *SelectHints, _ ...*labels.Matcher) genericSeriesSet { m.mtx.Lock() m.sortedSeriesRequested = append(m.sortedSeriesRequested, b) m.mtx.Unlock() return &mockGenericSeriesSet{resp: m.resp, warnings: m.warnings, err: m.err} } -func (m *mockGenericQuerier) LabelValues(name string, matchers ...*labels.Matcher) ([]string, Warnings, error) { +func (m *mockGenericQuerier) LabelValues(_ context.Context, name string, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) { m.mtx.Lock() m.labelNamesRequested = append(m.labelNamesRequested, labelNameRequest{ name: name, @@ -1015,7 +1017,7 @@ func (m *mockGenericQuerier) LabelValues(name string, matchers 
...*labels.Matche return m.resp, m.warnings, m.err } -func (m *mockGenericQuerier) LabelNames(...*labels.Matcher) ([]string, Warnings, error) { +func (m *mockGenericQuerier) LabelNames(context.Context, ...*labels.Matcher) ([]string, annotations.Annotations, error) { m.mtx.Lock() m.labelNamesCalls++ m.mtx.Unlock() @@ -1029,7 +1031,7 @@ func (m *mockGenericQuerier) Close() error { type mockGenericSeriesSet struct { resp []string - warnings Warnings + warnings annotations.Annotations err error curr int @@ -1046,8 +1048,8 @@ func (m *mockGenericSeriesSet) Next() bool { return true } -func (m *mockGenericSeriesSet) Err() error { return m.err } -func (m *mockGenericSeriesSet) Warnings() Warnings { return m.warnings } +func (m *mockGenericSeriesSet) Err() error { return m.err } +func (m *mockGenericSeriesSet) Warnings() annotations.Annotations { return m.warnings } func (m *mockGenericSeriesSet) At() Labels { return mockLabels(m.resp[m.curr-1]) @@ -1074,6 +1076,7 @@ func TestMergeGenericQuerierWithSecondaries_ErrorHandling(t *testing.T) { var ( errStorage = errors.New("storage error") warnStorage = errors.New("storage warning") + ctx = context.Background() ) for _, tcase := range []struct { name string @@ -1082,10 +1085,9 @@ func TestMergeGenericQuerierWithSecondaries_ErrorHandling(t *testing.T) { expectedSelectsSeries []labels.Labels expectedLabels []string - expectedWarnings [4]Warnings + expectedWarnings annotations.Annotations expectedErrs [4]error }{ - {}, { name: "one successful primary querier", queriers: []genericQuerier{&mockGenericQuerier{resp: []string{"a", "b"}, warnings: nil, err: nil}}, @@ -1159,31 +1161,21 @@ func TestMergeGenericQuerierWithSecondaries_ErrorHandling(t *testing.T) { expectedSelectsSeries: []labels.Labels{ labels.FromStrings("test", "a"), }, - expectedLabels: []string{"a"}, - expectedWarnings: [4]Warnings{ - []error{errStorage, errStorage}, - []error{errStorage, errStorage}, - []error{errStorage, errStorage}, - []error{errStorage, errStorage}, - }, + expectedLabels: []string{"a"}, + expectedWarnings: annotations.New().Add(errStorage), }, { name: "successful queriers with warnings", queriers: []genericQuerier{ - &mockGenericQuerier{resp: []string{"a"}, warnings: []error{warnStorage}, err: nil}, - &secondaryQuerier{genericQuerier: &mockGenericQuerier{resp: []string{"b"}, warnings: []error{warnStorage}, err: nil}}, + &mockGenericQuerier{resp: []string{"a"}, warnings: annotations.New().Add(warnStorage), err: nil}, + &secondaryQuerier{genericQuerier: &mockGenericQuerier{resp: []string{"b"}, warnings: annotations.New().Add(warnStorage), err: nil}}, }, expectedSelectsSeries: []labels.Labels{ labels.FromStrings("test", "a"), labels.FromStrings("test", "b"), }, - expectedLabels: []string{"a", "b"}, - expectedWarnings: [4]Warnings{ - []error{warnStorage, warnStorage}, - []error{warnStorage, warnStorage}, - []error{warnStorage, warnStorage}, - []error{warnStorage, warnStorage}, - }, + expectedLabels: []string{"a", "b"}, + expectedWarnings: annotations.New().Add(warnStorage), }, } { t.Run(tcase.name, func(t *testing.T) { @@ -1193,12 +1185,12 @@ func TestMergeGenericQuerierWithSecondaries_ErrorHandling(t *testing.T) { } t.Run("Select", func(t *testing.T) { - res := q.Select(false, nil) + res := q.Select(context.Background(), false, nil) var lbls []labels.Labels for res.Next() { lbls = append(lbls, res.At().Labels()) } - require.Equal(t, tcase.expectedWarnings[0], res.Warnings()) + require.Subset(t, tcase.expectedWarnings, res.Warnings()) require.Equal(t, tcase.expectedErrs[0], 
res.Err()) require.True(t, errors.Is(res.Err(), tcase.expectedErrs[0]), "expected error doesn't match") require.Equal(t, tcase.expectedSelectsSeries, lbls) @@ -1214,8 +1206,8 @@ func TestMergeGenericQuerierWithSecondaries_ErrorHandling(t *testing.T) { } }) t.Run("LabelNames", func(t *testing.T) { - res, w, err := q.LabelNames() - require.Equal(t, tcase.expectedWarnings[1], w) + res, w, err := q.LabelNames(ctx) + require.Subset(t, tcase.expectedWarnings, w) require.True(t, errors.Is(err, tcase.expectedErrs[1]), "expected error doesn't match") require.Equal(t, tcase.expectedLabels, res) @@ -1229,8 +1221,8 @@ func TestMergeGenericQuerierWithSecondaries_ErrorHandling(t *testing.T) { } }) t.Run("LabelValues", func(t *testing.T) { - res, w, err := q.LabelValues("test") - require.Equal(t, tcase.expectedWarnings[2], w) + res, w, err := q.LabelValues(ctx, "test") + require.Subset(t, tcase.expectedWarnings, w) require.True(t, errors.Is(err, tcase.expectedErrs[2]), "expected error doesn't match") require.Equal(t, tcase.expectedLabels, res) @@ -1245,8 +1237,8 @@ func TestMergeGenericQuerierWithSecondaries_ErrorHandling(t *testing.T) { }) t.Run("LabelValuesWithMatchers", func(t *testing.T) { matcher := labels.MustNewMatcher(labels.MatchEqual, "otherLabel", "someValue") - res, w, err := q.LabelValues("test2", matcher) - require.Equal(t, tcase.expectedWarnings[3], w) + res, w, err := q.LabelValues(ctx, "test2", matcher) + require.Subset(t, tcase.expectedWarnings, w) require.True(t, errors.Is(err, tcase.expectedErrs[3]), "expected error doesn't match") require.Equal(t, tcase.expectedLabels, res) diff --git a/storage/noop.go b/storage/noop.go index 83953ca43f..be5741ddd8 100644 --- a/storage/noop.go +++ b/storage/noop.go @@ -14,7 +14,10 @@ package storage import ( + "context" + "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/util/annotations" ) type noopQuerier struct{} @@ -24,15 +27,15 @@ func NoopQuerier() Querier { return noopQuerier{} } -func (noopQuerier) Select(bool, *SelectHints, ...*labels.Matcher) SeriesSet { +func (noopQuerier) Select(context.Context, bool, *SelectHints, ...*labels.Matcher) SeriesSet { return NoopSeriesSet() } -func (noopQuerier) LabelValues(string, ...*labels.Matcher) ([]string, Warnings, error) { +func (noopQuerier) LabelValues(context.Context, string, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, nil } -func (noopQuerier) LabelNames(...*labels.Matcher) ([]string, Warnings, error) { +func (noopQuerier) LabelNames(context.Context, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, nil } @@ -47,15 +50,15 @@ func NoopChunkedQuerier() ChunkQuerier { return noopChunkQuerier{} } -func (noopChunkQuerier) Select(bool, *SelectHints, ...*labels.Matcher) ChunkSeriesSet { +func (noopChunkQuerier) Select(context.Context, bool, *SelectHints, ...*labels.Matcher) ChunkSeriesSet { return NoopChunkedSeriesSet() } -func (noopChunkQuerier) LabelValues(string, ...*labels.Matcher) ([]string, Warnings, error) { +func (noopChunkQuerier) LabelValues(context.Context, string, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, nil } -func (noopChunkQuerier) LabelNames(...*labels.Matcher) ([]string, Warnings, error) { +func (noopChunkQuerier) LabelNames(context.Context, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, nil } @@ -76,7 +79,7 @@ func (noopSeriesSet) At() Series { return nil } func (noopSeriesSet) Err() error { return nil } -func 
(noopSeriesSet) Warnings() Warnings { return nil } +func (noopSeriesSet) Warnings() annotations.Annotations { return nil } type noopChunkedSeriesSet struct{} @@ -91,4 +94,4 @@ func (noopChunkedSeriesSet) At() ChunkSeries { return nil } func (noopChunkedSeriesSet) Err() error { return nil } -func (noopChunkedSeriesSet) Warnings() Warnings { return nil } +func (noopChunkedSeriesSet) Warnings() annotations.Annotations { return nil } diff --git a/storage/remote/codec.go b/storage/remote/codec.go index 4927c16fdc..4e0166d17e 100644 --- a/storage/remote/codec.go +++ b/storage/remote/codec.go @@ -38,6 +38,7 @@ import ( "github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunks" + "github.com/prometheus/prometheus/util/annotations" ) const ( @@ -122,7 +123,7 @@ func ToQuery(from, to int64, matchers []*labels.Matcher, hints *storage.SelectHi } // ToQueryResult builds a QueryResult proto. -func ToQueryResult(ss storage.SeriesSet, sampleLimit int) (*prompb.QueryResult, storage.Warnings, error) { +func ToQueryResult(ss storage.SeriesSet, sampleLimit int) (*prompb.QueryResult, annotations.Annotations, error) { numSamples := 0 resp := &prompb.QueryResult{} var iter chunkenc.Iterator @@ -224,7 +225,7 @@ func StreamChunkedReadResponses( sortedExternalLabels []prompb.Label, maxBytesInFrame int, marshalPool *sync.Pool, -) (storage.Warnings, error) { +) (annotations.Annotations, error) { var ( chks []prompb.Chunk lbls []prompb.Label @@ -340,7 +341,7 @@ func (e errSeriesSet) Err() error { return e.err } -func (e errSeriesSet) Warnings() storage.Warnings { return nil } +func (e errSeriesSet) Warnings() annotations.Annotations { return nil } // concreteSeriesSet implements storage.SeriesSet. type concreteSeriesSet struct { @@ -361,7 +362,7 @@ func (c *concreteSeriesSet) Err() error { return nil } -func (c *concreteSeriesSet) Warnings() storage.Warnings { return nil } +func (c *concreteSeriesSet) Warnings() annotations.Annotations { return nil } // concreteSeries implements storage.Series. 
type concreteSeries struct { diff --git a/storage/remote/codec_test.go b/storage/remote/codec_test.go index dbd5cec219..585bdfd88f 100644 --- a/storage/remote/codec_test.go +++ b/storage/remote/codec_test.go @@ -30,6 +30,7 @@ import ( "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/tsdbutil" + "github.com/prometheus/prometheus/util/annotations" ) var testHistogram = histogram.Histogram{ @@ -810,7 +811,7 @@ func (c *mockChunkSeriesSet) At() storage.ChunkSeries { } } -func (c *mockChunkSeriesSet) Warnings() storage.Warnings { return nil } +func (c *mockChunkSeriesSet) Warnings() annotations.Annotations { return nil } func (c *mockChunkSeriesSet) Err() error { return nil diff --git a/storage/remote/read.go b/storage/remote/read.go index af61334f48..723030091a 100644 --- a/storage/remote/read.go +++ b/storage/remote/read.go @@ -20,6 +20,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/util/annotations" ) type sampleAndChunkQueryableClient struct { @@ -48,9 +49,8 @@ func NewSampleAndChunkQueryableClient( } } -func (c *sampleAndChunkQueryableClient) Querier(ctx context.Context, mint, maxt int64) (storage.Querier, error) { +func (c *sampleAndChunkQueryableClient) Querier(mint, maxt int64) (storage.Querier, error) { q := &querier{ - ctx: ctx, mint: mint, maxt: maxt, client: c.client, @@ -75,10 +75,9 @@ func (c *sampleAndChunkQueryableClient) Querier(ctx context.Context, mint, maxt return q, nil } -func (c *sampleAndChunkQueryableClient) ChunkQuerier(ctx context.Context, mint, maxt int64) (storage.ChunkQuerier, error) { +func (c *sampleAndChunkQueryableClient) ChunkQuerier(mint, maxt int64) (storage.ChunkQuerier, error) { cq := &chunkQuerier{ querier: querier{ - ctx: ctx, mint: mint, maxt: maxt, client: c.client, @@ -125,7 +124,6 @@ func (c *sampleAndChunkQueryableClient) preferLocalStorage(mint, maxt int64) (cm } type querier struct { - ctx context.Context mint, maxt int64 client ReadClient @@ -140,7 +138,7 @@ type querier struct { // // If requiredMatchers are given, select returns a NoopSeriesSet if the given matchers don't match the label set of the // requiredMatchers. Otherwise it'll just call remote endpoint. -func (q *querier) Select(sortSeries bool, hints *storage.SelectHints, matchers ...*labels.Matcher) storage.SeriesSet { +func (q *querier) Select(ctx context.Context, sortSeries bool, hints *storage.SelectHints, matchers ...*labels.Matcher) storage.SeriesSet { if len(q.requiredMatchers) > 0 { // Copy to not modify slice configured by user. requiredMatchers := append([]*labels.Matcher{}, q.requiredMatchers...) @@ -167,7 +165,7 @@ func (q *querier) Select(sortSeries bool, hints *storage.SelectHints, matchers . return storage.ErrSeriesSet(fmt.Errorf("toQuery: %w", err)) } - res, err := q.client.Read(q.ctx, query) + res, err := q.client.Read(ctx, query) if err != nil { return storage.ErrSeriesSet(fmt.Errorf("remote_read: %w", err)) } @@ -212,13 +210,13 @@ func (q querier) addExternalLabels(ms []*labels.Matcher) ([]*labels.Matcher, []s } // LabelValues implements storage.Querier and is a noop. 
-func (q *querier) LabelValues(string, ...*labels.Matcher) ([]string, storage.Warnings, error) { +func (q *querier) LabelValues(context.Context, string, ...*labels.Matcher) ([]string, annotations.Annotations, error) { // TODO: Implement: https://github.com/prometheus/prometheus/issues/3351 return nil, nil, errors.New("not implemented") } // LabelNames implements storage.Querier and is a noop. -func (q *querier) LabelNames(...*labels.Matcher) ([]string, storage.Warnings, error) { +func (q *querier) LabelNames(context.Context, ...*labels.Matcher) ([]string, annotations.Annotations, error) { // TODO: Implement: https://github.com/prometheus/prometheus/issues/3351 return nil, nil, errors.New("not implemented") } @@ -235,9 +233,9 @@ type chunkQuerier struct { // Select implements storage.ChunkQuerier and uses the given matchers to read chunk series sets from the client. // It uses remote.querier.Select so it supports external labels and required matchers if specified. -func (q *chunkQuerier) Select(sortSeries bool, hints *storage.SelectHints, matchers ...*labels.Matcher) storage.ChunkSeriesSet { +func (q *chunkQuerier) Select(ctx context.Context, sortSeries bool, hints *storage.SelectHints, matchers ...*labels.Matcher) storage.ChunkSeriesSet { // TODO(bwplotka) Support remote read chunked and allow returning chunks directly (TODO ticket). - return storage.NewSeriesSetToChunkSet(q.querier.Select(sortSeries, hints, matchers...)) + return storage.NewSeriesSetToChunkSet(q.querier.Select(ctx, sortSeries, hints, matchers...)) } // Note strings in toFilter must be sorted. diff --git a/storage/remote/read_handler.go b/storage/remote/read_handler.go index aca4d7dd57..5cb4d39774 100644 --- a/storage/remote/read_handler.go +++ b/storage/remote/read_handler.go @@ -27,6 +27,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/prompb" "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/util/annotations" "github.com/prometheus/prometheus/util/gate" ) @@ -131,7 +132,7 @@ func (h *readHandler) remoteReadSamples( return err } - querier, err := h.queryable.Querier(ctx, query.StartTimestampMs, query.EndTimestampMs) + querier, err := h.queryable.Querier(query.StartTimestampMs, query.EndTimestampMs) if err != nil { return err } @@ -154,8 +155,8 @@ func (h *readHandler) remoteReadSamples( } } - var ws storage.Warnings - resp.Results[i], ws, err = ToQueryResult(querier.Select(false, hints, filteredMatchers...), h.remoteReadSampleLimit) + var ws annotations.Annotations + resp.Results[i], ws, err = ToQueryResult(querier.Select(ctx, false, hints, filteredMatchers...), h.remoteReadSampleLimit) if err != nil { return err } @@ -198,7 +199,7 @@ func (h *readHandler) remoteReadStreamedXORChunks(ctx context.Context, w http.Re return err } - querier, err := h.queryable.ChunkQuerier(ctx, query.StartTimestampMs, query.EndTimestampMs) + querier, err := h.queryable.ChunkQuerier(query.StartTimestampMs, query.EndTimestampMs) if err != nil { return err } @@ -225,7 +226,7 @@ func (h *readHandler) remoteReadStreamedXORChunks(ctx context.Context, w http.Re NewChunkedWriter(w, f), int64(i), // The streaming API has to provide the series sorted. 
- querier.Select(true, hints, filteredMatchers...), + querier.Select(ctx, true, hints, filteredMatchers...), sortedExternalLabels, h.remoteReadMaxBytesInFrame, h.marshalPool, diff --git a/storage/remote/read_test.go b/storage/remote/read_test.go index fe0633d7d2..54d4825f6a 100644 --- a/storage/remote/read_test.go +++ b/storage/remote/read_test.go @@ -27,7 +27,7 @@ import ( "github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/prompb" - "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/util/annotations" ) func TestNoDuplicateReadConfigs(t *testing.T) { @@ -469,13 +469,13 @@ func TestSampleAndChunkQueryableClient(t *testing.T) { tc.readRecent, tc.callback, ) - q, err := c.Querier(context.TODO(), tc.mint, tc.maxt) + q, err := c.Querier(tc.mint, tc.maxt) require.NoError(t, err) defer require.NoError(t, q.Close()) - ss := q.Select(true, nil, tc.matchers...) + ss := q.Select(context.Background(), true, nil, tc.matchers...) require.NoError(t, err) - require.Equal(t, storage.Warnings(nil), ss.Warnings()) + require.Equal(t, annotations.Annotations(nil), ss.Warnings()) require.Equal(t, tc.expectedQuery, m.got) diff --git a/storage/remote/storage.go b/storage/remote/storage.go index d01f96b3ba..b6533f9275 100644 --- a/storage/remote/storage.go +++ b/storage/remote/storage.go @@ -152,14 +152,14 @@ func (s *Storage) StartTime() (int64, error) { // The returned querier will never return an error, as all queryables are assumed to be best-effort. // Additionally, all returned queriers ensure that their Select's SeriesSets have ready data after the first `Next` invocation. // This is because Prometheus (fanout and secondary queries) can't handle the stream failing halfway through by design. -func (s *Storage) Querier(ctx context.Context, mint, maxt int64) (storage.Querier, error) { +func (s *Storage) Querier(mint, maxt int64) (storage.Querier, error) { s.mtx.Lock() queryables := s.queryables s.mtx.Unlock() queriers := make([]storage.Querier, 0, len(queryables)) for _, queryable := range queryables { - q, err := queryable.Querier(ctx, mint, maxt) + q, err := queryable.Querier(mint, maxt) if err != nil { return nil, err } @@ -170,14 +170,14 @@ func (s *Storage) Querier(ctx context.Context, mint, maxt int64) (storage.Querie // ChunkQuerier returns a storage.MergeQuerier combining the remote client queriers // of each configured remote read endpoint. -func (s *Storage) ChunkQuerier(ctx context.Context, mint, maxt int64) (storage.ChunkQuerier, error) { +func (s *Storage) ChunkQuerier(mint, maxt int64) (storage.ChunkQuerier, error) { s.mtx.Lock() queryables := s.queryables s.mtx.Unlock() queriers := make([]storage.ChunkQuerier, 0, len(queryables)) for _, queryable := range queryables { - q, err := queryable.ChunkQuerier(ctx, mint, maxt) + q, err := queryable.ChunkQuerier(mint, maxt) if err != nil { return nil, err } diff --git a/storage/secondary.go b/storage/secondary.go index d66a286172..44d9781835 100644 --- a/storage/secondary.go +++ b/storage/secondary.go @@ -14,9 +14,11 @@ package storage import ( + "context" "sync" "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/util/annotations" ) // secondaryQuerier is a wrapper that allows a querier to be treated in a best-effort manner. 
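A hedged sketch of the storage.Warnings → annotations.Annotations migration that runs through these files: warnings are now accumulated with Merge (or Add) instead of slice appends, exactly as the merge and secondary querier hunks do (collectWarnings is illustrative only; assumes the storage and util/annotations imports):

func collectWarnings(sets ...storage.SeriesSet) annotations.Annotations {
	var ws annotations.Annotations
	for _, set := range sets {
		ws.Merge(set.Warnings()) // previously: ws = append(ws, set.Warnings()...)
	}
	return ws
}

For reporting, the test changes in this diff flatten the set with ws.AsErrors() (an []error) or ws.AsStrings("", 0) (an []string).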
@@ -47,28 +49,28 @@ func newSecondaryQuerierFromChunk(cq ChunkQuerier) genericQuerier { return &secondaryQuerier{genericQuerier: newGenericQuerierFromChunk(cq)} } -func (s *secondaryQuerier) LabelValues(name string, matchers ...*labels.Matcher) ([]string, Warnings, error) { - vals, w, err := s.genericQuerier.LabelValues(name, matchers...) +func (s *secondaryQuerier) LabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) { + vals, w, err := s.genericQuerier.LabelValues(ctx, name, matchers...) if err != nil { - return nil, append([]error{err}, w...), nil + return nil, w.Add(err), nil } return vals, w, nil } -func (s *secondaryQuerier) LabelNames(matchers ...*labels.Matcher) ([]string, Warnings, error) { - names, w, err := s.genericQuerier.LabelNames(matchers...) +func (s *secondaryQuerier) LabelNames(ctx context.Context, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) { + names, w, err := s.genericQuerier.LabelNames(ctx, matchers...) if err != nil { - return nil, append([]error{err}, w...), nil + return nil, w.Add(err), nil } return names, w, nil } -func (s *secondaryQuerier) Select(sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) genericSeriesSet { +func (s *secondaryQuerier) Select(ctx context.Context, sortSeries bool, hints *SelectHints, matchers ...*labels.Matcher) genericSeriesSet { if s.done { panic("secondaryQuerier: Select invoked after first Next of any returned SeriesSet was done") } - s.asyncSets = append(s.asyncSets, s.genericQuerier.Select(sortSeries, hints, matchers...)) + s.asyncSets = append(s.asyncSets, s.genericQuerier.Select(ctx, sortSeries, hints, matchers...)) curr := len(s.asyncSets) - 1 return &lazyGenericSeriesSet{init: func() (genericSeriesSet, bool) { s.once.Do(func() { @@ -82,7 +84,7 @@ func (s *secondaryQuerier) Select(sortSeries bool, hints *SelectHints, matchers if err := set.Err(); err != nil { // One of the sets failed, ensure current one returning errors as warnings, and rest of the sets return nothing. // (All or nothing logic). - s.asyncSets[curr] = warningsOnlySeriesSet(append([]error{err}, ws...)) + s.asyncSets[curr] = warningsOnlySeriesSet(ws.Add(err)) for i := range s.asyncSets { if curr == i { continue diff --git a/tsdb/agent/db.go b/tsdb/agent/db.go index 77b77fc23d..72ceddb64c 100644 --- a/tsdb/agent/db.go +++ b/tsdb/agent/db.go @@ -716,12 +716,12 @@ func (db *DB) StartTime() (int64, error) { } // Querier implements the Storage interface. -func (db *DB) Querier(context.Context, int64, int64) (storage.Querier, error) { +func (db *DB) Querier(int64, int64) (storage.Querier, error) { return nil, ErrUnsupported } // ChunkQuerier implements the Storage interface. 
-func (db *DB) ChunkQuerier(context.Context, int64, int64) (storage.ChunkQuerier, error) { +func (db *DB) ChunkQuerier(int64, int64) (storage.ChunkQuerier, error) { return nil, ErrUnsupported } diff --git a/tsdb/agent/db_test.go b/tsdb/agent/db_test.go index 7eda6110cc..fe9dead807 100644 --- a/tsdb/agent/db_test.go +++ b/tsdb/agent/db_test.go @@ -103,12 +103,12 @@ func TestUnsupportedFunctions(t *testing.T) { defer s.Close() t.Run("Querier", func(t *testing.T) { - _, err := s.Querier(context.TODO(), 0, 0) + _, err := s.Querier(0, 0) require.Equal(t, err, ErrUnsupported) }) t.Run("ChunkQuerier", func(t *testing.T) { - _, err := s.ChunkQuerier(context.TODO(), 0, 0) + _, err := s.ChunkQuerier(0, 0) require.Equal(t, err, ErrUnsupported) }) diff --git a/tsdb/block.go b/tsdb/block.go index d1c75fc83a..cddb1b1b70 100644 --- a/tsdb/block.go +++ b/tsdb/block.go @@ -15,6 +15,7 @@ package tsdb import ( + "context" "encoding/json" "io" "os" @@ -65,22 +66,22 @@ type IndexReader interface { Symbols() index.StringIter // SortedLabelValues returns sorted possible label values. - SortedLabelValues(name string, matchers ...*labels.Matcher) ([]string, error) + SortedLabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) // LabelValues returns possible label values which may not be sorted. - LabelValues(name string, matchers ...*labels.Matcher) ([]string, error) + LabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) // Postings returns the postings list iterator for the label pairs. // The Postings here contain the offsets to the series inside the index. // Found IDs are not strictly required to point to a valid Series, e.g. // during background garbage collections. - Postings(name string, values ...string) (index.Postings, error) + Postings(ctx context.Context, name string, values ...string) (index.Postings, error) // PostingsForMatchers assembles a single postings iterator based on the given matchers. // The resulting postings are not ordered by series. // If concurrent hint is set to true, call will be optimized for a (most likely) concurrent call with same matchers, // avoiding same calculations twice, however this implementation may lead to a worse performance when called once. - PostingsForMatchers(concurrent bool, ms ...*labels.Matcher) (index.Postings, error) + PostingsForMatchers(ctx context.Context, concurrent bool, ms ...*labels.Matcher) (index.Postings, error) // SortedPostings returns a postings list that is reordered to be sorted // by the label set of the underlying series. @@ -97,16 +98,16 @@ type IndexReader interface { Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error // LabelNames returns all the unique label names present in the index in sorted order. - LabelNames(matchers ...*labels.Matcher) ([]string, error) + LabelNames(ctx context.Context, matchers ...*labels.Matcher) ([]string, error) // LabelValueFor returns label value for the given label name in the series referred to by ID. // If the series couldn't be found or the series doesn't have the requested label a // storage.ErrNotFound is returned as error. - LabelValueFor(id storage.SeriesRef, label string) (string, error) + LabelValueFor(ctx context.Context, id storage.SeriesRef, label string) (string, error) // LabelNamesFor returns all the label names for the series referred to by IDs. // The names returned are sorted. 
- LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) + LabelNamesFor(ctx context.Context, ids ...storage.SeriesRef) ([]string, error) // Close releases the underlying resources of the reader. Close() error @@ -476,14 +477,14 @@ func (r blockIndexReader) Symbols() index.StringIter { return r.ir.Symbols() } -func (r blockIndexReader) SortedLabelValues(name string, matchers ...*labels.Matcher) ([]string, error) { +func (r blockIndexReader) SortedLabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) { var st []string var err error if len(matchers) == 0 { - st, err = r.ir.SortedLabelValues(name) + st, err = r.ir.SortedLabelValues(ctx, name) } else { - st, err = r.LabelValues(name, matchers...) + st, err = r.LabelValues(ctx, name, matchers...) if err == nil { slices.Sort(st) } @@ -492,33 +493,33 @@ func (r blockIndexReader) SortedLabelValues(name string, matchers ...*labels.Mat return st, errors.Wrapf(err, "block: %s", r.b.Meta().ULID) } -func (r blockIndexReader) LabelValues(name string, matchers ...*labels.Matcher) ([]string, error) { +func (r blockIndexReader) LabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) { if len(matchers) == 0 { - st, err := r.ir.LabelValues(name) + st, err := r.ir.LabelValues(ctx, name) return st, errors.Wrapf(err, "block: %s", r.b.Meta().ULID) } - return labelValuesWithMatchers(r.ir, name, matchers...) + return labelValuesWithMatchers(ctx, r.ir, name, matchers...) } -func (r blockIndexReader) LabelNames(matchers ...*labels.Matcher) ([]string, error) { +func (r blockIndexReader) LabelNames(ctx context.Context, matchers ...*labels.Matcher) ([]string, error) { if len(matchers) == 0 { - return r.b.LabelNames() + return r.b.LabelNames(ctx) } - return labelNamesWithMatchers(r.ir, matchers...) + return labelNamesWithMatchers(ctx, r.ir, matchers...) } -func (r blockIndexReader) Postings(name string, values ...string) (index.Postings, error) { - p, err := r.ir.Postings(name, values...) +func (r blockIndexReader) Postings(ctx context.Context, name string, values ...string) (index.Postings, error) { + p, err := r.ir.Postings(ctx, name, values...) if err != nil { return p, errors.Wrapf(err, "block: %s", r.b.Meta().ULID) } return p, nil } -func (r blockIndexReader) PostingsForMatchers(concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { - return r.ir.PostingsForMatchers(concurrent, ms...) +func (r blockIndexReader) PostingsForMatchers(ctx context.Context, concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { + return r.ir.PostingsForMatchers(ctx, concurrent, ms...) } func (r blockIndexReader) SortedPostings(p index.Postings) index.Postings { @@ -542,14 +543,14 @@ func (r blockIndexReader) Close() error { } // LabelValueFor returns label value for the given label name in the series referred to by ID. -func (r blockIndexReader) LabelValueFor(id storage.SeriesRef, label string) (string, error) { - return r.ir.LabelValueFor(id, label) +func (r blockIndexReader) LabelValueFor(ctx context.Context, id storage.SeriesRef, label string) (string, error) { + return r.ir.LabelValueFor(ctx, id, label) } // LabelNamesFor returns all the label names for the series referred to by IDs. // The names returned are sorted. -func (r blockIndexReader) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) { - return r.ir.LabelNamesFor(ids...) +func (r blockIndexReader) LabelNamesFor(ctx context.Context, ids ...storage.SeriesRef) ([]string, error) { + return r.ir.LabelNamesFor(ctx, ids...) 
} type blockTombstoneReader struct { @@ -573,7 +574,7 @@ func (r blockChunkReader) Close() error { } // Delete matching series between mint and maxt in the block. -func (pb *Block) Delete(mint, maxt int64, ms ...*labels.Matcher) error { +func (pb *Block) Delete(ctx context.Context, mint, maxt int64, ms ...*labels.Matcher) error { pb.mtx.Lock() defer pb.mtx.Unlock() @@ -581,7 +582,7 @@ func (pb *Block) Delete(mint, maxt int64, ms ...*labels.Matcher) error { return ErrClosing } - p, err := pb.indexr.PostingsForMatchers(false, ms...) + p, err := pb.indexr.PostingsForMatchers(ctx, false, ms...) if err != nil { return errors.Wrap(err, "select series") } @@ -715,8 +716,8 @@ func (pb *Block) OverlapsClosedInterval(mint, maxt int64) bool { } // LabelNames returns all the unique label names present in the Block in sorted order. -func (pb *Block) LabelNames() ([]string, error) { - return pb.indexr.LabelNames() +func (pb *Block) LabelNames(ctx context.Context) ([]string, error) { + return pb.indexr.LabelNames(ctx) } func clampInterval(a, b, mint, maxt int64) (int64, int64) { diff --git a/tsdb/block_test.go b/tsdb/block_test.go index 64c82e6e51..0ced5afffe 100644 --- a/tsdb/block_test.go +++ b/tsdb/block_test.go @@ -198,7 +198,7 @@ func TestCorruptedChunk(t *testing.T) { querier, err := NewBlockQuerier(b, 0, 1) require.NoError(t, err) defer func() { require.NoError(t, querier.Close()) }() - set := querier.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) + set := querier.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) // Check chunk errors during iter time. require.True(t, set.Next()) @@ -211,6 +211,7 @@ func TestCorruptedChunk(t *testing.T) { func TestLabelValuesWithMatchers(t *testing.T) { tmpdir := t.TempDir() + ctx := context.Background() var seriesEntries []storage.Series for i := 0; i < 100; i++ { @@ -265,11 +266,11 @@ func TestLabelValuesWithMatchers(t *testing.T) { for _, tt := range testCases { t.Run(tt.name, func(t *testing.T) { - actualValues, err := indexReader.SortedLabelValues(tt.labelName, tt.matchers...) + actualValues, err := indexReader.SortedLabelValues(ctx, tt.labelName, tt.matchers...) require.NoError(t, err) require.Equal(t, tt.expectedValues, actualValues) - actualValues, err = indexReader.LabelValues(tt.labelName, tt.matchers...) + actualValues, err = indexReader.LabelValues(ctx, tt.labelName, tt.matchers...) sort.Strings(actualValues) require.NoError(t, err) require.Equal(t, tt.expectedValues, actualValues) @@ -304,7 +305,7 @@ func TestBlockSize(t *testing.T) { // Delete some series and check the sizes again. { - require.NoError(t, blockInit.Delete(1, 10, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))) + require.NoError(t, blockInit.Delete(context.Background(), 1, 10, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))) expAfterDelete := blockInit.Size() require.Greater(t, expAfterDelete, expSizeInit, "after a delete the block size should be bigger as the tombstone file should grow %v > %v", expAfterDelete, expSizeInit) actAfterDelete, err := fileutil.DirSize(blockDirInit) @@ -368,6 +369,7 @@ func TestReadIndexFormatV1(t *testing.T) { func BenchmarkLabelValuesWithMatchers(b *testing.B) { tmpdir := b.TempDir() + ctx := context.Background() var seriesEntries []storage.Series metricCount := 1000000 @@ -401,7 +403,7 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) { b.ReportAllocs() for benchIdx := 0; benchIdx < b.N; benchIdx++ { - actualValues, err := indexReader.LabelValues("b_tens", matchers...) 
+ actualValues, err := indexReader.LabelValues(ctx, "b_tens", matchers...) require.NoError(b, err) require.Equal(b, 9, len(actualValues)) } @@ -409,6 +411,7 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) { func TestLabelNamesWithMatchers(t *testing.T) { tmpdir := t.TempDir() + ctx := context.Background() var seriesEntries []storage.Series for i := 0; i < 100; i++ { @@ -474,7 +477,7 @@ func TestLabelNamesWithMatchers(t *testing.T) { for _, tt := range testCases { t.Run(tt.name, func(t *testing.T) { - actualNames, err := indexReader.LabelNames(tt.matchers...) + actualNames, err := indexReader.LabelNames(ctx, tt.matchers...) require.NoError(t, err) require.Equal(t, tt.expectedNames, actualNames) }) diff --git a/tsdb/compact.go b/tsdb/compact.go index add96af710..45582eeca4 100644 --- a/tsdb/compact.go +++ b/tsdb/compact.go @@ -1009,7 +1009,7 @@ func (c DefaultBlockPopulator) PopulateBlock(ctx context.Context, metrics *Compa closers = append(closers, tombsr) k, v := index.AllPostingsKey() - all, err := indexr.Postings(k, v) + all, err := indexr.Postings(ctx, k, v) if err != nil { return err } @@ -1021,7 +1021,7 @@ func (c DefaultBlockPopulator) PopulateBlock(ctx context.Context, metrics *Compa // To iterate series when populating symbols, we cannot reuse postings we just got, but need to get a new copy. // Postings can only be iterated once. k, v = index.AllPostingsKey() - all, err = indexr.Postings(k, v) + all, err = indexr.Postings(ctx, k, v) if err != nil { return err } diff --git a/tsdb/compact_test.go b/tsdb/compact_test.go index 6ca83861cf..b3c8c11f1f 100644 --- a/tsdb/compact_test.go +++ b/tsdb/compact_test.go @@ -506,6 +506,7 @@ func samplesForRange(minTime, maxTime int64, maxSamplesPerChunk int) (ret [][]sa func TestCompaction_CompactWithSplitting(t *testing.T) { seriesCounts := []int{10, 1234} shardCounts := []uint64{1, 13} + ctx := context.Background() for _, series := range seriesCounts { dir, err := os.MkdirTemp("", "compact") @@ -533,7 +534,7 @@ func TestCompaction_CompactWithSplitting(t *testing.T) { for _, shardCount := range shardCounts { t.Run(fmt.Sprintf("series=%d, shards=%d", series, shardCount), func(t *testing.T) { - c, err := NewLeveledCompactorWithChunkSize(context.Background(), nil, log.NewNopLogger(), []int64{0}, nil, chunks.DefaultChunkSegmentSize, nil, true) + c, err := NewLeveledCompactorWithChunkSize(ctx, nil, log.NewNopLogger(), []int64{0}, nil, chunks.DefaultChunkSegmentSize, nil, true) require.NoError(t, err) blockIDs, err := c.CompactWithSplitting(dir, blockDirs, openBlocks, shardCount) @@ -585,7 +586,7 @@ func TestCompaction_CompactWithSplitting(t *testing.T) { }() k, v := index.AllPostingsKey() - p, err := idxr.Postings(k, v) + p, err := idxr.Postings(ctx, k, v) require.NoError(t, err) var lbls labels.ScratchBuilder @@ -1471,6 +1472,8 @@ func TestDeleteCompactionBlockAfterFailedReload(t *testing.T) { for title, bootStrap := range tests { t.Run(title, func(t *testing.T) { + ctx := context.Background() + db := openTestDB(t, nil, []int64{1, 100}) defer func() { require.NoError(t, db.Close()) @@ -1494,7 +1497,7 @@ func TestDeleteCompactionBlockAfterFailedReload(t *testing.T) { // Do the compaction and check the metrics. // Compaction should succeed, but the reloadBlocks should fail and // the new block created from the compaction should be deleted. 
- require.Error(t, db.Compact()) + require.Error(t, db.Compact(ctx)) require.Equal(t, 1.0, prom_testutil.ToFloat64(db.metrics.reloadsFailed), "'failed db reloadBlocks' count metrics mismatch") require.Equal(t, 1.0, prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran), "`compaction` count metric mismatch") require.Equal(t, 1.0, prom_testutil.ToFloat64(db.metrics.compactionsFailed), "`compactions failed` count metric mismatch") diff --git a/tsdb/db.go b/tsdb/db.go index 12d97b0b0e..34f6bbc89f 100644 --- a/tsdb/db.go +++ b/tsdb/db.go @@ -567,22 +567,22 @@ func (db *DBReadOnly) loadDataAsQueryable(maxt int64) (storage.SampleAndChunkQue // Querier loads the blocks and wal and returns a new querier over the data partition for the given time range. // Current implementation doesn't support multiple Queriers. -func (db *DBReadOnly) Querier(ctx context.Context, mint, maxt int64) (storage.Querier, error) { +func (db *DBReadOnly) Querier(mint, maxt int64) (storage.Querier, error) { q, err := db.loadDataAsQueryable(maxt) if err != nil { return nil, err } - return q.Querier(ctx, mint, maxt) + return q.Querier(mint, maxt) } // ChunkQuerier loads blocks and the wal and returns a new chunk querier over the data partition for the given time range. // Current implementation doesn't support multiple ChunkQueriers. -func (db *DBReadOnly) ChunkQuerier(ctx context.Context, mint, maxt int64) (storage.ChunkQuerier, error) { +func (db *DBReadOnly) ChunkQuerier(mint, maxt int64) (storage.ChunkQuerier, error) { q, err := db.loadDataAsQueryable(maxt) if err != nil { return nil, err } - return q.ChunkQuerier(ctx, mint, maxt) + return q.ChunkQuerier(mint, maxt) } // Blocks returns a slice of block readers for persisted blocks. @@ -956,7 +956,7 @@ func open(dir string, l log.Logger, r prometheus.Registerer, opts *Options, rngs db.oooWasEnabled.Store(true) } - go db.run() + go db.run(ctx) return db, nil } @@ -997,7 +997,7 @@ func (db *DB) Dir() string { return db.dir } -func (db *DB) run() { +func (db *DB) run(ctx context.Context) { defer close(db.donec) backoff := time.Duration(0) @@ -1028,7 +1028,7 @@ func (db *DB) run() { db.autoCompactMtx.Lock() if db.autoCompact { - if err := db.Compact(); err != nil { + if err := db.Compact(ctx); err != nil { level.Error(db.logger).Log("msg", "compaction failed", "err", err) backoff = exponential(backoff, 1*time.Second, 1*time.Minute) } else { @@ -1148,7 +1148,7 @@ func (a dbAppender) Commit() error { // which will also delete the blocks that fall out of the retention window. // Old blocks are only deleted on reloadBlocks based on the new block's parent information. // See DB.reloadBlocks documentation for further information. -func (db *DB) Compact() (returnErr error) { +func (db *DB) Compact(ctx context.Context) (returnErr error) { db.cmtx.Lock() defer db.cmtx.Unlock() defer func() { @@ -1221,7 +1221,7 @@ func (db *DB) Compact() (returnErr error) { if lastBlockMaxt != math.MinInt64 { // The head was compacted, so we compact OOO head as well. - if err := db.compactOOOHead(); err != nil { + if err := db.compactOOOHead(ctx); err != nil { return errors.Wrap(err, "compact ooo head") } } @@ -1245,18 +1245,18 @@ func (db *DB) CompactHead(head *RangeHead) error { } // CompactOOOHead compacts the OOO Head. 
-func (db *DB) CompactOOOHead() error { +func (db *DB) CompactOOOHead(ctx context.Context) error { db.cmtx.Lock() defer db.cmtx.Unlock() - return db.compactOOOHead() + return db.compactOOOHead(ctx) } -func (db *DB) compactOOOHead() error { +func (db *DB) compactOOOHead(ctx context.Context) error { if !db.oooWasEnabled.Load() { return nil } - oooHead, err := NewOOOCompactionHead(db.head) + oooHead, err := NewOOOCompactionHead(ctx, db.head) if err != nil { return errors.Wrap(err, "get ooo compaction head") } @@ -1894,7 +1894,7 @@ func (db *DB) Snapshot(dir string, withHead bool) error { } // Querier returns a new querier over the data partition for the given time range. -func (db *DB) Querier(_ context.Context, mint, maxt int64) (storage.Querier, error) { +func (db *DB) Querier(mint, maxt int64) (storage.Querier, error) { var blocks []BlockReader db.mtx.RLock() @@ -2042,7 +2042,7 @@ func (db *DB) blockChunkQuerierForRange(mint, maxt int64) ([]storage.ChunkQuerie } // ChunkQuerier returns a new chunk querier over the data partition for the given time range. -func (db *DB) ChunkQuerier(_ context.Context, mint, maxt int64) (storage.ChunkQuerier, error) { +func (db *DB) ChunkQuerier(mint, maxt int64) (storage.ChunkQuerier, error) { blockQueriers, err := db.blockChunkQuerierForRange(mint, maxt) if err != nil { return nil, err @@ -2069,7 +2069,7 @@ func rangeForTimestamp(t, width int64) (maxt int64) { } // Delete implements deletion of metrics. It only has atomicity guarantees on a per-block basis. -func (db *DB) Delete(mint, maxt int64, ms ...*labels.Matcher) error { +func (db *DB) Delete(ctx context.Context, mint, maxt int64, ms ...*labels.Matcher) error { db.cmtx.Lock() defer db.cmtx.Unlock() @@ -2081,13 +2081,13 @@ func (db *DB) Delete(mint, maxt int64, ms ...*labels.Matcher) error { for _, b := range db.blocks { if b.OverlapsClosedInterval(mint, maxt) { g.Go(func(b *Block) func() error { - return func() error { return b.Delete(mint, maxt, ms...) } + return func() error { return b.Delete(ctx, mint, maxt, ms...) } }(b)) } } if db.head.OverlapsClosedInterval(mint, maxt) { g.Go(func() error { - return db.head.Delete(mint, maxt, ms...) + return db.head.Delete(ctx, mint, maxt, ms...) }) } diff --git a/tsdb/db_test.go b/tsdb/db_test.go index 3da64a00dd..654e15066e 100644 --- a/tsdb/db_test.go +++ b/tsdb/db_test.go @@ -53,6 +53,7 @@ import ( "github.com/prometheus/prometheus/tsdb/tombstones" "github.com/prometheus/prometheus/tsdb/tsdbutil" "github.com/prometheus/prometheus/tsdb/wlog" + "github.com/prometheus/prometheus/util/annotations" "github.com/prometheus/prometheus/util/testutil" ) @@ -95,7 +96,7 @@ func openTestDB(t testing.TB, opts *Options, rngs []int64) (db *DB) { // query runs a matcher query against the querier and fully expands its data. func query(t testing.TB, q storage.Querier, matchers ...*labels.Matcher) map[string][]chunks.Sample { - ss := q.Select(false, nil, matchers...) + ss := q.Select(context.Background(), false, nil, matchers...) defer func() { require.NoError(t, q.Close()) }() @@ -157,7 +158,7 @@ func queryAndExpandChunks(t testing.TB, q storage.ChunkQuerier, matchers ...*lab // queryChunks runs a matcher query against the querier and expands its data. func queryChunks(t testing.TB, q storage.ChunkQuerier, matchers ...*labels.Matcher) map[string][]chunks.Meta { - ss := q.Select(false, nil, matchers...) + ss := q.Select(context.Background(), false, nil, matchers...) 
defer func() { require.NoError(t, q.Close()) }() @@ -226,7 +227,7 @@ func TestDataAvailableOnlyAfterCommit(t *testing.T) { _, err := app.Append(0, labels.FromStrings("foo", "bar"), 0, 0) require.NoError(t, err) - querier, err := db.Querier(context.TODO(), 0, 1) + querier, err := db.Querier(0, 1) require.NoError(t, err) seriesSet := query(t, querier, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) require.Equal(t, map[string][]chunks.Sample{}, seriesSet) @@ -234,7 +235,7 @@ func TestDataAvailableOnlyAfterCommit(t *testing.T) { err = app.Commit() require.NoError(t, err) - querier, err = db.Querier(context.TODO(), 0, 1) + querier, err = db.Querier(0, 1) require.NoError(t, err) defer querier.Close() @@ -292,7 +293,7 @@ func TestNoPanicAfterWALCorruption(t *testing.T) { }() require.Equal(t, 1.0, prom_testutil.ToFloat64(db.head.metrics.walCorruptionsTotal), "WAL corruption count mismatch") - querier, err := db.Querier(context.TODO(), 0, maxt) + querier, err := db.Querier(0, maxt) require.NoError(t, err) seriesSet := query(t, querier, labels.MustNewMatcher(labels.MatchEqual, "", "")) // The last sample should be missing as it was after the WAL segment corruption. @@ -313,7 +314,7 @@ func TestDataNotAvailableAfterRollback(t *testing.T) { err = app.Rollback() require.NoError(t, err) - querier, err := db.Querier(context.TODO(), 0, 1) + querier, err := db.Querier(0, 1) require.NoError(t, err) defer querier.Close() @@ -364,7 +365,7 @@ func TestDBAppenderAddRef(t *testing.T) { require.NoError(t, app2.Commit()) - q, err := db.Querier(context.TODO(), 0, 200) + q, err := db.Querier(0, 200) require.NoError(t, err) res := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) @@ -404,7 +405,7 @@ func TestAppendEmptyLabelsIgnored(t *testing.T) { } func TestDeleteSimple(t *testing.T) { - numSamples := int64(10) + const numSamples int64 = 10 cases := []struct { Intervals tombstones.Intervals @@ -453,14 +454,14 @@ Outer: // TODO(gouthamve): Reset the tombstones somehow. // Delete the ranges. for _, r := range c.Intervals { - require.NoError(t, db.Delete(r.Mint, r.Maxt, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) + require.NoError(t, db.Delete(ctx, r.Mint, r.Maxt, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) } // Compare the result. - q, err := db.Querier(context.TODO(), 0, numSamples) + q, err := db.Querier(0, numSamples) require.NoError(t, err) - res := q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) + res := q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { @@ -617,7 +618,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) { require.NoError(t, app.Commit()) // Make sure the right value is stored. 
- q, err := db.Querier(context.TODO(), 0, 10) + q, err := db.Querier(0, 10) require.NoError(t, err) ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) @@ -634,7 +635,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) { require.NoError(t, err) require.NoError(t, app.Commit()) - q, err = db.Querier(context.TODO(), 0, 10) + q, err = db.Querier(0, 10) require.NoError(t, err) ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) @@ -667,12 +668,12 @@ func TestDB_Snapshot(t *testing.T) { require.NoError(t, err) defer func() { require.NoError(t, db.Close()) }() - querier, err := db.Querier(context.TODO(), mint, mint+1000) + querier, err := db.Querier(mint, mint+1000) require.NoError(t, err) defer func() { require.NoError(t, querier.Close()) }() // sum values - seriesSet := querier.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) + seriesSet := querier.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) var series chunkenc.Iterator sum := 0.0 for seriesSet.Next() { @@ -716,12 +717,12 @@ func TestDB_Snapshot_ChunksOutsideOfCompactedRange(t *testing.T) { require.NoError(t, err) defer func() { require.NoError(t, db.Close()) }() - querier, err := db.Querier(context.TODO(), mint, mint+1000) + querier, err := db.Querier(mint, mint+1000) require.NoError(t, err) defer func() { require.NoError(t, querier.Close()) }() // Sum values. - seriesSet := querier.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) + seriesSet := querier.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) var series chunkenc.Iterator sum := 0.0 for seriesSet.Next() { @@ -740,7 +741,7 @@ func TestDB_Snapshot_ChunksOutsideOfCompactedRange(t *testing.T) { } func TestDB_SnapshotWithDelete(t *testing.T) { - numSamples := int64(10) + const numSamples int64 = 10 db := openTestDB(t, nil, nil) defer func() { require.NoError(t, db.Close()) }() @@ -770,7 +771,7 @@ Outer: // TODO(gouthamve): Reset the tombstones somehow. // Delete the ranges. for _, r := range c.intervals { - require.NoError(t, db.Delete(r.Mint, r.Maxt, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) + require.NoError(t, db.Delete(ctx, r.Mint, r.Maxt, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) } // create snapshot @@ -784,11 +785,11 @@ Outer: defer func() { require.NoError(t, newDB.Close()) }() // Compare the result. - q, err := newDB.Querier(context.TODO(), 0, numSamples) + q, err := newDB.Querier(0, numSamples) require.NoError(t, err) defer func() { require.NoError(t, q.Close()) }() - res := q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) + res := q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { @@ -959,10 +960,10 @@ func TestDB_e2e(t *testing.T) { } } - q, err := db.Querier(context.TODO(), mint, maxt) + q, err := db.Querier(mint, maxt) require.NoError(t, err) - ss := q.Select(false, nil, qry.ms...) + ss := q.Select(ctx, false, nil, qry.ms...) 
result := map[string][]chunks.Sample{} for ss.Next() { @@ -1004,10 +1005,10 @@ func TestWALFlushedOnDBClose(t *testing.T) { require.NoError(t, err) defer func() { require.NoError(t, db.Close()) }() - q, err := db.Querier(context.TODO(), 0, 1) + q, err := db.Querier(0, 1) require.NoError(t, err) - values, ws, err := q.LabelValues("labelname") + values, ws, err := q.LabelValues(ctx, "labelname") require.NoError(t, err) require.Equal(t, 0, len(ws)) require.Equal(t, []string{"labelvalue"}, values) @@ -1150,10 +1151,10 @@ func testWALReplayRaceOnSamplesLoggedBeforeSeries(t *testing.T, numSamplesBefore }) // Query back chunks for all series. - q, err := reopenDB.ChunkQuerier(context.Background(), math.MinInt64, math.MaxInt64) + q, err := reopenDB.ChunkQuerier(math.MinInt64, math.MaxInt64) require.NoError(t, err) - set := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "series_id", ".+")) + set := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchRegexp, "series_id", ".+")) actualSeries := 0 var chunksIt chunks.Iterator @@ -1176,7 +1177,7 @@ func testWALReplayRaceOnSamplesLoggedBeforeSeries(t *testing.T, numSamplesBefore } func TestTombstoneClean(t *testing.T) { - numSamples := int64(10) + const numSamples int64 = 10 db := openTestDB(t, nil, nil) @@ -1214,18 +1215,18 @@ func TestTombstoneClean(t *testing.T) { defer db.Close() for _, r := range c.intervals { - require.NoError(t, db.Delete(r.Mint, r.Maxt, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) + require.NoError(t, db.Delete(ctx, r.Mint, r.Maxt, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) } // All of the setup for THIS line. require.NoError(t, db.CleanTombstones()) // Compare the result. - q, err := db.Querier(context.TODO(), 0, numSamples) + q, err := db.Querier(0, numSamples) require.NoError(t, err) defer q.Close() - res := q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) + res := q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) expSamples := make([]chunks.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { @@ -1299,7 +1300,7 @@ func TestTombstoneCleanResultEmptyBlock(t *testing.T) { // Create tombstones by deleting all samples. for _, r := range intervals { - require.NoError(t, db.Delete(r.Mint, r.Maxt, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) + require.NoError(t, db.Delete(ctx, r.Mint, r.Maxt, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) } require.NoError(t, db.CleanTombstones()) @@ -1723,12 +1724,12 @@ func TestNotMatcherSelectsLabelsUnsetSeries(t *testing.T) { series: labelpairs[:1], }} - q, err := db.Querier(context.TODO(), 0, 10) + q, err := db.Querier(0, 10) require.NoError(t, err) defer func() { require.NoError(t, q.Close()) }() for _, c := range cases { - ss := q.Select(false, nil, c.selector...) + ss := q.Select(ctx, false, nil, c.selector...) lres, _, ws, err := expandSeriesSet(ss) require.NoError(t, err) require.Equal(t, 0, len(ws)) @@ -1738,7 +1739,7 @@ func TestNotMatcherSelectsLabelsUnsetSeries(t *testing.T) { // expandSeriesSet returns the raw labels in the order they are retrieved from // the series set and the samples keyed by Labels().String(). 
-func expandSeriesSet(ss storage.SeriesSet) ([]labels.Labels, map[string][]sample, storage.Warnings, error) { +func expandSeriesSet(ss storage.SeriesSet) ([]labels.Labels, map[string][]sample, annotations.Annotations, error) { resultLabels := []labels.Labels{} resultSamples := map[string][]sample{} var it chunkenc.Iterator @@ -1871,7 +1872,7 @@ func TestChunkAtBlockBoundary(t *testing.T) { err := app.Commit() require.NoError(t, err) - err = db.Compact() + err = db.Compact(ctx) require.NoError(t, err) var builder labels.ScratchBuilder @@ -1884,7 +1885,7 @@ func TestChunkAtBlockBoundary(t *testing.T) { meta := block.Meta() k, v := index.AllPostingsKey() - p, err := r.Postings(k, v) + p, err := r.Postings(ctx, k, v) require.NoError(t, err) var chks []chunks.Meta @@ -1927,19 +1928,19 @@ func TestQuerierWithBoundaryChunks(t *testing.T) { err := app.Commit() require.NoError(t, err) - err = db.Compact() + err = db.Compact(ctx) require.NoError(t, err) require.GreaterOrEqual(t, len(db.blocks), 3, "invalid test, less than three blocks in DB") - q, err := db.Querier(context.TODO(), blockRange, 2*blockRange) + q, err := db.Querier(blockRange, 2*blockRange) require.NoError(t, err) defer q.Close() // The requested interval covers 2 blocks, so the querier's label values for blockID should give us 2 values, one from each block. - b, ws, err := q.LabelValues("blockID") + b, ws, err := q.LabelValues(ctx, "blockID") require.NoError(t, err) - require.Equal(t, storage.Warnings(nil), ws) + require.Equal(t, annotations.Annotations{}, ws) require.Equal(t, []string{"1", "2"}, b) } @@ -2058,7 +2059,7 @@ func TestNoEmptyBlocks(t *testing.T) { defaultMatcher := labels.MustNewMatcher(labels.MatchRegexp, "", ".*") t.Run("Test no blocks after compact with empty head.", func(t *testing.T) { - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) actBlocks, err := blockDirs(db.Dir()) require.NoError(t, err) require.Equal(t, len(db.Blocks()), len(actBlocks)) @@ -2075,8 +2076,8 @@ func TestNoEmptyBlocks(t *testing.T) { _, err = app.Append(0, defaultLabel, 3+rangeToTriggerCompaction, 0) require.NoError(t, err) require.NoError(t, app.Commit()) - require.NoError(t, db.Delete(math.MinInt64, math.MaxInt64, defaultMatcher)) - require.NoError(t, db.Compact()) + require.NoError(t, db.Delete(ctx, math.MinInt64, math.MaxInt64, defaultMatcher)) + require.NoError(t, db.Compact(ctx)) require.Equal(t, 1, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran)), "compaction should have been triggered here") actBlocks, err := blockDirs(db.Dir()) @@ -2098,7 +2099,7 @@ func TestNoEmptyBlocks(t *testing.T) { require.NoError(t, err) require.NoError(t, app.Commit()) - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Equal(t, 2, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran)), "compaction should have been triggered here") actBlocks, err = blockDirs(db.Dir()) require.NoError(t, err) @@ -2118,8 +2119,8 @@ func TestNoEmptyBlocks(t *testing.T) { _, err = app.Append(0, defaultLabel, currentTime+rangeToTriggerCompaction, 0) require.NoError(t, err) require.NoError(t, app.Commit()) - require.NoError(t, db.head.Delete(math.MinInt64, math.MaxInt64, defaultMatcher)) - require.NoError(t, db.Compact()) + require.NoError(t, db.head.Delete(ctx, math.MinInt64, math.MaxInt64, defaultMatcher)) + require.NoError(t, db.Compact(ctx)) require.Equal(t, 3, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran)), "compaction should have been triggered here") 
require.Equal(t, oldBlocks, db.Blocks()) }) @@ -2137,8 +2138,8 @@ func TestNoEmptyBlocks(t *testing.T) { oldBlocks := db.Blocks() require.NoError(t, db.reloadBlocks()) // Reload the db to register the new blocks. require.Equal(t, len(blocks)+len(oldBlocks), len(db.Blocks())) // Ensure all blocks are registered. - require.NoError(t, db.Delete(math.MinInt64, math.MaxInt64, defaultMatcher)) - require.NoError(t, db.Compact()) + require.NoError(t, db.Delete(ctx, math.MinInt64, math.MaxInt64, defaultMatcher)) + require.NoError(t, db.Compact(ctx)) require.Equal(t, 5, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran)), "compaction should have been triggered here once for each block that have tombstones") actBlocks, err := blockDirs(db.Dir()) @@ -2149,6 +2150,7 @@ func TestNoEmptyBlocks(t *testing.T) { } func TestDB_LabelNames(t *testing.T) { + ctx := context.Background() tests := []struct { // Add 'sampleLabels1' -> Test Head -> Compact -> Test Disk -> // -> Add 'sampleLabels2' -> Test Head+Disk @@ -2192,7 +2194,6 @@ func TestDB_LabelNames(t *testing.T) { // Appends samples into the database. appendSamples := func(db *DB, mint, maxt int64, sampleLabels [][2]string) { t.Helper() - ctx := context.Background() app := db.Appender(ctx) for i := mint; i <= maxt; i++ { for _, tuple := range sampleLabels { @@ -2205,6 +2206,7 @@ func TestDB_LabelNames(t *testing.T) { require.NoError(t, err) } for _, tst := range tests { + ctx := context.Background() db := openTestDB(t, nil, nil) defer func() { require.NoError(t, db.Close()) @@ -2215,34 +2217,34 @@ func TestDB_LabelNames(t *testing.T) { // Testing head. headIndexr, err := db.head.Index() require.NoError(t, err) - labelNames, err := headIndexr.LabelNames() + labelNames, err := headIndexr.LabelNames(ctx) require.NoError(t, err) require.Equal(t, tst.exp1, labelNames) require.NoError(t, headIndexr.Close()) // Testing disk. - err = db.Compact() + err = db.Compact(ctx) require.NoError(t, err) // All blocks have same label names, hence check them individually. // No need to aggregate and check. for _, b := range db.Blocks() { blockIndexr, err := b.Index() require.NoError(t, err) - labelNames, err = blockIndexr.LabelNames() + labelNames, err = blockIndexr.LabelNames(ctx) require.NoError(t, err) require.Equal(t, tst.exp1, labelNames) require.NoError(t, blockIndexr.Close()) } // Adding more samples to head with new label names - // so that we can test (head+disk).LabelNames() (the union). + // so that we can test (head+disk).LabelNames(ctx) (the union). appendSamples(db, 5, 9, tst.sampleLabels2) // Testing DB (union). - q, err := db.Querier(context.TODO(), math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) - var ws storage.Warnings - labelNames, ws, err = q.LabelNames() + var ws annotations.Annotations + labelNames, ws, err = q.LabelNames(ctx) require.NoError(t, err) require.Equal(t, 0, len(ws)) require.NoError(t, q.Close()) @@ -2271,21 +2273,21 @@ func TestCorrectNumTombstones(t *testing.T) { } require.NoError(t, app.Commit()) - err := db.Compact() + err := db.Compact(ctx) require.NoError(t, err) require.Equal(t, 1, len(db.blocks)) - require.NoError(t, db.Delete(0, 1, defaultMatcher)) + require.NoError(t, db.Delete(ctx, 0, 1, defaultMatcher)) require.Equal(t, uint64(1), db.blocks[0].meta.Stats.NumTombstones) // {0, 1} and {2, 3} are merged to form 1 tombstone. 
- require.NoError(t, db.Delete(2, 3, defaultMatcher)) + require.NoError(t, db.Delete(ctx, 2, 3, defaultMatcher)) require.Equal(t, uint64(1), db.blocks[0].meta.Stats.NumTombstones) - require.NoError(t, db.Delete(5, 6, defaultMatcher)) + require.NoError(t, db.Delete(ctx, 5, 6, defaultMatcher)) require.Equal(t, uint64(2), db.blocks[0].meta.Stats.NumTombstones) - require.NoError(t, db.Delete(9, 11, defaultMatcher)) + require.NoError(t, db.Delete(ctx, 9, 11, defaultMatcher)) require.Equal(t, uint64(3), db.blocks[0].meta.Stats.NumTombstones) } @@ -2441,10 +2443,10 @@ func TestDBReadOnly(t *testing.T) { require.NoError(t, err) require.Greater(t, expDbSize, dbSizeBeforeAppend, "db size didn't increase after an append") - q, err := dbWritable.Querier(context.TODO(), math.MinInt64, math.MaxInt64) + q, err := dbWritable.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) expSeries = query(t, q, matchAll) - cq, err := dbWritable.ChunkQuerier(context.TODO(), math.MinInt64, math.MaxInt64) + cq, err := dbWritable.ChunkQuerier(math.MinInt64, math.MaxInt64) require.NoError(t, err) expChunks = queryAndExpandChunks(t, cq, matchAll) @@ -2483,7 +2485,7 @@ func TestDBReadOnly(t *testing.T) { }) t.Run("querier", func(t *testing.T) { // Open a read only db and ensure that the API returns the same result as the normal DB. - q, err := dbReadOnly.Querier(context.TODO(), math.MinInt64, math.MaxInt64) + q, err := dbReadOnly.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) readOnlySeries := query(t, q, matchAll) readOnlyDBHash := testutil.DirHash(t, dbDir) @@ -2493,7 +2495,7 @@ func TestDBReadOnly(t *testing.T) { require.Equal(t, expDBHash, readOnlyDBHash, "after all read operations the db hash should remain the same") }) t.Run("chunk querier", func(t *testing.T) { - cq, err := dbReadOnly.ChunkQuerier(context.TODO(), math.MinInt64, math.MaxInt64) + cq, err := dbReadOnly.ChunkQuerier(math.MinInt64, math.MaxInt64) require.NoError(t, err) readOnlySeries := queryAndExpandChunks(t, cq, matchAll) readOnlyDBHash := testutil.DirHash(t, dbDir) @@ -2514,7 +2516,7 @@ func TestDBReadOnlyClosing(t *testing.T) { require.Equal(t, db.Close(), ErrClosed) _, err = db.Blocks() require.Equal(t, err, ErrClosed) - _, err = db.Querier(context.TODO(), 0, 1) + _, err = db.Querier(0, 1) require.Equal(t, err, ErrClosed) } @@ -2561,12 +2563,12 @@ func TestDBReadOnly_FlushWAL(t *testing.T) { require.NoError(t, err) require.Equal(t, len(blocks), 1) - querier, err := db.Querier(context.TODO(), 0, int64(maxt)-1) + querier, err := db.Querier(0, int64(maxt)-1) require.NoError(t, err) defer func() { require.NoError(t, querier.Close()) }() // Sum the values. 
- seriesSet := querier.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, defaultLabelName, "flush")) + seriesSet := querier.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, defaultLabelName, "flush")) var series chunkenc.Iterator sum := 0.0 @@ -2631,11 +2633,11 @@ func TestDBCannotSeePartialCommits(t *testing.T) { inconsistencies := 0 for i := 0; i < 10; i++ { func() { - querier, err := db.Querier(context.Background(), 0, 1000000) + querier, err := db.Querier(0, 1000000) require.NoError(t, err) defer querier.Close() - ss := querier.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) + ss := querier.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) _, seriesSet, ws, err := expandSeriesSet(ss) require.NoError(t, err) require.Equal(t, 0, len(ws)) @@ -2665,7 +2667,7 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) { require.NoError(t, err) defer db.Close() - querierBeforeAdd, err := db.Querier(context.Background(), 0, 1000000) + querierBeforeAdd, err := db.Querier(0, 1000000) require.NoError(t, err) defer querierBeforeAdd.Close() @@ -2674,18 +2676,18 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) { _, err = app.Append(0, labels.FromStrings("foo", "bar"), 0, 0) require.NoError(t, err) - querierAfterAddButBeforeCommit, err := db.Querier(context.Background(), 0, 1000000) + querierAfterAddButBeforeCommit, err := db.Querier(0, 1000000) require.NoError(t, err) defer querierAfterAddButBeforeCommit.Close() // None of the queriers should return anything after the Add but before the commit. - ss := querierBeforeAdd.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) + ss := querierBeforeAdd.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) _, seriesSet, ws, err := expandSeriesSet(ss) require.NoError(t, err) require.Equal(t, 0, len(ws)) require.Equal(t, map[string][]sample{}, seriesSet) - ss = querierAfterAddButBeforeCommit.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) + ss = querierAfterAddButBeforeCommit.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) _, seriesSet, ws, err = expandSeriesSet(ss) require.NoError(t, err) require.Equal(t, 0, len(ws)) @@ -2696,25 +2698,25 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) { require.NoError(t, err) // Nothing returned for querier created before the Add. - ss = querierBeforeAdd.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) + ss = querierBeforeAdd.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) _, seriesSet, ws, err = expandSeriesSet(ss) require.NoError(t, err) require.Equal(t, 0, len(ws)) require.Equal(t, map[string][]sample{}, seriesSet) // Series exists but has no samples for querier created after Add. - ss = querierAfterAddButBeforeCommit.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) + ss = querierAfterAddButBeforeCommit.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) _, seriesSet, ws, err = expandSeriesSet(ss) require.NoError(t, err) require.Equal(t, 0, len(ws)) require.Equal(t, map[string][]sample{`{foo="bar"}`: {}}, seriesSet) - querierAfterCommit, err := db.Querier(context.Background(), 0, 1000000) + querierAfterCommit, err := db.Querier(0, 1000000) require.NoError(t, err) defer querierAfterCommit.Close() // Samples are returned for querier created after Commit. 
- ss = querierAfterCommit.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) + ss = querierAfterCommit.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) _, seriesSet, ws, err = expandSeriesSet(ss) require.NoError(t, err) require.Equal(t, 0, len(ws)) @@ -3015,11 +3017,11 @@ func TestCompactHead(t *testing.T) { require.Equal(t, 1, len(db.Blocks())) require.Equal(t, int64(maxt), db.Head().MinTime()) defer func() { require.NoError(t, db.Close()) }() - querier, err := db.Querier(context.Background(), 0, int64(maxt)-1) + querier, err := db.Querier(0, int64(maxt)-1) require.NoError(t, err) defer func() { require.NoError(t, querier.Close()) }() - seriesSet := querier.Select(false, nil, &labels.Matcher{Type: labels.MatchEqual, Name: "a", Value: "b"}) + seriesSet := querier.Select(ctx, false, nil, &labels.Matcher{Type: labels.MatchEqual, Name: "a", Value: "b"}) var series chunkenc.Iterator var actSamples []sample @@ -3040,12 +3042,14 @@ func TestCompactHeadWithDeletion(t *testing.T) { db, err := Open(t.TempDir(), log.NewNopLogger(), prometheus.NewRegistry(), nil, nil) require.NoError(t, err) - app := db.Appender(context.Background()) + ctx := context.Background() + + app := db.Appender(ctx) _, err = app.Append(0, labels.FromStrings("a", "b"), 10, rand.Float64()) require.NoError(t, err) require.NoError(t, app.Commit()) - err = db.Delete(0, 100, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) + err = db.Delete(ctx, 0, 100, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.NoError(t, err) // This recreates the bug. @@ -3204,6 +3208,7 @@ func TestOneCheckpointPerCompactCall(t *testing.T) { } tmpDir := t.TempDir() + ctx := context.Background() db, err := Open(tmpDir, log.NewNopLogger(), prometheus.NewRegistry(), tsdbCfg, nil) require.NoError(t, err) @@ -3235,7 +3240,7 @@ func TestOneCheckpointPerCompactCall(t *testing.T) { require.Equal(t, 60, last) require.Equal(t, 0.0, prom_testutil.ToFloat64(db.head.metrics.checkpointCreationTotal)) - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Equal(t, 1.0, prom_testutil.ToFloat64(db.head.metrics.checkpointCreationTotal)) // As the data spans for 59 blocks, 58 go to disk and 1 remains in Head. @@ -3293,7 +3298,7 @@ func TestOneCheckpointPerCompactCall(t *testing.T) { require.Equal(t, 62, last) require.Equal(t, 0.0, prom_testutil.ToFloat64(db.head.metrics.checkpointCreationTotal)) - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Equal(t, 1.0, prom_testutil.ToFloat64(db.head.metrics.checkpointCreationTotal)) // No new blocks should be created as there was not data in between the new samples and the blocks. @@ -3392,7 +3397,7 @@ func testQuerierShouldNotPanicIfHeadChunkIsTruncatedWhileReadingQueriedChunks(t } // Compact the TSDB head for the first time. We expect the head chunks file has been cut. - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Equal(t, float64(1), prom_testutil.ToFloat64(db.Head().metrics.headTruncateTotal)) // Push more samples for another 1x block duration period. @@ -3410,7 +3415,7 @@ func testQuerierShouldNotPanicIfHeadChunkIsTruncatedWhileReadingQueriedChunks(t // At this point we expect 2 mmap-ed head chunks. // Get a querier and make sure it's closed only once the test is over. 
- querier, err := db.Querier(ctx, 0, math.MaxInt64) + querier, err := db.Querier(0, math.MaxInt64) require.NoError(t, err) defer func() { require.NoError(t, querier.Close()) @@ -3418,7 +3423,7 @@ func testQuerierShouldNotPanicIfHeadChunkIsTruncatedWhileReadingQueriedChunks(t // Query back all series. hints := &storage.SelectHints{Start: 0, End: math.MaxInt64, Step: interval} - seriesSet := querier.Select(true, hints, labels.MustNewMatcher(labels.MatchRegexp, labels.MetricName, ".+")) + seriesSet := querier.Select(ctx, true, hints, labels.MustNewMatcher(labels.MatchRegexp, labels.MetricName, ".+")) // Fetch samples iterators from all series. var iterators []chunkenc.Iterator @@ -3437,7 +3442,7 @@ func testQuerierShouldNotPanicIfHeadChunkIsTruncatedWhileReadingQueriedChunks(t require.Equal(t, actualSeries, numSeries) // Compact the TSDB head again. - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Equal(t, float64(2), prom_testutil.ToFloat64(db.Head().metrics.headTruncateTotal)) // At this point we expect 1 head chunk has been deleted. @@ -3528,7 +3533,7 @@ func testChunkQuerierShouldNotPanicIfHeadChunkIsTruncatedWhileReadingQueriedChun } // Compact the TSDB head for the first time. We expect the head chunks file has been cut. - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Equal(t, float64(1), prom_testutil.ToFloat64(db.Head().metrics.headTruncateTotal)) // Push more samples for another 1x block duration period. @@ -3546,7 +3551,7 @@ func testChunkQuerierShouldNotPanicIfHeadChunkIsTruncatedWhileReadingQueriedChun // At this point we expect 2 mmap-ed head chunks. // Get a querier and make sure it's closed only once the test is over. - querier, err := db.ChunkQuerier(ctx, 0, math.MaxInt64) + querier, err := db.ChunkQuerier(0, math.MaxInt64) require.NoError(t, err) defer func() { require.NoError(t, querier.Close()) @@ -3554,7 +3559,7 @@ func testChunkQuerierShouldNotPanicIfHeadChunkIsTruncatedWhileReadingQueriedChun // Query back all series. hints := &storage.SelectHints{Start: 0, End: math.MaxInt64, Step: interval} - seriesSet := querier.Select(true, hints, labels.MustNewMatcher(labels.MatchRegexp, labels.MetricName, ".+")) + seriesSet := querier.Select(ctx, true, hints, labels.MustNewMatcher(labels.MatchRegexp, labels.MetricName, ".+")) // Iterate all series and get their chunks. var it chunks.Iterator @@ -3571,7 +3576,7 @@ func testChunkQuerierShouldNotPanicIfHeadChunkIsTruncatedWhileReadingQueriedChun require.Equal(t, actualSeries, numSeries) // Compact the TSDB head again. - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Equal(t, float64(2), prom_testutil.ToFloat64(db.Head().metrics.headTruncateTotal)) // At this point we expect 1 head chunk has been deleted. @@ -3803,6 +3808,7 @@ func TestOOOWALWrite(t *testing.T) { // Tests https://github.com/prometheus/prometheus/issues/10291#issuecomment-1044373110. func TestDBPanicOnMmappingHeadChunk(t *testing.T) { dir := t.TempDir() + ctx := context.Background() db, err := Open(dir, nil, nil, DefaultOptions(), nil) require.NoError(t, err) @@ -3833,7 +3839,7 @@ func TestDBPanicOnMmappingHeadChunk(t *testing.T) { addSamples(numSamples) require.Len(t, db.Blocks(), 0) - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Len(t, db.Blocks(), 0) // Restarting. 
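// The hunks above and below repeat one mechanical migration: the
// context.Context argument moves off the Querier/ChunkQuerier constructors
// and onto the per-call methods (Select, LabelValues, LabelNames). A minimal
// caller-side sketch of the new shape, illustrative only, assuming an opened
// *tsdb.DB named db and an arbitrary matcher:
//
//	ctx := context.Background()
//
//	// Before: q, err := db.Querier(ctx, mint, maxt)
//	q, err := db.Querier(0, math.MaxInt64)
//	if err != nil {
//		return err
//	}
//	defer q.Close()
//
//	// Before: ss := q.Select(sortSeries, hints, matchers...)
//	ss := q.Select(ctx, false, nil,
//		labels.MustNewMatcher(labels.MatchEqual, "foo", "bar"))
//	for ss.Next() {
//		_ = ss.At() // consume each series
//	}
//	return ss.Err()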
@@ -3848,7 +3854,7 @@ func TestDBPanicOnMmappingHeadChunk(t *testing.T) { addSamples(numSamples) require.Len(t, db.Blocks(), 0) - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Len(t, db.Blocks(), 1) // More samples to m-map and panic. @@ -4114,6 +4120,7 @@ func TestMetadataAssertInMemoryData(t *testing.T) { // are not included in this compaction. func TestOOOCompaction(t *testing.T) { dir := t.TempDir() + ctx := context.Background() opts := DefaultOptions() opts.OutOfOrderCapMax = 30 @@ -4177,7 +4184,7 @@ func TestOOOCompaction(t *testing.T) { series2.String(): series2Samples, } - q, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) actRes := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*")) @@ -4211,7 +4218,7 @@ func TestOOOCompaction(t *testing.T) { require.Greater(t, f.Size(), int64(100)) // OOO compaction happens here. - require.NoError(t, db.CompactOOOHead()) + require.NoError(t, db.CompactOOOHead(ctx)) // 3 blocks exist now. [0, 120), [120, 240), [240, 360) require.Equal(t, len(db.Blocks()), 3) @@ -4279,7 +4286,7 @@ func TestOOOCompaction(t *testing.T) { require.Equal(t, "000001", files[0].Name()) // This will merge overlapping block. - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Equal(t, len(db.Blocks()), 3) // [0, 120), [120, 240), [240, 360) verifySamples(db.Blocks()[0], 90, 119) @@ -4293,6 +4300,7 @@ func TestOOOCompaction(t *testing.T) { // when the normal head's compaction is done. func TestOOOCompactionWithNormalCompaction(t *testing.T) { dir := t.TempDir() + ctx := context.Background() opts := DefaultOptions() opts.OutOfOrderCapMax = 30 @@ -4335,7 +4343,7 @@ func TestOOOCompactionWithNormalCompaction(t *testing.T) { } // If the normal Head is not compacted, the OOO head compaction does not take place. - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Equal(t, len(db.Blocks()), 0) // Add more in-order samples in future that would trigger the compaction. @@ -4345,7 +4353,7 @@ func TestOOOCompactionWithNormalCompaction(t *testing.T) { require.Equal(t, len(db.Blocks()), 0) // Compacts normal and OOO head. - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) // 2 blocks exist now. [0, 120), [250, 360) require.Equal(t, len(db.Blocks()), 2) @@ -4392,6 +4400,7 @@ func TestOOOCompactionWithNormalCompaction(t *testing.T) { // and out-of-order head func TestOOOCompactionWithDisabledWriteLog(t *testing.T) { dir := t.TempDir() + ctx := context.Background() opts := DefaultOptions() opts.OutOfOrderCapMax = 30 @@ -4435,7 +4444,7 @@ func TestOOOCompactionWithDisabledWriteLog(t *testing.T) { } // If the normal Head is not compacted, the OOO head compaction does not take place. - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Equal(t, len(db.Blocks()), 0) // Add more in-order samples in future that would trigger the compaction. @@ -4445,7 +4454,7 @@ func TestOOOCompactionWithDisabledWriteLog(t *testing.T) { require.Equal(t, len(db.Blocks()), 0) // Compacts normal and OOO head. - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) // 2 blocks exist now. [0, 120), [250, 360) require.Equal(t, len(db.Blocks()), 2) @@ -4492,6 +4501,7 @@ func TestOOOCompactionWithDisabledWriteLog(t *testing.T) { // data from the mmap chunks. 
func TestOOOQueryAfterRestartWithSnapshotAndRemovedWBL(t *testing.T) { dir := t.TempDir() + ctx := context.Background() opts := DefaultOptions() opts.OutOfOrderCapMax = 10 @@ -4568,7 +4578,7 @@ func TestOOOQueryAfterRestartWithSnapshotAndRemovedWBL(t *testing.T) { series2.String(): series2Samples, } - q, err := db.Querier(context.Background(), fromMins*time.Minute.Milliseconds(), toMins*time.Minute.Milliseconds()) + q, err := db.Querier(fromMins*time.Minute.Milliseconds(), toMins*time.Minute.Milliseconds()) require.NoError(t, err) actRes := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*")) @@ -4580,7 +4590,7 @@ func TestOOOQueryAfterRestartWithSnapshotAndRemovedWBL(t *testing.T) { // Compaction should also work fine. require.Equal(t, len(db.Blocks()), 0) - require.NoError(t, db.CompactOOOHead()) + require.NoError(t, db.CompactOOOHead(ctx)) require.Equal(t, len(db.Blocks()), 1) // One block from OOO data. require.Equal(t, int64(0), db.Blocks()[0].MinTime()) require.Equal(t, 120*time.Minute.Milliseconds(), db.Blocks()[0].MaxTime()) @@ -4668,7 +4678,7 @@ func Test_Querier_OOOQuery(t *testing.T) { return expSamples[i].T() < expSamples[j].T() }) - querier, err := db.Querier(context.TODO(), tc.queryMinT, tc.queryMaxT) + querier, err := db.Querier(tc.queryMinT, tc.queryMaxT) require.NoError(t, err) defer querier.Close() @@ -4753,7 +4763,7 @@ func Test_ChunkQuerier_OOOQuery(t *testing.T) { return expSamples[i].T() < expSamples[j].T() }) - querier, err := db.ChunkQuerier(context.TODO(), tc.queryMinT, tc.queryMaxT) + querier, err := db.ChunkQuerier(tc.queryMinT, tc.queryMaxT) require.NoError(t, err) defer querier.Close() @@ -4814,7 +4824,7 @@ func TestOOOAppendAndQuery(t *testing.T) { } testQuery := func(from, to int64) { - querier, err := db.Querier(context.TODO(), from, to) + querier, err := db.Querier(from, to) require.NoError(t, err) seriesSet := query(t, querier, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.")) @@ -4943,7 +4953,7 @@ func TestOOODisabled(t *testing.T) { addSample(s1, 59, 59, true) // Out of time window again. addSample(s1, 301, 310, false) // More in-order samples. 
- querier, err := db.Querier(context.TODO(), math.MinInt64, math.MaxInt64) + querier, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) seriesSet := query(t, querier, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.")) @@ -4995,7 +5005,7 @@ func TestWBLAndMmapReplay(t *testing.T) { } testQuery := func(exp map[string][]chunks.Sample) { - querier, err := db.Querier(context.TODO(), math.MinInt64, math.MaxInt64) + querier, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) seriesSet := query(t, querier, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.")) @@ -5151,6 +5161,7 @@ func TestWBLAndMmapReplay(t *testing.T) { func TestOOOCompactionFailure(t *testing.T) { dir := t.TempDir() + ctx := context.Background() opts := DefaultOptions() opts.OutOfOrderCapMax = 30 @@ -5213,7 +5224,7 @@ func TestOOOCompactionFailure(t *testing.T) { originalCompactor := db.compactor db.compactor = &mockCompactorFailing{t: t} for i := 0; i < 5; i++ { - require.Error(t, db.CompactOOOHead()) + require.Error(t, db.CompactOOOHead(ctx)) } require.Equal(t, len(db.Blocks()), 0) @@ -5224,7 +5235,7 @@ func TestOOOCompactionFailure(t *testing.T) { verifyFirstWBLFileIs0(6) db.compactor = originalCompactor - require.NoError(t, db.CompactOOOHead()) + require.NoError(t, db.CompactOOOHead(ctx)) oldBlocks := db.Blocks() require.Equal(t, len(db.Blocks()), 3) @@ -5236,7 +5247,7 @@ func TestOOOCompactionFailure(t *testing.T) { // The failed compaction should not have left the ooo Head corrupted. // Hence, expect no new blocks with another OOO compaction call. - require.NoError(t, db.CompactOOOHead()) + require.NoError(t, db.CompactOOOHead(ctx)) require.Equal(t, len(db.Blocks()), 3) require.Equal(t, oldBlocks, db.Blocks()) @@ -5383,7 +5394,7 @@ func TestWBLCorruption(t *testing.T) { series1.String(): expSamples, } - q, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) actRes := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*")) @@ -5491,7 +5502,7 @@ func TestOOOMmapCorruption(t *testing.T) { series1.String(): expSamples, } - q, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) actRes := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*")) @@ -5557,6 +5568,8 @@ func TestOOOMmapCorruption(t *testing.T) { } func TestOutOfOrderRuntimeConfig(t *testing.T) { + ctx := context.Background() + getDB := func(oooTimeWindow int64) *DB { dir := t.TempDir() @@ -5609,7 +5622,7 @@ func TestOutOfOrderRuntimeConfig(t *testing.T) { series1.String(): expSamples, } - q, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) actRes := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*")) @@ -5623,7 +5636,7 @@ func TestOutOfOrderRuntimeConfig(t *testing.T) { require.Greater(t, size, int64(0)) require.Len(t, db.Blocks(), 0) - require.NoError(t, db.compactOOOHead()) + require.NoError(t, db.compactOOOHead(ctx)) require.Greater(t, len(db.Blocks()), 0) // WBL is empty. 
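// As the hunks above show, DB.Compact, DB.CompactOOOHead and DB.Delete now
// accept a context, so long-running maintenance work can observe
// cancellation. A short sketch of a cancellable delete-then-compact cycle
// (a sketch only; the timeout and matcher values are arbitrary):
//
//	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
//	defer cancel()
//
//	// Before: db.Delete(mint, maxt, ms...) and db.Compact()
//	if err := db.Delete(ctx, 0, 1000,
//		labels.MustNewMatcher(labels.MatchEqual, "a", "b")); err != nil {
//		return err // should surface ctx.Err() if the deadline was hit
//	}
//	return db.Compact(ctx)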
@@ -5812,7 +5825,7 @@ func TestNoGapAfterRestartWithOOO(t *testing.T) { series1.String(): expSamples, } - q, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) actRes := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*")) @@ -5843,6 +5856,7 @@ func TestNoGapAfterRestartWithOOO(t *testing.T) { for i, c := range cases { t.Run(fmt.Sprintf("case=%d", i), func(t *testing.T) { dir := t.TempDir() + ctx := context.Background() opts := DefaultOptions() opts.OutOfOrderTimeWindow = 30 * time.Minute.Milliseconds() @@ -5863,7 +5877,7 @@ func TestNoGapAfterRestartWithOOO(t *testing.T) { verifySamples(t, db, c.inOrderMint, c.inOrderMaxt) // We get 2 blocks. 1 from OOO, 1 from in-order. - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) verifyBlockRanges := func() { blocks := db.Blocks() require.Equal(t, len(c.blockRanges), len(blocks)) @@ -5931,7 +5945,7 @@ func TestWblReplayAfterOOODisableAndRestart(t *testing.T) { series1.String(): expSamples, } - q, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) actRes := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*")) @@ -6000,6 +6014,7 @@ func TestPanicOnApplyConfig(t *testing.T) { func TestDiskFillingUpAfterDisablingOOO(t *testing.T) { dir := t.TempDir() + ctx := context.Background() opts := DefaultOptions() opts.OutOfOrderTimeWindow = 60 * time.Minute.Milliseconds() @@ -6064,14 +6079,14 @@ func TestDiskFillingUpAfterDisablingOOO(t *testing.T) { db.head.mmapHeadChunks() checkMmapFileContents([]string{"000001", "000002"}, nil) - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) checkMmapFileContents([]string{"000002"}, []string{"000001"}) require.Nil(t, ms.ooo, "OOO mmap chunk was not compacted") addSamples(501, 650) db.head.mmapHeadChunks() checkMmapFileContents([]string{"000002", "000003"}, []string{"000001"}) - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) checkMmapFileContents(nil, []string{"000001", "000002", "000003"}) // Verify that WBL is empty. 
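// Alongside the context plumbing, this file's earlier hunks switch the second
// return value of LabelValues/LabelNames from storage.Warnings to
// util/annotations.Annotations. A sketch of consuming the new type, assuming
// Annotations exposes AsErrors as in this release's util/annotations package;
// the label name "job" and the logger variable are arbitrary examples:
//
//	values, annots, err := q.LabelValues(ctx, "job")
//	if err != nil {
//		return err
//	}
//	for _, w := range annots.AsErrors() {
//		level.Warn(logger).Log("msg", "label query annotation", "warn", w)
//	}
//	_ = values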
@@ -6133,7 +6148,7 @@ func testHistogramAppendAndQueryHelper(t *testing.T, floatHistogram bool) { testQuery := func(name, value string, exp map[string][]chunks.Sample) { t.Helper() - q, err := db.Querier(ctx, math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) act := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, name, value)) require.Equal(t, exp, act) @@ -6369,8 +6384,6 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) { require.NoError(t, db.Close()) }) - ctx := context.Background() - var it chunkenc.Iterator exp := make(map[string][]chunks.Sample) for _, series := range blockSeries { @@ -6406,7 +6419,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) { require.NoError(t, db.reload()) require.Len(t, db.Blocks(), len(blockSeries)) - q, err := db.Querier(ctx, math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) res := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*")) compareSeries(t, exp, res) @@ -6423,7 +6436,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) { require.NoError(t, db.reload()) require.Len(t, db.Blocks(), 1) - q, err = db.Querier(ctx, math.MinInt64, math.MaxInt64) + q, err = db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) res = query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*")) @@ -6550,7 +6563,7 @@ func TestNativeHistogramFlag(t *testing.T) { require.NoError(t, app.Commit()) - q, err := db.Querier(context.Background(), math.MinInt, math.MaxInt64) + q, err := db.Querier(math.MinInt, math.MaxInt64) require.NoError(t, err) act := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) require.Equal(t, map[string][]chunks.Sample{ @@ -6634,12 +6647,12 @@ func TestChunkQuerierReadWriteRace(t *testing.T) { } reader := func() { - querier, err := db.ChunkQuerier(context.Background(), math.MinInt64, math.MaxInt64) + querier, err := db.ChunkQuerier(math.MinInt64, math.MaxInt64) require.NoError(t, err) defer func(q storage.ChunkQuerier) { require.NoError(t, q.Close()) }(querier) - ss := querier.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) + ss := querier.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) for ss.Next() { cs := ss.At() it := cs.Iterator(nil) diff --git a/tsdb/example_test.go b/tsdb/example_test.go index da0e37923d..46deae5198 100644 --- a/tsdb/example_test.go +++ b/tsdb/example_test.go @@ -59,9 +59,9 @@ func Example() { // ... adding more samples. // Open a querier for reading. - querier, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64) + querier, err := db.Querier(math.MinInt64, math.MaxInt64) noErr(err) - ss := querier.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) + ss := querier.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) for ss.Next() { series := ss.At() diff --git a/tsdb/head.go b/tsdb/head.go index eee034951a..66c44ec990 100644 --- a/tsdb/head.go +++ b/tsdb/head.go @@ -14,6 +14,7 @@ package tsdb import ( + "context" "fmt" "io" "math" @@ -1453,19 +1454,23 @@ func (h *RangeHead) String() string { // Delete all samples in the range of [mint, maxt] for series that satisfy the given // label matchers. 
-func (h *Head) Delete(mint, maxt int64, ms ...*labels.Matcher) error {
+func (h *Head) Delete(ctx context.Context, mint, maxt int64, ms ...*labels.Matcher) error {
 	// Do not delete anything beyond the currently valid range.
 	mint, maxt = clampInterval(mint, maxt, h.MinTime(), h.MaxTime())
 
 	ir := h.indexRange(mint, maxt)
 
-	p, err := ir.PostingsForMatchers(false, ms...)
+	p, err := ir.PostingsForMatchers(ctx, false, ms...)
 	if err != nil {
 		return errors.Wrap(err, "select series")
 	}
 
 	var stones []tombstones.Stone
 	for p.Next() {
+		if err := ctx.Err(); err != nil {
+			return errors.Wrap(err, "select series")
+		}
+
 		series := h.series.getByID(chunks.HeadSeriesRef(p.At()))
 		if series == nil {
 			level.Debug(h.logger).Log("msg", "Series not found in Head.Delete")
@@ -1485,6 +1490,10 @@ func (h *Head) Delete(mint, maxt int64, ms ...*labels.Matcher) error {
 	if p.Err() != nil {
 		return p.Err()
 	}
+	if ctx.Err() != nil {
+		return errors.Wrap(ctx.Err(), "select series")
+	}
+
 	if h.wal != nil {
 		var enc record.Encoder
 		if err := h.wal.Log(enc.Tombstones(stones, nil)); err != nil {
diff --git a/tsdb/head_read.go b/tsdb/head_read.go
index d19bc7a4ce..8a2605c2fd 100644
--- a/tsdb/head_read.go
+++ b/tsdb/head_read.go
@@ -62,8 +62,8 @@
 // specific label name that are within the time range mint to maxt.
 // If matchers are specified the returned result set is reduced
 // to label values of metrics matching the matchers.
-func (h *headIndexReader) SortedLabelValues(name string, matchers ...*labels.Matcher) ([]string, error) {
-	values, err := h.LabelValues(name, matchers...)
+func (h *headIndexReader) SortedLabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) {
+	values, err := h.LabelValues(ctx, name, matchers...)
 	if err == nil {
 		slices.Sort(values)
 	}
@@ -74,21 +74,21 @@ func (h *headIndexReader) SortedLabelValues(name string, matchers ...*labels.Mat
 // specific label name that are within the time range mint to maxt.
 // If matchers are specified the returned result set is reduced
 // to label values of metrics matching the matchers.
-func (h *headIndexReader) LabelValues(name string, matchers ...*labels.Matcher) ([]string, error) {
+func (h *headIndexReader) LabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) {
 	if h.maxt < h.head.MinTime() || h.mint > h.head.MaxTime() {
 		return []string{}, nil
 	}
 
 	if len(matchers) == 0 {
-		return h.head.postings.LabelValues(name), nil
+		return h.head.postings.LabelValues(ctx, name), nil
 	}
 
-	return labelValuesWithMatchers(h, name, matchers...)
+	return labelValuesWithMatchers(ctx, h, name, matchers...)
 }
 
 // LabelNames returns all the unique label names present in the head
 // that are within the time range mint to maxt.
-func (h *headIndexReader) LabelNames(matchers ...*labels.Matcher) ([]string, error) {
+func (h *headIndexReader) LabelNames(ctx context.Context, matchers ...*labels.Matcher) ([]string, error) {
 	if h.maxt < h.head.MinTime() || h.mint > h.head.MaxTime() {
 		return []string{}, nil
 	}
@@ -99,11 +99,11 @@ func (h *headIndexReader) LabelNames(matchers ...*labels.Matcher) ([]string, err
 		return labelNames, nil
 	}
 
-	return labelNamesWithMatchers(h, matchers...)
+	return labelNamesWithMatchers(ctx, h, matchers...)
 }
 
 // Postings returns the postings list iterator for the label pairs.
-func (h *headIndexReader) Postings(name string, values ...string) (index.Postings, error) { +func (h *headIndexReader) Postings(ctx context.Context, name string, values ...string) (index.Postings, error) { switch len(values) { case 0: return index.EmptyPostings(), nil @@ -116,12 +116,12 @@ func (h *headIndexReader) Postings(name string, values ...string) (index.Posting res = append(res, p) } } - return index.Merge(res...), nil + return index.Merge(ctx, res...), nil } } -func (h *headIndexReader) PostingsForMatchers(concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { - return h.head.pfmc.PostingsForMatchers(h, concurrent, ms...) +func (h *headIndexReader) PostingsForMatchers(ctx context.Context, concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { + return h.head.pfmc.PostingsForMatchers(ctx, h, concurrent, ms...) } func (h *headIndexReader) SortedPostings(p index.Postings) index.Postings { @@ -245,7 +245,7 @@ func (s *memSeries) oooHeadChunkID(pos int) chunks.HeadChunkID { } // LabelValueFor returns label value for the given label name in the series referred to by ID. -func (h *headIndexReader) LabelValueFor(id storage.SeriesRef, label string) (string, error) { +func (h *headIndexReader) LabelValueFor(_ context.Context, id storage.SeriesRef, label string) (string, error) { memSeries := h.head.series.getByID(chunks.HeadSeriesRef(id)) if memSeries == nil { return "", storage.ErrNotFound @@ -261,9 +261,12 @@ func (h *headIndexReader) LabelValueFor(id storage.SeriesRef, label string) (str // LabelNamesFor returns all the label names for the series referred to by IDs. // The names returned are sorted. -func (h *headIndexReader) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) { +func (h *headIndexReader) LabelNamesFor(ctx context.Context, ids ...storage.SeriesRef) ([]string, error) { namesMap := make(map[string]struct{}) for _, id := range ids { + if ctx.Err() != nil { + return nil, ctx.Err() + } memSeries := h.head.series.getByID(chunks.HeadSeriesRef(id)) if memSeries == nil { return nil, storage.ErrNotFound diff --git a/tsdb/head_test.go b/tsdb/head_test.go index d028cfd296..082fcbae26 100644 --- a/tsdb/head_test.go +++ b/tsdb/head_test.go @@ -135,7 +135,7 @@ func BenchmarkHeadAppender_Append_Commit_ExistingSeries(b *testing.B) { } } -func populateTestWAL(t testing.TB, w *wlog.WL, recs []interface{}) { +func populateTestWL(t testing.TB, w *wlog.WL, recs []interface{}) { var enc record.Encoder for _, r := range recs { switch v := r.(type) { @@ -147,6 +147,8 @@ func populateTestWAL(t testing.TB, w *wlog.WL, recs []interface{}) { require.NoError(t, w.Log(enc.Tombstones(v, nil))) case []record.RefExemplar: require.NoError(t, w.Log(enc.Exemplars(v, nil))) + case []record.RefMmapMarker: + require.NoError(t, w.Log(enc.MmapMarkers(v, nil))) } } } @@ -197,13 +199,18 @@ func readTestWAL(t testing.TB, dir string) (recs []interface{}) { return recs } -func BenchmarkLoadWAL(b *testing.B) { +func BenchmarkLoadWLs(b *testing.B) { cases := []struct { // Total series is (batches*seriesPerBatch). batches int seriesPerBatch int samplesPerSeries int mmappedChunkT int64 + // The first oooSeriesPct*seriesPerBatch series in a batch are selected as "OOO" series. + oooSeriesPct float64 + // The first oooSamplesPct*samplesPerSeries samples in an OOO series are written as OOO samples. + oooSamplesPct float64 + oooCapMax int64 }{ { // Less series and more samples. 2 hour WAL with 1 second scrape interval. 
batches: 10, @@ -226,6 +233,31 @@ func BenchmarkLoadWAL(b *testing.B) { samplesPerSeries: 480, mmappedChunkT: 3800, }, + { // A lot of OOO samples (50% series with 50% of samples being OOO). + batches: 10, + seriesPerBatch: 1000, + samplesPerSeries: 480, + oooSeriesPct: 0.5, + oooSamplesPct: 0.5, + oooCapMax: DefaultOutOfOrderCapMax, + }, + { // Fewer OOO samples (10% of series with 10% of samples being OOO). + batches: 10, + seriesPerBatch: 1000, + samplesPerSeries: 480, + oooSeriesPct: 0.1, + oooSamplesPct: 0.1, + }, + { // 2 hour WAL with 15 second scrape interval, and mmapped chunks up to last 100 samples. + // Four mmap markers per OOO series: 480 * 0.3 = 144, 144 / 32 (DefaultOutOfOrderCapMax) = 4. + batches: 100, + seriesPerBatch: 1000, + samplesPerSeries: 480, + mmappedChunkT: 3800, + oooSeriesPct: 0.2, + oooSamplesPct: 0.3, + oooCapMax: DefaultOutOfOrderCapMax, + }, } labelsPerSeries := 5 @@ -241,12 +273,17 @@ func BenchmarkLoadWAL(b *testing.B) { continue } lastExemplarsPerSeries = exemplarsPerSeries - b.Run(fmt.Sprintf("batches=%d,seriesPerBatch=%d,samplesPerSeries=%d,exemplarsPerSeries=%d,mmappedChunkT=%d", c.batches, c.seriesPerBatch, c.samplesPerSeries, exemplarsPerSeries, c.mmappedChunkT), + b.Run(fmt.Sprintf("batches=%d,seriesPerBatch=%d,samplesPerSeries=%d,exemplarsPerSeries=%d,mmappedChunkT=%d,oooSeriesPct=%.3f,oooSamplesPct=%.3f,oooCapMax=%d", c.batches, c.seriesPerBatch, c.samplesPerSeries, exemplarsPerSeries, c.mmappedChunkT, c.oooSeriesPct, c.oooSamplesPct, c.oooCapMax), func(b *testing.B) { dir := b.TempDir() - w, err := wlog.New(nil, nil, dir, wlog.CompressionNone) + wal, err := wlog.New(nil, nil, dir, wlog.CompressionNone) require.NoError(b, err) + var wbl *wlog.WL + if c.oooSeriesPct != 0 { + wbl, err = wlog.New(nil, nil, dir, wlog.CompressionNone) + require.NoError(b, err) + } // Write series. refSeries := make([]record.RefSeries, 0, c.seriesPerBatch) @@ -260,22 +297,33 @@ func BenchmarkLoadWAL(b *testing.B) { } refSeries = append(refSeries, record.RefSeries{Ref: chunks.HeadSeriesRef(i) * 101, Labels: labels.FromMap(lbls)}) } - populateTestWAL(b, w, []interface{}{refSeries}) + populateTestWL(b, wal, []interface{}{refSeries}) } // Write samples. refSamples := make([]record.RefSample, 0, c.seriesPerBatch) + + oooSeriesPerBatch := int(float64(c.seriesPerBatch) * c.oooSeriesPct) + oooSamplesPerSeries := int(float64(c.samplesPerSeries) * c.oooSamplesPct) + for i := 0; i < c.samplesPerSeries; i++ { for j := 0; j < c.batches; j++ { refSamples = refSamples[:0] - for k := j * c.seriesPerBatch; k < (j+1)*c.seriesPerBatch; k++ { + + k := j * c.seriesPerBatch + // Skip appending the first oooSamplesPerSeries samples for the series in the batch that + // should have OOO samples. OOO samples are appended after all the in-order samples. + if i < oooSamplesPerSeries { + k += oooSeriesPerBatch + } + for ; k < (j+1)*c.seriesPerBatch; k++ { refSamples = append(refSamples, record.RefSample{ Ref: chunks.HeadSeriesRef(k) * 101, T: int64(i) * 10, V: float64(i) * 100, }) } - populateTestWAL(b, w, []interface{}{refSamples}) + populateTestWL(b, wal, []interface{}{refSamples}) } } @@ -293,6 +341,10 @@ func BenchmarkLoadWAL(b *testing.B) { lbls := labels.Labels{} s := newMemSeries(lbls, chunks.HeadSeriesRef(k)*101, labels.StableHash(lbls), 0, defaultIsolationDisabled) s.append(c.mmappedChunkT, 42, 0, cOpts) + // There's only one head chunk because only a single sample is appended. 
mmapChunks() + // ignores the latest chunk, so we need to cut a new head chunk to guarantee the chunk with + // the sample at c.mmappedChunkT is mmapped. + s.cutNewHeadChunk(c.mmappedChunkT, chunkenc.EncXOR, c.mmappedChunkT) s.mmapChunks(chunkDiskMapper) } require.NoError(b, chunkDiskMapper.Close()) @@ -311,7 +363,39 @@ func BenchmarkLoadWAL(b *testing.B) { Labels: labels.FromStrings("traceID", fmt.Sprintf("trace-%d", i)), }) } - populateTestWAL(b, w, []interface{}{refExemplars}) + populateTestWL(b, wal, []interface{}{refExemplars}) + } + } + + // Write OOO samples and mmap markers. + refMarkers := make([]record.RefMmapMarker, 0, oooSeriesPerBatch) + refSamples = make([]record.RefSample, 0, oooSeriesPerBatch) + for i := 0; i < oooSamplesPerSeries; i++ { + shouldAddMarkers := c.oooCapMax != 0 && i != 0 && int64(i)%c.oooCapMax == 0 + + for j := 0; j < c.batches; j++ { + refSamples = refSamples[:0] + if shouldAddMarkers { + refMarkers = refMarkers[:0] + } + for k := j * c.seriesPerBatch; k < (j*c.seriesPerBatch)+oooSeriesPerBatch; k++ { + ref := chunks.HeadSeriesRef(k) * 101 + if shouldAddMarkers { + // loadWBL() checks that the marker's MmapRef is less than or equal to the ref + // for the last mmap chunk. Setting MmapRef to 0 to always pass that check. + refMarkers = append(refMarkers, record.RefMmapMarker{Ref: ref, MmapRef: 0}) + } + refSamples = append(refSamples, record.RefSample{ + Ref: ref, + T: int64(i) * 10, + V: float64(i) * 100, + }) + } + if shouldAddMarkers { + populateTestWL(b, wbl, []interface{}{refMarkers}) + } + populateTestWL(b, wal, []interface{}{refSamples}) + populateTestWL(b, wbl, []interface{}{refSamples}) } } @@ -321,13 +405,19 @@ func BenchmarkLoadWAL(b *testing.B) { for i := 0; i < b.N; i++ { opts := DefaultHeadOptions() opts.ChunkRange = 1000 - opts.ChunkDirRoot = w.Dir() - h, err := NewHead(nil, nil, w, nil, opts, nil) + opts.ChunkDirRoot = dir + if c.oooCapMax > 0 { + opts.OutOfOrderCapMax.Store(c.oooCapMax) + } + h, err := NewHead(nil, nil, wal, wbl, opts, nil) require.NoError(b, err) h.Init(0) } b.StopTimer() - w.Close() + wal.Close() + if wbl != nil { + wbl.Close() + } }) } } @@ -564,7 +654,7 @@ func TestHead_ReadWAL(t *testing.T) { require.NoError(t, head.Close()) }() - populateTestWAL(t, w, entries) + populateTestWL(t, w, entries) require.NoError(t, head.Init(math.MinInt64)) require.Equal(t, uint64(101), head.lastSeriesID.Load()) @@ -717,6 +807,8 @@ func TestHead_Truncate(t *testing.T) { h.initTime(0) + ctx := context.Background() + s1, _, _ := h.getOrCreate(1, labels.FromStrings("a", "1", "b", "1")) s2, _, _ := h.getOrCreate(2, labels.FromStrings("a", "2", "b", "1")) s3, _, _ := h.getOrCreate(3, labels.FromStrings("a", "1", "b", "2")) @@ -785,7 +877,7 @@ func TestHead_Truncate(t *testing.T) { ss = map[string]struct{}{} values[name] = ss } - for _, value := range h.postings.LabelValues(name) { + for _, value := range h.postings.LabelValues(ctx, name) { ss[value] = struct{}{} } } @@ -1039,11 +1131,11 @@ func TestHeadDeleteSeriesWithoutSamples(t *testing.T) { require.NoError(t, head.Close()) }() - populateTestWAL(t, w, entries) + populateTestWL(t, w, entries) require.NoError(t, head.Init(math.MinInt64)) - require.NoError(t, head.Delete(0, 100, labels.MustNewMatcher(labels.MatchEqual, "a", "1"))) + require.NoError(t, head.Delete(context.Background(), 0, 100, labels.MustNewMatcher(labels.MatchEqual, "a", "1"))) }) } } @@ -1115,7 +1207,7 @@ func TestHeadDeleteSimple(t *testing.T) { // Delete the ranges. 
for _, r := range c.dranges { - require.NoError(t, head.Delete(r.Mint, r.Maxt, labels.MustNewMatcher(labels.MatchEqual, lblDefault.Name, lblDefault.Value))) + require.NoError(t, head.Delete(context.Background(), r.Mint, r.Maxt, labels.MustNewMatcher(labels.MatchEqual, lblDefault.Name, lblDefault.Value))) } // Add more samples. @@ -1142,7 +1234,7 @@ func TestHeadDeleteSimple(t *testing.T) { for _, h := range []*Head{head, reloadedHead} { q, err := NewBlockQuerier(h, h.MinTime(), h.MaxTime()) require.NoError(t, err) - actSeriesSet := q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, lblDefault.Name, lblDefault.Value)) + actSeriesSet := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, lblDefault.Name, lblDefault.Value)) require.NoError(t, q.Close()) expSeriesSet := newMockSeriesSet([]storage.Series{ storage.NewListSeries(lblsDefault, func() []chunks.Sample { @@ -1197,12 +1289,12 @@ func TestDeleteUntilCurMax(t *testing.T) { require.NoError(t, err) } require.NoError(t, app.Commit()) - require.NoError(t, hb.Delete(0, 10000, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) + require.NoError(t, hb.Delete(context.Background(), 0, 10000, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) // Test the series returns no samples. The series is cleared only after compaction. q, err := NewBlockQuerier(hb, 0, 100000) require.NoError(t, err) - res := q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) + res := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.True(t, res.Next(), "series is not present") s := res.At() it := s.Iterator(nil) @@ -1219,7 +1311,7 @@ func TestDeleteUntilCurMax(t *testing.T) { require.NoError(t, app.Commit()) q, err = NewBlockQuerier(hb, 0, 100000) require.NoError(t, err) - res = q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) + res = q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.True(t, res.Next(), "series don't exist") exps := res.At() it = exps.Iterator(nil) @@ -1244,7 +1336,7 @@ func TestDeletedSamplesAndSeriesStillInWALAfterCheckpoint(t *testing.T) { require.NoError(t, err) require.NoError(t, app.Commit()) } - require.NoError(t, hb.Delete(0, int64(numSamples), labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) + require.NoError(t, hb.Delete(context.Background(), 0, int64(numSamples), labels.MustNewMatcher(labels.MatchEqual, "a", "b"))) require.NoError(t, hb.Truncate(1)) require.NoError(t, hb.Close()) @@ -1376,7 +1468,7 @@ func TestDelete_e2e(t *testing.T) { } for _, del := range dels { for _, r := range del.drange { - require.NoError(t, hb.Delete(r.Mint, r.Maxt, del.ms...)) + require.NoError(t, hb.Delete(context.Background(), r.Mint, r.Maxt, del.ms...)) } matched := labels.Slice{} for _, l := range lbls { @@ -1391,7 +1483,7 @@ func TestDelete_e2e(t *testing.T) { q, err := NewBlockQuerier(hb, 0, 100000) require.NoError(t, err) defer q.Close() - ss := q.Select(true, nil, del.ms...) + ss := q.Select(context.Background(), true, nil, del.ms...) // Build the mockSeriesSet. 
matchedSeries := make([]storage.Series, 0, len(matched)) for _, m := range matched { @@ -1840,7 +1932,7 @@ func TestUncommittedSamplesNotLostOnTruncate(t *testing.T) { require.NoError(t, err) defer q.Close() - ss := q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "1")) + ss := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "1")) require.Equal(t, true, ss.Next()) for ss.Next() { } @@ -1869,7 +1961,7 @@ func TestRemoveSeriesAfterRollbackAndTruncate(t *testing.T) { q, err := NewBlockQuerier(h, 1500, 2500) require.NoError(t, err) - ss := q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "1")) + ss := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "1")) require.Equal(t, false, ss.Next()) require.Equal(t, 0, len(ss.Warnings())) require.NoError(t, q.Close()) @@ -2154,7 +2246,7 @@ func TestMemSeriesIsolation(t *testing.T) { require.NoError(t, err) defer querier.Close() - ss := querier.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) + ss := querier.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) _, seriesSet, ws, err := expandSeriesSet(ss) require.NoError(t, err) require.Equal(t, 0, len(ws)) @@ -2461,7 +2553,7 @@ func TestOutOfOrderSamplesMetric(t *testing.T) { require.NoError(t, app.Commit()) require.Equal(t, int64(math.MinInt64), db.head.minValidTime.Load()) - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Greater(t, db.head.minValidTime.Load(), int64(0)) app = db.Appender(ctx) @@ -2526,7 +2618,7 @@ func testHeadSeriesChunkRace(t *testing.T) { h.gc() wg.Done() }() - ss := q.Select(false, nil, matcher) + ss := q.Select(context.Background(), false, nil, matcher) for ss.Next() { } require.NoError(t, ss.Err()) @@ -2552,9 +2644,10 @@ func TestHeadLabelNamesValuesWithMinMaxRange(t *testing.T) { } expectedLabelNames = []string{"a", "b", "c"} expectedLabelValues = []string{"d", "e", "f"} + ctx = context.Background() ) - app := head.Appender(context.Background()) + app := head.Appender(ctx) for i, name := range expectedLabelNames { _, err := app.Append(0, labels.FromStrings(name, expectedLabelValues[i]), seriesTimestamps[i], 0) require.NoError(t, err) @@ -2579,12 +2672,12 @@ func TestHeadLabelNamesValuesWithMinMaxRange(t *testing.T) { for _, tt := range testCases { t.Run(tt.name, func(t *testing.T) { headIdxReader := head.indexRange(tt.mint, tt.maxt) - actualLabelNames, err := headIdxReader.LabelNames() + actualLabelNames, err := headIdxReader.LabelNames(ctx) require.NoError(t, err) require.Equal(t, tt.expectedNames, actualLabelNames) if len(tt.expectedValues) > 0 { for i, name := range expectedLabelNames { - actualLabelValue, err := headIdxReader.SortedLabelValues(name) + actualLabelValue, err := headIdxReader.SortedLabelValues(ctx, name) require.NoError(t, err) require.Equal(t, []string{tt.expectedValues[i]}, actualLabelValue) } @@ -2597,6 +2690,8 @@ func TestHeadLabelValuesWithMatchers(t *testing.T) { head, _ := newTestHead(t, 1000, wlog.CompressionNone, false) t.Cleanup(func() { require.NoError(t, head.Close()) }) + ctx := context.Background() + app := head.Appender(context.Background()) for i := 0; i < 100; i++ { _, err := app.Append(0, labels.FromStrings( @@ -2640,11 +2735,11 @@ func TestHeadLabelValuesWithMatchers(t *testing.T) { t.Run(tt.name, func(t *testing.T) { headIdxReader := head.indexRange(0, 200) - actualValues, err := headIdxReader.SortedLabelValues(tt.labelName, 
tt.matchers...) + actualValues, err := headIdxReader.SortedLabelValues(ctx, tt.labelName, tt.matchers...) require.NoError(t, err) require.Equal(t, tt.expectedValues, actualValues) - actualValues, err = headIdxReader.LabelValues(tt.labelName, tt.matchers...) + actualValues, err = headIdxReader.LabelValues(ctx, tt.labelName, tt.matchers...) sort.Strings(actualValues) require.NoError(t, err) require.Equal(t, tt.expectedValues, actualValues) @@ -2713,7 +2808,7 @@ func TestHeadLabelNamesWithMatchers(t *testing.T) { t.Run(tt.name, func(t *testing.T) { headIdxReader := head.indexRange(0, 200) - actualNames, err := headIdxReader.LabelNames(tt.matchers...) + actualNames, err := headIdxReader.LabelNames(context.Background(), tt.matchers...) require.NoError(t, err) require.Equal(t, tt.expectedNames, actualNames) }) @@ -2726,8 +2821,10 @@ func TestHeadShardedPostings(t *testing.T) { require.NoError(t, head.Close()) }() + ctx := context.Background() + // Append some series. - app := head.Appender(context.Background()) + app := head.Appender(ctx) for i := 0; i < 100; i++ { _, err := app.Append(0, labels.FromStrings("unique", fmt.Sprintf("value%d", i), "const", "1"), 100, 0) require.NoError(t, err) @@ -2738,7 +2835,7 @@ func TestHeadShardedPostings(t *testing.T) { // List all postings for a given label value. This is what we expect to get // in output from all shards. - p, err := ir.Postings("const", "1") + p, err := ir.Postings(ctx, "const", "1") require.NoError(t, err) var expected []storage.SeriesRef @@ -2754,7 +2851,7 @@ func TestHeadShardedPostings(t *testing.T) { actualPostings := make([]storage.SeriesRef, 0, len(expected)) for shardIndex := uint64(0); shardIndex < shardCount; shardIndex++ { - p, err = ir.Postings("const", "1") + p, err = ir.Postings(ctx, "const", "1") require.NoError(t, err) p = ir.ShardedPostings(p, shardIndex, shardCount) @@ -2877,6 +2974,8 @@ func BenchmarkHeadLabelValuesWithMatchers(b *testing.B) { head, _ := newTestHead(b, chunkRange, wlog.CompressionNone, false) b.Cleanup(func() { require.NoError(b, head.Close()) }) + ctx := context.Background() + app := head.Appender(context.Background()) metricCount := 1000000 @@ -2897,7 +2996,7 @@ func BenchmarkHeadLabelValuesWithMatchers(b *testing.B) { b.ReportAllocs() for benchIdx := 0; benchIdx < b.N; benchIdx++ { - actualValues, err := headIdxReader.LabelValues("b_tens", matchers...) + actualValues, err := headIdxReader.LabelValues(ctx, "b_tens", matchers...) require.NoError(b, err) require.Equal(b, 9, len(actualValues)) } @@ -2974,6 +3073,7 @@ func TestIteratorSeekIntoBuffer(t *testing.T) { func TestChunkNotFoundHeadGCRace(t *testing.T) { db := newTestDB(t) db.DisableCompactions() + ctx := context.Background() var ( app = db.Appender(context.Background()) @@ -2993,11 +3093,11 @@ func TestChunkNotFoundHeadGCRace(t *testing.T) { require.NoError(t, app.Commit()) // Get a querier before compaction (or when compaction is about to begin). - q, err := db.Querier(context.Background(), mint, maxt) + q, err := db.Querier(mint, maxt) require.NoError(t, err) // Query the compacted range and get the first series before compaction. - ss := q.Select(true, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) + ss := q.Select(context.Background(), true, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.True(t, ss.Next()) s := ss.At() @@ -3006,7 +3106,7 @@ func TestChunkNotFoundHeadGCRace(t *testing.T) { go func() { defer wg.Done() // Compacting head while the querier spans the compaction time. 
- require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Greater(t, len(db.Blocks()), 0) }() @@ -3039,6 +3139,7 @@ func TestChunkNotFoundHeadGCRace(t *testing.T) { func TestDataMissingOnQueryDuringCompaction(t *testing.T) { db := newTestDB(t) db.DisableCompactions() + ctx := context.Background() var ( app = db.Appender(context.Background()) @@ -3060,7 +3161,7 @@ func TestDataMissingOnQueryDuringCompaction(t *testing.T) { require.NoError(t, app.Commit()) // Get a querier before compaction (or when compaction is about to begin). - q, err := db.Querier(context.Background(), mint, maxt) + q, err := db.Querier(mint, maxt) require.NoError(t, err) var wg sync.WaitGroup @@ -3068,7 +3169,7 @@ func TestDataMissingOnQueryDuringCompaction(t *testing.T) { go func() { defer wg.Done() // Compacting head while the querier spans the compaction time. - require.NoError(t, db.Compact()) + require.NoError(t, db.Compact(ctx)) require.Greater(t, len(db.Blocks()), 0) }() @@ -3174,11 +3275,11 @@ func TestWaitForPendingReadersInTimeRange(t *testing.T) { require.True(t, waitOver.Load()) } - q, err := db.Querier(context.Background(), c.mint, c.maxt) + q, err := db.Querier(c.mint, c.maxt) require.NoError(t, err) checkWaiting(q) - cq, err := db.ChunkQuerier(context.Background(), c.mint, c.maxt) + cq, err := db.ChunkQuerier(c.mint, c.maxt) require.NoError(t, err) checkWaiting(cq) }) @@ -3258,7 +3359,7 @@ func TestAppendHistogram(t *testing.T) { require.NoError(t, q.Close()) }) - ss := q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) + ss := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.True(t, ss.Next()) s := ss.At() @@ -3911,7 +4012,7 @@ func testHistogramStaleSampleHelper(t *testing.T, floatHistogram bool) { require.NoError(t, q.Close()) }) - ss := q.Select(false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) + ss := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.True(t, ss.Next()) s := ss.At() @@ -4303,7 +4404,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) { } // Query back and expect same order of samples. - q, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(t, err) series := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) @@ -5309,6 +5410,7 @@ func BenchmarkCuttingHeadHistogramChunks(b *testing.B) { } func TestCuttingNewHeadChunks(t *testing.T) { + ctx := context.Background() testCases := map[string]struct { numTotalSamples int timestampJitter bool @@ -5442,7 +5544,7 @@ func TestCuttingNewHeadChunks(t *testing.T) { chkReader, err := h.Chunks() require.NoError(t, err) - p, err := idxReader.Postings("foo", "bar") + p, err := idxReader.Postings(ctx, "foo", "bar") require.NoError(t, err) var lblBuilder labels.ScratchBuilder diff --git a/tsdb/head_wal.go b/tsdb/head_wal.go index 88adbaacf8..6a2ce45281 100644 --- a/tsdb/head_wal.go +++ b/tsdb/head_wal.go @@ -591,9 +591,6 @@ func (wp *walSubsetProcessor) processWALSamples(h *Head, mmappedChunks, oooMmapp if s.T <= ms.mmMaxTime { continue } - if s.T <= ms.mmMaxTime { - continue - } if _, chunkCreated := ms.append(s.T, s.V, 0, appendChunkOpts); chunkCreated { h.metrics.chunksCreated.Inc() h.metrics.chunks.Inc() @@ -754,7 +751,9 @@ func (h *Head) loadWBL(r *wlog.Reader, multiRef map[chunks.HeadSeriesRef]chunks. 
m = len(samples) } for i := 0; i < concurrency; i++ { - shards[i] = processors[i].reuseBuf() + if shards[i] == nil { + shards[i] = processors[i].reuseBuf() + } } for _, sam := range samples[:m] { if r, ok := multiRef[sam.Ref]; ok { @@ -764,7 +763,10 @@ func (h *Head) loadWBL(r *wlog.Reader, multiRef map[chunks.HeadSeriesRef]chunks. shards[mod] = append(shards[mod], sam) } for i := 0; i < concurrency; i++ { - processors[i].input <- shards[i] + if len(shards[i]) > 0 { + processors[i].input <- wblSubsetProcessorInputItem{samples: shards[i]} + shards[i] = nil + } } samples = samples[m:] } @@ -790,23 +792,7 @@ func (h *Head) loadWBL(r *wlog.Reader, multiRef map[chunks.HeadSeriesRef]chunks. continue } idx := uint64(ms.ref) % uint64(concurrency) - // It is possible that some old sample is being processed in processWALSamples that - // could cause race below. So we wait for the goroutine to empty input the buffer and finish - // processing all old samples after emptying the buffer. - processors[idx].waitUntilIdle() - // Lock the subset so we can modify the series object - processors[idx].mx.Lock() - - // All samples till now have been m-mapped. Hence clear out the headChunk. - // In case some samples slipped through and went into m-map chunks because of changed - // chunk size parameters, we are not taking care of that here. - // TODO(codesome): see if there is a way to avoid duplicate m-map chunks if - // the size of ooo chunk was reduced between restart. - if ms.ooo != nil { - ms.ooo.oooHeadChunk = nil - } - - processors[idx].mx.Unlock() + processors[idx].input <- wblSubsetProcessorInputItem{mmappedSeries: ms} } default: panic(fmt.Errorf("unexpected decodedCh type: %T", d)) @@ -858,14 +844,18 @@ func isErrLoadOOOWal(err error) bool { } type wblSubsetProcessor struct { - mx sync.Mutex // Take this lock while modifying series in the subset. - input chan []record.RefSample + input chan wblSubsetProcessorInputItem output chan []record.RefSample } +type wblSubsetProcessorInputItem struct { + mmappedSeries *memSeries + samples []record.RefSample +} + func (wp *wblSubsetProcessor) setup() { wp.output = make(chan []record.RefSample, 300) - wp.input = make(chan []record.RefSample, 300) + wp.input = make(chan wblSubsetProcessorInputItem, 300) } func (wp *wblSubsetProcessor) closeAndDrain() { @@ -886,16 +876,23 @@ func (wp *wblSubsetProcessor) reuseBuf() []record.RefSample { // processWBLSamples adds the samples it receives to the head and passes // the buffer received to an output channel for reuse. -// Samples before the minValidTime timestamp are discarded. func (wp *wblSubsetProcessor) processWBLSamples(h *Head) (unknownRefs uint64) { defer close(wp.output) oooCapMax := h.opts.OutOfOrderCapMax.Load() // We don't check for minValidTime for ooo samples. mint, maxt := int64(math.MaxInt64), int64(math.MinInt64) - for samples := range wp.input { - wp.mx.Lock() - for _, s := range samples { + for in := range wp.input { + if in.mmappedSeries != nil && in.mmappedSeries.ooo != nil { + // All samples till now have been m-mapped. Hence clear out the headChunk. + // In case some samples slipped through and went into m-map chunks because of changed + // chunk size parameters, we are not taking care of that here. + // TODO(codesome): see if there is a way to avoid duplicate m-map chunks if + // the size of ooo chunk was reduced between restart. 
+ in.mmappedSeries.ooo.oooHeadChunk = nil + continue + } + for _, s := range in.samples { ms := h.series.getByID(s.Ref) if ms == nil { unknownRefs++ @@ -915,8 +912,10 @@ func (wp *wblSubsetProcessor) processWBLSamples(h *Head) (unknownRefs uint64) { } } } - wp.mx.Unlock() - + select { + case wp.output <- in.samples: + default: + } } h.updateMinOOOMaxOOOTime(mint, maxt) @@ -924,21 +923,6 @@ func (wp *wblSubsetProcessor) processWBLSamples(h *Head) (unknownRefs uint64) { return unknownRefs } -func (wp *wblSubsetProcessor) waitUntilIdle() { - select { - case <-wp.output: // Allow output side to drain to avoid deadlock. - default: - } - wp.input <- []record.RefSample{} - for len(wp.input) != 0 { - time.Sleep(10 * time.Microsecond) - select { - case <-wp.output: // Allow output side to drain to avoid deadlock. - default: - } - } -} - const ( chunkSnapshotRecordTypeSeries uint8 = 1 chunkSnapshotRecordTypeTombstones uint8 = 2 diff --git a/tsdb/index/index.go b/tsdb/index/index.go index 3b672ec2cc..4de6904147 100644 --- a/tsdb/index/index.go +++ b/tsdb/index/index.go @@ -924,7 +924,7 @@ func (w *Writer) writePostingsToTmpFiles() error { // Symbol numbers are in order, so the strings will also be in order. slices.Sort(values) for _, v := range values { - value, err := w.symbols.Lookup(v) + value, err := w.symbols.Lookup(w.ctx, v) if err != nil { return err } @@ -1314,7 +1314,7 @@ func NewSymbols(bs ByteSlice, version, off int) (*Symbols, error) { return s, nil } -func (s Symbols) Lookup(o uint32) (string, error) { +func (s Symbols) Lookup(ctx context.Context, o uint32) (string, error) { d := encoding.Decbuf{ B: s.bs.Range(0, s.bs.Len()), } @@ -1326,6 +1326,9 @@ func (s Symbols) Lookup(o uint32) (string, error) { d.Skip(s.offsets[int(o/symbolFactor)]) // Walk until we find the one we want. for i := o - (o / symbolFactor * symbolFactor); i > 0; i-- { + if ctx.Err() != nil { + return "", ctx.Err() + } d.UvarintBytes() } } else { @@ -1453,11 +1456,11 @@ func (r *Reader) Close() error { return r.c.Close() } -func (r *Reader) lookupSymbol(o uint32) (string, error) { +func (r *Reader) lookupSymbol(ctx context.Context, o uint32) (string, error) { if s, ok := r.nameSymbols[o]; ok { return s, nil } - return r.symbols.Lookup(o) + return r.symbols.Lookup(ctx, o) } // Symbols returns an iterator over the symbols that exist within the index. @@ -1473,8 +1476,8 @@ func (r *Reader) SymbolTableSize() uint64 { // SortedLabelValues returns value tuples that exist for the given label name. // It is not safe to use the return value beyond the lifetime of the byte slice // passed into the Reader. -func (r *Reader) SortedLabelValues(name string, matchers ...*labels.Matcher) ([]string, error) { - values, err := r.LabelValues(name, matchers...) +func (r *Reader) SortedLabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) { + values, err := r.LabelValues(ctx, name, matchers...) if err == nil && r.version == FormatV1 { slices.Sort(values) } @@ -1485,7 +1488,7 @@ func (r *Reader) SortedLabelValues(name string, matchers ...*labels.Matcher) ([] // It is not safe to use the return value beyond the lifetime of the byte slice // passed into the Reader. 
// TODO(replay): Support filtering by matchers -func (r *Reader) LabelValues(name string, matchers ...*labels.Matcher) ([]string, error) { +func (r *Reader) LabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) { if len(matchers) > 0 { return nil, errors.Errorf("matchers parameter is not implemented: %+v", matchers) } @@ -1516,7 +1519,7 @@ func (r *Reader) LabelValues(name string, matchers ...*labels.Matcher) ([]string lastVal := e[len(e)-1].value skip := 0 - for d.Err() == nil { + for d.Err() == nil && ctx.Err() == nil { if skip == 0 { // These are always the same number of bytes, // and it's faster to skip than parse. @@ -1537,15 +1540,20 @@ func (r *Reader) LabelValues(name string, matchers ...*labels.Matcher) ([]string if d.Err() != nil { return nil, errors.Wrap(d.Err(), "get postings offset entry") } - return values, nil + + return values, ctx.Err() } // LabelNamesFor returns all the label names for the series referred to by IDs. // The names returned are sorted. -func (r *Reader) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) { +func (r *Reader) LabelNamesFor(ctx context.Context, ids ...storage.SeriesRef) ([]string, error) { // Gather offsetsMap the name offsetsMap in the symbol table first offsetsMap := make(map[uint32]struct{}) for _, id := range ids { + if ctx.Err() != nil { + return nil, ctx.Err() + } + offset := id // In version 2 series IDs are no longer exact references but series are 16-byte padded // and the ID is the multiple of 16 of the actual position. @@ -1571,7 +1579,7 @@ func (r *Reader) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) { // Lookup the unique symbols. names := make([]string, 0, len(offsetsMap)) for off := range offsetsMap { - name, err := r.lookupSymbol(off) + name, err := r.lookupSymbol(ctx, off) if err != nil { return nil, errors.Wrap(err, "lookup symbol in LabelNamesFor") } @@ -1584,7 +1592,7 @@ func (r *Reader) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) { } // LabelValueFor returns label value for the given label name in the series referred to by ID. -func (r *Reader) LabelValueFor(id storage.SeriesRef, label string) (string, error) { +func (r *Reader) LabelValueFor(ctx context.Context, id storage.SeriesRef, label string) (string, error) { offset := id // In version 2 series IDs are no longer exact references but series are 16-byte padded // and the ID is the multiple of 16 of the actual position. @@ -1597,7 +1605,7 @@ func (r *Reader) LabelValueFor(id storage.SeriesRef, label string) (string, erro return "", errors.Wrap(d.Err(), "label values for") } - value, err := r.dec.LabelValueFor(buf, label) + value, err := r.dec.LabelValueFor(ctx, buf, label) if err != nil { return "", storage.ErrNotFound } @@ -1624,7 +1632,7 @@ func (r *Reader) Series(id storage.SeriesRef, builder *labels.ScratchBuilder, ch return errors.Wrap(r.dec.Series(d.Get(), builder, chks), "read series") } -func (r *Reader) Postings(name string, values ...string) (Postings, error) { +func (r *Reader) Postings(ctx context.Context, name string, values ...string) (Postings, error) { if r.version == FormatV1 { e, ok := r.postingsV1[name] if !ok { @@ -1644,7 +1652,7 @@ func (r *Reader) Postings(name string, values ...string) (Postings, error) { } res = append(res, p) } - return Merge(res...), nil + return Merge(ctx, res...), nil } e, ok := r.postings[name] @@ -1683,7 +1691,7 @@ func (r *Reader) Postings(name string, values ...string) (Postings, error) { // Iterate on the offset table. 
var postingsOff uint64 // The offset into the postings table. - for d.Err() == nil { + for d.Err() == nil && ctx.Err() == nil { if skip == 0 { // These are always the same number of bytes, // and it's faster to skip than parse. @@ -1720,9 +1728,12 @@ func (r *Reader) Postings(name string, values ...string) (Postings, error) { if d.Err() != nil { return nil, errors.Wrap(d.Err(), "get postings offset entry") } + if ctx.Err() != nil { + return nil, errors.Wrap(ctx.Err(), "get postings offset entry") + } } - return Merge(res...), nil + return Merge(ctx, res...), nil } // SortedPostings returns the given postings list reordered so that the backing series @@ -1789,7 +1800,7 @@ func (r *Reader) Size() int64 { // LabelNames returns all the unique label names present in the index. // TODO(twilkie) implement support for matchers -func (r *Reader) LabelNames(matchers ...*labels.Matcher) ([]string, error) { +func (r *Reader) LabelNames(_ context.Context, matchers ...*labels.Matcher) ([]string, error) { if len(matchers) > 0 { return nil, errors.Errorf("matchers parameter is not implemented: %+v", matchers) } @@ -1833,7 +1844,7 @@ func (s stringListIter) Err() error { return nil } // It currently does not contain decoding methods for all entry types but can be extended // by them if there's demand. type Decoder struct { - LookupSymbol func(uint32) (string, error) + LookupSymbol func(context.Context, uint32) (string, error) } // Postings returns a postings list for b and its number of elements. @@ -1870,7 +1881,7 @@ func (dec *Decoder) LabelNamesOffsetsFor(b []byte) ([]uint32, error) { } // LabelValueFor decodes a label for a given series. -func (dec *Decoder) LabelValueFor(b []byte, label string) (string, error) { +func (dec *Decoder) LabelValueFor(ctx context.Context, b []byte, label string) (string, error) { d := encoding.Decbuf{B: b} k := d.Uvarint() @@ -1882,13 +1893,13 @@ func (dec *Decoder) LabelValueFor(b []byte, label string) (string, error) { return "", errors.Wrap(d.Err(), "read series label offsets") } - ln, err := dec.LookupSymbol(lno) + ln, err := dec.LookupSymbol(ctx, lno) if err != nil { return "", errors.Wrap(err, "lookup label name") } if ln == label { - lv, err := dec.LookupSymbol(lvo) + lv, err := dec.LookupSymbol(ctx, lvo) if err != nil { return "", errors.Wrap(err, "lookup label value") } @@ -1920,11 +1931,11 @@ func (dec *Decoder) Series(b []byte, builder *labels.ScratchBuilder, chks *[]chu return errors.Wrap(d.Err(), "read series label offsets") } - ln, err := dec.LookupSymbol(lno) + ln, err := dec.LookupSymbol(context.TODO(), lno) if err != nil { return errors.Wrap(err, "lookup label name") } - lv, err := dec.LookupSymbol(lvo) + lv, err := dec.LookupSymbol(context.TODO(), lvo) if err != nil { return errors.Wrap(err, "lookup label value") } diff --git a/tsdb/index/index_test.go b/tsdb/index/index_test.go index ec0b432568..4594009855 100644 --- a/tsdb/index/index_test.go +++ b/tsdb/index/index_test.go @@ -93,7 +93,7 @@ func (m mockIndex) Close() error { return nil } -func (m mockIndex) LabelValues(name string) ([]string, error) { +func (m mockIndex) LabelValues(_ context.Context, name string) ([]string, error) { values := []string{} for l := range m.postings { if l.Name == name { @@ -103,13 +103,13 @@ func (m mockIndex) LabelValues(name string) ([]string, error) { return values, nil } -func (m mockIndex) Postings(name string, values ...string) (Postings, error) { +func (m mockIndex) Postings(ctx context.Context, name string, values ...string) (Postings, error) { p := []Postings{} 
for _, value := range values { l := labels.Label{Name: name, Value: value} p = append(p, m.SortedPostings(NewListPostings(m.postings[l]))) } - return Merge(p...), nil + return Merge(ctx, p...), nil } func (m mockIndex) SortedPostings(p Postings) Postings { @@ -162,6 +162,7 @@ func TestIndexRW_Create_Open(t *testing.T) { func TestIndexRW_Postings(t *testing.T) { dir := t.TempDir() + ctx := context.Background() fn := filepath.Join(dir, indexFilename) @@ -194,7 +195,7 @@ func TestIndexRW_Postings(t *testing.T) { ir, err := NewFileReader(fn) require.NoError(t, err) - p, err := ir.Postings("a", "1") + p, err := ir.Postings(ctx, "a", "1") require.NoError(t, err) var c []chunks.Meta @@ -228,7 +229,7 @@ func TestIndexRW_Postings(t *testing.T) { d := encoding.NewDecbufAt(ir.b, int(off), castagnoliTable) require.Equal(t, 1, d.Be32int(), "Unexpected number of label indices table names") for i := d.Be32(); i > 0 && d.Err() == nil; i-- { - v, err := ir.lookupSymbol(d.Be32()) + v, err := ir.lookupSymbol(ctx, d.Be32()) require.NoError(t, err) labelIndices[lbl] = append(labelIndices[lbl], v) } @@ -253,7 +254,7 @@ func TestIndexRW_Postings(t *testing.T) { // List all postings for a given label value. This is what we expect to get // in output from all shards. - p, err = ir.Postings("a", "1") + p, err = ir.Postings(ctx, "a", "1") require.NoError(t, err) var expected []storage.SeriesRef @@ -269,7 +270,7 @@ func TestIndexRW_Postings(t *testing.T) { actualPostings := make([]storage.SeriesRef, 0, len(expected)) for shardIndex := uint64(0); shardIndex < shardCount; shardIndex++ { - p, err = ir.Postings("a", "1") + p, err = ir.Postings(ctx, "a", "1") require.NoError(t, err) p = ir.ShardedPostings(p, shardIndex, shardCount) @@ -302,6 +303,7 @@ func TestIndexRW_Postings(t *testing.T) { func TestPostingsMany(t *testing.T) { dir := t.TempDir() + ctx := context.Background() fn := filepath.Join(dir, indexFilename) @@ -370,7 +372,7 @@ func TestPostingsMany(t *testing.T) { var builder labels.ScratchBuilder for _, c := range cases { - it, err := ir.Postings("i", c.in...) + it, err := ir.Postings(ctx, "i", c.in...) require.NoError(t, err) got := []string{} @@ -392,6 +394,7 @@ func TestPostingsMany(t *testing.T) { func TestPersistence_index_e2e(t *testing.T) { dir := t.TempDir() + ctx := context.Background() lbls, err := labels.ReadLabels(filepath.Join("..", "testdata", "20kseries.json"), 20000) require.NoError(t, err) @@ -470,10 +473,10 @@ func TestPersistence_index_e2e(t *testing.T) { require.NoError(t, err) for p := range mi.postings { - gotp, err := ir.Postings(p.Name, p.Value) + gotp, err := ir.Postings(ctx, p.Name, p.Value) require.NoError(t, err) - expp, err := mi.Postings(p.Name, p.Value) + expp, err := mi.Postings(ctx, p.Name, p.Value) require.NoError(t, err) var chks, expchks []chunks.Meta @@ -503,7 +506,7 @@ func TestPersistence_index_e2e(t *testing.T) { for k, v := range labelPairs { sort.Strings(v) - res, err := ir.SortedLabelValues(k) + res, err := ir.SortedLabelValues(ctx, k) require.NoError(t, err) require.Equal(t, len(v), len(res)) @@ -573,6 +576,7 @@ func TestNewFileReaderErrorNoOpenFiles(t *testing.T) { } func TestSymbols(t *testing.T) { + ctx := context.Background() buf := encoding.Encbuf{} // Add prefix to the buffer to simulate symbols as part of larger buffer. 
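For orientation, the reader-side API these tests exercise now threads a context end to end: Postings walks the offset table and Symbols.Lookup walks the symbol table, and both bail out once the context is cancelled. A rough sketch of that flow, illustrative and not part of the diff (it assumes an already-open *index.Reader):

package example

import (
	"context"

	"github.com/prometheus/prometheus/storage"
	"github.com/prometheus/prometheus/tsdb/index"
)

// expandPostings lists series refs for one label pair via the context-aware
// reader API; a cancelled ctx aborts the walk instead of running to the end.
func expandPostings(ctx context.Context, ir *index.Reader) ([]storage.SeriesRef, error) {
	p, err := ir.Postings(ctx, "a", "1") // same label pair the tests above use
	if err != nil {
		return nil, err
	}
	var refs []storage.SeriesRef
	for p.Next() {
		refs = append(refs, p.At())
	}
	return refs, p.Err()
}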
@@ -595,11 +599,11 @@ func TestSymbols(t *testing.T) { require.Equal(t, 32, s.Size()) for i := 99; i >= 0; i-- { - s, err := s.Lookup(uint32(i)) + s, err := s.Lookup(ctx, uint32(i)) require.NoError(t, err) require.Equal(t, string(rune(i)), s) } - _, err = s.Lookup(100) + _, err = s.Lookup(ctx, 100) require.Error(t, err) for i := 99; i >= 0; i-- { @@ -631,10 +635,12 @@ func BenchmarkReader_ShardedPostings(b *testing.B) { require.NoError(b, os.RemoveAll(dir)) }() + ctx := context.Background() + // Generate an index. fn := filepath.Join(dir, indexFilename) - iw, err := NewWriter(context.Background(), fn) + iw, err := NewWriter(ctx, fn) require.NoError(b, err) for i := 1; i <= numSeries; i++ { @@ -664,7 +670,7 @@ func BenchmarkReader_ShardedPostings(b *testing.B) { b.ResetTimer() for n := 0; n < b.N; n++ { - allPostings, err := ir.Postings("const", fmt.Sprintf("%10d", 1)) + allPostings, err := ir.Postings(ctx, "const", fmt.Sprintf("%10d", 1)) require.NoError(b, err) ir.ShardedPostings(allPostings, uint64(n%numShards), numShards) diff --git a/tsdb/index/postings.go b/tsdb/index/postings.go index 9de86f5486..c0a80f733f 100644 --- a/tsdb/index/postings.go +++ b/tsdb/index/postings.go @@ -15,6 +15,7 @@ package index import ( "container/heap" + "context" "encoding/binary" "runtime" "sort" @@ -135,7 +136,7 @@ func (p *MemPostings) LabelNames() []string { } // LabelValues returns label values for the given name. -func (p *MemPostings) LabelValues(name string) []string { +func (p *MemPostings) LabelValues(_ context.Context, name string) []string { p.mtx.RLock() defer p.mtx.RUnlock() @@ -519,7 +520,7 @@ func (it *intersectPostings) Err() error { } // Merge returns a new iterator over the union of the input iterators. -func Merge(its ...Postings) Postings { +func Merge(ctx context.Context, its ...Postings) Postings { if len(its) == 0 { return EmptyPostings() } @@ -527,7 +528,7 @@ func Merge(its ...Postings) Postings { return its[0] } - p, ok := newMergedPostings(its) + p, ok := newMergedPostings(ctx, its) if !ok { return EmptyPostings() } @@ -559,12 +560,14 @@ type mergedPostings struct { err error } -func newMergedPostings(p []Postings) (m *mergedPostings, nonEmpty bool) { +func newMergedPostings(ctx context.Context, p []Postings) (m *mergedPostings, nonEmpty bool) { ph := make(postingsHeap, 0, len(p)) for _, it := range p { // NOTE: mergedPostings struct requires the user to issue an initial Next. switch { + case ctx.Err() != nil: + return &mergedPostings{err: ctx.Err()}, true case it.Next(): ph = append(ph, it) case it.Err() != nil: diff --git a/tsdb/index/postings_test.go b/tsdb/index/postings_test.go index e05bf52d30..cf479498ef 100644 --- a/tsdb/index/postings_test.go +++ b/tsdb/index/postings_test.go @@ -385,7 +385,7 @@ func TestMultiMerge(t *testing.T) { i2 := newListPostings(2, 4, 5, 6, 7, 8, 999, 1001) i3 := newListPostings(1, 2, 5, 6, 7, 8, 1001, 1200) - res, err := ExpandPostings(Merge(i1, i2, i3)) + res, err := ExpandPostings(Merge(context.Background(), i1, i2, i3)) require.NoError(t, err) require.Equal(t, []storage.SeriesRef{1, 2, 3, 4, 5, 6, 7, 8, 999, 1000, 1001, 1200}, res) } @@ -473,10 +473,12 @@ func TestMergedPostings(t *testing.T) { t.Fatal("merge result expectancy cannot be nil") } + ctx := context.Background() + expected, err := ExpandPostings(c.res) require.NoError(t, err) - m := Merge(c.in...) + m := Merge(ctx, c.in...) 
if c.res == EmptyPostings() { require.Equal(t, EmptyPostings(), m) @@ -537,10 +539,12 @@ func TestMergedPostingsSeek(t *testing.T) { } for _, c := range cases { + ctx := context.Background() + a := newListPostings(c.a...) b := newListPostings(c.b...) - p := Merge(a, b) + p := Merge(ctx, a, b) require.Equal(t, c.success, p.Seek(c.seek)) @@ -796,6 +800,7 @@ func TestIntersectWithMerge(t *testing.T) { a := newListPostings(21, 22, 23, 24, 25, 30) b := Merge( + context.Background(), newListPostings(10, 20, 30), newListPostings(15, 26, 30), ) diff --git a/tsdb/ooo_head_read.go b/tsdb/ooo_head_read.go index 33f774a8c2..c362b0feb5 100644 --- a/tsdb/ooo_head_read.go +++ b/tsdb/ooo_head_read.go @@ -15,6 +15,7 @@ package tsdb import ( + "context" "errors" "math" @@ -156,23 +157,23 @@ func (oh *OOOHeadIndexReader) series(ref storage.SeriesRef, builder *labels.Scra // PostingsForMatchers needs to be overridden so that the right IndexReader // implementation gets passed down to the PostingsForMatchers call. -func (oh *OOOHeadIndexReader) PostingsForMatchers(concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { - return oh.head.pfmc.PostingsForMatchers(oh, concurrent, ms...) +func (oh *OOOHeadIndexReader) PostingsForMatchers(ctx context.Context, concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { + return oh.head.pfmc.PostingsForMatchers(ctx, oh, concurrent, ms...) } // LabelValues needs to be overridden from the headIndexReader implementation due // to the check that happens at the beginning where we make sure that the query // interval overlaps with the head minooot and maxooot. -func (oh *OOOHeadIndexReader) LabelValues(name string, matchers ...*labels.Matcher) ([]string, error) { +func (oh *OOOHeadIndexReader) LabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) { if oh.maxt < oh.head.MinOOOTime() || oh.mint > oh.head.MaxOOOTime() { return []string{}, nil } if len(matchers) == 0 { - return oh.head.postings.LabelValues(name), nil + return oh.head.postings.LabelValues(ctx, name), nil } - return labelValuesWithMatchers(oh, name, matchers...) + return labelValuesWithMatchers(ctx, oh, name, matchers...) } type chunkMetaAndChunkDiskMapperRef struct { @@ -196,7 +197,7 @@ func lessByMinTimeAndMinRef(a, b chunks.Meta) bool { return a.MinTime < b.MinTime } -func (oh *OOOHeadIndexReader) Postings(name string, values ...string) (index.Postings, error) { +func (oh *OOOHeadIndexReader) Postings(ctx context.Context, name string, values ...string) (index.Postings, error) { switch len(values) { case 0: return index.EmptyPostings(), nil @@ -208,7 +209,7 @@ func (oh *OOOHeadIndexReader) Postings(name string, values ...string) (index.Pos for _, value := range values { res = append(res, oh.head.postings.Get(name, value)) // TODO(ganesh) Also call GetOOOPostings } - return index.Merge(res...), nil + return index.Merge(ctx, res...), nil } } @@ -274,7 +275,7 @@ type OOOCompactionHead struct { // 4. Cuts a new WBL file for the OOO WBL. // All the above together have a bit of CPU and memory overhead, and can have a bit of impact // on the sample append latency. So call NewOOOCompactionHead only right before compaction. 
-func NewOOOCompactionHead(head *Head) (*OOOCompactionHead, error) { +func NewOOOCompactionHead(ctx context.Context, head *Head) (*OOOCompactionHead, error) { ch := &OOOCompactionHead{ chunkRange: head.chunkRange.Load(), mint: math.MaxInt64, @@ -293,7 +294,7 @@ func NewOOOCompactionHead(head *Head) (*OOOCompactionHead, error) { n, v := index.AllPostingsKey() // TODO: verify this gets only ooo samples. - p, err := ch.oooIR.Postings(n, v) + p, err := ch.oooIR.Postings(ctx, n, v) if err != nil { return nil, err } @@ -402,7 +403,7 @@ func (ir *OOOCompactionHeadIndexReader) Symbols() index.StringIter { return ir.ch.oooIR.Symbols() } -func (ir *OOOCompactionHeadIndexReader) Postings(name string, values ...string) (index.Postings, error) { +func (ir *OOOCompactionHeadIndexReader) Postings(_ context.Context, name string, values ...string) (index.Postings, error) { n, v := index.AllPostingsKey() if name != n || len(values) != 1 || values[0] != v { return nil, errors.New("only AllPostingsKey is supported") @@ -423,27 +424,27 @@ func (ir *OOOCompactionHeadIndexReader) Series(ref storage.SeriesRef, builder *l return ir.ch.oooIR.series(ref, builder, chks, ir.ch.lastMmapRef) } -func (ir *OOOCompactionHeadIndexReader) SortedLabelValues(name string, matchers ...*labels.Matcher) ([]string, error) { +func (ir *OOOCompactionHeadIndexReader) SortedLabelValues(_ context.Context, name string, matchers ...*labels.Matcher) ([]string, error) { return nil, errors.New("not implemented") } -func (ir *OOOCompactionHeadIndexReader) LabelValues(name string, matchers ...*labels.Matcher) ([]string, error) { +func (ir *OOOCompactionHeadIndexReader) LabelValues(_ context.Context, name string, matchers ...*labels.Matcher) ([]string, error) { return nil, errors.New("not implemented") } -func (ir *OOOCompactionHeadIndexReader) PostingsForMatchers(concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { +func (ir *OOOCompactionHeadIndexReader) PostingsForMatchers(_ context.Context, concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { return nil, errors.New("not implemented") } -func (ir *OOOCompactionHeadIndexReader) LabelNames(matchers ...*labels.Matcher) ([]string, error) { +func (ir *OOOCompactionHeadIndexReader) LabelNames(context.Context, ...*labels.Matcher) ([]string, error) { return nil, errors.New("not implemented") } -func (ir *OOOCompactionHeadIndexReader) LabelValueFor(id storage.SeriesRef, label string) (string, error) { +func (ir *OOOCompactionHeadIndexReader) LabelValueFor(context.Context, storage.SeriesRef, string) (string, error) { return "", errors.New("not implemented") } -func (ir *OOOCompactionHeadIndexReader) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) { +func (ir *OOOCompactionHeadIndexReader) LabelNamesFor(ctx context.Context, ids ...storage.SeriesRef) ([]string, error) { return nil, errors.New("not implemented") } diff --git a/tsdb/ooo_head_read_test.go b/tsdb/ooo_head_read_test.go index 013b59aa52..d774a8455f 100644 --- a/tsdb/ooo_head_read_test.go +++ b/tsdb/ooo_head_read_test.go @@ -378,6 +378,8 @@ func TestOOOHeadChunkReader_LabelValues(t *testing.T) { head, _ := newTestHead(t, chunkRange, wlog.CompressionNone, true) t.Cleanup(func() { require.NoError(t, head.Close()) }) + ctx := context.Background() + app := head.Appender(context.Background()) // Add in-order samples @@ -437,24 +439,24 @@ func TestOOOHeadChunkReader_LabelValues(t *testing.T) { // We first want to test using a head index reader that covers the biggest query interval oh := 
NewOOOHeadIndexReader(head, tc.queryMinT, tc.queryMaxT) matchers := []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "foo", "bar1")} - values, err := oh.LabelValues("foo", matchers...) + values, err := oh.LabelValues(ctx, "foo", matchers...) sort.Strings(values) require.NoError(t, err) require.Equal(t, tc.expValues1, values) matchers = []*labels.Matcher{labels.MustNewMatcher(labels.MatchNotRegexp, "foo", "^bar.")} - values, err = oh.LabelValues("foo", matchers...) + values, err = oh.LabelValues(ctx, "foo", matchers...) sort.Strings(values) require.NoError(t, err) require.Equal(t, tc.expValues2, values) matchers = []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.")} - values, err = oh.LabelValues("foo", matchers...) + values, err = oh.LabelValues(ctx, "foo", matchers...) sort.Strings(values) require.NoError(t, err) require.Equal(t, tc.expValues3, values) - values, err = oh.LabelValues("foo") + values, err = oh.LabelValues(ctx, "foo") sort.Strings(values) require.NoError(t, err) require.Equal(t, tc.expValues4, values) diff --git a/tsdb/postings_for_matchers_cache.go b/tsdb/postings_for_matchers_cache.go index 8892d7c2ef..fe6b3a27ef 100644 --- a/tsdb/postings_for_matchers_cache.go +++ b/tsdb/postings_for_matchers_cache.go @@ -2,6 +2,7 @@ package tsdb import ( "container/list" + "context" "strings" "sync" "time" @@ -18,13 +19,13 @@ const ( // IndexPostingsReader is a subset of IndexReader methods, the minimum required to evaluate PostingsForMatchers type IndexPostingsReader interface { // LabelValues returns possible label values which may not be sorted. - LabelValues(name string, matchers ...*labels.Matcher) ([]string, error) + LabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) // Postings returns the postings list iterator for the label pairs. // The Postings here contain the offsets to the series inside the index. // Found IDs are not strictly required to point to a valid Series, e.g. // during background garbage collections. Input values must be sorted. - Postings(name string, values ...string) (index.Postings, error) + Postings(ctx context.Context, name string, values ...string) (index.Postings, error) } // NewPostingsForMatchersCache creates a new PostingsForMatchersCache. @@ -63,15 +64,15 @@ type PostingsForMatchersCache struct { postingsForMatchers func(ix IndexPostingsReader, ms ...*labels.Matcher) (index.Postings, error) } -func (c *PostingsForMatchersCache) PostingsForMatchers(ix IndexPostingsReader, concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { +func (c *PostingsForMatchersCache) PostingsForMatchers(ctx context.Context, ix IndexPostingsReader, concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { if !concurrent && !c.force { return c.postingsForMatchers(ix, ms...) 
} c.expire() - return c.postingsForMatchersPromise(ix, ms)() + return c.postingsForMatchersPromise(ctx, ix, ms)() } -func (c *PostingsForMatchersCache) postingsForMatchersPromise(ix IndexPostingsReader, ms []*labels.Matcher) func() (index.Postings, error) { +func (c *PostingsForMatchersCache) postingsForMatchersPromise(_ context.Context, ix IndexPostingsReader, ms []*labels.Matcher) func() (index.Postings, error) { var ( wg sync.WaitGroup cloner *index.PostingsCloner @@ -198,8 +199,8 @@ type indexReaderWithPostingsForMatchers struct { pfmc *PostingsForMatchersCache } -func (ir indexReaderWithPostingsForMatchers) PostingsForMatchers(concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { - return ir.pfmc.PostingsForMatchers(ir, concurrent, ms...) +func (ir indexReaderWithPostingsForMatchers) PostingsForMatchers(ctx context.Context, concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { + return ir.pfmc.PostingsForMatchers(ctx, ir, concurrent, ms...) } var _ IndexReader = indexReaderWithPostingsForMatchers{} diff --git a/tsdb/postings_for_matchers_cache_test.go b/tsdb/postings_for_matchers_cache_test.go index 819183c5eb..d94669cf84 100644 --- a/tsdb/postings_for_matchers_cache_test.go +++ b/tsdb/postings_for_matchers_cache_test.go @@ -1,6 +1,7 @@ package tsdb import ( + "context" "fmt" "strings" "sync" @@ -26,6 +27,8 @@ func TestPostingsForMatchersCache(t *testing.T) { return c } + ctx := context.Background() + t.Run("happy case one call", func(t *testing.T) { for _, concurrent := range []bool{true, false} { t.Run(fmt.Sprintf("concurrent=%t", concurrent), func(t *testing.T) { @@ -38,7 +41,7 @@ func TestPostingsForMatchersCache(t *testing.T) { return index.ErrPostings(expectedPostingsErr), nil }, &timeNowMock{}, false) - p, err := c.PostingsForMatchers(indexForPostingsMock{}, concurrent, expectedMatchers...) + p, err := c.PostingsForMatchers(ctx, indexForPostingsMock{}, concurrent, expectedMatchers...) require.NoError(t, err) require.NotNil(t, p) require.Equal(t, p.Err(), expectedPostingsErr, "Expected ErrPostings with err %q, got %T with err %q", expectedPostingsErr, p, p.Err()) @@ -54,7 +57,7 @@ func TestPostingsForMatchersCache(t *testing.T) { return nil, expectedErr }, &timeNowMock{}, false) - _, err := c.PostingsForMatchers(indexForPostingsMock{}, true, expectedMatchers...) + _, err := c.PostingsForMatchers(ctx, indexForPostingsMock{}, true, expectedMatchers...) require.Equal(t, expectedErr, err) }) @@ -114,7 +117,7 @@ func TestPostingsForMatchersCache(t *testing.T) { // perform all calls for i := 0; i < len(calls); i++ { go func(i int) { - _, err := c.PostingsForMatchers(indexForPostingsMock{}, concurrent, calls[i]...) + _, err := c.PostingsForMatchers(ctx, indexForPostingsMock{}, concurrent, calls[i]...) results[i] = err.Error() resultsWg.Done() }(i) @@ -151,12 +154,12 @@ func TestPostingsForMatchersCache(t *testing.T) { }, &timeNowMock{}, false) // first call, fills the cache - p, err := c.PostingsForMatchers(indexForPostingsMock{}, false, expectedMatchers...) + p, err := c.PostingsForMatchers(ctx, indexForPostingsMock{}, false, expectedMatchers...) require.NoError(t, err) require.EqualError(t, p.Err(), "result from call 1") // second call within the ttl (we didn't advance the time), should call again because concurrent==false - p, err = c.PostingsForMatchers(indexForPostingsMock{}, false, expectedMatchers...) + p, err = c.PostingsForMatchers(ctx, indexForPostingsMock{}, false, expectedMatchers...) 
require.NoError(t, err)
 		require.EqualError(t, p.Err(), "result from call 2")
 	})
@@ -171,12 +174,12 @@ func TestPostingsForMatchersCache(t *testing.T) {
 		}, &timeNowMock{}, false)
 
 		// first call, fills the cache
-		p, err := c.PostingsForMatchers(indexForPostingsMock{}, true, expectedMatchers...)
+		p, err := c.PostingsForMatchers(ctx, indexForPostingsMock{}, true, expectedMatchers...)
 		require.NoError(t, err)
 		require.EqualError(t, p.Err(), "result from call 1")
 
 		// second call within the ttl (we didn't advance the time), should call again because the result is not cached in this case
-		p, err = c.PostingsForMatchers(indexForPostingsMock{}, true, expectedMatchers...)
+		p, err = c.PostingsForMatchers(ctx, indexForPostingsMock{}, true, expectedMatchers...)
 		require.NoError(t, err)
 		require.EqualError(t, p.Err(), "result from call 2")
 	})
@@ -194,21 +197,21 @@ func TestPostingsForMatchersCache(t *testing.T) {
 		}, timeNow, false)
 
 		// first call, fills the cache
-		p, err := c.PostingsForMatchers(indexForPostingsMock{}, true, expectedMatchers...)
+		p, err := c.PostingsForMatchers(ctx, indexForPostingsMock{}, true, expectedMatchers...)
 		require.NoError(t, err)
 		require.EqualError(t, p.Err(), "result from call 1")
 
 		timeNow.advance(defaultPostingsForMatchersCacheTTL / 2)
 
 		// second call within the ttl, should use the cache
-		p, err = c.PostingsForMatchers(indexForPostingsMock{}, true, expectedMatchers...)
+		p, err = c.PostingsForMatchers(ctx, indexForPostingsMock{}, true, expectedMatchers...)
 		require.NoError(t, err)
 		require.EqualError(t, p.Err(), "result from call 1")
 
 		timeNow.advance(defaultPostingsForMatchersCacheTTL / 2)
 
 		// third call is after ttl (exactly), should call again
-		p, err = c.PostingsForMatchers(indexForPostingsMock{}, true, expectedMatchers...)
+		p, err = c.PostingsForMatchers(ctx, indexForPostingsMock{}, true, expectedMatchers...)
 		require.NoError(t, err)
 		require.EqualError(t, p.Err(), "result from call 2")
 	})
@@ -230,12 +233,12 @@ func TestPostingsForMatchersCache(t *testing.T) {
 		// each one of the first testCacheSize calls is cached properly
 		for _, matchers := range calls {
 			// first call
-			p, err := c.PostingsForMatchers(indexForPostingsMock{}, true, matchers...)
+			p, err := c.PostingsForMatchers(ctx, indexForPostingsMock{}, true, matchers...)
 			require.NoError(t, err)
 			require.EqualError(t, p.Err(), "result from call 1")
 
 			// cached value
-			p, err = c.PostingsForMatchers(indexForPostingsMock{}, true, matchers...)
+			p, err = c.PostingsForMatchers(ctx, indexForPostingsMock{}, true, matchers...)
 			require.NoError(t, err)
 			require.EqualError(t, p.Err(), "result from call 1")
 		}
@@ -243,17 +246,17 @@ func TestPostingsForMatchersCache(t *testing.T) {
 		// one extra call is made, which is cached properly, but evicts the first cached value
 		someExtraMatchers := []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")}
 		// first call
-		p, err := c.PostingsForMatchers(indexForPostingsMock{}, true, someExtraMatchers...)
+		p, err := c.PostingsForMatchers(ctx, indexForPostingsMock{}, true, someExtraMatchers...)
 		require.NoError(t, err)
 		require.EqualError(t, p.Err(), "result from call 1")
 
 		// cached value
-		p, err = c.PostingsForMatchers(indexForPostingsMock{}, true, someExtraMatchers...)
+		p, err = c.PostingsForMatchers(ctx, indexForPostingsMock{}, true, someExtraMatchers...)
 		require.NoError(t, err)
 		require.EqualError(t, p.Err(), "result from call 1")
 
 		// make first call again, it's calculated again
-		p, err = c.PostingsForMatchers(indexForPostingsMock{}, true, calls[0]...)
+ p, err = c.PostingsForMatchers(ctx, indexForPostingsMock{}, true, calls[0]...) require.NoError(t, err) require.EqualError(t, p.Err(), "result from call 2") }) @@ -261,11 +264,11 @@ func TestPostingsForMatchersCache(t *testing.T) { type indexForPostingsMock struct{} -func (idx indexForPostingsMock) LabelValues(string, ...*labels.Matcher) ([]string, error) { +func (idx indexForPostingsMock) LabelValues(context.Context, string, ...*labels.Matcher) ([]string, error) { panic("implement me") } -func (idx indexForPostingsMock) Postings(string, ...string) (index.Postings, error) { +func (idx indexForPostingsMock) Postings(context.Context, string, ...string) (index.Postings, error) { panic("implement me") } diff --git a/tsdb/querier.go b/tsdb/querier.go index 729086d19a..8c4a66a559 100644 --- a/tsdb/querier.go +++ b/tsdb/querier.go @@ -14,6 +14,7 @@ package tsdb import ( + "context" "fmt" "math" @@ -29,6 +30,7 @@ import ( tsdb_errors "github.com/prometheus/prometheus/tsdb/errors" "github.com/prometheus/prometheus/tsdb/index" "github.com/prometheus/prometheus/tsdb/tombstones" + "github.com/prometheus/prometheus/util/annotations" ) type blockBaseQuerier struct { @@ -72,13 +74,13 @@ func newBlockBaseQuerier(b BlockReader, mint, maxt int64) (*blockBaseQuerier, er }, nil } -func (q *blockBaseQuerier) LabelValues(name string, matchers ...*labels.Matcher) ([]string, storage.Warnings, error) { - res, err := q.index.SortedLabelValues(name, matchers...) +func (q *blockBaseQuerier) LabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) { + res, err := q.index.SortedLabelValues(ctx, name, matchers...) return res, nil, err } -func (q *blockBaseQuerier) LabelNames(matchers ...*labels.Matcher) ([]string, storage.Warnings, error) { - res, err := q.index.LabelNames(matchers...) +func (q *blockBaseQuerier) LabelNames(ctx context.Context, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) { + res, err := q.index.LabelNames(ctx, matchers...) return res, nil, err } @@ -109,12 +111,12 @@ func NewBlockQuerier(b BlockReader, mint, maxt int64) (storage.Querier, error) { return &blockQuerier{blockBaseQuerier: q}, nil } -func (q *blockQuerier) Select(sortSeries bool, hints *storage.SelectHints, ms ...*labels.Matcher) storage.SeriesSet { +func (q *blockQuerier) Select(ctx context.Context, sortSeries bool, hints *storage.SelectHints, ms ...*labels.Matcher) storage.SeriesSet { mint := q.mint maxt := q.maxt disableTrimming := false sharded := hints != nil && hints.ShardCount > 0 - p, err := q.index.PostingsForMatchers(sharded, ms...) + p, err := q.index.PostingsForMatchers(ctx, sharded, ms...) if err != nil { return storage.ErrSeriesSet(err) } @@ -152,7 +154,7 @@ func NewBlockChunkQuerier(b BlockReader, mint, maxt int64) (storage.ChunkQuerier return &blockChunkQuerier{blockBaseQuerier: q}, nil } -func (q *blockChunkQuerier) Select(sortSeries bool, hints *storage.SelectHints, ms ...*labels.Matcher) storage.ChunkSeriesSet { +func (q *blockChunkQuerier) Select(ctx context.Context, sortSeries bool, hints *storage.SelectHints, ms ...*labels.Matcher) storage.ChunkSeriesSet { mint := q.mint maxt := q.maxt disableTrimming := false @@ -162,7 +164,7 @@ func (q *blockChunkQuerier) Select(sortSeries bool, hints *storage.SelectHints, disableTrimming = hints.DisableTrimming } sharded := hints != nil && hints.ShardCount > 0 - p, err := q.index.PostingsForMatchers(sharded, ms...) + p, err := q.index.PostingsForMatchers(ctx, sharded, ms...) 
if err != nil { return storage.ErrChunkSeriesSet(err) } @@ -207,7 +209,7 @@ func PostingsForMatchers(ix IndexPostingsReader, ms ...*labels.Matcher) (index.P // We prefer to get AllPostings so that the base of subtraction (i.e. allPostings) // doesn't include series that may be added to the index reader during this function call. k, v := index.AllPostingsKey() - allPostings, err := ix.Postings(k, v) + allPostings, err := ix.Postings(context.TODO(), k, v) if err != nil { return nil, err } @@ -227,7 +229,7 @@ func PostingsForMatchers(ix IndexPostingsReader, ms ...*labels.Matcher) (index.P switch { case m.Name == "" && m.Value == "": // Special-case for AllPostings, used in tests at least. k, v := index.AllPostingsKey() - allPostings, err := ix.Postings(k, v) + allPostings, err := ix.Postings(context.TODO(), k, v) if err != nil { return nil, err } @@ -304,18 +306,18 @@ func postingsForMatcher(ix IndexPostingsReader, m *labels.Matcher) (index.Postin // Fast-path for equal matching. if m.Type == labels.MatchEqual { - return ix.Postings(m.Name, m.Value) + return ix.Postings(context.TODO(), m.Name, m.Value) } // Fast-path for set matching. if m.Type == labels.MatchRegexp { setMatches := m.SetMatches() if len(setMatches) > 0 { - return ix.Postings(m.Name, setMatches...) + return ix.Postings(context.TODO(), m.Name, setMatches...) } } - vals, err := ix.LabelValues(m.Name) + vals, err := ix.LabelValues(context.TODO(), m.Name) if err != nil { return nil, err } @@ -331,7 +333,7 @@ func postingsForMatcher(ix IndexPostingsReader, m *labels.Matcher) (index.Postin return index.EmptyPostings(), nil } - return ix.Postings(m.Name, res...) + return ix.Postings(context.TODO(), m.Name, res...) } // inversePostingsForMatcher returns the postings for the series with the label name set but not matching the matcher. @@ -342,17 +344,17 @@ func inversePostingsForMatcher(ix IndexPostingsReader, m *labels.Matcher) (index if m.Type == labels.MatchNotRegexp { setMatches := m.SetMatches() if len(setMatches) > 0 { - return ix.Postings(m.Name, setMatches...) + return ix.Postings(context.TODO(), m.Name, setMatches...) } } // Fast-path for MatchNotEqual matching. // Inverse of a MatchNotEqual is MatchEqual (double negation). if m.Type == labels.MatchNotEqual { - return ix.Postings(m.Name, m.Value) + return ix.Postings(context.TODO(), m.Name, m.Value) } - vals, err := ix.LabelValues(m.Name) + vals, err := ix.LabelValues(context.TODO(), m.Name) if err != nil { return nil, err } @@ -369,18 +371,18 @@ func inversePostingsForMatcher(ix IndexPostingsReader, m *labels.Matcher) (index } } - return ix.Postings(m.Name, res...) + return ix.Postings(context.TODO(), m.Name, res...) } const maxExpandedPostingsFactor = 100 // Division factor for maximum number of matched series. -func labelValuesWithMatchers(r IndexReader, name string, matchers ...*labels.Matcher) ([]string, error) { +func labelValuesWithMatchers(ctx context.Context, r IndexReader, name string, matchers ...*labels.Matcher) ([]string, error) { p, err := PostingsForMatchers(r, matchers...) 
if err != nil { return nil, errors.Wrap(err, "fetching postings for matchers") } - allValues, err := r.LabelValues(name) + allValues, err := r.LabelValues(ctx, name) if err != nil { return nil, errors.Wrapf(err, "fetching values of label %s", name) } @@ -434,7 +436,7 @@ func labelValuesWithMatchers(r IndexReader, name string, matchers ...*labels.Mat valuesPostings := make([]index.Postings, len(allValues)) for i, value := range allValues { - valuesPostings[i], err = r.Postings(name, value) + valuesPostings[i], err = r.Postings(ctx, name, value) if err != nil { return nil, errors.Wrapf(err, "fetching postings for %s=%q", name, value) } @@ -533,8 +535,8 @@ func (p *prependPostings) Err() error { return p.rest.Err() } -func labelNamesWithMatchers(r IndexReader, matchers ...*labels.Matcher) ([]string, error) { - p, err := r.PostingsForMatchers(false, matchers...) +func labelNamesWithMatchers(ctx context.Context, r IndexReader, matchers ...*labels.Matcher) ([]string, error) { + p, err := r.PostingsForMatchers(ctx, false, matchers...) if err != nil { return nil, err } @@ -547,7 +549,7 @@ func labelNamesWithMatchers(r IndexReader, matchers ...*labels.Matcher) ([]strin return nil, errors.Wrapf(p.Err(), "postings for label names with matchers") } - return r.LabelNamesFor(postings...) + return r.LabelNamesFor(ctx, postings...) } // seriesData, used inside other iterators, are updated when we move from one series to another. @@ -667,7 +669,7 @@ func (b *blockBaseSeriesSet) Err() error { return b.p.Err() } -func (b *blockBaseSeriesSet) Warnings() storage.Warnings { return nil } +func (b *blockBaseSeriesSet) Warnings() annotations.Annotations { return nil } // populateWithDelGenericSeriesIterator allows to iterate over given chunk // metas. In each iteration it ensures that chunks are trimmed based on given diff --git a/tsdb/querier_bench_test.go b/tsdb/querier_bench_test.go index 71aac0fb1a..ee2e00a800 100644 --- a/tsdb/querier_bench_test.go +++ b/tsdb/querier_bench_test.go @@ -188,6 +188,8 @@ func benchmarkLabelValuesWithMatchers(b *testing.B, ir IndexReader) { nonPrimesTimes := labels.MustNewMatcher(labels.MatchEqual, "i_times_n", "20") // 1*20, 2*10, 4*5, 5*4 times12 := labels.MustNewMatcher(labels.MatchRegexp, "i_times_n", "12.*") + ctx := context.Background() + cases := []struct { name string labelName string @@ -213,7 +215,7 @@ func benchmarkLabelValuesWithMatchers(b *testing.B, ir IndexReader) { for _, c := range cases { b.Run(c.name, func(b *testing.B) { for i := 0; i < b.N; i++ { - _, err := labelValuesWithMatchers(ir, c.labelName, c.matchers...) + _, err := labelValuesWithMatchers(ctx, ir, c.labelName, c.matchers...) require.NoError(b, err) } }) @@ -278,7 +280,7 @@ func BenchmarkQuerierSelect(b *testing.B) { } } - ss := q.Select(sorted, hints, matcher) + ss := q.Select(context.Background(), sorted, hints, matcher) for ss.Next() { // nolint:revive } require.NoError(b, ss.Err()) diff --git a/tsdb/querier_test.go b/tsdb/querier_test.go index 28b036301b..f7775e712a 100644 --- a/tsdb/querier_test.go +++ b/tsdb/querier_test.go @@ -38,20 +38,21 @@ import ( "github.com/prometheus/prometheus/tsdb/index" "github.com/prometheus/prometheus/tsdb/tombstones" "github.com/prometheus/prometheus/tsdb/tsdbutil" + "github.com/prometheus/prometheus/util/annotations" ) // TODO(bwplotka): Replace those mocks with remote.concreteSeriesSet. 
type mockSeriesSet struct { next func() bool series func() storage.Series - ws func() storage.Warnings + ws func() annotations.Annotations err func() error } -func (m *mockSeriesSet) Next() bool { return m.next() } -func (m *mockSeriesSet) At() storage.Series { return m.series() } -func (m *mockSeriesSet) Err() error { return m.err() } -func (m *mockSeriesSet) Warnings() storage.Warnings { return m.ws() } +func (m *mockSeriesSet) Next() bool { return m.next() } +func (m *mockSeriesSet) At() storage.Series { return m.series() } +func (m *mockSeriesSet) Err() error { return m.err() } +func (m *mockSeriesSet) Warnings() annotations.Annotations { return m.ws() } func newMockSeriesSet(list []storage.Series) *mockSeriesSet { i := -1 @@ -64,21 +65,21 @@ func newMockSeriesSet(list []storage.Series) *mockSeriesSet { return list[i] }, err: func() error { return nil }, - ws: func() storage.Warnings { return nil }, + ws: func() annotations.Annotations { return nil }, } } type mockChunkSeriesSet struct { next func() bool series func() storage.ChunkSeries - ws func() storage.Warnings + ws func() annotations.Annotations err func() error } -func (m *mockChunkSeriesSet) Next() bool { return m.next() } -func (m *mockChunkSeriesSet) At() storage.ChunkSeries { return m.series() } -func (m *mockChunkSeriesSet) Err() error { return m.err() } -func (m *mockChunkSeriesSet) Warnings() storage.Warnings { return m.ws() } +func (m *mockChunkSeriesSet) Next() bool { return m.next() } +func (m *mockChunkSeriesSet) At() storage.ChunkSeries { return m.series() } +func (m *mockChunkSeriesSet) Err() error { return m.err() } +func (m *mockChunkSeriesSet) Warnings() annotations.Annotations { return m.ws() } func newMockChunkSeriesSet(list []storage.ChunkSeries) *mockChunkSeriesSet { i := -1 @@ -91,7 +92,7 @@ func newMockChunkSeriesSet(list []storage.ChunkSeries) *mockChunkSeriesSet { return list[i] }, err: func() error { return nil }, - ws: func() storage.Warnings { return nil }, + ws: func() annotations.Annotations { return nil }, } } @@ -182,7 +183,7 @@ func testBlockQuerier(t *testing.T, c blockQuerierTestCase, ir IndexReader, cr C }, } - res := q.Select(false, c.hints, c.ms...) + res := q.Select(context.Background(), false, c.hints, c.ms...) defer func() { require.NoError(t, q.Close()) }() for { @@ -217,7 +218,7 @@ func testBlockQuerier(t *testing.T, c blockQuerierTestCase, ir IndexReader, cr C maxt: c.maxt, }, } - res := q.Select(false, c.hints, c.ms...) + res := q.Select(context.Background(), false, c.hints, c.ms...) defer func() { require.NoError(t, q.Close()) }() for { @@ -544,6 +545,7 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) { } func TestBlockQuerier_TrimmingDoesNotModifyOriginalTombstoneIntervals(t *testing.T) { + ctx := context.Background() c := blockQuerierTestCase{ mint: 2, maxt: 6, @@ -567,7 +569,7 @@ func TestBlockQuerier_TrimmingDoesNotModifyOriginalTombstoneIntervals(t *testing } ir, cr, _, _ := createIdxChkReaders(t, testData) stones := tombstones.NewMemTombstones() - p, err := ir.Postings("a", "a") + p, err := ir.Postings(ctx, "a", "a") require.NoError(t, err) refs, err := index.ExpandPostings(p) require.NoError(t, err) @@ -1492,13 +1494,13 @@ func (m mockIndex) Close() error { return nil } -func (m mockIndex) SortedLabelValues(name string, matchers ...*labels.Matcher) ([]string, error) { - values, _ := m.LabelValues(name, matchers...) 
+func (m mockIndex) SortedLabelValues(ctx context.Context, name string, matchers ...*labels.Matcher) ([]string, error) { + values, _ := m.LabelValues(ctx, name, matchers...) sort.Strings(values) return values, nil } -func (m mockIndex) LabelValues(name string, matchers ...*labels.Matcher) ([]string, error) { +func (m mockIndex) LabelValues(_ context.Context, name string, matchers ...*labels.Matcher) ([]string, error) { var values []string if len(matchers) == 0 { @@ -1522,11 +1524,11 @@ func (m mockIndex) LabelValues(name string, matchers ...*labels.Matcher) ([]stri return values, nil } -func (m mockIndex) LabelValueFor(id storage.SeriesRef, label string) (string, error) { +func (m mockIndex) LabelValueFor(_ context.Context, id storage.SeriesRef, label string) (string, error) { return m.series[id].l.Get(label), nil } -func (m mockIndex) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) { +func (m mockIndex) LabelNamesFor(ctx context.Context, ids ...storage.SeriesRef) ([]string, error) { namesMap := make(map[string]bool) for _, id := range ids { m.series[id].l.Range(func(lbl labels.Label) { @@ -1540,13 +1542,13 @@ func (m mockIndex) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) { return names, nil } -func (m mockIndex) Postings(name string, values ...string) (index.Postings, error) { +func (m mockIndex) Postings(ctx context.Context, name string, values ...string) (index.Postings, error) { res := make([]index.Postings, 0, len(values)) for _, value := range values { l := labels.Label{Name: name, Value: value} res = append(res, index.NewListPostings(m.postings[l])) } - return index.Merge(res...), nil + return index.Merge(ctx, res...), nil } func (m mockIndex) SortedPostings(p index.Postings) index.Postings { @@ -1561,7 +1563,7 @@ func (m mockIndex) SortedPostings(p index.Postings) index.Postings { return index.NewListPostings(ep) } -func (m mockIndex) PostingsForMatchers(concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { +func (m mockIndex) PostingsForMatchers(_ context.Context, concurrent bool, ms ...*labels.Matcher) (index.Postings, error) { var ps []storage.SeriesRef for p, s := range m.series { if matches(ms, s.l) { @@ -1614,7 +1616,7 @@ func (m mockIndex) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, return nil } -func (m mockIndex) LabelNames(matchers ...*labels.Matcher) ([]string, error) { +func (m mockIndex) LabelNames(_ context.Context, matchers ...*labels.Matcher) ([]string, error) { names := map[string]struct{}{} if len(matchers) == 0 { for l := range m.postings { @@ -1771,7 +1773,7 @@ func BenchmarkQuerySeek(b *testing.B) { b.ReportAllocs() var it chunkenc.Iterator - ss := sq.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*")) + ss := sq.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*")) for ss.Next() { it = ss.At().Iterator(it) for t := mint; t <= maxt; t++ { @@ -1904,7 +1906,7 @@ func BenchmarkSetMatcher(b *testing.B) { b.ResetTimer() b.ReportAllocs() for n := 0; n < b.N; n++ { - ss := sq.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "test", c.pattern)) + ss := sq.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchRegexp, "test", c.pattern)) for ss.Next() { } require.NoError(b, ss.Err()) @@ -2253,10 +2255,10 @@ func TestQuerierIndexQueriesRace(t *testing.T) { t.Cleanup(cancel) for i := 0; i < testRepeats; i++ { - q, err := db.Querier(ctx, math.MinInt64, math.MaxInt64) + q, err := db.Querier(math.MinInt64, 
math.MaxInt64)
 			require.NoError(t, err)
 
-			values, _, err := q.LabelValues("seq", c.matchers...)
+			values, _, err := q.LabelValues(ctx, "seq", c.matchers...)
 			require.NoError(t, err)
 			require.Emptyf(t, values, `label values for label "seq" should be empty`)
 		}
@@ -2294,7 +2296,7 @@ func TestClose(t *testing.T) {
 		require.NoError(t, db.Close())
 	}()
 
-	q, err := db.Querier(context.TODO(), 0, 20)
+	q, err := db.Querier(0, 20)
 	require.NoError(t, err)
 	require.NoError(t, q.Close())
 	require.Error(t, q.Close())
@@ -2427,7 +2429,7 @@ func benchQuery(b *testing.B, expExpansions int, q storage.Querier, selectors la
 	b.ResetTimer()
 	b.ReportAllocs()
 	for i := 0; i < b.N; i++ {
-		ss := q.Select(false, nil, selectors...)
+		ss := q.Select(context.Background(), false, nil, selectors...)
 		var actualExpansions int
 		var it chunkenc.Iterator
 		for ss.Next() {
@@ -2454,24 +2456,24 @@ func (m mockMatcherIndex) Symbols() index.StringIter { return nil }
 func (m mockMatcherIndex) Close() error { return nil }
 
 // SortedLabelValues will return error if it is called.
-func (m mockMatcherIndex) SortedLabelValues(name string, matchers ...*labels.Matcher) ([]string, error) {
+func (m mockMatcherIndex) SortedLabelValues(context.Context, string, ...*labels.Matcher) ([]string, error) {
 	return []string{}, errors.New("sorted label values called")
 }
 
 // LabelValues will return error if it is called.
-func (m mockMatcherIndex) LabelValues(name string, matchers ...*labels.Matcher) ([]string, error) {
+func (m mockMatcherIndex) LabelValues(context.Context, string, ...*labels.Matcher) ([]string, error) {
 	return []string{}, errors.New("label values called")
 }
 
-func (m mockMatcherIndex) LabelValueFor(id storage.SeriesRef, label string) (string, error) {
+func (m mockMatcherIndex) LabelValueFor(context.Context, storage.SeriesRef, string) (string, error) {
 	return "", errors.New("label value for called")
 }
 
-func (m mockMatcherIndex) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) {
+func (m mockMatcherIndex) LabelNamesFor(ctx context.Context, ids ...storage.SeriesRef) ([]string, error) {
 	return nil, errors.New("label names for called")
 }
 
-func (m mockMatcherIndex) Postings(name string, values ...string) (index.Postings, error) {
+func (m mockMatcherIndex) Postings(context.Context, string, ...string) (index.Postings, error) {
 	return index.EmptyPostings(), nil
 }
@@ -2491,7 +2493,7 @@ func (m mockMatcherIndex) Series(ref storage.SeriesRef, builder *labels.ScratchB
 	return nil
 }
 
-func (m mockMatcherIndex) LabelNames(...*labels.Matcher) ([]string, error) {
+func (m mockMatcherIndex) LabelNames(context.Context, ...*labels.Matcher) ([]string, error) {
 	return []string{}, nil
 }
@@ -2655,7 +2657,7 @@ func BenchmarkHeadChunkQuerier(b *testing.B) {
 	}
 	require.NoError(b, app.Commit())
 
-	querier, err := db.ChunkQuerier(context.Background(), math.MinInt64, math.MaxInt64)
+	querier, err := db.ChunkQuerier(math.MinInt64, math.MaxInt64)
 	require.NoError(b, err)
 	defer func(q storage.ChunkQuerier) {
 		require.NoError(b, q.Close())
@@ -2663,7 +2665,7 @@ func BenchmarkHeadChunkQuerier(b *testing.B) {
 	b.ReportAllocs()
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
-		ss := querier.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*"))
+		ss := querier.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*"))
 		total := 0
 		for ss.Next() {
 			cs := ss.At()
@@ -2700,7 +2702,7 @@ func BenchmarkHeadQuerier(b *testing.B) {
 	}
 	require.NoError(b, app.Commit())
 
-	querier, err := db.Querier(context.Background(),
math.MinInt64, math.MaxInt64) + querier, err := db.Querier(math.MinInt64, math.MaxInt64) require.NoError(b, err) defer func(q storage.Querier) { require.NoError(b, q.Close()) @@ -2708,7 +2710,7 @@ func BenchmarkHeadQuerier(b *testing.B) { b.ReportAllocs() b.ResetTimer() for i := 0; i < b.N; i++ { - ss := querier.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*")) + ss := querier.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*")) total := int64(0) for ss.Next() { cs := ss.At() @@ -2726,6 +2728,7 @@ func BenchmarkHeadQuerier(b *testing.B) { // This is a regression test for the case where gauge histograms were not handled by // populateWithDelChunkSeriesIterator correctly. func TestQueryWithDeletedHistograms(t *testing.T) { + ctx := context.Background() testcases := map[string]func(int) (*histogram.Histogram, *histogram.FloatHistogram){ "intCounter": func(i int) (*histogram.Histogram, *histogram.FloatHistogram) { return tsdbutil.GenerateTestHistogram(i), nil @@ -2770,13 +2773,13 @@ func TestQueryWithDeletedHistograms(t *testing.T) { require.NoError(t, err) // Delete the last 20. - err = db.Delete(80, 100, matcher) + err = db.Delete(ctx, 80, 100, matcher) require.NoError(t, err) - chunkQuerier, err := db.ChunkQuerier(context.Background(), 0, 100) + chunkQuerier, err := db.ChunkQuerier(0, 100) require.NoError(t, err) - css := chunkQuerier.Select(false, nil, matcher) + css := chunkQuerier.Select(context.Background(), false, nil, matcher) seriesCount := 0 for css.Next() { @@ -2866,7 +2869,9 @@ func TestLabelsValuesWithMatchersOptimization(t *testing.T) { require.NoError(t, h.Close()) }() - app := h.Appender(context.Background()) + ctx := context.Background() + + app := h.Appender(ctx) addSeries := func(l labels.Labels) { app.Append(0, l, 0, 0) } @@ -2905,7 +2910,7 @@ func TestLabelsValuesWithMatchersOptimization(t *testing.T) { for _, c := range cases { t.Run(c.name, func(t *testing.T) { - values, err := labelValuesWithMatchers(ir, c.labelName, c.matchers...) + values, err := labelValuesWithMatchers(ctx, ir, c.labelName, c.matchers...) require.NoError(t, err) require.ElementsMatch(t, c.expectedResults, values) }) diff --git a/tsdb/repair_test.go b/tsdb/repair_test.go index d4e9b76ad0..c199ecdd4e 100644 --- a/tsdb/repair_test.go +++ b/tsdb/repair_test.go @@ -14,6 +14,7 @@ package tsdb import ( + "context" "os" "path/filepath" "testing" @@ -28,6 +29,7 @@ import ( func TestRepairBadIndexVersion(t *testing.T) { tmpDir := t.TempDir() + ctx := context.Background() // The broken index used in this test was written by the following script // at a broken revision. @@ -78,7 +80,7 @@ func TestRepairBadIndexVersion(t *testing.T) { // Read current index to check integrity. 
r, err := index.NewFileReader(filepath.Join(tmpDbDir, indexFilename)) require.NoError(t, err) - p, err := r.Postings("b", "1") + p, err := r.Postings(ctx, "b", "1") require.NoError(t, err) var builder labels.ScratchBuilder for p.Next() { @@ -97,7 +99,7 @@ func TestRepairBadIndexVersion(t *testing.T) { r, err = index.NewFileReader(filepath.Join(tmpDbDir, indexFilename)) require.NoError(t, err) defer r.Close() - p, err = r.Postings("b", "1") + p, err = r.Postings(ctx, "b", "1") require.NoError(t, err) res := []labels.Labels{} diff --git a/util/annotations/annotations.go b/util/annotations/annotations.go new file mode 100644 index 0000000000..e8b59dc7f6 --- /dev/null +++ b/util/annotations/annotations.go @@ -0,0 +1,165 @@ +// Copyright 2023 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package annotations + +import ( + "errors" + "fmt" + + "github.com/prometheus/common/model" + + "github.com/prometheus/prometheus/promql/parser/posrange" +) + +// Annotations is a general wrapper for warnings and other information +// that is returned by the query API along with the results. +// Each individual annotation is modeled by a Go error. +// They are deduplicated based on the string returned by error.Error(). +// The zero value is usable without further initialization, see New(). +type Annotations map[string]error + +// New returns new Annotations ready to use. Note that the zero value of +// Annotations is also fully usable, but using this method is often more +// readable. +func New() *Annotations { + return &Annotations{} +} + +// Add adds an annotation (modeled as a Go error) in-place and returns the +// modified Annotations for convenience. +func (a *Annotations) Add(err error) Annotations { + if *a == nil { + *a = Annotations{} + } + (*a)[err.Error()] = err + return *a +} + +// Merge adds the contents of the second annotation to the first, modifying +// the first in-place, and returns the merged first Annotation for convenience. +func (a *Annotations) Merge(aa Annotations) Annotations { + if *a == nil { + *a = Annotations{} + } + for key, val := range aa { + (*a)[key] = val + } + return *a +} + +// AsErrors is a convenience function to return the annotations map as a slice +// of errors. +func (a Annotations) AsErrors() []error { + arr := make([]error, 0, len(a)) + for _, err := range a { + arr = append(arr, err) + } + return arr +} + +// AsStrings is a convenience function to return the annotations map as a slice +// of strings. The query string is used to get the line number and character offset +// positioning info of the elements which trigger an annotation. We limit the number +// of annotations returned here with maxAnnos (0 for no limit). 
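+//
+// A minimal usage sketch (illustrative only; pos and query stand in for a
+// real position range and query string):
+//
+//	var annos Annotations
+//	annos.Add(NewInvalidQuantileWarning(1.5, pos))
+//	warnings := annos.AsStrings(query, 10)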
+func (a Annotations) AsStrings(query string, maxAnnos int) []string {
+	arr := make([]string, 0, len(a))
+	for _, err := range a {
+		if maxAnnos > 0 && len(arr) >= maxAnnos {
+			break
+		}
+		anErr, ok := err.(annoErr)
+		if ok {
+			anErr.Query = query
+			err = anErr
+		}
+		arr = append(arr, err.Error())
+	}
+	if maxAnnos > 0 && len(a) > maxAnnos {
+		arr = append(arr, fmt.Sprintf("%d more annotations omitted", len(a)-maxAnnos))
+	}
+	return arr
+}
+
+//nolint:revive // Ignore ST1012
+var (
+	// Currently there are only two types: warnings and info.
+	// For now, info annotations are visually identical to warnings, as we have
+	// not updated the API spec or the frontend to show a different kind of
+	// warning. But we make the distinction here to prepare for adding them in
+	// the future.
+	PromQLInfo    = errors.New("PromQL info")
+	PromQLWarning = errors.New("PromQL warning")
+
+	InvalidQuantileWarning              = fmt.Errorf("%w: quantile value should be between 0 and 1", PromQLWarning)
+	BadBucketLabelWarning               = fmt.Errorf("%w: bucket label %q is missing or has a malformed value", PromQLWarning, model.BucketLabel)
+	MixedFloatsHistogramsWarning        = fmt.Errorf("%w: encountered a mix of histograms and floats for metric name", PromQLWarning)
+	MixedClassicNativeHistogramsWarning = fmt.Errorf("%w: vector contains a mix of classic and native histograms for metric name", PromQLWarning)
+
+	PossibleNonCounterInfo = fmt.Errorf("%w: metric might not be a counter, name does not end in _total/_sum/_count:", PromQLInfo)
+)
+
+type annoErr struct {
+	PositionRange posrange.PositionRange
+	Err           error
+	Query         string
+}
+
+func (e annoErr) Error() string {
+	return fmt.Sprintf("%s (%s)", e.Err, e.PositionRange.StartPosInput(e.Query, 0))
+}
+
+// NewInvalidQuantileWarning is used when the user specifies an invalid quantile
+// value, i.e. a float that is outside the range [0, 1] or NaN.
+func NewInvalidQuantileWarning(q float64, pos posrange.PositionRange) annoErr {
+	return annoErr{
+		PositionRange: pos,
+		Err:           fmt.Errorf("%w, got %g", InvalidQuantileWarning, q),
+	}
+}
+
+// NewBadBucketLabelWarning is used when there is an error parsing the bucket label
+// of a classic histogram.
+func NewBadBucketLabelWarning(metricName, label string, pos posrange.PositionRange) annoErr {
+	return annoErr{
+		PositionRange: pos,
+		Err:           fmt.Errorf("%w of %q for metric name %q", BadBucketLabelWarning, label, metricName),
+	}
+}
+
+// NewMixedFloatsHistogramsWarning is used when the queried series includes both
+// float samples and histogram samples for functions that do not support mixed
+// samples.
+func NewMixedFloatsHistogramsWarning(metricName string, pos posrange.PositionRange) annoErr {
+	return annoErr{
+		PositionRange: pos,
+		Err:           fmt.Errorf("%w %q", MixedFloatsHistogramsWarning, metricName),
+	}
+}
+
+// NewMixedClassicNativeHistogramsWarning is used when the queried series includes
+// both classic and native histograms.
+func NewMixedClassicNativeHistogramsWarning(metricName string, pos posrange.PositionRange) annoErr {
+	return annoErr{
+		PositionRange: pos,
+		Err:           fmt.Errorf("%w %q", MixedClassicNativeHistogramsWarning, metricName),
+	}
+}
+
+// NewPossibleNonCounterInfo is used when a counter metric does not have the suffixes
+// _total, _sum or _count.
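+//
+// Callers can still match on the wrapped sentinel errors; for example
+// (illustrative only, with anno coming from one of these constructors):
+//
+//	errors.Is(anno.Err, PromQLInfo) // true for info-level annotations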
+func NewPossibleNonCounterInfo(metricName string, pos posrange.PositionRange) annoErr { + return annoErr{ + PositionRange: pos, + Err: fmt.Errorf("%w %q", PossibleNonCounterInfo, metricName), + } +} diff --git a/web/api/v1/api.go b/web/api/v1/api.go index 227027e462..62a376b0ba 100644 --- a/web/api/v1/api.go +++ b/web/api/v1/api.go @@ -51,6 +51,7 @@ import ( "github.com/prometheus/prometheus/storage/remote" "github.com/prometheus/prometheus/tsdb" "github.com/prometheus/prometheus/tsdb/index" + "github.com/prometheus/prometheus/util/annotations" "github.com/prometheus/prometheus/util/httputil" "github.com/prometheus/prometheus/util/stats" ) @@ -161,7 +162,7 @@ type Response struct { type apiFuncResult struct { data interface{} err *apiError - warnings storage.Warnings + warnings annotations.Annotations finalizer func() } @@ -170,7 +171,7 @@ type apiFunc func(r *http.Request) apiFuncResult // TSDBAdminStats defines the tsdb interfaces used by the v1 API for admin operations as well as statistics. type TSDBAdminStats interface { CleanTombstones() error - Delete(mint, maxt int64, ms ...*labels.Matcher) error + Delete(ctx context.Context, mint, maxt int64, ms ...*labels.Matcher) error Snapshot(dir string, withHead bool) error Stats(statsByLabelName string, limit int) (*tsdb.Stats, error) WALReplayStatus() (tsdb.WALReplayStatus, error) @@ -337,7 +338,7 @@ func (api *API) Register(r *route.Router) { } if result.data != nil { - api.respond(w, r, result.data, result.warnings) + api.respond(w, r, result.data, result.warnings, r.FormValue("query")) return } w.WriteHeader(http.StatusNoContent) @@ -659,7 +660,7 @@ func (api *API) labelNames(r *http.Request) apiFuncResult { return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil} } - q, err := api.Queryable.Querier(r.Context(), timestamp.FromTime(start), timestamp.FromTime(end)) + q, err := api.Queryable.Querier(timestamp.FromTime(start), timestamp.FromTime(end)) if err != nil { return apiFuncResult{nil, returnAPIError(err), nil, nil} } @@ -667,18 +668,18 @@ func (api *API) labelNames(r *http.Request) apiFuncResult { var ( names []string - warnings storage.Warnings + warnings annotations.Annotations ) if len(matcherSets) > 0 { labelNamesSet := make(map[string]struct{}) for _, matchers := range matcherSets { - vals, callWarnings, err := q.LabelNames(matchers...) + vals, callWarnings, err := q.LabelNames(r.Context(), matchers...) if err != nil { return apiFuncResult{nil, returnAPIError(err), warnings, nil} } - warnings = append(warnings, callWarnings...) 
+ warnings.Merge(callWarnings) for _, val := range vals { labelNamesSet[val] = struct{}{} } @@ -691,7 +692,7 @@ func (api *API) labelNames(r *http.Request) apiFuncResult { } slices.Sort(names) } else { - names, warnings, err = q.LabelNames() + names, warnings, err = q.LabelNames(r.Context()) if err != nil { return apiFuncResult{nil, &apiError{errorExec, err}, warnings, nil} } @@ -725,7 +726,7 @@ func (api *API) labelValues(r *http.Request) (result apiFuncResult) { return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil} } - q, err := api.Queryable.Querier(r.Context(), timestamp.FromTime(start), timestamp.FromTime(end)) + q, err := api.Queryable.Querier(timestamp.FromTime(start), timestamp.FromTime(end)) if err != nil { return apiFuncResult{nil, &apiError{errorExec, err}, nil, nil} } @@ -743,17 +744,17 @@ func (api *API) labelValues(r *http.Request) (result apiFuncResult) { var ( vals []string - warnings storage.Warnings + warnings annotations.Annotations ) if len(matcherSets) > 0 { - var callWarnings storage.Warnings + var callWarnings annotations.Annotations labelValuesSet := make(map[string]struct{}) for _, matchers := range matcherSets { - vals, callWarnings, err = q.LabelValues(name, matchers...) + vals, callWarnings, err = q.LabelValues(ctx, name, matchers...) if err != nil { return apiFuncResult{nil, &apiError{errorExec, err}, warnings, closer} } - warnings = append(warnings, callWarnings...) + warnings.Merge(callWarnings) for _, val := range vals { labelValuesSet[val] = struct{}{} } @@ -764,7 +765,7 @@ func (api *API) labelValues(r *http.Request) (result apiFuncResult) { vals = append(vals, val) } } else { - vals, warnings, err = q.LabelValues(name) + vals, warnings, err = q.LabelValues(ctx, name) if err != nil { return apiFuncResult{nil, &apiError{errorExec, err}, warnings, closer} } @@ -793,6 +794,8 @@ var ( ) func (api *API) series(r *http.Request) (result apiFuncResult) { + ctx := r.Context() + if err := r.ParseForm(); err != nil { return apiFuncResult{nil, &apiError{errorBadData, errors.Wrapf(err, "error parsing form values")}, nil, nil} } @@ -814,7 +817,7 @@ func (api *API) series(r *http.Request) (result apiFuncResult) { return invalidParamError(err, "match[]") } - q, err := api.Queryable.Querier(r.Context(), timestamp.FromTime(start), timestamp.FromTime(end)) + q, err := api.Queryable.Querier(timestamp.FromTime(start), timestamp.FromTime(end)) if err != nil { return apiFuncResult{nil, returnAPIError(err), nil, nil} } @@ -841,13 +844,13 @@ func (api *API) series(r *http.Request) (result apiFuncResult) { var sets []storage.SeriesSet for _, mset := range matcherSets { // We need to sort this select results to merge (deduplicate) the series sets later. - s := q.Select(true, hints, mset...) + s := q.Select(ctx, true, hints, mset...) sets = append(sets, s) } set = storage.NewMergeSeriesSet(sets, storage.ChainedSeriesMerge) } else { // At this point at least one match exists. - set = q.Select(false, hints, matcherSets[0]...) + set = q.Select(ctx, false, hints, matcherSets[0]...) 
} metrics := []labels.Labels{} @@ -1577,7 +1580,7 @@ func (api *API) serveWALReplayStatus(w http.ResponseWriter, r *http.Request) { Min: status.Min, Max: status.Max, Current: status.Current, - }, nil) + }, nil, "") } func (api *API) remoteRead(w http.ResponseWriter, r *http.Request) { @@ -1630,7 +1633,7 @@ func (api *API) deleteSeries(r *http.Request) apiFuncResult { if err != nil { return invalidParamError(err, "match[]") } - if err := api.db.Delete(timestamp.FromTime(start), timestamp.FromTime(end), matchers...); err != nil { + if err := api.db.Delete(r.Context(), timestamp.FromTime(start), timestamp.FromTime(end), matchers...); err != nil { return apiFuncResult{nil, &apiError{errorInternal, err}, nil, nil} } } @@ -1683,17 +1686,15 @@ func (api *API) cleanTombstones(*http.Request) apiFuncResult { return apiFuncResult{nil, nil, nil, nil} } -func (api *API) respond(w http.ResponseWriter, req *http.Request, data interface{}, warnings storage.Warnings) { +// Query string is needed to get the position information for the annotations, and it +// can be empty if the position information isn't needed. +func (api *API) respond(w http.ResponseWriter, req *http.Request, data interface{}, warnings annotations.Annotations, query string) { statusMessage := statusSuccess - var warningStrings []string - for _, warning := range warnings { - warningStrings = append(warningStrings, warning.Error()) - } resp := &Response{ Status: statusMessage, Data: data, - Warnings: warningStrings, + Warnings: warnings.AsStrings(query, 10), } codec, err := api.negotiateCodec(req, resp) diff --git a/web/api/v1/api_test.go b/web/api/v1/api_test.go index c4710c69fb..c86165b780 100644 --- a/web/api/v1/api_test.go +++ b/web/api/v1/api_test.go @@ -993,14 +993,14 @@ func setupRemote(s storage.Storage) *httptest.Server { } } - querier, err := s.Querier(r.Context(), query.StartTimestampMs, query.EndTimestampMs) + querier, err := s.Querier(query.StartTimestampMs, query.EndTimestampMs) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } defer querier.Close() - set := querier.Select(false, hints, matchers...) + set := querier.Select(r.Context(), false, hints, matchers...) 
resp.Results[i], _, err = remote.ToQueryResult(set, 1e6) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) @@ -2767,9 +2767,9 @@ type fakeDB struct { err error } -func (f *fakeDB) CleanTombstones() error { return f.err } -func (f *fakeDB) Delete(int64, int64, ...*labels.Matcher) error { return f.err } -func (f *fakeDB) Snapshot(string, bool) error { return f.err } +func (f *fakeDB) CleanTombstones() error { return f.err } +func (f *fakeDB) Delete(context.Context, int64, int64, ...*labels.Matcher) error { return f.err } +func (f *fakeDB) Snapshot(string, bool) error { return f.err } func (f *fakeDB) Stats(statsByLabelName string, limit int) (_ *tsdb.Stats, retErr error) { dbDir, err := os.MkdirTemp("", "tsdb-api-ready") if err != nil { @@ -2985,7 +2985,7 @@ func TestRespondSuccess(t *testing.T) { api.InstallCodec(&testCodec{contentType: MIMEType{"test", "can-encode-2"}, canEncode: true}) s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - api.respond(w, r, "test", nil) + api.respond(w, r, "test", nil, "") })) defer s.Close() @@ -3074,7 +3074,7 @@ func TestRespondSuccess_DefaultCodecCannotEncodeResponse(t *testing.T) { api.InstallCodec(&testCodec{contentType: MIMEType{"application", "default-format"}, canEncode: false}) s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - api.respond(w, r, "test", nil) + api.respond(w, r, "test", nil, "") })) defer s.Close() @@ -3473,7 +3473,7 @@ func BenchmarkRespond(b *testing.B) { api := API{} api.InstallCodec(JSONCodec{}) for n := 0; n < b.N; n++ { - api.respond(&testResponseWriter, request, c.response, nil) + api.respond(&testResponseWriter, request, c.response, nil, "") } }) } diff --git a/web/api/v1/errors_test.go b/web/api/v1/errors_test.go index 8d194a0581..4673af201e 100644 --- a/web/api/v1/errors_test.go +++ b/web/api/v1/errors_test.go @@ -36,6 +36,7 @@ import ( "github.com/prometheus/prometheus/rules" "github.com/prometheus/prometheus/scrape" "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/util/annotations" ) func TestApiStatusCodes(t *testing.T) { @@ -154,11 +155,11 @@ func (t errorTestQueryable) ExemplarQuerier(ctx context.Context) (storage.Exempl return nil, t.err } -func (t errorTestQueryable) ChunkQuerier(ctx context.Context, mint, maxt int64) (storage.ChunkQuerier, error) { +func (t errorTestQueryable) ChunkQuerier(mint, maxt int64) (storage.ChunkQuerier, error) { return nil, t.err } -func (t errorTestQueryable) Querier(ctx context.Context, mint, maxt int64) (storage.Querier, error) { +func (t errorTestQueryable) Querier(mint, maxt int64) (storage.Querier, error) { if t.q != nil { return t.q, nil } @@ -170,11 +171,11 @@ type errorTestQuerier struct { err error } -func (t errorTestQuerier) LabelValues(name string, matchers ...*labels.Matcher) ([]string, storage.Warnings, error) { +func (t errorTestQuerier) LabelValues(context.Context, string, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, t.err } -func (t errorTestQuerier) LabelNames(matchers ...*labels.Matcher) ([]string, storage.Warnings, error) { +func (t errorTestQuerier) LabelNames(context.Context, ...*labels.Matcher) ([]string, annotations.Annotations, error) { return nil, nil, t.err } @@ -182,7 +183,7 @@ func (t errorTestQuerier) Close() error { return nil } -func (t errorTestQuerier) Select(sortSeries bool, hints *storage.SelectHints, matchers ...*labels.Matcher) storage.SeriesSet { +func (t errorTestQuerier) Select(_ 
context.Context, sortSeries bool, hints *storage.SelectHints, matchers ...*labels.Matcher) storage.SeriesSet { if t.s != nil { return t.s } @@ -205,7 +206,7 @@ func (t errorTestSeriesSet) Err() error { return t.err } -func (t errorTestSeriesSet) Warnings() storage.Warnings { +func (t errorTestSeriesSet) Warnings() annotations.Annotations { return nil } diff --git a/web/federate.go b/web/federate.go index 1c50faed06..fde1942bb2 100644 --- a/web/federate.go +++ b/web/federate.go @@ -57,6 +57,8 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) { h.mtx.RLock() defer h.mtx.RUnlock() + ctx := req.Context() + if err := req.ParseForm(); err != nil { http.Error(w, fmt.Sprintf("error parsing form values: %v", err), http.StatusBadRequest) return @@ -80,7 +82,7 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) { ) w.Header().Set("Content-Type", string(format)) - q, err := h.localStorage.Querier(req.Context(), mint, maxt) + q, err := h.localStorage.Querier(mint, maxt) if err != nil { federationErrors.Inc() if errors.Cause(err) == tsdb.ErrNotReady { @@ -98,7 +100,7 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) { var sets []storage.SeriesSet for _, mset := range matcherSets { - s := q.Select(true, hints, mset...) + s := q.Select(ctx, true, hints, mset...) sets = append(sets, s) } diff --git a/web/federate_test.go b/web/federate_test.go index 30db0d640f..ab93dcf281 100644 --- a/web/federate_test.go +++ b/web/federate_test.go @@ -237,7 +237,7 @@ type notReadyReadStorage struct { LocalStorage } -func (notReadyReadStorage) Querier(context.Context, int64, int64) (storage.Querier, error) { +func (notReadyReadStorage) Querier(int64, int64) (storage.Querier, error) { return nil, errors.Wrap(tsdb.ErrNotReady, "wrap") } diff --git a/web/ui/module/codemirror-promql/package.json b/web/ui/module/codemirror-promql/package.json index 205cd2c0df..1524dc4129 100644 --- a/web/ui/module/codemirror-promql/package.json +++ b/web/ui/module/codemirror-promql/package.json @@ -1,6 +1,6 @@ { "name": "@prometheus-io/codemirror-promql", - "version": "0.46.0", + "version": "0.47.0", "description": "a CodeMirror mode for the PromQL language", "types": "dist/esm/index.d.ts", "module": "dist/esm/index.js", @@ -29,7 +29,7 @@ }, "homepage": "https://github.com/prometheus/prometheus/blob/main/web/ui/module/codemirror-promql/README.md", "dependencies": { - "@prometheus-io/lezer-promql": "0.46.0", + "@prometheus-io/lezer-promql": "0.47.0", "lru-cache": "^7.18.3" }, "devDependencies": { diff --git a/web/ui/module/lezer-promql/package.json b/web/ui/module/lezer-promql/package.json index 06a4cacf07..92dc7a7a46 100644 --- a/web/ui/module/lezer-promql/package.json +++ b/web/ui/module/lezer-promql/package.json @@ -1,6 +1,6 @@ { "name": "@prometheus-io/lezer-promql", - "version": "0.46.0", + "version": "0.47.0", "description": "lezer-based PromQL grammar", "main": "dist/index.cjs", "type": "module", diff --git a/web/ui/package-lock.json b/web/ui/package-lock.json index 95d7d70b1d..04c1a3eddc 100644 --- a/web/ui/package-lock.json +++ b/web/ui/package-lock.json @@ -30,10 +30,10 @@ }, "module/codemirror-promql": { "name": "@prometheus-io/codemirror-promql", - "version": "0.46.0", + "version": "0.47.0", "license": "Apache-2.0", "dependencies": { - "@prometheus-io/lezer-promql": "0.46.0", + "@prometheus-io/lezer-promql": "0.47.0", "lru-cache": "^7.18.3" }, "devDependencies": { @@ -70,7 +70,7 @@ }, "module/lezer-promql": { "name": "@prometheus-io/lezer-promql", - "version": 
"0.46.0", + "version": "0.47.0", "license": "Apache-2.0", "devDependencies": { "@lezer/generator": "^1.2.3", @@ -20770,7 +20770,7 @@ }, "react-app": { "name": "@prometheus-io/app", - "version": "0.46.0", + "version": "0.47.0", "dependencies": { "@codemirror/autocomplete": "^6.7.1", "@codemirror/commands": "^6.2.4", @@ -20788,7 +20788,7 @@ "@lezer/lr": "^1.3.6", "@nexucis/fuzzy": "^0.4.1", "@nexucis/kvsearch": "^0.8.1", - "@prometheus-io/codemirror-promql": "0.46.0", + "@prometheus-io/codemirror-promql": "0.47.0", "bootstrap": "^4.6.2", "css.escape": "^1.5.1", "downshift": "^7.6.0", @@ -23428,7 +23428,7 @@ "@lezer/lr": "^1.3.6", "@nexucis/fuzzy": "^0.4.1", "@nexucis/kvsearch": "^0.8.1", - "@prometheus-io/codemirror-promql": "0.46.0", + "@prometheus-io/codemirror-promql": "0.47.0", "@testing-library/react-hooks": "^7.0.2", "@types/enzyme": "^3.10.13", "@types/flot": "0.0.32", @@ -23492,7 +23492,7 @@ "@lezer/common": "^1.0.3", "@lezer/highlight": "^1.1.6", "@lezer/lr": "^1.3.6", - "@prometheus-io/lezer-promql": "0.46.0", + "@prometheus-io/lezer-promql": "0.47.0", "isomorphic-fetch": "^3.0.0", "lru-cache": "^7.18.3", "nock": "^13.3.1" diff --git a/web/ui/react-app/package.json b/web/ui/react-app/package.json index 33eee82cff..dd0a95640f 100644 --- a/web/ui/react-app/package.json +++ b/web/ui/react-app/package.json @@ -1,6 +1,6 @@ { "name": "@prometheus-io/app", - "version": "0.46.0", + "version": "0.47.0", "private": true, "dependencies": { "@codemirror/autocomplete": "^6.7.1", @@ -19,7 +19,7 @@ "@lezer/lr": "^1.3.6", "@nexucis/fuzzy": "^0.4.1", "@nexucis/kvsearch": "^0.8.1", - "@prometheus-io/codemirror-promql": "0.46.0", + "@prometheus-io/codemirror-promql": "0.47.0", "bootstrap": "^4.6.2", "css.escape": "^1.5.1", "downshift": "^7.6.0", diff --git a/web/ui/react-app/src/pages/targets/ScrapePoolContent.tsx b/web/ui/react-app/src/pages/targets/ScrapePoolContent.tsx index e1cbce6fcb..36c996a3a3 100644 --- a/web/ui/react-app/src/pages/targets/ScrapePoolContent.tsx +++ b/web/ui/react-app/src/pages/targets/ScrapePoolContent.tsx @@ -35,12 +35,7 @@ const ScrapePoolContentTable: FC> = ({ items }) {target.health.toUpperCase()} - + {formatRelative(target.lastScrape, now())} diff --git a/web/ui/react-app/src/pages/targets/TargetLabels.module.css b/web/ui/react-app/src/pages/targets/TargetLabels.module.css deleted file mode 100644 index 9c3768818e..0000000000 --- a/web/ui/react-app/src/pages/targets/TargetLabels.module.css +++ /dev/null @@ -1,3 +0,0 @@ -.discovered { - white-space: nowrap; -} diff --git a/web/ui/react-app/src/pages/targets/TargetLabels.test.tsx b/web/ui/react-app/src/pages/targets/TargetLabels.test.tsx index 3ebe34f64c..b897a8cf79 100644 --- a/web/ui/react-app/src/pages/targets/TargetLabels.test.tsx +++ b/web/ui/react-app/src/pages/targets/TargetLabels.test.tsx @@ -17,15 +17,12 @@ describe('targetLabels', () => { job: 'node_exporter', foo: 'bar', }, - idx: 1, - scrapePool: 'cortex/node-exporter_group/0', }; const targetLabels = shallow(); it('renders a div of series labels', () => { const div = targetLabels.find('div').filterWhere((elem) => elem.hasClass('series-labels-container')); expect(div).toHaveLength(1); - expect(div.prop('id')).toEqual('series-labels-cortex/node-exporter_group/0-1'); }); it('wraps each label in a label badge', () => { @@ -38,15 +35,4 @@ describe('targetLabels', () => { }); expect(targetLabels.find(Badge)).toHaveLength(3); }); - - it('renders a tooltip for discovered labels', () => { - const tooltip = targetLabels.find(Tooltip); - expect(tooltip).toHaveLength(1); 
- expect(tooltip.prop('isOpen')).toBe(false); - expect(tooltip.prop('target')).toEqual('series-labels-cortex\\/node-exporter_group\\/0-1'); - }); - - it('renders discovered labels', () => { - expect(toJson(targetLabels)).toMatchSnapshot(); - }); }); diff --git a/web/ui/react-app/src/pages/targets/TargetLabels.tsx b/web/ui/react-app/src/pages/targets/TargetLabels.tsx index d85c58304e..e75dd2b767 100644 --- a/web/ui/react-app/src/pages/targets/TargetLabels.tsx +++ b/web/ui/react-app/src/pages/targets/TargetLabels.tsx @@ -1,7 +1,7 @@ -import React, { FC, Fragment, useState } from 'react'; -import { Badge, Tooltip } from 'reactstrap'; -import 'css.escape'; -import styles from './TargetLabels.module.css'; +import { faChevronDown, faChevronUp } from '@fortawesome/free-solid-svg-icons'; +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; +import React, { FC, useState } from 'react'; +import { Badge, Button } from 'reactstrap'; interface Labels { [key: string]: string; @@ -10,21 +10,14 @@ interface Labels { export interface TargetLabelsProps { discoveredLabels: Labels; labels: Labels; - idx: number; - scrapePool: string; } -const formatLabels = (labels: Labels): string[] => Object.keys(labels).map((key) => `${key}="${labels[key]}"`); - -const TargetLabels: FC = ({ discoveredLabels, labels, idx, scrapePool }) => { - const [tooltipOpen, setTooltipOpen] = useState(false); - - const toggle = (): void => setTooltipOpen(!tooltipOpen); - const id = `series-labels-${scrapePool}-${idx}`; +const TargetLabels: FC = ({ discoveredLabels, labels }) => { + const [showDiscovered, setShowDiscovered] = useState(false); return ( <> -
+
{Object.keys(labels).map((labelName) => { return ( @@ -32,22 +25,28 @@ const TargetLabels: FC = ({ discoveredLabels, labels, idx, sc ); })} +
- - Before relabeling: - {formatLabels(discoveredLabels).map((s: string, labelIndex: number) => ( - -
- {s} -
- ))} -
+ {showDiscovered && ( + <> +
Discovered labels:
+ {Object.keys(discoveredLabels).map((labelName) => ( +
+ + {`${labelName}="${discoveredLabels[labelName]}"`} + +
+ ))} + + )} ); }; diff --git a/web/ui/react-app/src/pages/targets/__snapshots__/TargetLabels.test.tsx.snap b/web/ui/react-app/src/pages/targets/__snapshots__/TargetLabels.test.tsx.snap deleted file mode 100644 index 3c5c856f00..0000000000 --- a/web/ui/react-app/src/pages/targets/__snapshots__/TargetLabels.test.tsx.snap +++ /dev/null @@ -1,81 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`targetLabels renders discovered labels 1`] = ` - -
- - instance="localhost:9100" - - - job="node_exporter" - - - foo="bar" - -
- - - Before relabeling: - -
- - __address__="localhost:9100" - -
- - __metrics_path__="/metrics" - -
- - __scheme__="http" - -
- - job="node_exporter" - -
-
-`;