Mirror of https://github.com/prometheus/prometheus.git (synced 2024-12-24 05:04:05 -08:00)

Commit 13c2945af0: Fixing conflicts with commit 86d3d5fec6

.github/workflows/buf-lint.yml (new file, vendored, 22 lines)
@@ -0,0 +1,22 @@
+name: buf.build
+on:
+  pull_request:
+    paths:
+      - ".github/workflows/buf-lint.yml"
+      - "**.proto"
+jobs:
+  buf:
+    name: lint
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: bufbuild/buf-setup-action@v1.13.1
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+      - uses: bufbuild/buf-lint-action@v1
+        with:
+          input: 'prompb'
+      - uses: bufbuild/buf-breaking-action@v1
+        with:
+          input: 'prompb'
+          against: 'https://github.com/prometheus/prometheus.git#branch=main,ref=HEAD,subdir=prompb'
.github/workflows/buf.yml (new file, vendored, 25 lines)
@@ -0,0 +1,25 @@
+name: buf.build
+on:
+  push:
+    branches:
+      - main
+jobs:
+  buf:
+    name: lint and publish
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: bufbuild/buf-setup-action@v1.13.1
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+      - uses: bufbuild/buf-lint-action@v1
+        with:
+          input: 'prompb'
+      - uses: bufbuild/buf-breaking-action@v1
+        with:
+          input: 'prompb'
+          against: 'https://github.com/prometheus/prometheus.git#branch=main,ref=HEAD~1,subdir=prompb'
+      - uses: bufbuild/buf-push-action@v1
+        with:
+          input: 'prompb'
+          buf_token: ${{ secrets.BUF_TOKEN }}
.github/workflows/ci.yml (new file, vendored, 221 lines)
|
@ -0,0 +1,221 @@
|
|||
---
|
||||
name: CI
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
jobs:
|
||||
test_go:
|
||||
name: Go tests
|
||||
runs-on: ubuntu-latest
|
||||
# Whenever the Go version is updated here, .promu.yml
|
||||
# should also be updated.
|
||||
container:
|
||||
image: quay.io/prometheus/golang-builder:1.19-base
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: prometheus/promci@v0.0.2
|
||||
- uses: ./.github/promci/actions/setup_environment
|
||||
- run: make GO_ONLY=1 SKIP_GOLANGCI_LINT=1
|
||||
- run: go test ./tsdb/ -test.tsdb-isolation=false
|
||||
- run: GOARCH=386 go test ./cmd/prometheus
|
||||
- run: make -C documentation/examples/remote_storage
|
||||
- run: make -C documentation/examples
|
||||
- uses: ./.github/promci/actions/check_proto
|
||||
with:
|
||||
version: "3.15.8"
|
||||
|
||||
test_ui:
|
||||
name: UI tests
|
||||
runs-on: ubuntu-latest
|
||||
# Whenever the Go version is updated here, .promu.yml
|
||||
# should also be updated.
|
||||
container:
|
||||
image: quay.io/prometheus/golang-builder:1.19-base
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: prometheus/promci@v0.0.2
|
||||
- uses: ./.github/promci/actions/setup_environment
|
||||
with:
|
||||
enable_go: false
|
||||
enable_npm: true
|
||||
- run: make assets-tarball
|
||||
- run: make ui-lint
|
||||
- run: make ui-test
|
||||
- uses: ./.github/promci/actions/save_artifacts
|
||||
with:
|
||||
directory: .tarballs
|
||||
|
||||
test_windows:
|
||||
name: Go tests on Windows
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: '>=1.19 <1.20'
|
||||
- run: |
|
||||
$TestTargets = go list ./... | Where-Object { $_ -NotMatch "(github.com/prometheus/prometheus/discovery.*|github.com/prometheus/prometheus/config|github.com/prometheus/prometheus/web)"}
|
||||
go test $TestTargets -vet=off -v
|
||||
shell: powershell
|
||||
|
||||
test_golang_oldest:
|
||||
name: Go tests with previous Go version
|
||||
runs-on: ubuntu-latest
|
||||
# The Go version in this image should be N-1 wrt test_go.
|
||||
container:
|
||||
image: quay.io/prometheus/golang-builder:1.18-base
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- run: make build
|
||||
- run: go test ./tsdb/...
|
||||
- run: go test ./tsdb/ -test.tsdb-isolation=false
|
||||
|
||||
test_mixins:
|
||||
name: Mixins tests
|
||||
runs-on: ubuntu-latest
|
||||
# Whenever the Go version is updated here, .promu.yml
|
||||
# should also be updated.
|
||||
container:
|
||||
image: quay.io/prometheus/golang-builder:1.19-base
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- run: go install ./cmd/promtool/.
|
||||
- run: go install github.com/google/go-jsonnet/cmd/jsonnet@latest
|
||||
- run: go install github.com/google/go-jsonnet/cmd/jsonnetfmt@latest
|
||||
- run: go install github.com/jsonnet-bundler/jsonnet-bundler/cmd/jb@latest
|
||||
- run: make -C documentation/prometheus-mixin clean
|
||||
- run: make -C documentation/prometheus-mixin jb_install
|
||||
- run: make -C documentation/prometheus-mixin
|
||||
- run: git diff --exit-code
|
||||
|
||||
build:
|
||||
name: Build Prometheus for common architectures
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
!(github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v2.'))
|
||||
&&
|
||||
!(github.event_name == 'pull_request' && startsWith(github.event.pull_request.base.ref, 'release-'))
|
||||
&&
|
||||
!(github.event_name == 'push' && github.event.ref == 'refs/heads/main')
|
||||
strategy:
|
||||
matrix:
|
||||
thread: [ 0, 1, 2 ]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: prometheus/promci@v0.0.2
|
||||
- uses: ./.github/promci/actions/build
|
||||
with:
|
||||
promu_opts: "-p linux/amd64 -p windows/amd64 -p linux/arm64 -p darwin/amd64 -p darwin/arm64 -p linux/386"
|
||||
parallelism: 3
|
||||
thread: ${{ matrix.thread }}
|
||||
build_all:
|
||||
name: Build Prometheus for all architectures
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
(github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v2.'))
|
||||
||
|
||||
(github.event_name == 'pull_request' && startsWith(github.event.pull_request.base.ref, 'release-'))
|
||||
||
|
||||
(github.event_name == 'push' && github.event.ref == 'refs/heads/main')
|
||||
strategy:
|
||||
matrix:
|
||||
thread: [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 ]
|
||||
|
||||
# Whenever the Go version is updated here, .promu.yml
|
||||
# should also be updated.
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: prometheus/promci@v0.0.2
|
||||
- uses: ./.github/promci/actions/build
|
||||
with:
|
||||
parallelism: 12
|
||||
thread: ${{ matrix.thread }}
|
||||
golangci:
|
||||
name: golangci-lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.20.x
|
||||
- name: Install snmp_exporter/generator dependencies
|
||||
run: sudo apt-get update && sudo apt-get -y install libsnmp-dev
|
||||
if: github.repository == 'prometheus/snmp_exporter'
|
||||
- name: Lint
|
||||
uses: golangci/golangci-lint-action@v3.4.0
|
||||
with:
|
||||
version: v1.51.2
|
||||
fuzzing:
|
||||
uses: ./.github/workflows/fuzzing.yml
|
||||
if: github.event_name == 'pull_request'
|
||||
codeql:
|
||||
uses: ./.github/workflows/codeql-analysis.yml
|
||||
|
||||
publish_main:
|
||||
name: Publish main branch artifacts
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test_ui, test_go, test_windows, golangci, codeql, build_all]
|
||||
if: github.event_name == 'push' && github.event.ref == 'refs/heads/main'
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: prometheus/promci@v0.0.2
|
||||
- uses: ./.github/promci/actions/publish_main
|
||||
with:
|
||||
docker_hub_login: ${{ secrets.docker_hub_login }}
|
||||
docker_hub_password: ${{ secrets.docker_hub_password }}
|
||||
quay_io_login: ${{ secrets.quay_io_login }}
|
||||
quay_io_password: ${{ secrets.quay_io_password }}
|
||||
publish_release:
|
||||
name: Publish release artifacts
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test_ui, test_go, test_windows, golangci, codeql, build_all]
|
||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v2.')
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: prometheus/promci@v0.0.2
|
||||
- uses: ./.github/promci/actions/publish_release
|
||||
with:
|
||||
docker_hub_login: ${{ secrets.docker_hub_login }}
|
||||
docker_hub_password: ${{ secrets.docker_hub_password }}
|
||||
quay_io_login: ${{ secrets.quay_io_login }}
|
||||
quay_io_password: ${{ secrets.quay_io_password }}
|
||||
github_token: ${{ secrets.PROMBOT_GITHUB_TOKEN }}
|
||||
publish_ui_release:
|
||||
name: Publish UI on npm Registry
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test_ui, codeql]
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- uses: prometheus/promci@v0.0.2
|
||||
- name: Install nodejs
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version-file: "web/ui/.nvmrc"
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
- uses: actions/cache@v3.2.4
|
||||
with:
|
||||
path: ~/.npm
|
||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-node-
|
||||
- name: Check libraries version
|
||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v2.')
|
||||
run: ./scripts/ui_release.sh --check-package "$(echo ${{ github.ref_name }}|sed s/v2/v0/)"
|
||||
- name: build
|
||||
run: make assets
|
||||
- name: Copy files before publishing libs
|
||||
run: ./scripts/ui_release.sh --copy
|
||||
- name: Publish dry-run libraries
|
||||
if: "!(github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v2.'))"
|
||||
run: ./scripts/ui_release.sh --publish dry-run
|
||||
- name: Publish libraries
|
||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v2.')
|
||||
run: ./scripts/ui_release.sh --publish
|
||||
env:
|
||||
# The setup-node action writes an .npmrc file with this env variable
|
||||
# as the placeholder for the auth token
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
CHANGELOG.md (24 lines changed)
@@ -1,5 +1,27 @@
 # Changelog
 
+## 2.42.0 / 2023-01-31
+
+This release comes with a bunch of feature coverage for native histograms and breaking changes.
+
+If you are trying native histograms already, we recommend you remove the `wal` directory when upgrading.
+Because the old WAL record for native histograms is not backward compatible in v2.42.0, this will lead to some data loss for the latest data.
+
+Additionally, if you scrape "float histograms" or use recording rules on native histograms in v2.42.0 (which writes float histograms),
+it is a one-way street since older versions do not support float histograms.
+
+* [CHANGE] **breaking** TSDB: Changed WAL record format for the experimental native histograms. #11783
+* [FEATURE] Add 'keep_firing_for' field to alerting rules. #11827
+* [FEATURE] Promtool: Add support of selecting timeseries for TSDB dump. #11872
+* [ENHANCEMENT] Agent: Native histogram support. #11842
+* [ENHANCEMENT] Rules: Support native histograms in recording rules. #11838
+* [ENHANCEMENT] SD: Add container ID as a meta label for pod targets for Kubernetes. #11844
+* [ENHANCEMENT] SD: Add VM size label to azure service discovery. #11650
+* [ENHANCEMENT] Support native histograms in federation. #11830
+* [ENHANCEMENT] TSDB: Add gauge histogram support. #11783 #11840 #11814
+* [ENHANCEMENT] TSDB/Scrape: Support FloatHistogram that represents buckets as float64 values. #11522 #11817 #11716
+* [ENHANCEMENT] UI: Show individual scrape pools on /targets page. #11142
+
 ## 2.41.0 / 2022-12-20
 
 * [FEATURE] Relabeling: Add `keepequal` and `dropequal` relabel actions. #11564
@@ -61,7 +83,7 @@ Your existing histograms won't switch to native histograms until `NativeHistogra
 * [ENHANCEMENT] Kubernetes SD: Use protobuf encoding. #11353
 * [ENHANCEMENT] TSDB: Use golang.org/x/exp/slices for improved sorting speed. #11054 #11318 #11380
 * [ENHANCEMENT] Consul SD: Add enterprise admin partitions. Adds `__meta_consul_partition` label. Adds `partition` config in `consul_sd_config`. #11482
-* [BUGFIX] API: Fix API error codes for `/api/v1/labels` and `/api/v1/series`. #11356
+* [BUGFIX] API: Fix API error codes for `/api/v1/labels` and `/api/v1/series`. #11356
 
 ## 2.39.2 / 2022-11-09
 
|
@ -61,7 +61,7 @@ PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_
|
|||
SKIP_GOLANGCI_LINT :=
|
||||
GOLANGCI_LINT :=
|
||||
GOLANGCI_LINT_OPTS ?=
|
||||
GOLANGCI_LINT_VERSION ?= v1.50.1
|
||||
GOLANGCI_LINT_VERSION ?= v1.51.2
|
||||
# golangci-lint only supports linux, darwin and windows platforms on i386/amd64.
|
||||
# windows isn't included here because of the path separator being different.
|
||||
ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin))
|
||||
|
|
|
@ -45,6 +45,7 @@ import (
|
|||
"gopkg.in/yaml.v2"
|
||||
|
||||
dto "github.com/prometheus/client_model/go"
|
||||
promconfig "github.com/prometheus/common/config"
|
||||
"github.com/prometheus/common/expfmt"
|
||||
|
||||
"github.com/prometheus/prometheus/config"
|
||||
|
@ -74,6 +75,12 @@ const (
|
|||
var lintOptions = []string{lintOptionAll, lintOptionDuplicateRules, lintOptionNone}
|
||||
|
||||
func main() {
|
||||
var (
|
||||
httpRoundTripper = api.DefaultRoundTripper
|
||||
serverURL *url.URL
|
||||
httpConfigFilePath string
|
||||
)
|
||||
|
||||
app := kingpin.New(filepath.Base(os.Args[0]), "Tooling for the Prometheus monitoring system.").UsageWriter(os.Stdout)
|
||||
app.Version(version.Print("promtool"))
|
||||
app.HelpFlag.Short('h')
|
||||
|
@ -124,14 +131,15 @@ func main() {
|
|||
|
||||
queryCmd := app.Command("query", "Run query against a Prometheus server.")
|
||||
queryCmdFmt := queryCmd.Flag("format", "Output format of the query.").Short('o').Default("promql").Enum("promql", "json")
|
||||
queryCmd.Flag("http.config.file", "HTTP client configuration file for promtool to connect to Prometheus.").PlaceHolder("<filename>").ExistingFileVar(&httpConfigFilePath)
|
||||
|
||||
queryInstantCmd := queryCmd.Command("instant", "Run instant query.")
|
||||
queryInstantServer := queryInstantCmd.Arg("server", "Prometheus server to query.").Required().URL()
|
||||
queryInstantCmd.Arg("server", "Prometheus server to query.").Required().URLVar(&serverURL)
|
||||
queryInstantExpr := queryInstantCmd.Arg("expr", "PromQL query expression.").Required().String()
|
||||
queryInstantTime := queryInstantCmd.Flag("time", "Query evaluation time (RFC3339 or Unix timestamp).").String()
|
||||
|
||||
queryRangeCmd := queryCmd.Command("range", "Run range query.")
|
||||
queryRangeServer := queryRangeCmd.Arg("server", "Prometheus server to query.").Required().URL()
|
||||
queryRangeCmd.Arg("server", "Prometheus server to query.").Required().URLVar(&serverURL)
|
||||
queryRangeExpr := queryRangeCmd.Arg("expr", "PromQL query expression.").Required().String()
|
||||
queryRangeHeaders := queryRangeCmd.Flag("header", "Extra headers to send to server.").StringMap()
|
||||
queryRangeBegin := queryRangeCmd.Flag("start", "Query range start time (RFC3339 or Unix timestamp).").String()
|
||||
|
@ -139,7 +147,7 @@ func main() {
|
|||
queryRangeStep := queryRangeCmd.Flag("step", "Query step size (duration).").Duration()
|
||||
|
||||
querySeriesCmd := queryCmd.Command("series", "Run series query.")
|
||||
querySeriesServer := querySeriesCmd.Arg("server", "Prometheus server to query.").Required().URL()
|
||||
querySeriesCmd.Arg("server", "Prometheus server to query.").Required().URLVar(&serverURL)
|
||||
querySeriesMatch := querySeriesCmd.Flag("match", "Series selector. Can be specified multiple times.").Required().Strings()
|
||||
querySeriesBegin := querySeriesCmd.Flag("start", "Start time (RFC3339 or Unix timestamp).").String()
|
||||
querySeriesEnd := querySeriesCmd.Flag("end", "End time (RFC3339 or Unix timestamp).").String()
|
||||
|
@ -153,7 +161,7 @@ func main() {
|
|||
debugAllServer := debugAllCmd.Arg("server", "Prometheus server to get all debug information from.").Required().String()
|
||||
|
||||
queryLabelsCmd := queryCmd.Command("labels", "Run labels query.")
|
||||
queryLabelsServer := queryLabelsCmd.Arg("server", "Prometheus server to query.").Required().URL()
|
||||
queryLabelsCmd.Arg("server", "Prometheus server to query.").Required().URLVar(&serverURL)
|
||||
queryLabelsName := queryLabelsCmd.Arg("name", "Label name to provide label values for.").Required().String()
|
||||
queryLabelsBegin := queryLabelsCmd.Flag("start", "Start time (RFC3339 or Unix timestamp).").String()
|
||||
queryLabelsEnd := queryLabelsCmd.Flag("end", "End time (RFC3339 or Unix timestamp).").String()
|
||||
|
@ -200,7 +208,8 @@ func main() {
|
|||
importFilePath := openMetricsImportCmd.Arg("input file", "OpenMetrics file to read samples from.").Required().String()
|
||||
importDBPath := openMetricsImportCmd.Arg("output directory", "Output directory for generated blocks.").Default(defaultDBPath).String()
|
||||
importRulesCmd := importCmd.Command("rules", "Create blocks of data for new recording rules.")
|
||||
importRulesURL := importRulesCmd.Flag("url", "The URL for the Prometheus API with the data where the rule will be backfilled from.").Default("http://localhost:9090").URL()
|
||||
importRulesCmd.Flag("http.config.file", "HTTP client configuration file for promtool to connect to Prometheus.").PlaceHolder("<filename>").ExistingFileVar(&httpConfigFilePath)
|
||||
importRulesCmd.Flag("url", "The URL for the Prometheus API with the data where the rule will be backfilled from.").Default("http://localhost:9090").URLVar(&serverURL)
|
||||
importRulesStart := importRulesCmd.Flag("start", "The time to start backfilling the new rule from. Must be a RFC3339 formatted date or Unix timestamp. Required.").
|
||||
Required().String()
|
||||
importRulesEnd := importRulesCmd.Flag("end", "If an end time is provided, all recording rules in the rule files provided will be backfilled to the end time. Default will backfill up to 3 hours ago. Must be a RFC3339 formatted date or Unix timestamp.").String()
|
||||
|
@ -224,6 +233,22 @@ func main() {
|
|||
p = &promqlPrinter{}
|
||||
}
|
||||
|
||||
if httpConfigFilePath != "" {
|
||||
if serverURL != nil && serverURL.User.Username() != "" {
|
||||
kingpin.Fatalf("Cannot set base auth in the server URL and use a http.config.file at the same time")
|
||||
}
|
||||
var err error
|
||||
httpConfig, _, err := config_util.LoadHTTPConfigFile(httpConfigFilePath)
|
||||
if err != nil {
|
||||
kingpin.Fatalf("Failed to load HTTP config file: %v", err)
|
||||
}
|
||||
|
||||
httpRoundTripper, err = promconfig.NewRoundTripperFromConfig(*httpConfig, "promtool", config_util.WithUserAgent("promtool/"+version.Version))
|
||||
if err != nil {
|
||||
kingpin.Fatalf("Failed to create a new HTTP round tripper: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
var noDefaultScrapePort bool
|
||||
for _, f := range *featureList {
|
||||
opts := strings.Split(f, ",")
|
||||
|
@ -258,13 +283,13 @@ func main() {
|
|||
os.Exit(CheckMetrics(*checkMetricsExtended))
|
||||
|
||||
case queryInstantCmd.FullCommand():
|
||||
os.Exit(QueryInstant(*queryInstantServer, *queryInstantExpr, *queryInstantTime, p))
|
||||
os.Exit(QueryInstant(serverURL, httpRoundTripper, *queryInstantExpr, *queryInstantTime, p))
|
||||
|
||||
case queryRangeCmd.FullCommand():
|
||||
os.Exit(QueryRange(*queryRangeServer, *queryRangeHeaders, *queryRangeExpr, *queryRangeBegin, *queryRangeEnd, *queryRangeStep, p))
|
||||
os.Exit(QueryRange(serverURL, httpRoundTripper, *queryRangeHeaders, *queryRangeExpr, *queryRangeBegin, *queryRangeEnd, *queryRangeStep, p))
|
||||
|
||||
case querySeriesCmd.FullCommand():
|
||||
os.Exit(QuerySeries(*querySeriesServer, *querySeriesMatch, *querySeriesBegin, *querySeriesEnd, p))
|
||||
os.Exit(QuerySeries(serverURL, httpRoundTripper, *querySeriesMatch, *querySeriesBegin, *querySeriesEnd, p))
|
||||
|
||||
case debugPprofCmd.FullCommand():
|
||||
os.Exit(debugPprof(*debugPprofServer))
|
||||
|
@ -276,7 +301,7 @@ func main() {
|
|||
os.Exit(debugAll(*debugAllServer))
|
||||
|
||||
case queryLabelsCmd.FullCommand():
|
||||
os.Exit(QueryLabels(*queryLabelsServer, *queryLabelsMatch, *queryLabelsName, *queryLabelsBegin, *queryLabelsEnd, p))
|
||||
os.Exit(QueryLabels(serverURL, httpRoundTripper, *queryLabelsMatch, *queryLabelsName, *queryLabelsBegin, *queryLabelsEnd, p))
|
||||
|
||||
case testRulesCmd.FullCommand():
|
||||
os.Exit(RulesUnitTest(
|
||||
|
@ -303,7 +328,7 @@ func main() {
|
|||
os.Exit(backfillOpenMetrics(*importFilePath, *importDBPath, *importHumanReadable, *importQuiet, *maxBlockDuration))
|
||||
|
||||
case importRulesCmd.FullCommand():
|
||||
os.Exit(checkErr(importRules(*importRulesURL, *importRulesStart, *importRulesEnd, *importRulesOutputDir, *importRulesEvalInterval, *maxBlockDuration, *importRulesFiles...)))
|
||||
os.Exit(checkErr(importRules(serverURL, httpRoundTripper, *importRulesStart, *importRulesEnd, *importRulesOutputDir, *importRulesEvalInterval, *maxBlockDuration, *importRulesFiles...)))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -795,12 +820,13 @@ func checkMetricsExtended(r io.Reader) ([]metricStat, int, error) {
|
|||
}
|
||||
|
||||
// QueryInstant performs an instant query against a Prometheus server.
|
||||
func QueryInstant(url *url.URL, query, evalTime string, p printer) int {
|
||||
func QueryInstant(url *url.URL, roundTripper http.RoundTripper, query, evalTime string, p printer) int {
|
||||
if url.Scheme == "" {
|
||||
url.Scheme = "http"
|
||||
}
|
||||
config := api.Config{
|
||||
Address: url.String(),
|
||||
Address: url.String(),
|
||||
RoundTripper: roundTripper,
|
||||
}
|
||||
|
||||
// Create new client.
|
||||
|
@ -835,12 +861,13 @@ func QueryInstant(url *url.URL, query, evalTime string, p printer) int {
|
|||
}
|
||||
|
||||
// QueryRange performs a range query against a Prometheus server.
|
||||
func QueryRange(url *url.URL, headers map[string]string, query, start, end string, step time.Duration, p printer) int {
|
||||
func QueryRange(url *url.URL, roundTripper http.RoundTripper, headers map[string]string, query, start, end string, step time.Duration, p printer) int {
|
||||
if url.Scheme == "" {
|
||||
url.Scheme = "http"
|
||||
}
|
||||
config := api.Config{
|
||||
Address: url.String(),
|
||||
Address: url.String(),
|
||||
RoundTripper: roundTripper,
|
||||
}
|
||||
|
||||
if len(headers) > 0 {
|
||||
|
@ -848,7 +875,7 @@ func QueryRange(url *url.URL, headers map[string]string, query, start, end strin
|
|||
for key, value := range headers {
|
||||
req.Header.Add(key, value)
|
||||
}
|
||||
return http.DefaultTransport.RoundTrip(req)
|
||||
return roundTripper.RoundTrip(req)
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -908,12 +935,13 @@ func QueryRange(url *url.URL, headers map[string]string, query, start, end strin
|
|||
}
|
||||
|
||||
// QuerySeries queries for a series against a Prometheus server.
|
||||
func QuerySeries(url *url.URL, matchers []string, start, end string, p printer) int {
|
||||
func QuerySeries(url *url.URL, roundTripper http.RoundTripper, matchers []string, start, end string, p printer) int {
|
||||
if url.Scheme == "" {
|
||||
url.Scheme = "http"
|
||||
}
|
||||
config := api.Config{
|
||||
Address: url.String(),
|
||||
Address: url.String(),
|
||||
RoundTripper: roundTripper,
|
||||
}
|
||||
|
||||
// Create new client.
|
||||
|
@ -944,12 +972,13 @@ func QuerySeries(url *url.URL, matchers []string, start, end string, p printer)
|
|||
}
|
||||
|
||||
// QueryLabels queries for label values against a Prometheus server.
|
||||
func QueryLabels(url *url.URL, matchers []string, name, start, end string, p printer) int {
|
||||
func QueryLabels(url *url.URL, roundTripper http.RoundTripper, matchers []string, name, start, end string, p printer) int {
|
||||
if url.Scheme == "" {
|
||||
url.Scheme = "http"
|
||||
}
|
||||
config := api.Config{
|
||||
Address: url.String(),
|
||||
Address: url.String(),
|
||||
RoundTripper: roundTripper,
|
||||
}
|
||||
|
||||
// Create new client.
|
||||
|
@ -1154,7 +1183,7 @@ func (j *jsonPrinter) printLabelValues(v model.LabelValues) {
|
|||
|
||||
// importRules backfills recording rules from the files provided. The output are blocks of data
|
||||
// at the outputDir location.
|
||||
func importRules(url *url.URL, start, end, outputDir string, evalInterval, maxBlockDuration time.Duration, files ...string) error {
|
||||
func importRules(url *url.URL, roundTripper http.RoundTripper, start, end, outputDir string, evalInterval, maxBlockDuration time.Duration, files ...string) error {
|
||||
ctx := context.Background()
|
||||
var stime, etime time.Time
|
||||
var err error
|
||||
|
@ -1184,7 +1213,8 @@ func importRules(url *url.URL, start, end, outputDir string, evalInterval, maxBl
|
|||
maxBlockDuration: maxBlockDuration,
|
||||
}
|
||||
client, err := api.NewClient(api.Config{
|
||||
Address: url.String(),
|
||||
Address: url.String(),
|
||||
RoundTripper: roundTripper,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("new api client error: %w", err)
|
||||
|
|
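The hunks above thread an http.RoundTripper from the new --http.config.file flag through every promtool query path. A minimal, hypothetical sketch of the same client wiring follows; it assumes the published client_golang packages, and the localhost address and the DefaultTransport stand-in are placeholders, not part of this commit.

// Hypothetical usage sketch: hand a custom http.RoundTripper to the
// Prometheus API client, as promtool now does with the transport built
// from --http.config.file.
package main

import (
	"context"
	"fmt"
	"net/http"
	"time"

	"github.com/prometheus/client_golang/api"
	v1 "github.com/prometheus/client_golang/api/prometheus/v1"
)

func main() {
	client, err := api.NewClient(api.Config{
		Address:      "http://localhost:9090", // placeholder address
		RoundTripper: http.DefaultTransport,   // any custom transport works here
	})
	if err != nil {
		panic(err)
	}

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	// Run an instant query, mirroring what `promtool query instant` does.
	result, warnings, err := v1.NewAPI(client).Query(ctx, "up", time.Now())
	if err != nil {
		panic(err)
	}
	if len(warnings) > 0 {
		fmt.Println("warnings:", warnings)
	}
	fmt.Println(result)
}

Because the round tripper is carried inside api.Config, the instant, range, series, and label query commands reuse one authenticated transport instead of each building its own.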
|
@ -56,14 +56,14 @@ func TestQueryRange(t *testing.T) {
|
|||
require.Equal(t, nil, err)
|
||||
|
||||
p := &promqlPrinter{}
|
||||
exitCode := QueryRange(urlObject, map[string]string{}, "up", "0", "300", 0, p)
|
||||
exitCode := QueryRange(urlObject, http.DefaultTransport, map[string]string{}, "up", "0", "300", 0, p)
|
||||
require.Equal(t, "/api/v1/query_range", getRequest().URL.Path)
|
||||
form := getRequest().Form
|
||||
require.Equal(t, "up", form.Get("query"))
|
||||
require.Equal(t, "1", form.Get("step"))
|
||||
require.Equal(t, 0, exitCode)
|
||||
|
||||
exitCode = QueryRange(urlObject, map[string]string{}, "up", "0", "300", 10*time.Millisecond, p)
|
||||
exitCode = QueryRange(urlObject, http.DefaultTransport, map[string]string{}, "up", "0", "300", 10*time.Millisecond, p)
|
||||
require.Equal(t, "/api/v1/query_range", getRequest().URL.Path)
|
||||
form = getRequest().Form
|
||||
require.Equal(t, "up", form.Get("query"))
|
||||
|
@ -79,7 +79,7 @@ func TestQueryInstant(t *testing.T) {
|
|||
require.Equal(t, nil, err)
|
||||
|
||||
p := &promqlPrinter{}
|
||||
exitCode := QueryInstant(urlObject, "up", "300", p)
|
||||
exitCode := QueryInstant(urlObject, http.DefaultTransport, "up", "300", p)
|
||||
require.Equal(t, "/api/v1/query", getRequest().URL.Path)
|
||||
form := getRequest().Form
|
||||
require.Equal(t, "up", form.Get("query"))
|
||||
|
|
|
@ -365,7 +365,7 @@ func TestGetDatacenterShouldReturnError(t *testing.T) {
|
|||
{
|
||||
// Define a handler that will return status 500.
|
||||
handler: func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(500)
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
},
|
||||
errMessage: "Unexpected response code: 500 ()",
|
||||
},
|
||||
|
|
|
@ -74,16 +74,16 @@ func createTestHTTPServer(t *testing.T, responder discoveryResponder) *httptest.
|
|||
|
||||
discoveryRes, err := responder(discoveryReq)
|
||||
if err != nil {
|
||||
w.WriteHeader(500)
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if discoveryRes == nil {
|
||||
w.WriteHeader(304)
|
||||
w.WriteHeader(http.StatusNotModified)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(200)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
data, err := protoJSONMarshalOptions.Marshal(discoveryRes)
|
||||
require.NoError(t, err)
|
||||
|
||||
|
|
|
@ -10,6 +10,7 @@ import (
|
|||
math "math"
|
||||
math_bits "math/bits"
|
||||
|
||||
_ "github.com/gogo/protobuf/gogoproto"
|
||||
proto "github.com/gogo/protobuf/proto"
|
||||
types "github.com/gogo/protobuf/types"
|
||||
)
|
||||
|
@ -281,12 +282,12 @@ func (m *Quantile) GetValue() float64 {
|
|||
}
|
||||
|
||||
type Summary struct {
|
||||
SampleCount uint64 `protobuf:"varint,1,opt,name=sample_count,json=sampleCount,proto3" json:"sample_count,omitempty"`
|
||||
SampleSum float64 `protobuf:"fixed64,2,opt,name=sample_sum,json=sampleSum,proto3" json:"sample_sum,omitempty"`
|
||||
Quantile []*Quantile `protobuf:"bytes,3,rep,name=quantile,proto3" json:"quantile,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
SampleCount uint64 `protobuf:"varint,1,opt,name=sample_count,json=sampleCount,proto3" json:"sample_count,omitempty"`
|
||||
SampleSum float64 `protobuf:"fixed64,2,opt,name=sample_sum,json=sampleSum,proto3" json:"sample_sum,omitempty"`
|
||||
Quantile []Quantile `protobuf:"bytes,3,rep,name=quantile,proto3" json:"quantile"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Summary) Reset() { *m = Summary{} }
|
||||
|
@ -336,7 +337,7 @@ func (m *Summary) GetSampleSum() float64 {
|
|||
return 0
|
||||
}
|
||||
|
||||
func (m *Summary) GetQuantile() []*Quantile {
|
||||
func (m *Summary) GetQuantile() []Quantile {
|
||||
if m != nil {
|
||||
return m.Quantile
|
||||
}
|
||||
|
@ -395,7 +396,7 @@ type Histogram struct {
|
|||
SampleCountFloat float64 `protobuf:"fixed64,4,opt,name=sample_count_float,json=sampleCountFloat,proto3" json:"sample_count_float,omitempty"`
|
||||
SampleSum float64 `protobuf:"fixed64,2,opt,name=sample_sum,json=sampleSum,proto3" json:"sample_sum,omitempty"`
|
||||
// Buckets for the conventional histogram.
|
||||
Bucket []*Bucket `protobuf:"bytes,3,rep,name=bucket,proto3" json:"bucket,omitempty"`
|
||||
Bucket []Bucket `protobuf:"bytes,3,rep,name=bucket,proto3" json:"bucket"`
|
||||
// schema defines the bucket schema. Currently, valid numbers are -4 <= n <= 8.
|
||||
// They are all for base-2 bucket schemas, where 1 is a bucket boundary in each case, and
|
||||
// then each power of two is divided into 2^n logarithmic buckets.
|
||||
|
@ -406,14 +407,14 @@ type Histogram struct {
|
|||
ZeroCount uint64 `protobuf:"varint,7,opt,name=zero_count,json=zeroCount,proto3" json:"zero_count,omitempty"`
|
||||
ZeroCountFloat float64 `protobuf:"fixed64,8,opt,name=zero_count_float,json=zeroCountFloat,proto3" json:"zero_count_float,omitempty"`
|
||||
// Negative buckets for the native histogram.
|
||||
NegativeSpan []*BucketSpan `protobuf:"bytes,9,rep,name=negative_span,json=negativeSpan,proto3" json:"negative_span,omitempty"`
|
||||
NegativeSpan []BucketSpan `protobuf:"bytes,9,rep,name=negative_span,json=negativeSpan,proto3" json:"negative_span"`
|
||||
// Use either "negative_delta" or "negative_count", the former for
|
||||
// regular histograms with integer counts, the latter for float
|
||||
// histograms.
|
||||
NegativeDelta []int64 `protobuf:"zigzag64,10,rep,packed,name=negative_delta,json=negativeDelta,proto3" json:"negative_delta,omitempty"`
|
||||
NegativeCount []float64 `protobuf:"fixed64,11,rep,packed,name=negative_count,json=negativeCount,proto3" json:"negative_count,omitempty"`
|
||||
// Positive buckets for the native histogram.
|
||||
PositiveSpan []*BucketSpan `protobuf:"bytes,12,rep,name=positive_span,json=positiveSpan,proto3" json:"positive_span,omitempty"`
|
||||
PositiveSpan []BucketSpan `protobuf:"bytes,12,rep,name=positive_span,json=positiveSpan,proto3" json:"positive_span"`
|
||||
// Use either "positive_delta" or "positive_count", the former for
|
||||
// regular histograms with integer counts, the latter for float
|
||||
// histograms.
|
||||
|
@ -478,7 +479,7 @@ func (m *Histogram) GetSampleSum() float64 {
|
|||
return 0
|
||||
}
|
||||
|
||||
func (m *Histogram) GetBucket() []*Bucket {
|
||||
func (m *Histogram) GetBucket() []Bucket {
|
||||
if m != nil {
|
||||
return m.Bucket
|
||||
}
|
||||
|
@ -513,7 +514,7 @@ func (m *Histogram) GetZeroCountFloat() float64 {
|
|||
return 0
|
||||
}
|
||||
|
||||
func (m *Histogram) GetNegativeSpan() []*BucketSpan {
|
||||
func (m *Histogram) GetNegativeSpan() []BucketSpan {
|
||||
if m != nil {
|
||||
return m.NegativeSpan
|
||||
}
|
||||
|
@ -534,7 +535,7 @@ func (m *Histogram) GetNegativeCount() []float64 {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (m *Histogram) GetPositiveSpan() []*BucketSpan {
|
||||
func (m *Histogram) GetPositiveSpan() []BucketSpan {
|
||||
if m != nil {
|
||||
return m.PositiveSpan
|
||||
}
|
||||
|
@ -688,7 +689,7 @@ func (m *BucketSpan) GetLength() uint32 {
|
|||
}
|
||||
|
||||
type Exemplar struct {
|
||||
Label []*LabelPair `protobuf:"bytes,1,rep,name=label,proto3" json:"label,omitempty"`
|
||||
Label []LabelPair `protobuf:"bytes,1,rep,name=label,proto3" json:"label"`
|
||||
Value float64 `protobuf:"fixed64,2,opt,name=value,proto3" json:"value,omitempty"`
|
||||
Timestamp *types.Timestamp `protobuf:"bytes,3,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
|
@ -729,7 +730,7 @@ func (m *Exemplar) XXX_DiscardUnknown() {
|
|||
|
||||
var xxx_messageInfo_Exemplar proto.InternalMessageInfo
|
||||
|
||||
func (m *Exemplar) GetLabel() []*LabelPair {
|
||||
func (m *Exemplar) GetLabel() []LabelPair {
|
||||
if m != nil {
|
||||
return m.Label
|
||||
}
|
||||
|
@ -751,16 +752,16 @@ func (m *Exemplar) GetTimestamp() *types.Timestamp {
|
|||
}
|
||||
|
||||
type Metric struct {
|
||||
Label []*LabelPair `protobuf:"bytes,1,rep,name=label,proto3" json:"label,omitempty"`
|
||||
Gauge *Gauge `protobuf:"bytes,2,opt,name=gauge,proto3" json:"gauge,omitempty"`
|
||||
Counter *Counter `protobuf:"bytes,3,opt,name=counter,proto3" json:"counter,omitempty"`
|
||||
Summary *Summary `protobuf:"bytes,4,opt,name=summary,proto3" json:"summary,omitempty"`
|
||||
Untyped *Untyped `protobuf:"bytes,5,opt,name=untyped,proto3" json:"untyped,omitempty"`
|
||||
Histogram *Histogram `protobuf:"bytes,7,opt,name=histogram,proto3" json:"histogram,omitempty"`
|
||||
TimestampMs int64 `protobuf:"varint,6,opt,name=timestamp_ms,json=timestampMs,proto3" json:"timestamp_ms,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
Label []LabelPair `protobuf:"bytes,1,rep,name=label,proto3" json:"label"`
|
||||
Gauge *Gauge `protobuf:"bytes,2,opt,name=gauge,proto3" json:"gauge,omitempty"`
|
||||
Counter *Counter `protobuf:"bytes,3,opt,name=counter,proto3" json:"counter,omitempty"`
|
||||
Summary *Summary `protobuf:"bytes,4,opt,name=summary,proto3" json:"summary,omitempty"`
|
||||
Untyped *Untyped `protobuf:"bytes,5,opt,name=untyped,proto3" json:"untyped,omitempty"`
|
||||
Histogram *Histogram `protobuf:"bytes,7,opt,name=histogram,proto3" json:"histogram,omitempty"`
|
||||
TimestampMs int64 `protobuf:"varint,6,opt,name=timestamp_ms,json=timestampMs,proto3" json:"timestamp_ms,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Metric) Reset() { *m = Metric{} }
|
||||
|
@ -796,7 +797,7 @@ func (m *Metric) XXX_DiscardUnknown() {
|
|||
|
||||
var xxx_messageInfo_Metric proto.InternalMessageInfo
|
||||
|
||||
func (m *Metric) GetLabel() []*LabelPair {
|
||||
func (m *Metric) GetLabel() []LabelPair {
|
||||
if m != nil {
|
||||
return m.Label
|
||||
}
|
||||
|
@ -849,7 +850,7 @@ type MetricFamily struct {
|
|||
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
|
||||
Help string `protobuf:"bytes,2,opt,name=help,proto3" json:"help,omitempty"`
|
||||
Type MetricType `protobuf:"varint,3,opt,name=type,proto3,enum=io.prometheus.client.MetricType" json:"type,omitempty"`
|
||||
Metric []*Metric `protobuf:"bytes,4,rep,name=metric,proto3" json:"metric,omitempty"`
|
||||
Metric []Metric `protobuf:"bytes,4,rep,name=metric,proto3" json:"metric"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
|
@ -909,7 +910,7 @@ func (m *MetricFamily) GetType() MetricType {
|
|||
return MetricType_COUNTER
|
||||
}
|
||||
|
||||
func (m *MetricFamily) GetMetric() []*Metric {
|
||||
func (m *MetricFamily) GetMetric() []Metric {
|
||||
if m != nil {
|
||||
return m.Metric
|
||||
}
|
||||
|
@ -937,63 +938,65 @@ func init() {
|
|||
}
|
||||
|
||||
var fileDescriptor_d1e5ddb18987a258 = []byte{
|
||||
// 894 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xdd, 0x6e, 0xe3, 0x44,
|
||||
0x18, 0xc5, 0xcd, 0xaf, 0xbf, 0x34, 0xdd, 0xec, 0x50, 0xad, 0xac, 0x42, 0xdb, 0x60, 0x09, 0xa9,
|
||||
0x20, 0xe4, 0x08, 0xe8, 0x0a, 0x84, 0xe0, 0xa2, 0xdd, 0xcd, 0x76, 0x91, 0xc8, 0xee, 0x32, 0x49,
|
||||
0x2e, 0x16, 0x2e, 0xac, 0x49, 0x3a, 0x4d, 0x2c, 0x3c, 0x1e, 0x63, 0x8f, 0x57, 0x94, 0x17, 0xe0,
|
||||
0x9a, 0x57, 0xe0, 0x61, 0x10, 0x97, 0x3c, 0x02, 0x2a, 0x0f, 0x02, 0x9a, 0x3f, 0xbb, 0x59, 0x39,
|
||||
0xcb, 0xb2, 0x77, 0x99, 0xe3, 0x73, 0xbe, 0x39, 0x67, 0x3c, 0x39, 0x06, 0x3f, 0xe2, 0xa3, 0x34,
|
||||
0xe3, 0x8c, 0x8a, 0x35, 0x2d, 0xf2, 0xd1, 0x32, 0x8e, 0x68, 0x22, 0x46, 0x8c, 0x8a, 0x2c, 0x5a,
|
||||
0xe6, 0x41, 0x9a, 0x71, 0xc1, 0xd1, 0x7e, 0xc4, 0x83, 0x8a, 0x13, 0x68, 0xce, 0xc1, 0xf1, 0x8a,
|
||||
0xf3, 0x55, 0x4c, 0x47, 0x8a, 0xb3, 0x28, 0xae, 0x46, 0x22, 0x62, 0x34, 0x17, 0x84, 0xa5, 0x5a,
|
||||
0xe6, 0xdf, 0x07, 0xf7, 0x1b, 0xb2, 0xa0, 0xf1, 0x33, 0x12, 0x65, 0x08, 0x41, 0x33, 0x21, 0x8c,
|
||||
0x7a, 0xce, 0xd0, 0x39, 0x71, 0xb1, 0xfa, 0x8d, 0xf6, 0xa1, 0xf5, 0x82, 0xc4, 0x05, 0xf5, 0x76,
|
||||
0x14, 0xa8, 0x17, 0xfe, 0x21, 0xb4, 0x2e, 0x48, 0xb1, 0xba, 0xf5, 0x58, 0x6a, 0x1c, 0xfb, 0xf8,
|
||||
0x7b, 0xe8, 0x3c, 0xe0, 0x45, 0x22, 0x68, 0x56, 0x4f, 0x40, 0x5f, 0x40, 0x97, 0xfe, 0x44, 0x59,
|
||||
0x1a, 0x93, 0x4c, 0x0d, 0xee, 0x7d, 0x72, 0x14, 0xd4, 0x05, 0x08, 0xc6, 0x86, 0x85, 0x4b, 0xbe,
|
||||
0xff, 0x25, 0x74, 0xbf, 0x2d, 0x48, 0x22, 0xa2, 0x98, 0xa2, 0x03, 0xe8, 0xfe, 0x68, 0x7e, 0x9b,
|
||||
0x0d, 0xca, 0xf5, 0xa6, 0xf3, 0xd2, 0xda, 0x2f, 0x0e, 0x74, 0xa6, 0x05, 0x63, 0x24, 0xbb, 0x46,
|
||||
0xef, 0xc1, 0x6e, 0x4e, 0x58, 0x1a, 0xd3, 0x70, 0x29, 0xdd, 0xaa, 0x09, 0x4d, 0xdc, 0xd3, 0x98,
|
||||
0x0a, 0x80, 0x0e, 0x01, 0x0c, 0x25, 0x2f, 0x98, 0x99, 0xe4, 0x6a, 0x64, 0x5a, 0x30, 0x99, 0xa3,
|
||||
0xdc, 0xbf, 0x31, 0x6c, 0x6c, 0xcf, 0x61, 0x1d, 0x57, 0xfe, 0xfc, 0x63, 0xe8, 0xcc, 0x13, 0x71,
|
||||
0x9d, 0xd2, 0xcb, 0x2d, 0xa7, 0xf8, 0x77, 0x13, 0xdc, 0xc7, 0x51, 0x2e, 0xf8, 0x2a, 0x23, 0xec,
|
||||
0x75, 0xcc, 0x7e, 0x04, 0xe8, 0x36, 0x25, 0xbc, 0x8a, 0x39, 0x11, 0x5e, 0x53, 0xcd, 0x1c, 0xdc,
|
||||
0x22, 0x3e, 0x92, 0xf8, 0x7f, 0x45, 0x3b, 0x85, 0xf6, 0xa2, 0x58, 0xfe, 0x40, 0x85, 0x09, 0xf6,
|
||||
0x6e, 0x7d, 0xb0, 0x73, 0xc5, 0xc1, 0x86, 0x8b, 0xee, 0x41, 0x3b, 0x5f, 0xae, 0x29, 0x23, 0x5e,
|
||||
0x6b, 0xe8, 0x9c, 0xdc, 0xc5, 0x66, 0x85, 0xde, 0x87, 0xbd, 0x9f, 0x69, 0xc6, 0x43, 0xb1, 0xce,
|
||||
0x68, 0xbe, 0xe6, 0xf1, 0xa5, 0xd7, 0x56, 0x1b, 0xf6, 0x25, 0x3a, 0xb3, 0xa0, 0xf4, 0xa4, 0x68,
|
||||
0x3a, 0x62, 0x47, 0x45, 0x74, 0x25, 0xa2, 0x03, 0x9e, 0xc0, 0xa0, 0x7a, 0x6c, 0xe2, 0x75, 0xd5,
|
||||
0x9c, 0xbd, 0x92, 0xa4, 0xc3, 0x8d, 0xa1, 0x9f, 0xd0, 0x15, 0x11, 0xd1, 0x0b, 0x1a, 0xe6, 0x29,
|
||||
0x49, 0x3c, 0x57, 0x85, 0x18, 0xbe, 0x2a, 0xc4, 0x34, 0x25, 0x09, 0xde, 0xb5, 0x32, 0xb9, 0x92,
|
||||
0xb6, 0xcb, 0x31, 0x97, 0x34, 0x16, 0xc4, 0x83, 0x61, 0xe3, 0x04, 0xe1, 0x72, 0xf8, 0x43, 0x09,
|
||||
0x6e, 0xd0, 0xb4, 0xf5, 0xde, 0xb0, 0x21, 0xd3, 0x59, 0x54, 0xdb, 0x1f, 0x43, 0x3f, 0xe5, 0x79,
|
||||
0x54, 0x99, 0xda, 0x7d, 0x5d, 0x53, 0x56, 0x66, 0x4d, 0x95, 0x63, 0xb4, 0xa9, 0xbe, 0x36, 0x65,
|
||||
0xd1, 0xd2, 0x54, 0x49, 0xd3, 0xa6, 0xf6, 0xb4, 0x29, 0x8b, 0x2a, 0x53, 0xfe, 0xef, 0x0e, 0xb4,
|
||||
0xf5, 0x56, 0xe8, 0x03, 0x18, 0x2c, 0x0b, 0x56, 0xc4, 0xb7, 0x83, 0xe8, 0x6b, 0x76, 0xa7, 0xc2,
|
||||
0x75, 0x94, 0x53, 0xb8, 0xf7, 0x32, 0x75, 0xe3, 0xba, 0xed, 0xbf, 0x24, 0xd0, 0x6f, 0xe5, 0x18,
|
||||
0x7a, 0x45, 0x9a, 0xd2, 0x2c, 0x5c, 0xf0, 0x22, 0xb9, 0x34, 0x77, 0x0e, 0x14, 0x74, 0x2e, 0x91,
|
||||
0x8d, 0x5e, 0x68, 0xfc, 0xef, 0x5e, 0x80, 0xea, 0xc8, 0xe4, 0x45, 0xe4, 0x57, 0x57, 0x39, 0xd5,
|
||||
0x09, 0xee, 0x62, 0xb3, 0x92, 0x78, 0x4c, 0x93, 0x95, 0x58, 0xab, 0xdd, 0xfb, 0xd8, 0xac, 0xfc,
|
||||
0x5f, 0x1d, 0xe8, 0xda, 0xa1, 0xe8, 0x3e, 0xb4, 0x62, 0xd9, 0x8a, 0x9e, 0xa3, 0x5e, 0xd0, 0x71,
|
||||
0xbd, 0x87, 0xb2, 0x38, 0xb1, 0x66, 0xd7, 0x37, 0x0e, 0xfa, 0x1c, 0xdc, 0xb2, 0x75, 0x4d, 0xa8,
|
||||
0x83, 0x40, 0xf7, 0x72, 0x60, 0x7b, 0x39, 0x98, 0x59, 0x06, 0xae, 0xc8, 0xfe, 0x3f, 0x3b, 0xd0,
|
||||
0x9e, 0xa8, 0x96, 0x7f, 0x53, 0x47, 0x1f, 0x43, 0x6b, 0x25, 0x7b, 0xda, 0x94, 0xec, 0x3b, 0xf5,
|
||||
0x32, 0x55, 0xe5, 0x58, 0x33, 0xd1, 0x67, 0xd0, 0x59, 0xea, 0xee, 0x36, 0x66, 0x0f, 0xeb, 0x45,
|
||||
0xa6, 0xe0, 0xb1, 0x65, 0x4b, 0x61, 0xae, 0x8b, 0x55, 0xdd, 0x81, 0xad, 0x42, 0xd3, 0xbe, 0xd8,
|
||||
0xb2, 0xa5, 0xb0, 0xd0, 0x45, 0xa8, 0x4a, 0x63, 0xab, 0xd0, 0xb4, 0x25, 0xb6, 0x6c, 0xf4, 0x15,
|
||||
0xb8, 0x6b, 0xdb, 0x8f, 0xaa, 0x2c, 0xb6, 0x1e, 0x4c, 0x59, 0xa3, 0xb8, 0x52, 0xc8, 0x46, 0x2d,
|
||||
0xcf, 0x3a, 0x64, 0xb9, 0x6a, 0xa4, 0x06, 0xee, 0x95, 0xd8, 0x24, 0xf7, 0x7f, 0x73, 0x60, 0x57,
|
||||
0xbf, 0x81, 0x47, 0x84, 0x45, 0xf1, 0x75, 0xed, 0x27, 0x12, 0x41, 0x73, 0x4d, 0xe3, 0xd4, 0x7c,
|
||||
0x21, 0xd5, 0x6f, 0x74, 0x0a, 0x4d, 0xe9, 0x51, 0x1d, 0xe1, 0xde, 0xb6, 0x7f, 0xb8, 0x9e, 0x3c,
|
||||
0xbb, 0x4e, 0x29, 0x56, 0x6c, 0xd9, 0xb9, 0xfa, 0xab, 0xee, 0x35, 0x5f, 0xd5, 0xb9, 0x5a, 0x87,
|
||||
0x0d, 0xf7, 0xc3, 0x05, 0x40, 0x35, 0x09, 0xf5, 0xa0, 0xf3, 0xe0, 0xe9, 0xfc, 0xc9, 0x6c, 0x8c,
|
||||
0x07, 0x6f, 0x21, 0x17, 0x5a, 0x17, 0x67, 0xf3, 0x8b, 0xf1, 0xc0, 0x91, 0xf8, 0x74, 0x3e, 0x99,
|
||||
0x9c, 0xe1, 0xe7, 0x83, 0x1d, 0xb9, 0x98, 0x3f, 0x99, 0x3d, 0x7f, 0x36, 0x7e, 0x38, 0x68, 0xa0,
|
||||
0x3e, 0xb8, 0x8f, 0xbf, 0x9e, 0xce, 0x9e, 0x5e, 0xe0, 0xb3, 0xc9, 0xa0, 0x89, 0xde, 0x86, 0x3b,
|
||||
0x4a, 0x13, 0x56, 0x60, 0xeb, 0xdc, 0xff, 0xe3, 0xe6, 0xc8, 0xf9, 0xf3, 0xe6, 0xc8, 0xf9, 0xeb,
|
||||
0xe6, 0xc8, 0xf9, 0x6e, 0x3f, 0xe2, 0x61, 0x65, 0x2b, 0xd4, 0xb6, 0x16, 0x6d, 0x75, 0x9b, 0x3f,
|
||||
0xfd, 0x37, 0x00, 0x00, 0xff, 0xff, 0xf0, 0xbc, 0x25, 0x8b, 0xaf, 0x08, 0x00, 0x00,
|
||||
// 923 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xdd, 0x8e, 0xdb, 0x44,
|
||||
0x18, 0xad, 0x1b, 0xe7, 0xc7, 0x5f, 0x36, 0xdb, 0x74, 0x88, 0x2a, 0x6b, 0x61, 0x37, 0xc1, 0x12,
|
||||
0xd2, 0x82, 0x50, 0x22, 0xa0, 0x08, 0x54, 0x40, 0x62, 0xb7, 0xdd, 0x6e, 0x51, 0x49, 0x5b, 0x26,
|
||||
0xc9, 0x45, 0xe1, 0xc2, 0x9a, 0x64, 0x67, 0x1d, 0x0b, 0xdb, 0x63, 0xec, 0x71, 0xc5, 0x72, 0xcf,
|
||||
0x25, 0xd7, 0xbc, 0x02, 0x4f, 0x82, 0x7a, 0xc9, 0x13, 0x20, 0xb4, 0xef, 0xc0, 0x3d, 0x9a, 0x3f,
|
||||
0x3b, 0x5b, 0x39, 0x85, 0x15, 0x77, 0x33, 0xc7, 0xe7, 0xfb, 0xe6, 0x9c, 0x99, 0xc9, 0x99, 0x80,
|
||||
0x17, 0xb2, 0x49, 0x9a, 0xb1, 0x98, 0xf2, 0x35, 0x2d, 0xf2, 0xc9, 0x2a, 0x0a, 0x69, 0xc2, 0x27,
|
||||
0x31, 0xe5, 0x59, 0xb8, 0xca, 0xc7, 0x69, 0xc6, 0x38, 0x43, 0x83, 0x90, 0x8d, 0x2b, 0xce, 0x58,
|
||||
0x71, 0xf6, 0x06, 0x01, 0x0b, 0x98, 0x24, 0x4c, 0xc4, 0x48, 0x71, 0xf7, 0x86, 0x01, 0x63, 0x41,
|
||||
0x44, 0x27, 0x72, 0xb6, 0x2c, 0xce, 0x27, 0x3c, 0x8c, 0x69, 0xce, 0x49, 0x9c, 0x2a, 0x82, 0xf7,
|
||||
0x31, 0x38, 0x5f, 0x93, 0x25, 0x8d, 0x9e, 0x91, 0x30, 0x43, 0x08, 0xec, 0x84, 0xc4, 0xd4, 0xb5,
|
||||
0x46, 0xd6, 0xa1, 0x83, 0xe5, 0x18, 0x0d, 0xa0, 0xf9, 0x82, 0x44, 0x05, 0x75, 0x6f, 0x4a, 0x50,
|
||||
0x4d, 0xbc, 0x7d, 0x68, 0x9e, 0x92, 0x22, 0xd8, 0xf8, 0x2c, 0x6a, 0x2c, 0xf3, 0xf9, 0x3b, 0x68,
|
||||
0xdf, 0x67, 0x45, 0xc2, 0x69, 0x56, 0x4f, 0x40, 0xf7, 0xa0, 0x43, 0x7f, 0xa4, 0x71, 0x1a, 0x91,
|
||||
0x4c, 0x36, 0xee, 0x7e, 0x78, 0x30, 0xae, 0xb3, 0x35, 0x3e, 0xd1, 0x2c, 0x5c, 0xf2, 0xbd, 0xcf,
|
||||
0xa1, 0xf3, 0x4d, 0x41, 0x12, 0x1e, 0x46, 0x14, 0xed, 0x41, 0xe7, 0x07, 0x3d, 0xd6, 0x0b, 0x94,
|
||||
0xf3, 0xab, 0xca, 0x4b, 0x69, 0xbf, 0x58, 0xd0, 0x9e, 0x15, 0x71, 0x4c, 0xb2, 0x0b, 0xf4, 0x36,
|
||||
0xec, 0xe4, 0x24, 0x4e, 0x23, 0xea, 0xaf, 0x84, 0x5a, 0xd9, 0xc1, 0xc6, 0x5d, 0x85, 0x49, 0x03,
|
||||
0x68, 0x1f, 0x40, 0x53, 0xf2, 0x22, 0xd6, 0x9d, 0x1c, 0x85, 0xcc, 0x8a, 0x18, 0x7d, 0xb9, 0xb1,
|
||||
0x7e, 0x63, 0xd4, 0xd8, 0xee, 0xc3, 0x28, 0x3e, 0xb6, 0x5f, 0xfe, 0x39, 0xbc, 0x51, 0xa9, 0xf4,
|
||||
0x86, 0xd0, 0x5e, 0x24, 0xfc, 0x22, 0xa5, 0x67, 0x5b, 0xf6, 0xf2, 0x6f, 0x1b, 0x9c, 0x47, 0x61,
|
||||
0xce, 0x59, 0x90, 0x91, 0xf8, 0xbf, 0x48, 0x7e, 0x1f, 0xd0, 0x26, 0xc5, 0x3f, 0x8f, 0x18, 0xe1,
|
||||
0xae, 0x2d, 0x7b, 0xf6, 0x37, 0x88, 0x0f, 0x05, 0xfe, 0x6f, 0x06, 0xef, 0x41, 0x6b, 0x59, 0xac,
|
||||
0xbe, 0xa7, 0x5c, 0xdb, 0x7b, 0xab, 0xde, 0xde, 0xb1, 0xe4, 0x68, 0x73, 0xba, 0x02, 0xdd, 0x81,
|
||||
0x56, 0xbe, 0x5a, 0xd3, 0x98, 0xb8, 0xcd, 0x91, 0x75, 0x78, 0x1b, 0xeb, 0x19, 0x7a, 0x07, 0x76,
|
||||
0x7f, 0xa2, 0x19, 0xf3, 0xf9, 0x3a, 0xa3, 0xf9, 0x9a, 0x45, 0x67, 0x6e, 0x4b, 0x2e, 0xdb, 0x13,
|
||||
0xe8, 0xdc, 0x80, 0x42, 0x99, 0xa4, 0x29, 0xa3, 0x6d, 0x69, 0xd4, 0x11, 0x88, 0xb2, 0x79, 0x08,
|
||||
0xfd, 0xea, 0xb3, 0x36, 0xd9, 0x91, 0x7d, 0x76, 0x4b, 0x92, 0xb2, 0xf8, 0x18, 0x7a, 0x09, 0x0d,
|
||||
0x08, 0x0f, 0x5f, 0x50, 0x3f, 0x4f, 0x49, 0xe2, 0x3a, 0xd2, 0xca, 0xe8, 0x75, 0x56, 0x66, 0x29,
|
||||
0x49, 0xb4, 0x9d, 0x1d, 0x53, 0x2c, 0x30, 0x21, 0xbe, 0x6c, 0x76, 0x46, 0x23, 0x4e, 0x5c, 0x18,
|
||||
0x35, 0x0e, 0x11, 0x2e, 0x97, 0x78, 0x20, 0xc0, 0x2b, 0x34, 0x65, 0xa0, 0x3b, 0x6a, 0x08, 0x8f,
|
||||
0x06, 0x55, 0x26, 0x1e, 0x43, 0x2f, 0x65, 0x79, 0x58, 0x49, 0xdb, 0xb9, 0x9e, 0x34, 0x53, 0x6c,
|
||||
0xa4, 0x95, 0xcd, 0x94, 0xb4, 0x9e, 0x92, 0x66, 0xd0, 0x52, 0x5a, 0x49, 0x53, 0xd2, 0x76, 0x95,
|
||||
0x34, 0x83, 0x4a, 0x69, 0xde, 0xef, 0x16, 0xb4, 0xd4, 0x82, 0xe8, 0x5d, 0xe8, 0xaf, 0x8a, 0xb8,
|
||||
0x88, 0x36, 0xed, 0xa8, 0x8b, 0x77, 0xab, 0xc2, 0x95, 0xa1, 0xbb, 0x70, 0xe7, 0x55, 0xea, 0x95,
|
||||
0x0b, 0x38, 0x78, 0xa5, 0x40, 0x9d, 0xd0, 0x10, 0xba, 0x45, 0x9a, 0xd2, 0xcc, 0x5f, 0xb2, 0x22,
|
||||
0x39, 0xd3, 0xb7, 0x10, 0x24, 0x74, 0x2c, 0x90, 0x2b, 0x79, 0xd1, 0xb8, 0x76, 0x5e, 0x40, 0xb5,
|
||||
0x71, 0xe2, 0x52, 0xb2, 0xf3, 0xf3, 0x9c, 0x2a, 0x07, 0xb7, 0xb1, 0x9e, 0x09, 0x3c, 0xa2, 0x49,
|
||||
0xc0, 0xd7, 0x72, 0xf5, 0x1e, 0xd6, 0x33, 0xef, 0x57, 0x0b, 0x3a, 0xa6, 0x29, 0xfa, 0x0c, 0x9a,
|
||||
0x91, 0x48, 0x4b, 0xd7, 0x92, 0xc7, 0x34, 0xac, 0xd7, 0x50, 0x06, 0xaa, 0x3e, 0x25, 0x55, 0x53,
|
||||
0x9f, 0x47, 0xe8, 0x53, 0x70, 0xca, 0x4c, 0xd6, 0xd6, 0xf6, 0xc6, 0x2a, 0xb5, 0xc7, 0x26, 0xb5,
|
||||
0xc7, 0x73, 0xc3, 0xc0, 0x15, 0xd9, 0xfb, 0xb9, 0x01, 0xad, 0xa9, 0x7c, 0x19, 0xfe, 0x9f, 0xae,
|
||||
0x0f, 0xa0, 0x19, 0x88, 0x2c, 0xd7, 0x41, 0xfc, 0x66, 0x7d, 0xb1, 0x8c, 0x7b, 0xac, 0x98, 0xe8,
|
||||
0x13, 0x68, 0xaf, 0x54, 0xbe, 0x6b, 0xc9, 0xfb, 0xf5, 0x45, 0xfa, 0x11, 0xc0, 0x86, 0x2d, 0x0a,
|
||||
0x73, 0x15, 0xbe, 0xf2, 0x3e, 0x6c, 0x2d, 0xd4, 0x09, 0x8d, 0x0d, 0x5b, 0x14, 0x16, 0x2a, 0x26,
|
||||
0x65, 0x98, 0x6c, 0x2d, 0xd4, 0x59, 0x8a, 0x0d, 0x1b, 0x7d, 0x01, 0xce, 0xda, 0xa4, 0xa7, 0x0c,
|
||||
0x91, 0xad, 0xdb, 0x53, 0x86, 0x2c, 0xae, 0x2a, 0x44, 0xde, 0x96, 0x3b, 0xee, 0xc7, 0xb9, 0x4c,
|
||||
0xaa, 0x06, 0xee, 0x96, 0xd8, 0x34, 0xf7, 0x7e, 0xb3, 0x60, 0x47, 0x9d, 0xc3, 0x43, 0x12, 0x87,
|
||||
0xd1, 0x45, 0xed, 0x33, 0x8a, 0xc0, 0x5e, 0xd3, 0x28, 0xd5, 0xaf, 0xa8, 0x1c, 0xa3, 0xbb, 0x60,
|
||||
0x0b, 0x8d, 0x72, 0x0b, 0x77, 0xb7, 0xfd, 0xe6, 0x55, 0xe7, 0xf9, 0x45, 0x4a, 0xb1, 0x64, 0x8b,
|
||||
0x44, 0x56, 0xff, 0x07, 0x5c, 0xfb, 0x75, 0x89, 0xac, 0xea, 0x4c, 0x22, 0xab, 0x8a, 0xf7, 0x96,
|
||||
0x00, 0x55, 0x3f, 0xd4, 0x85, 0xf6, 0xfd, 0xa7, 0x8b, 0x27, 0xf3, 0x13, 0xdc, 0xbf, 0x81, 0x1c,
|
||||
0x68, 0x9e, 0x1e, 0x2d, 0x4e, 0x4f, 0xfa, 0x96, 0xc0, 0x67, 0x8b, 0xe9, 0xf4, 0x08, 0x3f, 0xef,
|
||||
0xdf, 0x14, 0x93, 0xc5, 0x93, 0xf9, 0xf3, 0x67, 0x27, 0x0f, 0xfa, 0x0d, 0xd4, 0x03, 0xe7, 0xd1,
|
||||
0x57, 0xb3, 0xf9, 0xd3, 0x53, 0x7c, 0x34, 0xed, 0xdb, 0xe8, 0x0d, 0xb8, 0x25, 0x6b, 0xfc, 0x0a,
|
||||
0x6c, 0x1e, 0x7b, 0x2f, 0x2f, 0x0f, 0xac, 0x3f, 0x2e, 0x0f, 0xac, 0xbf, 0x2e, 0x0f, 0xac, 0x6f,
|
||||
0x07, 0x21, 0xf3, 0x2b, 0x71, 0xbe, 0x12, 0xb7, 0x6c, 0xc9, 0x9b, 0xfd, 0xd1, 0x3f, 0x01, 0x00,
|
||||
0x00, 0xff, 0xff, 0x52, 0x2d, 0xb5, 0x31, 0xef, 0x08, 0x00, 0x00,
|
||||
}
|
||||
|
||||
func (m *LabelPair) Marshal() (dAtA []byte, err error) {
|
||||
|
@ -2496,7 +2499,7 @@ func (m *Summary) Unmarshal(dAtA []byte) error {
|
|||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.Quantile = append(m.Quantile, &Quantile{})
|
||||
m.Quantile = append(m.Quantile, Quantile{})
|
||||
if err := m.Quantile[len(m.Quantile)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -2673,7 +2676,7 @@ func (m *Histogram) Unmarshal(dAtA []byte) error {
|
|||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.Bucket = append(m.Bucket, &Bucket{})
|
||||
m.Bucket = append(m.Bucket, Bucket{})
|
||||
if err := m.Bucket[len(m.Bucket)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -2780,7 +2783,7 @@ func (m *Histogram) Unmarshal(dAtA []byte) error {
|
|||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.NegativeSpan = append(m.NegativeSpan, &BucketSpan{})
|
||||
m.NegativeSpan = append(m.NegativeSpan, BucketSpan{})
|
||||
if err := m.NegativeSpan[len(m.NegativeSpan)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -2946,7 +2949,7 @@ func (m *Histogram) Unmarshal(dAtA []byte) error {
|
|||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.PositiveSpan = append(m.PositiveSpan, &BucketSpan{})
|
||||
m.PositiveSpan = append(m.PositiveSpan, BucketSpan{})
|
||||
if err := m.PositiveSpan[len(m.PositiveSpan)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -3382,7 +3385,7 @@ func (m *Exemplar) Unmarshal(dAtA []byte) error {
|
|||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.Label = append(m.Label, &LabelPair{})
|
||||
m.Label = append(m.Label, LabelPair{})
|
||||
if err := m.Label[len(m.Label)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -3514,7 +3517,7 @@ func (m *Metric) Unmarshal(dAtA []byte) error {
|
|||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.Label = append(m.Label, &LabelPair{})
|
||||
m.Label = append(m.Label, LabelPair{})
|
||||
if err := m.Label[len(m.Label)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -3881,7 +3884,7 @@ func (m *MetricFamily) Unmarshal(dAtA []byte) error {
|
|||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.Metric = append(m.Metric, &Metric{})
|
||||
m.Metric = append(m.Metric, Metric{})
|
||||
if err := m.Metric[len(m.Metric)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
@ -21,6 +21,7 @@ syntax = "proto3";
|
|||
package io.prometheus.client;
|
||||
option go_package = "io_prometheus_client";
|
||||
|
||||
import "gogoproto/gogo.proto";
|
||||
import "google/protobuf/timestamp.proto";
|
||||
|
||||
message LabelPair {
|
||||
|
@ -58,9 +59,9 @@ message Quantile {
|
|||
}
|
||||
|
||||
message Summary {
|
||||
uint64 sample_count = 1;
|
||||
double sample_sum = 2;
|
||||
repeated Quantile quantile = 3;
|
||||
uint64 sample_count = 1;
|
||||
double sample_sum = 2;
|
||||
repeated Quantile quantile = 3 [(gogoproto.nullable) = false];
|
||||
}
|
||||
|
||||
message Untyped {
|
||||
|
@ -72,7 +73,7 @@ message Histogram {
|
|||
double sample_count_float = 4; // Overrides sample_count if > 0.
|
||||
double sample_sum = 2;
|
||||
// Buckets for the conventional histogram.
|
||||
repeated Bucket bucket = 3; // Ordered in increasing order of upper_bound, +Inf bucket is optional.
|
||||
repeated Bucket bucket = 3 [(gogoproto.nullable) = false]; // Ordered in increasing order of upper_bound, +Inf bucket is optional.
|
||||
|
||||
// Everything below here is for native histograms (also known as sparse histograms).
|
||||
// Native histograms are an experimental feature without stability guarantees.
|
||||
|
@ -88,20 +89,20 @@ message Histogram {
|
|||
double zero_count_float = 8; // Overrides sb_zero_count if > 0.
|
||||
|
||||
// Negative buckets for the native histogram.
|
||||
repeated BucketSpan negative_span = 9;
|
||||
repeated BucketSpan negative_span = 9 [(gogoproto.nullable) = false];
|
||||
// Use either "negative_delta" or "negative_count", the former for
|
||||
// regular histograms with integer counts, the latter for float
|
||||
// histograms.
|
||||
repeated sint64 negative_delta = 10; // Count delta of each bucket compared to previous one (or to zero for 1st bucket).
|
||||
repeated double negative_count = 11; // Absolute count of each bucket.
|
||||
repeated sint64 negative_delta = 10; // Count delta of each bucket compared to previous one (or to zero for 1st bucket).
|
||||
repeated double negative_count = 11; // Absolute count of each bucket.
|
||||
|
||||
// Positive buckets for the native histogram.
|
||||
repeated BucketSpan positive_span = 12;
|
||||
repeated BucketSpan positive_span = 12 [(gogoproto.nullable) = false];
|
||||
// Use either "positive_delta" or "positive_count", the former for
|
||||
// regular histograms with integer counts, the latter for float
|
||||
// histograms.
|
||||
repeated sint64 positive_delta = 13; // Count delta of each bucket compared to previous one (or to zero for 1st bucket).
|
||||
repeated double positive_count = 14; // Absolute count of each bucket.
|
||||
repeated sint64 positive_delta = 13; // Count delta of each bucket compared to previous one (or to zero for 1st bucket).
|
||||
repeated double positive_count = 14; // Absolute count of each bucket.
|
||||
}
|
||||
|
||||
message Bucket {
|
||||
|
@ -123,24 +124,24 @@ message BucketSpan {
|
|||
}
|
||||
|
||||
message Exemplar {
|
||||
repeated LabelPair label = 1;
|
||||
repeated LabelPair label = 1 [(gogoproto.nullable) = false];
|
||||
double value = 2;
|
||||
google.protobuf.Timestamp timestamp = 3; // OpenMetrics-style.
|
||||
}
|
||||
|
||||
message Metric {
|
||||
repeated LabelPair label = 1;
|
||||
Gauge gauge = 2;
|
||||
Counter counter = 3;
|
||||
Summary summary = 4;
|
||||
Untyped untyped = 5;
|
||||
Histogram histogram = 7;
|
||||
int64 timestamp_ms = 6;
|
||||
repeated LabelPair label = 1 [(gogoproto.nullable) = false];
|
||||
Gauge gauge = 2;
|
||||
Counter counter = 3;
|
||||
Summary summary = 4;
|
||||
Untyped untyped = 5;
|
||||
Histogram histogram = 7;
|
||||
int64 timestamp_ms = 6;
|
||||
}
|
||||
|
||||
message MetricFamily {
|
||||
string name = 1;
|
||||
string help = 2;
|
||||
MetricType type = 3;
|
||||
repeated Metric metric = 4;
|
||||
repeated Metric metric = 4 [(gogoproto.nullable) = false];
|
||||
}
|
||||
|
|
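The `[(gogoproto.nullable) = false]` annotations above make the generated repeated fields value slices (`[]Quantile`, `[]Bucket`, `[]BucketSpan`, `[]LabelPair`, `[]Metric`) instead of pointer slices. A minimal, hypothetical sketch of what that means for callers; the dto alias, the exact import path, and the numbers are illustrative assumptions based on the repository layout shown above.

// Hypothetical sketch of the effect of (gogoproto.nullable) = false:
// repeated message fields are value slices, so no per-element pointers.
package main

import (
	"fmt"

	dto "github.com/prometheus/prometheus/prompb/io/prometheus/client" // assumed import path for the generated code above
)

func main() {
	s := dto.Summary{
		SampleCount: 3,
		SampleSum:   1.5,
		Quantile: []dto.Quantile{ // value slice; previously []*Quantile
			// field names follow the quantile/value fields of the Quantile message
			{Quantile: 0.5, Value: 0.4},
			{Quantile: 0.9, Value: 0.7},
		},
	}
	fmt.Println(s.GetQuantile()) // now returns []dto.Quantile
}

The generated getters change accordingly, as the GetQuantile, GetBucket, GetNegativeSpan, GetLabel, and GetMetric signatures in this diff show.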
|
@ -383,14 +383,14 @@ type Histogram struct {
|
|||
// *Histogram_ZeroCountFloat
|
||||
ZeroCount isHistogram_ZeroCount `protobuf_oneof:"zero_count"`
|
||||
// Negative Buckets.
|
||||
NegativeSpans []*BucketSpan `protobuf:"bytes,8,rep,name=negative_spans,json=negativeSpans,proto3" json:"negative_spans,omitempty"`
|
||||
NegativeSpans []BucketSpan `protobuf:"bytes,8,rep,name=negative_spans,json=negativeSpans,proto3" json:"negative_spans"`
|
||||
// Use either "negative_deltas" or "negative_counts", the former for
|
||||
// regular histograms with integer counts, the latter for float
|
||||
// histograms.
|
||||
NegativeDeltas []int64 `protobuf:"zigzag64,9,rep,packed,name=negative_deltas,json=negativeDeltas,proto3" json:"negative_deltas,omitempty"`
|
||||
NegativeCounts []float64 `protobuf:"fixed64,10,rep,packed,name=negative_counts,json=negativeCounts,proto3" json:"negative_counts,omitempty"`
|
||||
// Positive Buckets.
|
||||
PositiveSpans []*BucketSpan `protobuf:"bytes,11,rep,name=positive_spans,json=positiveSpans,proto3" json:"positive_spans,omitempty"`
|
||||
PositiveSpans []BucketSpan `protobuf:"bytes,11,rep,name=positive_spans,json=positiveSpans,proto3" json:"positive_spans"`
|
||||
// Use either "positive_deltas" or "positive_counts", the former for
|
||||
// regular histograms with integer counts, the latter for float
|
||||
// histograms.
|
||||
|
@ -529,7 +529,7 @@ func (m *Histogram) GetZeroCountFloat() float64 {
|
|||
return 0
|
||||
}
|
||||
|
||||
func (m *Histogram) GetNegativeSpans() []*BucketSpan {
|
||||
func (m *Histogram) GetNegativeSpans() []BucketSpan {
|
||||
if m != nil {
|
||||
return m.NegativeSpans
|
||||
}
|
||||
|
@ -550,7 +550,7 @@ func (m *Histogram) GetNegativeCounts() []float64 {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (m *Histogram) GetPositiveSpans() []*BucketSpan {
|
||||
func (m *Histogram) GetPositiveSpans() []BucketSpan {
|
||||
if m != nil {
|
||||
return m.PositiveSpans
|
||||
}
|
||||
|
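To make the effect of the nullable change concrete for callers: with (gogoproto.nullable) = false the span fields on the generated Histogram become value slices, so a remote-write histogram can be built without taking the address of each BucketSpan. The following is a hedged sketch against the regenerated prompb package; the field values are placeholders chosen for illustration.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/prompb"
)

func main() {
	h := prompb.Histogram{
		Schema:        0,
		ZeroThreshold: 0.001,
		ZeroCount:     &prompb.Histogram_ZeroCountInt{ZeroCountInt: 2},
		// Value-typed spans: no &prompb.BucketSpan{...} indirection needed.
		PositiveSpans:  []prompb.BucketSpan{{Offset: 0, Length: 2}},
		PositiveDeltas: []int64{3, -1}, // integer histogram: deltas, not absolute counts
		Timestamp:      1_600_000_000_000,
	}
	fmt.Println(len(h.PositiveSpans))
}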
@ -1143,75 +1143,75 @@ func init() {
|
|||
func init() { proto.RegisterFile("types.proto", fileDescriptor_d938547f84707355) }
|
||||
|
||||
var fileDescriptor_d938547f84707355 = []byte{
|
||||
// 1075 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xdd, 0x8e, 0xdb, 0x44,
|
||||
0x14, 0x5e, 0xdb, 0x89, 0x13, 0x9f, 0xfc, 0xd4, 0x3b, 0xda, 0x16, 0x53, 0xd1, 0x6d, 0xb0, 0x54,
|
||||
0x88, 0x10, 0xca, 0xaa, 0x85, 0x0b, 0x2a, 0x0a, 0xd2, 0x6e, 0xc9, 0xfe, 0x88, 0x26, 0x51, 0x27,
|
||||
0x59, 0x41, 0xb9, 0x89, 0x66, 0x93, 0xd9, 0xc4, 0xaa, 0xff, 0xf0, 0x4c, 0xaa, 0x0d, 0xef, 0xc1,
|
||||
0x1d, 0x2f, 0xc1, 0x3d, 0x12, 0xb7, 0xbd, 0xe4, 0x09, 0x10, 0xda, 0x2b, 0x1e, 0x03, 0xcd, 0xb1,
|
||||
0x1d, 0x3b, 0xdd, 0x82, 0x54, 0xee, 0xe6, 0x7c, 0xe7, 0x3b, 0x33, 0x9f, 0xe7, 0xfc, 0x8c, 0xa1,
|
||||
0x21, 0xd7, 0x31, 0x17, 0xbd, 0x38, 0x89, 0x64, 0x44, 0x20, 0x4e, 0xa2, 0x80, 0xcb, 0x25, 0x5f,
|
||||
0x89, 0xbb, 0x7b, 0x8b, 0x68, 0x11, 0x21, 0x7c, 0xa0, 0x56, 0x29, 0xc3, 0xfd, 0x45, 0x87, 0xf6,
|
||||
0x80, 0xcb, 0xc4, 0x9b, 0x0d, 0xb8, 0x64, 0x73, 0x26, 0x19, 0x79, 0x0c, 0x15, 0xb5, 0x87, 0xa3,
|
||||
0x75, 0xb4, 0x6e, 0xfb, 0xd1, 0x83, 0x5e, 0xb1, 0x47, 0x6f, 0x9b, 0x99, 0x99, 0x93, 0x75, 0xcc,
|
||||
0x29, 0x86, 0x90, 0x4f, 0x81, 0x04, 0x88, 0x4d, 0x2f, 0x59, 0xe0, 0xf9, 0xeb, 0x69, 0xc8, 0x02,
|
||||
0xee, 0xe8, 0x1d, 0xad, 0x6b, 0x51, 0x3b, 0xf5, 0x1c, 0xa3, 0x63, 0xc8, 0x02, 0x4e, 0x08, 0x54,
|
||||
0x96, 0xdc, 0x8f, 0x9d, 0x0a, 0xfa, 0x71, 0xad, 0xb0, 0x55, 0xe8, 0x49, 0xa7, 0x9a, 0x62, 0x6a,
|
||||
0xed, 0xae, 0x01, 0x8a, 0x93, 0x48, 0x03, 0x6a, 0xe7, 0xc3, 0x6f, 0x87, 0xa3, 0xef, 0x86, 0xf6,
|
||||
0x8e, 0x32, 0x9e, 0x8e, 0xce, 0x87, 0x93, 0x3e, 0xb5, 0x35, 0x62, 0x41, 0xf5, 0xe4, 0xf0, 0xfc,
|
||||
0xa4, 0x6f, 0xeb, 0xa4, 0x05, 0xd6, 0xe9, 0xd9, 0x78, 0x32, 0x3a, 0xa1, 0x87, 0x03, 0xdb, 0x20,
|
||||
0x04, 0xda, 0xe8, 0x29, 0xb0, 0x8a, 0x0a, 0x1d, 0x9f, 0x0f, 0x06, 0x87, 0xf4, 0x85, 0x5d, 0x25,
|
||||
0x75, 0xa8, 0x9c, 0x0d, 0x8f, 0x47, 0xb6, 0x49, 0x9a, 0x50, 0x1f, 0x4f, 0x0e, 0x27, 0xfd, 0x71,
|
||||
0x7f, 0x62, 0xd7, 0xdc, 0x27, 0x60, 0x8e, 0x59, 0x10, 0xfb, 0x9c, 0xec, 0x41, 0xf5, 0x15, 0xf3,
|
||||
0x57, 0xe9, 0xb5, 0x68, 0x34, 0x35, 0xc8, 0x07, 0x60, 0x49, 0x2f, 0xe0, 0x42, 0xb2, 0x20, 0xc6,
|
||||
0xef, 0x34, 0x68, 0x01, 0xb8, 0x11, 0xd4, 0xfb, 0x57, 0x3c, 0x88, 0x7d, 0x96, 0x90, 0x03, 0x30,
|
||||
0x7d, 0x76, 0xc1, 0x7d, 0xe1, 0x68, 0x1d, 0xa3, 0xdb, 0x78, 0xb4, 0x5b, 0xbe, 0xd7, 0x67, 0xca,
|
||||
0x73, 0x54, 0x79, 0xfd, 0xe7, 0xfd, 0x1d, 0x9a, 0xd1, 0x8a, 0x03, 0xf5, 0x7f, 0x3d, 0xd0, 0x78,
|
||||
0xf3, 0xc0, 0xdf, 0xab, 0x60, 0x9d, 0x7a, 0x42, 0x46, 0x8b, 0x84, 0x05, 0xe4, 0x1e, 0x58, 0xb3,
|
||||
0x68, 0x15, 0xca, 0xa9, 0x17, 0x4a, 0x94, 0x5d, 0x39, 0xdd, 0xa1, 0x75, 0x84, 0xce, 0x42, 0x49,
|
||||
0x3e, 0x84, 0x46, 0xea, 0xbe, 0xf4, 0x23, 0x26, 0xd3, 0x63, 0x4e, 0x77, 0x28, 0x20, 0x78, 0xac,
|
||||
0x30, 0x62, 0x83, 0x21, 0x56, 0x01, 0x9e, 0xa3, 0x51, 0xb5, 0x24, 0x77, 0xc0, 0x14, 0xb3, 0x25,
|
||||
0x0f, 0x18, 0x66, 0x6d, 0x97, 0x66, 0x16, 0x79, 0x00, 0xed, 0x9f, 0x78, 0x12, 0x4d, 0xe5, 0x32,
|
||||
0xe1, 0x62, 0x19, 0xf9, 0x73, 0xcc, 0xa0, 0x46, 0x5b, 0x0a, 0x9d, 0xe4, 0x20, 0xf9, 0x28, 0xa3,
|
||||
0x15, 0xba, 0x4c, 0xd4, 0xa5, 0xd1, 0xa6, 0xc2, 0x9f, 0xe6, 0xda, 0x3e, 0x01, 0xbb, 0xc4, 0x4b,
|
||||
0x05, 0xd6, 0x50, 0xa0, 0x46, 0xdb, 0x1b, 0x66, 0x2a, 0xf2, 0x2b, 0x68, 0x87, 0x7c, 0xc1, 0xa4,
|
||||
0xf7, 0x8a, 0x4f, 0x45, 0xcc, 0x42, 0xe1, 0xd4, 0xf1, 0x86, 0xef, 0x94, 0x6f, 0xf8, 0x68, 0x35,
|
||||
0x7b, 0xc9, 0xe5, 0x38, 0x66, 0x21, 0x6d, 0xe5, 0x6c, 0x65, 0x09, 0xf2, 0x31, 0xdc, 0xda, 0x84,
|
||||
0xcf, 0xb9, 0x2f, 0x99, 0x70, 0xac, 0x8e, 0xd1, 0x25, 0x74, 0xb3, 0xeb, 0x37, 0x88, 0x6e, 0x11,
|
||||
0x51, 0x97, 0x70, 0xa0, 0x63, 0x74, 0xb5, 0x82, 0x88, 0xa2, 0x84, 0x12, 0x14, 0x47, 0xc2, 0x2b,
|
||||
0x09, 0x6a, 0xfc, 0xb7, 0xa0, 0x9c, 0xbd, 0x11, 0xb4, 0x09, 0xcf, 0x04, 0x35, 0x53, 0x41, 0x39,
|
||||
0x5c, 0x08, 0xda, 0x10, 0x33, 0x41, 0xad, 0x54, 0x50, 0x0e, 0x67, 0x82, 0xbe, 0x06, 0x48, 0xb8,
|
||||
0xe0, 0x72, 0xba, 0x54, 0x37, 0xde, 0xc6, 0xbe, 0xbe, 0x5f, 0x16, 0xb3, 0xa9, 0x99, 0x1e, 0x55,
|
||||
0xbc, 0x53, 0x2f, 0x94, 0xd4, 0x4a, 0xf2, 0xe5, 0x76, 0xd1, 0xdd, 0x7a, 0xb3, 0xe8, 0x3e, 0x07,
|
||||
0x6b, 0x13, 0xb5, 0xdd, 0x9d, 0x35, 0x30, 0x5e, 0xf4, 0xc7, 0xb6, 0x46, 0x4c, 0xd0, 0x87, 0x23,
|
||||
0x5b, 0x2f, 0x3a, 0xd4, 0x38, 0xaa, 0x41, 0x15, 0x35, 0x1f, 0x35, 0x01, 0x8a, 0x54, 0xbb, 0x4f,
|
||||
0x00, 0x8a, 0x9b, 0x51, 0xd5, 0x16, 0x5d, 0x5e, 0x0a, 0x9e, 0x96, 0xef, 0x2e, 0xcd, 0x2c, 0x85,
|
||||
0xfb, 0x3c, 0x5c, 0xc8, 0x25, 0x56, 0x6d, 0x8b, 0x66, 0x96, 0xfb, 0xb7, 0x06, 0x30, 0xf1, 0x02,
|
||||
0x3e, 0xe6, 0x89, 0xc7, 0xc5, 0xbb, 0xf7, 0xdc, 0x23, 0xa8, 0x09, 0x6c, 0x77, 0xe1, 0xe8, 0x18,
|
||||
0x41, 0xca, 0x11, 0xe9, 0x24, 0xc8, 0x42, 0x72, 0x22, 0xf9, 0x02, 0x2c, 0x9e, 0x35, 0xb9, 0x70,
|
||||
0x0c, 0x8c, 0xda, 0x2b, 0x47, 0xe5, 0x13, 0x20, 0x8b, 0x2b, 0xc8, 0xe4, 0x4b, 0x80, 0x65, 0x7e,
|
||||
0xf1, 0xc2, 0xa9, 0x60, 0xe8, 0xed, 0xb7, 0xa6, 0x25, 0x8b, 0x2d, 0xd1, 0xdd, 0x87, 0x50, 0xc5,
|
||||
0x2f, 0x50, 0x13, 0x13, 0xa7, 0xac, 0x96, 0x4e, 0x4c, 0xb5, 0xde, 0x9e, 0x1d, 0x56, 0x36, 0x3b,
|
||||
0xdc, 0xc7, 0x60, 0x3e, 0x4b, 0xbf, 0xf3, 0x5d, 0x2f, 0xc6, 0xfd, 0x59, 0x83, 0x26, 0xe2, 0x03,
|
||||
0x26, 0x67, 0x4b, 0x9e, 0x90, 0x87, 0x5b, 0x8f, 0xc4, 0xbd, 0x1b, 0xf1, 0x19, 0xaf, 0x57, 0x7a,
|
||||
0x1c, 0x72, 0xa1, 0xfa, 0xdb, 0x84, 0x1a, 0x65, 0xa1, 0x5d, 0xa8, 0xe0, 0xa8, 0x37, 0x41, 0xef,
|
||||
0x3f, 0x4f, 0xeb, 0x68, 0xd8, 0x7f, 0x9e, 0xd6, 0x11, 0x55, 0xe3, 0x5d, 0x01, 0xb4, 0x6f, 0x1b,
|
||||
0xee, 0xaf, 0x9a, 0x2a, 0x3e, 0x36, 0x57, 0xb5, 0x27, 0xc8, 0x7b, 0x50, 0x13, 0x92, 0xc7, 0xd3,
|
||||
0x40, 0xa0, 0x2e, 0x83, 0x9a, 0xca, 0x1c, 0x08, 0x75, 0xf4, 0xe5, 0x2a, 0x9c, 0xe5, 0x47, 0xab,
|
||||
0x35, 0x79, 0x1f, 0xea, 0x42, 0xb2, 0x44, 0x2a, 0x76, 0x3a, 0x48, 0x6b, 0x68, 0x0f, 0x04, 0xb9,
|
||||
0x0d, 0x26, 0x0f, 0xe7, 0x53, 0x4c, 0x8a, 0x72, 0x54, 0x79, 0x38, 0x1f, 0x08, 0x72, 0x17, 0xea,
|
||||
0x8b, 0x24, 0x5a, 0xc5, 0x5e, 0xb8, 0x70, 0xaa, 0x1d, 0xa3, 0x6b, 0xd1, 0x8d, 0x4d, 0xda, 0xa0,
|
||||
0x5f, 0xac, 0x71, 0x98, 0xd5, 0xa9, 0x7e, 0xb1, 0x56, 0xbb, 0x27, 0x2c, 0x5c, 0x70, 0xb5, 0x49,
|
||||
0x2d, 0xdd, 0x1d, 0xed, 0x81, 0x70, 0x7f, 0xd3, 0xa0, 0xfa, 0x74, 0xb9, 0x0a, 0x5f, 0x92, 0x7d,
|
||||
0x68, 0x04, 0x5e, 0x38, 0x55, 0xad, 0x54, 0x68, 0xb6, 0x02, 0x2f, 0x54, 0x35, 0x3c, 0x10, 0xe8,
|
||||
0x67, 0x57, 0x1b, 0x7f, 0xf6, 0xbe, 0x04, 0xec, 0x2a, 0xf3, 0xf7, 0xb2, 0x24, 0x18, 0x98, 0x84,
|
||||
0xbb, 0xe5, 0x24, 0xe0, 0x01, 0xbd, 0x7e, 0x38, 0x8b, 0xe6, 0x5e, 0xb8, 0x28, 0x32, 0xa0, 0xde,
|
||||
0x6d, 0xfc, 0xaa, 0x26, 0xc5, 0xb5, 0x7b, 0x00, 0xf5, 0x9c, 0x75, 0xa3, 0x79, 0xbf, 0x1f, 0xa9,
|
||||
0x67, 0x75, 0xeb, 0x2d, 0xd5, 0xdd, 0x1f, 0xa1, 0x85, 0x9b, 0xf3, 0xf9, 0xff, 0xed, 0xb2, 0x03,
|
||||
0x30, 0x67, 0x6a, 0x87, 0xbc, 0xc9, 0x76, 0x6f, 0x08, 0xcf, 0x03, 0x52, 0xda, 0xd1, 0xde, 0xeb,
|
||||
0xeb, 0x7d, 0xed, 0x8f, 0xeb, 0x7d, 0xed, 0xaf, 0xeb, 0x7d, 0xed, 0x07, 0x53, 0xb1, 0xe3, 0x8b,
|
||||
0x0b, 0x13, 0xff, 0x60, 0x3e, 0xfb, 0x27, 0x00, 0x00, 0xff, 0xff, 0x36, 0xd7, 0x1e, 0xb4, 0xf2,
|
||||
0x08, 0x00, 0x00,
|
||||
// 1081 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xdb, 0x6e, 0xdb, 0x46,
|
||||
0x13, 0x36, 0x49, 0x89, 0x12, 0x47, 0x87, 0xd0, 0x0b, 0x27, 0x3f, 0xff, 0xa0, 0x71, 0x54, 0x02,
|
||||
0x69, 0x85, 0xa2, 0x90, 0x91, 0xb4, 0x17, 0x0d, 0x1a, 0x14, 0xb0, 0x5d, 0xf9, 0x80, 0x46, 0x12,
|
||||
0xb2, 0x92, 0xd1, 0xa6, 0x37, 0xc2, 0x5a, 0x5a, 0x4b, 0x44, 0x78, 0x2a, 0x77, 0x15, 0x58, 0x7d,
|
||||
0x8f, 0xde, 0xf5, 0x25, 0x7a, 0xdf, 0x07, 0x08, 0xd0, 0x9b, 0x3e, 0x41, 0x51, 0xf8, 0xaa, 0x8f,
|
||||
0x51, 0xec, 0x90, 0x14, 0xa9, 0x38, 0x05, 0x9a, 0xde, 0xed, 0x7c, 0xf3, 0xcd, 0xec, 0xc7, 0xdd,
|
||||
0x99, 0x59, 0x42, 0x43, 0xae, 0x63, 0x2e, 0x7a, 0x71, 0x12, 0xc9, 0x88, 0x40, 0x9c, 0x44, 0x01,
|
||||
0x97, 0x4b, 0xbe, 0x12, 0xf7, 0xf7, 0x16, 0xd1, 0x22, 0x42, 0xf8, 0x40, 0xad, 0x52, 0x86, 0xfb,
|
||||
0xb3, 0x0e, 0xed, 0x01, 0x97, 0x89, 0x37, 0x1b, 0x70, 0xc9, 0xe6, 0x4c, 0x32, 0xf2, 0x14, 0x2a,
|
||||
0x2a, 0x87, 0xa3, 0x75, 0xb4, 0x6e, 0xfb, 0xc9, 0xa3, 0x5e, 0x91, 0xa3, 0xb7, 0xcd, 0xcc, 0xcc,
|
||||
0xc9, 0x3a, 0xe6, 0x14, 0x43, 0xc8, 0xa7, 0x40, 0x02, 0xc4, 0xa6, 0x57, 0x2c, 0xf0, 0xfc, 0xf5,
|
||||
0x34, 0x64, 0x01, 0x77, 0xf4, 0x8e, 0xd6, 0xb5, 0xa8, 0x9d, 0x7a, 0x4e, 0xd0, 0x31, 0x64, 0x01,
|
||||
0x27, 0x04, 0x2a, 0x4b, 0xee, 0xc7, 0x4e, 0x05, 0xfd, 0xb8, 0x56, 0xd8, 0x2a, 0xf4, 0xa4, 0x53,
|
||||
0x4d, 0x31, 0xb5, 0x76, 0xd7, 0x00, 0xc5, 0x4e, 0xa4, 0x01, 0xb5, 0x8b, 0xe1, 0x37, 0xc3, 0xd1,
|
||||
0xb7, 0x43, 0x7b, 0x47, 0x19, 0xc7, 0xa3, 0x8b, 0xe1, 0xa4, 0x4f, 0x6d, 0x8d, 0x58, 0x50, 0x3d,
|
||||
0x3d, 0xbc, 0x38, 0xed, 0xdb, 0x3a, 0x69, 0x81, 0x75, 0x76, 0x3e, 0x9e, 0x8c, 0x4e, 0xe9, 0xe1,
|
||||
0xc0, 0x36, 0x08, 0x81, 0x36, 0x7a, 0x0a, 0xac, 0xa2, 0x42, 0xc7, 0x17, 0x83, 0xc1, 0x21, 0x7d,
|
||||
0x69, 0x57, 0x49, 0x1d, 0x2a, 0xe7, 0xc3, 0x93, 0x91, 0x6d, 0x92, 0x26, 0xd4, 0xc7, 0x93, 0xc3,
|
||||
0x49, 0x7f, 0xdc, 0x9f, 0xd8, 0x35, 0xf7, 0x19, 0x98, 0x63, 0x16, 0xc4, 0x3e, 0x27, 0x7b, 0x50,
|
||||
0x7d, 0xcd, 0xfc, 0x55, 0x7a, 0x2c, 0x1a, 0x4d, 0x0d, 0xf2, 0x01, 0x58, 0xd2, 0x0b, 0xb8, 0x90,
|
||||
0x2c, 0x88, 0xf1, 0x3b, 0x0d, 0x5a, 0x00, 0x6e, 0x04, 0xf5, 0xfe, 0x35, 0x0f, 0x62, 0x9f, 0x25,
|
||||
0xe4, 0x00, 0x4c, 0x9f, 0x5d, 0x72, 0x5f, 0x38, 0x5a, 0xc7, 0xe8, 0x36, 0x9e, 0xec, 0x96, 0xcf,
|
||||
0xf5, 0xb9, 0xf2, 0x1c, 0x55, 0xde, 0xfc, 0xf1, 0x70, 0x87, 0x66, 0xb4, 0x62, 0x43, 0xfd, 0x1f,
|
||||
0x37, 0x34, 0xde, 0xde, 0xf0, 0xb7, 0x2a, 0x58, 0x67, 0x9e, 0x90, 0xd1, 0x22, 0x61, 0x01, 0x79,
|
||||
0x00, 0xd6, 0x2c, 0x5a, 0x85, 0x72, 0xea, 0x85, 0x12, 0x65, 0x57, 0xce, 0x76, 0x68, 0x1d, 0xa1,
|
||||
0xf3, 0x50, 0x92, 0x0f, 0xa1, 0x91, 0xba, 0xaf, 0xfc, 0x88, 0xc9, 0x74, 0x9b, 0xb3, 0x1d, 0x0a,
|
||||
0x08, 0x9e, 0x28, 0x8c, 0xd8, 0x60, 0x88, 0x55, 0x80, 0xfb, 0x68, 0x54, 0x2d, 0xc9, 0x3d, 0x30,
|
||||
0xc5, 0x6c, 0xc9, 0x03, 0x86, 0xb7, 0xb6, 0x4b, 0x33, 0x8b, 0x3c, 0x82, 0xf6, 0x8f, 0x3c, 0x89,
|
||||
0xa6, 0x72, 0x99, 0x70, 0xb1, 0x8c, 0xfc, 0x39, 0xde, 0xa0, 0x46, 0x5b, 0x0a, 0x9d, 0xe4, 0x20,
|
||||
0xf9, 0x28, 0xa3, 0x15, 0xba, 0x4c, 0xd4, 0xa5, 0xd1, 0xa6, 0xc2, 0x8f, 0x73, 0x6d, 0x9f, 0x80,
|
||||
0x5d, 0xe2, 0xa5, 0x02, 0x6b, 0x28, 0x50, 0xa3, 0xed, 0x0d, 0x33, 0x15, 0x79, 0x0c, 0xed, 0x90,
|
||||
0x2f, 0x98, 0xf4, 0x5e, 0xf3, 0xa9, 0x88, 0x59, 0x28, 0x9c, 0x3a, 0x9e, 0xf0, 0xbd, 0xf2, 0x09,
|
||||
0x1f, 0xad, 0x66, 0xaf, 0xb8, 0x1c, 0xc7, 0x2c, 0xcc, 0x8e, 0xb9, 0x95, 0xc7, 0x28, 0x4c, 0x90,
|
||||
0x8f, 0xe1, 0xce, 0x26, 0xc9, 0x9c, 0xfb, 0x92, 0x09, 0xc7, 0xea, 0x18, 0x5d, 0x42, 0x37, 0xb9,
|
||||
0xbf, 0x46, 0x74, 0x8b, 0x88, 0xea, 0x84, 0x03, 0x1d, 0xa3, 0xab, 0x15, 0x44, 0x94, 0x26, 0x94,
|
||||
0xac, 0x38, 0x12, 0x5e, 0x49, 0x56, 0xe3, 0xdf, 0xc8, 0xca, 0x63, 0x36, 0xb2, 0x36, 0x49, 0x32,
|
||||
0x59, 0xcd, 0x54, 0x56, 0x0e, 0x17, 0xb2, 0x36, 0xc4, 0x4c, 0x56, 0x2b, 0x95, 0x95, 0xc3, 0x99,
|
||||
0xac, 0xaf, 0x00, 0x12, 0x2e, 0xb8, 0x9c, 0x2e, 0xd5, 0xe9, 0xb7, 0xb1, 0xc7, 0x1f, 0x96, 0x25,
|
||||
0x6d, 0xea, 0xa7, 0x47, 0x15, 0xef, 0xcc, 0x0b, 0x25, 0xb5, 0x92, 0x7c, 0xb9, 0x5d, 0x80, 0x77,
|
||||
0xde, 0x2e, 0xc0, 0xcf, 0xc1, 0xda, 0x44, 0x6d, 0x77, 0x6a, 0x0d, 0x8c, 0x97, 0xfd, 0xb1, 0xad,
|
||||
0x11, 0x13, 0xf4, 0xe1, 0xc8, 0xd6, 0x8b, 0x6e, 0x35, 0x8e, 0x6a, 0x50, 0x45, 0xcd, 0x47, 0x4d,
|
||||
0x80, 0xe2, 0xda, 0xdd, 0x67, 0x00, 0xc5, 0xf9, 0xa8, 0xca, 0x8b, 0xae, 0xae, 0x04, 0x4f, 0x4b,
|
||||
0x79, 0x97, 0x66, 0x96, 0xc2, 0x7d, 0x1e, 0x2e, 0xe4, 0x12, 0x2b, 0xb8, 0x45, 0x33, 0xcb, 0xfd,
|
||||
0x4b, 0x03, 0x98, 0x78, 0x01, 0x1f, 0xf3, 0xc4, 0xe3, 0xe2, 0xfd, 0xfb, 0xef, 0x09, 0xd4, 0x04,
|
||||
0xb6, 0xbe, 0x70, 0x74, 0x8c, 0x20, 0xe5, 0x88, 0x74, 0x2a, 0x64, 0x21, 0x39, 0x91, 0x7c, 0x01,
|
||||
0x16, 0xcf, 0x1a, 0x5e, 0x38, 0x06, 0x46, 0xed, 0x95, 0xa3, 0xf2, 0x69, 0x90, 0xc5, 0x15, 0x64,
|
||||
0xf2, 0x25, 0xc0, 0x32, 0x3f, 0x78, 0xe1, 0x54, 0x30, 0xf4, 0xee, 0x3b, 0xaf, 0x25, 0x8b, 0x2d,
|
||||
0xd1, 0xdd, 0xc7, 0x50, 0xc5, 0x2f, 0x50, 0xd3, 0x13, 0x27, 0xae, 0x96, 0x4e, 0x4f, 0xb5, 0xde,
|
||||
0x9e, 0x23, 0x56, 0x36, 0x47, 0xdc, 0xa7, 0x60, 0x3e, 0x4f, 0xbf, 0xf3, 0x7d, 0x0f, 0xc6, 0xfd,
|
||||
0x49, 0x83, 0x26, 0xe2, 0x03, 0x26, 0x67, 0x4b, 0x9e, 0x90, 0xc7, 0x5b, 0x0f, 0xc6, 0x83, 0x5b,
|
||||
0xf1, 0x19, 0xaf, 0x57, 0x7a, 0x28, 0x72, 0xa1, 0xfa, 0xbb, 0x84, 0x1a, 0x65, 0xa1, 0x5d, 0xa8,
|
||||
0xe0, 0xd8, 0x37, 0x41, 0xef, 0xbf, 0x48, 0xeb, 0x68, 0xd8, 0x7f, 0x91, 0xd6, 0x11, 0x55, 0xa3,
|
||||
0x5e, 0x01, 0xb4, 0x6f, 0x1b, 0xee, 0x2f, 0x9a, 0x2a, 0x3e, 0x36, 0x57, 0xb5, 0x27, 0xc8, 0xff,
|
||||
0xa0, 0x26, 0x24, 0x8f, 0xa7, 0x81, 0x40, 0x5d, 0x06, 0x35, 0x95, 0x39, 0x10, 0x6a, 0xeb, 0xab,
|
||||
0x55, 0x38, 0xcb, 0xb7, 0x56, 0x6b, 0xf2, 0x7f, 0xa8, 0x0b, 0xc9, 0x12, 0xa9, 0xd8, 0xe9, 0x50,
|
||||
0xad, 0xa1, 0x3d, 0x10, 0xe4, 0x2e, 0x98, 0x3c, 0x9c, 0x4f, 0xf1, 0x52, 0x94, 0xa3, 0xca, 0xc3,
|
||||
0xf9, 0x40, 0x90, 0xfb, 0x50, 0x5f, 0x24, 0xd1, 0x2a, 0xf6, 0xc2, 0x85, 0x53, 0xed, 0x18, 0x5d,
|
||||
0x8b, 0x6e, 0x6c, 0xd2, 0x06, 0xfd, 0x72, 0x8d, 0x83, 0xad, 0x4e, 0xf5, 0xcb, 0xb5, 0xca, 0x9e,
|
||||
0xb0, 0x70, 0xc1, 0x55, 0x92, 0x5a, 0x9a, 0x1d, 0xed, 0x81, 0x70, 0x7f, 0xd5, 0xa0, 0x7a, 0xbc,
|
||||
0x5c, 0x85, 0xaf, 0xc8, 0x3e, 0x34, 0x02, 0x2f, 0x9c, 0xaa, 0x56, 0x2a, 0x34, 0x5b, 0x81, 0x17,
|
||||
0xaa, 0x1a, 0x1e, 0x08, 0xf4, 0xb3, 0xeb, 0x8d, 0x3f, 0x7b, 0x6b, 0x02, 0x76, 0x9d, 0xf9, 0x7b,
|
||||
0xd9, 0x25, 0x18, 0x78, 0x09, 0xf7, 0xcb, 0x97, 0x80, 0x1b, 0xf4, 0xfa, 0xe1, 0x2c, 0x9a, 0x7b,
|
||||
0xe1, 0xa2, 0xb8, 0x01, 0xf5, 0x86, 0xe3, 0x57, 0x35, 0x29, 0xae, 0xdd, 0x03, 0xa8, 0xe7, 0xac,
|
||||
0x5b, 0xcd, 0xfb, 0xdd, 0x48, 0x3d, 0xb1, 0x5b, 0xef, 0xaa, 0xee, 0xfe, 0x00, 0x2d, 0x4c, 0xce,
|
||||
0xe7, 0xff, 0xb5, 0xcb, 0x0e, 0xc0, 0x9c, 0xa9, 0x0c, 0x79, 0x93, 0xed, 0xde, 0x12, 0x9e, 0x07,
|
||||
0xa4, 0xb4, 0xa3, 0xbd, 0x37, 0x37, 0xfb, 0xda, 0xef, 0x37, 0xfb, 0xda, 0x9f, 0x37, 0xfb, 0xda,
|
||||
0xf7, 0xa6, 0x62, 0xc7, 0x97, 0x97, 0x26, 0xfe, 0xcd, 0x7c, 0xf6, 0x77, 0x00, 0x00, 0x00, 0xff,
|
||||
0xff, 0x53, 0x09, 0xe5, 0x37, 0xfe, 0x08, 0x00, 0x00,
|
||||
}
|
||||
|
||||
func (m *MetricMetadata) Marshal() (dAtA []byte, err error) {
|
||||
|
@ -2903,7 +2903,7 @@ func (m *Histogram) Unmarshal(dAtA []byte) error {
|
|||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.NegativeSpans = append(m.NegativeSpans, &BucketSpan{})
|
||||
m.NegativeSpans = append(m.NegativeSpans, BucketSpan{})
|
||||
if err := m.NegativeSpans[len(m.NegativeSpans)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -3069,7 +3069,7 @@ func (m *Histogram) Unmarshal(dAtA []byte) error {
|
|||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.PositiveSpans = append(m.PositiveSpans, &BucketSpan{})
|
||||
m.PositiveSpans = append(m.PositiveSpans, BucketSpan{})
|
||||
if err := m.PositiveSpans[len(m.PositiveSpans)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
@ -86,9 +86,9 @@ message Histogram {
|
|||
uint64 zero_count_int = 6;
|
||||
double zero_count_float = 7;
|
||||
}
|
||||
|
||||
|
||||
// Negative Buckets.
|
||||
repeated BucketSpan negative_spans = 8;
|
||||
repeated BucketSpan negative_spans = 8 [(gogoproto.nullable) = false];
|
||||
// Use either "negative_deltas" or "negative_counts", the former for
|
||||
// regular histograms with integer counts, the latter for float
|
||||
// histograms.
|
||||
|
@ -96,7 +96,7 @@ message Histogram {
|
|||
repeated double negative_counts = 10; // Absolute count of each bucket.
|
||||
|
||||
// Positive Buckets.
|
||||
repeated BucketSpan positive_spans = 11;
|
||||
repeated BucketSpan positive_spans = 11 [(gogoproto.nullable) = false];
|
||||
// Use either "positive_deltas" or "positive_counts", the former for
|
||||
// regular histograms with integer counts, the latter for float
|
||||
// histograms.
|
||||
|
@ -107,7 +107,7 @@ message Histogram {
|
|||
// timestamp is in ms format, see model/timestamp/timestamp.go for
|
||||
// conversion from time.Time to Prometheus timestamp.
|
||||
int64 timestamp = 15;
|
||||
}
|
||||
}
|
||||
|
||||
// A BucketSpan defines a number of consecutive buckets with their
|
||||
// offset. Logically, it would be more straightforward to include the
|
||||
|
|
|
@ -25,7 +25,7 @@ import (
|
|||
|
||||
"github.com/go-kit/log"
|
||||
|
||||
"github.com/prometheus/prometheus/util/stats"
|
||||
"github.com/prometheus/prometheus/tsdb/tsdbutil"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"go.uber.org/goleak"
|
||||
|
@ -35,7 +35,7 @@ import (
|
|||
"github.com/prometheus/prometheus/model/timestamp"
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
"github.com/prometheus/prometheus/storage"
|
||||
"github.com/prometheus/prometheus/tsdb"
|
||||
"github.com/prometheus/prometheus/util/stats"
|
||||
)
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
|
@ -3162,7 +3162,7 @@ func TestNativeHistogramRate(t *testing.T) {
|
|||
lbls := labels.FromStrings("__name__", seriesName)
|
||||
|
||||
app := test.Storage().Appender(context.TODO())
|
||||
for i, h := range tsdb.GenerateTestHistograms(100) {
|
||||
for i, h := range tsdbutil.GenerateTestHistograms(100) {
|
||||
_, err := app.AppendHistogram(0, lbls, int64(i)*int64(15*time.Second/time.Millisecond), h, nil)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
@ -3207,7 +3207,7 @@ func TestNativeFloatHistogramRate(t *testing.T) {
|
|||
lbls := labels.FromStrings("__name__", seriesName)
|
||||
|
||||
app := test.Storage().Appender(context.TODO())
|
||||
for i, fh := range tsdb.GenerateTestFloatHistograms(100) {
|
||||
for i, fh := range tsdbutil.GenerateTestFloatHistograms(100) {
|
||||
_, err := app.AppendHistogram(0, lbls, int64(i)*int64(15*time.Second/time.Millisecond), nil, fh)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
|
|
@ -322,6 +322,8 @@ const resolvedRetention = 15 * time.Minute
|
|||
// Eval evaluates the rule expression and then creates pending alerts and fires
|
||||
// or removes previously pending alerts accordingly.
|
||||
func (r *AlertingRule) Eval(ctx context.Context, evalDelay time.Duration, ts time.Time, query QueryFunc, externalURL *url.URL, limit int) (promql.Vector, error) {
|
||||
ctx = NewOriginContext(ctx, NewRuleDetail(r))
|
||||
|
||||
res, err := query(ctx, r.vector.String(), ts.Add(-evalDelay))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
|
@ -895,3 +895,41 @@ func TestPendingAndKeepFiringFor(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
require.Equal(t, 0, len(res))
|
||||
}
|
||||
|
||||
// TestAlertingEvalWithOrigin checks that the alerting rule details are passed through the context.
|
||||
func TestAlertingEvalWithOrigin(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
now := time.Now()
|
||||
|
||||
const (
|
||||
name = "my-recording-rule"
|
||||
query = `count(metric{foo="bar"}) > 0`
|
||||
)
|
||||
var (
|
||||
detail RuleDetail
|
||||
lbs = labels.FromStrings("test", "test")
|
||||
)
|
||||
|
||||
expr, err := parser.ParseExpr(query)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule := NewAlertingRule(
|
||||
name,
|
||||
expr,
|
||||
time.Second,
|
||||
time.Minute,
|
||||
lbs,
|
||||
nil,
|
||||
nil,
|
||||
"",
|
||||
true, log.NewNopLogger(),
|
||||
)
|
||||
|
||||
_, err = rule.Eval(ctx, 0, now, func(ctx context.Context, qs string, _ time.Time) (promql.Vector, error) {
|
||||
detail = FromOriginContext(ctx)
|
||||
return nil, nil
|
||||
}, nil, 0)
|
||||
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, detail, NewRuleDetail(rule))
|
||||
}
|
||||
|
|
|
@ -40,8 +40,8 @@ import (
|
|||
"github.com/prometheus/prometheus/promql"
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
"github.com/prometheus/prometheus/storage"
|
||||
"github.com/prometheus/prometheus/tsdb"
|
||||
"github.com/prometheus/prometheus/tsdb/chunkenc"
|
||||
"github.com/prometheus/prometheus/tsdb/tsdbutil"
|
||||
"github.com/prometheus/prometheus/util/teststorage"
|
||||
)
|
||||
|
||||
|
@ -1815,7 +1815,7 @@ func TestNativeHistogramsInRecordingRules(t *testing.T) {
|
|||
|
||||
// Add some histograms.
|
||||
db := suite.TSDB()
|
||||
hists := tsdb.GenerateTestHistograms(5)
|
||||
hists := tsdbutil.GenerateTestHistograms(5)
|
||||
ts := time.Now()
|
||||
app := db.Appender(context.Background())
|
||||
for i, h := range hists {
|
||||
|
|
69
rules/origin.go
Normal file
|
@ -0,0 +1,69 @@
|
|||
// Copyright 2023 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package rules
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/prometheus/prometheus/model/labels"
|
||||
)
|
||||
|
||||
type ruleOrigin struct{}
|
||||
|
||||
// RuleDetail contains information about the rule that is being evaluated.
|
||||
type RuleDetail struct {
|
||||
Name string
|
||||
Query string
|
||||
Labels labels.Labels
|
||||
Kind string
|
||||
}
|
||||
|
||||
const (
|
||||
KindAlerting = "alerting"
|
||||
KindRecording = "recording"
|
||||
)
|
||||
|
||||
// NewRuleDetail creates a RuleDetail from a given Rule.
|
||||
func NewRuleDetail(r Rule) RuleDetail {
|
||||
var kind string
|
||||
switch r.(type) {
|
||||
case *AlertingRule:
|
||||
kind = KindAlerting
|
||||
case *RecordingRule:
|
||||
kind = KindRecording
|
||||
default:
|
||||
panic(fmt.Sprintf(`unknown rule type "%T"`, r))
|
||||
}
|
||||
|
||||
return RuleDetail{
|
||||
Name: r.Name(),
|
||||
Query: r.Query().String(),
|
||||
Labels: r.Labels(),
|
||||
Kind: kind,
|
||||
}
|
||||
}
|
||||
|
||||
// NewOriginContext returns a new context with data about the origin attached.
|
||||
func NewOriginContext(ctx context.Context, rule RuleDetail) context.Context {
|
||||
return context.WithValue(ctx, ruleOrigin{}, rule)
|
||||
}
|
||||
|
||||
// FromOriginContext returns the RuleDetail origin data from the context.
|
||||
func FromOriginContext(ctx context.Context) RuleDetail {
|
||||
if rule, ok := ctx.Value(ruleOrigin{}).(RuleDetail); ok {
|
||||
return rule
|
||||
}
|
||||
return RuleDetail{}
|
||||
}
|
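A hedged usage sketch, not part of this change set: because Eval now attaches a RuleDetail to the context, the code that supplies the QueryFunc can read it back with FromOriginContext, for example to log which rule issued a query. The wrapper below and its name are assumptions for illustration only.

package main

import (
	"context"
	"log"
	"time"

	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/rules"
)

// wrapQueryFunc decorates a rules.QueryFunc so every query logs the rule
// that issued it, read from the context via FromOriginContext.
func wrapQueryFunc(next rules.QueryFunc) rules.QueryFunc {
	return func(ctx context.Context, qs string, ts time.Time) (promql.Vector, error) {
		detail := rules.FromOriginContext(ctx) // zero-value RuleDetail if no rule attached it
		log.Printf("rule=%q kind=%q query=%q", detail.Name, detail.Kind, qs)
		return next(ctx, qs, ts)
	}
}

func main() {
	q := wrapQueryFunc(func(ctx context.Context, qs string, ts time.Time) (promql.Vector, error) {
		return nil, nil // stand-in for a real query function
	})
	_, _ = q(context.Background(), `count(metric{foo="bar"})`, time.Now())
}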
51
rules/origin_test.go
Normal file
|
@ -0,0 +1,51 @@
|
|||
// Copyright 2023 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package rules
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/url"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/prometheus/prometheus/model/labels"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
)
|
||||
|
||||
type unknownRule struct{}
|
||||
|
||||
func (u unknownRule) Name() string { return "" }
|
||||
func (u unknownRule) Labels() labels.Labels { return nil }
|
||||
func (u unknownRule) Eval(ctx context.Context, evalDelay time.Duration, time time.Time, queryFunc QueryFunc, url *url.URL, i int) (promql.Vector, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (u unknownRule) String() string { return "" }
|
||||
func (u unknownRule) Query() parser.Expr { return nil }
|
||||
func (u unknownRule) SetLastError(err error) {}
|
||||
func (u unknownRule) LastError() error { return nil }
|
||||
func (u unknownRule) SetHealth(health RuleHealth) {}
|
||||
func (u unknownRule) Health() RuleHealth { return "" }
|
||||
func (u unknownRule) SetEvaluationDuration(duration time.Duration) {}
|
||||
func (u unknownRule) GetEvaluationDuration() time.Duration { return 0 }
|
||||
func (u unknownRule) SetEvaluationTimestamp(time time.Time) {}
|
||||
func (u unknownRule) GetEvaluationTimestamp() time.Time { return time.Time{} }
|
||||
|
||||
func TestNewRuleDetailPanics(t *testing.T) {
|
||||
require.PanicsWithValue(t, `unknown rule type "rules.unknownRule"`, func() {
|
||||
NewRuleDetail(unknownRule{})
|
||||
})
|
||||
}
|
|
@ -73,6 +73,8 @@ func (rule *RecordingRule) Labels() labels.Labels {
|
|||
|
||||
// Eval evaluates the rule and then overrides the metric names and labels accordingly.
|
||||
func (rule *RecordingRule) Eval(ctx context.Context, evalDelay time.Duration, ts time.Time, query QueryFunc, _ *url.URL, limit int) (promql.Vector, error) {
|
||||
ctx = NewOriginContext(ctx, NewRuleDetail(rule))
|
||||
|
||||
vector, err := query(ctx, rule.vector.String(), ts.Add(-evalDelay))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
|
@ -156,3 +156,31 @@ func TestRecordingRuleLimit(t *testing.T) {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestRecordingEvalWithOrigin checks that the recording rule details are passed through the context.
|
||||
func TestRecordingEvalWithOrigin(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
now := time.Now()
|
||||
|
||||
const (
|
||||
name = "my-recording-rule"
|
||||
query = `count(metric{foo="bar"})`
|
||||
)
|
||||
|
||||
var (
|
||||
detail RuleDetail
|
||||
lbs = labels.FromStrings("foo", "bar")
|
||||
)
|
||||
|
||||
expr, err := parser.ParseExpr(query)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule := NewRecordingRule(name, expr, lbs)
|
||||
_, err = rule.Eval(ctx, 0, now, func(ctx context.Context, qs string, _ time.Time) (promql.Vector, error) {
|
||||
detail = FromOriginContext(ctx)
|
||||
return nil, nil
|
||||
}, nil, 0)
|
||||
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, detail, NewRuleDetail(rule))
|
||||
}
|
||||
|
|
|
@ -23,7 +23,6 @@ import (
|
|||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/prometheus/prometheus/model/histogram"
|
||||
"github.com/prometheus/prometheus/model/labels"
|
||||
"github.com/prometheus/prometheus/tsdb/chunkenc"
|
||||
"github.com/prometheus/prometheus/tsdb/tsdbutil"
|
||||
|
@ -387,18 +386,11 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
|
|||
|
||||
// histogramSample returns a histogram that is unique to the ts.
|
||||
histogramSample := func(ts int64) sample {
|
||||
idx := ts + 1
|
||||
return sample{t: ts, h: &histogram.Histogram{
|
||||
Schema: 2,
|
||||
ZeroThreshold: 0.001,
|
||||
ZeroCount: 2 * uint64(idx),
|
||||
Count: 5 * uint64(idx),
|
||||
Sum: 12.34 * float64(idx),
|
||||
PositiveSpans: []histogram.Span{{Offset: 1, Length: 2}, {Offset: 2, Length: 1}},
|
||||
NegativeSpans: []histogram.Span{{Offset: 2, Length: 1}, {Offset: 1, Length: 2}},
|
||||
PositiveBuckets: []int64{1 * idx, -1 * idx, 3 * idx},
|
||||
NegativeBuckets: []int64{1 * idx, 2 * idx, 3 * idx},
|
||||
}}
|
||||
return sample{t: ts, h: tsdbutil.GenerateTestHistogram(int(ts + 1))}
|
||||
}
|
||||
|
||||
floatHistogramSample := func(ts int64) sample {
|
||||
return sample{t: ts, fh: tsdbutil.GenerateTestFloatHistogram(int(ts + 1))}
|
||||
}
|
||||
|
||||
for _, tc := range []struct {
|
||||
|
@ -529,6 +521,46 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
|
|||
[]tsdbutil.Sample{histogramSample(15)},
|
||||
),
|
||||
},
|
||||
{
|
||||
name: "float histogram chunks overlapping",
|
||||
input: []ChunkSeries{
|
||||
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{floatHistogramSample(0), floatHistogramSample(5)}, []tsdbutil.Sample{floatHistogramSample(10), floatHistogramSample(15)}),
|
||||
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{floatHistogramSample(2), floatHistogramSample(20)}, []tsdbutil.Sample{floatHistogramSample(25), floatHistogramSample(30)}),
|
||||
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{floatHistogramSample(18), floatHistogramSample(26)}, []tsdbutil.Sample{floatHistogramSample(31), floatHistogramSample(35)}),
|
||||
},
|
||||
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
|
||||
[]tsdbutil.Sample{floatHistogramSample(0), floatHistogramSample(2), floatHistogramSample(5), floatHistogramSample(10), floatHistogramSample(15), floatHistogramSample(18), floatHistogramSample(20), floatHistogramSample(25), floatHistogramSample(26), floatHistogramSample(30)},
|
||||
[]tsdbutil.Sample{floatHistogramSample(31), floatHistogramSample(35)},
|
||||
),
|
||||
},
|
||||
{
|
||||
name: "float histogram chunks overlapping with float chunks",
|
||||
input: []ChunkSeries{
|
||||
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{floatHistogramSample(0), floatHistogramSample(5)}, []tsdbutil.Sample{floatHistogramSample(10), floatHistogramSample(15)}),
|
||||
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{12, 12, nil, nil}}, []tsdbutil.Sample{sample{14, 14, nil, nil}}),
|
||||
},
|
||||
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
|
||||
[]tsdbutil.Sample{floatHistogramSample(0)},
|
||||
[]tsdbutil.Sample{sample{1, 1, nil, nil}},
|
||||
[]tsdbutil.Sample{floatHistogramSample(5), floatHistogramSample(10)},
|
||||
[]tsdbutil.Sample{sample{12, 12, nil, nil}, sample{14, 14, nil, nil}},
|
||||
[]tsdbutil.Sample{floatHistogramSample(15)},
|
||||
),
|
||||
},
|
||||
{
|
||||
name: "float histogram chunks overlapping with histogram chunks",
|
||||
input: []ChunkSeries{
|
||||
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{floatHistogramSample(0), floatHistogramSample(5)}, []tsdbutil.Sample{floatHistogramSample(10), floatHistogramSample(15)}),
|
||||
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{histogramSample(1), histogramSample(12)}, []tsdbutil.Sample{histogramSample(14)}),
|
||||
},
|
||||
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
|
||||
[]tsdbutil.Sample{floatHistogramSample(0)},
|
||||
[]tsdbutil.Sample{histogramSample(1)},
|
||||
[]tsdbutil.Sample{floatHistogramSample(5), floatHistogramSample(10)},
|
||||
[]tsdbutil.Sample{histogramSample(12), histogramSample(14)},
|
||||
[]tsdbutil.Sample{floatHistogramSample(15)},
|
||||
),
|
||||
},
|
||||
} {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
merged := m(tc.input...)
|
||||
|
|
|
@ -87,7 +87,7 @@ func TestStoreHTTPErrorHandling(t *testing.T) {
|
|||
func TestClientRetryAfter(t *testing.T) {
|
||||
server := httptest.NewServer(
|
||||
http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
http.Error(w, longErrMessage, 429)
|
||||
http.Error(w, longErrMessage, http.StatusTooManyRequests)
|
||||
}),
|
||||
)
|
||||
defer server.Close()
|
||||
|
|
|
@ -559,7 +559,7 @@ func HistogramProtoToFloatHistogram(hp prompb.Histogram) *histogram.FloatHistogr
|
|||
}
|
||||
}
|
||||
|
||||
func spansProtoToSpans(s []*prompb.BucketSpan) []histogram.Span {
|
||||
func spansProtoToSpans(s []prompb.BucketSpan) []histogram.Span {
|
||||
spans := make([]histogram.Span, len(s))
|
||||
for i := 0; i < len(s); i++ {
|
||||
spans[i] = histogram.Span{Offset: s[i].Offset, Length: s[i].Length}
|
||||
|
@ -600,10 +600,10 @@ func FloatHistogramToHistogramProto(timestamp int64, fh *histogram.FloatHistogra
|
|||
}
|
||||
}
|
||||
|
||||
func spansToSpansProto(s []histogram.Span) []*prompb.BucketSpan {
|
||||
spans := make([]*prompb.BucketSpan, len(s))
|
||||
func spansToSpansProto(s []histogram.Span) []prompb.BucketSpan {
|
||||
spans := make([]prompb.BucketSpan, len(s))
|
||||
for i := 0; i < len(s); i++ {
|
||||
spans[i] = &prompb.BucketSpan{Offset: s[i].Offset, Length: s[i].Length}
|
||||
spans[i] = prompb.BucketSpan{Offset: s[i].Offset, Length: s[i].Length}
|
||||
}
|
||||
|
||||
return spans
|
||||
|
|
|
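A hedged round-trip sketch using the exported converters from this file, assuming the regenerated prompb types above; it only illustrates that the bucket spans are now value slices on the protobuf side, nothing else about the encoding.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/storage/remote"
	"github.com/prometheus/prometheus/tsdb/tsdbutil"
)

func main() {
	fh := tsdbutil.GenerateTestFloatHistogram(1)
	hp := remote.FloatHistogramToHistogramProto(42, fh) // spans end up as []prompb.BucketSpan
	back := remote.HistogramProtoToFloatHistogram(hp)
	fmt.Println(back.Sum == fh.Sum, len(hp.PositiveSpans))
}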
@ -399,7 +399,7 @@ func exampleHistogramProto() prompb.Histogram {
|
|||
Schema: 0,
|
||||
ZeroThreshold: 0,
|
||||
ZeroCount: &prompb.Histogram_ZeroCountInt{ZeroCountInt: 0},
|
||||
NegativeSpans: []*prompb.BucketSpan{
|
||||
NegativeSpans: []prompb.BucketSpan{
|
||||
{
|
||||
Offset: 0,
|
||||
Length: 5,
|
||||
|
@ -414,7 +414,7 @@ func exampleHistogramProto() prompb.Histogram {
|
|||
},
|
||||
},
|
||||
NegativeDeltas: []int64{1, 2, -2, 1, -1, 0},
|
||||
PositiveSpans: []*prompb.BucketSpan{
|
||||
PositiveSpans: []prompb.BucketSpan{
|
||||
{
|
||||
Offset: 0,
|
||||
Length: 4,
|
||||
|
@ -497,7 +497,7 @@ func exampleFloatHistogramProto() prompb.Histogram {
|
|||
Schema: 0,
|
||||
ZeroThreshold: 0,
|
||||
ZeroCount: &prompb.Histogram_ZeroCountFloat{ZeroCountFloat: 0},
|
||||
NegativeSpans: []*prompb.BucketSpan{
|
||||
NegativeSpans: []prompb.BucketSpan{
|
||||
{
|
||||
Offset: 0,
|
||||
Length: 5,
|
||||
|
@ -512,7 +512,7 @@ func exampleFloatHistogramProto() prompb.Histogram {
|
|||
},
|
||||
},
|
||||
NegativeCounts: []float64{1, 2, -2, 1, -1, 0},
|
||||
PositiveSpans: []*prompb.BucketSpan{
|
||||
PositiveSpans: []prompb.BucketSpan{
|
||||
{
|
||||
Offset: 0,
|
||||
Length: 4,
|
||||
|
|
|
@ -54,10 +54,10 @@ func TestDB_InvalidSeries(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("Histograms", func(t *testing.T) {
|
||||
_, err := app.AppendHistogram(0, labels.Labels{}, 0, tsdb.GenerateTestHistograms(1)[0], nil)
|
||||
_, err := app.AppendHistogram(0, labels.Labels{}, 0, tsdbutil.GenerateTestHistograms(1)[0], nil)
|
||||
require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject empty labels")
|
||||
|
||||
_, err = app.AppendHistogram(0, labels.FromStrings("a", "1", "a", "2"), 0, tsdb.GenerateTestHistograms(1)[0], nil)
|
||||
_, err = app.AppendHistogram(0, labels.FromStrings("a", "1", "a", "2"), 0, tsdbutil.GenerateTestHistograms(1)[0], nil)
|
||||
require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject duplicate labels")
|
||||
})
|
||||
|
||||
|
@ -151,7 +151,7 @@ func TestCommit(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
histograms := tsdb.GenerateTestHistograms(numHistograms)
|
||||
histograms := tsdbutil.GenerateTestHistograms(numHistograms)
|
||||
|
||||
for i := 0; i < numHistograms; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, int64(i), histograms[i], nil)
|
||||
|
@ -163,7 +163,7 @@ func TestCommit(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
floatHistograms := tsdb.GenerateTestFloatHistograms(numHistograms)
|
||||
floatHistograms := tsdbutil.GenerateTestFloatHistograms(numHistograms)
|
||||
|
||||
for i := 0; i < numHistograms; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, int64(i), nil, floatHistograms[i])
|
||||
|
@ -257,7 +257,7 @@ func TestRollback(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
histograms := tsdb.GenerateTestHistograms(numHistograms)
|
||||
histograms := tsdbutil.GenerateTestHistograms(numHistograms)
|
||||
|
||||
for i := 0; i < numHistograms; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, int64(i), histograms[i], nil)
|
||||
|
@ -269,7 +269,7 @@ func TestRollback(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
floatHistograms := tsdb.GenerateTestFloatHistograms(numHistograms)
|
||||
floatHistograms := tsdbutil.GenerateTestFloatHistograms(numHistograms)
|
||||
|
||||
for i := 0; i < numHistograms; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, int64(i), nil, floatHistograms[i])
|
||||
|
@ -374,7 +374,7 @@ func TestFullTruncateWAL(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
histograms := tsdb.GenerateTestHistograms(numHistograms)
|
||||
histograms := tsdbutil.GenerateTestHistograms(numHistograms)
|
||||
|
||||
for i := 0; i < numHistograms; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, int64(lastTs), histograms[i], nil)
|
||||
|
@ -387,7 +387,7 @@ func TestFullTruncateWAL(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
floatHistograms := tsdb.GenerateTestFloatHistograms(numHistograms)
|
||||
floatHistograms := tsdbutil.GenerateTestFloatHistograms(numHistograms)
|
||||
|
||||
for i := 0; i < numHistograms; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, int64(lastTs), nil, floatHistograms[i])
|
||||
|
@ -436,7 +436,7 @@ func TestPartialTruncateWAL(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
histograms := tsdb.GenerateTestHistograms(numDatapoints)
|
||||
histograms := tsdbutil.GenerateTestHistograms(numDatapoints)
|
||||
|
||||
for i := 0; i < numDatapoints; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, lastTs, histograms[i], nil)
|
||||
|
@ -449,7 +449,7 @@ func TestPartialTruncateWAL(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
floatHistograms := tsdb.GenerateTestFloatHistograms(numDatapoints)
|
||||
floatHistograms := tsdbutil.GenerateTestFloatHistograms(numDatapoints)
|
||||
|
||||
for i := 0; i < numDatapoints; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, lastTs, nil, floatHistograms[i])
|
||||
|
@ -475,7 +475,7 @@ func TestPartialTruncateWAL(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
histograms := tsdb.GenerateTestHistograms(numDatapoints)
|
||||
histograms := tsdbutil.GenerateTestHistograms(numDatapoints)
|
||||
|
||||
for i := 0; i < numDatapoints; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, lastTs, histograms[i], nil)
|
||||
|
@ -488,7 +488,7 @@ func TestPartialTruncateWAL(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
floatHistograms := tsdb.GenerateTestFloatHistograms(numDatapoints)
|
||||
floatHistograms := tsdbutil.GenerateTestFloatHistograms(numDatapoints)
|
||||
|
||||
for i := 0; i < numDatapoints; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, lastTs, nil, floatHistograms[i])
|
||||
|
@ -529,7 +529,7 @@ func TestWALReplay(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
histograms := tsdb.GenerateTestHistograms(numHistograms)
|
||||
histograms := tsdbutil.GenerateTestHistograms(numHistograms)
|
||||
|
||||
for i := 0; i < numHistograms; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, lastTs, histograms[i], nil)
|
||||
|
@ -541,7 +541,7 @@ func TestWALReplay(t *testing.T) {
|
|||
for _, l := range lbls {
|
||||
lset := labels.New(l...)
|
||||
|
||||
floatHistograms := tsdb.GenerateTestFloatHistograms(numHistograms)
|
||||
floatHistograms := tsdbutil.GenerateTestFloatHistograms(numHistograms)
|
||||
|
||||
for i := 0; i < numHistograms; i++ {
|
||||
_, err := app.AppendHistogram(0, lset, lastTs, nil, floatHistograms[i])
|
||||
|
@ -622,7 +622,7 @@ func Test_ExistingWAL_NextRef(t *testing.T) {
|
|||
}
|
||||
|
||||
histogramCount := 10
|
||||
histograms := tsdb.GenerateTestHistograms(histogramCount)
|
||||
histograms := tsdbutil.GenerateTestHistograms(histogramCount)
|
||||
// Append <histogramCount> series
|
||||
for i := 0; i < histogramCount; i++ {
|
||||
lset := labels.FromStrings(model.MetricNameLabel, fmt.Sprintf("histogram_%d", i))
|
||||
|
|
91
tsdb/head.go
|
@ -17,7 +17,6 @@ import (
|
|||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"math/rand"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
"time"
|
||||
|
@ -2073,93 +2072,3 @@ func (h *Head) updateWALReplayStatusRead(current int) {
|
|||
|
||||
h.stats.WALReplayStatus.Current = current
|
||||
}
|
||||
|
||||
func GenerateTestHistograms(n int) (r []*histogram.Histogram) {
|
||||
for i := 0; i < n; i++ {
|
||||
h := GenerateTestHistogram(i)
|
||||
if i > 0 {
|
||||
h.CounterResetHint = histogram.NotCounterReset
|
||||
}
|
||||
r = append(r, h)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// Generates a test histogram, it is up to the user to set any known counter reset hint.
|
||||
func GenerateTestHistogram(i int) *histogram.Histogram {
|
||||
return &histogram.Histogram{
|
||||
Count: 10 + uint64(i*8),
|
||||
ZeroCount: 2 + uint64(i),
|
||||
ZeroThreshold: 0.001,
|
||||
Sum: 18.4 * float64(i+1),
|
||||
Schema: 1,
|
||||
PositiveSpans: []histogram.Span{
|
||||
{Offset: 0, Length: 2},
|
||||
{Offset: 1, Length: 2},
|
||||
},
|
||||
PositiveBuckets: []int64{int64(i + 1), 1, -1, 0},
|
||||
NegativeSpans: []histogram.Span{
|
||||
{Offset: 0, Length: 2},
|
||||
{Offset: 1, Length: 2},
|
||||
},
|
||||
NegativeBuckets: []int64{int64(i + 1), 1, -1, 0},
|
||||
}
|
||||
}
|
||||
|
||||
func GenerateTestGaugeHistograms(n int) (r []*histogram.Histogram) {
|
||||
for x := 0; x < n; x++ {
|
||||
r = append(r, GenerateTestGaugeHistogram(rand.Intn(n)))
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func GenerateTestGaugeHistogram(i int) *histogram.Histogram {
|
||||
h := GenerateTestHistogram(i)
|
||||
h.CounterResetHint = histogram.GaugeType
|
||||
return h
|
||||
}
|
||||
|
||||
func GenerateTestFloatHistograms(n int) (r []*histogram.FloatHistogram) {
|
||||
for i := 0; i < n; i++ {
|
||||
h := GenerateTestFloatHistogram(i)
|
||||
if i > 0 {
|
||||
h.CounterResetHint = histogram.NotCounterReset
|
||||
}
|
||||
r = append(r, h)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// Generates a test float histogram, it is up to the user to set any known counter reset hint.
|
||||
func GenerateTestFloatHistogram(i int) *histogram.FloatHistogram {
|
||||
return &histogram.FloatHistogram{
|
||||
Count: 10 + float64(i*8),
|
||||
ZeroCount: 2 + float64(i),
|
||||
ZeroThreshold: 0.001,
|
||||
Sum: 18.4 * float64(i+1),
|
||||
Schema: 1,
|
||||
PositiveSpans: []histogram.Span{
|
||||
{Offset: 0, Length: 2},
|
||||
{Offset: 1, Length: 2},
|
||||
},
|
||||
PositiveBuckets: []float64{float64(i + 1), float64(i + 2), float64(i + 1), float64(i + 1)},
|
||||
NegativeSpans: []histogram.Span{
|
||||
{Offset: 0, Length: 2},
|
||||
{Offset: 1, Length: 2},
|
||||
},
|
||||
NegativeBuckets: []float64{float64(i + 1), float64(i + 2), float64(i + 1), float64(i + 1)},
|
||||
}
|
||||
}
|
||||
|
||||
func GenerateTestGaugeFloatHistograms(n int) (r []*histogram.FloatHistogram) {
|
||||
for x := 0; x < n; x++ {
|
||||
r = append(r, GenerateTestGaugeFloatHistogram(rand.Intn(n)))
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func GenerateTestGaugeFloatHistogram(i int) *histogram.FloatHistogram {
|
||||
h := GenerateTestFloatHistogram(i)
|
||||
h.CounterResetHint = histogram.GaugeType
|
||||
return h
|
||||
}
|
||||
|
|
|
@ -1347,7 +1347,7 @@ func TestMemSeries_appendHistogram(t *testing.T) {
|
|||
lbls := labels.Labels{}
|
||||
s := newMemSeries(lbls, 1, labels.StableHash(lbls), 0, defaultIsolationDisabled)
|
||||
|
||||
histograms := GenerateTestHistograms(4)
|
||||
histograms := tsdbutil.GenerateTestHistograms(4)
|
||||
histogramWithOneMoreBucket := histograms[3].Copy()
|
||||
histogramWithOneMoreBucket.Count++
|
||||
histogramWithOneMoreBucket.Sum += 1.23
|
||||
|
@ -2906,7 +2906,7 @@ func TestAppendHistogram(t *testing.T) {
|
|||
expHistograms := make([]tsdbutil.Sample, 0, 2*numHistograms)
|
||||
|
||||
// Counter integer histograms.
|
||||
for _, h := range GenerateTestHistograms(numHistograms) {
|
||||
for _, h := range tsdbutil.GenerateTestHistograms(numHistograms) {
|
||||
_, err := app.AppendHistogram(0, l, ingestTs, h, nil)
|
||||
require.NoError(t, err)
|
||||
expHistograms = append(expHistograms, sample{t: ingestTs, h: h})
|
||||
|
@ -2918,7 +2918,7 @@ func TestAppendHistogram(t *testing.T) {
|
|||
}
|
||||
|
||||
// Gauge integer histograms.
|
||||
for _, h := range GenerateTestGaugeHistograms(numHistograms) {
|
||||
for _, h := range tsdbutil.GenerateTestGaugeHistograms(numHistograms) {
|
||||
_, err := app.AppendHistogram(0, l, ingestTs, h, nil)
|
||||
require.NoError(t, err)
|
||||
expHistograms = append(expHistograms, sample{t: ingestTs, h: h})
|
||||
|
@ -2932,7 +2932,7 @@ func TestAppendHistogram(t *testing.T) {
|
|||
expFloatHistograms := make([]tsdbutil.Sample, 0, 2*numHistograms)
|
||||
|
||||
// Counter float histograms.
|
||||
for _, fh := range GenerateTestFloatHistograms(numHistograms) {
|
||||
for _, fh := range tsdbutil.GenerateTestFloatHistograms(numHistograms) {
|
||||
_, err := app.AppendHistogram(0, l, ingestTs, nil, fh)
|
||||
require.NoError(t, err)
|
||||
expFloatHistograms = append(expFloatHistograms, sample{t: ingestTs, fh: fh})
|
||||
|
@ -2944,7 +2944,7 @@ func TestAppendHistogram(t *testing.T) {
|
|||
}
|
||||
|
||||
// Gauge float histograms.
|
||||
for _, fh := range GenerateTestGaugeFloatHistograms(numHistograms) {
|
||||
for _, fh := range tsdbutil.GenerateTestGaugeFloatHistograms(numHistograms) {
|
||||
_, err := app.AppendHistogram(0, l, ingestTs, nil, fh)
|
||||
require.NoError(t, err)
|
||||
expFloatHistograms = append(expFloatHistograms, sample{t: ingestTs, fh: fh})
|
||||
|
@ -3014,9 +3014,9 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
|
|||
app = head.Appender(context.Background())
|
||||
var hists []*histogram.Histogram
|
||||
if gauge {
|
||||
hists = GenerateTestGaugeHistograms(numHistograms)
|
||||
hists = tsdbutil.GenerateTestGaugeHistograms(numHistograms)
|
||||
} else {
|
||||
hists = GenerateTestHistograms(numHistograms)
|
||||
hists = tsdbutil.GenerateTestHistograms(numHistograms)
|
||||
}
|
||||
for _, h := range hists {
|
||||
h.Count = h.Count * 2
|
||||
|
@ -3037,9 +3037,9 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
|
|||
app = head.Appender(context.Background())
|
||||
var hists []*histogram.FloatHistogram
|
||||
if gauge {
|
||||
hists = GenerateTestGaugeFloatHistograms(numHistograms)
|
||||
hists = tsdbutil.GenerateTestGaugeFloatHistograms(numHistograms)
|
||||
} else {
|
||||
hists = GenerateTestFloatHistograms(numHistograms)
|
||||
hists = tsdbutil.GenerateTestFloatHistograms(numHistograms)
|
||||
}
|
||||
for _, h := range hists {
|
||||
h.Count = h.Count * 2
|
||||
|
@ -3077,9 +3077,9 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
|
|||
app = head.Appender(context.Background())
|
||||
var hists []*histogram.Histogram
|
||||
if gauge {
|
||||
hists = GenerateTestGaugeHistograms(100)
|
||||
hists = tsdbutil.GenerateTestGaugeHistograms(100)
|
||||
} else {
|
||||
hists = GenerateTestHistograms(100)
|
||||
hists = tsdbutil.GenerateTestHistograms(100)
|
||||
}
|
||||
for _, h := range hists {
|
||||
ts++
|
||||
|
@ -3114,9 +3114,9 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
|
|||
app = head.Appender(context.Background())
|
||||
var hists []*histogram.FloatHistogram
|
||||
if gauge {
|
||||
hists = GenerateTestGaugeFloatHistograms(100)
|
||||
hists = tsdbutil.GenerateTestGaugeFloatHistograms(100)
|
||||
} else {
|
||||
hists = GenerateTestFloatHistograms(100)
|
||||
hists = tsdbutil.GenerateTestFloatHistograms(100)
|
||||
}
|
||||
for _, h := range hists {
|
||||
ts++
|
||||
|
@ -3492,14 +3492,14 @@ func TestHistogramMetrics(t *testing.T) {
|
|||
for x := 0; x < 5; x++ {
|
||||
expHSeries++
|
||||
l := labels.FromStrings("a", fmt.Sprintf("b%d", x))
|
||||
for i, h := range GenerateTestHistograms(numHistograms) {
|
||||
for i, h := range tsdbutil.GenerateTestHistograms(numHistograms) {
|
||||
app := head.Appender(context.Background())
|
||||
_, err := app.AppendHistogram(0, l, int64(i), h, nil)
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, app.Commit())
|
||||
expHSamples++
|
||||
}
|
||||
for i, fh := range GenerateTestFloatHistograms(numHistograms) {
|
||||
for i, fh := range tsdbutil.GenerateTestFloatHistograms(numHistograms) {
|
||||
app := head.Appender(context.Background())
|
||||
_, err := app.AppendHistogram(0, l, int64(numHistograms+i), nil, fh)
|
||||
require.NoError(t, err)
|
||||
|
@ -3614,7 +3614,7 @@ func testHistogramStaleSampleHelper(t *testing.T, floatHistogram bool) {
|
|||
|
||||
// Adding stale in the same appender.
|
||||
app := head.Appender(context.Background())
|
||||
for _, h := range GenerateTestHistograms(numHistograms) {
|
||||
for _, h := range tsdbutil.GenerateTestHistograms(numHistograms) {
|
||||
var err error
|
||||
if floatHistogram {
|
||||
_, err = app.AppendHistogram(0, l, 100*int64(len(expHistograms)), nil, h.ToFloat())
|
||||
|
@ -3643,7 +3643,7 @@ func testHistogramStaleSampleHelper(t *testing.T, floatHistogram bool) {
|
|||
|
||||
// Adding stale in different appender and continuing series after a stale sample.
|
||||
app = head.Appender(context.Background())
|
||||
for _, h := range GenerateTestHistograms(2 * numHistograms)[numHistograms:] {
|
||||
for _, h := range tsdbutil.GenerateTestHistograms(2 * numHistograms)[numHistograms:] {
|
||||
var err error
|
||||
if floatHistogram {
|
||||
_, err = app.AppendHistogram(0, l, 100*int64(len(expHistograms)), nil, h.ToFloat())
|
||||
|
@ -3722,7 +3722,7 @@ func TestHistogramCounterResetHeader(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
h := GenerateTestHistograms(1)[0]
|
||||
h := tsdbutil.GenerateTestHistograms(1)[0]
|
||||
h.PositiveBuckets = []int64{100, 1, 1, 1}
|
||||
h.NegativeBuckets = []int64{100, 1, 1, 1}
|
||||
h.Count = 1000
|
||||
|
@ -3799,8 +3799,8 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
|
|||
})
|
||||
db.DisableCompactions()
|
||||
|
||||
hists := GenerateTestHistograms(10)
|
||||
floatHists := GenerateTestFloatHistograms(10)
|
||||
hists := tsdbutil.GenerateTestHistograms(10)
|
||||
floatHists := tsdbutil.GenerateTestFloatHistograms(10)
|
||||
lbls := labels.FromStrings("a", "b")
|
||||
|
||||
var expResult []tsdbutil.Sample
|
||||
|
@ -4428,7 +4428,7 @@ func TestHistogramValidation(t *testing.T) {
|
|||
errMsgFloat string // To be considered for float histogram only if it is non-empty.
|
||||
}{
|
||||
"valid histogram": {
|
||||
h: GenerateTestHistograms(1)[0],
|
||||
h: tsdbutil.GenerateTestHistograms(1)[0],
|
||||
},
|
||||
"rejects histogram who has too few negative buckets": {
|
||||
h: &histogram.Histogram{
|
||||
|
@ -4711,7 +4711,7 @@ func TestGaugeHistogramWALAndChunkHeader(t *testing.T) {
|
|||
require.NoError(t, app.Commit())
|
||||
}
|
||||
|
||||
hists := GenerateTestGaugeHistograms(5)
|
||||
hists := tsdbutil.GenerateTestGaugeHistograms(5)
|
||||
hists[0].CounterResetHint = histogram.UnknownCounterReset
|
||||
appendHistogram(hists[0])
|
||||
appendHistogram(hists[1])
|
||||
|
@ -4786,7 +4786,7 @@ func TestGaugeFloatHistogramWALAndChunkHeader(t *testing.T) {
|
|||
require.NoError(t, app.Commit())
|
||||
}
|
||||
|
||||
hists := GenerateTestGaugeFloatHistograms(5)
|
||||
hists := tsdbutil.GenerateTestGaugeFloatHistograms(5)
|
||||
hists[0].CounterResetHint = histogram.UnknownCounterReset
|
||||
appendHistogram(hists[0])
|
||||
appendHistogram(hists[1])
|
||||
|
|
|
@ -72,6 +72,8 @@ func ChunkFromSamplesGeneric(s Samples) chunks.Meta {
|
|||
ca.Append(s.Get(i).T(), s.Get(i).V())
|
||||
case chunkenc.ValHistogram:
|
||||
ca.AppendHistogram(s.Get(i).T(), s.Get(i).H())
|
||||
case chunkenc.ValFloatHistogram:
|
||||
ca.AppendFloatHistogram(s.Get(i).T(), s.Get(i).FH())
|
||||
default:
|
||||
panic(fmt.Sprintf("unknown sample type %s", sampleType.String()))
|
||||
}
|
||||
|
@ -128,12 +130,18 @@ func PopulatedChunk(numSamples int, minTime int64) chunks.Meta {
|
|||
|
||||
// GenerateSamples starting at start and counting up numSamples.
|
||||
func GenerateSamples(start, numSamples int) []Sample {
|
||||
samples := make([]Sample, 0, numSamples)
|
||||
for i := start; i < start+numSamples; i++ {
|
||||
samples = append(samples, sample{
|
||||
return generateSamples(start, numSamples, func(i int) Sample {
|
||||
return sample{
|
||||
t: int64(i),
|
||||
v: float64(i),
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func generateSamples(start, numSamples int, gen func(int) Sample) []Sample {
|
||||
samples := make([]Sample, 0, numSamples)
|
||||
for i := start; i < start+numSamples; i++ {
|
||||
samples = append(samples, gen(i))
|
||||
}
|
||||
return samples
|
||||
}
|
||||
|
|
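The refactor keeps GenerateSamples' observable behaviour (t=i, v=i) while routing it through a small generator helper; a hedged sketch of calling the exported function:

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/tsdb/tsdbutil"
)

func main() {
	for _, s := range tsdbutil.GenerateSamples(0, 3) {
		fmt.Println(s.T(), s.V()) // prints 0 0, 1 1, 2 2
	}
}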
110
tsdb/tsdbutil/histogram.go
Normal file
|
@ -0,0 +1,110 @@
|
|||
// Copyright 2023 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package tsdbutil
|
||||
|
||||
import (
|
||||
"math/rand"
|
||||
|
||||
"github.com/prometheus/prometheus/model/histogram"
|
||||
)
|
||||
|
||||
func GenerateTestHistograms(n int) (r []*histogram.Histogram) {
|
||||
for i := 0; i < n; i++ {
|
||||
h := GenerateTestHistogram(i)
|
||||
if i > 0 {
|
||||
h.CounterResetHint = histogram.NotCounterReset
|
||||
}
|
||||
r = append(r, h)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// GenerateTestHistogram generates a test histogram; it is up to the user to set any known counter reset hint.
|
||||
func GenerateTestHistogram(i int) *histogram.Histogram {
|
||||
return &histogram.Histogram{
|
||||
Count: 10 + uint64(i*8),
|
||||
ZeroCount: 2 + uint64(i),
|
||||
ZeroThreshold: 0.001,
|
||||
Sum: 18.4 * float64(i+1),
|
||||
Schema: 1,
|
||||
PositiveSpans: []histogram.Span{
|
||||
{Offset: 0, Length: 2},
|
||||
{Offset: 1, Length: 2},
|
||||
},
|
||||
PositiveBuckets: []int64{int64(i + 1), 1, -1, 0},
|
||||
NegativeSpans: []histogram.Span{
|
||||
{Offset: 0, Length: 2},
|
||||
{Offset: 1, Length: 2},
|
||||
},
|
||||
NegativeBuckets: []int64{int64(i + 1), 1, -1, 0},
|
||||
}
|
||||
}
|
||||
|
||||
func GenerateTestGaugeHistograms(n int) (r []*histogram.Histogram) {
|
||||
for x := 0; x < n; x++ {
|
||||
r = append(r, GenerateTestGaugeHistogram(rand.Intn(n)))
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func GenerateTestGaugeHistogram(i int) *histogram.Histogram {
|
||||
h := GenerateTestHistogram(i)
|
||||
h.CounterResetHint = histogram.GaugeType
|
||||
return h
|
||||
}
|
||||
|
||||
func GenerateTestFloatHistograms(n int) (r []*histogram.FloatHistogram) {
|
||||
for i := 0; i < n; i++ {
|
||||
h := GenerateTestFloatHistogram(i)
|
||||
if i > 0 {
|
||||
h.CounterResetHint = histogram.NotCounterReset
|
||||
}
|
||||
r = append(r, h)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// GenerateTestFloatHistogram generates a test float histogram; it is up to the user to set any known counter reset hint.
|
||||
func GenerateTestFloatHistogram(i int) *histogram.FloatHistogram {
|
||||
return &histogram.FloatHistogram{
|
||||
Count: 10 + float64(i*8),
|
||||
ZeroCount: 2 + float64(i),
|
||||
ZeroThreshold: 0.001,
|
||||
Sum: 18.4 * float64(i+1),
|
||||
Schema: 1,
|
||||
PositiveSpans: []histogram.Span{
|
||||
{Offset: 0, Length: 2},
|
||||
{Offset: 1, Length: 2},
|
||||
},
|
||||
PositiveBuckets: []float64{float64(i + 1), float64(i + 2), float64(i + 1), float64(i + 1)},
|
||||
NegativeSpans: []histogram.Span{
|
||||
{Offset: 0, Length: 2},
|
||||
{Offset: 1, Length: 2},
|
||||
},
|
||||
NegativeBuckets: []float64{float64(i + 1), float64(i + 2), float64(i + 1), float64(i + 1)},
|
||||
}
|
||||
}
|
||||
|
||||
func GenerateTestGaugeFloatHistograms(n int) (r []*histogram.FloatHistogram) {
|
||||
for x := 0; x < n; x++ {
|
||||
r = append(r, GenerateTestGaugeFloatHistogram(rand.Intn(n)))
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func GenerateTestGaugeFloatHistogram(i int) *histogram.FloatHistogram {
|
||||
h := GenerateTestFloatHistogram(i)
|
||||
h.CounterResetHint = histogram.GaugeType
|
||||
return h
|
||||
}
|
|
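A hedged sketch of how these helpers are typically used, mirroring the test changes earlier in this diff: generate counter histograms and append them to a TSDB. The data directory, series name, and timestamps are placeholders.

package main

import (
	"context"
	"log"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/tsdb"
	"github.com/prometheus/prometheus/tsdb/tsdbutil"
)

func main() {
	opts := tsdb.DefaultOptions()
	opts.EnableNativeHistograms = true // native histogram ingestion is opt-in
	db, err := tsdb.Open("data", nil, nil, opts, nil)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	app := db.Appender(context.Background())
	lset := labels.FromStrings("__name__", "test_native_histogram")
	// GenerateTestHistograms already marks every histogram after the first as
	// NotCounterReset, so the series can be appended as a monotonic counter.
	for i, h := range tsdbutil.GenerateTestHistograms(10) {
		if _, err := app.AppendHistogram(0, lset, int64(i)*15_000, h, nil); err != nil {
			log.Fatal(err)
		}
	}
	if err := app.Commit(); err != nil {
		log.Fatal(err)
	}
}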
@ -16,6 +16,7 @@ package strutil
|
|||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/regexp"
|
||||
)
|
||||
|
@@ -38,6 +39,26 @@ func GraphLinkForExpression(expr string) string {

// SanitizeLabelName replaces anything that doesn't match
// client_label.LabelNameRE with an underscore.
// Note: this does not handle all Prometheus label name restrictions (such as
// not starting with a digit 0-9), and hence should only be used if the label
// name is prefixed with a known valid string.
func SanitizeLabelName(name string) string {
	return invalidLabelCharRE.ReplaceAllString(name, "_")
}

// SanitizeFullLabelName replaces any invalid character with an underscore, and
// if given an empty string, returns a string containing a single underscore.
func SanitizeFullLabelName(name string) string {
	if len(name) == 0 {
		return "_"
	}
	var validSb strings.Builder
	for i, b := range name {
		if !((b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' || (b >= '0' && b <= '9' && i > 0)) {
			validSb.WriteRune('_')
		} else {
			validSb.WriteRune(b)
		}
	}
	return validSb.String()
}
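Editor's aside, not part of the commit: a minimal sketch contrasting the two helpers, assuming the package import path github.com/prometheus/prometheus/util/strutil and that invalidLabelCharRE (defined elsewhere in the package) replaces every character outside [a-zA-Z0-9_]; the input strings are illustrative only.

// Hypothetical sketch, not part of this commit.
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/util/strutil"
)

func main() {
	// SanitizeLabelName only replaces invalid characters; it leaves a leading
	// digit alone, so callers are expected to prepend a known-valid prefix.
	fmt.Println(strutil.SanitizeLabelName("0http.status")) // "0http_status"

	// SanitizeFullLabelName also rewrites a leading digit and the empty string.
	fmt.Println(strutil.SanitizeFullLabelName("0http.status")) // "_http_status"
	fmt.Println(strutil.SanitizeFullLabelName(""))             // "_"
}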
@@ -59,3 +59,21 @@ func TestSanitizeLabelName(t *testing.T) {
	expected = "barClient_LABEL____"
	require.Equal(t, expected, actual, "SanitizeLabelName failed for label (%s)", expected)
}

func TestSanitizeFullLabelName(t *testing.T) {
	actual := SanitizeFullLabelName("fooClientLABEL")
	expected := "fooClientLABEL"
	require.Equal(t, expected, actual, "SanitizeFullLabelName failed for label (%s)", expected)

	actual = SanitizeFullLabelName("barClient.LABEL$$##")
	expected = "barClient_LABEL____"
	require.Equal(t, expected, actual, "SanitizeFullLabelName failed for label (%s)", expected)

	actual = SanitizeFullLabelName("0zerothClient1LABEL")
	expected = "_zerothClient1LABEL"
	require.Equal(t, expected, actual, "SanitizeFullLabelName failed for label (%s)", expected)

	actual = SanitizeFullLabelName("")
	expected = "_"
	require.Equal(t, expected, actual, "SanitizeFullLabelName failed for the empty label")
}
@@ -1,6 +1,6 @@
 {
   "name": "@prometheus-io/codemirror-promql",
-  "version": "0.41.0",
+  "version": "0.42.0",
   "description": "a CodeMirror mode for the PromQL language",
   "types": "dist/esm/index.d.ts",
   "module": "dist/esm/index.js",
@@ -29,7 +29,7 @@
   },
   "homepage": "https://github.com/prometheus/prometheus/blob/main/web/ui/module/codemirror-promql/README.md",
   "dependencies": {
-    "@prometheus-io/lezer-promql": "^0.41.0",
+    "@prometheus-io/lezer-promql": "^0.42.0",
     "lru-cache": "^6.0.0"
   },
   "devDependencies": {
@@ -231,7 +231,19 @@ describe('analyzeCompletion test', () => {
      title: 'starting to autocomplete labelName in aggregate modifier',
      expr: 'sum by ()',
      pos: 8, // cursor is between the bracket
-     expectedContext: [{ kind: ContextKind.LabelName }],
+     expectedContext: [{ kind: ContextKind.LabelName, metricName: '' }],
    },
    {
      title: 'starting to autocomplete labelName in aggregate modifier with metric name',
      expr: 'sum(up) by ()',
      pos: 12, // cursor is between ()
      expectedContext: [{ kind: ContextKind.LabelName, metricName: 'up' }],
    },
    {
      title: 'starting to autocomplete labelName in aggregate modifier with metric name in front',
      expr: 'sum by ()(up)',
      pos: 8, // cursor is between ()
      expectedContext: [{ kind: ContextKind.LabelName, metricName: 'up' }],
    },
    {
      title: 'continue to autocomplete labelName in aggregate modifier',
@@ -243,7 +255,7 @@ describe('analyzeCompletion test', () => {
      title: 'autocomplete labelName in a list',
      expr: 'sum by (myLabel1,)',
      pos: 17, // cursor is between the bracket after the string myLab
-     expectedContext: [{ kind: ContextKind.LabelName }],
+     expectedContext: [{ kind: ContextKind.LabelName, metricName: '' }],
    },
    {
      title: 'autocomplete labelName in a list 2',
@@ -110,6 +110,26 @@ export interface Context {
  matchers?: Matcher[];
}

function getMetricNameInGroupBy(tree: SyntaxNode, state: EditorState): string {
  // There should be an AggregateExpr as parent of the GroupingLabels.
  // Then we should find the VectorSelector child to be able to find the metric name.
  const currentNode: SyntaxNode | null = walkBackward(tree, AggregateExpr);
  if (!currentNode) {
    return '';
  }
  let metricName = '';
  currentNode.cursor().iterate((node) => {
    // Continue until we find the VectorSelector, then look up the metric name.
    if (node.type.id === VectorSelector) {
      metricName = getMetricNameInVectorSelector(node.node, state);
      if (metricName) {
        return false;
      }
    }
  });
  return metricName;
}

function getMetricNameInVectorSelector(tree: SyntaxNode, state: EditorState): string {
  // Find if there is a defined metric name. Should be used to autocomplete a labelValue or a labelName.
  // First find the parent "VectorSelector" to be able to then find the subChild "Identifier" if it exists.
@@ -344,9 +364,9 @@ export function analyzeCompletion(state: EditorState, node: SyntaxNode): Context
      break;
    case GroupingLabels:
      // In this case we are in the given situation:
-     // sum by ()
-     // So we have to autocomplete any labelName
-     result.push({ kind: ContextKind.LabelName });
+     // sum by () or sum (metric_name) by ()
+     // so we have to autocomplete either any labelName or only the labelNames associated with the metric
+     result.push({ kind: ContextKind.LabelName, metricName: getMetricNameInGroupBy(node, state) });
      break;
    case LabelMatchers:
      // In that case we are in the given situation:
@@ -1,6 +1,6 @@
 {
   "name": "@prometheus-io/lezer-promql",
-  "version": "0.41.0",
+  "version": "0.42.0",
   "description": "lezer-based PromQL grammar",
   "main": "dist/index.cjs",
   "type": "module",
14
web/ui/package-lock.json
generated
@@ -28,10 +28,10 @@
   },
   "module/codemirror-promql": {
     "name": "@prometheus-io/codemirror-promql",
-    "version": "0.41.0",
+    "version": "0.42.0",
     "license": "Apache-2.0",
     "dependencies": {
-      "@prometheus-io/lezer-promql": "^0.41.0",
+      "@prometheus-io/lezer-promql": "^0.42.0",
       "lru-cache": "^6.0.0"
     },
     "devDependencies": {
@@ -61,7 +61,7 @@
   },
   "module/lezer-promql": {
     "name": "@prometheus-io/lezer-promql",
-    "version": "0.41.0",
+    "version": "0.42.0",
     "license": "Apache-2.0",
     "devDependencies": {
       "@lezer/generator": "^1.2.2",
@@ -20763,7 +20763,7 @@
   },
   "react-app": {
     "name": "@prometheus-io/app",
-    "version": "0.41.0",
+    "version": "0.42.0",
     "dependencies": {
       "@codemirror/autocomplete": "^6.4.0",
       "@codemirror/commands": "^6.2.0",
@@ -20781,7 +20781,7 @@
       "@lezer/lr": "^1.3.1",
       "@nexucis/fuzzy": "^0.4.1",
       "@nexucis/kvsearch": "^0.8.1",
-      "@prometheus-io/codemirror-promql": "^0.41.0",
+      "@prometheus-io/codemirror-promql": "^0.42.0",
       "bootstrap": "^4.6.2",
       "css.escape": "^1.5.1",
       "downshift": "^7.2.0",
@@ -23417,7 +23417,7 @@
       "@lezer/lr": "^1.3.1",
       "@nexucis/fuzzy": "^0.4.1",
       "@nexucis/kvsearch": "^0.8.1",
-      "@prometheus-io/codemirror-promql": "^0.41.0",
+      "@prometheus-io/codemirror-promql": "^0.42.0",
       "@testing-library/react-hooks": "^7.0.2",
       "@types/enzyme": "^3.10.12",
       "@types/flot": "0.0.32",
@@ -23468,7 +23468,7 @@
       "@lezer/common": "^1.0.2",
       "@lezer/highlight": "^1.1.3",
       "@lezer/lr": "^1.3.1",
-      "@prometheus-io/lezer-promql": "^0.41.0",
+      "@prometheus-io/lezer-promql": "^0.42.0",
       "@types/lru-cache": "^5.1.1",
       "isomorphic-fetch": "^3.0.0",
       "lru-cache": "^6.0.0",
@@ -1,6 +1,6 @@
 {
   "name": "@prometheus-io/app",
-  "version": "0.41.0",
+  "version": "0.42.0",
   "private": true,
   "dependencies": {
     "@codemirror/autocomplete": "^6.4.0",
@@ -19,7 +19,7 @@
     "@lezer/common": "^1.0.2",
     "@nexucis/fuzzy": "^0.4.1",
     "@nexucis/kvsearch": "^0.8.1",
-    "@prometheus-io/codemirror-promql": "^0.41.0",
+    "@prometheus-io/codemirror-promql": "^0.42.0",
     "bootstrap": "^4.6.2",
     "css.escape": "^1.5.1",
     "downshift": "^7.2.0",