mirror of
https://github.com/prometheus/prometheus.git
synced 2024-11-11 16:14:05 -08:00
Merge branch 'master' into release-2.13
This commit is contained in:
commit
e61b980676
|
@ -9,7 +9,7 @@ executors:
|
||||||
# should also be updated.
|
# should also be updated.
|
||||||
golang:
|
golang:
|
||||||
docker:
|
docker:
|
||||||
- image: circleci/golang:1.13
|
- image: circleci/golang:1.13-node
|
||||||
|
|
||||||
fuzzit:
|
fuzzit:
|
||||||
docker:
|
docker:
|
||||||
|
@ -24,7 +24,7 @@ jobs:
|
||||||
- run:
|
- run:
|
||||||
command: make
|
command: make
|
||||||
environment:
|
environment:
|
||||||
# Run garbage collection more aggresively to avoid getting OOMed during the lint phase.
|
# Run garbage collection more aggressively to avoid getting OOMed during the lint phase.
|
||||||
GOGC: "20"
|
GOGC: "20"
|
||||||
# By default Go uses GOMAXPROCS but a Circle CI executor has many
|
# By default Go uses GOMAXPROCS but a Circle CI executor has many
|
||||||
# cores (> 30) while the CPU and RAM resources are throttled. If we
|
# cores (> 30) while the CPU and RAM resources are throttled. If we
|
||||||
|
|
4
.gitignore
vendored
4
.gitignore
vendored
|
@ -16,3 +16,7 @@ benchmark.txt
|
||||||
!/.golangci.yml
|
!/.golangci.yml
|
||||||
/documentation/examples/remote_storage/remote_storage_adapter/remote_storage_adapter
|
/documentation/examples/remote_storage/remote_storage_adapter/remote_storage_adapter
|
||||||
/documentation/examples/remote_storage/example_write_adapter/example_writer_adapter
|
/documentation/examples/remote_storage/example_write_adapter/example_writer_adapter
|
||||||
|
|
||||||
|
npm_licenses.tar.bz2
|
||||||
|
/web/ui/static/graph-new
|
||||||
|
/web/ui/assets_vfsdata.go
|
||||||
|
|
|
@ -12,7 +12,7 @@ build:
|
||||||
path: ./cmd/promtool
|
path: ./cmd/promtool
|
||||||
- name: tsdb
|
- name: tsdb
|
||||||
path: ./tsdb/cmd/tsdb
|
path: ./tsdb/cmd/tsdb
|
||||||
flags: -mod=vendor -a -tags netgo
|
flags: -mod=vendor -a -tags netgo,builtinassets
|
||||||
ldflags: |
|
ldflags: |
|
||||||
-X github.com/prometheus/common/version.Version={{.Version}}
|
-X github.com/prometheus/common/version.Version={{.Version}}
|
||||||
-X github.com/prometheus/common/version.Revision={{.Revision}}
|
-X github.com/prometheus/common/version.Revision={{.Revision}}
|
||||||
|
@ -26,6 +26,7 @@ tarball:
|
||||||
- documentation/examples/prometheus.yml
|
- documentation/examples/prometheus.yml
|
||||||
- LICENSE
|
- LICENSE
|
||||||
- NOTICE
|
- NOTICE
|
||||||
|
- npm_licenses.tar.bz2
|
||||||
crossbuild:
|
crossbuild:
|
||||||
platforms:
|
platforms:
|
||||||
- linux/amd64
|
- linux/amd64
|
||||||
|
|
|
@ -12,8 +12,11 @@ go_import_path: github.com/prometheus/prometheus
|
||||||
# random issues on Travis.
|
# random issues on Travis.
|
||||||
before_install:
|
before_install:
|
||||||
- travis_retry make deps
|
- travis_retry make deps
|
||||||
|
- . $HOME/.nvm/nvm.sh
|
||||||
|
- nvm install stable
|
||||||
|
- nvm use stable
|
||||||
- if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then choco install make; fi
|
- if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then choco install make; fi
|
||||||
|
|
||||||
script:
|
script:
|
||||||
- make check_license style unused test lint check_assets
|
- make check_license style unused test lint
|
||||||
- git diff --exit-code
|
- git diff --exit-code
|
||||||
|
|
|
@ -134,7 +134,7 @@ and the memory is available to the kernel when it needs it.
|
||||||
* [BUGFIX] Check if label value is valid when unmarshaling external labels from YAML. #5316
|
* [BUGFIX] Check if label value is valid when unmarshaling external labels from YAML. #5316
|
||||||
* [BUGFIX] Promparse: sort all labels when parsing. #5372
|
* [BUGFIX] Promparse: sort all labels when parsing. #5372
|
||||||
* [BUGFIX] Reload rules: copy state on both name and labels. #5368
|
* [BUGFIX] Reload rules: copy state on both name and labels. #5368
|
||||||
* [BUGFIX] Exponentation operator to drop metric name in result of operation. #5329
|
* [BUGFIX] Exponentiation operator to drop metric name in result of operation. #5329
|
||||||
* [BUGFIX] Config: resolve more file paths. #5284
|
* [BUGFIX] Config: resolve more file paths. #5284
|
||||||
* [BUGFIX] Promtool: resolve relative paths in alert test files. #5336
|
* [BUGFIX] Promtool: resolve relative paths in alert test files. #5336
|
||||||
* [BUGFIX] Set TLSHandshakeTimeout in HTTP transport. common#179
|
* [BUGFIX] Set TLSHandshakeTimeout in HTTP transport. common#179
|
||||||
|
|
|
@ -10,6 +10,9 @@ COPY .build/${OS}-${ARCH}/promtool /bin/promtool
|
||||||
COPY documentation/examples/prometheus.yml /etc/prometheus/prometheus.yml
|
COPY documentation/examples/prometheus.yml /etc/prometheus/prometheus.yml
|
||||||
COPY console_libraries/ /usr/share/prometheus/console_libraries/
|
COPY console_libraries/ /usr/share/prometheus/console_libraries/
|
||||||
COPY consoles/ /usr/share/prometheus/consoles/
|
COPY consoles/ /usr/share/prometheus/consoles/
|
||||||
|
COPY LICENSE /LICENSE
|
||||||
|
COPY NOTICE /NOTICE
|
||||||
|
COPY npm_licenses.tar.bz2 /npm_licenses.tar.bz2
|
||||||
|
|
||||||
RUN ln -s /usr/share/prometheus/console_libraries /usr/share/prometheus/consoles/ /etc/prometheus/
|
RUN ln -s /usr/share/prometheus/console_libraries /usr/share/prometheus/consoles/ /etc/prometheus/
|
||||||
RUN mkdir -p /prometheus && \
|
RUN mkdir -p /prometheus && \
|
||||||
|
|
|
@ -3,4 +3,6 @@ Maintainers of this repository with their focus areas:
|
||||||
* Brian Brazil <brian.brazil@robustperception.io> @brian-brazil: Console templates; semantics of PromQL, service discovery, and relabeling.
|
* Brian Brazil <brian.brazil@robustperception.io> @brian-brazil: Console templates; semantics of PromQL, service discovery, and relabeling.
|
||||||
* Fabian Reinartz <freinartz@google.com> @fabxc: PromQL parsing and evaluation; implementation of retrieval, alert notification, and service discovery.
|
* Fabian Reinartz <freinartz@google.com> @fabxc: PromQL parsing and evaluation; implementation of retrieval, alert notification, and service discovery.
|
||||||
* Julius Volz <julius.volz@gmail.com> @juliusv: Remote storage integrations; web UI.
|
* Julius Volz <julius.volz@gmail.com> @juliusv: Remote storage integrations; web UI.
|
||||||
|
* Krasi Georgiev <kgeorgie@redhat.com> @krasi-georgiev: TSDB - the storage engine.
|
||||||
|
* Ganesh Vernekar <cs15btech11018@iith.ac.in> @codesome: TSDB - the storage engine.
|
||||||
|
|
||||||
|
|
51
Makefile
51
Makefile
|
@ -14,6 +14,12 @@
|
||||||
# Needs to be defined before including Makefile.common to auto-generate targets
|
# Needs to be defined before including Makefile.common to auto-generate targets
|
||||||
DOCKER_ARCHS ?= amd64 armv7 arm64
|
DOCKER_ARCHS ?= amd64 armv7 arm64
|
||||||
|
|
||||||
|
REACT_APP_PATH = web/ui/react-app
|
||||||
|
REACT_APP_SOURCE_FILES = $(wildcard $(REACT_APP_PATH)/public/* $(REACT_APP_PATH)/src/* $(REACT_APP_PATH)/tsconfig.json)
|
||||||
|
REACT_APP_OUTPUT_DIR = web/ui/static/graph-new
|
||||||
|
REACT_APP_NODE_MODULES_PATH = $(REACT_APP_PATH)/node_modules
|
||||||
|
REACT_APP_NPM_LICENSES_TARBALL = "npm_licenses.tar.bz2"
|
||||||
|
|
||||||
TSDB_PROJECT_DIR = "./tsdb"
|
TSDB_PROJECT_DIR = "./tsdb"
|
||||||
TSDB_CLI_DIR="$(TSDB_PROJECT_DIR)/cmd/tsdb"
|
TSDB_CLI_DIR="$(TSDB_PROJECT_DIR)/cmd/tsdb"
|
||||||
TSDB_BIN = "$(TSDB_CLI_DIR)/tsdb"
|
TSDB_BIN = "$(TSDB_CLI_DIR)/tsdb"
|
||||||
|
@ -25,23 +31,50 @@ include Makefile.common
|
||||||
|
|
||||||
DOCKER_IMAGE_NAME ?= prometheus
|
DOCKER_IMAGE_NAME ?= prometheus
|
||||||
|
|
||||||
|
$(REACT_APP_NODE_MODULES_PATH): $(REACT_APP_PATH)/package.json $(REACT_APP_PATH)/yarn.lock
|
||||||
|
cd $(REACT_APP_PATH) && yarn --frozen-lockfile
|
||||||
|
|
||||||
|
$(REACT_APP_OUTPUT_DIR): $(REACT_APP_NODE_MODULES_PATH) $(REACT_APP_SOURCE_FILES)
|
||||||
|
@echo ">> building React app"
|
||||||
|
@./scripts/build_react_app.sh
|
||||||
|
|
||||||
.PHONY: assets
|
.PHONY: assets
|
||||||
assets:
|
assets: $(REACT_APP_OUTPUT_DIR)
|
||||||
@echo ">> writing assets"
|
@echo ">> writing assets"
|
||||||
cd $(PREFIX)/web/ui && GO111MODULE=$(GO111MODULE) $(GO) generate -x -v $(GOOPTS)
|
# Un-setting GOOS and GOARCH here because the generated Go code is always the same,
|
||||||
|
# but the cached object code is incompatible between architectures and OSes (which
|
||||||
|
# breaks cross-building for different combinations on CI in the same container).
|
||||||
|
cd web/ui && GO111MODULE=$(GO111MODULE) GOOS= GOARCH= $(GO) generate -x -v $(GOOPTS)
|
||||||
@$(GOFMT) -w ./web/ui
|
@$(GOFMT) -w ./web/ui
|
||||||
|
|
||||||
.PHONY: check_assets
|
.PHONY: react-app-test
|
||||||
check_assets: assets
|
react-app-test: $(REACT_APP_NODE_MODULES_PATH)
|
||||||
@echo ">> checking that assets are up-to-date"
|
@echo ">> running React app tests"
|
||||||
@if ! (cd $(PREFIX)/web/ui && git diff --exit-code); then \
|
cd $(REACT_APP_PATH) && yarn test --no-watch
|
||||||
echo "Run 'make assets' and commit the changes to fix the error."; \
|
|
||||||
exit 1; \
|
|
||||||
fi
|
|
||||||
|
|
||||||
|
.PHONY: test
|
||||||
|
test: common-test react-app-test
|
||||||
|
|
||||||
|
.PHONY: npm_licenses
|
||||||
|
npm_licenses: $(REACT_APP_NODE_MODULES_PATH)
|
||||||
|
@echo ">> bundling npm licenses"
|
||||||
|
rm -f $(REACT_APP_NPM_LICENSES_TARBALL)
|
||||||
|
find $(REACT_APP_NODE_MODULES_PATH) -iname "license*" | tar cfj $(REACT_APP_NPM_LICENSES_TARBALL) --transform 's/^/npm_licenses\//' --files-from=-
|
||||||
|
|
||||||
|
.PHONY: tarball
|
||||||
|
tarball: npm_licenses common-tarball
|
||||||
|
|
||||||
|
.PHONY: docker
|
||||||
|
docker: npm_licenses common-docker
|
||||||
|
|
||||||
|
.PHONY: build
|
||||||
|
build: assets common-build
|
||||||
|
|
||||||
|
.PHONY: build_tsdb
|
||||||
build_tsdb:
|
build_tsdb:
|
||||||
GO111MODULE=$(GO111MODULE) $(GO) build -o $(TSDB_BIN) $(TSDB_CLI_DIR)
|
GO111MODULE=$(GO111MODULE) $(GO) build -o $(TSDB_BIN) $(TSDB_CLI_DIR)
|
||||||
|
|
||||||
|
.PHONY: bench_tsdb
|
||||||
bench_tsdb: build_tsdb
|
bench_tsdb: build_tsdb
|
||||||
@echo ">> running benchmark, writing result to $(TSDB_BENCHMARK_OUTPUT_DIR)"
|
@echo ">> running benchmark, writing result to $(TSDB_BENCHMARK_OUTPUT_DIR)"
|
||||||
@$(TSDB_BIN) bench write --metrics=$(TSDB_BENCHMARK_NUM_METRICS) --out=$(TSDB_BENCHMARK_OUTPUT_DIR) $(TSDB_BENCHMARK_DATASET)
|
@$(TSDB_BIN) bench write --metrics=$(TSDB_BENCHMARK_NUM_METRICS) --out=$(TSDB_BENCHMARK_OUTPUT_DIR) $(TSDB_BENCHMARK_DATASET)
|
||||||
|
|
6
NOTICE
6
NOTICE
|
@ -85,3 +85,9 @@ go-zookeeper - Native ZooKeeper client for Go
|
||||||
https://github.com/samuel/go-zookeeper
|
https://github.com/samuel/go-zookeeper
|
||||||
Copyright (c) 2013, Samuel Stauffer <samuel@descolada.com>
|
Copyright (c) 2013, Samuel Stauffer <samuel@descolada.com>
|
||||||
See https://github.com/samuel/go-zookeeper/blob/master/LICENSE for license details.
|
See https://github.com/samuel/go-zookeeper/blob/master/LICENSE for license details.
|
||||||
|
|
||||||
|
We also use code from a large number of npm packages. For details, see:
|
||||||
|
- https://github.com/prometheus/prometheus/blob/master/web/ui/react-app/package.json
|
||||||
|
- https://github.com/prometheus/prometheus/blob/master/web/ui/react-app/package-lock.json
|
||||||
|
- The individual package licenses as copied from the node_modules directory can be found in
|
||||||
|
the npm_licenses.tar.bz2 archive in release tarballs and Docker images.
|
||||||
|
|
18
README.md
18
README.md
|
@ -1,4 +1,4 @@
|
||||||
# Prometheus
|
# Prometheus
|
||||||
|
|
||||||
[![Build Status](https://travis-ci.org/prometheus/prometheus.svg)][travis]
|
[![Build Status](https://travis-ci.org/prometheus/prometheus.svg)][travis]
|
||||||
[![CircleCI](https://circleci.com/gh/prometheus/prometheus/tree/master.svg?style=shield)][circleci]
|
[![CircleCI](https://circleci.com/gh/prometheus/prometheus/tree/master.svg?style=shield)][circleci]
|
||||||
|
@ -16,7 +16,7 @@ from configured targets at given intervals, evaluates rule expressions,
|
||||||
displays the results, and can trigger alerts if some condition is observed
|
displays the results, and can trigger alerts if some condition is observed
|
||||||
to be true.
|
to be true.
|
||||||
|
|
||||||
Prometheus' main distinguishing features as compared to other monitoring systems are:
|
Prometheus's main distinguishing features as compared to other monitoring systems are:
|
||||||
|
|
||||||
- a **multi-dimensional** data model (timeseries defined by metric name and set of key/value dimensions)
|
- a **multi-dimensional** data model (timeseries defined by metric name and set of key/value dimensions)
|
||||||
- a **flexible query language** to leverage this dimensionality
|
- a **flexible query language** to leverage this dimensionality
|
||||||
|
@ -60,6 +60,8 @@ Prometheus will now be reachable at http://localhost:9090/.
|
||||||
|
|
||||||
To build Prometheus from the source code yourself you need to have a working
|
To build Prometheus from the source code yourself you need to have a working
|
||||||
Go environment with [version 1.13 or greater installed](https://golang.org/doc/install).
|
Go environment with [version 1.13 or greater installed](https://golang.org/doc/install).
|
||||||
|
You will also need to have [Node.js](https://nodejs.org/) and [Yarn](https://yarnpkg.com/)
|
||||||
|
installed in order to build the frontend assets.
|
||||||
|
|
||||||
You can directly use the `go` tool to download and install the `prometheus`
|
You can directly use the `go` tool to download and install the `prometheus`
|
||||||
and `promtool` binaries into your `GOPATH`:
|
and `promtool` binaries into your `GOPATH`:
|
||||||
|
@ -67,7 +69,14 @@ and `promtool` binaries into your `GOPATH`:
|
||||||
$ go get github.com/prometheus/prometheus/cmd/...
|
$ go get github.com/prometheus/prometheus/cmd/...
|
||||||
$ prometheus --config.file=your_config.yml
|
$ prometheus --config.file=your_config.yml
|
||||||
|
|
||||||
You can also clone the repository yourself and build using `make`:
|
*However*, when using `go get` to build Prometheus, Prometheus will expect to be able to
|
||||||
|
read its web assets from local filesystem directories under `web/ui/static` and
|
||||||
|
`web/ui/templates`. In order for these assets to be found, you will have to run Prometheus
|
||||||
|
from the root of the cloned repository. Note also that these directories do not include the
|
||||||
|
new experimental React UI unless it has been built explicitly using `make assets` or `make build`.
|
||||||
|
|
||||||
|
You can also clone the repository yourself and build using `make build`, which will compile in
|
||||||
|
the web assets so that Prometheus can be run from anywhere:
|
||||||
|
|
||||||
$ mkdir -p $GOPATH/src/github.com/prometheus
|
$ mkdir -p $GOPATH/src/github.com/prometheus
|
||||||
$ cd $GOPATH/src/github.com/prometheus
|
$ cd $GOPATH/src/github.com/prometheus
|
||||||
|
@ -78,12 +87,11 @@ You can also clone the repository yourself and build using `make`:
|
||||||
|
|
||||||
The Makefile provides several targets:
|
The Makefile provides several targets:
|
||||||
|
|
||||||
* *build*: build the `prometheus` and `promtool` binaries
|
* *build*: build the `prometheus` and `promtool` binaries (includes building and compiling in web assets)
|
||||||
* *test*: run the tests
|
* *test*: run the tests
|
||||||
* *test-short*: run the short tests
|
* *test-short*: run the short tests
|
||||||
* *format*: format the source code
|
* *format*: format the source code
|
||||||
* *vet*: check the source code for common errors
|
* *vet*: check the source code for common errors
|
||||||
* *assets*: rebuild the static assets
|
|
||||||
* *docker*: build a docker container for the current `HEAD`
|
* *docker*: build a docker container for the current `HEAD`
|
||||||
|
|
||||||
## More information
|
## More information
|
||||||
|
|
|
@ -56,7 +56,7 @@ For a patch release, work in the branch of the minor release you want to patch.
|
||||||
|
|
||||||
For a new major or minor release, create the corresponding release branch based on the master branch.
|
For a new major or minor release, create the corresponding release branch based on the master branch.
|
||||||
|
|
||||||
Bump the version in the `VERSION` file and update `CHANGELOG.md`. Do this in a proper PR as this gives others the opportunity to chime in on the release in general and on the addition to the changelog in particular.
|
Bump the version in the `VERSION` file and update `CHANGELOG.md`. Do this in a proper PR pointing to the release branch as this gives others the opportunity to chime in on the release in general and on the addition to the changelog in particular.
|
||||||
|
|
||||||
Note that `CHANGELOG.md` should only document changes relevant to users of Prometheus, including external API changes, performance improvements, and new features. Do not document changes of internal interfaces, code refactorings and clean-ups, changes to the build process, etc. People interested in these are asked to refer to the git history.
|
Note that `CHANGELOG.md` should only document changes relevant to users of Prometheus, including external API changes, performance improvements, and new features. Do not document changes of internal interfaces, code refactorings and clean-ups, changes to the build process, etc. People interested in these are asked to refer to the git history.
|
||||||
|
|
||||||
|
|
|
@ -128,7 +128,9 @@ func (e *Endpoints) Run(ctx context.Context, ch chan<- []*targetgroup.Group) {
|
||||||
defer e.queue.ShutDown()
|
defer e.queue.ShutDown()
|
||||||
|
|
||||||
if !cache.WaitForCacheSync(ctx.Done(), e.endpointsInf.HasSynced, e.serviceInf.HasSynced, e.podInf.HasSynced) {
|
if !cache.WaitForCacheSync(ctx.Done(), e.endpointsInf.HasSynced, e.serviceInf.HasSynced, e.podInf.HasSynced) {
|
||||||
level.Error(e.logger).Log("msg", "endpoints informer unable to sync cache")
|
if ctx.Err() != context.Canceled {
|
||||||
|
level.Error(e.logger).Log("msg", "endpoints informer unable to sync cache")
|
||||||
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -70,7 +70,9 @@ func (i *Ingress) Run(ctx context.Context, ch chan<- []*targetgroup.Group) {
|
||||||
defer i.queue.ShutDown()
|
defer i.queue.ShutDown()
|
||||||
|
|
||||||
if !cache.WaitForCacheSync(ctx.Done(), i.informer.HasSynced) {
|
if !cache.WaitForCacheSync(ctx.Done(), i.informer.HasSynced) {
|
||||||
level.Error(i.logger).Log("msg", "ingress informer unable to sync cache")
|
if ctx.Err() != context.Canceled {
|
||||||
|
level.Error(i.logger).Log("msg", "ingress informer unable to sync cache")
|
||||||
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -79,7 +79,9 @@ func (n *Node) Run(ctx context.Context, ch chan<- []*targetgroup.Group) {
|
||||||
defer n.queue.ShutDown()
|
defer n.queue.ShutDown()
|
||||||
|
|
||||||
if !cache.WaitForCacheSync(ctx.Done(), n.informer.HasSynced) {
|
if !cache.WaitForCacheSync(ctx.Done(), n.informer.HasSynced) {
|
||||||
level.Error(n.logger).Log("msg", "node informer unable to sync cache")
|
if ctx.Err() != context.Canceled {
|
||||||
|
level.Error(n.logger).Log("msg", "node informer unable to sync cache")
|
||||||
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -82,7 +82,9 @@ func (p *Pod) Run(ctx context.Context, ch chan<- []*targetgroup.Group) {
|
||||||
defer p.queue.ShutDown()
|
defer p.queue.ShutDown()
|
||||||
|
|
||||||
if !cache.WaitForCacheSync(ctx.Done(), p.informer.HasSynced) {
|
if !cache.WaitForCacheSync(ctx.Done(), p.informer.HasSynced) {
|
||||||
level.Error(p.logger).Log("msg", "pod informer unable to sync cache")
|
if ctx.Err() != context.Canceled {
|
||||||
|
level.Error(p.logger).Log("msg", "pod informer unable to sync cache")
|
||||||
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -75,7 +75,9 @@ func (s *Service) Run(ctx context.Context, ch chan<- []*targetgroup.Group) {
|
||||||
defer s.queue.ShutDown()
|
defer s.queue.ShutDown()
|
||||||
|
|
||||||
if !cache.WaitForCacheSync(ctx.Done(), s.informer.HasSynced) {
|
if !cache.WaitForCacheSync(ctx.Done(), s.informer.HasSynced) {
|
||||||
level.Error(s.logger).Log("msg", "service informer unable to sync cache")
|
if ctx.Err() != context.Canceled {
|
||||||
|
level.Error(s.logger).Log("msg", "service informer unable to sync cache")
|
||||||
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -25,11 +25,13 @@ import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/go-kit/kit/log"
|
"github.com/go-kit/kit/log"
|
||||||
"github.com/prometheus/client_golang/prometheus"
|
"github.com/prometheus/client_golang/prometheus/testutil"
|
||||||
dto "github.com/prometheus/client_model/go"
|
common_config "github.com/prometheus/common/config"
|
||||||
"github.com/prometheus/common/model"
|
"github.com/prometheus/common/model"
|
||||||
"github.com/prometheus/prometheus/config"
|
"github.com/prometheus/prometheus/config"
|
||||||
sd_config "github.com/prometheus/prometheus/discovery/config"
|
sd_config "github.com/prometheus/prometheus/discovery/config"
|
||||||
|
"github.com/prometheus/prometheus/discovery/consul"
|
||||||
|
"github.com/prometheus/prometheus/discovery/file"
|
||||||
"github.com/prometheus/prometheus/discovery/targetgroup"
|
"github.com/prometheus/prometheus/discovery/targetgroup"
|
||||||
"gopkg.in/yaml.v2"
|
"gopkg.in/yaml.v2"
|
||||||
)
|
)
|
||||||
|
@ -749,27 +751,33 @@ func verifyPresence(t *testing.T, tSets map[poolKey]map[string]*targetgroup.Grou
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTargetSetRecreatesTargetGroupsEveryRun(t *testing.T) {
|
func TestTargetSetRecreatesTargetGroupsEveryRun(t *testing.T) {
|
||||||
cfg := &config.Config{}
|
|
||||||
|
|
||||||
sOne := `
|
|
||||||
scrape_configs:
|
|
||||||
- job_name: 'prometheus'
|
|
||||||
static_configs:
|
|
||||||
- targets: ["foo:9090"]
|
|
||||||
- targets: ["bar:9090"]
|
|
||||||
`
|
|
||||||
if err := yaml.UnmarshalStrict([]byte(sOne), cfg); err != nil {
|
|
||||||
t.Fatalf("Unable to load YAML config sOne: %s", err)
|
|
||||||
}
|
|
||||||
ctx, cancel := context.WithCancel(context.Background())
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
defer cancel()
|
defer cancel()
|
||||||
discoveryManager := NewManager(ctx, log.NewNopLogger())
|
discoveryManager := NewManager(ctx, log.NewNopLogger())
|
||||||
discoveryManager.updatert = 100 * time.Millisecond
|
discoveryManager.updatert = 100 * time.Millisecond
|
||||||
go discoveryManager.Run()
|
go discoveryManager.Run()
|
||||||
|
|
||||||
c := make(map[string]sd_config.ServiceDiscoveryConfig)
|
c := map[string]sd_config.ServiceDiscoveryConfig{
|
||||||
for _, v := range cfg.ScrapeConfigs {
|
"prometheus": sd_config.ServiceDiscoveryConfig{
|
||||||
c[v.JobName] = v.ServiceDiscoveryConfig
|
StaticConfigs: []*targetgroup.Group{
|
||||||
|
&targetgroup.Group{
|
||||||
|
Source: "0",
|
||||||
|
Targets: []model.LabelSet{
|
||||||
|
model.LabelSet{
|
||||||
|
model.AddressLabel: model.LabelValue("foo:9090"),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&targetgroup.Group{
|
||||||
|
Source: "1",
|
||||||
|
Targets: []model.LabelSet{
|
||||||
|
model.LabelSet{
|
||||||
|
model.AddressLabel: model.LabelValue("bar:9090"),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
discoveryManager.ApplyConfig(c)
|
discoveryManager.ApplyConfig(c)
|
||||||
|
|
||||||
|
@ -777,18 +785,17 @@ scrape_configs:
|
||||||
verifyPresence(t, discoveryManager.targets, poolKey{setName: "prometheus", provider: "string/0"}, "{__address__=\"foo:9090\"}", true)
|
verifyPresence(t, discoveryManager.targets, poolKey{setName: "prometheus", provider: "string/0"}, "{__address__=\"foo:9090\"}", true)
|
||||||
verifyPresence(t, discoveryManager.targets, poolKey{setName: "prometheus", provider: "string/0"}, "{__address__=\"bar:9090\"}", true)
|
verifyPresence(t, discoveryManager.targets, poolKey{setName: "prometheus", provider: "string/0"}, "{__address__=\"bar:9090\"}", true)
|
||||||
|
|
||||||
sTwo := `
|
c["prometheus"] = sd_config.ServiceDiscoveryConfig{
|
||||||
scrape_configs:
|
StaticConfigs: []*targetgroup.Group{
|
||||||
- job_name: 'prometheus'
|
&targetgroup.Group{
|
||||||
static_configs:
|
Source: "0",
|
||||||
- targets: ["foo:9090"]
|
Targets: []model.LabelSet{
|
||||||
`
|
model.LabelSet{
|
||||||
if err := yaml.UnmarshalStrict([]byte(sTwo), cfg); err != nil {
|
model.AddressLabel: model.LabelValue("foo:9090"),
|
||||||
t.Fatalf("Unable to load YAML config sTwo: %s", err)
|
},
|
||||||
}
|
},
|
||||||
c = make(map[string]sd_config.ServiceDiscoveryConfig)
|
},
|
||||||
for _, v := range cfg.ScrapeConfigs {
|
},
|
||||||
c[v.JobName] = v.ServiceDiscoveryConfig
|
|
||||||
}
|
}
|
||||||
discoveryManager.ApplyConfig(c)
|
discoveryManager.ApplyConfig(c)
|
||||||
|
|
||||||
|
@ -801,43 +808,33 @@ scrape_configs:
|
||||||
// removing all targets from the static_configs sends an update with empty targetGroups.
|
// removing all targets from the static_configs sends an update with empty targetGroups.
|
||||||
// This is required to signal the receiver that this target set has no current targets.
|
// This is required to signal the receiver that this target set has no current targets.
|
||||||
func TestTargetSetRecreatesEmptyStaticConfigs(t *testing.T) {
|
func TestTargetSetRecreatesEmptyStaticConfigs(t *testing.T) {
|
||||||
cfg := &config.Config{}
|
|
||||||
|
|
||||||
sOne := `
|
|
||||||
scrape_configs:
|
|
||||||
- job_name: 'prometheus'
|
|
||||||
static_configs:
|
|
||||||
- targets: ["foo:9090"]
|
|
||||||
`
|
|
||||||
if err := yaml.UnmarshalStrict([]byte(sOne), cfg); err != nil {
|
|
||||||
t.Fatalf("Unable to load YAML config sOne: %s", err)
|
|
||||||
}
|
|
||||||
ctx, cancel := context.WithCancel(context.Background())
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
defer cancel()
|
defer cancel()
|
||||||
discoveryManager := NewManager(ctx, log.NewNopLogger())
|
discoveryManager := NewManager(ctx, log.NewNopLogger())
|
||||||
discoveryManager.updatert = 100 * time.Millisecond
|
discoveryManager.updatert = 100 * time.Millisecond
|
||||||
go discoveryManager.Run()
|
go discoveryManager.Run()
|
||||||
|
|
||||||
c := make(map[string]sd_config.ServiceDiscoveryConfig)
|
c := map[string]sd_config.ServiceDiscoveryConfig{
|
||||||
for _, v := range cfg.ScrapeConfigs {
|
"prometheus": sd_config.ServiceDiscoveryConfig{
|
||||||
c[v.JobName] = v.ServiceDiscoveryConfig
|
StaticConfigs: []*targetgroup.Group{
|
||||||
|
&targetgroup.Group{
|
||||||
|
Source: "0",
|
||||||
|
Targets: []model.LabelSet{
|
||||||
|
model.LabelSet{
|
||||||
|
model.AddressLabel: model.LabelValue("foo:9090"),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
discoveryManager.ApplyConfig(c)
|
discoveryManager.ApplyConfig(c)
|
||||||
|
|
||||||
<-discoveryManager.SyncCh()
|
<-discoveryManager.SyncCh()
|
||||||
verifyPresence(t, discoveryManager.targets, poolKey{setName: "prometheus", provider: "string/0"}, "{__address__=\"foo:9090\"}", true)
|
verifyPresence(t, discoveryManager.targets, poolKey{setName: "prometheus", provider: "string/0"}, "{__address__=\"foo:9090\"}", true)
|
||||||
|
|
||||||
sTwo := `
|
c["prometheus"] = sd_config.ServiceDiscoveryConfig{
|
||||||
scrape_configs:
|
StaticConfigs: []*targetgroup.Group{},
|
||||||
- job_name: 'prometheus'
|
|
||||||
static_configs:
|
|
||||||
`
|
|
||||||
if err := yaml.UnmarshalStrict([]byte(sTwo), cfg); err != nil {
|
|
||||||
t.Fatalf("Unable to load YAML config sTwo: %s", err)
|
|
||||||
}
|
|
||||||
c = make(map[string]sd_config.ServiceDiscoveryConfig)
|
|
||||||
for _, v := range cfg.ScrapeConfigs {
|
|
||||||
c[v.JobName] = v.ServiceDiscoveryConfig
|
|
||||||
}
|
}
|
||||||
discoveryManager.ApplyConfig(c)
|
discoveryManager.ApplyConfig(c)
|
||||||
|
|
||||||
|
@ -876,30 +873,33 @@ func TestIdenticalConfigurationsAreCoalesced(t *testing.T) {
|
||||||
}
|
}
|
||||||
defer os.Remove(tmpFile2)
|
defer os.Remove(tmpFile2)
|
||||||
|
|
||||||
cfg := &config.Config{}
|
|
||||||
|
|
||||||
sOne := `
|
|
||||||
scrape_configs:
|
|
||||||
- job_name: 'prometheus'
|
|
||||||
file_sd_configs:
|
|
||||||
- files: ["%s"]
|
|
||||||
- job_name: 'prometheus2'
|
|
||||||
file_sd_configs:
|
|
||||||
- files: ["%s"]
|
|
||||||
`
|
|
||||||
sOne = fmt.Sprintf(sOne, tmpFile2, tmpFile2)
|
|
||||||
if err := yaml.UnmarshalStrict([]byte(sOne), cfg); err != nil {
|
|
||||||
t.Fatalf("Unable to load YAML config sOne: %s", err)
|
|
||||||
}
|
|
||||||
ctx, cancel := context.WithCancel(context.Background())
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
defer cancel()
|
defer cancel()
|
||||||
discoveryManager := NewManager(ctx, nil)
|
discoveryManager := NewManager(ctx, nil)
|
||||||
discoveryManager.updatert = 100 * time.Millisecond
|
discoveryManager.updatert = 100 * time.Millisecond
|
||||||
go discoveryManager.Run()
|
go discoveryManager.Run()
|
||||||
|
|
||||||
c := make(map[string]sd_config.ServiceDiscoveryConfig)
|
c := map[string]sd_config.ServiceDiscoveryConfig{
|
||||||
for _, v := range cfg.ScrapeConfigs {
|
"prometheus": sd_config.ServiceDiscoveryConfig{
|
||||||
c[v.JobName] = v.ServiceDiscoveryConfig
|
FileSDConfigs: []*file.SDConfig{
|
||||||
|
&file.SDConfig{
|
||||||
|
Files: []string{
|
||||||
|
tmpFile2,
|
||||||
|
},
|
||||||
|
RefreshInterval: file.DefaultSDConfig.RefreshInterval,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"prometheus2": sd_config.ServiceDiscoveryConfig{
|
||||||
|
FileSDConfigs: []*file.SDConfig{
|
||||||
|
&file.SDConfig{
|
||||||
|
Files: []string{
|
||||||
|
tmpFile2,
|
||||||
|
},
|
||||||
|
RefreshInterval: file.DefaultSDConfig.RefreshInterval,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
discoveryManager.ApplyConfig(c)
|
discoveryManager.ApplyConfig(c)
|
||||||
|
|
||||||
|
@ -924,7 +924,6 @@ scrape_configs:
|
||||||
if err := yaml.UnmarshalStrict([]byte(cfgText), originalConfig); err != nil {
|
if err := yaml.UnmarshalStrict([]byte(cfgText), originalConfig); err != nil {
|
||||||
t.Fatalf("Unable to load YAML config cfgYaml: %s", err)
|
t.Fatalf("Unable to load YAML config cfgYaml: %s", err)
|
||||||
}
|
}
|
||||||
origScrpCfg := originalConfig.ScrapeConfigs[0]
|
|
||||||
|
|
||||||
processedConfig := &config.Config{}
|
processedConfig := &config.Config{}
|
||||||
if err := yaml.UnmarshalStrict([]byte(cfgText), processedConfig); err != nil {
|
if err := yaml.UnmarshalStrict([]byte(cfgText), processedConfig); err != nil {
|
||||||
|
@ -936,100 +935,76 @@ scrape_configs:
|
||||||
discoveryManager.updatert = 100 * time.Millisecond
|
discoveryManager.updatert = 100 * time.Millisecond
|
||||||
go discoveryManager.Run()
|
go discoveryManager.Run()
|
||||||
|
|
||||||
c := make(map[string]sd_config.ServiceDiscoveryConfig)
|
c := map[string]sd_config.ServiceDiscoveryConfig{
|
||||||
for _, v := range processedConfig.ScrapeConfigs {
|
"prometheus": processedConfig.ScrapeConfigs[0].ServiceDiscoveryConfig,
|
||||||
c[v.JobName] = v.ServiceDiscoveryConfig
|
|
||||||
}
|
}
|
||||||
discoveryManager.ApplyConfig(c)
|
discoveryManager.ApplyConfig(c)
|
||||||
<-discoveryManager.SyncCh()
|
<-discoveryManager.SyncCh()
|
||||||
|
|
||||||
|
origSdcfg := originalConfig.ScrapeConfigs[0].ServiceDiscoveryConfig
|
||||||
for _, sdcfg := range c {
|
for _, sdcfg := range c {
|
||||||
if !reflect.DeepEqual(origScrpCfg.ServiceDiscoveryConfig.StaticConfigs, sdcfg.StaticConfigs) {
|
if !reflect.DeepEqual(origSdcfg.StaticConfigs, sdcfg.StaticConfigs) {
|
||||||
t.Fatalf("discovery manager modified static config \n expected: %v\n got: %v\n",
|
t.Fatalf("discovery manager modified static config \n expected: %v\n got: %v\n",
|
||||||
origScrpCfg.ServiceDiscoveryConfig.StaticConfigs, sdcfg.StaticConfigs)
|
origSdcfg.StaticConfigs, sdcfg.StaticConfigs)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGaugeFailedConfigs(t *testing.T) {
|
func TestGaugeFailedConfigs(t *testing.T) {
|
||||||
var (
|
|
||||||
fcGauge prometheus.Gauge
|
|
||||||
err error
|
|
||||||
)
|
|
||||||
|
|
||||||
cfgOneText := `
|
|
||||||
scrape_configs:
|
|
||||||
- job_name: prometheus
|
|
||||||
consul_sd_configs:
|
|
||||||
- server: "foo:8500"
|
|
||||||
tls_config:
|
|
||||||
cert_file: "/tmp/non_existent"
|
|
||||||
- server: "bar:8500"
|
|
||||||
tls_config:
|
|
||||||
cert_file: "/tmp/non_existent"
|
|
||||||
- server: "foo2:8500"
|
|
||||||
tls_config:
|
|
||||||
cert_file: "/tmp/non_existent"
|
|
||||||
`
|
|
||||||
cfgOne := &config.Config{}
|
|
||||||
|
|
||||||
err = yaml.UnmarshalStrict([]byte(cfgOneText), cfgOne)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("Unable to load YAML config cfgOne: %s", err)
|
|
||||||
}
|
|
||||||
ctx, cancel := context.WithCancel(context.Background())
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
defer cancel()
|
defer cancel()
|
||||||
discoveryManager := NewManager(ctx, log.NewNopLogger())
|
discoveryManager := NewManager(ctx, log.NewNopLogger())
|
||||||
discoveryManager.updatert = 100 * time.Millisecond
|
discoveryManager.updatert = 100 * time.Millisecond
|
||||||
go discoveryManager.Run()
|
go discoveryManager.Run()
|
||||||
|
|
||||||
c := make(map[string]sd_config.ServiceDiscoveryConfig)
|
c := map[string]sd_config.ServiceDiscoveryConfig{
|
||||||
for _, v := range cfgOne.ScrapeConfigs {
|
"prometheus": sd_config.ServiceDiscoveryConfig{
|
||||||
c[v.JobName] = v.ServiceDiscoveryConfig
|
ConsulSDConfigs: []*consul.SDConfig{
|
||||||
|
&consul.SDConfig{
|
||||||
|
Server: "foo:8500",
|
||||||
|
TLSConfig: common_config.TLSConfig{
|
||||||
|
CertFile: "/tmp/non_existent",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&consul.SDConfig{
|
||||||
|
Server: "bar:8500",
|
||||||
|
TLSConfig: common_config.TLSConfig{
|
||||||
|
CertFile: "/tmp/non_existent",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&consul.SDConfig{
|
||||||
|
Server: "foo2:8500",
|
||||||
|
TLSConfig: common_config.TLSConfig{
|
||||||
|
CertFile: "/tmp/non_existent",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
discoveryManager.ApplyConfig(c)
|
discoveryManager.ApplyConfig(c)
|
||||||
<-discoveryManager.SyncCh()
|
<-discoveryManager.SyncCh()
|
||||||
|
|
||||||
metricOne := &dto.Metric{}
|
failedCount := testutil.ToFloat64(failedConfigs)
|
||||||
fcGauge, err = failedConfigs.GetMetricWithLabelValues(discoveryManager.name)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
fcGauge.Write(metricOne)
|
|
||||||
|
|
||||||
failedCount := metricOne.GetGauge().GetValue()
|
|
||||||
if failedCount != 3 {
|
if failedCount != 3 {
|
||||||
t.Fatalf("Expected to have 3 failed configs, got: %v", failedCount)
|
t.Fatalf("Expected to have 3 failed configs, got: %v", failedCount)
|
||||||
}
|
}
|
||||||
|
|
||||||
cfgTwoText := `
|
c["prometheus"] = sd_config.ServiceDiscoveryConfig{
|
||||||
scrape_configs:
|
StaticConfigs: []*targetgroup.Group{
|
||||||
- job_name: 'prometheus'
|
&targetgroup.Group{
|
||||||
static_configs:
|
Source: "0",
|
||||||
- targets: ["foo:9090"]
|
Targets: []model.LabelSet{
|
||||||
`
|
model.LabelSet{
|
||||||
cfgTwo := &config.Config{}
|
model.AddressLabel: "foo:9090",
|
||||||
if err := yaml.UnmarshalStrict([]byte(cfgTwoText), cfgTwo); err != nil {
|
},
|
||||||
t.Fatalf("Unable to load YAML config cfgTwo: %s", err)
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
c = make(map[string]sd_config.ServiceDiscoveryConfig)
|
|
||||||
for _, v := range cfgTwo.ScrapeConfigs {
|
|
||||||
c[v.JobName] = v.ServiceDiscoveryConfig
|
|
||||||
}
|
|
||||||
|
|
||||||
discoveryManager.ApplyConfig(c)
|
discoveryManager.ApplyConfig(c)
|
||||||
<-discoveryManager.SyncCh()
|
<-discoveryManager.SyncCh()
|
||||||
|
|
||||||
metricTwo := &dto.Metric{}
|
failedCount = testutil.ToFloat64(failedConfigs)
|
||||||
fcGauge, err = failedConfigs.GetMetricWithLabelValues(discoveryManager.name)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
fcGauge.Write(metricTwo)
|
|
||||||
|
|
||||||
failedCount = metricTwo.GetGauge().GetValue()
|
|
||||||
if failedCount != 0 {
|
if failedCount != 0 {
|
||||||
t.Fatalf("Expected to get no failed config, got: %v", failedCount)
|
t.Fatalf("Expected to get no failed config, got: %v", failedCount)
|
||||||
}
|
}
|
||||||
|
|
|
@ -75,7 +75,9 @@ func (d *Discovery) Run(ctx context.Context, ch chan<- []*targetgroup.Group) {
|
||||||
// Get an initial set right away.
|
// Get an initial set right away.
|
||||||
tgs, err := d.refresh(ctx)
|
tgs, err := d.refresh(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
level.Error(d.logger).Log("msg", "Unable to refresh target groups", "err", err.Error())
|
if ctx.Err() != context.Canceled {
|
||||||
|
level.Error(d.logger).Log("msg", "Unable to refresh target groups", "err", err.Error())
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
select {
|
select {
|
||||||
case ch <- tgs:
|
case ch <- tgs:
|
||||||
|
@ -92,7 +94,9 @@ func (d *Discovery) Run(ctx context.Context, ch chan<- []*targetgroup.Group) {
|
||||||
case <-ticker.C:
|
case <-ticker.C:
|
||||||
tgs, err := d.refresh(ctx)
|
tgs, err := d.refresh(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
level.Error(d.logger).Log("msg", "Unable to refresh target groups", "err", err.Error())
|
if ctx.Err() != context.Canceled {
|
||||||
|
level.Error(d.logger).Log("msg", "Unable to refresh target groups", "err", err.Error())
|
||||||
|
}
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -18,6 +18,7 @@ import (
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/prometheus/prometheus/util/testutil"
|
"github.com/prometheus/prometheus/util/testutil"
|
||||||
|
"gopkg.in/yaml.v2"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestTargetGroupStrictJsonUnmarshal(t *testing.T) {
|
func TestTargetGroupStrictJsonUnmarshal(t *testing.T) {
|
||||||
|
@ -46,3 +47,45 @@ func TestTargetGroupStrictJsonUnmarshal(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTargetGroupYamlUnmarshal(t *testing.T) {
|
||||||
|
unmarshal := func(d []byte) func(interface{}) error {
|
||||||
|
return func(o interface{}) error {
|
||||||
|
return yaml.Unmarshal(d, o)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tests := []struct {
|
||||||
|
yaml string
|
||||||
|
expectedNumberOfTargets int
|
||||||
|
expectedNumberOfLabels int
|
||||||
|
expectedReply error
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
yaml: "labels:\ntargets:\n",
|
||||||
|
expectedNumberOfTargets: 0,
|
||||||
|
expectedNumberOfLabels: 0,
|
||||||
|
expectedReply: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
yaml: "labels:\n my: label\ntargets:\n ['localhost:9090', 'localhost:9191']",
|
||||||
|
expectedNumberOfTargets: 2,
|
||||||
|
expectedNumberOfLabels: 1,
|
||||||
|
expectedReply: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
yaml: "labels:\ntargets:\n 'localhost:9090'",
|
||||||
|
expectedNumberOfTargets: 0,
|
||||||
|
expectedNumberOfLabels: 0,
|
||||||
|
expectedReply: &yaml.TypeError{Errors: []string{"line 3: cannot unmarshal !!str `localho...` into []string"}},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, test := range tests {
|
||||||
|
tg := Group{}
|
||||||
|
actual := tg.UnmarshalYAML(unmarshal([]byte(test.yaml)))
|
||||||
|
testutil.Equals(t, test.expectedReply, actual)
|
||||||
|
testutil.Equals(t, test.expectedNumberOfTargets, len(tg.Targets))
|
||||||
|
testutil.Equals(t, test.expectedNumberOfLabels, len(tg.Labels))
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
|
@ -101,5 +101,5 @@ on top of the simple alert definitions. In Prometheus's ecosystem, the
|
||||||
role. Thus, Prometheus may be configured to periodically send information about
|
role. Thus, Prometheus may be configured to periodically send information about
|
||||||
alert states to an Alertmanager instance, which then takes care of dispatching
|
alert states to an Alertmanager instance, which then takes care of dispatching
|
||||||
the right notifications.
|
the right notifications.
|
||||||
Prometheus can be [configured](configuration.md) to automatically discovered available
|
Prometheus can be [configured](configuration.md) to automatically discover available
|
||||||
Alertmanager instances through its service discovery integrations.
|
Alertmanager instances through its service discovery integrations.
|
||||||
|
|
|
@ -35,7 +35,7 @@ POST /-/reload
|
||||||
|
|
||||||
This endpoint triggers a reload of the Prometheus configuration and rule files. It's disabled by default and can be enabled via the `--web.enable-lifecycle` flag.
|
This endpoint triggers a reload of the Prometheus configuration and rule files. It's disabled by default and can be enabled via the `--web.enable-lifecycle` flag.
|
||||||
|
|
||||||
An alternative way trigger a configuration reload is by sending a `SIGHUP` to the Prometheus process.
|
Alternatively, a configuration reload can be triggered by sending a `SIGHUP` to the Prometheus process.
|
||||||
|
|
||||||
|
|
||||||
### Quit
|
### Quit
|
||||||
|
@ -47,4 +47,4 @@ POST /-/quit
|
||||||
|
|
||||||
This endpoint triggers a graceful shutdown of Prometheus. It's disabled by default and can be enabled via the `--web.enable-lifecycle` flag.
|
This endpoint triggers a graceful shutdown of Prometheus. It's disabled by default and can be enabled via the `--web.enable-lifecycle` flag.
|
||||||
|
|
||||||
An alternative way trigger a graceful shutdown is by sending a `SIGTERM` to the Prometheus process.
|
Alternatively, a graceful shutdown can be triggered by sending a `SIGTERM` to the Prometheus process.
|
||||||
|
|
|
@ -43,7 +43,9 @@ The directory structure of a Prometheus server's data directory will look someth
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
Note that a limitation of the local storage is that it is not clustered or replicated. Thus, it is not arbitrarily scalable or durable in the face of disk or node outages and should thus be treated as more of an ephemeral sliding window of recent data. However, if your durability requirements are not strict, you may still succeed in storing up to years of data in the local storage.
|
Note that a limitation of the local storage is that it is not clustered or replicated. Thus, it is not arbitrarily scalable or durable in the face of disk or node outages and should be treated as you would any other kind of single node database. Using RAID for disk availiablity, [snapshots](https://prometheus.io/docs/prometheus/latest/querying/api/#snapshot) for backups, capacity planning, etc, is recommended for improved durability. With proper storage durability and planning storing years of data in the local storage is possible.
|
||||||
|
|
||||||
|
Alternatively, external storage may be used via the [remote read/write APIs](https://prometheus.io/docs/operating/integrations/#remote-endpoints-and-storage). Careful evaluation is required for these systems as they vary greatly in durability, performance, and efficiency.
|
||||||
|
|
||||||
For further details on file format, see [TSDB format](https://github.com/prometheus/prometheus/blob/master/tsdb/docs/format/README.md).
|
For further details on file format, see [TSDB format](https://github.com/prometheus/prometheus/blob/master/tsdb/docs/format/README.md).
|
||||||
|
|
||||||
|
@ -51,7 +53,7 @@ For further details on file format, see [TSDB format](https://github.com/prometh
|
||||||
|
|
||||||
The initial two-hour blocks are eventually compacted into longer blocks in the background.
|
The initial two-hour blocks are eventually compacted into longer blocks in the background.
|
||||||
|
|
||||||
Compaction will create larger blocks up to 10% of the rention time, or 21 days, whichever is smaller.
|
Compaction will create larger blocks up to 10% of the retention time, or 21 days, whichever is smaller.
|
||||||
|
|
||||||
## Operational aspects
|
## Operational aspects
|
||||||
|
|
||||||
|
|
|
@ -11,7 +11,7 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
// +build openbsd windows netbsd
|
// +build openbsd windows netbsd solaris
|
||||||
|
|
||||||
package runtime
|
package runtime
|
||||||
|
|
||||||
|
|
|
@ -11,7 +11,7 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
// +build !windows,!openbsd,!netbsd
|
// +build !windows,!openbsd,!netbsd,!solaris
|
||||||
// +build !386
|
// +build !386
|
||||||
|
|
||||||
package runtime
|
package runtime
|
||||||
|
|
|
@ -11,7 +11,6 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
// +build !windows !openbsd !darwin !freebsd !netbsd
|
|
||||||
// +build linux,386
|
// +build linux,386
|
||||||
|
|
||||||
package runtime
|
package runtime
|
||||||
|
|
|
@ -15,12 +15,11 @@ package promql
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"reflect"
|
"errors"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/go-kit/kit/log"
|
"github.com/go-kit/kit/log"
|
||||||
"github.com/pkg/errors"
|
|
||||||
|
|
||||||
"github.com/prometheus/prometheus/pkg/labels"
|
"github.com/prometheus/prometheus/pkg/labels"
|
||||||
"github.com/prometheus/prometheus/storage"
|
"github.com/prometheus/prometheus/storage"
|
||||||
|
@ -104,14 +103,14 @@ func TestQueryTimeout(t *testing.T) {
|
||||||
})
|
})
|
||||||
|
|
||||||
res := query.Exec(ctx)
|
res := query.Exec(ctx)
|
||||||
if res.Err == nil {
|
testutil.NotOk(t, res.Err, "expected timeout error but got none")
|
||||||
t.Fatalf("expected timeout error but got none")
|
|
||||||
}
|
var e ErrQueryTimeout
|
||||||
if _, ok := res.Err.(ErrQueryTimeout); res.Err != nil && !ok {
|
testutil.Assert(t, errors.As(res.Err, &e), "expected timeout error but got: %s", res.Err)
|
||||||
t.Fatalf("expected timeout error but got: %s", res.Err)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const errQueryCanceled = ErrQueryCanceled("test statement execution")
|
||||||
|
|
||||||
func TestQueryCancel(t *testing.T) {
|
func TestQueryCancel(t *testing.T) {
|
||||||
opts := EngineOpts{
|
opts := EngineOpts{
|
||||||
Logger: nil,
|
Logger: nil,
|
||||||
|
@ -146,12 +145,8 @@ func TestQueryCancel(t *testing.T) {
|
||||||
block <- struct{}{}
|
block <- struct{}{}
|
||||||
<-processing
|
<-processing
|
||||||
|
|
||||||
if res.Err == nil {
|
testutil.NotOk(t, res.Err, "expected cancellation error for query1 but got none")
|
||||||
t.Fatalf("expected cancellation error for query1 but got none")
|
testutil.Equals(t, res.Err, errQueryCanceled)
|
||||||
}
|
|
||||||
if ee := ErrQueryCanceled("test statement execution"); res.Err != ee {
|
|
||||||
t.Fatalf("expected error %q, got %q", ee, res.Err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Canceling a query before starting it must have no effect.
|
// Canceling a query before starting it must have no effect.
|
||||||
query2 := engine.newTestQuery(func(ctx context.Context) error {
|
query2 := engine.newTestQuery(func(ctx context.Context) error {
|
||||||
|
@ -160,9 +155,7 @@ func TestQueryCancel(t *testing.T) {
|
||||||
|
|
||||||
query2.Cancel()
|
query2.Cancel()
|
||||||
res = query2.Exec(ctx)
|
res = query2.Exec(ctx)
|
||||||
if res.Err != nil {
|
testutil.Ok(t, res.Err)
|
||||||
t.Fatalf("unexpected error on executing query2: %s", res.Err)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// errQuerier implements storage.Querier which always returns error.
|
// errQuerier implements storage.Querier which always returns error.
|
||||||
|
@ -203,28 +196,18 @@ func TestQueryError(t *testing.T) {
|
||||||
defer cancelCtx()
|
defer cancelCtx()
|
||||||
|
|
||||||
vectorQuery, err := engine.NewInstantQuery(queryable, "foo", time.Unix(1, 0))
|
vectorQuery, err := engine.NewInstantQuery(queryable, "foo", time.Unix(1, 0))
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("unexpected error creating query: %q", err)
|
|
||||||
}
|
|
||||||
res := vectorQuery.Exec(ctx)
|
res := vectorQuery.Exec(ctx)
|
||||||
if res.Err == nil {
|
testutil.NotOk(t, res.Err, "expected error on failed select but got none")
|
||||||
t.Fatalf("expected error on failed select but got none")
|
testutil.Equals(t, res.Err, errStorage)
|
||||||
}
|
|
||||||
if res.Err != errStorage {
|
|
||||||
t.Fatalf("expected error %q, got %q", errStorage, res.Err)
|
|
||||||
}
|
|
||||||
|
|
||||||
matrixQuery, err := engine.NewInstantQuery(queryable, "foo[1m]", time.Unix(1, 0))
|
matrixQuery, err := engine.NewInstantQuery(queryable, "foo[1m]", time.Unix(1, 0))
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("unexpected error creating query: %q", err)
|
|
||||||
}
|
|
||||||
res = matrixQuery.Exec(ctx)
|
res = matrixQuery.Exec(ctx)
|
||||||
if res.Err == nil {
|
testutil.NotOk(t, res.Err, "expected error on failed select but got none")
|
||||||
t.Fatalf("expected error on failed select but got none")
|
testutil.Equals(t, res.Err, errStorage)
|
||||||
}
|
|
||||||
if res.Err != errStorage {
|
|
||||||
t.Fatalf("expected error %q, got %q", errStorage, res.Err)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// paramCheckerQuerier implements storage.Querier which checks the start and end times
|
// paramCheckerQuerier implements storage.Querier which checks the start and end times
|
||||||
|
@ -427,12 +410,8 @@ func TestEngineShutdown(t *testing.T) {
|
||||||
block <- struct{}{}
|
block <- struct{}{}
|
||||||
<-processing
|
<-processing
|
||||||
|
|
||||||
if res.Err == nil {
|
testutil.NotOk(t, res.Err, "expected error on shutdown during query but got none")
|
||||||
t.Fatalf("expected error on shutdown during query but got none")
|
testutil.Equals(t, res.Err, errQueryCanceled)
|
||||||
}
|
|
||||||
if ee := ErrQueryCanceled("test statement execution"); res.Err != ee {
|
|
||||||
t.Fatalf("expected error %q, got %q", ee, res.Err)
|
|
||||||
}
|
|
||||||
|
|
||||||
query2 := engine.newTestQuery(func(context.Context) error {
|
query2 := engine.newTestQuery(func(context.Context) error {
|
||||||
t.Fatalf("reached query execution unexpectedly")
|
t.Fatalf("reached query execution unexpectedly")
|
||||||
|
@ -442,12 +421,10 @@ func TestEngineShutdown(t *testing.T) {
|
||||||
// The second query is started after the engine shut down. It must
|
// The second query is started after the engine shut down. It must
|
||||||
// be canceled immediately.
|
// be canceled immediately.
|
||||||
res2 := query2.Exec(ctx)
|
res2 := query2.Exec(ctx)
|
||||||
if res2.Err == nil {
|
testutil.NotOk(t, res2.Err, "expected error on querying with canceled context but got none")
|
||||||
t.Fatalf("expected error on querying with canceled context but got none")
|
|
||||||
}
|
var e ErrQueryCanceled
|
||||||
if _, ok := res2.Err.(ErrQueryCanceled); !ok {
|
testutil.Assert(t, errors.As(res2.Err, &e), "expected cancellation error but got: %s", res2.Err)
|
||||||
t.Fatalf("expected cancellation error, got %q", res2.Err)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestEngineEvalStmtTimestamps(t *testing.T) {
|
func TestEngineEvalStmtTimestamps(t *testing.T) {
|
||||||
|
@ -455,15 +432,11 @@ func TestEngineEvalStmtTimestamps(t *testing.T) {
|
||||||
load 10s
|
load 10s
|
||||||
metric 1 2
|
metric 1 2
|
||||||
`)
|
`)
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("unexpected error creating test: %q", err)
|
|
||||||
}
|
|
||||||
defer test.Close()
|
defer test.Close()
|
||||||
|
|
||||||
err = test.Run()
|
err = test.Run()
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("unexpected error initializing test: %q", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
cases := []struct {
|
cases := []struct {
|
||||||
Query string
|
Query string
|
||||||
|
@ -540,20 +513,16 @@ load 10s
|
||||||
} else {
|
} else {
|
||||||
qry, err = test.QueryEngine().NewRangeQuery(test.Queryable(), c.Query, c.Start, c.End, c.Interval)
|
qry, err = test.QueryEngine().NewRangeQuery(test.Queryable(), c.Query, c.Start, c.End, c.Interval)
|
||||||
}
|
}
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("unexpected error creating query: %q", err)
|
|
||||||
}
|
|
||||||
res := qry.Exec(test.Context())
|
res := qry.Exec(test.Context())
|
||||||
if c.ShouldError {
|
if c.ShouldError {
|
||||||
testutil.NotOk(t, res.Err, "expected error for the query %q", c.Query)
|
testutil.NotOk(t, res.Err, "expected error for the query %q", c.Query)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if res.Err != nil {
|
|
||||||
t.Fatalf("unexpected error running query: %q", res.Err)
|
testutil.Ok(t, res.Err)
|
||||||
}
|
testutil.Equals(t, res.Value, c.Result)
|
||||||
if !reflect.DeepEqual(res.Value, c.Result) {
|
|
||||||
t.Fatalf("unexpected result for query %q: got %q wanted %q", c.Query, res.Value.String(), c.Result.String())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -563,16 +532,11 @@ func TestMaxQuerySamples(t *testing.T) {
|
||||||
load 10s
|
load 10s
|
||||||
metric 1 2
|
metric 1 2
|
||||||
`)
|
`)
|
||||||
|
testutil.Ok(t, err)
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("unexpected error creating test: %q", err)
|
|
||||||
}
|
|
||||||
defer test.Close()
|
defer test.Close()
|
||||||
|
|
||||||
err = test.Run()
|
err = test.Run()
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("unexpected error initializing test: %q", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
cases := []struct {
|
cases := []struct {
|
||||||
Query string
|
Query string
|
||||||
|
@ -772,16 +736,11 @@ load 10s
|
||||||
} else {
|
} else {
|
||||||
qry, err = engine.NewRangeQuery(test.Queryable(), c.Query, c.Start, c.End, c.Interval)
|
qry, err = engine.NewRangeQuery(test.Queryable(), c.Query, c.Start, c.End, c.Interval)
|
||||||
}
|
}
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("unexpected error creating query: %q", err)
|
|
||||||
}
|
|
||||||
res := qry.Exec(test.Context())
|
res := qry.Exec(test.Context())
|
||||||
if res.Err != nil && res.Err != c.Result.Err {
|
testutil.Equals(t, res.Err, c.Result.Err)
|
||||||
t.Fatalf("unexpected error running query: %q, expected to get result: %q", res.Err, c.Result.Value)
|
testutil.Equals(t, res.Value, c.Result.Value)
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(res.Value, c.Result.Value) {
|
|
||||||
t.Fatalf("unexpected result for query %q: got %q wanted %q", c.Query, res.Value.String(), c.Result.String())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1048,16 +1007,12 @@ func TestSubquerySelector(t *testing.T) {
|
||||||
SetDefaultEvaluationInterval(1 * time.Minute)
|
SetDefaultEvaluationInterval(1 * time.Minute)
|
||||||
for _, tst := range tests {
|
for _, tst := range tests {
|
||||||
test, err := NewTest(t, tst.loadString)
|
test, err := NewTest(t, tst.loadString)
|
||||||
|
testutil.Ok(t, err)
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("unexpected error creating test: %q", err)
|
|
||||||
}
|
|
||||||
defer test.Close()
|
defer test.Close()
|
||||||
|
|
||||||
err = test.Run()
|
err = test.Run()
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("unexpected error initializing test: %q", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
engine := test.QueryEngine()
|
engine := test.QueryEngine()
|
||||||
for _, c := range tst.cases {
|
for _, c := range tst.cases {
|
||||||
|
@ -1065,16 +1020,11 @@ func TestSubquerySelector(t *testing.T) {
|
||||||
var qry Query
|
var qry Query
|
||||||
|
|
||||||
qry, err = engine.NewInstantQuery(test.Queryable(), c.Query, c.Start)
|
qry, err = engine.NewInstantQuery(test.Queryable(), c.Query, c.Start)
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("unexpected error creating query: %q", err)
|
|
||||||
}
|
|
||||||
res := qry.Exec(test.Context())
|
res := qry.Exec(test.Context())
|
||||||
if res.Err != nil && res.Err != c.Result.Err {
|
testutil.Equals(t, res.Err, c.Result.Err)
|
||||||
t.Fatalf("unexpected error running query: %q, expected to get result: %q", res.Err, c.Result.Value)
|
testutil.Equals(t, res.Value, c.Result.Value)
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(res.Value, c.Result.Value) {
|
|
||||||
t.Fatalf("unexpected result for query %q: got %q wanted %q", c.Query, res.Value.String(), c.Result.String())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -14,9 +14,9 @@
|
||||||
package promql
|
package promql
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"reflect"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/prometheus/prometheus/util/testutil"
|
||||||
)
|
)
|
||||||
|
|
||||||
type testCase struct {
|
type testCase struct {
|
||||||
|
@ -688,24 +688,12 @@ func TestLexer(t *testing.T) {
|
||||||
t.Fatalf("unexpected lexing error at position %d: %s", lastItem.pos, lastItem)
|
t.Fatalf("unexpected lexing error at position %d: %s", lastItem.pos, lastItem)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !reflect.DeepEqual(lastItem, item{ItemEOF, Pos(len(test.input)), ""}) {
|
eofItem := item{ItemEOF, Pos(len(test.input)), ""}
|
||||||
t.Logf("%d: input %q", i, test.input)
|
testutil.Equals(t, lastItem, eofItem, "%d: input %q", i, test.input)
|
||||||
t.Fatalf("lexing error: expected output to end with EOF item.\ngot:\n%s", expectedList(out))
|
|
||||||
}
|
|
||||||
out = out[:len(out)-1]
|
out = out[:len(out)-1]
|
||||||
if !reflect.DeepEqual(out, test.expected) {
|
testutil.Equals(t, out, test.expected, "%d: input %q", i, test.input)
|
||||||
t.Logf("%d: input %q", i, test.input)
|
|
||||||
t.Fatalf("lexing mismatch:\nexpected:\n%s\ngot:\n%s", expectedList(test.expected), expectedList(out))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func expectedList(exp []item) string {
|
|
||||||
s := ""
|
|
||||||
for _, it := range exp {
|
|
||||||
s += fmt.Sprintf("\t%#v\n", it)
|
|
||||||
}
|
|
||||||
return s
|
|
||||||
}
|
|
||||||
|
|
|
@ -15,7 +15,6 @@ package promql
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"math"
|
"math"
|
||||||
"reflect"
|
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
@ -1590,26 +1589,14 @@ func TestParseExpressions(t *testing.T) {
|
||||||
expr, err := ParseExpr(test.input)
|
expr, err := ParseExpr(test.input)
|
||||||
|
|
||||||
// Unexpected errors are always caused by a bug.
|
// Unexpected errors are always caused by a bug.
|
||||||
if err == errUnexpected {
|
testutil.Assert(t, err != errUnexpected, "unexpected error occurred")
|
||||||
t.Fatalf("unexpected error occurred")
|
|
||||||
}
|
|
||||||
|
|
||||||
if !test.fail && err != nil {
|
if !test.fail {
|
||||||
t.Errorf("error in input '%s'", test.input)
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("could not parse: %s", err)
|
testutil.Equals(t, expr, test.expected, "error on input '%s'", test.input)
|
||||||
}
|
} else {
|
||||||
|
testutil.NotOk(t, err)
|
||||||
if test.fail && err != nil {
|
testutil.Assert(t, strings.Contains(err.Error(), test.errMsg), "unexpected error on input '%s'", test.input)
|
||||||
if !strings.Contains(err.Error(), test.errMsg) {
|
|
||||||
t.Errorf("unexpected error on input '%s'", test.input)
|
|
||||||
t.Fatalf("expected error to contain %q but got %q", test.errMsg, err)
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if !reflect.DeepEqual(expr, test.expected) {
|
|
||||||
t.Errorf("error on input '%s'", test.input)
|
|
||||||
t.Fatalf("no match\n\nexpected:\n%s\ngot: \n%s\n", Tree(test.expected), Tree(expr))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1617,21 +1604,11 @@ func TestParseExpressions(t *testing.T) {
|
||||||
// NaN has no equality. Thus, we need a separate test for it.
|
// NaN has no equality. Thus, we need a separate test for it.
|
||||||
func TestNaNExpression(t *testing.T) {
|
func TestNaNExpression(t *testing.T) {
|
||||||
expr, err := ParseExpr("NaN")
|
expr, err := ParseExpr("NaN")
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Errorf("error on input 'NaN'")
|
|
||||||
t.Fatalf("could not parse: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
nl, ok := expr.(*NumberLiteral)
|
nl, ok := expr.(*NumberLiteral)
|
||||||
if !ok {
|
testutil.Assert(t, ok, "expected number literal but got %T", expr)
|
||||||
t.Errorf("error on input 'NaN'")
|
testutil.Assert(t, math.IsNaN(float64(nl.Val)), "expected 'NaN' in number literal but got %v", nl.Val)
|
||||||
t.Fatalf("expected number literal but got %T", expr)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !math.IsNaN(float64(nl.Val)) {
|
|
||||||
t.Errorf("error on input 'NaN'")
|
|
||||||
t.Fatalf("expected 'NaN' in number literal but got %v", nl.Val)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func mustLabelMatcher(mt labels.MatchType, name, val string) *labels.Matcher {
|
func mustLabelMatcher(mt labels.MatchType, name, val string) *labels.Matcher {
|
||||||
|
@ -1746,29 +1723,14 @@ func TestParseSeries(t *testing.T) {
|
||||||
metric, vals, err := parseSeriesDesc(test.input)
|
metric, vals, err := parseSeriesDesc(test.input)
|
||||||
|
|
||||||
// Unexpected errors are always caused by a bug.
|
// Unexpected errors are always caused by a bug.
|
||||||
if err == errUnexpected {
|
testutil.Assert(t, err != errUnexpected, "unexpected error occurred")
|
||||||
t.Fatalf("unexpected error occurred")
|
|
||||||
}
|
|
||||||
|
|
||||||
if test.fail {
|
if !test.fail {
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
continue
|
testutil.Equals(t, test.expectedMetric, metric, "error on input '%s'", test.input)
|
||||||
}
|
testutil.Equals(t, test.expectedValues, vals, "error in input '%s'", test.input)
|
||||||
t.Errorf("error in input: \n\n%s\n", test.input)
|
|
||||||
t.Fatalf("failure expected, but passed")
|
|
||||||
} else {
|
} else {
|
||||||
if err != nil {
|
testutil.NotOk(t, err)
|
||||||
t.Errorf("error in input: \n\n%s\n", test.input)
|
|
||||||
t.Fatalf("could not parse: %s", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
testutil.Equals(t, test.expectedMetric, metric)
|
|
||||||
testutil.Equals(t, test.expectedValues, vals)
|
|
||||||
|
|
||||||
if !reflect.DeepEqual(vals, test.expectedValues) || !reflect.DeepEqual(metric, test.expectedMetric) {
|
|
||||||
t.Errorf("error in input: \n\n%s\n", test.input)
|
|
||||||
t.Fatalf("no match\n\nexpected:\n%s %s\ngot: \n%s %s\n", test.expectedMetric, test.expectedValues, metric, vals)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1778,13 +1740,10 @@ func TestRecoverParserRuntime(t *testing.T) {
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
defer func() {
|
defer func() {
|
||||||
if err != errUnexpected {
|
testutil.Equals(t, err, errUnexpected)
|
||||||
t.Fatalf("wrong error message: %q, expected %q", err, errUnexpected)
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := <-p.lex.items; ok {
|
_, ok := <-p.lex.items
|
||||||
t.Fatalf("lex.items was not closed")
|
testutil.Assert(t, !ok, "lex.items was not closed")
|
||||||
}
|
|
||||||
}()
|
}()
|
||||||
defer p.recover(&err)
|
defer p.recover(&err)
|
||||||
// Cause a runtime panic.
|
// Cause a runtime panic.
|
||||||
|
@ -1800,9 +1759,7 @@ func TestRecoverParserError(t *testing.T) {
|
||||||
e := errors.New("custom error")
|
e := errors.New("custom error")
|
||||||
|
|
||||||
defer func() {
|
defer func() {
|
||||||
if err.Error() != e.Error() {
|
testutil.Equals(t, err.Error(), e.Error())
|
||||||
t.Fatalf("wrong error message: %q, expected %q", err, e)
|
|
||||||
}
|
|
||||||
}()
|
}()
|
||||||
defer p.recover(&err)
|
defer p.recover(&err)
|
||||||
|
|
||||||
|
|
|
@ -15,6 +15,8 @@ package promql
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/prometheus/prometheus/util/testutil"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestExprString(t *testing.T) {
|
func TestExprString(t *testing.T) {
|
||||||
|
@ -88,15 +90,13 @@ func TestExprString(t *testing.T) {
|
||||||
|
|
||||||
for _, test := range inputs {
|
for _, test := range inputs {
|
||||||
expr, err := ParseExpr(test.in)
|
expr, err := ParseExpr(test.in)
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("parsing error for %q: %s", test.in, err)
|
|
||||||
}
|
|
||||||
exp := test.in
|
exp := test.in
|
||||||
if test.out != "" {
|
if test.out != "" {
|
||||||
exp = test.out
|
exp = test.out
|
||||||
}
|
}
|
||||||
if expr.String() != exp {
|
|
||||||
t.Fatalf("expected %q to be returned as:\n%s\ngot:\n%s\n", test.in, exp, expr.String())
|
testutil.Equals(t, expr.String(), exp)
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -16,22 +16,21 @@ package promql
|
||||||
import (
|
import (
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/prometheus/prometheus/util/testutil"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestEvaluations(t *testing.T) {
|
func TestEvaluations(t *testing.T) {
|
||||||
files, err := filepath.Glob("testdata/*.test")
|
files, err := filepath.Glob("testdata/*.test")
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
for _, fn := range files {
|
for _, fn := range files {
|
||||||
test, err := newTestFromFile(t, fn)
|
test, err := newTestFromFile(t, fn)
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Errorf("error creating test for %s: %s", fn, err)
|
|
||||||
}
|
|
||||||
err = test.Run()
|
err = test.Run()
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Errorf("error running test %s: %s", fn, err)
|
|
||||||
}
|
|
||||||
test.Close()
|
test.Close()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,6 +18,8 @@ import (
|
||||||
"os"
|
"os"
|
||||||
"regexp"
|
"regexp"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/prometheus/prometheus/util/testutil"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestQueryLogging(t *testing.T) {
|
func TestQueryLogging(t *testing.T) {
|
||||||
|
@ -106,24 +108,18 @@ func TestIndexReuse(t *testing.T) {
|
||||||
|
|
||||||
func TestMMapFile(t *testing.T) {
|
func TestMMapFile(t *testing.T) {
|
||||||
file, err := ioutil.TempFile("", "mmapedFile")
|
file, err := ioutil.TempFile("", "mmapedFile")
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("Couldn't create temp test file. %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
filename := file.Name()
|
filename := file.Name()
|
||||||
defer os.Remove(filename)
|
defer os.Remove(filename)
|
||||||
|
|
||||||
fileAsBytes, err := getMMapedFile(filename, 2, nil)
|
fileAsBytes, err := getMMapedFile(filename, 2, nil)
|
||||||
|
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("Couldn't create test mmaped file")
|
|
||||||
}
|
|
||||||
copy(fileAsBytes, "ab")
|
copy(fileAsBytes, "ab")
|
||||||
|
|
||||||
f, err := os.Open(filename)
|
f, err := os.Open(filename)
|
||||||
if err != nil {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("Couldn't open test mmaped file")
|
|
||||||
}
|
|
||||||
|
|
||||||
bytes := make([]byte, 4)
|
bytes := make([]byte, 4)
|
||||||
n, err := f.Read(bytes)
|
n, err := f.Read(bytes)
|
||||||
|
|
|
@ -550,12 +550,12 @@ func TestScrapeLoopRun(t *testing.T) {
|
||||||
select {
|
select {
|
||||||
case <-signal:
|
case <-signal:
|
||||||
case <-time.After(5 * time.Second):
|
case <-time.After(5 * time.Second):
|
||||||
t.Fatalf("Cancelation during initial offset failed")
|
t.Fatalf("Cancellation during initial offset failed")
|
||||||
case err := <-errc:
|
case err := <-errc:
|
||||||
t.Fatalf("Unexpected error: %s", err)
|
t.Fatalf("Unexpected error: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// The provided timeout must cause cancelation of the context passed down to the
|
// The provided timeout must cause cancellation of the context passed down to the
|
||||||
// scraper. The scraper has to respect the context.
|
// scraper. The scraper has to respect the context.
|
||||||
scraper.offsetDur = 0
|
scraper.offsetDur = 0
|
||||||
|
|
||||||
|
@ -607,7 +607,7 @@ func TestScrapeLoopRun(t *testing.T) {
|
||||||
case err := <-errc:
|
case err := <-errc:
|
||||||
t.Fatalf("Unexpected error: %s", err)
|
t.Fatalf("Unexpected error: %s", err)
|
||||||
case <-time.After(3 * time.Second):
|
case <-time.After(3 * time.Second):
|
||||||
t.Fatalf("Loop did not terminate on context cancelation")
|
t.Fatalf("Loop did not terminate on context cancellation")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1469,7 +1469,7 @@ func TestTargetScrapeScrapeCancel(t *testing.T) {
|
||||||
if err == nil {
|
if err == nil {
|
||||||
errc <- errors.New("Expected error but got nil")
|
errc <- errors.New("Expected error but got nil")
|
||||||
} else if ctx.Err() != context.Canceled {
|
} else if ctx.Err() != context.Canceled {
|
||||||
errc <- errors.Errorf("Expected context cancelation error but got: %s", ctx.Err())
|
errc <- errors.Errorf("Expected context cancellation error but got: %s", ctx.Err())
|
||||||
}
|
}
|
||||||
close(errc)
|
close(errc)
|
||||||
}()
|
}()
|
||||||
|
|
20
scripts/build_react_app.sh
Executable file
20
scripts/build_react_app.sh
Executable file
|
@ -0,0 +1,20 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
#
|
||||||
|
# Build React web UI.
|
||||||
|
# Run from repository root.
|
||||||
|
set -e
|
||||||
|
set -u
|
||||||
|
|
||||||
|
if ! [[ "$0" =~ "scripts/build_react_app.sh" ]]; then
|
||||||
|
echo "must be run from repository root"
|
||||||
|
exit 255
|
||||||
|
fi
|
||||||
|
|
||||||
|
cd web/ui/react-app
|
||||||
|
|
||||||
|
echo "building React app"
|
||||||
|
PUBLIC_URL=. yarn build
|
||||||
|
rm -rf ../static/graph-new
|
||||||
|
mv build ../static/graph-new
|
||||||
|
# Prevent bad redirect due to Go HTTP router treating index.html specially.
|
||||||
|
mv ../static/graph-new/index.html ../static/graph-new/app.html
|
|
@ -77,11 +77,11 @@ func TestSampleRing(t *testing.T) {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if sold.t >= s.t-c.delta && !found {
|
|
||||||
t.Fatalf("%d: expected sample %d to be in buffer but was not; buffer %v", i, sold.t, buffered)
|
if found {
|
||||||
}
|
testutil.Assert(t, sold.t >= s.t-c.delta, "%d: unexpected sample %d in buffer; buffer %v", i, sold.t, buffered)
|
||||||
if sold.t < s.t-c.delta && found {
|
} else {
|
||||||
t.Fatalf("%d: unexpected sample %d in buffer; buffer %v", i, sold.t, buffered)
|
testutil.Assert(t, sold.t < s.t-c.delta, "%d: expected sample %d to be in buffer but was not; buffer %v", i, sold.t, buffered)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -77,7 +77,7 @@ type recoverableError struct {
|
||||||
func (c *Client) Store(ctx context.Context, req []byte) error {
|
func (c *Client) Store(ctx context.Context, req []byte) error {
|
||||||
httpReq, err := http.NewRequest("POST", c.url.String(), bytes.NewReader(req))
|
httpReq, err := http.NewRequest("POST", c.url.String(), bytes.NewReader(req))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// Errors from NewRequest are from unparseable URLs, so are not
|
// Errors from NewRequest are from unparsable URLs, so are not
|
||||||
// recoverable.
|
// recoverable.
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
|
@ -27,7 +27,6 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
input labels.Labels
|
input labels.Labels
|
||||||
expectedErr string
|
expectedErr string
|
||||||
shouldPass bool
|
|
||||||
description string
|
description string
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
|
@ -36,7 +35,6 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
"labelName", "labelValue",
|
"labelName", "labelValue",
|
||||||
),
|
),
|
||||||
expectedErr: "",
|
expectedErr: "",
|
||||||
shouldPass: true,
|
|
||||||
description: "regular labels",
|
description: "regular labels",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -45,7 +43,6 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
"_labelName", "labelValue",
|
"_labelName", "labelValue",
|
||||||
),
|
),
|
||||||
expectedErr: "",
|
expectedErr: "",
|
||||||
shouldPass: true,
|
|
||||||
description: "label name with _",
|
description: "label name with _",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -54,7 +51,6 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
"@labelName", "labelValue",
|
"@labelName", "labelValue",
|
||||||
),
|
),
|
||||||
expectedErr: "invalid label name: @labelName",
|
expectedErr: "invalid label name: @labelName",
|
||||||
shouldPass: false,
|
|
||||||
description: "label name with @",
|
description: "label name with @",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -63,7 +59,6 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
"123labelName", "labelValue",
|
"123labelName", "labelValue",
|
||||||
),
|
),
|
||||||
expectedErr: "invalid label name: 123labelName",
|
expectedErr: "invalid label name: 123labelName",
|
||||||
shouldPass: false,
|
|
||||||
description: "label name starts with numbers",
|
description: "label name starts with numbers",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -72,7 +67,6 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
"", "labelValue",
|
"", "labelValue",
|
||||||
),
|
),
|
||||||
expectedErr: "invalid label name: ",
|
expectedErr: "invalid label name: ",
|
||||||
shouldPass: false,
|
|
||||||
description: "label name is empty string",
|
description: "label name is empty string",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -81,7 +75,6 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
"labelName", string([]byte{0xff}),
|
"labelName", string([]byte{0xff}),
|
||||||
),
|
),
|
||||||
expectedErr: "invalid label value: " + string([]byte{0xff}),
|
expectedErr: "invalid label value: " + string([]byte{0xff}),
|
||||||
shouldPass: false,
|
|
||||||
description: "label value is an invalid UTF-8 value",
|
description: "label value is an invalid UTF-8 value",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -89,7 +82,6 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
"__name__", "@invalid_name",
|
"__name__", "@invalid_name",
|
||||||
),
|
),
|
||||||
expectedErr: "invalid metric name: @invalid_name",
|
expectedErr: "invalid metric name: @invalid_name",
|
||||||
shouldPass: false,
|
|
||||||
description: "metric name starts with @",
|
description: "metric name starts with @",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -98,7 +90,6 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
"__name__", "name2",
|
"__name__", "name2",
|
||||||
),
|
),
|
||||||
expectedErr: "duplicate label with name: __name__",
|
expectedErr: "duplicate label with name: __name__",
|
||||||
shouldPass: false,
|
|
||||||
description: "duplicate label names",
|
description: "duplicate label names",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -107,7 +98,6 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
"label2", "name",
|
"label2", "name",
|
||||||
),
|
),
|
||||||
expectedErr: "",
|
expectedErr: "",
|
||||||
shouldPass: true,
|
|
||||||
description: "duplicate label values",
|
description: "duplicate label values",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -116,7 +106,6 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
"label2", "name",
|
"label2", "name",
|
||||||
),
|
),
|
||||||
expectedErr: "invalid label name: ",
|
expectedErr: "invalid label name: ",
|
||||||
shouldPass: false,
|
|
||||||
description: "don't report as duplicate label name",
|
description: "don't report as duplicate label name",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
@ -124,16 +113,11 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
for _, test := range tests {
|
for _, test := range tests {
|
||||||
t.Run(test.description, func(t *testing.T) {
|
t.Run(test.description, func(t *testing.T) {
|
||||||
err := validateLabelsAndMetricName(test.input)
|
err := validateLabelsAndMetricName(test.input)
|
||||||
if err == nil {
|
if test.expectedErr != "" {
|
||||||
if !test.shouldPass {
|
testutil.NotOk(t, err)
|
||||||
t.Fatalf("Test should fail, but passed instead.")
|
testutil.Equals(t, test.expectedErr, err.Error())
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
if test.shouldPass {
|
testutil.Ok(t, err)
|
||||||
t.Fatalf("Test should pass, got unexpected error: %v", err)
|
|
||||||
} else if err.Error() != test.expectedErr {
|
|
||||||
t.Fatalf("Test should fail with: %s got unexpected error instead: %v", test.expectedErr, err)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -151,21 +135,11 @@ func TestConcreteSeriesSet(t *testing.T) {
|
||||||
c := &concreteSeriesSet{
|
c := &concreteSeriesSet{
|
||||||
series: []storage.Series{series1, series2},
|
series: []storage.Series{series1, series2},
|
||||||
}
|
}
|
||||||
if !c.Next() {
|
testutil.Assert(t, c.Next(), "Expected Next() to be true.")
|
||||||
t.Fatalf("Expected Next() to be true.")
|
testutil.Equals(t, series1, c.At(), "Unexpected series returned.")
|
||||||
}
|
testutil.Assert(t, c.Next(), "Expected Next() to be true.")
|
||||||
if c.At() != series1 {
|
testutil.Equals(t, series2, c.At(), "Unexpected series returned.")
|
||||||
t.Fatalf("Unexpected series returned.")
|
testutil.Assert(t, !c.Next(), "Expected Next() to be false.")
|
||||||
}
|
|
||||||
if !c.Next() {
|
|
||||||
t.Fatalf("Expected Next() to be true.")
|
|
||||||
}
|
|
||||||
if c.At() != series2 {
|
|
||||||
t.Fatalf("Unexpected series returned.")
|
|
||||||
}
|
|
||||||
if c.Next() {
|
|
||||||
t.Fatalf("Expected Next() to be false.")
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestConcreteSeriesClonesLabels(t *testing.T) {
|
func TestConcreteSeriesClonesLabels(t *testing.T) {
|
||||||
|
|
|
@ -194,6 +194,7 @@ type QueueManager struct {
|
||||||
client StorageClient
|
client StorageClient
|
||||||
watcher *wal.Watcher
|
watcher *wal.Watcher
|
||||||
|
|
||||||
|
seriesMtx sync.Mutex
|
||||||
seriesLabels map[uint64]labels.Labels
|
seriesLabels map[uint64]labels.Labels
|
||||||
seriesSegmentIndexes map[uint64]int
|
seriesSegmentIndexes map[uint64]int
|
||||||
droppedSeries map[uint64]struct{}
|
droppedSeries map[uint64]struct{}
|
||||||
|
@ -264,6 +265,7 @@ func NewQueueManager(reg prometheus.Registerer, logger log.Logger, walDir string
|
||||||
func (t *QueueManager) Append(samples []record.RefSample) bool {
|
func (t *QueueManager) Append(samples []record.RefSample) bool {
|
||||||
outer:
|
outer:
|
||||||
for _, s := range samples {
|
for _, s := range samples {
|
||||||
|
t.seriesMtx.Lock()
|
||||||
lbls, ok := t.seriesLabels[s.Ref]
|
lbls, ok := t.seriesLabels[s.Ref]
|
||||||
if !ok {
|
if !ok {
|
||||||
t.droppedSamplesTotal.Inc()
|
t.droppedSamplesTotal.Inc()
|
||||||
|
@ -271,8 +273,10 @@ outer:
|
||||||
if _, ok := t.droppedSeries[s.Ref]; !ok {
|
if _, ok := t.droppedSeries[s.Ref]; !ok {
|
||||||
level.Info(t.logger).Log("msg", "dropped sample for series that was not explicitly dropped via relabelling", "ref", s.Ref)
|
level.Info(t.logger).Log("msg", "dropped sample for series that was not explicitly dropped via relabelling", "ref", s.Ref)
|
||||||
}
|
}
|
||||||
|
t.seriesMtx.Unlock()
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
t.seriesMtx.Unlock()
|
||||||
// This will only loop if the queues are being resharded.
|
// This will only loop if the queues are being resharded.
|
||||||
backoff := t.cfg.MinBackoff
|
backoff := t.cfg.MinBackoff
|
||||||
for {
|
for {
|
||||||
|
@ -356,9 +360,11 @@ func (t *QueueManager) Stop() {
|
||||||
t.watcher.Stop()
|
t.watcher.Stop()
|
||||||
|
|
||||||
// On shutdown, release the strings in the labels from the intern pool.
|
// On shutdown, release the strings in the labels from the intern pool.
|
||||||
|
t.seriesMtx.Lock()
|
||||||
for _, labels := range t.seriesLabels {
|
for _, labels := range t.seriesLabels {
|
||||||
releaseLabels(labels)
|
releaseLabels(labels)
|
||||||
}
|
}
|
||||||
|
t.seriesMtx.Unlock()
|
||||||
// Delete metrics so we don't have alerts for queues that are gone.
|
// Delete metrics so we don't have alerts for queues that are gone.
|
||||||
name := t.client.Name()
|
name := t.client.Name()
|
||||||
queueHighestSentTimestamp.DeleteLabelValues(name)
|
queueHighestSentTimestamp.DeleteLabelValues(name)
|
||||||
|
@ -378,6 +384,8 @@ func (t *QueueManager) Stop() {
|
||||||
|
|
||||||
// StoreSeries keeps track of which series we know about for lookups when sending samples to remote.
|
// StoreSeries keeps track of which series we know about for lookups when sending samples to remote.
|
||||||
func (t *QueueManager) StoreSeries(series []record.RefSeries, index int) {
|
func (t *QueueManager) StoreSeries(series []record.RefSeries, index int) {
|
||||||
|
t.seriesMtx.Lock()
|
||||||
|
defer t.seriesMtx.Unlock()
|
||||||
for _, s := range series {
|
for _, s := range series {
|
||||||
ls := processExternalLabels(s.Labels, t.externalLabels)
|
ls := processExternalLabels(s.Labels, t.externalLabels)
|
||||||
lbls := relabel.Process(ls, t.relabelConfigs...)
|
lbls := relabel.Process(ls, t.relabelConfigs...)
|
||||||
|
@ -402,6 +410,8 @@ func (t *QueueManager) StoreSeries(series []record.RefSeries, index int) {
|
||||||
// stored series records with the checkpoints index number, so we can now
|
// stored series records with the checkpoints index number, so we can now
|
||||||
// delete any ref ID's lower than that # from the two maps.
|
// delete any ref ID's lower than that # from the two maps.
|
||||||
func (t *QueueManager) SeriesReset(index int) {
|
func (t *QueueManager) SeriesReset(index int) {
|
||||||
|
t.seriesMtx.Lock()
|
||||||
|
defer t.seriesMtx.Unlock()
|
||||||
// Check for series that are in segments older than the checkpoint
|
// Check for series that are in segments older than the checkpoint
|
||||||
// that were not also present in the checkpoint.
|
// that were not also present in the checkpoint.
|
||||||
for k, v := range t.seriesSegmentIndexes {
|
for k, v := range t.seriesSegmentIndexes {
|
||||||
|
@ -409,6 +419,7 @@ func (t *QueueManager) SeriesReset(index int) {
|
||||||
delete(t.seriesSegmentIndexes, k)
|
delete(t.seriesSegmentIndexes, k)
|
||||||
releaseLabels(t.seriesLabels[k])
|
releaseLabels(t.seriesLabels[k])
|
||||||
delete(t.seriesLabels, k)
|
delete(t.seriesLabels, k)
|
||||||
|
delete(t.droppedSeries, k)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -52,8 +52,8 @@ func NewStorage(l log.Logger, reg prometheus.Registerer, stCallback startTimeCal
|
||||||
s := &Storage{
|
s := &Storage{
|
||||||
logger: logging.Dedupe(l, 1*time.Minute),
|
logger: logging.Dedupe(l, 1*time.Minute),
|
||||||
localStartTimeCallback: stCallback,
|
localStartTimeCallback: stCallback,
|
||||||
rws: NewWriteStorage(l, walDir, flushDeadline),
|
|
||||||
}
|
}
|
||||||
|
s.rws = NewWriteStorage(s.logger, walDir, flushDeadline)
|
||||||
return s
|
return s
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -122,7 +122,7 @@ func (rws *WriteStorage) ApplyConfig(conf *config.Config) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Use RemoteWriteConfigs and its index to get hash. So if its index changed,
|
// Use RemoteWriteConfigs and its index to get hash. So if its index changed,
|
||||||
// the correspoinding queue should also be restarted.
|
// the corresponding queue should also be restarted.
|
||||||
hash := md5.Sum(b)
|
hash := md5.Sum(b)
|
||||||
if i < len(rws.queues) && rws.hashes[i] == hash && externalLabelUnchanged {
|
if i < len(rws.queues) && rws.hashes[i] == hash && externalLabelUnchanged {
|
||||||
// The RemoteWriteConfig and index both not changed, keep the queue.
|
// The RemoteWriteConfig and index both not changed, keep the queue.
|
||||||
|
|
|
@ -1,4 +0,0 @@
|
||||||
Maintainers of this repository:
|
|
||||||
|
|
||||||
* Krasi Georgiev <kgeorgie@redhat.com> @krasi-georgiev
|
|
||||||
* Goutham Veeramachaneni <gouthamve@gmail.com> @gouthamve
|
|
|
@ -275,7 +275,7 @@ type Block struct {
|
||||||
meta BlockMeta
|
meta BlockMeta
|
||||||
|
|
||||||
// Symbol Table Size in bytes.
|
// Symbol Table Size in bytes.
|
||||||
// We maintain this variable to avoid recalculation everytime.
|
// We maintain this variable to avoid recalculation every time.
|
||||||
symbolTableSize uint64
|
symbolTableSize uint64
|
||||||
|
|
||||||
chunkr ChunkReader
|
chunkr ChunkReader
|
||||||
|
|
|
@ -14,7 +14,7 @@
|
||||||
// The code in this file was largely written by Damian Gryski as part of
|
// The code in this file was largely written by Damian Gryski as part of
|
||||||
// https://github.com/dgryski/go-tsz and published under the license below.
|
// https://github.com/dgryski/go-tsz and published under the license below.
|
||||||
// It was modified to accommodate reading from byte slices without modifying
|
// It was modified to accommodate reading from byte slices without modifying
|
||||||
// the underlying bytes, which would panic when reading from mmaped
|
// the underlying bytes, which would panic when reading from mmap'd
|
||||||
// read-only byte slices.
|
// read-only byte slices.
|
||||||
|
|
||||||
// Copyright (c) 2015,2016 Damian Gryski <damian@gryski.com>
|
// Copyright (c) 2015,2016 Damian Gryski <damian@gryski.com>
|
||||||
|
|
|
@ -163,7 +163,7 @@ func TestNoPanicFor0Tombstones(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLeveledCompactor_plan(t *testing.T) {
|
func TestLeveledCompactor_plan(t *testing.T) {
|
||||||
// This mimicks our default ExponentialBlockRanges with min block size equals to 20.
|
// This mimics our default ExponentialBlockRanges with min block size equals to 20.
|
||||||
compactor, err := NewLeveledCompactor(context.Background(), nil, nil, []int64{
|
compactor, err := NewLeveledCompactor(context.Background(), nil, nil, []int64{
|
||||||
20,
|
20,
|
||||||
60,
|
60,
|
||||||
|
@ -936,7 +936,7 @@ func TestCancelCompactions(t *testing.T) {
|
||||||
testutil.Ok(t, os.RemoveAll(tmpdirCopy))
|
testutil.Ok(t, os.RemoveAll(tmpdirCopy))
|
||||||
}()
|
}()
|
||||||
|
|
||||||
// Measure the compaction time without interupting it.
|
// Measure the compaction time without interrupting it.
|
||||||
var timeCompactionUninterrupted time.Duration
|
var timeCompactionUninterrupted time.Duration
|
||||||
{
|
{
|
||||||
db, err := Open(tmpdir, log.NewNopLogger(), nil, &Options{BlockRanges: []int64{1, 2000}})
|
db, err := Open(tmpdir, log.NewNopLogger(), nil, &Options{BlockRanges: []int64{1, 2000}})
|
||||||
|
|
|
@ -260,7 +260,7 @@ func newDBMetrics(db *DB, r prometheus.Registerer) *dbMetrics {
|
||||||
var ErrClosed = errors.New("db already closed")
|
var ErrClosed = errors.New("db already closed")
|
||||||
|
|
||||||
// DBReadOnly provides APIs for read only operations on a database.
|
// DBReadOnly provides APIs for read only operations on a database.
|
||||||
// Current implementation doesn't support concurency so
|
// Current implementation doesn't support concurrency so
|
||||||
// all API calls should happen in the same go routine.
|
// all API calls should happen in the same go routine.
|
||||||
type DBReadOnly struct {
|
type DBReadOnly struct {
|
||||||
logger log.Logger
|
logger log.Logger
|
||||||
|
@ -272,7 +272,7 @@ type DBReadOnly struct {
|
||||||
// OpenDBReadOnly opens DB in the given directory for read only operations.
|
// OpenDBReadOnly opens DB in the given directory for read only operations.
|
||||||
func OpenDBReadOnly(dir string, l log.Logger) (*DBReadOnly, error) {
|
func OpenDBReadOnly(dir string, l log.Logger) (*DBReadOnly, error) {
|
||||||
if _, err := os.Stat(dir); err != nil {
|
if _, err := os.Stat(dir); err != nil {
|
||||||
return nil, errors.Wrap(err, "openning the db dir")
|
return nil, errors.Wrap(err, "opening the db dir")
|
||||||
}
|
}
|
||||||
|
|
||||||
if l == nil {
|
if l == nil {
|
||||||
|
@ -359,7 +359,7 @@ func (db *DBReadOnly) Querier(mint, maxt int64) (Querier, error) {
|
||||||
maxBlockTime = blocks[len(blocks)-1].Meta().MaxTime
|
maxBlockTime = blocks[len(blocks)-1].Meta().MaxTime
|
||||||
}
|
}
|
||||||
|
|
||||||
// Also add the WAL if the current blocks don't cover the requestes time range.
|
// Also add the WAL if the current blocks don't cover the requests time range.
|
||||||
if maxBlockTime <= maxt {
|
if maxBlockTime <= maxt {
|
||||||
w, err := wal.Open(db.logger, nil, filepath.Join(db.dir, "wal"))
|
w, err := wal.Open(db.logger, nil, filepath.Join(db.dir, "wal"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
|
@ -2271,7 +2271,7 @@ func TestDBReadOnly(t *testing.T) {
|
||||||
err error
|
err error
|
||||||
)
|
)
|
||||||
|
|
||||||
// Boostrap the db.
|
// Bootstrap the db.
|
||||||
{
|
{
|
||||||
dbDir, err = ioutil.TempDir("", "test")
|
dbDir, err = ioutil.TempDir("", "test")
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
@ -2370,7 +2370,7 @@ func TestDBReadOnly_FlushWAL(t *testing.T) {
|
||||||
maxt int
|
maxt int
|
||||||
)
|
)
|
||||||
|
|
||||||
// Boostrap the db.
|
// Bootstrap the db.
|
||||||
{
|
{
|
||||||
dbDir, err = ioutil.TempDir("", "test")
|
dbDir, err = ioutil.TempDir("", "test")
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
|
|
@ -204,7 +204,7 @@ They are used to track label index sections. They are read into memory when an i
|
||||||
### Postings Offset Table
|
### Postings Offset Table
|
||||||
|
|
||||||
A postings offset table stores a sequence of postings offset entries.
|
A postings offset table stores a sequence of postings offset entries.
|
||||||
Every postings offset entry holds the lable name/value pair and the offset to its series list in the postings section.
|
Every postings offset entry holds the label name/value pair and the offset to its series list in the postings section.
|
||||||
They are used to track postings sections. They are read into memory when an index file is loaded.
|
They are used to track postings sections. They are read into memory when an index file is loaded.
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
|
@ -13,7 +13,7 @@
|
||||||
|
|
||||||
// +build go1.12
|
// +build go1.12
|
||||||
|
|
||||||
// Package goversion enforces the go version suported by the tsdb module.
|
// Package goversion enforces the go version supported by the tsdb module.
|
||||||
package goversion
|
package goversion
|
||||||
|
|
||||||
const _SoftwareRequiresGOVERSION1_12 = uint8(0)
|
const _SoftwareRequiresGOVERSION1_12 = uint8(0)
|
||||||
|
|
13
tsdb/head.go
13
tsdb/head.go
|
@ -333,11 +333,10 @@ func (h *Head) loadWAL(r *wal.Reader, multiRef map[uint64]uint64) (err error) {
|
||||||
// They are connected through a ring of channels which ensures that all sample batches
|
// They are connected through a ring of channels which ensures that all sample batches
|
||||||
// read from the WAL are processed in order.
|
// read from the WAL are processed in order.
|
||||||
var (
|
var (
|
||||||
wg sync.WaitGroup
|
wg sync.WaitGroup
|
||||||
multiRefLock sync.Mutex
|
n = runtime.GOMAXPROCS(0)
|
||||||
n = runtime.GOMAXPROCS(0)
|
inputs = make([]chan []record.RefSample, n)
|
||||||
inputs = make([]chan []record.RefSample, n)
|
outputs = make([]chan []record.RefSample, n)
|
||||||
outputs = make([]chan []record.RefSample, n)
|
|
||||||
)
|
)
|
||||||
wg.Add(n)
|
wg.Add(n)
|
||||||
|
|
||||||
|
@ -370,6 +369,7 @@ func (h *Head) loadWAL(r *wal.Reader, multiRef map[uint64]uint64) (err error) {
|
||||||
samples []record.RefSample
|
samples []record.RefSample
|
||||||
tstones []tombstones.Stone
|
tstones []tombstones.Stone
|
||||||
allStones = tombstones.NewMemTombstones()
|
allStones = tombstones.NewMemTombstones()
|
||||||
|
shards = make([][]record.RefSample, n)
|
||||||
)
|
)
|
||||||
defer func() {
|
defer func() {
|
||||||
if err := allStones.Close(); err != nil {
|
if err := allStones.Close(); err != nil {
|
||||||
|
@ -395,9 +395,7 @@ func (h *Head) loadWAL(r *wal.Reader, multiRef map[uint64]uint64) (err error) {
|
||||||
|
|
||||||
if !created {
|
if !created {
|
||||||
// There's already a different ref for this series.
|
// There's already a different ref for this series.
|
||||||
multiRefLock.Lock()
|
|
||||||
multiRef[s.Ref] = series.ref
|
multiRef[s.Ref] = series.ref
|
||||||
multiRefLock.Unlock()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if h.lastSeriesID < s.Ref {
|
if h.lastSeriesID < s.Ref {
|
||||||
|
@ -423,7 +421,6 @@ func (h *Head) loadWAL(r *wal.Reader, multiRef map[uint64]uint64) (err error) {
|
||||||
if len(samples) < m {
|
if len(samples) < m {
|
||||||
m = len(samples)
|
m = len(samples)
|
||||||
}
|
}
|
||||||
shards := make([][]record.RefSample, n)
|
|
||||||
for i := 0; i < n; i++ {
|
for i := 0; i < n; i++ {
|
||||||
var buf []record.RefSample
|
var buf []record.RefSample
|
||||||
select {
|
select {
|
||||||
|
|
|
@ -22,6 +22,7 @@ import (
|
||||||
"path"
|
"path"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"sort"
|
"sort"
|
||||||
|
"strconv"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pkg/errors"
|
"github.com/pkg/errors"
|
||||||
|
@ -98,6 +99,85 @@ func readTestWAL(t testing.TB, dir string) (recs []interface{}) {
|
||||||
return recs
|
return recs
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func BenchmarkLoadWAL(b *testing.B) {
|
||||||
|
cases := []struct {
|
||||||
|
// Total series is (batches*seriesPerBatch).
|
||||||
|
batches int
|
||||||
|
seriesPerBatch int
|
||||||
|
samplesPerSeries int
|
||||||
|
}{
|
||||||
|
{ // Less series and more samples.
|
||||||
|
batches: 10,
|
||||||
|
seriesPerBatch: 100,
|
||||||
|
samplesPerSeries: 100000,
|
||||||
|
},
|
||||||
|
{ // More series and less samples.
|
||||||
|
batches: 10,
|
||||||
|
seriesPerBatch: 10000,
|
||||||
|
samplesPerSeries: 100,
|
||||||
|
},
|
||||||
|
{ // In between.
|
||||||
|
batches: 10,
|
||||||
|
seriesPerBatch: 1000,
|
||||||
|
samplesPerSeries: 10000,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
labelsPerSeries := 5
|
||||||
|
for _, c := range cases {
|
||||||
|
b.Run(fmt.Sprintf("batches=%d,seriesPerBatch=%d,samplesPerSeries=%d", c.batches, c.seriesPerBatch, c.samplesPerSeries),
|
||||||
|
func(b *testing.B) {
|
||||||
|
dir, err := ioutil.TempDir("", "test_load_wal")
|
||||||
|
testutil.Ok(b, err)
|
||||||
|
defer func() {
|
||||||
|
testutil.Ok(b, os.RemoveAll(dir))
|
||||||
|
}()
|
||||||
|
|
||||||
|
w, err := wal.New(nil, nil, dir, false)
|
||||||
|
testutil.Ok(b, err)
|
||||||
|
|
||||||
|
// Write series.
|
||||||
|
refSeries := make([]record.RefSeries, 0, c.seriesPerBatch)
|
||||||
|
for k := 0; k < c.batches; k++ {
|
||||||
|
refSeries = refSeries[:0]
|
||||||
|
for i := k * c.seriesPerBatch; i < (k+1)*c.seriesPerBatch; i++ {
|
||||||
|
lbls := make(map[string]string, labelsPerSeries)
|
||||||
|
lbls[defaultLabelName] = strconv.Itoa(i)
|
||||||
|
for j := 1; len(lbls) < labelsPerSeries; j++ {
|
||||||
|
lbls[defaultLabelName+strconv.Itoa(j)] = defaultLabelValue + strconv.Itoa(j)
|
||||||
|
}
|
||||||
|
refSeries = append(refSeries, record.RefSeries{Ref: uint64(i) * 100, Labels: labels.FromMap(lbls)})
|
||||||
|
}
|
||||||
|
populateTestWAL(b, w, []interface{}{refSeries})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write samples.
|
||||||
|
refSamples := make([]record.RefSample, 0, c.seriesPerBatch)
|
||||||
|
for i := 0; i < c.samplesPerSeries; i++ {
|
||||||
|
for j := 0; j < c.batches; j++ {
|
||||||
|
refSamples = refSamples[:0]
|
||||||
|
for k := j * c.seriesPerBatch; k < (j+1)*c.seriesPerBatch; k++ {
|
||||||
|
refSamples = append(refSamples, record.RefSample{
|
||||||
|
Ref: uint64(k) * 100,
|
||||||
|
T: int64(i) * 10,
|
||||||
|
V: float64(i) * 100,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
populateTestWAL(b, w, []interface{}{refSamples})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
h, err := NewHead(nil, nil, w, 1000)
|
||||||
|
testutil.Ok(b, err)
|
||||||
|
|
||||||
|
b.ResetTimer()
|
||||||
|
|
||||||
|
// Load the WAL.
|
||||||
|
h.Init(0)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestHead_ReadWAL(t *testing.T) {
|
func TestHead_ReadWAL(t *testing.T) {
|
||||||
for _, compress := range []bool{false, true} {
|
for _, compress := range []bool{false, true} {
|
||||||
t.Run(fmt.Sprintf("compress=%t", compress), func(t *testing.T) {
|
t.Run(fmt.Sprintf("compress=%t", compress), func(t *testing.T) {
|
||||||
|
@ -149,7 +229,7 @@ func TestHead_ReadWAL(t *testing.T) {
|
||||||
s100 := head.series.getByID(100)
|
s100 := head.series.getByID(100)
|
||||||
|
|
||||||
testutil.Equals(t, labels.FromStrings("a", "1"), s10.lset)
|
testutil.Equals(t, labels.FromStrings("a", "1"), s10.lset)
|
||||||
testutil.Equals(t, (*memSeries)(nil), s11) // Series without samples should be garbage colected at head.Init().
|
testutil.Equals(t, (*memSeries)(nil), s11) // Series without samples should be garbage collected at head.Init().
|
||||||
testutil.Equals(t, labels.FromStrings("a", "4"), s50.lset)
|
testutil.Equals(t, labels.FromStrings("a", "4"), s50.lset)
|
||||||
testutil.Equals(t, labels.FromStrings("a", "3"), s100.lset)
|
testutil.Equals(t, labels.FromStrings("a", "3"), s100.lset)
|
||||||
|
|
||||||
|
|
|
@ -906,7 +906,7 @@ func (s *chainedSeries) Iterator() SeriesIterator {
|
||||||
return newChainedSeriesIterator(s.series...)
|
return newChainedSeriesIterator(s.series...)
|
||||||
}
|
}
|
||||||
|
|
||||||
// chainedSeriesIterator implements a series iterater over a list
|
// chainedSeriesIterator implements a series iterator over a list
|
||||||
// of time-sorted, non-overlapping iterators.
|
// of time-sorted, non-overlapping iterators.
|
||||||
type chainedSeriesIterator struct {
|
type chainedSeriesIterator struct {
|
||||||
series []Series // series in time order
|
series []Series // series in time order
|
||||||
|
@ -977,7 +977,7 @@ func (s *verticalChainedSeries) Iterator() SeriesIterator {
|
||||||
return newVerticalMergeSeriesIterator(s.series...)
|
return newVerticalMergeSeriesIterator(s.series...)
|
||||||
}
|
}
|
||||||
|
|
||||||
// verticalMergeSeriesIterator implements a series iterater over a list
|
// verticalMergeSeriesIterator implements a series iterator over a list
|
||||||
// of time-sorted, time-overlapping iterators.
|
// of time-sorted, time-overlapping iterators.
|
||||||
type verticalMergeSeriesIterator struct {
|
type verticalMergeSeriesIterator struct {
|
||||||
a, b SeriesIterator
|
a, b SeriesIterator
|
||||||
|
|
|
@ -253,7 +253,6 @@ func (t *memTombstones) AddInterval(ref uint64, itvs ...Interval) {
|
||||||
t.mtx.Lock()
|
t.mtx.Lock()
|
||||||
defer t.mtx.Unlock()
|
defer t.mtx.Unlock()
|
||||||
for _, itv := range itvs {
|
for _, itv := range itvs {
|
||||||
fmt.Println("adding interval to ref: ", ref)
|
|
||||||
t.intvlGroups[ref] = t.intvlGroups[ref].Add(itv)
|
t.intvlGroups[ref] = t.intvlGroups[ref].Add(itv)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -915,7 +915,7 @@ func (r *walReader) Read(
|
||||||
et, flag, b := r.at()
|
et, flag, b := r.at()
|
||||||
|
|
||||||
// In decoding below we never return a walCorruptionErr for now.
|
// In decoding below we never return a walCorruptionErr for now.
|
||||||
// Those should generally be catched by entry decoding before.
|
// Those should generally be caught by entry decoding before.
|
||||||
switch et {
|
switch et {
|
||||||
case WALEntrySeries:
|
case WALEntrySeries:
|
||||||
var series []record.RefSeries
|
var series []record.RefSeries
|
||||||
|
|
|
@ -46,7 +46,7 @@ func TestWALRepair_ReadingError(t *testing.T) {
|
||||||
8,
|
8,
|
||||||
},
|
},
|
||||||
// Ensures that the page buffer is big enough to fit
|
// Ensures that the page buffer is big enough to fit
|
||||||
// an entire page size without panicing.
|
// an entire page size without panicking.
|
||||||
// https://github.com/prometheus/prometheus/tsdb/pull/414
|
// https://github.com/prometheus/prometheus/tsdb/pull/414
|
||||||
"bad_header": {
|
"bad_header": {
|
||||||
1,
|
1,
|
||||||
|
|
|
@ -41,10 +41,13 @@ const (
|
||||||
)
|
)
|
||||||
|
|
||||||
// WriteTo is an interface used by the Watcher to send the samples it's read
|
// WriteTo is an interface used by the Watcher to send the samples it's read
|
||||||
// from the WAL on to somewhere else.
|
// from the WAL on to somewhere else. Functions will be called concurrently
|
||||||
|
// and it is left to the implementer to make sure they are safe.
|
||||||
type WriteTo interface {
|
type WriteTo interface {
|
||||||
Append([]record.RefSample) bool
|
Append([]record.RefSample) bool
|
||||||
StoreSeries([]record.RefSeries, int)
|
StoreSeries([]record.RefSeries, int)
|
||||||
|
// SeriesReset is called after reading a checkpoint to allow the deletion
|
||||||
|
// of all series created in a segment lower than the argument.
|
||||||
SeriesReset(int)
|
SeriesReset(int)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -337,6 +340,7 @@ func (w *Watcher) watch(segmentNum int, tail bool) error {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
gcSem := make(chan struct{}, 1)
|
||||||
for {
|
for {
|
||||||
select {
|
select {
|
||||||
case <-w.quit:
|
case <-w.quit:
|
||||||
|
@ -345,9 +349,21 @@ func (w *Watcher) watch(segmentNum int, tail bool) error {
|
||||||
case <-checkpointTicker.C:
|
case <-checkpointTicker.C:
|
||||||
// Periodically check if there is a new checkpoint so we can garbage
|
// Periodically check if there is a new checkpoint so we can garbage
|
||||||
// collect labels. As this is considered an optimisation, we ignore
|
// collect labels. As this is considered an optimisation, we ignore
|
||||||
// errors during checkpoint processing.
|
// errors during checkpoint processing. Doing the process asynchronously
|
||||||
if err := w.garbageCollectSeries(segmentNum); err != nil {
|
// allows the current WAL segment to be processed while reading the
|
||||||
level.Warn(w.logger).Log("msg", "error process checkpoint", "err", err)
|
// checkpoint.
|
||||||
|
select {
|
||||||
|
case gcSem <- struct{}{}:
|
||||||
|
go func() {
|
||||||
|
defer func() {
|
||||||
|
<-gcSem
|
||||||
|
}()
|
||||||
|
if err := w.garbageCollectSeries(segmentNum); err != nil {
|
||||||
|
level.Warn(w.logger).Log("msg", "error process checkpoint", "err", err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
default:
|
||||||
|
// Currently doing a garbage collect, try again later.
|
||||||
}
|
}
|
||||||
|
|
||||||
case <-segmentTicker.C:
|
case <-segmentTicker.C:
|
||||||
|
|
|
@ -401,7 +401,7 @@ func TestWALRestoreCorrupted(t *testing.T) {
|
||||||
|
|
||||||
// cut() truncates and fsyncs the first segment async. If it happens after
|
// cut() truncates and fsyncs the first segment async. If it happens after
|
||||||
// the corruption we apply below, the corruption will be overwritten again.
|
// the corruption we apply below, the corruption will be overwritten again.
|
||||||
// Fire and forget a sync to avoid flakyness.
|
// Fire and forget a sync to avoid flakiness.
|
||||||
w.files[0].Sync()
|
w.files[0].Sync()
|
||||||
// Corrupt the second entry in the first file.
|
// Corrupt the second entry in the first file.
|
||||||
// After re-opening we must be able to read the first entry
|
// After re-opening we must be able to read the first entry
|
||||||
|
|
|
@ -15,11 +15,12 @@ package stats
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"github.com/prometheus/client_golang/prometheus"
|
|
||||||
"github.com/prometheus/prometheus/util/testutil"
|
|
||||||
"regexp"
|
"regexp"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/prometheus/client_golang/prometheus"
|
||||||
|
"github.com/prometheus/prometheus/util/testutil"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestTimerGroupNewTimer(t *testing.T) {
|
func TestTimerGroupNewTimer(t *testing.T) {
|
||||||
|
|
|
@ -4,7 +4,8 @@ using the vfsgen library (c.f. Makefile).
|
||||||
|
|
||||||
During development it is more convenient to always use the files on disk to
|
During development it is more convenient to always use the files on disk to
|
||||||
directly see changes without recompiling.
|
directly see changes without recompiling.
|
||||||
To make this work, add `-tags dev` to the `flags` entry in `.promu.yml`, and then `make build`.
|
To make this work, remove the `builtinassets` build tag in the `flags` entry
|
||||||
|
in `.promu.yml`, and then `make build`.
|
||||||
|
|
||||||
This will serve all files from your local filesystem.
|
This will serve all files from your local filesystem.
|
||||||
This is for development purposes only.
|
This is for development purposes only.
|
||||||
|
|
|
@ -29,7 +29,7 @@ func main() {
|
||||||
fs := modtimevfs.New(ui.Assets, time.Unix(1, 0))
|
fs := modtimevfs.New(ui.Assets, time.Unix(1, 0))
|
||||||
err := vfsgen.Generate(fs, vfsgen.Options{
|
err := vfsgen.Generate(fs, vfsgen.Options{
|
||||||
PackageName: "ui",
|
PackageName: "ui",
|
||||||
BuildTags: "!dev",
|
BuildTags: "builtinassets",
|
||||||
VariableName: "Assets",
|
VariableName: "Assets",
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
File diff suppressed because one or more lines are too long
|
@ -16,7 +16,9 @@ package ui
|
||||||
|
|
||||||
import (
|
import (
|
||||||
// The blank import is to make Go modules happy.
|
// The blank import is to make Go modules happy.
|
||||||
|
_ "github.com/shurcooL/httpfs/filter"
|
||||||
|
_ "github.com/shurcooL/httpfs/union"
|
||||||
_ "github.com/shurcooL/vfsgen"
|
_ "github.com/shurcooL/vfsgen"
|
||||||
)
|
)
|
||||||
|
|
||||||
//go:generate go run -mod=vendor -tags=dev assets_generate.go
|
//go:generate go run -mod=vendor assets_generate.go
|
||||||
|
|
23
web/ui/react-app/.gitignore
vendored
Executable file
23
web/ui/react-app/.gitignore
vendored
Executable file
|
@ -0,0 +1,23 @@
|
||||||
|
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
||||||
|
|
||||||
|
# dependencies
|
||||||
|
/node_modules
|
||||||
|
/.pnp
|
||||||
|
.pnp.js
|
||||||
|
|
||||||
|
# testing
|
||||||
|
/coverage
|
||||||
|
|
||||||
|
# production
|
||||||
|
/build
|
||||||
|
|
||||||
|
# misc
|
||||||
|
.DS_Store
|
||||||
|
.env.local
|
||||||
|
.env.development.local
|
||||||
|
.env.test.local
|
||||||
|
.env.production.local
|
||||||
|
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
68
web/ui/react-app/README.md
Executable file
68
web/ui/react-app/README.md
Executable file
|
@ -0,0 +1,68 @@
|
||||||
|
This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
|
||||||
|
|
||||||
|
## Available Scripts
|
||||||
|
|
||||||
|
In the project directory, you can run:
|
||||||
|
|
||||||
|
### `npm start`
|
||||||
|
|
||||||
|
Runs the app in the development mode.<br>
|
||||||
|
Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
|
||||||
|
|
||||||
|
The page will reload if you make edits.<br>
|
||||||
|
You will also see any lint errors in the console.
|
||||||
|
|
||||||
|
### `npm test`
|
||||||
|
|
||||||
|
Launches the test runner in the interactive watch mode.<br>
|
||||||
|
See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
|
||||||
|
|
||||||
|
### `npm run build`
|
||||||
|
|
||||||
|
Builds the app for production to the `build` folder.<br>
|
||||||
|
It correctly bundles React in production mode and optimizes the build for the best performance.
|
||||||
|
|
||||||
|
The build is minified and the filenames include the hashes.<br>
|
||||||
|
Your app is ready to be deployed!
|
||||||
|
|
||||||
|
See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
|
||||||
|
|
||||||
|
### `npm run eject`
|
||||||
|
|
||||||
|
**Note: this is a one-way operation. Once you `eject`, you can’t go back!**
|
||||||
|
|
||||||
|
If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project.
|
||||||
|
|
||||||
|
Instead, it will copy all the configuration files and the transitive dependencies (Webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own.
|
||||||
|
|
||||||
|
You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it.
|
||||||
|
|
||||||
|
## Learn More
|
||||||
|
|
||||||
|
You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
|
||||||
|
|
||||||
|
To learn React, check out the [React documentation](https://reactjs.org/).
|
||||||
|
|
||||||
|
### Code Splitting
|
||||||
|
|
||||||
|
This section has moved here: https://facebook.github.io/create-react-app/docs/code-splitting
|
||||||
|
|
||||||
|
### Analyzing the Bundle Size
|
||||||
|
|
||||||
|
This section has moved here: https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size
|
||||||
|
|
||||||
|
### Making a Progressive Web App
|
||||||
|
|
||||||
|
This section has moved here: https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app
|
||||||
|
|
||||||
|
### Advanced Configuration
|
||||||
|
|
||||||
|
This section has moved here: https://facebook.github.io/create-react-app/docs/advanced-configuration
|
||||||
|
|
||||||
|
### Deployment
|
||||||
|
|
||||||
|
This section has moved here: https://facebook.github.io/create-react-app/docs/deployment
|
||||||
|
|
||||||
|
### `npm run build` fails to minify
|
||||||
|
|
||||||
|
This section has moved here: https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify
|
58
web/ui/react-app/package.json
Normal file
58
web/ui/react-app/package.json
Normal file
|
@ -0,0 +1,58 @@
|
||||||
|
{
|
||||||
|
"name": "graph",
|
||||||
|
"version": "0.1.0",
|
||||||
|
"private": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@fortawesome/fontawesome-svg-core": "^1.2.14",
|
||||||
|
"@fortawesome/free-solid-svg-icons": "^5.7.1",
|
||||||
|
"@fortawesome/react-fontawesome": "^0.1.4",
|
||||||
|
"@types/jest": "^24.0.4",
|
||||||
|
"@types/jquery": "^3.3.29",
|
||||||
|
"@types/node": "^12.11.1",
|
||||||
|
"@types/react": "^16.8.2",
|
||||||
|
"@types/react-dom": "^16.8.0",
|
||||||
|
"@types/sanitize-html": "^1.20.2",
|
||||||
|
"@types/react-resize-detector": "^4.0.2",
|
||||||
|
"bootstrap": "^4.2.1",
|
||||||
|
"downshift": "^3.2.2",
|
||||||
|
"flot": "^3.2.13",
|
||||||
|
"fuzzy": "^0.1.3",
|
||||||
|
"i": "^0.3.6",
|
||||||
|
"jquery": "^3.3.1",
|
||||||
|
"jquery.flot.tooltip": "^0.9.0",
|
||||||
|
"jsdom": "^15.2.0",
|
||||||
|
"moment": "^2.24.0",
|
||||||
|
"moment-timezone": "^0.5.23",
|
||||||
|
"popper.js": "^1.14.3",
|
||||||
|
"react": "^16.7.0",
|
||||||
|
"react-dom": "^16.7.0",
|
||||||
|
"sanitize-html": "^1.20.1",
|
||||||
|
"react-resize-detector": "^4.2.1",
|
||||||
|
"react-scripts": "^3.2.0",
|
||||||
|
"reactstrap": "^8.0.1",
|
||||||
|
"tempusdominus-bootstrap-4": "^5.1.2",
|
||||||
|
"tempusdominus-core": "^5.0.3",
|
||||||
|
"typescript": "^3.3.3"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"start": "react-scripts start",
|
||||||
|
"build": "react-scripts build",
|
||||||
|
"test": "react-scripts test",
|
||||||
|
"eject": "react-scripts eject"
|
||||||
|
},
|
||||||
|
"eslintConfig": {
|
||||||
|
"extends": "react-app"
|
||||||
|
},
|
||||||
|
"browserslist": [
|
||||||
|
">0.2%",
|
||||||
|
"not dead",
|
||||||
|
"not ie <= 11",
|
||||||
|
"not op_mini all"
|
||||||
|
],
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/flot": "0.0.31",
|
||||||
|
"@types/moment-timezone": "^0.5.10",
|
||||||
|
"@types/reactstrap": "^8.0.5"
|
||||||
|
},
|
||||||
|
"proxy": "http://localhost:9090"
|
||||||
|
}
|
BIN
web/ui/react-app/public/favicon.ico
Executable file
BIN
web/ui/react-app/public/favicon.ico
Executable file
Binary file not shown.
After Width: | Height: | Size: 15 KiB |
41
web/ui/react-app/public/index.html
Executable file
41
web/ui/react-app/public/index.html
Executable file
|
@ -0,0 +1,41 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico" />
|
||||||
|
<meta
|
||||||
|
name="viewport"
|
||||||
|
content="width=device-width, initial-scale=1, shrink-to-fit=no"
|
||||||
|
/>
|
||||||
|
<meta name="theme-color" content="#000000" />
|
||||||
|
<!--
|
||||||
|
manifest.json provides metadata used when your web app is added to the
|
||||||
|
homescreen on Android. See https://developers.google.com/web/fundamentals/web-app-manifest/
|
||||||
|
-->
|
||||||
|
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
|
||||||
|
<!--
|
||||||
|
Notice the use of %PUBLIC_URL% in the tags above.
|
||||||
|
It will be replaced with the URL of the `public` folder during the build.
|
||||||
|
Only files inside the `public` folder can be referenced from the HTML.
|
||||||
|
|
||||||
|
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
|
||||||
|
work correctly both with client-side routing and a non-root public URL.
|
||||||
|
Learn how to configure a non-root public URL by running `npm run build`.
|
||||||
|
-->
|
||||||
|
<title>Prometheus Expression Browser</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<noscript>You need to enable JavaScript to run this app.</noscript>
|
||||||
|
<div id="root"></div>
|
||||||
|
<!--
|
||||||
|
This HTML file is a template.
|
||||||
|
If you open it directly in the browser, you will see an empty page.
|
||||||
|
|
||||||
|
You can add webfonts, meta tags, or analytics to this file.
|
||||||
|
The build step will place the bundled scripts into the <body> tag.
|
||||||
|
|
||||||
|
To begin the development, run `npm start` or `yarn start`.
|
||||||
|
To create a production bundle, use `npm run build` or `yarn build`.
|
||||||
|
-->
|
||||||
|
</body>
|
||||||
|
</html>
|
15
web/ui/react-app/public/manifest.json
Executable file
15
web/ui/react-app/public/manifest.json
Executable file
|
@ -0,0 +1,15 @@
|
||||||
|
{
|
||||||
|
"short_name": "Prometheus UI",
|
||||||
|
"name": "Prometheus Server Web Interface",
|
||||||
|
"icons": [
|
||||||
|
{
|
||||||
|
"src": "favicon.ico",
|
||||||
|
"sizes": "64x64 32x32 24x24 16x16",
|
||||||
|
"type": "image/x-icon"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"start_url": ".",
|
||||||
|
"display": "standalone",
|
||||||
|
"theme_color": "#000000",
|
||||||
|
"background_color": "#ffffff"
|
||||||
|
}
|
180
web/ui/react-app/src/App.css
Normal file
180
web/ui/react-app/src/App.css
Normal file
|
@ -0,0 +1,180 @@
|
||||||
|
body {
|
||||||
|
padding-top: 10px; /* TODO remove */
|
||||||
|
}
|
||||||
|
|
||||||
|
.panel {
|
||||||
|
margin-bottom: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.expression-input {
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.expression-input textarea {
|
||||||
|
/* font-family: Menlo,Monaco,Consolas,'Courier New',monospace; */
|
||||||
|
resize: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
button.execute-btn {
|
||||||
|
width: 84px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.alert.alert-danger {
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-tabs .nav-link {
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tab-content {
|
||||||
|
border-left: 1px solid #dee2e6;
|
||||||
|
border-right: 1px solid #dee2e6;
|
||||||
|
border-bottom: 1px solid #dee2e6;
|
||||||
|
padding: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tab-content .alert {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.data-table.table {
|
||||||
|
margin: 10px 0 2px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.data-table > tbody > tr > td {
|
||||||
|
padding: 5px 0 5px 8px;
|
||||||
|
font-size: 0.8em;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.autosuggest-dropdown {
|
||||||
|
position: absolute;
|
||||||
|
border: 1px solid #ced4da;
|
||||||
|
border-radius: .25rem;
|
||||||
|
background-color: #fff;
|
||||||
|
color: #495057;
|
||||||
|
font-size: 1rem;
|
||||||
|
z-index: 1000;
|
||||||
|
min-width: 10rem;
|
||||||
|
top: 100%;
|
||||||
|
left: 56px;
|
||||||
|
float: left;
|
||||||
|
padding: .5rem 1px .5rem 1px;
|
||||||
|
margin: -5px;
|
||||||
|
list-style: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.autosuggest-dropdown li {
|
||||||
|
width: 100%;
|
||||||
|
padding: .25rem 1.5rem;
|
||||||
|
clear: both;
|
||||||
|
white-space: nowrap;
|
||||||
|
background-color: transparent;
|
||||||
|
border: 0;
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-controls, .table-controls {
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-controls input {
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-controls .range-input input {
|
||||||
|
width: 50px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-controls .time-input input {
|
||||||
|
border-right: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
div.time-input {
|
||||||
|
width: 240px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.table-controls input {
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-controls input.resolution-input {
|
||||||
|
width: 90px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-controls .time-input, .graph-controls .resolution-input, .graph-controls .stacked-input {
|
||||||
|
margin-left: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-controls .clear-time-btn {
|
||||||
|
background: #fff;
|
||||||
|
border-left: none;
|
||||||
|
border-top: 1px solid #ced4da;
|
||||||
|
border-bottom: 1px solid #ced4da;
|
||||||
|
color: #495057;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-legend {
|
||||||
|
margin: 15px 0 15px 25px;
|
||||||
|
font-size: 0.8em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-legend .legend-swatch {
|
||||||
|
padding: 5px;
|
||||||
|
height: 5px;
|
||||||
|
outline-offset: 1px;
|
||||||
|
outline: 1.5px solid #ccc;
|
||||||
|
margin: 2px 8px 2px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.legend-metric-name {
|
||||||
|
margin-right: 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.legend-label-name {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph {
|
||||||
|
margin: 0 5px 0 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-chart {
|
||||||
|
height: 500px;
|
||||||
|
width: 100%;
|
||||||
|
/* This is picked up by Flot's axis label font renderer,
|
||||||
|
which ignores "color" and uses "fill" instead. */
|
||||||
|
fill: #495057;
|
||||||
|
font-size: 0.8em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-chart .flot-overlay {
|
||||||
|
cursor: crosshair;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-tooltip {
|
||||||
|
background: rgba(0,0,0,.8);
|
||||||
|
color: #fff;
|
||||||
|
font-family: Arial, Helvetica, sans-serif;
|
||||||
|
font-size: 12px;
|
||||||
|
white-space: nowrap;
|
||||||
|
padding: 8px;
|
||||||
|
border-radius: 3px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-tooltip .labels {
|
||||||
|
font-size: 11px;
|
||||||
|
line-height: 11px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.graph-tooltip .detail-swatch {
|
||||||
|
display: inline-block;
|
||||||
|
width: 10px;
|
||||||
|
height: 10px;
|
||||||
|
margin: 0 5px 0 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.add-panel-btn {
|
||||||
|
margin-bottom: 20px;
|
||||||
|
}
|
10
web/ui/react-app/src/App.test.js
Executable file
10
web/ui/react-app/src/App.test.js
Executable file
|
@ -0,0 +1,10 @@
|
||||||
|
import './globals';
|
||||||
|
import React from 'react';
|
||||||
|
import ReactDOM from 'react-dom';
|
||||||
|
import App from './App';
|
||||||
|
|
||||||
|
it('renders without crashing', () => {
|
||||||
|
const div = document.createElement('div');
|
||||||
|
ReactDOM.render(<App />, div);
|
||||||
|
ReactDOM.unmountComponentAtNode(div);
|
||||||
|
});
|
19
web/ui/react-app/src/App.tsx
Executable file
19
web/ui/react-app/src/App.tsx
Executable file
|
@ -0,0 +1,19 @@
|
||||||
|
import React, { Component } from 'react';
|
||||||
|
|
||||||
|
import { Container } from 'reactstrap';
|
||||||
|
|
||||||
|
import PanelList from './PanelList';
|
||||||
|
|
||||||
|
import './App.css';
|
||||||
|
|
||||||
|
class App extends Component {
|
||||||
|
render() {
|
||||||
|
return (
|
||||||
|
<Container fluid={true}>
|
||||||
|
<PanelList />
|
||||||
|
</Container>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default App;
|
107
web/ui/react-app/src/DataTable.tsx
Normal file
107
web/ui/react-app/src/DataTable.tsx
Normal file
|
@ -0,0 +1,107 @@
|
||||||
|
import React, { PureComponent, ReactNode } from 'react';
|
||||||
|
|
||||||
|
import { Alert, Table } from 'reactstrap';
|
||||||
|
|
||||||
|
import SeriesName from './SeriesName';
|
||||||
|
|
||||||
|
export interface QueryResult {
|
||||||
|
data: null | {
|
||||||
|
resultType: 'vector',
|
||||||
|
result: InstantSample[],
|
||||||
|
} | {
|
||||||
|
resultType: 'matrix',
|
||||||
|
result: RangeSamples[],
|
||||||
|
} | {
|
||||||
|
resultType: 'scalar',
|
||||||
|
result: SampleValue,
|
||||||
|
} | {
|
||||||
|
resultType: 'string',
|
||||||
|
result: string,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
interface InstantSample {
|
||||||
|
metric: Metric,
|
||||||
|
value: SampleValue,
|
||||||
|
}
|
||||||
|
|
||||||
|
interface RangeSamples {
|
||||||
|
metric: Metric,
|
||||||
|
values: SampleValue[],
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Metric {
|
||||||
|
[key: string]: string,
|
||||||
|
}
|
||||||
|
|
||||||
|
type SampleValue = [number, string];
|
||||||
|
|
||||||
|
class DataTable extends PureComponent<QueryResult> {
|
||||||
|
limitSeries(series: InstantSample[] | RangeSamples[]): InstantSample[] | RangeSamples[] {
|
||||||
|
const maxSeries = 10000;
|
||||||
|
|
||||||
|
if (series.length > maxSeries) {
|
||||||
|
return series.slice(0, maxSeries);
|
||||||
|
}
|
||||||
|
return series;
|
||||||
|
}
|
||||||
|
|
||||||
|
render() {
|
||||||
|
const data = this.props.data;
|
||||||
|
|
||||||
|
if (data === null) {
|
||||||
|
return <Alert color="light">No data queried yet</Alert>;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data.result === null || data.result.length === 0) {
|
||||||
|
return <Alert color="secondary">Empty query result</Alert>;
|
||||||
|
}
|
||||||
|
|
||||||
|
let rows: ReactNode[] = [];
|
||||||
|
let limited = false;
|
||||||
|
switch(data.resultType) {
|
||||||
|
case 'vector':
|
||||||
|
rows = (this.limitSeries(data.result) as InstantSample[])
|
||||||
|
.map((s: InstantSample, index: number): ReactNode => {
|
||||||
|
return <tr key={index}><td><SeriesName labels={s.metric} format={false}/></td><td>{s.value[1]}</td></tr>;
|
||||||
|
});
|
||||||
|
limited = rows.length !== data.result.length;
|
||||||
|
break;
|
||||||
|
case 'matrix':
|
||||||
|
rows = (this.limitSeries(data.result) as RangeSamples[])
|
||||||
|
.map((s, index) => {
|
||||||
|
const valueText = s.values.map((v) => {
|
||||||
|
return [1] + ' @' + v[0];
|
||||||
|
}).join('\n');
|
||||||
|
return <tr style={{whiteSpace: 'pre'}} key={index}><td><SeriesName labels={s.metric} format={false}/></td><td>{valueText}</td></tr>;
|
||||||
|
});
|
||||||
|
limited = rows.length !== data.result.length;
|
||||||
|
break;
|
||||||
|
case 'scalar':
|
||||||
|
rows.push(<tr><td>scalar</td><td>{data.result[1]}</td></tr>);
|
||||||
|
break;
|
||||||
|
case 'string':
|
||||||
|
rows.push(<tr><td>scalar</td><td>{data.result[1]}</td></tr>);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
return <Alert color="danger">Unsupported result value type</Alert>;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
{limited &&
|
||||||
|
<Alert color="danger">
|
||||||
|
<strong>Warning:</strong> Fetched {data.result.length} metrics, only displaying first {rows.length}.
|
||||||
|
</Alert>
|
||||||
|
}
|
||||||
|
<Table hover size="sm" className="data-table">
|
||||||
|
<tbody>
|
||||||
|
{rows}
|
||||||
|
</tbody>
|
||||||
|
</Table>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default DataTable;
|
165
web/ui/react-app/src/ExpressionInput.tsx
Normal file
165
web/ui/react-app/src/ExpressionInput.tsx
Normal file
|
@ -0,0 +1,165 @@
|
||||||
|
import $ from 'jquery';
|
||||||
|
import React, { Component } from 'react';
|
||||||
|
import {
|
||||||
|
Button,
|
||||||
|
InputGroup,
|
||||||
|
InputGroupAddon,
|
||||||
|
InputGroupText,
|
||||||
|
Input,
|
||||||
|
} from 'reactstrap';
|
||||||
|
|
||||||
|
import Downshift from 'downshift';
|
||||||
|
import fuzzy from 'fuzzy';
|
||||||
|
import SanitizeHTML from './components/SanitizeHTML';
|
||||||
|
|
||||||
|
import { library } from '@fortawesome/fontawesome-svg-core';
|
||||||
|
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
|
||||||
|
import { faSearch, faSpinner } from '@fortawesome/free-solid-svg-icons';
|
||||||
|
|
||||||
|
library.add(faSearch, faSpinner);
|
||||||
|
|
||||||
|
interface ExpressionInputProps {
|
||||||
|
value: string;
|
||||||
|
metricNames: string[];
|
||||||
|
executeQuery: (expr: string) => void;
|
||||||
|
loading: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExpressionInput is the PromQL query input: a Downshift-driven, auto-growing
// textarea with fuzzy metric-name autocompletion and an "Execute" button.
class ExpressionInput extends Component<ExpressionInputProps> {
  // Last input value for which fuzzy matching yielded no results. Used to
  // short-circuit matching while the user keeps typing a superstring that
  // cannot match either.
  prevNoMatchValue: string | null = null;

  private exprInputRef = React.createRef<HTMLInputElement>();

  // Execute the query on plain Enter; Shift+Enter inserts a newline instead
  // (see the input placeholder text).
  handleKeyPress = (event: React.KeyboardEvent<HTMLInputElement>) => {
    if (event.key === 'Enter' && !event.shiftKey) {
      this.props.executeQuery(this.exprInputRef.current!.value);
      event.preventDefault();
    }
  }

  // Render the autocompletion dropdown for the current Downshift state, or
  // null when the menu is closed or there is nothing useful to suggest.
  renderAutosuggest = (downshift: any) => {
    if (!downshift.isOpen) {
      return null;
    }

    // If a previous (shorter) input had no matches, any input containing it
    // cannot match either - skip the fuzzy matching entirely.
    if (this.prevNoMatchValue && downshift.inputValue.includes(this.prevNoMatchValue)) {
      return null;
    }

    // Fuzzy-match (ignoring spaces) against all known metric names, wrapping
    // matched characters in <strong> for highlighting; the markup is
    // sanitized before rendering below.
    let matches = fuzzy.filter(downshift.inputValue.replace(/ /g, ''), this.props.metricNames, {
      pre: "<strong>",
      post: "</strong>",
    });

    if (matches.length === 0) {
      this.prevNoMatchValue = downshift.inputValue;
      return null;
    }

    return (
      <ul className="autosuggest-dropdown" {...downshift.getMenuProps()}>
        {
          matches
            .slice(0, 200) // Limit DOM rendering to 200 results, as DOM rendering is sloooow.
            .map((item, index) => (
              <li
                {...downshift.getItemProps({
                  key: item.original,
                  index,
                  item: item.original,
                  style: {
                    backgroundColor:
                      downshift.highlightedIndex === index ? 'lightgray' : 'white',
                    fontWeight: downshift.selectedItem === item ? 'bold' : 'normal',
                  },
                })}
              >
                {/* item.string contains <strong> highlight markup, so only allow that tag through. */}
                <SanitizeHTML inline={true} allowedTags={['strong']}>
                  {item.string}
                </SanitizeHTML>
              </li>
            ))
        }
      </ul>
    );
  }

  componentDidMount() {
    // Auto-grow the textarea to fit its content, both once initially and on
    // every subsequent input event.
    const $exprInput = $(this.exprInputRef.current!);
    const resize = () => {
      const el = $exprInput.get(0);
      // offset accounts for borders/padding that scrollHeight does not include.
      const offset = el.offsetHeight - el.clientHeight;
      $exprInput.css('height', 'auto').css('height', el.scrollHeight + offset);
    };
    resize();
    $exprInput.on('input', resize);
  }

  render() {
    return (
      <Downshift
        //inputValue={this.props.value}
        //onInputValueChange={this.props.onChange}
        selectedItem={this.props.value}
      >
        {(downshift) => (
          <div>
            <InputGroup className="expression-input">
              <InputGroupAddon addonType="prepend">
                <InputGroupText>
                  {/* Spinner while a query is in flight, search icon otherwise. */}
                  {this.props.loading ? <FontAwesomeIcon icon="spinner" spin/> : <FontAwesomeIcon icon="search"/>}
                </InputGroupText>
              </InputGroupAddon>
              <Input
                autoFocus
                type="textarea"
                rows="1"
                onKeyPress={this.handleKeyPress}
                placeholder="Expression (press Shift+Enter for newlines)"
                innerRef={this.exprInputRef}
                {...downshift.getInputProps({
                  onKeyDown: (event: React.KeyboardEvent): void => {
                    switch (event.key) {
                      case 'Home':
                      case 'End':
                        // We want to be able to jump to the beginning/end of the input field.
                        // By default, Downshift otherwise jumps to the first/last suggestion item instead.
                        (event.nativeEvent as any).preventDownshiftDefault = true;
                        break;
                      case 'ArrowUp':
                      case 'ArrowDown':
                        // Keep normal cursor movement while the menu is closed.
                        if (!downshift.isOpen) {
                          (event.nativeEvent as any).preventDownshiftDefault = true;
                        }
                        break;
                      case 'Enter':
                        downshift.closeMenu();
                        break;
                      case 'Escape':
                        if (!downshift.isOpen) {
                          this.exprInputRef.current!.blur();
                        }
                        break;
                      default:
                    }
                  }
                } as any)}
              />
              <InputGroupAddon addonType="append">
                <Button
                  className="execute-btn"
                  color="primary"
                  onClick={() => this.props.executeQuery(this.exprInputRef.current!.value)}
                >
                  Execute
                </Button>
              </InputGroupAddon>
            </InputGroup>
            {this.renderAutosuggest(downshift)}
          </div>
        )}
      </Downshift>
    );
  }
}

export default ExpressionInput;
|
284
web/ui/react-app/src/Graph.tsx
Normal file
284
web/ui/react-app/src/Graph.tsx
Normal file
|
@ -0,0 +1,284 @@
|
||||||
|
import $ from 'jquery';
|
||||||
|
import React, { PureComponent } from 'react';
|
||||||
|
import ReactResizeDetector from 'react-resize-detector';
|
||||||
|
import { Alert } from 'reactstrap';
|
||||||
|
|
||||||
|
import Legend from './Legend';
|
||||||
|
|
||||||
|
require('flot');
|
||||||
|
require('flot/source/jquery.flot.crosshair');
|
||||||
|
require('flot/source/jquery.flot.legend');
|
||||||
|
require('flot/source/jquery.flot.time');
|
||||||
|
require('flot/source/jquery.canvaswrapper');
|
||||||
|
require('jquery.flot.tooltip');
|
||||||
|
|
||||||
|
var graphID = 0;
|
||||||
|
function getGraphID() {
|
||||||
|
// TODO: This is ugly.
|
||||||
|
return graphID++;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Props for the Graph component.
interface GraphProps {
  // Result payload of a range query; only resultType 'matrix' is rendered
  // (see the check in render()).
  data: any; // TODO: Type this.
  // Whether the series are drawn stacked on top of each other.
  stacked: boolean;
  // Parameters of the query that produced `data`; used by getData() to align
  // samples to query steps and fill gaps. Null until a query has completed.
  queryParams: {
    startTime: number,
    endTime: number,
    resolution: number,
  } | null;
}
|
||||||
|
|
||||||
|
class Graph extends PureComponent<GraphProps> {
|
||||||
|
private id: number = getGraphID();
|
||||||
|
private chartRef = React.createRef<HTMLDivElement>();
|
||||||
|
|
||||||
|
escapeHTML(str: string) {
|
||||||
|
var entityMap: {[key: string]: string} = {
|
||||||
|
'&': '&',
|
||||||
|
'<': '<',
|
||||||
|
'>': '>',
|
||||||
|
'"': '"',
|
||||||
|
"'": ''',
|
||||||
|
'/': '/'
|
||||||
|
};
|
||||||
|
|
||||||
|
return String(str).replace(/[&<>"'/]/g, function (s) {
|
||||||
|
return entityMap[s];
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
renderLabels(labels: {[key: string]: string}) {
|
||||||
|
let labelStrings: string[] = [];
|
||||||
|
for (let label in labels) {
|
||||||
|
if (label !== '__name__') {
|
||||||
|
labelStrings.push('<strong>' + label + '</strong>: ' + this.escapeHTML(labels[label]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return '<div class="labels">' + labelStrings.join('<br>') + '</div>';
|
||||||
|
};
|
||||||
|
|
||||||
|
formatValue = (y: number | null): string => {
|
||||||
|
if (y === null) {
|
||||||
|
return 'null';
|
||||||
|
}
|
||||||
|
var abs_y = Math.abs(y);
|
||||||
|
if (abs_y >= 1e24) {
|
||||||
|
return (y / 1e24).toFixed(2) + "Y";
|
||||||
|
} else if (abs_y >= 1e21) {
|
||||||
|
return (y / 1e21).toFixed(2) + "Z";
|
||||||
|
} else if (abs_y >= 1e18) {
|
||||||
|
return (y / 1e18).toFixed(2) + "E";
|
||||||
|
} else if (abs_y >= 1e15) {
|
||||||
|
return (y / 1e15).toFixed(2) + "P";
|
||||||
|
} else if (abs_y >= 1e12) {
|
||||||
|
return (y / 1e12).toFixed(2) + "T";
|
||||||
|
} else if (abs_y >= 1e9) {
|
||||||
|
return (y / 1e9).toFixed(2) + "G";
|
||||||
|
} else if (abs_y >= 1e6) {
|
||||||
|
return (y / 1e6).toFixed(2) + "M";
|
||||||
|
} else if (abs_y >= 1e3) {
|
||||||
|
return (y / 1e3).toFixed(2) + "k";
|
||||||
|
} else if (abs_y >= 1) {
|
||||||
|
return y.toFixed(2)
|
||||||
|
} else if (abs_y === 0) {
|
||||||
|
return y.toFixed(2)
|
||||||
|
} else if (abs_y <= 1e-24) {
|
||||||
|
return (y / 1e-24).toFixed(2) + "y";
|
||||||
|
} else if (abs_y <= 1e-21) {
|
||||||
|
return (y / 1e-21).toFixed(2) + "z";
|
||||||
|
} else if (abs_y <= 1e-18) {
|
||||||
|
return (y / 1e-18).toFixed(2) + "a";
|
||||||
|
} else if (abs_y <= 1e-15) {
|
||||||
|
return (y / 1e-15).toFixed(2) + "f";
|
||||||
|
} else if (abs_y <= 1e-12) {
|
||||||
|
return (y / 1e-12).toFixed(2) + "p";
|
||||||
|
} else if (abs_y <= 1e-9) {
|
||||||
|
return (y / 1e-9).toFixed(2) + "n";
|
||||||
|
} else if (abs_y <= 1e-6) {
|
||||||
|
return (y / 1e-6).toFixed(2) + "µ";
|
||||||
|
} else if (abs_y <=1e-3) {
|
||||||
|
return (y / 1e-3).toFixed(2) + "m";
|
||||||
|
} else if (abs_y <= 1) {
|
||||||
|
return y.toFixed(2)
|
||||||
|
}
|
||||||
|
throw Error("couldn't format a value, this is a bug");
|
||||||
|
}
|
||||||
|
|
||||||
|
getOptions(): any {
|
||||||
|
return {
|
||||||
|
grid: {
|
||||||
|
hoverable: true,
|
||||||
|
clickable: true,
|
||||||
|
autoHighlight: true,
|
||||||
|
mouseActiveRadius: 100,
|
||||||
|
},
|
||||||
|
legend: {
|
||||||
|
show: false,
|
||||||
|
},
|
||||||
|
xaxis: {
|
||||||
|
mode: 'time',
|
||||||
|
showTicks: true,
|
||||||
|
showMinorTicks: true,
|
||||||
|
timeBase: 'milliseconds',
|
||||||
|
},
|
||||||
|
yaxis: {
|
||||||
|
tickFormatter: this.formatValue,
|
||||||
|
},
|
||||||
|
crosshair: {
|
||||||
|
mode: 'xy',
|
||||||
|
color: '#bbb',
|
||||||
|
},
|
||||||
|
tooltip: {
|
||||||
|
show: true,
|
||||||
|
cssClass: 'graph-tooltip',
|
||||||
|
content: (label: string, xval: number, yval: number, flotItem: any) => {
|
||||||
|
const series = flotItem.series; // TODO: type this.
|
||||||
|
var date = '<span class="date">' + new Date(xval).toUTCString() + '</span>';
|
||||||
|
var swatch = '<span class="detail-swatch" style="background-color: ' + series.color + '"></span>';
|
||||||
|
var content = swatch + (series.labels.__name__ || 'value') + ": <strong>" + yval + '</strong>';
|
||||||
|
return date + '<br>' + content + '<br>' + this.renderLabels(series.labels);
|
||||||
|
},
|
||||||
|
defaultTheme: false,
|
||||||
|
lines: true,
|
||||||
|
},
|
||||||
|
series: {
|
||||||
|
stack: this.props.stacked,
|
||||||
|
lines: {
|
||||||
|
lineWidth: this.props.stacked ? 1 : 2,
|
||||||
|
steps: false,
|
||||||
|
fill: this.props.stacked,
|
||||||
|
},
|
||||||
|
shadowSize: 0,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// This was adapted from Flot's color generation code.
|
||||||
|
getColors() {
|
||||||
|
let colors = [];
|
||||||
|
const colorPool = ["#edc240", "#afd8f8", "#cb4b4b", "#4da74d", "#9440ed"];
|
||||||
|
const colorPoolSize = colorPool.length;
|
||||||
|
let variation = 0;
|
||||||
|
const neededColors = this.props.data.result.length;
|
||||||
|
|
||||||
|
for (let i = 0; i < neededColors; i++) {
|
||||||
|
const c = ($ as any).color.parse(colorPool[i % colorPoolSize] || "#666");
|
||||||
|
|
||||||
|
// Each time we exhaust the colors in the pool we adjust
|
||||||
|
// a scaling factor used to produce more variations on
|
||||||
|
// those colors. The factor alternates negative/positive
|
||||||
|
// to produce lighter/darker colors.
|
||||||
|
|
||||||
|
// Reset the variation after every few cycles, or else
|
||||||
|
// it will end up producing only white or black colors.
|
||||||
|
|
||||||
|
if (i % colorPoolSize === 0 && i) {
|
||||||
|
if (variation >= 0) {
|
||||||
|
if (variation < 0.5) {
|
||||||
|
variation = -variation - 0.2;
|
||||||
|
} else variation = 0;
|
||||||
|
} else variation = -variation;
|
||||||
|
}
|
||||||
|
|
||||||
|
colors[i] = c.scale('rgb', 1 + variation);
|
||||||
|
}
|
||||||
|
|
||||||
|
return colors;
|
||||||
|
}
|
||||||
|
|
||||||
|
getData() {
|
||||||
|
const colors = this.getColors();
|
||||||
|
|
||||||
|
return this.props.data.result.map((ts: any /* TODO: Type this*/, index: number) => {
|
||||||
|
// Insert nulls for all missing steps.
|
||||||
|
let data = [];
|
||||||
|
let pos = 0;
|
||||||
|
const params = this.props.queryParams!;
|
||||||
|
|
||||||
|
for (let t = params.startTime; t <= params.endTime; t += params.resolution) {
|
||||||
|
// Allow for floating point inaccuracy.
|
||||||
|
if (ts.values.length > pos && ts.values[pos][0] < t + params.resolution / 100) {
|
||||||
|
data.push([ts.values[pos][0] * 1000, this.parseValue(ts.values[pos][1])]);
|
||||||
|
pos++;
|
||||||
|
} else {
|
||||||
|
// TODO: Flot has problems displaying intermittent "null" values when stacked,
|
||||||
|
// resort to 0 now. In Grafana this works for some reason, figure out how they
|
||||||
|
// do it.
|
||||||
|
data.push([t * 1000, this.props.stacked ? 0 : null]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
labels: ts.metric !== null ? ts.metric : {},
|
||||||
|
data: data,
|
||||||
|
color: colors[index],
|
||||||
|
index: index,
|
||||||
|
};
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
parseValue(value: string) {
|
||||||
|
var val = parseFloat(value);
|
||||||
|
if (isNaN(val)) {
|
||||||
|
// "+Inf", "-Inf", "+Inf" will be parsed into NaN by parseFloat(). They
|
||||||
|
// can't be graphed, so show them as gaps (null).
|
||||||
|
|
||||||
|
// TODO: Flot has problems displaying intermittent "null" values when stacked,
|
||||||
|
// resort to 0 now. In Grafana this works for some reason, figure out how they
|
||||||
|
// do it.
|
||||||
|
return this.props.stacked ? 0 : null;
|
||||||
|
}
|
||||||
|
return val;
|
||||||
|
};
|
||||||
|
|
||||||
|
componentDidMount() {
|
||||||
|
this.plot();
|
||||||
|
}
|
||||||
|
|
||||||
|
componentDidUpdate() {
|
||||||
|
this.plot();
|
||||||
|
}
|
||||||
|
|
||||||
|
componentWillUnmount() {
|
||||||
|
this.destroyPlot();
|
||||||
|
}
|
||||||
|
|
||||||
|
plot() {
|
||||||
|
if (this.chartRef.current === null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
this.destroyPlot();
|
||||||
|
$.plot($(this.chartRef.current!), this.getData(), this.getOptions());
|
||||||
|
}
|
||||||
|
|
||||||
|
destroyPlot() {
|
||||||
|
const chart = $(this.chartRef.current!).data('plot');
|
||||||
|
if (chart !== undefined) {
|
||||||
|
chart.destroy();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
render() {
|
||||||
|
if (this.props.data === null) {
|
||||||
|
return <Alert color="light">No data queried yet</Alert>;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.props.data.resultType !== 'matrix') {
|
||||||
|
return <Alert color="danger">Query result is of wrong type '{this.props.data.resultType}', should be 'matrix' (range vector).</Alert>;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.props.data.result.length === 0) {
|
||||||
|
return <Alert color="secondary">Empty query result</Alert>;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="graph">
|
||||||
|
<ReactResizeDetector handleWidth onResize={() => this.plot()} />
|
||||||
|
<div className="graph-chart" ref={this.chartRef} />
|
||||||
|
<Legend series={this.getData()}/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default Graph;
|
156
web/ui/react-app/src/GraphControls.tsx
Normal file
156
web/ui/react-app/src/GraphControls.tsx
Normal file
|
@ -0,0 +1,156 @@
|
||||||
|
import React, { Component } from 'react';
|
||||||
|
import {
|
||||||
|
Button,
|
||||||
|
ButtonGroup,
|
||||||
|
Form,
|
||||||
|
InputGroup,
|
||||||
|
InputGroupAddon,
|
||||||
|
Input,
|
||||||
|
} from 'reactstrap';
|
||||||
|
|
||||||
|
import { library } from '@fortawesome/fontawesome-svg-core';
|
||||||
|
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
|
||||||
|
import {
|
||||||
|
faPlus,
|
||||||
|
faMinus,
|
||||||
|
faChartArea,
|
||||||
|
faChartLine,
|
||||||
|
} from '@fortawesome/free-solid-svg-icons';
|
||||||
|
|
||||||
|
import TimeInput from './TimeInput';
|
||||||
|
import { parseRange, formatRange } from './utils/timeFormat';
|
||||||
|
|
||||||
|
library.add(
|
||||||
|
faPlus,
|
||||||
|
faMinus,
|
||||||
|
faChartArea,
|
||||||
|
faChartLine,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Props for the GraphControls toolbar (range / end time / resolution /
// stacking controls above the graph).
interface GraphControlsProps {
  // Graph range in seconds.
  range: number;
  // End timestamp in milliseconds; null means "now".
  endTime: number | null;
  // Query step in seconds; null means automatic resolution.
  resolution: number | null;
  stacked: boolean;

  onChangeRange: (range: number) => void;
  onChangeEndTime: (endTime: number | null) => void;
  onChangeResolution: (resolution: number | null) => void;
  onChangeStacking: (stacked: boolean) => void;
}
|
||||||
|
|
||||||
|
class GraphControls extends Component<GraphControlsProps> {
|
||||||
|
private rangeRef = React.createRef<HTMLInputElement>();
|
||||||
|
private resolutionRef = React.createRef<HTMLInputElement>();
|
||||||
|
|
||||||
|
rangeSteps = [
|
||||||
|
1,
|
||||||
|
10,
|
||||||
|
60,
|
||||||
|
5*60,
|
||||||
|
15*60,
|
||||||
|
30*60,
|
||||||
|
60*60,
|
||||||
|
2*60*60,
|
||||||
|
6*60*60,
|
||||||
|
12*60*60,
|
||||||
|
24*60*60,
|
||||||
|
48*60*60,
|
||||||
|
7*24*60*60,
|
||||||
|
14*24*60*60,
|
||||||
|
28*24*60*60,
|
||||||
|
56*24*60*60,
|
||||||
|
365*24*60*60,
|
||||||
|
730*24*60*60,
|
||||||
|
]
|
||||||
|
|
||||||
|
onChangeRangeInput = (rangeText: string): void => {
|
||||||
|
const range = parseRange(rangeText);
|
||||||
|
if (range === null) {
|
||||||
|
this.changeRangeInput(this.props.range);
|
||||||
|
} else {
|
||||||
|
this.props.onChangeRange(range);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
changeRangeInput = (range: number): void => {
|
||||||
|
this.rangeRef.current!.value = formatRange(range);
|
||||||
|
}
|
||||||
|
|
||||||
|
increaseRange = (): void => {
|
||||||
|
for (let range of this.rangeSteps) {
|
||||||
|
if (this.props.range < range) {
|
||||||
|
this.changeRangeInput(range);
|
||||||
|
this.props.onChangeRange(range);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
decreaseRange = (): void => {
|
||||||
|
for (let range of this.rangeSteps.slice().reverse()) {
|
||||||
|
if (this.props.range > range) {
|
||||||
|
this.changeRangeInput(range);
|
||||||
|
this.props.onChangeRange(range);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
componentDidUpdate(prevProps: GraphControlsProps) {
|
||||||
|
if (prevProps.range !== this.props.range) {
|
||||||
|
this.changeRangeInput(this.props.range);
|
||||||
|
}
|
||||||
|
if (prevProps.resolution !== this.props.resolution) {
|
||||||
|
this.resolutionRef.current!.value = this.props.resolution !== null ? this.props.resolution.toString() : '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
render() {
|
||||||
|
return (
|
||||||
|
<Form inline className="graph-controls" onSubmit={e => e.preventDefault()}>
|
||||||
|
<InputGroup className="range-input" size="sm">
|
||||||
|
<InputGroupAddon addonType="prepend">
|
||||||
|
<Button title="Decrease range" onClick={this.decreaseRange}><FontAwesomeIcon icon="minus" fixedWidth/></Button>
|
||||||
|
</InputGroupAddon>
|
||||||
|
|
||||||
|
<Input
|
||||||
|
defaultValue={formatRange(this.props.range)}
|
||||||
|
innerRef={this.rangeRef}
|
||||||
|
onBlur={() => this.onChangeRangeInput(this.rangeRef.current!.value)}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<InputGroupAddon addonType="append">
|
||||||
|
<Button title="Increase range" onClick={this.increaseRange}><FontAwesomeIcon icon="plus" fixedWidth/></Button>
|
||||||
|
</InputGroupAddon>
|
||||||
|
</InputGroup>
|
||||||
|
|
||||||
|
<TimeInput
|
||||||
|
time={this.props.endTime}
|
||||||
|
range={this.props.range}
|
||||||
|
placeholder="End time"
|
||||||
|
onChangeTime={this.props.onChangeEndTime}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<Input
|
||||||
|
placeholder="Res. (s)"
|
||||||
|
className="resolution-input"
|
||||||
|
defaultValue={this.props.resolution !== null ? this.props.resolution.toString() : ''}
|
||||||
|
innerRef={this.resolutionRef}
|
||||||
|
onBlur={() => {
|
||||||
|
const res = parseInt(this.resolutionRef.current!.value);
|
||||||
|
this.props.onChangeResolution(res ? res : null);
|
||||||
|
}}
|
||||||
|
bsSize="sm"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<ButtonGroup className="stacked-input" size="sm">
|
||||||
|
<Button title="Show unstacked line graph" onClick={() => this.props.onChangeStacking(false)} active={!this.props.stacked}><FontAwesomeIcon icon="chart-line" fixedWidth/></Button>
|
||||||
|
<Button title="Show stacked graph" onClick={() => this.props.onChangeStacking(true)} active={this.props.stacked}><FontAwesomeIcon icon="chart-area" fixedWidth/></Button>
|
||||||
|
</ButtonGroup>
|
||||||
|
</Form>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default GraphControls;
|
34
web/ui/react-app/src/Legend.tsx
Normal file
34
web/ui/react-app/src/Legend.tsx
Normal file
|
@ -0,0 +1,34 @@
|
||||||
|
import React, { PureComponent } from 'react';
|
||||||
|
|
||||||
|
import SeriesName from './SeriesName';
|
||||||
|
|
||||||
|
interface LegendProps {
|
||||||
|
series: any; // TODO: Type this.
|
||||||
|
}
|
||||||
|
|
||||||
|
class Legend extends PureComponent<LegendProps> {
|
||||||
|
renderLegendItem(s: any) {
|
||||||
|
return (
|
||||||
|
<tr key={s.index} className="legend-item">
|
||||||
|
<td>
|
||||||
|
<div className="legend-swatch" style={{backgroundColor: s.color}}></div>
|
||||||
|
</td>
|
||||||
|
<td>
|
||||||
|
<SeriesName labels={s.labels} format={true} />
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
render() {
|
||||||
|
return (
|
||||||
|
<table className="graph-legend">
|
||||||
|
<tbody>
|
||||||
|
{this.props.series.map((s: any) => {return this.renderLegendItem(s)})}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default Legend;
|
16
web/ui/react-app/src/MetricFomat.ts
Normal file
16
web/ui/react-app/src/MetricFomat.ts
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
function metricToSeriesName(labels: {[key: string]: string}): string {
|
||||||
|
if (labels === null) {
|
||||||
|
return 'scalar';
|
||||||
|
}
|
||||||
|
let tsName = (labels.__name__ || '') + '{';
|
||||||
|
let labelStrings: string[] = [];
|
||||||
|
for (let label in labels) {
|
||||||
|
if (label !== '__name__') {
|
||||||
|
labelStrings.push(label + '="' + labels[label] + '"');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tsName += labelStrings.join(', ') + '}';
|
||||||
|
return tsName;
|
||||||
|
};
|
||||||
|
|
||||||
|
export default metricToSeriesName;
|
297
web/ui/react-app/src/Panel.tsx
Normal file
297
web/ui/react-app/src/Panel.tsx
Normal file
|
@ -0,0 +1,297 @@
|
||||||
|
import React, { Component } from 'react';
|
||||||
|
|
||||||
|
import {
|
||||||
|
Alert,
|
||||||
|
Button,
|
||||||
|
Col,
|
||||||
|
Nav,
|
||||||
|
NavItem,
|
||||||
|
NavLink,
|
||||||
|
Row,
|
||||||
|
TabContent,
|
||||||
|
TabPane,
|
||||||
|
} from 'reactstrap';
|
||||||
|
|
||||||
|
import moment from 'moment-timezone';
|
||||||
|
|
||||||
|
import ExpressionInput from './ExpressionInput';
|
||||||
|
import GraphControls from './GraphControls';
|
||||||
|
import Graph from './Graph';
|
||||||
|
import DataTable from './DataTable';
|
||||||
|
import TimeInput from './TimeInput';
|
||||||
|
|
||||||
|
interface PanelProps {
|
||||||
|
options: PanelOptions;
|
||||||
|
onOptionsChanged: (opts: PanelOptions) => void;
|
||||||
|
metricNames: string[];
|
||||||
|
removePanel: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface PanelState {
|
||||||
|
data: any; // TODO: Type data.
|
||||||
|
lastQueryParams: { // TODO: Share these with Graph.tsx in a file.
|
||||||
|
startTime: number,
|
||||||
|
endTime: number,
|
||||||
|
resolution: number,
|
||||||
|
} | null;
|
||||||
|
loading: boolean;
|
||||||
|
error: string | null;
|
||||||
|
stats: null; // TODO: Stats.
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PanelOptions {
|
||||||
|
expr: string;
|
||||||
|
type: PanelType;
|
||||||
|
range: number; // Range in seconds.
|
||||||
|
endTime: number | null; // Timestamp in milliseconds.
|
||||||
|
resolution: number | null; // Resolution in seconds.
|
||||||
|
stacked: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum PanelType {
|
||||||
|
Graph = 'graph',
|
||||||
|
Table = 'table',
|
||||||
|
}
|
||||||
|
|
||||||
|
export const PanelDefaultOptions: PanelOptions = {
|
||||||
|
type: PanelType.Table,
|
||||||
|
expr: '',
|
||||||
|
range: 3600,
|
||||||
|
endTime: null,
|
||||||
|
resolution: null,
|
||||||
|
stacked: false,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Panel is one query panel: expression input, table/graph tab switcher and
// the controls + result view for the active tab.
class Panel extends Component<PanelProps, PanelState> {
  // When set, calling this aborts the query fetch currently in flight.
  private abortInFlightFetch: (() => void) | null = null;

  constructor(props: PanelProps) {
    super(props);

    this.state = {
      data: null,
      lastQueryParams: null,
      loading: false,
      error: null,
      stats: null,
    };
  }

  componentDidUpdate(prevProps: PanelProps, prevState: PanelState) {
    // Re-execute the query whenever an option that influences the result
    // changes (the expression itself is only executed explicitly).
    const prevOpts = prevProps.options;
    const opts = this.props.options;
    if (prevOpts.type !== opts.type ||
      prevOpts.range !== opts.range ||
      prevOpts.endTime !== opts.endTime ||
      prevOpts.resolution !== opts.resolution) {

      if (prevOpts.type !== opts.type) {
        // If the other options change, we still want to show the old data until the new
        // query completes, but this is not a good idea when we actually change between
        // table and graph view, since not all queries work well in both.
        this.setState({data: null});
      }
      this.executeQuery(opts.expr);
    }
  }

  componentDidMount() {
    this.executeQuery(this.props.options.expr);
  }

  // executeQuery runs the expression against the Prometheus HTTP API, using
  // the instant-query endpoint for table panels and the range-query endpoint
  // for graph panels. Any query still in flight is aborted first.
  executeQuery = (expr: string): void => {
    if (this.props.options.expr !== expr) {
      this.setOptions({expr: expr});
    }
    if (expr === '') {
      return;
    }

    if (this.abortInFlightFetch) {
      this.abortInFlightFetch();
      this.abortInFlightFetch = null;
    }

    const abortController = new AbortController();
    this.abortInFlightFetch = () => abortController.abort();
    this.setState({loading: true});

    const endTime = this.getEndTime().valueOf() / 1000; // TODO: shouldn't valueof only work when it's a moment?
    const startTime = endTime - this.props.options.range;
    // Default resolution targets roughly 250 data points over the range.
    const resolution = this.props.options.resolution || Math.max(Math.floor(this.props.options.range / 250), 1);

    const url = new URL(window.location.href);
    const params: {[key: string]: string} = {
      'query': expr,
    };

    switch (this.props.options.type) {
      case 'graph':
        url.pathname = '../../api/v1/query_range'
        Object.assign(params, {
          start: startTime,
          end: endTime,
          step: resolution,
        })
        // TODO path prefix here and elsewhere.
        break;
      case 'table':
        url.pathname = '../../api/v1/query'
        Object.assign(params, {
          time: endTime,
        })
        break;
      default:
        throw new Error('Invalid panel type "' + this.props.options.type + '"');
    }
    Object.keys(params).forEach(key => url.searchParams.append(key, params[key]))

    fetch(url.toString(), {cache: 'no-store', signal: abortController.signal})
    .then(resp => resp.json())
    .then(json => {
      if (json.status !== 'success') {
        throw new Error(json.error || 'invalid response JSON');
      }

      this.setState({
        error: null,
        data: json.data,
        lastQueryParams: {
          startTime: startTime,
          endTime: endTime,
          resolution: resolution,
        },
        loading: false,
      });
      this.abortInFlightFetch = null;
    })
    .catch(error => {
      if (error.name === 'AbortError') {
        // Aborts are expected, don't show an error for them.
        return
      }
      this.setState({
        error: 'Error executing query: ' + error.message,
        loading: false,
      })
    });
  }

  // setOptions merges the given options into the current ones and notifies
  // the parent, which owns the option state.
  setOptions(opts: object): void {
    const newOpts = {...this.props.options, ...opts};
    this.props.onOptionsChanged(newOpts);
  }

  handleExpressionChange = (expr: string): void => {
    this.setOptions({expr: expr});
  }

  handleChangeRange = (range: number): void => {
    this.setOptions({range: range});
  }

  // getEndTime returns the configured end time, or the current time (as a
  // moment) when none is set.
  getEndTime = (): number | moment.Moment => {
    if (this.props.options.endTime === null) {
      return moment();
    }
    return this.props.options.endTime;
  }

  handleChangeEndTime = (endTime: number | null) => {
    this.setOptions({endTime: endTime});
  }

  handleChangeResolution = (resolution: number | null) => {
    this.setOptions({resolution: resolution});
  }

  handleChangeStacking = (stacked: boolean) => {
    this.setOptions({stacked: stacked});
  }

  render() {
    return (
      <div className="panel">
        <Row>
          <Col>
            <ExpressionInput
              value={this.props.options.expr}
              executeQuery={this.executeQuery}
              loading={this.state.loading}
              metricNames={this.props.metricNames}
            />
          </Col>
        </Row>
        <Row>
          <Col>
            {this.state.error && <Alert color="danger">{this.state.error}</Alert>}
          </Col>
        </Row>
        <Row>
          <Col>
            <Nav tabs>
              <NavItem>
                <NavLink
                  className={this.props.options.type === 'table' ? 'active' : ''}
                  onClick={() => { this.setOptions({type: 'table'}); }}
                >
                  Table
                </NavLink>
              </NavItem>
              <NavItem>
                <NavLink
                  className={this.props.options.type === 'graph' ? 'active' : ''}
                  onClick={() => { this.setOptions({type: 'graph'}); }}
                >
                  Graph
                </NavLink>
              </NavItem>
            </Nav>
            <TabContent activeTab={this.props.options.type}>
              <TabPane tabId="table">
                {this.props.options.type === 'table' &&
                  <>
                    <div className="table-controls">
                      <TimeInput
                        time={this.props.options.endTime}
                        range={this.props.options.range}
                        placeholder="Evaluation time"
                        onChangeTime={this.handleChangeEndTime}
                      />
                    </div>
                    <DataTable data={this.state.data} />
                  </>
                }
              </TabPane>
              <TabPane tabId="graph">
                {this.props.options.type === 'graph' &&
                  <>
                    <GraphControls
                      range={this.props.options.range}
                      endTime={this.props.options.endTime}
                      resolution={this.props.options.resolution}
                      stacked={this.props.options.stacked}

                      onChangeRange={this.handleChangeRange}
                      onChangeEndTime={this.handleChangeEndTime}
                      onChangeResolution={this.handleChangeResolution}
                      onChangeStacking={this.handleChangeStacking}
                    />
                    <Graph data={this.state.data} stacked={this.props.options.stacked} queryParams={this.state.lastQueryParams} />
                  </>
                }
              </TabPane>
            </TabContent>
          </Col>
        </Row>
        <Row>
          <Col>
            <Button className="float-right" color="link" onClick={this.props.removePanel} size="sm">Remove Panel</Button>
          </Col>
        </Row>
      </div>
    );
  }
}

export default Panel;
|
144
web/ui/react-app/src/PanelList.tsx
Normal file
144
web/ui/react-app/src/PanelList.tsx
Normal file
|
@ -0,0 +1,144 @@
|
||||||
|
import React, { Component } from 'react';
|
||||||
|
|
||||||
|
import { Alert, Button, Col, Row } from 'reactstrap';
|
||||||
|
|
||||||
|
import Panel, { PanelOptions, PanelDefaultOptions } from './Panel';
|
||||||
|
import { decodePanelOptionsFromQueryString, encodePanelOptionsToQueryString } from './utils/urlParams';
|
||||||
|
|
||||||
|
interface PanelListState {
|
||||||
|
panels: {
|
||||||
|
key: string;
|
||||||
|
options: PanelOptions;
|
||||||
|
}[],
|
||||||
|
metricNames: string[];
|
||||||
|
fetchMetricsError: string | null;
|
||||||
|
timeDriftError: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
class PanelList extends Component<any, PanelListState> {
|
||||||
|
private key: number = 0;
|
||||||
|
|
||||||
|
constructor(props: any) {
|
||||||
|
super(props);
|
||||||
|
|
||||||
|
const urlPanels = decodePanelOptionsFromQueryString(window.location.search);
|
||||||
|
|
||||||
|
this.state = {
|
||||||
|
panels: urlPanels.length !== 0 ? urlPanels : [
|
||||||
|
{
|
||||||
|
key: this.getKey(),
|
||||||
|
options: PanelDefaultOptions,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
metricNames: [],
|
||||||
|
fetchMetricsError: null,
|
||||||
|
timeDriftError: null,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
componentDidMount() {
|
||||||
|
fetch("../../api/v1/label/__name__/values", {cache: "no-store"})
|
||||||
|
.then(resp => {
|
||||||
|
if (resp.ok) {
|
||||||
|
return resp.json();
|
||||||
|
} else {
|
||||||
|
throw new Error('Unexpected response status when fetching metric names: ' + resp.statusText); // TODO extract error
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.then(json => this.setState({ metricNames: json.data }))
|
||||||
|
.catch(error => this.setState({ fetchMetricsError: error.message }));
|
||||||
|
|
||||||
|
const browserTime = new Date().getTime() / 1000;
|
||||||
|
fetch("../../api/v1/query?query=time()", {cache: "no-store"})
|
||||||
|
.then(resp => {
|
||||||
|
if (resp.ok) {
|
||||||
|
return resp.json();
|
||||||
|
} else {
|
||||||
|
throw new Error('Unexpected response status when fetching metric names: ' + resp.statusText); // TODO extract error
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.then(json => {
|
||||||
|
const serverTime = json.data.result[0];
|
||||||
|
const delta = Math.abs(browserTime - serverTime);
|
||||||
|
|
||||||
|
if (delta >= 30) {
|
||||||
|
throw new Error('Detected ' + delta + ' seconds time difference between your browser and the server. Prometheus relies on accurate time and time drift might cause unexpected query results.');
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.catch(error => this.setState({ timeDriftError: error.message }));
|
||||||
|
|
||||||
|
window.onpopstate = () => {
|
||||||
|
const panels = decodePanelOptionsFromQueryString(window.location.search);
|
||||||
|
if (panels.length !== 0) {
|
||||||
|
this.setState({panels: panels});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
getKey(): string {
|
||||||
|
return (this.key++).toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
handleOptionsChanged(key: string, opts: PanelOptions): void {
|
||||||
|
const newPanels = this.state.panels.map(p => {
|
||||||
|
if (key === p.key) {
|
||||||
|
return {
|
||||||
|
key: key,
|
||||||
|
options: opts,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return p;
|
||||||
|
});
|
||||||
|
this.setState({panels: newPanels}, this.updateURL)
|
||||||
|
}
|
||||||
|
|
||||||
|
updateURL(): void {
|
||||||
|
const query = encodePanelOptionsToQueryString(this.state.panels);
|
||||||
|
window.history.pushState({}, '', query);
|
||||||
|
}
|
||||||
|
|
||||||
|
addPanel = (): void => {
|
||||||
|
const panels = this.state.panels.slice();
|
||||||
|
panels.push({
|
||||||
|
key: this.getKey(),
|
||||||
|
options: PanelDefaultOptions,
|
||||||
|
});
|
||||||
|
this.setState({panels: panels}, this.updateURL);
|
||||||
|
}
|
||||||
|
|
||||||
|
removePanel = (key: string): void => {
|
||||||
|
const panels = this.state.panels.filter(panel => {
|
||||||
|
return panel.key !== key;
|
||||||
|
});
|
||||||
|
this.setState({panels: panels}, this.updateURL);
|
||||||
|
}
|
||||||
|
|
||||||
|
render() {
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<Row>
|
||||||
|
<Col>
|
||||||
|
{this.state.timeDriftError && <Alert color="danger"><strong>Warning:</strong> Error fetching server time: {this.state.timeDriftError}</Alert>}
|
||||||
|
</Col>
|
||||||
|
</Row>
|
||||||
|
<Row>
|
||||||
|
<Col>
|
||||||
|
{this.state.fetchMetricsError && <Alert color="danger"><strong>Warning:</strong> Error fetching metrics list: {this.state.fetchMetricsError}</Alert>}
|
||||||
|
</Col>
|
||||||
|
</Row>
|
||||||
|
{this.state.panels.map(p =>
|
||||||
|
<Panel
|
||||||
|
key={p.key}
|
||||||
|
options={p.options}
|
||||||
|
onOptionsChanged={(opts: PanelOptions) => this.handleOptionsChanged(p.key, opts)}
|
||||||
|
removePanel={() => this.removePanel(p.key)}
|
||||||
|
metricNames={this.state.metricNames}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
<Button color="primary" className="add-panel-btn" onClick={this.addPanel}>Add Panel</Button>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default PanelList;
|
70
web/ui/react-app/src/SeriesName.tsx
Normal file
70
web/ui/react-app/src/SeriesName.tsx
Normal file
|
@ -0,0 +1,70 @@
|
||||||
|
import React, { PureComponent } from "react";
|
||||||
|
|
||||||
|
interface SeriesNameProps {
|
||||||
|
labels: {[key: string]: string} | null;
|
||||||
|
format: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
class SeriesName extends PureComponent<SeriesNameProps> {
|
||||||
|
renderFormatted(): React.ReactNode {
|
||||||
|
const labels = this.props.labels!;
|
||||||
|
|
||||||
|
let labelNodes: React.ReactNode[] = [];
|
||||||
|
let first = true;
|
||||||
|
for (let label in labels) {
|
||||||
|
if (label === '__name__') {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
labelNodes.push(
|
||||||
|
<span key={label}>
|
||||||
|
{!first && ', '}
|
||||||
|
<span className="legend-label-name">{label}</span>=
|
||||||
|
<span className="legend-label-value">"{labels[label]}"</span>
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
|
||||||
|
if (first) {
|
||||||
|
first = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<span className="legend-metric-name">{labels.__name__ || ''}</span>
|
||||||
|
<span className="legend-label-brace">{'{'}</span>
|
||||||
|
{labelNodes}
|
||||||
|
<span className="legend-label-brace">{'}'}</span>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
renderPlain() {
|
||||||
|
const labels = this.props.labels!;
|
||||||
|
|
||||||
|
let tsName = (labels.__name__ || '') + '{';
|
||||||
|
let labelStrings: string[] = [];
|
||||||
|
for (let label in labels) {
|
||||||
|
if (label !== '__name__') {
|
||||||
|
labelStrings.push(label + '="' + labels[label] + '"');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tsName += labelStrings.join(', ') + '}';
|
||||||
|
return tsName;
|
||||||
|
}
|
||||||
|
|
||||||
|
render() {
|
||||||
|
if (this.props.labels === null) {
|
||||||
|
return 'scalar';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.props.format) {
|
||||||
|
return this.renderFormatted();
|
||||||
|
}
|
||||||
|
// Return a simple text node. This is much faster to scroll through
|
||||||
|
// for longer lists (hundreds of items).
|
||||||
|
return this.renderPlain();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default SeriesName;
|
129
web/ui/react-app/src/TimeInput.tsx
Normal file
129
web/ui/react-app/src/TimeInput.tsx
Normal file
|
@ -0,0 +1,129 @@
|
||||||
|
import $ from 'jquery';
|
||||||
|
import React, { Component } from 'react';
|
||||||
|
import { Button, InputGroup, InputGroupAddon, Input } from 'reactstrap';
|
||||||
|
|
||||||
|
import moment from 'moment-timezone';
|
||||||
|
|
||||||
|
import 'tempusdominus-core';
|
||||||
|
import 'tempusdominus-bootstrap-4';
|
||||||
|
import '../node_modules/tempusdominus-bootstrap-4/build/css/tempusdominus-bootstrap-4.min.css';
|
||||||
|
|
||||||
|
import { dom, library } from '@fortawesome/fontawesome-svg-core';
|
||||||
|
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
|
||||||
|
import {
|
||||||
|
faChevronLeft,
|
||||||
|
faChevronRight,
|
||||||
|
faCalendarCheck,
|
||||||
|
faArrowUp,
|
||||||
|
faArrowDown,
|
||||||
|
faTimes,
|
||||||
|
} from '@fortawesome/free-solid-svg-icons';
|
||||||
|
|
||||||
|
library.add(
|
||||||
|
faChevronLeft,
|
||||||
|
faChevronRight,
|
||||||
|
faCalendarCheck,
|
||||||
|
faArrowUp,
|
||||||
|
faArrowDown,
|
||||||
|
faTimes,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Sadly needed to also replace <i> within the date picker, since it's not a React component.
|
||||||
|
dom.watch();
|
||||||
|
|
||||||
|
interface TimeInputProps {
|
||||||
|
time: number | null; // Timestamp in milliseconds.
|
||||||
|
range: number; // Range in seconds.
|
||||||
|
placeholder: string;
|
||||||
|
|
||||||
|
onChangeTime: (time: number | null) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
class TimeInput extends Component<TimeInputProps> {
|
||||||
|
private timeInputRef = React.createRef<HTMLInputElement>();
|
||||||
|
private $time: any | null = null;
|
||||||
|
|
||||||
|
getBaseTime = (): number => {
|
||||||
|
return this.props.time || moment().valueOf();
|
||||||
|
}
|
||||||
|
|
||||||
|
increaseTime = (): void => {
|
||||||
|
const time = this.getBaseTime() + this.props.range*1000/2;
|
||||||
|
this.props.onChangeTime(time);
|
||||||
|
}
|
||||||
|
|
||||||
|
decreaseTime = (): void => {
|
||||||
|
const time = this.getBaseTime() - this.props.range*1000/2;
|
||||||
|
this.props.onChangeTime(time);
|
||||||
|
}
|
||||||
|
|
||||||
|
clearTime = (): void => {
|
||||||
|
this.props.onChangeTime(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
componentDidMount() {
|
||||||
|
this.$time = $(this.timeInputRef.current!);
|
||||||
|
|
||||||
|
this.$time.datetimepicker({
|
||||||
|
icons: {
|
||||||
|
today: 'fas fa-calendar-check',
|
||||||
|
},
|
||||||
|
buttons: {
|
||||||
|
//showClear: true,
|
||||||
|
showClose: true,
|
||||||
|
showToday: true,
|
||||||
|
},
|
||||||
|
sideBySide: true,
|
||||||
|
format: 'YYYY-MM-DD HH:mm',
|
||||||
|
locale: 'en',
|
||||||
|
timeZone: 'UTC',
|
||||||
|
defaultDate: this.props.time,
|
||||||
|
});
|
||||||
|
|
||||||
|
this.$time.on('change.datetimepicker', (e: any) => {
|
||||||
|
if (e.date) {
|
||||||
|
this.props.onChangeTime(e.date.valueOf());
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
componentWillUnmount() {
|
||||||
|
this.$time.datetimepicker('destroy');
|
||||||
|
}
|
||||||
|
|
||||||
|
componentDidUpdate() {
|
||||||
|
this.$time.datetimepicker('date', this.props.time ? moment(this.props.time) : null);
|
||||||
|
}
|
||||||
|
|
||||||
|
render() {
|
||||||
|
return (
|
||||||
|
<InputGroup className="time-input" size="sm">
|
||||||
|
<InputGroupAddon addonType="prepend">
|
||||||
|
<Button title="Decrease time" onClick={this.decreaseTime}><FontAwesomeIcon icon="chevron-left" fixedWidth/></Button>
|
||||||
|
</InputGroupAddon>
|
||||||
|
|
||||||
|
<Input
|
||||||
|
placeholder={this.props.placeholder}
|
||||||
|
innerRef={this.timeInputRef}
|
||||||
|
onFocus={() => this.$time.datetimepicker('show')}
|
||||||
|
onBlur={() => this.$time.datetimepicker('hide')}
|
||||||
|
onKeyDown={(e) => ['Escape', 'Enter'].includes(e.key) && this.$time.datetimepicker('hide')}
|
||||||
|
/>
|
||||||
|
|
||||||
|
{/* CAUTION: While the datetimepicker also has an option to show a 'clear' button,
|
||||||
|
that functionality is broken, so we create an external solution instead. */}
|
||||||
|
{this.props.time &&
|
||||||
|
<InputGroupAddon addonType="append">
|
||||||
|
<Button className="clear-time-btn" title="Clear time" onClick={this.clearTime}><FontAwesomeIcon icon="times" fixedWidth/></Button>
|
||||||
|
</InputGroupAddon>
|
||||||
|
}
|
||||||
|
|
||||||
|
<InputGroupAddon addonType="append">
|
||||||
|
<Button title="Increase time" onClick={this.increaseTime}><FontAwesomeIcon icon="chevron-right" fixedWidth/></Button>
|
||||||
|
</InputGroupAddon>
|
||||||
|
</InputGroup>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default TimeInput;
|
30
web/ui/react-app/src/components/SanitizeHTML/index.tsx
Normal file
30
web/ui/react-app/src/components/SanitizeHTML/index.tsx
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
/**
|
||||||
|
* SanitizeHTML to render HTML, this takes care of sanitizing HTML.
|
||||||
|
*/
|
||||||
|
import React, { PureComponent } from 'react';
|
||||||
|
import sanitizeHTML from 'sanitize-html';
|
||||||
|
|
||||||
|
interface SanitizeHTMLProps {
|
||||||
|
inline: Boolean;
|
||||||
|
allowedTags: string[];
|
||||||
|
children: Element | string;
|
||||||
|
}
|
||||||
|
|
||||||
|
class SanitizeHTML extends PureComponent<SanitizeHTMLProps> {
|
||||||
|
sanitize = (html: any) => {
|
||||||
|
return sanitizeHTML(html, {
|
||||||
|
allowedTags: this.props.allowedTags
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
render() {
|
||||||
|
const { inline, children } = this.props;
|
||||||
|
return inline ? (
|
||||||
|
<span dangerouslySetInnerHTML={{ __html: this.sanitize(children) }} />
|
||||||
|
) : (
|
||||||
|
<div dangerouslySetInnerHTML={{ __html: this.sanitize(children) }} />
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default SanitizeHTML;
|
12
web/ui/react-app/src/components/SanitizeHTML/test.js
vendored
Normal file
12
web/ui/react-app/src/components/SanitizeHTML/test.js
vendored
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
/**
|
||||||
|
* SanitizeHTML tests
|
||||||
|
*/
|
||||||
|
import React from 'react';
|
||||||
|
import ReactDOM from 'react-dom';
|
||||||
|
import SanitizeHTML from '../SanitizeHTML';
|
||||||
|
|
||||||
|
it('renders without crashing', () => {
|
||||||
|
const div = document.createElement('div');
|
||||||
|
ReactDOM.render(<SanitizeHTML />, div);
|
||||||
|
ReactDOM.unmountComponentAtNode(div);
|
||||||
|
});
|
4
web/ui/react-app/src/globals.ts
Normal file
4
web/ui/react-app/src/globals.ts
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
import jquery from 'jquery';
|
||||||
|
|
||||||
|
(window as any).jQuery = jquery;
|
||||||
|
(window as any).moment = require('moment');
|
7
web/ui/react-app/src/index.tsx
Executable file
7
web/ui/react-app/src/index.tsx
Executable file
|
@ -0,0 +1,7 @@
|
||||||
|
import './globals';
|
||||||
|
import React from 'react';
|
||||||
|
import ReactDOM from 'react-dom';
|
||||||
|
import App from './App';
|
||||||
|
import 'bootstrap/dist/css/bootstrap.min.css';
|
||||||
|
|
||||||
|
ReactDOM.render(<App />, document.getElementById('root'));
|
1
web/ui/react-app/src/react-app-env.d.ts
vendored
Normal file
1
web/ui/react-app/src/react-app-env.d.ts
vendored
Normal file
|
@ -0,0 +1 @@
|
||||||
|
/// <reference types="react-scripts" />
|
38
web/ui/react-app/src/utils/timeFormat.ts
Normal file
38
web/ui/react-app/src/utils/timeFormat.ts
Normal file
|
@ -0,0 +1,38 @@
|
||||||
|
import moment from 'moment-timezone';
|
||||||
|
|
||||||
|
const rangeUnits: {[unit: string]: number} = {
|
||||||
|
'y': 60 * 60 * 24 * 365,
|
||||||
|
'w': 60 * 60 * 24 * 7,
|
||||||
|
'd': 60 * 60 * 24,
|
||||||
|
'h': 60 * 60,
|
||||||
|
'm': 60,
|
||||||
|
's': 1
|
||||||
|
}
|
||||||
|
|
||||||
|
export function parseRange(rangeText: string): number | null {
|
||||||
|
const rangeRE = new RegExp('^([0-9]+)([ywdhms]+)$');
|
||||||
|
const matches = rangeText.match(rangeRE);
|
||||||
|
if (!matches || matches.length !== 3) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const value = parseInt(matches[1]);
|
||||||
|
const unit = matches[2];
|
||||||
|
return value * rangeUnits[unit];
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatRange(range: number): string {
|
||||||
|
for (let unit of Object.keys(rangeUnits)) {
|
||||||
|
if (range % rangeUnits[unit] === 0) {
|
||||||
|
return (range / rangeUnits[unit]) + unit;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return range + 's';
|
||||||
|
}
|
||||||
|
|
||||||
|
export function parseTime(timeText: string): number {
|
||||||
|
return moment.utc(timeText).valueOf();
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatTime(time: number): string {
|
||||||
|
return moment.utc(time).format('YYYY-MM-DD HH:mm');
|
||||||
|
}
|
125
web/ui/react-app/src/utils/urlParams.ts
Normal file
125
web/ui/react-app/src/utils/urlParams.ts
Normal file
|
@ -0,0 +1,125 @@
|
||||||
|
import { parseRange, parseTime, formatRange, formatTime } from './timeFormat';
|
||||||
|
import { PanelOptions, PanelType, PanelDefaultOptions } from '../Panel';
|
||||||
|
|
||||||
|
export function decodePanelOptionsFromQueryString(query: string): {key: string, options: PanelOptions}[] {
|
||||||
|
if (query === '') {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const params = query.substring(1).split('&');
|
||||||
|
return parseParams(params);
|
||||||
|
}
|
||||||
|
|
||||||
|
const paramFormat = /^g\d+\..+=.+$/;
|
||||||
|
|
||||||
|
interface IncompletePanelOptions {
|
||||||
|
expr?: string;
|
||||||
|
type?: PanelType;
|
||||||
|
range?: number;
|
||||||
|
endTime?: number | null;
|
||||||
|
resolution?: number | null;
|
||||||
|
stacked?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseParams(params: string[]): {key: string, options: PanelOptions}[] {
|
||||||
|
const sortedParams = params.filter((p) => {
|
||||||
|
return paramFormat.test(p);
|
||||||
|
}).sort();
|
||||||
|
|
||||||
|
let panelOpts: {key: string, options: PanelOptions}[] = [];
|
||||||
|
|
||||||
|
let key = 0;
|
||||||
|
let options: IncompletePanelOptions = {};
|
||||||
|
for (const p of sortedParams) {
|
||||||
|
const prefix = 'g' + key + '.';
|
||||||
|
|
||||||
|
if (!p.startsWith(prefix)) {
|
||||||
|
panelOpts.push({
|
||||||
|
key: key.toString(),
|
||||||
|
options: {...PanelDefaultOptions, ...options},
|
||||||
|
});
|
||||||
|
options = {};
|
||||||
|
key++;
|
||||||
|
}
|
||||||
|
|
||||||
|
addParam(options, p.substring(prefix.length));
|
||||||
|
}
|
||||||
|
panelOpts.push({
|
||||||
|
key: key.toString(),
|
||||||
|
options: {...PanelDefaultOptions, ...options},
|
||||||
|
});
|
||||||
|
|
||||||
|
return panelOpts;
|
||||||
|
}
|
||||||
|
|
||||||
|
function addParam(opts: IncompletePanelOptions, param: string): void {
|
||||||
|
let [ opt, val ] = param.split('=');
|
||||||
|
val = decodeURIComponent(val.replace(/\+/g, ' '));
|
||||||
|
|
||||||
|
switch(opt) {
|
||||||
|
case 'expr':
|
||||||
|
opts.expr = val;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'tab':
|
||||||
|
if (val === '0') {
|
||||||
|
opts.type = PanelType.Graph;
|
||||||
|
} else {
|
||||||
|
opts.type = PanelType.Table;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'stacked':
|
||||||
|
opts.stacked = val === '1';
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'range_input':
|
||||||
|
const range = parseRange(val);
|
||||||
|
if (range !== null) {
|
||||||
|
opts.range = range;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'end_input':
|
||||||
|
opts.endTime = parseTime(val);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'step_input':
|
||||||
|
const res = parseInt(val);
|
||||||
|
if (res > 0) {
|
||||||
|
opts.resolution = res;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'moment_input':
|
||||||
|
opts.endTime = parseTime(val);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function encodePanelOptionsToQueryString(panels: {key: string, options: PanelOptions}[]): string {
|
||||||
|
const queryParams: string[] = [];
|
||||||
|
|
||||||
|
panels.forEach(p => {
|
||||||
|
const prefix = 'g' + p.key + '.';
|
||||||
|
const o = p.options;
|
||||||
|
const panelParams: {[key: string]: string | undefined} = {
|
||||||
|
'expr': o.expr,
|
||||||
|
'tab': o.type === PanelType.Graph ? '0' : '1',
|
||||||
|
'stacked': o.stacked ? '1' : '0',
|
||||||
|
'range_input': formatRange(o.range),
|
||||||
|
'end_input': o.endTime !== null ? formatTime(o.endTime) : undefined,
|
||||||
|
'moment_input': o.endTime !== null ? formatTime(o.endTime) : undefined,
|
||||||
|
'step_input': o.resolution !== null ? o.resolution.toString() : undefined,
|
||||||
|
};
|
||||||
|
|
||||||
|
for (let o in panelParams) {
|
||||||
|
const pp = panelParams[o];
|
||||||
|
if (pp !== undefined) {
|
||||||
|
queryParams.push(prefix + o + '=' + encodeURIComponent(pp));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return '?' + queryParams.join('&');
|
||||||
|
}
|
25
web/ui/react-app/tsconfig.json
Normal file
25
web/ui/react-app/tsconfig.json
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "es5",
|
||||||
|
"lib": [
|
||||||
|
"dom",
|
||||||
|
"dom.iterable",
|
||||||
|
"esnext"
|
||||||
|
],
|
||||||
|
"allowJs": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"allowSyntheticDefaultImports": true,
|
||||||
|
"strict": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"module": "esnext",
|
||||||
|
"moduleResolution": "node",
|
||||||
|
"resolveJsonModule": true,
|
||||||
|
"isolatedModules": true,
|
||||||
|
"noEmit": true,
|
||||||
|
"jsx": "preserve"
|
||||||
|
},
|
||||||
|
"include": [
|
||||||
|
"src"
|
||||||
|
]
|
||||||
|
}
|
10747
web/ui/react-app/yarn.lock
Normal file
10747
web/ui/react-app/yarn.lock
Normal file
File diff suppressed because it is too large
Load diff
|
@ -271,7 +271,7 @@ Prometheus.Graph.prototype.populateInsertableMetrics = function() {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
pageConfig.allMetrics = json.data; // todo: do we need self.allMetrics? Or can it just live on the page
|
pageConfig.allMetrics = json.data || []; // todo: do we need self.allMetrics? Or can it just live on the page
|
||||||
for (var i = 0; i < pageConfig.allMetrics.length; i++) {
|
for (var i = 0; i < pageConfig.allMetrics.length; i++) {
|
||||||
self.insertMetric[0].options.add(new Option(pageConfig.allMetrics[i], pageConfig.allMetrics[i]));
|
self.insertMetric[0].options.add(new Option(pageConfig.allMetrics[i], pageConfig.allMetrics[i]));
|
||||||
}
|
}
|
||||||
|
|
66
web/ui/ui.go
66
web/ui/ui.go
|
@ -11,39 +11,59 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
// +build dev
|
// +build !builtinassets
|
||||||
|
|
||||||
package ui
|
package ui
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"net/http"
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
|
"path"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/shurcooL/httpfs/filter"
|
"github.com/shurcooL/httpfs/filter"
|
||||||
"github.com/shurcooL/httpfs/union"
|
"github.com/shurcooL/httpfs/union"
|
||||||
)
|
)
|
||||||
|
|
||||||
var static http.FileSystem = filter.Keep(
|
|
||||||
http.Dir("./static"),
|
|
||||||
func(path string, fi os.FileInfo) bool {
|
|
||||||
return fi.IsDir() ||
|
|
||||||
(!strings.HasSuffix(path, "map.js") &&
|
|
||||||
!strings.HasSuffix(path, "/bootstrap.js") &&
|
|
||||||
!strings.HasSuffix(path, "/bootstrap-theme.css") &&
|
|
||||||
!strings.HasSuffix(path, "/bootstrap.css"))
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
var templates http.FileSystem = filter.Keep(
|
|
||||||
http.Dir("./templates"),
|
|
||||||
func(path string, fi os.FileInfo) bool {
|
|
||||||
return fi.IsDir() || strings.HasSuffix(path, ".html")
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
// Assets contains the project's assets.
|
// Assets contains the project's assets.
|
||||||
var Assets http.FileSystem = union.New(map[string]http.FileSystem{
|
var Assets http.FileSystem = func() http.FileSystem {
|
||||||
"/templates": templates,
|
wd, err := os.Getwd()
|
||||||
"/static": static,
|
if err != nil {
|
||||||
})
|
panic(err)
|
||||||
|
}
|
||||||
|
var assetsPrefix string
|
||||||
|
switch path.Base(wd) {
|
||||||
|
case "prometheus":
|
||||||
|
// When running Prometheus (without built-in assets) from the repo root.
|
||||||
|
assetsPrefix = "./web/ui"
|
||||||
|
case "web":
|
||||||
|
// When running web tests.
|
||||||
|
assetsPrefix = "./ui"
|
||||||
|
case "ui":
|
||||||
|
// When generating statically compiled-in assets.
|
||||||
|
assetsPrefix = "./"
|
||||||
|
}
|
||||||
|
|
||||||
|
static := filter.Keep(
|
||||||
|
http.Dir(path.Join(assetsPrefix, "static")),
|
||||||
|
func(path string, fi os.FileInfo) bool {
|
||||||
|
return fi.IsDir() ||
|
||||||
|
(!strings.HasSuffix(path, "map.js") &&
|
||||||
|
!strings.HasSuffix(path, "/bootstrap.js") &&
|
||||||
|
!strings.HasSuffix(path, "/bootstrap-theme.css") &&
|
||||||
|
!strings.HasSuffix(path, "/bootstrap.css"))
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
templates := filter.Keep(
|
||||||
|
http.Dir(path.Join(assetsPrefix, "templates")),
|
||||||
|
func(path string, fi os.FileInfo) bool {
|
||||||
|
return fi.IsDir() || strings.HasSuffix(path, ".html")
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
return union.New(map[string]http.FileSystem{
|
||||||
|
"/templates": templates,
|
||||||
|
"/static": static,
|
||||||
|
})
|
||||||
|
}()
|
||||||
|
|
14
web/web.go
14
web/web.go
|
@ -344,14 +344,14 @@ func New(logger log.Logger, o *Options) *Handler {
|
||||||
router.Post("/-/reload", h.reload)
|
router.Post("/-/reload", h.reload)
|
||||||
router.Put("/-/reload", h.reload)
|
router.Put("/-/reload", h.reload)
|
||||||
} else {
|
} else {
|
||||||
router.Post("/-/quit", func(w http.ResponseWriter, _ *http.Request) {
|
forbiddenAPINotEnabled := func(w http.ResponseWriter, _ *http.Request) {
|
||||||
w.WriteHeader(http.StatusForbidden)
|
w.WriteHeader(http.StatusForbidden)
|
||||||
w.Write([]byte("Lifecycle APIs are not enabled"))
|
w.Write([]byte("Lifecycle API is not enabled."))
|
||||||
})
|
}
|
||||||
router.Post("/-/reload", func(w http.ResponseWriter, _ *http.Request) {
|
router.Post("/-/quit", forbiddenAPINotEnabled)
|
||||||
w.WriteHeader(http.StatusForbidden)
|
router.Put("/-/quit", forbiddenAPINotEnabled)
|
||||||
w.Write([]byte("Lifecycle APIs are not enabled"))
|
router.Post("/-/reload", forbiddenAPINotEnabled)
|
||||||
})
|
router.Put("/-/reload", forbiddenAPINotEnabled)
|
||||||
}
|
}
|
||||||
router.Get("/-/quit", func(w http.ResponseWriter, _ *http.Request) {
|
router.Get("/-/quit", func(w http.ResponseWriter, _ *http.Request) {
|
||||||
w.WriteHeader(http.StatusMethodNotAllowed)
|
w.WriteHeader(http.StatusMethodNotAllowed)
|
||||||
|
|
Loading…
Reference in a new issue