commit 84f8307aa1

Merge remote-tracking branch 'upstream/main' into codesome/syncprom

Signed-off-by: Ganesh Vernekar <ganeshvern@gmail.com>
@@ -47,30 +47,24 @@ jobs:
       - store_test_results:
           path: test-results

-  test_react:
+  test_ui:
     executor: golang

     steps:
       - checkout
       - restore_cache:
           keys:
-            - v3-npm-deps-{{ checksum "web/ui/react-app/package-lock.json" }}
+            - v3-npm-deps-{{ checksum "web/ui/package-lock.json" }}
             - v3-npm-deps-
-      - run:
-          command: make react-app-test
+      - run: make ui-install
+      - run: make ui-lint
+      - run: make ui-build-module
+      - run: make ui-test
       - save_cache:
-          key: v3-npm-deps-{{ checksum "web/ui/react-app/package-lock.json" }}
+          key: v3-npm-deps-{{ checksum "web/ui/package-lock.json" }}
           paths:
             - ~/.npm

-  test_web_module:
-    executor: golang
-    steps:
-      - checkout
-      - run: make web-module-install
-      - run: make web-module-test
-      - run: make web-module-lint
-
   test_windows:
     executor:
       name: win/default
@@ -135,11 +129,7 @@ workflows:
           filters:
             tags:
               only: /.*/
-      - test_react:
-          filters:
-            tags:
-              only: /.*/
-      - test_web_module:
+      - test_ui:
           filters:
             tags:
               only: /.*/
@@ -165,7 +155,7 @@ workflows:
          context: org-context
          requires:
            - test_go
-           - test_react
+           - test_ui
            - build
          filters:
            branches:
@@ -175,7 +165,7 @@ workflows:
          context: org-context
          requires:
            - test_go
-           - test_react
+           - test_ui
            - build
          filters:
            tags:
.gitignore (2 changes, vendored)
@@ -25,3 +25,5 @@ npm_licenses.tar.bz2

 /vendor
 /.build
+
+/**/node_modules
CHANGELOG.md (17 changes)
@@ -1,3 +1,20 @@
+## 2.30.0 / 2021-09-14
+
+* [FEATURE] **experimental** TSDB: Snapshot in-memory chunks on shutdown for faster restarts. Behind `--enable-feature=memory-snapshot-on-shutdown` flag. #7229
+* [FEATURE] **experimental** Scrape: Configure scrape interval and scrape timeout via relabeling using `__scrape_interval__` and `__scrape_timeout__` labels respectively. #8911
+* [FEATURE] Scrape: Add `scrape_timeout_seconds` and `scrape_sample_limit` metric. Behind `--enable-feature=extra-scrape-metrics` flag to avoid additional cardinality by default. #9247 #9295
+* [ENHANCEMENT] Scrape: Add `--scrape.timestamp-tolerance` flag to adjust scrape timestamp tolerance when enabled via `--scrape.adjust-timestamps`. #9283
+* [ENHANCEMENT] Remote Write: Improve throughput when sending exemplars. #8921
+* [ENHANCEMENT] TSDB: Optimise WAL loading by removing extra map and caching min-time #9160
+* [ENHANCEMENT] promtool: Speed up checking for duplicate rules. #9262/#9306
+* [ENHANCEMENT] Scrape: Reduce allocations when parsing the metrics. #9299
+* [ENHANCEMENT] docker_sd: Support host network mode #9125
+* [BUGFIX] Exemplars: Fix panic when resizing exemplar storage from 0 to a non-zero size. #9286
+* [BUGFIX] TSDB: Correctly decrement `prometheus_tsdb_head_active_appenders` when the append has no samples. #9230
+* [BUGFIX] promtool rules backfill: Return 1 if backfill was unsuccessful. #9303
+* [BUGFIX] promtool rules backfill: Avoid creation of overlapping blocks. #9324
+* [BUGFIX] config: Fix a panic when reloading configuration with a `null` relabel action. #9224
+
 ## 2.29.2 / 2021-08-27

 * [BUGFIX] Fix Kubernetes SD failing to discover Ingress in Kubernetes v1.22. #9205
@@ -1,7 +1,7 @@
 Julien Pivotto (<roidelapluie@prometheus.io> / @roidelapluie) is the main/default maintainer, some parts of the codebase have other maintainers:

 * `cmd`
-  * `promtool`: David Leadbeater (<dgl@dgl.cx> / @dgl)
+  * `promtool`: David Leadbeater (<dgl@dgl.cx> / @dgl), Jessica Grebenschikov (<jessica.greben1@gmail.com> / @jessicagreben)
 * `discovery`
   * `k8s`: Frederic Branczyk (<fbranczyk@gmail.com> / @brancz)
 * `documentation`
Makefile (71 changes)
@@ -14,13 +14,9 @@
 # Needs to be defined before including Makefile.common to auto-generate targets
 DOCKER_ARCHS ?= amd64 armv7 arm64 ppc64le s390x

-WEB_MODULE_PATH = web/ui/module
-REACT_APP_PATH = web/ui/react-app
-REACT_APP_SOURCE_FILES = $(shell find $(REACT_APP_PATH)/public/ $(REACT_APP_PATH)/src/ $(REACT_APP_PATH)/tsconfig.json)
-REACT_APP_OUTPUT_DIR = web/ui/static/react
-REACT_APP_NODE_MODULES_PATH = $(REACT_APP_PATH)/node_modules
+UI_PATH = web/ui
+UI_NODE_MODULES_PATH = $(UI_PATH)/node_modules
 REACT_APP_NPM_LICENSES_TARBALL = "npm_licenses.tar.bz2"
-REACT_APP_BUILD_SCRIPT = ./scripts/build_react_app.sh

 PROMTOOL = ./promtool
 TSDB_BENCHMARK_NUM_METRICS ?= 1000
@@ -33,15 +29,28 @@ include Makefile.common

 DOCKER_IMAGE_NAME ?= prometheus

-$(REACT_APP_NODE_MODULES_PATH): $(REACT_APP_PATH)/package.json $(REACT_APP_PATH)/package-lock.json
-	cd $(REACT_APP_PATH) && npm ci
+.PHONY: ui-install
+ui-install:
+	cd $(UI_PATH) && npm install

-$(REACT_APP_OUTPUT_DIR): $(REACT_APP_NODE_MODULES_PATH) $(REACT_APP_SOURCE_FILES) $(REACT_APP_BUILD_SCRIPT)
-	@echo ">> building React app"
-	@$(REACT_APP_BUILD_SCRIPT)
+.PHONY: ui-build
+ui-build:
+	cd $(UI_PATH) && npm run build
+
+.PHONY: ui-build-module
+ui-build-module:
+	cd $(UI_PATH) && npm run build:module
+
+.PHONY: ui-test
+ui-test:
+	cd $(UI_PATH) && npm run test:coverage
+
+.PHONY: ui-lint
+ui-lint:
+	cd $(UI_PATH) && npm run lint

 .PHONY: assets
-assets: web-module-install web-module-build $(REACT_APP_OUTPUT_DIR)
+assets: ui-install ui-build
 	@echo ">> writing assets"
 	# Un-setting GOOS and GOARCH here because the generated Go code is always the same,
 	# but the cached object code is incompatible between architectures and OSes (which
@@ -49,52 +58,20 @@ assets: web-module-install web-module-build $(REACT_APP_OUTPUT_DIR)
 	cd web/ui && GO111MODULE=$(GO111MODULE) GOOS= GOARCH= $(GO) generate -x -v $(GOOPTS)
 	@$(GOFMT) -w ./web/ui

-.PHONY: react-app-lint
-react-app-lint:
-	@echo ">> running React app linting"
-	cd $(REACT_APP_PATH) && npm run lint:ci
-
-.PHONY: react-app-lint-fix
-react-app-lint-fix:
-	@echo ">> running React app linting and fixing errors where possible"
-	cd $(REACT_APP_PATH) && npm run lint
-
-.PHONY: react-app-test
-react-app-test: | $(REACT_APP_NODE_MODULES_PATH) react-app-lint
-	@echo ">> running React app tests"
-	cd $(REACT_APP_PATH) && npm run test --no-watch --coverage
-
-.PHONY: web-module-build
-web-module-build:
-	@cd ${WEB_MODULE_PATH} && ./build.sh --build
-
-.PHONY: web-module-lint
-web-module-lint:
-	@cd ${WEB_MODULE_PATH} && ./build.sh --lint
-
-.PHONY: web-module-test
-web-module-test:
-	@cd ${WEB_MODULE_PATH} && ./build.sh --test
-
-.PHONY: web-module-install
-web-module-install:
-	@cd ${WEB_MODULE_PATH} && ./build.sh --install
-
 .PHONY: test
 # If we only want to only test go code we have to change the test target
 # which is called by all.
 ifeq ($(GO_ONLY),1)
 test: common-test
 else
-test: common-test react-app-test web-module-test web-module-lint
+test: common-test ui-build-module ui-test ui-lint
 endif


 .PHONY: npm_licenses
-npm_licenses: $(REACT_APP_NODE_MODULES_PATH)
+npm_licenses: ui-install
 	@echo ">> bundling npm licenses"
 	rm -f $(REACT_APP_NPM_LICENSES_TARBALL)
-	find $(REACT_APP_NODE_MODULES_PATH) -iname "license*" | tar cfj $(REACT_APP_NPM_LICENSES_TARBALL) --transform 's/^/npm_licenses\//' --files-from=-
+	find $(UI_NODE_MODULES_PATH) -iname "license*" | tar cfj $(REACT_APP_NPM_LICENSES_TARBALL) --transform 's/^/npm_licenses\//' --files-from=-

 .PHONY: tarball
 tarball: npm_licenses common-tarball
@@ -81,10 +81,9 @@ func (importer *ruleImporter) importAll(ctx context.Context) (errs []error) {
 	for name, group := range importer.groups {
 		level.Info(importer.logger).Log("backfiller", "processing group", "name", name)

-		stimeWithAlignment := group.EvalTimestamp(importer.config.start.UnixNano())
 		for i, r := range group.Rules() {
 			level.Info(importer.logger).Log("backfiller", "processing rule", "id", i, "name", r.Name())
-			if err := importer.importRule(ctx, r.Query().String(), r.Name(), r.Labels(), stimeWithAlignment, importer.config.end, group); err != nil {
+			if err := importer.importRule(ctx, r.Query().String(), r.Name(), r.Labels(), importer.config.start, importer.config.end, group); err != nil {
 				errs = append(errs, err)
 			}
 		}
@@ -103,11 +102,18 @@ func (importer *ruleImporter) importRule(ctx context.Context, ruleExpr, ruleName

 		currStart := max(startOfBlock/int64(time.Second/time.Millisecond), start.Unix())
 		startWithAlignment := grp.EvalTimestamp(time.Unix(currStart, 0).UTC().UnixNano())
+		for startWithAlignment.Unix() < currStart {
+			startWithAlignment = startWithAlignment.Add(grp.Interval())
+		}
+		end := time.Unix(min(endOfBlock/int64(time.Second/time.Millisecond), end.Unix()), 0).UTC()
+		if end.Before(startWithAlignment) {
+			break
+		}
 		val, warnings, err := importer.apiClient.QueryRange(ctx,
 			ruleExpr,
 			v1.Range{
 				Start: startWithAlignment,
-				End:   time.Unix(min(endOfBlock/int64(time.Second/time.Millisecond), end.Unix()), 0).UTC(),
+				End:   end,
 				Step:  grp.Interval(),
 			},
 		)
@@ -141,22 +147,16 @@ func (importer *ruleImporter) importRule(ctx context.Context, ruleExpr, ruleName
 		matrix = val.(model.Matrix)

 		for _, sample := range matrix {
-			currentLabels := make(labels.Labels, 0, len(sample.Metric)+len(ruleLabels)+1)
-			currentLabels = append(currentLabels, labels.Label{
-				Name:  labels.MetricName,
-				Value: ruleName,
-			})
-
-			currentLabels = append(currentLabels, ruleLabels...)
-
+			lb := labels.NewBuilder(ruleLabels)
+
 			for name, value := range sample.Metric {
-				currentLabels = append(currentLabels, labels.Label{
-					Name:  string(name),
-					Value: string(value),
-				})
+				lb.Set(string(name), string(value))
 			}

+			lb.Set(labels.MetricName, ruleName)
+
 			for _, value := range sample.Values {
-				if err := app.add(ctx, currentLabels, timestamp.FromTime(value.Timestamp.Time()), float64(value.Value)); err != nil {
+				if err := app.add(ctx, lb.Labels(), timestamp.FromTime(value.Timestamp.Time()), float64(value.Value)); err != nil {
 					return errors.Wrap(err, "add")
 				}
 			}
@@ -61,6 +61,7 @@ func TestBackfillRuleIntegration(t *testing.T) {
 	}{
 		{"no samples", 1, 0, 0, 0, []*model.SampleStream{}},
 		{"run importer once", 1, 8, 4, 4, []*model.SampleStream{{Metric: model.Metric{"name1": "val1"}, Values: []model.SamplePair{{Timestamp: testTime, Value: testValue}}}}},
+		{"run importer with dup name label", 1, 8, 4, 4, []*model.SampleStream{{Metric: model.Metric{"__name__": "val1", "name1": "val1"}, Values: []model.SamplePair{{Timestamp: testTime, Value: testValue}}}}},
 		{"one importer twice", 2, 8, 4, 8, []*model.SampleStream{{Metric: model.Metric{"name1": "val1"}, Values: []model.SamplePair{{Timestamp: testTime, Value: testValue}, {Timestamp: testTime2, Value: testValue2}}}}},
 	}
 	for _, tt := range testCases {
@@ -194,7 +195,7 @@ func createMultiRuleTestFiles(path string) error {
 - record: grp1_rule1
   expr: grp1_rule1_expr
   labels:
-    testlabel11: testlabelvalue11
+    testlabel11: testlabelvalue12
 - name: group2
   rules:
   - record: grp2_rule1
@@ -202,7 +203,7 @@ func createMultiRuleTestFiles(path string) error {
 - record: grp2_rule2
   expr: grp2_rule2_expr
   labels:
-    testlabel11: testlabelvalue11
+    testlabel11: testlabelvalue13
 `
 	return ioutil.WriteFile(path, []byte(recordingRules), 0777)
 }
@@ -45,6 +45,7 @@ import (
 	"github.com/prometheus/prometheus/discovery/marathon"
 	"github.com/prometheus/prometheus/discovery/moby"
 	"github.com/prometheus/prometheus/discovery/openstack"
+	"github.com/prometheus/prometheus/discovery/puppetdb"
 	"github.com/prometheus/prometheus/discovery/scaleway"
 	"github.com/prometheus/prometheus/discovery/targetgroup"
 	"github.com/prometheus/prometheus/discovery/triton"
@@ -790,6 +791,34 @@ var expectedConf = &Config{
 			}},
 		},
 	},
+	{
+		JobName: "service-puppetdb",
+
+		HonorTimestamps: true,
+		ScrapeInterval:  model.Duration(15 * time.Second),
+		ScrapeTimeout:   DefaultGlobalConfig.ScrapeTimeout,
+
+		MetricsPath:      DefaultScrapeConfig.MetricsPath,
+		Scheme:           DefaultScrapeConfig.Scheme,
+		HTTPClientConfig: config.DefaultHTTPClientConfig,
+
+		ServiceDiscoveryConfigs: discovery.Configs{&puppetdb.SDConfig{
+			URL:               "https://puppetserver/",
+			Query:             "resources { type = \"Package\" and title = \"httpd\" }",
+			IncludeParameters: true,
+			Port:              80,
+			RefreshInterval:   model.Duration(60 * time.Second),
+			HTTPClientConfig: config.HTTPClientConfig{
+				FollowRedirects: true,
+				TLSConfig: config.TLSConfig{
+					CAFile:   "testdata/valid_ca_file",
+					CertFile: "testdata/valid_cert_file",
+					KeyFile:  "testdata/valid_key_file",
+				},
+			},
+		}},
+	},
 	{
 		JobName: "hetzner",
 		HonorTimestamps: true,
@@ -1262,6 +1291,22 @@ var expectedErrors = []struct {
 		filename: "empty_static_config.bad.yml",
 		errMsg:   "empty or null section in static_configs",
 	},
+	{
+		filename: "puppetdb_no_query.bad.yml",
+		errMsg:   "query missing",
+	},
+	{
+		filename: "puppetdb_no_url.bad.yml",
+		errMsg:   "URL is missing",
+	},
+	{
+		filename: "puppetdb_bad_url.bad.yml",
+		errMsg:   "host is missing in URL",
+	},
+	{
+		filename: "puppetdb_no_scheme.bad.yml",
+		errMsg:   "URL scheme must be 'http' or 'https'",
+	},
 	{
 		filename: "hetzner_role.bad.yml",
 		errMsg:   "unknown role",
config/testdata/conf.good.yml (12 changes, vendored)
@@ -307,6 +307,18 @@ scrape_configs:
       cert_file: valid_cert_file
       key_file: valid_key_file

+  - job_name: service-puppetdb
+    puppetdb_sd_configs:
+      - url: https://puppetserver/
+        query: 'resources { type = "Package" and title = "httpd" }'
+        include_parameters: true
+        port: 80
+        refresh_interval: 1m
+        tls_config:
+          ca_file: valid_ca_file
+          cert_file: valid_cert_file
+          key_file: valid_key_file
+
   - job_name: hetzner
     hetzner_sd_configs:
       - role: hcloud
config/testdata/puppetdb_bad_url.bad.yml (new file, 4 lines, vendored)

scrape_configs:
  - puppetdb_sd_configs:
      - url: http://
        query: 'resources { type = "Package" and title = "httpd" }'

config/testdata/puppetdb_no_query.bad.yml (new file, 3 lines, vendored)

scrape_configs:
  - puppetdb_sd_configs:
      - url: http://puppetserver/

config/testdata/puppetdb_no_scheme.bad.yml (new file, 4 lines, vendored)

scrape_configs:
  - puppetdb_sd_configs:
      - url: ftp://puppet
        query: 'resources { type = "Package" and title = "httpd" }'

config/testdata/puppetdb_no_url.bad.yml (new file, 3 lines, vendored)

scrape_configs:
  - puppetdb_sd_configs:
      - query: 'resources { type = "Package" and title = "httpd" }'
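For contrast with these four failing fixtures, here is a minimal sketch of a configuration that passes all of the new validation checks (URL present, `http`/`https` scheme, non-empty host, query present); it mirrors the `service-puppetdb` job added to `conf.good.yml` earlier in this commit:

```yaml
scrape_configs:
  - puppetdb_sd_configs:
      - url: https://puppetserver/   # scheme and host both present
        query: 'resources { type = "Package" and title = "httpd" }'
```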
@@ -31,6 +31,7 @@ import (
 	_ "github.com/prometheus/prometheus/discovery/marathon"  // register marathon
 	_ "github.com/prometheus/prometheus/discovery/moby"      // register moby
 	_ "github.com/prometheus/prometheus/discovery/openstack" // register openstack
+	_ "github.com/prometheus/prometheus/discovery/puppetdb"  // register puppetdb
 	_ "github.com/prometheus/prometheus/discovery/scaleway"  // register scaleway
 	_ "github.com/prometheus/prometheus/discovery/triton"    // register triton
 	_ "github.com/prometheus/prometheus/discovery/xds"       // register xds
discovery/puppetdb/fixtures/vhosts.json (new file, 49 lines)

[
  {
    "certname": "edinburgh.example.com",
    "environment": "prod",
    "exported": false,
    "file": "/etc/puppetlabs/code/environments/prod/modules/upstream/apache/manifests/init.pp",
    "line": 384,
    "parameters": {
      "access_log": true,
      "access_log_file": "ssl_access_log",
      "additional_includes": [ ],
      "directoryindex": "",
      "docroot": "/var/www/html",
      "ensure": "absent",
      "options": [
        "Indexes",
        "FollowSymLinks",
        "MultiViews"
      ],
      "php_flags": { },
      "labels": {
        "alias": "edinburgh"
      },
      "scriptaliases": [
        {
          "alias": "/cgi-bin",
          "path": "/var/www/cgi-bin"
        }
      ]
    },
    "resource": "49af83866dc5a1518968b68e58a25319107afe11",
    "tags": [
      "roles::hypervisor",
      "apache",
      "apache::vhost",
      "class",
      "default-ssl",
      "profile_hypervisor",
      "vhost",
      "profile_apache",
      "hypervisor",
      "__node_regexp__edinburgh",
      "roles",
      "node"
    ],
    "title": "default-ssl",
    "type": "Apache::Vhost"
  }
]
discovery/puppetdb/puppetdb.go (new file, 252 lines)

// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package puppetdb

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"net"
	"net/http"
	"net/url"
	"path"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/go-kit/log"
	"github.com/pkg/errors"
	"github.com/prometheus/common/config"
	"github.com/prometheus/common/model"
	"github.com/prometheus/common/version"

	"github.com/prometheus/prometheus/discovery"
	"github.com/prometheus/prometheus/discovery/refresh"
	"github.com/prometheus/prometheus/discovery/targetgroup"
)

const (
	pdbLabel            = model.MetaLabelPrefix + "puppetdb_"
	pdbLabelCertname    = pdbLabel + "certname"
	pdbLabelResource    = pdbLabel + "resource"
	pdbLabelType        = pdbLabel + "type"
	pdbLabelTitle       = pdbLabel + "title"
	pdbLabelExported    = pdbLabel + "exported"
	pdbLabelTags        = pdbLabel + "tags"
	pdbLabelFile        = pdbLabel + "file"
	pdbLabelEnvironment = pdbLabel + "environment"
	pdbLabelParameter   = pdbLabel + "parameter_"
	separator           = ","
)

var (
	// DefaultSDConfig is the default PuppetDB SD configuration.
	DefaultSDConfig = SDConfig{
		RefreshInterval:  model.Duration(60 * time.Second),
		Port:             80,
		HTTPClientConfig: config.DefaultHTTPClientConfig,
	}
	matchContentType = regexp.MustCompile(`^(?i:application\/json(;\s*charset=("utf-8"|utf-8))?)$`)
	userAgent        = fmt.Sprintf("Prometheus/%s", version.Version)
)

func init() {
	discovery.RegisterConfig(&SDConfig{})
}

// SDConfig is the configuration for PuppetDB based discovery.
type SDConfig struct {
	HTTPClientConfig  config.HTTPClientConfig `yaml:",inline"`
	RefreshInterval   model.Duration          `yaml:"refresh_interval,omitempty"`
	URL               string                  `yaml:"url"`
	Query             string                  `yaml:"query"`
	IncludeParameters bool                    `yaml:"include_parameters"`
	Port              int                     `yaml:"port"`
}

// Name returns the name of the Config.
func (*SDConfig) Name() string { return "puppetdb" }

// NewDiscoverer returns a Discoverer for the Config.
func (c *SDConfig) NewDiscoverer(opts discovery.DiscovererOptions) (discovery.Discoverer, error) {
	return NewDiscovery(c, opts.Logger)
}

// SetDirectory joins any relative file paths with dir.
func (c *SDConfig) SetDirectory(dir string) {
	c.HTTPClientConfig.SetDirectory(dir)
}

// UnmarshalYAML implements the yaml.Unmarshaler interface.
func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
	*c = DefaultSDConfig
	type plain SDConfig
	err := unmarshal((*plain)(c))
	if err != nil {
		return err
	}
	if c.URL == "" {
		return fmt.Errorf("URL is missing")
	}
	parsedURL, err := url.Parse(c.URL)
	if err != nil {
		return err
	}
	if parsedURL.Scheme != "http" && parsedURL.Scheme != "https" {
		return fmt.Errorf("URL scheme must be 'http' or 'https'")
	}
	if parsedURL.Host == "" {
		return fmt.Errorf("host is missing in URL")
	}
	if c.Query == "" {
		return fmt.Errorf("query missing")
	}
	return nil
}

// Discovery provides service discovery functionality based
// on PuppetDB resources.
type Discovery struct {
	*refresh.Discovery
	url               string
	query             string
	port              int
	includeParameters bool
	client            *http.Client
}

// NewDiscovery returns a new PuppetDB discovery for the given config.
func NewDiscovery(conf *SDConfig, logger log.Logger) (*Discovery, error) {
	if logger == nil {
		logger = log.NewNopLogger()
	}

	client, err := config.NewClientFromConfig(conf.HTTPClientConfig, "http", config.WithHTTP2Disabled())
	if err != nil {
		return nil, err
	}
	client.Timeout = time.Duration(conf.RefreshInterval)

	u, err := url.Parse(conf.URL)
	if err != nil {
		return nil, err
	}
	u.Path = path.Join(u.Path, "pdb/query/v4")

	d := &Discovery{
		url:               u.String(),
		port:              conf.Port,
		query:             conf.Query,
		includeParameters: conf.IncludeParameters,
		client:            client,
	}

	d.Discovery = refresh.NewDiscovery(
		logger,
		"http",
		time.Duration(conf.RefreshInterval),
		d.refresh,
	)
	return d, nil
}

func (d *Discovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) {
	body := struct {
		Query string `json:"query"`
	}{d.query}
	bodyBytes, err := json.Marshal(body)
	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest("POST", d.url, bytes.NewBuffer(bodyBytes))
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", userAgent)
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Content-Type", "application/json")

	resp, err := d.client.Do(req.WithContext(ctx))
	if err != nil {
		return nil, err
	}
	defer func() {
		io.Copy(ioutil.Discard, resp.Body)
		resp.Body.Close()
	}()

	if resp.StatusCode != http.StatusOK {
		return nil, errors.Errorf("server returned HTTP status %s", resp.Status)
	}

	if ct := resp.Header.Get("Content-Type"); !matchContentType.MatchString(ct) {
		return nil, errors.Errorf("unsupported content type %s", resp.Header.Get("Content-Type"))
	}

	b, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}

	var resources []Resource

	if err := json.Unmarshal(b, &resources); err != nil {
		return nil, err
	}

	tg := &targetgroup.Group{
		// Use a pseudo-URL as source.
		Source: d.url + "?query=" + d.query,
	}

	for _, resource := range resources {
		labels := model.LabelSet{
			pdbLabelCertname:    model.LabelValue(resource.Certname),
			pdbLabelResource:    model.LabelValue(resource.Resource),
			pdbLabelType:        model.LabelValue(resource.Type),
			pdbLabelTitle:       model.LabelValue(resource.Title),
			pdbLabelExported:    model.LabelValue(fmt.Sprintf("%t", resource.Exported)),
			pdbLabelFile:        model.LabelValue(resource.File),
			pdbLabelEnvironment: model.LabelValue(resource.Environment),
		}

		addr := net.JoinHostPort(resource.Certname, strconv.FormatUint(uint64(d.port), 10))
		labels[model.AddressLabel] = model.LabelValue(addr)

		if len(resource.Tags) > 0 {
			// We surround the separated list with the separator as well. This way regular expressions
			// in relabeling rules don't have to consider tag positions.
			tags := separator + strings.Join(resource.Tags, separator) + separator
			labels[pdbLabelTags] = model.LabelValue(tags)
		}

		// Parameters are not included by default. This should only be enabled
		// on select resources as it might expose secrets on the Prometheus UI
		// for certain resources.
		if d.includeParameters {
			for k, v := range resource.Parameters.toLabels() {
				labels[pdbLabelParameter+k] = v
			}
		}

		tg.Targets = append(tg.Targets, labels)
	}

	return []*targetgroup.Group{tg}, nil
}
discovery/puppetdb/puppetdb_test.go (new file, 195 lines)

// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package puppetdb

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"net/http/httptest"
	"testing"
	"time"

	"github.com/go-kit/log"
	"github.com/prometheus/common/config"
	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/stretchr/testify/require"
)

func mockServer(t *testing.T) *httptest.Server {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		var request struct {
			Query string `json:"query"`
		}
		err := json.NewDecoder(r.Body).Decode(&request)
		if err != nil {
			w.WriteHeader(http.StatusBadRequest)
			return
		}
		http.ServeFile(w, r, "fixtures/"+request.Query+".json")
	}))
	t.Cleanup(ts.Close)
	return ts
}

func TestPuppetSlashInURL(t *testing.T) {
	tests := map[string]string{
		"https://puppetserver":      "https://puppetserver/pdb/query/v4",
		"https://puppetserver/":     "https://puppetserver/pdb/query/v4",
		"http://puppetserver:8080/": "http://puppetserver:8080/pdb/query/v4",
		"http://puppetserver:8080":  "http://puppetserver:8080/pdb/query/v4",
	}

	for serverURL, apiURL := range tests {
		cfg := SDConfig{
			HTTPClientConfig: config.DefaultHTTPClientConfig,
			URL:              serverURL,
			Query:            "vhosts", // This is not a valid PuppetDB query, but it is used by the mock.
			Port:             80,
			RefreshInterval:  model.Duration(30 * time.Second),
		}
		d, err := NewDiscovery(&cfg, log.NewNopLogger())
		require.NoError(t, err)
		require.Equal(t, apiURL, d.url)
	}
}

func TestPuppetDBRefresh(t *testing.T) {
	ts := mockServer(t)

	cfg := SDConfig{
		HTTPClientConfig: config.DefaultHTTPClientConfig,
		URL:              ts.URL,
		Query:            "vhosts", // This is not a valid PuppetDB query, but it is used by the mock.
		Port:             80,
		RefreshInterval:  model.Duration(30 * time.Second),
	}

	d, err := NewDiscovery(&cfg, log.NewNopLogger())
	require.NoError(t, err)

	ctx := context.Background()
	tgs, err := d.refresh(ctx)
	require.NoError(t, err)

	expectedTargets := []*targetgroup.Group{
		{
			Targets: []model.LabelSet{
				{
					model.AddressLabel:                             model.LabelValue("edinburgh.example.com:80"),
					model.LabelName("__meta_puppetdb_certname"):    model.LabelValue("edinburgh.example.com"),
					model.LabelName("__meta_puppetdb_environment"): model.LabelValue("prod"),
					model.LabelName("__meta_puppetdb_exported"):    model.LabelValue("false"),
					model.LabelName("__meta_puppetdb_file"):        model.LabelValue("/etc/puppetlabs/code/environments/prod/modules/upstream/apache/manifests/init.pp"),
					model.LabelName("__meta_puppetdb_resource"):    model.LabelValue("49af83866dc5a1518968b68e58a25319107afe11"),
					model.LabelName("__meta_puppetdb_tags"):        model.LabelValue(",roles::hypervisor,apache,apache::vhost,class,default-ssl,profile_hypervisor,vhost,profile_apache,hypervisor,__node_regexp__edinburgh,roles,node,"),
					model.LabelName("__meta_puppetdb_title"):       model.LabelValue("default-ssl"),
					model.LabelName("__meta_puppetdb_type"):        model.LabelValue("Apache::Vhost"),
				},
			},
			Source: ts.URL + "/pdb/query/v4?query=vhosts",
		},
	}
	require.Equal(t, tgs, expectedTargets)
}

func TestPuppetDBRefreshWithParameters(t *testing.T) {
	ts := mockServer(t)

	cfg := SDConfig{
		HTTPClientConfig:  config.DefaultHTTPClientConfig,
		URL:               ts.URL,
		Query:             "vhosts", // This is not a valid PuppetDB query, but it is used by the mock.
		Port:              80,
		IncludeParameters: true,
		RefreshInterval:   model.Duration(30 * time.Second),
	}

	d, err := NewDiscovery(&cfg, log.NewNopLogger())
	require.NoError(t, err)

	ctx := context.Background()
	tgs, err := d.refresh(ctx)
	require.NoError(t, err)

	expectedTargets := []*targetgroup.Group{
		{
			Targets: []model.LabelSet{
				{
					model.AddressLabel:                                           model.LabelValue("edinburgh.example.com:80"),
					model.LabelName("__meta_puppetdb_certname"):                  model.LabelValue("edinburgh.example.com"),
					model.LabelName("__meta_puppetdb_environment"):               model.LabelValue("prod"),
					model.LabelName("__meta_puppetdb_exported"):                  model.LabelValue("false"),
					model.LabelName("__meta_puppetdb_file"):                      model.LabelValue("/etc/puppetlabs/code/environments/prod/modules/upstream/apache/manifests/init.pp"),
					model.LabelName("__meta_puppetdb_parameter_access_log"):      model.LabelValue("true"),
					model.LabelName("__meta_puppetdb_parameter_access_log_file"): model.LabelValue("ssl_access_log"),
					model.LabelName("__meta_puppetdb_parameter_docroot"):         model.LabelValue("/var/www/html"),
					model.LabelName("__meta_puppetdb_parameter_ensure"):          model.LabelValue("absent"),
					model.LabelName("__meta_puppetdb_parameter_labels_alias"):    model.LabelValue("edinburgh"),
					model.LabelName("__meta_puppetdb_parameter_options"):         model.LabelValue("Indexes,FollowSymLinks,MultiViews"),
					model.LabelName("__meta_puppetdb_resource"):                  model.LabelValue("49af83866dc5a1518968b68e58a25319107afe11"),
					model.LabelName("__meta_puppetdb_tags"):                      model.LabelValue(",roles::hypervisor,apache,apache::vhost,class,default-ssl,profile_hypervisor,vhost,profile_apache,hypervisor,__node_regexp__edinburgh,roles,node,"),
					model.LabelName("__meta_puppetdb_title"):                     model.LabelValue("default-ssl"),
					model.LabelName("__meta_puppetdb_type"):                      model.LabelValue("Apache::Vhost"),
				},
			},
			Source: ts.URL + "/pdb/query/v4?query=vhosts",
		},
	}
	require.Equal(t, tgs, expectedTargets)
}

func TestPuppetDBInvalidCode(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusBadRequest)
	}))

	t.Cleanup(ts.Close)

	cfg := SDConfig{
		HTTPClientConfig: config.DefaultHTTPClientConfig,
		URL:              ts.URL,
		RefreshInterval:  model.Duration(30 * time.Second),
	}

	d, err := NewDiscovery(&cfg, log.NewNopLogger())
	require.NoError(t, err)

	ctx := context.Background()
	_, err = d.refresh(ctx)
	require.EqualError(t, err, "server returned HTTP status 400 Bad Request")
}

func TestPuppetDBInvalidFormat(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "{}")
	}))

	t.Cleanup(ts.Close)

	cfg := SDConfig{
		HTTPClientConfig: config.DefaultHTTPClientConfig,
		URL:              ts.URL,
		RefreshInterval:  model.Duration(30 * time.Second),
	}

	d, err := NewDiscovery(&cfg, log.NewNopLogger())
	require.NoError(t, err)

	ctx := context.Background()
	_, err = d.refresh(ctx)
	require.EqualError(t, err, "unsupported content type text/plain; charset=utf-8")
}
discovery/puppetdb/resources.go (new file, 82 lines)

// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package puppetdb

import (
	"strconv"
	"strings"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/util/strutil"
)

type Resource struct {
	Certname    string     `json:"certname"`
	Resource    string     `json:"resource"`
	Type        string     `json:"type"`
	Title       string     `json:"title"`
	Exported    bool       `json:"exported"`
	Tags        []string   `json:"tags"`
	File        string     `json:"file"`
	Environment string     `json:"environment"`
	Parameters  Parameters `json:"parameters"`
}

type Parameters map[string]interface{}

func (p *Parameters) toLabels() model.LabelSet {
	labels := model.LabelSet{}

	for k, v := range *p {
		var labelValue string
		switch value := v.(type) {
		case string:
			labelValue = value
		case bool:
			labelValue = strconv.FormatBool(value)
		case []string:
			labelValue = separator + strings.Join(value, separator) + separator
		case []interface{}:
			if len(value) == 0 {
				continue
			}
			values := make([]string, len(value))
			for i, v := range value {
				switch value := v.(type) {
				case string:
					values[i] = value
				case bool:
					values[i] = strconv.FormatBool(value)
				case []string:
					values[i] = separator + strings.Join(value, separator) + separator
				}
			}
			labelValue = strings.Join(values, separator)
		case map[string]interface{}:
			subParameter := Parameters(value)
			prefix := strutil.SanitizeLabelName(k + "_")
			for subk, subv := range subParameter.toLabels() {
				labels[model.LabelName(prefix)+subk] = subv
			}
		default:
			continue
		}
		if labelValue == "" {
			continue
		}
		name := strutil.SanitizeLabelName(k)
		labels[model.LabelName(name)] = model.LabelValue(labelValue)
	}
	return labels
}
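As a concrete illustration of this flattening, the `vhosts.json` fixture above yields the following `__meta_puppetdb_parameter_*` labels (values as asserted in `puppetdb_test.go`; empty strings, empty arrays, and non-scalar list elements are dropped):

```yaml
# Parameter meta labels produced for the vhosts.json fixture:
__meta_puppetdb_parameter_access_log: "true"                 # bool -> strconv.FormatBool
__meta_puppetdb_parameter_access_log_file: "ssl_access_log"  # string passed through
__meta_puppetdb_parameter_docroot: "/var/www/html"
__meta_puppetdb_parameter_ensure: "absent"
__meta_puppetdb_parameter_labels_alias: "edinburgh"          # nested map -> "labels_" prefix
__meta_puppetdb_parameter_options: "Indexes,FollowSymLinks,MultiViews"  # list joined with ","
```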
@@ -272,6 +272,10 @@ nerve_sd_configs:
 openstack_sd_configs:
   [ - <openstack_sd_config> ... ]

+# List of PuppetDB service discovery configurations.
+puppetdb_sd_configs:
+  [ - <puppetdb_sd_config> ... ]
+
 # List of Scaleway service discovery configurations.
 scaleway_sd_configs:
   [ - <scaleway_sd_config> ... ]
@@ -1069,6 +1073,94 @@ tls_config:
   [ <tls_config> ]
 ```

+### `<puppetdb_sd_config>`
+
+PuppetDB SD configurations allow retrieving scrape targets from
+[PuppetDB](https://puppet.com/docs/puppetdb/latest/index.html) resources.
+
+This SD discovers resources and will create a target for each resource returned
+by the API.
+
+The resource address is the `certname` of the resource and can be changed during
+[relabeling](#relabel_config).
+
+The following meta labels are available on targets during [relabeling](#relabel_config):
+
+* `__meta_puppetdb_certname`: the name of the node associated with the resource
+* `__meta_puppetdb_resource`: a SHA-1 hash of the resource's type, title, and parameters, for identification
+* `__meta_puppetdb_type`: the resource type
+* `__meta_puppetdb_title`: the resource title
+* `__meta_puppetdb_exported`: whether the resource is exported (`"true"` or `"false"`)
+* `__meta_puppetdb_tags`: comma separated list of resource tags
+* `__meta_puppetdb_file`: the manifest file in which the resource was declared
+* `__meta_puppetdb_environment`: the environment of the node associated with the resource
+* `__meta_puppetdb_parameter_<parametername>`: the parameters of the resource
+
+See below for the configuration options for PuppetDB discovery:
+
+```yaml
+# The URL of the PuppetDB root query endpoint.
+url: <string>
+
+# Puppet Query Language (PQL) query. Only resources are supported.
+# https://puppet.com/docs/puppetdb/latest/api/query/v4/pql.html
+query: <string>
+
+# Whether to include the parameters as meta labels.
+# Due to the differences between parameter types and Prometheus labels,
+# some parameters might not be rendered. The format of the parameters might
+# also change in future releases.
+#
+# Note: Enabling this exposes parameters in the Prometheus UI and API. Make sure
+# that you don't have secrets exposed as parameters if you enable this.
+[ include_parameters: <boolean> | default = false ]
+
+# Refresh interval to re-read the resources list.
+[ refresh_interval: <duration> | default = 60s ]
+
+# The port to scrape metrics from.
+[ port: <int> | default = 80 ]
+
+# TLS configuration to connect to the PuppetDB.
+tls_config:
+  [ <tls_config> ]
+
+# basic_auth, authorization, and oauth2, are mutually exclusive.
+
+# Optional HTTP basic authentication information.
+basic_auth:
+  [ username: <string> ]
+  [ password: <secret> ]
+  [ password_file: <string> ]
+
+# `Authorization` HTTP header configuration.
+authorization:
+  # Sets the authentication type.
+  [ type: <string> | default: Bearer ]
+  # Sets the credentials. It is mutually exclusive with
+  # `credentials_file`.
+  [ credentials: <secret> ]
+  # Sets the credentials with the credentials read from the configured file.
+  # It is mutually exclusive with `credentials`.
+  [ credentials_file: <filename> ]
+
+# Optional OAuth 2.0 configuration.
+# Cannot be used at the same time as basic_auth or authorization.
+oauth2:
+  [ <oauth2> ]
+
+# Optional proxy URL.
+[ proxy_url: <string> ]
+
+# Configure whether HTTP requests follow HTTP 3xx redirects.
+[ follow_redirects: <bool> | default = true ]
+```
+
+See [this example Prometheus configuration file](/documentation/examples/prometheus-puppetdb.yml)
+for a detailed example of configuring Prometheus with PuppetDB.
+
+
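A minimal sketch of how these meta labels are typically used in relabeling (the job name and server address are hypothetical, and the keep-filter on `__meta_puppetdb_exported` is illustrative, not part of this commit):

```yaml
scrape_configs:
  - job_name: puppetdb-exported-only      # hypothetical job name
    puppetdb_sd_configs:
      - url: https://puppetdb.example.com # placeholder server
        query: 'resources { type = "Prometheus::Scrape_job" }'
    relabel_configs:
      # Keep only resources that Puppet marked as exported.
      - source_labels: [__meta_puppetdb_exported]
        regex: "true"
        action: keep
```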
 ### `<file_sd_config>`

 File-based service discovery provides a more generic way to configure static targets
@@ -2387,6 +2479,10 @@ nerve_sd_configs:
 openstack_sd_configs:
   [ - <openstack_sd_config> ... ]

+# List of PuppetDB service discovery configurations.
+puppetdb_sd_configs:
+  [ - <puppetdb_sd_config> ... ]
+
 # List of Scaleway service discovery configurations.
 scaleway_sd_configs:
   [ - <scaleway_sd_config> ... ]
@@ -78,6 +78,10 @@ name: <string>
 # How often rules in the group are evaluated.
 [ interval: <duration> | default = global.evaluation_interval ]

+# Limit the number of alerts and series individual rules can produce.
+# 0 is no limit.
+[ limit: <int> | default = 0 ]
+
 rules:
   [ - <rule> ... ]
 ```
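A sketch of a rule group using the new `limit` field (the group, rule, and metric names are illustrative):

```yaml
groups:
  - name: example-with-limit   # illustrative group name
    interval: 1m
    # Each rule in this group may now produce at most 10 series or alerts
    # per evaluation; 0 (the default) means no limit.
    limit: 10
    rules:
      - record: job:http_requests:rate5m   # illustrative recording rule
        expr: sum by (job) (rate(http_requests_total[5m]))
```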
@@ -434,3 +434,26 @@ over time and return an instant vector with per-series aggregation results:

 Note that all values in the specified interval have the same weight in the
 aggregation even if the values are not equally spaced throughout the interval.
+
+## Trigonometric Functions
+
+The trigonometric functions work in radians:
+
+- `acos(v instant-vector)`: calculates the arccosine of all elements in `v` ([special cases](https://pkg.go.dev/math#Acos)).
+- `acosh(v instant-vector)`: calculates the inverse hyperbolic cosine of all elements in `v` ([special cases](https://pkg.go.dev/math#Acosh)).
+- `asin(v instant-vector)`: calculates the arcsine of all elements in `v` ([special cases](https://pkg.go.dev/math#Asin)).
+- `asinh(v instant-vector)`: calculates the inverse hyperbolic sine of all elements in `v` ([special cases](https://pkg.go.dev/math#Asinh)).
+- `atan(v instant-vector)`: calculates the arctangent of all elements in `v` ([special cases](https://pkg.go.dev/math#Atan)).
+- `atanh(v instant-vector)`: calculates the inverse hyperbolic tangent of all elements in `v` ([special cases](https://pkg.go.dev/math#Atanh)).
+- `cos(v instant-vector)`: calculates the cosine of all elements in `v` ([special cases](https://pkg.go.dev/math#Cos)).
+- `cosh(v instant-vector)`: calculates the hyperbolic cosine of all elements in `v` ([special cases](https://pkg.go.dev/math#Cosh)).
+- `sin(v instant-vector)`: calculates the sine of all elements in `v` ([special cases](https://pkg.go.dev/math#Sin)).
+- `sinh(v instant-vector)`: calculates the hyperbolic sine of all elements in `v` ([special cases](https://pkg.go.dev/math#Sinh)).
+- `tan(v instant-vector)`: calculates the tangent of all elements in `v` ([special cases](https://pkg.go.dev/math#Tan)).
+- `tanh(v instant-vector)`: calculates the hyperbolic tangent of all elements in `v` ([special cases](https://pkg.go.dev/math#Tanh)).
+
+The following are useful for converting between degrees and radians:
+
+- `deg(v instant-vector)`: converts radians to degrees for all elements in `v`.
+- `pi()`: returns pi.
+- `rad(v instant-vector)`: converts degrees to radians for all elements in `v`.
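A sketch of the new conversion functions in a recording rule (the input metric name is hypothetical):

```yaml
groups:
  - name: angle-conversions   # illustrative group name
    rules:
      # rad() converts a degrees-valued series to radians, per the docs above.
      - record: signal:phase:radians
        expr: rad(signal_phase_degrees)   # signal_phase_degrees is hypothetical
```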
documentation/examples/prometheus-puppetdb.yml (new file, 40 lines)

# Prometheus example configuration to be used with PuppetDB.

scrape_configs:
  - job_name: 'puppetdb-node-exporter'
    puppetdb_sd_configs:
      # This example discovers the nodes which have the class Prometheus::Node_exporter.
      - url: https://puppetdb.example.com
        query: 'resources { type = "Class" and title = "Prometheus::Node_exporter" }'
        port: 9100
        tls_config:
          cert_file: prometheus-public.pem
          key_file: prometheus-private.pem
          ca_file: ca.pem

  - job_name: 'puppetdb-scrape-jobs'
    puppetdb_sd_configs:
      # This example uses the Prometheus::Scrape_job
      # exported resources.
      # https://github.com/camptocamp/prometheus-puppetdb-sd
      # This example is compatible with Prometheus-puppetdb-sd,
      # if the exported Prometheus::Scrape_job resources have at most one target.
      - url: https://puppetdb.example.com
        query: 'resources { type = "Prometheus::Scrape_job" and exported = true }'
        include_parameters: true
        tls_config:
          cert_file: prometheus-public.pem
          key_file: prometheus-private.pem
          ca_file: ca.pem
    relabel_configs:
      - source_labels: [__meta_puppetdb_certname]
        target_label: certname
      - source_labels: [__meta_puppetdb_parameter_targets]
        regex: '(.+),?.*'
        replacement: $1
        target_label: __address__
      - source_labels: [__meta_puppetdb_parameter_job_name]
        target_label: job
      - regex: '__meta_puppetdb_parameter_labels_(.+)'
        replacement: '$1'
        action: labelmap
@@ -107,6 +107,7 @@ func (g *RuleGroups) Validate(node ruleGroups) (errs []error) {
 type RuleGroup struct {
 	Name     string         `yaml:"name"`
 	Interval model.Duration `yaml:"interval,omitempty"`
+	Limit    int            `yaml:"limit,omitempty"`
 	Rules    []RuleNode     `yaml:"rules"`
 }
@@ -570,6 +570,87 @@ func funcLog10(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
 	return simpleFunc(vals, enh, math.Log10)
 }
 
+// === sin(Vector parser.ValueTypeVector) Vector ===
+func funcSin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Sin)
+}
+
+// === cos(Vector parser.ValueTypeVector) Vector ===
+func funcCos(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Cos)
+}
+
+// === tan(Vector parser.ValueTypeVector) Vector ===
+func funcTan(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Tan)
+}
+
+// === asin(Vector parser.ValueTypeVector) Vector ===
+func funcAsin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Asin)
+}
+
+// === acos(Vector parser.ValueTypeVector) Vector ===
+func funcAcos(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Acos)
+}
+
+// === atan(Vector parser.ValueTypeVector) Vector ===
+func funcAtan(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Atan)
+}
+
+// === sinh(Vector parser.ValueTypeVector) Vector ===
+func funcSinh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Sinh)
+}
+
+// === cosh(Vector parser.ValueTypeVector) Vector ===
+func funcCosh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Cosh)
+}
+
+// === tanh(Vector parser.ValueTypeVector) Vector ===
+func funcTanh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Tanh)
+}
+
+// === asinh(Vector parser.ValueTypeVector) Vector ===
+func funcAsinh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Asinh)
+}
+
+// === acosh(Vector parser.ValueTypeVector) Vector ===
+func funcAcosh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Acosh)
+}
+
+// === atanh(Vector parser.ValueTypeVector) Vector ===
+func funcAtanh(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, math.Atanh)
+}
+
+// === rad(Vector parser.ValueTypeVector) Vector ===
+func funcRad(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, func(v float64) float64 {
+		return v * math.Pi / 180
+	})
+}
+
+// === deg(Vector parser.ValueTypeVector) Vector ===
+func funcDeg(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return simpleFunc(vals, enh, func(v float64) float64 {
+		return v * 180 / math.Pi
+	})
+}
+
+// === pi() Scalar ===
+func funcPi(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
+	return Vector{Sample{Point: Point{
+		V: math.Pi,
+	}}}
+}
+
 // === sgn(Vector parser.ValueTypeVector) Vector ===
 func funcSgn(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
 	return simpleFunc(vals, enh, func(v float64) float64 {
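All of the new functions above delegate to the existing simpleFunc helper. A hedged, self-contained sketch of that pattern, assuming it boils down to mapping a float64 -> float64 over every sample value (the real helper also manages labels via enh, which is omitted here):

package main

import (
	"fmt"
	"math"
)

// applyToSamples is illustrative only, not the Prometheus implementation.
func applyToSamples(values []float64, f func(float64) float64) []float64 {
	out := make([]float64, 0, len(values))
	for _, v := range values {
		out = append(out, f(v)) // e.g. math.Sin per sample, as funcSin does
	}
	return out
}

func main() {
	fmt.Println(applyToSamples([]float64{0, math.Pi / 2}, math.Sin)) // [0 1]
}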
@@ -935,16 +1016,25 @@ var FunctionCalls = map[string]FunctionCall{
 	"abs":                funcAbs,
 	"absent":             funcAbsent,
 	"absent_over_time":   funcAbsentOverTime,
+	"acos":               funcAcos,
+	"acosh":              funcAcosh,
+	"asin":               funcAsin,
+	"asinh":              funcAsinh,
+	"atan":               funcAtan,
+	"atanh":              funcAtanh,
 	"avg_over_time":      funcAvgOverTime,
 	"ceil":               funcCeil,
 	"changes":            funcChanges,
 	"clamp":              funcClamp,
 	"clamp_max":          funcClampMax,
 	"clamp_min":          funcClampMin,
+	"cos":                funcCos,
+	"cosh":               funcCosh,
 	"count_over_time":    funcCountOverTime,
 	"days_in_month":      funcDaysInMonth,
 	"day_of_month":       funcDayOfMonth,
 	"day_of_week":        funcDayOfWeek,
+	"deg":                funcDeg,
 	"delta":              funcDelta,
 	"deriv":              funcDeriv,
 	"exp":                funcExp,
@@ -965,20 +1055,26 @@ var FunctionCalls = map[string]FunctionCall{
 	"min_over_time":      funcMinOverTime,
 	"minute":             funcMinute,
 	"month":              funcMonth,
+	"pi":                 funcPi,
 	"predict_linear":     funcPredictLinear,
 	"present_over_time":  funcPresentOverTime,
 	"quantile_over_time": funcQuantileOverTime,
+	"rad":                funcRad,
 	"rate":               funcRate,
 	"resets":             funcResets,
 	"round":              funcRound,
 	"scalar":             funcScalar,
 	"sgn":                funcSgn,
+	"sin":                funcSin,
+	"sinh":               funcSinh,
 	"sort":               funcSort,
 	"sort_desc":          funcSortDesc,
 	"sqrt":               funcSqrt,
 	"stddev_over_time":   funcStddevOverTime,
 	"stdvar_over_time":   funcStdvarOverTime,
 	"sum_over_time":      funcSumOverTime,
+	"tan":                funcTan,
+	"tanh":               funcTanh,
 	"time":               funcTime,
 	"timestamp":          funcTimestamp,
 	"vector":             funcVector,
@@ -39,9 +39,34 @@ var Functions = map[string]*Function{
 		ArgTypes:   []ValueType{ValueTypeMatrix},
 		ReturnType: ValueTypeVector,
 	},
-	"present_over_time": {
-		Name:       "present_over_time",
-		ArgTypes:   []ValueType{ValueTypeMatrix},
+	"acos": {
+		Name:       "acos",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
+	"acosh": {
+		Name:       "acosh",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
+	"asin": {
+		Name:       "asin",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
+	"asinh": {
+		Name:       "asinh",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
+	"atan": {
+		Name:       "atan",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
+	"atanh": {
+		Name:       "atanh",
+		ArgTypes:   []ValueType{ValueTypeVector},
 		ReturnType: ValueTypeVector,
 	},
 	"avg_over_time": {
@@ -74,6 +99,16 @@ var Functions = map[string]*Function{
 		ArgTypes:   []ValueType{ValueTypeVector, ValueTypeScalar},
 		ReturnType: ValueTypeVector,
 	},
+	"cos": {
+		Name:       "cos",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
+	"cosh": {
+		Name:       "cosh",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
 	"count_over_time": {
 		Name:     "count_over_time",
 		ArgTypes: []ValueType{ValueTypeMatrix},
@@ -97,6 +132,11 @@ var Functions = map[string]*Function{
 		Variadic:   1,
 		ReturnType: ValueTypeVector,
 	},
+	"deg": {
+		Name:       "deg",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
 	"delta": {
 		Name:     "delta",
 		ArgTypes: []ValueType{ValueTypeMatrix},
@@ -201,16 +241,31 @@ var Functions = map[string]*Function{
 		Variadic:   1,
 		ReturnType: ValueTypeVector,
 	},
+	"pi": {
+		Name:       "pi",
+		ArgTypes:   []ValueType{},
+		ReturnType: ValueTypeScalar,
+	},
 	"predict_linear": {
 		Name:       "predict_linear",
 		ArgTypes:   []ValueType{ValueTypeMatrix, ValueTypeScalar},
 		ReturnType: ValueTypeVector,
 	},
+	"present_over_time": {
+		Name:       "present_over_time",
+		ArgTypes:   []ValueType{ValueTypeMatrix},
+		ReturnType: ValueTypeVector,
+	},
 	"quantile_over_time": {
 		Name:       "quantile_over_time",
 		ArgTypes:   []ValueType{ValueTypeScalar, ValueTypeMatrix},
 		ReturnType: ValueTypeVector,
 	},
+	"rad": {
+		Name:       "rad",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
 	"rate": {
 		Name:     "rate",
 		ArgTypes: []ValueType{ValueTypeMatrix},
@@ -237,6 +292,16 @@ var Functions = map[string]*Function{
 		ArgTypes:   []ValueType{ValueTypeVector},
 		ReturnType: ValueTypeVector,
 	},
+	"sin": {
+		Name:       "sin",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
+	"sinh": {
+		Name:       "sinh",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
 	"sort": {
 		Name:     "sort",
 		ArgTypes: []ValueType{ValueTypeVector},
@@ -267,6 +332,16 @@ var Functions = map[string]*Function{
 		ArgTypes:   []ValueType{ValueTypeMatrix},
 		ReturnType: ValueTypeVector,
 	},
+	"tan": {
+		Name:       "tan",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
+	"tanh": {
+		Name:       "tanh",
+		ArgTypes:   []ValueType{ValueTypeVector},
+		ReturnType: ValueTypeVector,
+	},
 	"time": {
 		Name:     "time",
 		ArgTypes: []ValueType{},
promql/testdata/trig_functions.test (new file)
@@ -0,0 +1,101 @@
+# Testing sin() cos() tan() asin() acos() atan() sinh() cosh() tanh()
+# asinh() acosh() atanh() rad() deg() pi().
+
+load 5m
+	trig{l="x"} 10
+	trig{l="y"} 20
+	trig{l="NaN"} NaN
+
+eval instant at 5m sin(trig)
+	{l="x"} -0.5440211108893699
+	{l="y"} 0.9129452507276277
+	{l="NaN"} NaN
+
+eval instant at 5m cos(trig)
+	{l="x"} -0.8390715290764524
+	{l="y"} 0.40808206181339196
+	{l="NaN"} NaN
+
+eval instant at 5m tan(trig)
+	{l="x"} 0.6483608274590867
+	{l="y"} 2.2371609442247427
+	{l="NaN"} NaN
+
+eval instant at 5m asin(trig - 10.1)
+	{l="x"} -0.10016742116155944
+	{l="y"} NaN
+	{l="NaN"} NaN
+
+eval instant at 5m acos(trig - 10.1)
+	{l="x"} 1.670963747956456
+	{l="y"} NaN
+	{l="NaN"} NaN
+
+eval instant at 5m atan(trig)
+	{l="x"} 1.4711276743037345
+	{l="y"} 1.5208379310729538
+	{l="NaN"} NaN
+
+eval instant at 5m sinh(trig)
+	{l="x"} 11013.232920103324
+	{l="y"} 2.4258259770489514e+08
+	{l="NaN"} NaN
+
+eval instant at 5m cosh(trig)
+	{l="x"} 11013.232920103324
+	{l="y"} 2.4258259770489514e+08
+	{l="NaN"} NaN
+
+eval instant at 5m tanh(trig)
+	{l="x"} 0.9999999958776927
+	{l="y"} 1
+	{l="NaN"} NaN
+
+eval instant at 5m asinh(trig)
+	{l="x"} 2.99822295029797
+	{l="y"} 3.6895038689889055
+	{l="NaN"} NaN
+
+eval instant at 5m acosh(trig)
+	{l="x"} 2.993222846126381
+	{l="y"} 3.6882538673612966
+	{l="NaN"} NaN
+
+eval instant at 5m atanh(trig - 10.1)
+	{l="x"} -0.10033534773107522
+	{l="y"} NaN
+	{l="NaN"} NaN
+
+eval instant at 5m rad(trig)
+	{l="x"} 0.17453292519943295
+	{l="y"} 0.3490658503988659
+	{l="NaN"} NaN
+
+eval instant at 5m rad(trig - 10)
+	{l="x"} 0
+	{l="y"} 0.17453292519943295
+	{l="NaN"} NaN
+
+eval instant at 5m rad(trig - 20)
+	{l="x"} -0.17453292519943295
+	{l="y"} 0
+	{l="NaN"} NaN
+
+eval instant at 5m deg(trig)
+	{l="x"} 572.9577951308232
+	{l="y"} 1145.9155902616465
+	{l="NaN"} NaN
+
+eval instant at 5m deg(trig - 10)
+	{l="x"} 0
+	{l="y"} 572.9577951308232
+	{l="NaN"} NaN
+
+eval instant at 5m deg(trig - 20)
+	{l="x"} -572.9577951308232
+	{l="y"} 0
+	{l="NaN"} NaN
+
+clear
+
+eval instant at 0s pi()
+	3.141592653589793
@@ -297,7 +297,7 @@ const resolvedRetention = 15 * time.Minute
 
 // Eval evaluates the rule expression and then creates pending alerts and fires
 // or removes previously pending alerts accordingly.
-func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, externalURL *url.URL) (promql.Vector, error) {
+func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, externalURL *url.URL, limit int) (promql.Vector, error) {
 	res, err := query(ctx, r.vector.String(), ts)
 	if err != nil {
 		return nil, err
@@ -415,6 +415,12 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, externalURL *url.URL, limit int) (promql.Vector, error) {
 		}
 	}
 
+	numActive := len(r.active)
+	if limit != 0 && numActive > limit {
+		r.active = map[uint64]*Alert{}
+		return nil, errors.Errorf("exceeded limit of %d with %d alerts", limit, numActive)
+	}
+
 	return vec, nil
 }
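The guard above fixes the limit semantics used throughout this change: a limit of 0 disables the check, and any other value errors once the count exceeds it, so a negative limit trips as soon as a single alert exists. A standalone, illustrative Go restatement of just that predicate:

package main

import "fmt"

// exceedsLimit reproduces the check added above.
func exceedsLimit(limit, n int) bool {
	return limit != 0 && n > limit
}

func main() {
	fmt.Println(exceedsLimit(0, 100)) // false: 0 means "no limit"
	fmt.Println(exceedsLimit(1, 1))   // false: reaching the limit is allowed
	fmt.Println(exceedsLimit(-1, 1))  // true: matches the tests below
}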
@@ -170,7 +170,7 @@ func TestAlertingRuleLabelsUpdate(t *testing.T) {
 		t.Logf("case %d", i)
 		evalTime := baseTime.Add(time.Duration(i) * time.Minute)
 		result[0].Point.T = timestamp.FromTime(evalTime)
-		res, err := rule.Eval(suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil)
+		res, err := rule.Eval(suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0)
 		require.NoError(t, err)
 
 		var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples.
@@ -252,7 +252,7 @@ func TestAlertingRuleExternalLabelsInTemplate(t *testing.T) {
 
 	var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples.
 	res, err := ruleWithoutExternalLabels.Eval(
-		suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil,
+		suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0,
 	)
 	require.NoError(t, err)
 	for _, smpl := range res {
@@ -266,7 +266,7 @@ func TestAlertingRuleExternalLabelsInTemplate(t *testing.T) {
 	}
 
 	res, err = ruleWithExternalLabels.Eval(
-		suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil,
+		suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0,
 	)
 	require.NoError(t, err)
 	for _, smpl := range res {
@@ -346,7 +346,7 @@ func TestAlertingRuleExternalURLInTemplate(t *testing.T) {
 
 	var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples.
 	res, err := ruleWithoutExternalURL.Eval(
-		suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil,
+		suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0,
 	)
 	require.NoError(t, err)
 	for _, smpl := range res {
@@ -360,7 +360,7 @@ func TestAlertingRuleExternalURLInTemplate(t *testing.T) {
 	}
 
 	res, err = ruleWithExternalURL.Eval(
-		suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil,
+		suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0,
 	)
 	require.NoError(t, err)
 	for _, smpl := range res {
@@ -417,7 +417,7 @@ func TestAlertingRuleEmptyLabelFromTemplate(t *testing.T) {
 
 	var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples.
 	res, err := rule.Eval(
-		suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil,
+		suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0,
 	)
 	require.NoError(t, err)
 	for _, smpl := range res {
@@ -460,7 +460,61 @@ func TestAlertingRuleDuplicate(t *testing.T) {
 		"",
 		true, log.NewNopLogger(),
 	)
-	_, err := rule.Eval(ctx, now, EngineQueryFunc(engine, storage), nil)
+	_, err := rule.Eval(ctx, now, EngineQueryFunc(engine, storage), nil, 0)
 	require.Error(t, err)
 	require.EqualError(t, err, "vector contains metrics with the same labelset after applying alert labels")
 }
+
+func TestAlertingRuleLimit(t *testing.T) {
+	storage := teststorage.New(t)
+	defer storage.Close()
+
+	opts := promql.EngineOpts{
+		Logger:     nil,
+		Reg:        nil,
+		MaxSamples: 10,
+		Timeout:    10 * time.Second,
+	}
+
+	engine := promql.NewEngine(opts)
+	ctx, cancelCtx := context.WithCancel(context.Background())
+	defer cancelCtx()
+
+	now := time.Now()
+
+	suite := []struct {
+		limit int
+		err   string
+	}{
+		{
+			limit: 0,
+		},
+		{
+			limit: 1,
+		},
+		{
+			limit: -1,
+			err:   "exceeded limit of -1 with 1 alerts",
+		},
+	}
+
+	for _, test := range suite {
+		expr, _ := parser.ParseExpr(`1`)
+		rule := NewAlertingRule(
+			"foo",
+			expr,
+			time.Minute,
+			labels.FromStrings("test", "test"),
+			nil,
+			nil,
+			"",
+			true, log.NewNopLogger(),
+		)
+		_, err := rule.Eval(ctx, now, EngineQueryFunc(engine, storage), nil, test.limit)
+		if test.err == "" {
+			require.NoError(t, err)
+		} else {
+			require.Equal(t, test.err, err.Error())
+		}
+	}
+}
@@ -213,7 +213,7 @@ type Rule interface {
 	// Labels of the rule.
 	Labels() labels.Labels
 	// eval evaluates the rule, including any associated recording or alerting actions.
-	Eval(context.Context, time.Time, QueryFunc, *url.URL) (promql.Vector, error)
+	Eval(context.Context, time.Time, QueryFunc, *url.URL, int) (promql.Vector, error)
 	// String returns a human-readable string representation of the rule.
 	String() string
 	// Query returns the rule query expression.
@@ -244,6 +244,7 @@ type Group struct {
 	name                 string
 	file                 string
 	interval             time.Duration
+	limit                int
 	rules                []Rule
 	seriesInPreviousEval []map[string]labels.Labels // One per Rule.
 	staleSeries          []labels.Labels
@@ -267,6 +268,7 @@ type Group struct {
 type GroupOptions struct {
 	Name, File    string
 	Interval      time.Duration
+	Limit         int
 	Rules         []Rule
 	ShouldRestore bool
 	Opts          *ManagerOptions
@@ -295,6 +297,7 @@ func NewGroup(o GroupOptions) *Group {
 		name:                 o.Name,
 		file:                 o.File,
 		interval:             o.Interval,
+		limit:                o.Limit,
 		rules:                o.Rules,
 		shouldRestore:        o.ShouldRestore,
 		opts:                 o.Opts,
@@ -319,6 +322,9 @@ func (g *Group) Rules() []Rule { return g.rules }
 // Interval returns the group's interval.
 func (g *Group) Interval() time.Duration { return g.interval }
 
+// Limit returns the group's limit.
+func (g *Group) Limit() int { return g.limit }
+
 func (g *Group) run(ctx context.Context) {
 	defer close(g.terminated)
 
@@ -591,7 +597,7 @@ func (g *Group) Eval(ctx context.Context, ts time.Time) {
 
 			g.metrics.EvalTotal.WithLabelValues(GroupKey(g.File(), g.Name())).Inc()
 
-			vector, err := rule.Eval(ctx, ts, g.opts.QueryFunc, g.opts.ExternalURL)
+			vector, err := rule.Eval(ctx, ts, g.opts.QueryFunc, g.opts.ExternalURL, g.Limit())
 			if err != nil {
 				rule.SetHealth(HealthBad)
 				rule.SetLastError(err)
@@ -850,6 +856,10 @@ func (g *Group) Equals(ng *Group) bool {
 		return false
 	}
 
+	if g.limit != ng.limit {
+		return false
+	}
+
 	if len(g.rules) != len(ng.rules) {
 		return false
 	}
@@ -1086,6 +1096,7 @@ func (m *Manager) LoadGroups(
 			Name:          rg.Name,
 			File:          fn,
 			Interval:      itv,
+			Limit:         rg.Limit,
 			Rules:         rules,
 			ShouldRestore: shouldRestore,
 			Opts:          m.opts,
@@ -156,7 +156,7 @@ func TestAlertingRule(t *testing.T) {
 
 		evalTime := baseTime.Add(test.time)
 
-		res, err := rule.Eval(suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil)
+		res, err := rule.Eval(suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0)
 		require.NoError(t, err)
 
 		var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples.
@@ -305,7 +305,7 @@ func TestForStateAddSamples(t *testing.T) {
 				forState = float64(value.StaleNaN)
 			}
 
-			res, err := rule.Eval(suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil)
+			res, err := rule.Eval(suite.Context(), evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0)
 			require.NoError(t, err)
 
 			var filteredRes promql.Vector // After removing 'ALERTS' samples.
@@ -773,6 +773,12 @@ func TestUpdate(t *testing.T) {
 	}
 	reloadAndValidate(rgs, t, tmpFile, ruleManager, expected, ogs)
 
+	// Update limit and reload.
+	for i := range rgs.Groups {
+		rgs.Groups[i].Limit = 1
+	}
+	reloadAndValidate(rgs, t, tmpFile, ruleManager, expected, ogs)
+
 	// Change group rules and reload.
 	for i, g := range rgs.Groups {
 		for j, r := range g.Rules {
@@ -791,6 +797,7 @@ type ruleGroupsTest struct {
 type ruleGroupTest struct {
 	Name     string         `yaml:"name"`
 	Interval model.Duration `yaml:"interval,omitempty"`
+	Limit    int            `yaml:"limit,omitempty"`
 	Rules    []rulefmt.Rule `yaml:"rules"`
 }
 
@@ -812,6 +819,7 @@ func formatRules(r *rulefmt.RuleGroups) ruleGroupsTest {
 		tmp = append(tmp, ruleGroupTest{
 			Name:     g.Name,
 			Interval: g.Interval,
+			Limit:    g.Limit,
 			Rules:    rtmp,
 		})
 	}
@@ -73,7 +73,7 @@ func (rule *RecordingRule) Labels() labels.Labels {
 }
 
 // Eval evaluates the rule and then overrides the metric names and labels accordingly.
-func (rule *RecordingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, _ *url.URL) (promql.Vector, error) {
+func (rule *RecordingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, _ *url.URL, limit int) (promql.Vector, error) {
 	vector, err := query(ctx, rule.vector.String(), ts)
 	if err != nil {
 		return nil, err
@@ -99,6 +99,13 @@ func (rule *RecordingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, _ *url.URL, limit int) (promql.Vector, error) {
 		return nil, fmt.Errorf("vector contains metrics with the same labelset after applying rule labels")
 	}
 
+	numSamples := len(vector)
+	if limit != 0 && numSamples > limit {
+		return nil, fmt.Errorf("exceeded limit %d with %d samples", limit, numSamples)
+	}
+
+	rule.SetHealth(HealthGood)
+	rule.SetLastError(err)
 	return vector, nil
 }
@@ -49,7 +49,9 @@ func TestRuleEval(t *testing.T) {
 		name   string
 		expr   parser.Expr
 		labels labels.Labels
+		limit  int
 		result promql.Vector
+		err    string
 	}{
 		{
 			name: "nolabels",
@@ -69,12 +71,43 @@ func TestRuleEval(t *testing.T) {
 				Point:  promql.Point{V: 1, T: timestamp.FromTime(now)},
 			}},
 		},
+		{
+			name:   "underlimit",
+			expr:   &parser.NumberLiteral{Val: 1},
+			labels: labels.FromStrings("foo", "bar"),
+			limit:  2,
+			result: promql.Vector{promql.Sample{
+				Metric: labels.FromStrings("__name__", "underlimit", "foo", "bar"),
+				Point:  promql.Point{V: 1, T: timestamp.FromTime(now)},
+			}},
+		},
+		{
+			name:   "atlimit",
+			expr:   &parser.NumberLiteral{Val: 1},
+			labels: labels.FromStrings("foo", "bar"),
+			limit:  1,
+			result: promql.Vector{promql.Sample{
+				Metric: labels.FromStrings("__name__", "atlimit", "foo", "bar"),
+				Point:  promql.Point{V: 1, T: timestamp.FromTime(now)},
+			}},
+		},
+		{
+			name:   "overlimit",
+			expr:   &parser.NumberLiteral{Val: 1},
+			labels: labels.FromStrings("foo", "bar"),
+			limit:  -1,
+			err:    "exceeded limit -1 with 1 samples",
+		},
 	}
 
 	for _, test := range suite {
 		rule := NewRecordingRule(test.name, test.expr, test.labels)
-		result, err := rule.Eval(ctx, now, EngineQueryFunc(engine, storage), nil)
-		require.NoError(t, err)
+		result, err := rule.Eval(ctx, now, EngineQueryFunc(engine, storage), nil, test.limit)
+		if test.err == "" {
+			require.NoError(t, err)
+		} else {
+			require.Equal(t, test.err, err.Error())
+		}
 		require.Equal(t, test.result, result)
 	}
 }
@@ -114,7 +147,7 @@ func TestRuleEvalDuplicate(t *testing.T) {
 
 	expr, _ := parser.ParseExpr(`vector(0) or label_replace(vector(0),"test","x","","")`)
 	rule := NewRecordingRule("foo", expr, labels.FromStrings("test", "test"))
-	_, err := rule.Eval(ctx, now, EngineQueryFunc(engine, storage), nil)
+	_, err := rule.Eval(ctx, now, EngineQueryFunc(engine, storage), nil, 0)
 	require.Error(t, err)
 	require.EqualError(t, err, "vector contains metrics with the same labelset after applying rule labels")
 }
@@ -1,18 +0,0 @@ scripts/build_react_app.sh (deleted)
-#!/usr/bin/env bash
-#
-# Build React web UI.
-# Run from repository root.
-set -e
-set -u
-
-if ! [[ "$0" =~ "scripts/build_react_app.sh" ]]; then
-	echo "must be run from repository root"
-	exit 255
-fi
-
-cd web/ui/react-app
-
-echo "building React app"
-PUBLIC_URL=. npm run build
-rm -rf ../static/react
-mv build ../static/react
@@ -1674,9 +1674,12 @@ func (db *DB) Delete(mint, maxt int64, ms ...*labels.Matcher) error {
 			}(b))
 		}
 	}
-	g.Go(func() error {
-		return db.head.Delete(mint, maxt, ms...)
-	})
+	if db.head.OverlapsClosedInterval(mint, maxt) {
+		g.Go(func() error {
+			return db.head.Delete(mint, maxt, ms...)
+		})
+	}
 
 	return g.Wait()
 }
tsdb/head.go
@@ -736,6 +736,11 @@ func (h *Head) Truncate(mint int64) (err error) {
 	return h.truncateWAL(mint)
 }
 
+// OverlapsClosedInterval returns true if the head overlaps [mint, maxt].
+func (h *Head) OverlapsClosedInterval(mint, maxt int64) bool {
+	return h.MinTime() <= maxt && mint <= h.MaxTime()
+}
+
 // truncateMemory removes old data before mint from the head.
 func (h *Head) truncateMemory(mint int64) (err error) {
 	h.chunkSnapshotMtx.Lock()
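The new method is the standard closed-interval overlap test; restated generically in a runnable, illustrative Go sketch:

package main

import "fmt"

// overlaps reports whether closed ranges [a1, a2] and [b1, b2] intersect:
// a1 <= b2 && b1 <= a2, exactly the shape of OverlapsClosedInterval above.
func overlaps(a1, a2, b1, b2 int64) bool {
	return a1 <= b2 && b1 <= a2
}

func main() {
	fmt.Println(overlaps(0, 10, 10, 20)) // true: closed intervals share t=10
	fmt.Println(overlaps(0, 10, 11, 20)) // false: disjoint
}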
@@ -1101,6 +1106,10 @@ func (h *Head) gc() int64 {
 	// Remove deleted series IDs from the postings lists.
 	h.postings.Delete(deleted)
 
+	// Remove tombstones referring to the deleted series.
+	h.tombstones.DeleteTombstones(deleted)
+	h.tombstones.TruncateBefore(mint)
+
 	if h.wal != nil {
 		_, last, _ := wal.Segments(h.wal.Dir())
 		h.deletedMtx.Lock()
@@ -1168,12 +1177,12 @@ func (h *Head) Close() error {
 	defer h.closedMtx.Unlock()
 	h.closed = true
 	errs := tsdb_errors.NewMulti(h.chunkDiskMapper.Close())
-	if errs.Err() == nil && h.opts.EnableMemorySnapshotOnShutdown {
-		errs.Add(h.performChunkSnapshot())
-	}
 	if h.wal != nil {
 		errs.Add(h.wal.Close())
 	}
+	if errs.Err() == nil && h.opts.EnableMemorySnapshotOnShutdown {
+		errs.Add(h.performChunkSnapshot())
+	}
 	return errs.Err()
 
 }
@@ -2727,6 +2727,10 @@ func TestChunkSnapshot(t *testing.T) {
 		// These references should be the ones used for the snapshot.
 		wlast, woffset, err = head.wal.LastSegmentAndOffset()
 		require.NoError(t, err)
+		if woffset != 0 && woffset < 32*1024 {
+			// The page is always filled before taking the snapshot.
+			woffset = 32 * 1024
+		}
 
 		{
 			// Creating snapshot and verifying it.
@@ -2793,6 +2797,10 @@ func TestChunkSnapshot(t *testing.T) {
 		// Creating another snapshot should delete the older snapshot and replay still works fine.
 		wlast, woffset, err = head.wal.LastSegmentAndOffset()
 		require.NoError(t, err)
+		if woffset != 0 && woffset < 32*1024 {
+			// The page is always filled before taking the snapshot.
+			woffset = 32 * 1024
+		}
 
 		{
 			// Close Head and verify that new snapshot was created.
@@ -252,6 +252,34 @@ func (t *MemTombstones) Get(ref uint64) (Intervals, error) {
 	return t.intvlGroups[ref], nil
 }
 
+func (t *MemTombstones) DeleteTombstones(refs map[uint64]struct{}) {
+	t.mtx.Lock()
+	defer t.mtx.Unlock()
+	for ref := range refs {
+		delete(t.intvlGroups, ref)
+	}
+}
+
+func (t *MemTombstones) TruncateBefore(beforeT int64) {
+	t.mtx.Lock()
+	defer t.mtx.Unlock()
+	for ref, ivs := range t.intvlGroups {
+		i := len(ivs) - 1
+		for ; i >= 0; i-- {
+			if beforeT > ivs[i].Maxt {
+				break
+			}
+		}
+		if len(ivs[i+1:]) == 0 {
+			delete(t.intvlGroups, ref)
+		} else {
+			newIvs := make(Intervals, len(ivs[i+1:]))
+			copy(newIvs, ivs[i+1:])
+			t.intvlGroups[ref] = newIvs
+		}
+	}
+}
+
 func (t *MemTombstones) Iter(f func(uint64, Intervals) error) error {
 	t.mtx.RLock()
 	defer t.mtx.RUnlock()
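TruncateBefore scans each ref's (sorted) intervals from newest to oldest and drops every interval that ends before beforeT. A self-contained, illustrative Go sketch of that core loop, worked through the first test case below (types simplified):

package main

import "fmt"

type interval struct{ mint, maxt int64 }

// truncateBefore keeps only intervals that do not end before beforeT,
// mirroring the loop in MemTombstones.TruncateBefore above.
func truncateBefore(ivs []interval, beforeT int64) []interval {
	i := len(ivs) - 1
	for ; i >= 0; i-- {
		if beforeT > ivs[i].maxt {
			break
		}
	}
	return ivs[i+1:]
}

func main() {
	ivs := []interval{{1, 2}, {4, 10}, {12, 100}}
	fmt.Println(truncateBefore(ivs, 3)) // [{4 10} {12 100}]: {1,2} ends before t=3
}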
@@ -63,6 +63,66 @@ func TestWriteAndReadbackTombstones(t *testing.T) {
 	require.Equal(t, stones, restr)
 }
 
+func TestDeletingTombstones(t *testing.T) {
+	stones := NewMemTombstones()
+
+	ref := uint64(42)
+	mint := rand.Int63n(time.Now().UnixNano())
+	dranges := make(Intervals, 0, 1)
+	dranges = dranges.Add(Interval{mint, mint + rand.Int63n(1000)})
+	stones.AddInterval(ref, dranges...)
+	stones.AddInterval(uint64(43), dranges...)
+
+	intervals, err := stones.Get(ref)
+	require.NoError(t, err)
+	require.Equal(t, intervals, dranges)
+
+	stones.DeleteTombstones(map[uint64]struct{}{ref: struct{}{}})
+
+	intervals, err = stones.Get(ref)
+	require.NoError(t, err)
+	require.Empty(t, intervals)
+}
+
+func TestTruncateBefore(t *testing.T) {
+	cases := []struct {
+		before  Intervals
+		beforeT int64
+		after   Intervals
+	}{
+		{
+			before:  Intervals{{1, 2}, {4, 10}, {12, 100}},
+			beforeT: 3,
+			after:   Intervals{{4, 10}, {12, 100}},
+		},
+		{
+			before:  Intervals{{1, 2}, {4, 10}, {12, 100}, {200, 1000}},
+			beforeT: 900,
+			after:   Intervals{{200, 1000}},
+		},
+		{
+			before:  Intervals{{1, 2}, {4, 10}, {12, 100}, {200, 1000}},
+			beforeT: 2000,
+			after:   nil,
+		},
+		{
+			before:  Intervals{{1, 2}, {4, 10}, {12, 100}, {200, 1000}},
+			beforeT: 0,
+			after:   Intervals{{1, 2}, {4, 10}, {12, 100}, {200, 1000}},
+		},
+	}
+	for _, c := range cases {
+		ref := uint64(42)
+		stones := NewMemTombstones()
+		stones.AddInterval(ref, c.before...)
+
+		stones.TruncateBefore(c.beforeT)
+		ts, err := stones.Get(ref)
+		require.NoError(t, err)
+		require.Equal(t, c.after, ts)
+	}
+}
+
 func TestAddingNewIntervals(t *testing.T) {
 	cases := []struct {
 		exist Intervals
web/ui/build_ui.sh (new file)
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+# Copyright 2021 The Prometheus Authors
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+current=$(pwd)
+
+buildOrder=(module/codemirror-promql)
+
+function buildModule() {
+  for module in "${buildOrder[@]}"; do
+    cd "${module}"
+    echo "build ${module}"
+    npm run build
+    cd "${current}"
+  done
+}
+
+function buildReactApp() {
+  cd react-app
+  echo "build react-app"
+  npm run build
+  cd "${current}"
+  rm -rf ./static/react
+  mv ./react-app/build ./static/react
+}
+
+for i in "$@"; do
+  case ${i} in
+  --all)
+    buildModule
+    buildReactApp
+    shift
+    ;;
+  --build-module)
+    buildModule
+    shift
+    ;;
+  esac
+done
@@ -1,60 +0,0 @@ (deleted module helper script)
-#!/bin/bash
-
-set -e
-
-MODULE_LIST=(codemirror-promql)
-
-build-module() {
-  for module in "${MODULE_LIST[@]}"; do
-    cd "${module}"
-    echo "building ${module}"
-    npm run build
-    cd ../
-  done
-}
-
-lint-module() {
-  for module in "${MODULE_LIST[@]}"; do
-    cd "${module}"
-    echo "running linter for ${module}"
-    npm run lint
-    cd ../
-  done
-}
-
-test-module() {
-  for module in "${MODULE_LIST[@]}"; do
-    cd "${module}"
-    echo "running all tests for ${module}"
-    npm run test
-    cd ../
-  done
-}
-
-install-module(){
-  for module in "${MODULE_LIST[@]}"; do
-    cd "${module}"
-    echo "install deps for ${module}"
-    npm ci
-    cd ../
-  done
-}
-
-for i in "$@"; do
-  case ${i} in
-  --build)
-    build-module
-    shift
-    ;;
-  --lint)
-    lint-module
-    shift
-    ;;
-  --test)
-    test-module
-    ;;
-  --install)
-    install-module
-    ;;
-  esac
-done
@@ -5,7 +5,6 @@
 		"plugin:prettier/recommended"
 	],
 	"rules": {
-		"@typescript-eslint/camelcase": "warn",
 		"@typescript-eslint/explicit-function-return-type": ["off"],
 		"eol-last": [
 			"error",
web/ui/module/codemirror-promql/.gitignore
@@ -4,8 +4,8 @@ node_modules/
 dist/
 lib/
 
-src/lang-promql/grammar/**.ts
-src/lang-promql/grammar/parser.js
-src/lang-promql/grammar/parser.terms.js
+src/grammar/**.ts
+src/grammar/parser.js
+src/grammar/parser.terms.js
 
 /.nyc_output
@@ -11,3 +11,4 @@
 /.npmignore
 /.gitignore
 /.eslintrc.js
+/.nyc_output
@@ -16,14 +16,5 @@
 set -ex
 
 # build the lib (both ES2015 and CommonJS)
-tsc --module ES2015 --target ES2015 --outDir lib/esm
-tsc --module commonjs --target es5 --outDir lib/cjs --downlevelIteration
-
-# Finally, copy some useful files into the distribution folder for documentation purposes.
-cp ./README.md ./lib/README.md
-cp ./CHANGELOG.md ./lib/CHANGELOG.md
-cp ./package.json ./lib/package.json
-
-if [ -f "./LICENSE" ]; then
-	cp ./LICENSE ./lib/LICENSE
-fi
+tsc --module ES2015 --target ES2015 --outDir dist/esm
+tsc --module commonjs --target es5 --outDir dist/cjs --downlevelIteration
web/ui/module/codemirror-promql/package-lock.json (generated, 22167 lines)
Diff suppressed because it is too large.
@ -1,17 +1,15 @@
|
||||||
{
|
{
|
||||||
"name": "codemirror-promql",
|
"name": "codemirror-promql",
|
||||||
"version": "0.17.0",
|
"version": "0.18.0",
|
||||||
"description": "a CodeMirror mode for the PromQL language",
|
"description": "a CodeMirror mode for the PromQL language",
|
||||||
"main": "cjs/index.js",
|
"main": "dist/cjs/index.js",
|
||||||
"module": "esm/index.js",
|
"module": "dist/esm/index.js",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"start": "webpack-dev-server --config webpack.config.cjs --open",
|
"build": "npm run build:grammar && npm run build:lib",
|
||||||
"build": "npm run build:grammar && npm run build:lib && npm run build:app",
|
"build:grammar": "lezer-generator src/grammar/promql.grammar -o src/grammar/parser",
|
||||||
"build:grammar": "lezer-generator src/lang-promql/grammar/promql.grammar -o src/lang-promql/grammar/parser",
|
|
||||||
"build:lib": "bash ./build.sh",
|
"build:lib": "bash ./build.sh",
|
||||||
"build:app": "webpack --config webpack.config.cjs",
|
"test": "npm run build:grammar && ts-mocha -p tsconfig.json ./**/*.test.ts",
|
||||||
"test": "npm run build:grammar && ts-mocha -p tsconfig.json src/**/*.test.ts",
|
"test:coverage": "npm run build:grammar && nyc ts-mocha -p ./tsconfig.json ./**/*.test.ts",
|
||||||
"test-coverage": "npm run build:grammar && nyc ts-mocha -p ./tsconfig.json ./**/*.test.ts",
|
|
||||||
"codecov": "nyc report --reporter=text-lcov > coverage.lcov && codecov",
|
"codecov": "nyc report --reporter=text-lcov > coverage.lcov && codecov",
|
||||||
"lint": "eslint src/ --ext .ts",
|
"lint": "eslint src/ --ext .ts",
|
||||||
"lint:fix": "eslint --fix src/ --ext .ts"
|
"lint:fix": "eslint --fix src/ --ext .ts"
|
||||||
|
@ -46,32 +44,27 @@
|
||||||
"@types/chai": "^4.2.12",
|
"@types/chai": "^4.2.12",
|
||||||
"@types/lru-cache": "^5.1.0",
|
"@types/lru-cache": "^5.1.0",
|
||||||
"@types/mocha": "^8.0.3",
|
"@types/mocha": "^8.0.3",
|
||||||
"@types/node": "^14.0.13",
|
"@types/node": "^16.7.6",
|
||||||
"@typescript-eslint/eslint-plugin": "^2.22.0",
|
"@typescript-eslint/eslint-plugin": "^4.31.0",
|
||||||
"@typescript-eslint/parser": "^2.22.0",
|
"@typescript-eslint/parser": "^4.31.0",
|
||||||
"chai": "^4.2.0",
|
"chai": "^4.2.0",
|
||||||
"clean-webpack-plugin": "^3.0.0",
|
|
||||||
"codecov": "^3.8.1",
|
"codecov": "^3.8.1",
|
||||||
"eslint": "^6.8.0",
|
"eslint": "^7.32.0",
|
||||||
"eslint-config-prettier": "^6.11.0",
|
"eslint-config-prettier": "^8.3.0",
|
||||||
"eslint-plugin-flowtype": "^5.2.0",
|
"eslint-plugin-flowtype": "^5.9.2",
|
||||||
"eslint-plugin-import": "^2.22.0",
|
"eslint-plugin-import": "^2.24.2",
|
||||||
"eslint-plugin-prettier": "^3.1.4",
|
"eslint-plugin-prettier": "^4.0.0",
|
||||||
"html-webpack-plugin": "^4.3.0",
|
|
||||||
"isomorphic-fetch": "^3.0.0",
|
"isomorphic-fetch": "^3.0.0",
|
||||||
"lezer": "^0.13.1",
|
"lezer": "^0.13.1",
|
||||||
"lezer-generator": "^0.13.1",
|
"lezer-generator": "^0.13.1",
|
||||||
"mocha": "^8.1.2",
|
"mocha": "^8.1.2",
|
||||||
"nock": "^13.0.11",
|
"nock": "^13.0.11",
|
||||||
"nyc": "^15.1.0",
|
"nyc": "^15.1.0",
|
||||||
"prettier": "^2.0.5",
|
"prettier": "^2.3.2",
|
||||||
"ts-loader": "^7.0.4",
|
"ts-loader": "^7.0.4",
|
||||||
"ts-mocha": "^8.0.0",
|
"ts-mocha": "^8.0.0",
|
||||||
"ts-node": "^9.0.0",
|
"ts-node": "^9.0.0",
|
||||||
"typescript": "^4.2.3",
|
"typescript": "^4.2.3"
|
||||||
"webpack": "^4.43.0",
|
|
||||||
"webpack-cli": "^3.3.11",
|
|
||||||
"webpack-dev-server": "^3.11.0"
|
|
||||||
},
|
},
|
||||||
"peerDependencies": {
|
"peerDependencies": {
|
||||||
"@codemirror/autocomplete": "^0.18.3",
|
"@codemirror/autocomplete": "^0.18.3",
|
||||||
|
|
|
@@ -1,48 +0,0 @@ (deleted codemirror-promql demo page)
-<!-- The MIT License (MIT)
-
-Copyright (c) 2020 The Prometheus Authors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
--->
-<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="UTF-8">
-    <title>PromQL</title>
-</head>
-
-<body>
-<h3>CodeMirror Mode PromQL</h3>
-<label for="completion">choose the completion mode:</label>
-<select name="completion" id="completion">
-    <option selected value="offline">Offline</option>
-    <option value="prometheus">Prometheus</option>
-</select>
-<br>
-<label for="languageType">Language to complete</label>
-<select name="languageType" id="languageType">
-    <option selected value="promql">Full PromQL</option>
-    <option value="metricName">Metric names</option>
-</select>
-
-<button id="apply">apply</button>
-
-<div id=editor></div>
-</body>
-</html>
@@ -1,85 +0,0 @@ (deleted demo entry script)
-// Copyright 2021 The Prometheus Authors
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import { basicSetup } from '@codemirror/basic-setup';
-import { EditorState } from '@codemirror/state';
-import { EditorView } from '@codemirror/view';
-import { LanguageType, PromQLExtension } from '../lang-promql';
-import { customTheme, promQLHighlightMaterialTheme } from './theme';
-
-const promqlExtension = new PromQLExtension();
-let editor: EditorView;
-
-function getLanguageType(): LanguageType {
-  const completionSelect = document.getElementById('languageType') as HTMLSelectElement;
-  const completionValue = completionSelect.options[completionSelect.selectedIndex].value;
-  switch (completionValue) {
-    case 'promql':
-      return LanguageType.PromQL;
-    case 'metricName':
-      return LanguageType.MetricName;
-    default:
-      return LanguageType.PromQL;
-  }
-}
-
-function setCompletion() {
-  const completionSelect = document.getElementById('completion') as HTMLSelectElement;
-  const completionValue = completionSelect.options[completionSelect.selectedIndex].value;
-  switch (completionValue) {
-    case 'offline':
-      promqlExtension.setComplete();
-      break;
-    case 'prometheus':
-      promqlExtension.setComplete({
-        remote: {
-          url: 'https://prometheus.demo.do.prometheus.io',
-        },
-      });
-      break;
-    default:
-      promqlExtension.setComplete();
-  }
-}
-
-function createEditor() {
-  let doc = '';
-  if (editor) {
-    // When the linter is changed, it required to reload completely the editor.
-    // So the first thing to do, is to completely delete the previous editor and to recreate it from scratch
-    // We should preserve the current text entered as well.
-    doc = editor.state.sliceDoc(0, editor.state.doc.length);
-    editor.destroy();
-  }
-  editor = new EditorView({
-    state: EditorState.create({
-      extensions: [basicSetup, promqlExtension.asExtension(getLanguageType()), promQLHighlightMaterialTheme, customTheme],
-      doc: doc,
-    }),
-    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
-    parent: document.querySelector('#editor')!,
-  });
-}
-
-function applyConfiguration(): void {
-  setCompletion();
-  createEditor();
-}
-
-createEditor();
-
-// eslint-disable-next-line @typescript-eslint/no-non-null-assertion,@typescript-eslint/ban-ts-ignore
-// @ts-ignore
-document.getElementById('apply').addEventListener('click', function () {
-  applyConfiguration();
-});
@@ -1,105 +0,0 @@
-// Copyright 2021 The Prometheus Authors
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import { EditorView } from '@codemirror/view';
-import { HighlightStyle, tags } from '@codemirror/highlight';
-
-// promQLHighlightMaterialTheme is based on the material theme defined here:
-// https://codemirror.net/theme/material.css
-export const promQLHighlightMaterialTheme = HighlightStyle.define([
-  {
-    tag: tags.deleted,
-    textDecoration: 'line-through',
-  },
-  {
-    tag: tags.inserted,
-    textDecoration: 'underline',
-  },
-  {
-    tag: tags.link,
-    textDecoration: 'underline',
-  },
-  {
-    tag: tags.strong,
-    fontWeight: 'bold',
-  },
-  {
-    tag: tags.emphasis,
-    fontStyle: 'italic',
-  },
-  {
-    tag: tags.invalid,
-    color: '#f00',
-  },
-  {
-    tag: tags.keyword,
-    color: '#C792EA',
-  },
-  {
-    tag: tags.operator,
-    color: '#89DDFF',
-  },
-  {
-    tag: tags.atom,
-    color: '#F78C6C',
-  },
-  {
-    tag: tags.number,
-    color: '#FF5370',
-  },
-  {
-    tag: tags.string,
-    color: '#99b867',
-  },
-  {
-    tag: [tags.escape, tags.regexp],
-    color: '#e40',
-  },
-  {
-    tag: tags.definition(tags.variableName),
-    color: '#f07178',
-  },
-  {
-    tag: tags.labelName,
-    color: '#f07178',
-  },
-  {
-    tag: tags.typeName,
-    color: '#085',
-  },
-  {
-    tag: tags.function(tags.variableName),
-    color: '#C792EA',
-  },
-  {
-    tag: tags.definition(tags.propertyName),
-    color: '#00c',
-  },
-  {
-    tag: tags.comment,
-    color: '#546E7A',
-  },
-]);
-
-export const customTheme = EditorView.theme({
-  $completionDetail: {
-    marginLeft: '0.5em',
-    float: 'right',
-    color: '#9d4040',
-  },
-  $completionMatchedText: {
-    color: '#83080a',
-    textDecoration: 'none',
-    fontWeight: 'bold',
-  },
-});
@@ -55,6 +55,7 @@ export interface CacheConfig {
 export interface PrometheusConfig {
   url: string;
   lookbackInterval?: number;
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
   httpErrorHandler?: (error: any) => void;
   fetchFn?: FetchFn;
   // cache will allow the user to change the configuration of the cached Prometheus client (which is used by default)
@@ -79,6 +80,7 @@ const serviceUnavailable = 503;
 export class HTTPPrometheusClient implements PrometheusClient {
   private readonly lookbackInterval = 60 * 60 * 1000 * 12; // 12 hours
   private readonly url: string;
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
   private readonly errorHandler?: (error: any) => void;
   private readonly httpMethod: 'POST' | 'GET' = 'POST';
   // For some reason, just assigning via "= fetch" here does not end up executing fetch correctly
@@ -272,10 +274,7 @@ class Cache {
     }
     const labelValues = currentAssociation.get(key);
     if (labelValues === undefined) {
-      currentAssociation.set(
-        key,
-        new Set<string>([value])
-      );
+      currentAssociation.set(key, new Set<string>([value]));
     } else {
       labelValues.add(value);
     }
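The `PrometheusConfig` interface above is what callers hand to the remote-completion client; a minimal sketch of such a configuration (the endpoint URL and handler body are illustrative assumptions, not part of this change):

import { PrometheusConfig } from 'codemirror-promql'; // assumes the package root re-exports PrometheusConfig

// Illustrative only: the field names come from the PrometheusConfig
// interface in this diff; the concrete values are assumptions.
const config: PrometheusConfig = {
  url: 'http://localhost:9090', // assumed local Prometheus endpoint
  lookbackInterval: 12 * 60 * 60 * 1000, // mirrors the client default of 12 hours
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  httpErrorHandler: (error: any) => console.error('autocomplete request failed:', error),
};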
@@ -13,7 +13,7 @@

 import chai from 'chai';
 import { analyzeCompletion, computeStartCompletePosition, ContextKind } from './hybrid';
-import { createEditorState, mockedMetricsTerms, mockPrometheusServer } from '../../test/utils';
+import { createEditorState, mockedMetricsTerms, mockPrometheusServer } from '../test/utils.test';
 import { Completion, CompletionContext } from '@codemirror/autocomplete';
 import {
   aggregateOpModifierTerms,
@@ -778,8 +778,7 @@ describe('autocomplete promQL test', () => {
     },
     {
       title: 'offline function/aggregation autocompletion in aggregation 4',
-      expr:
-        'sum by (instance, job) ( sum_over(scrape_series_added[1h])) / sum by (instance, job) (sum_over_time(scrape_samples_scraped[1h])) > 0.1 and sum by(instance, job) (scrape_samples_scraped{) > 100',
+      expr: 'sum by (instance, job) ( sum_over(scrape_series_added[1h])) / sum by (instance, job) (sum_over_time(scrape_samples_scraped[1h])) > 0.1 and sum by(instance, job) (scrape_samples_scraped{) > 100',
       pos: 33,
       expectedResult: {
         options: ([] as Completion[]).concat(functionIdentifierTerms, aggregateOpTerms, snippets),
@@ -64,6 +64,42 @@ export const functionIdentifierTerms = [
     info: 'Determine whether input range vector is empty',
     type: 'function',
   },
+  {
+    label: 'acos',
+    detail: 'function',
+    info: 'Calculate the arccosine, in radians, for input series',
+    type: 'function',
+  },
+  {
+    label: 'acosh',
+    detail: 'function',
+    info: 'Calculate the inverse hyperbolic cosine, in radians, for input series',
+    type: 'function',
+  },
+  {
+    label: 'asin',
+    detail: 'function',
+    info: 'Calculate the arcsine, in radians, for input series',
+    type: 'function',
+  },
+  {
+    label: 'asinh',
+    detail: 'function',
+    info: 'Calculate the inverse hyperbolic sine, in radians, for input series',
+    type: 'function',
+  },
+  {
+    label: 'atan',
+    detail: 'function',
+    info: 'Calculate the arctangent, in radians, for input series',
+    type: 'function',
+  },
+  {
+    label: 'atanh',
+    detail: 'function',
+    info: 'Calculate the inverse hyperbolic tangent, in radians, for input series',
+    type: 'function',
+  },
   {
     label: 'avg_over_time',
     detail: 'function',
@@ -100,6 +136,18 @@ export const functionIdentifierTerms = [
     info: 'Limit the value of input series to a minimum',
     type: 'function',
   },
+  {
+    label: 'cos',
+    detail: 'function',
+    info: 'Calculate the cosine, in radians, for input series',
+    type: 'function',
+  },
+  {
+    label: 'cosh',
+    detail: 'function',
+    info: 'Calculate the hyperbolic cosine, in radians, for input series',
+    type: 'function',
+  },
   {
     label: 'count_over_time',
     detail: 'function',
@@ -124,6 +172,12 @@ export const functionIdentifierTerms = [
     info: 'Return the day of the week for provided timestamps',
     type: 'function',
   },
+  {
+    label: 'deg',
+    detail: 'function',
+    info: 'Convert radians to degrees for input series',
+    type: 'function',
+  },
   {
     label: 'delta',
     detail: 'function',
@@ -244,6 +298,12 @@ export const functionIdentifierTerms = [
     info: 'Return the month for provided timestamps',
     type: 'function',
   },
+  {
+    label: 'pi',
+    detail: 'function',
+    info: 'Return pi',
+    type: 'function',
+  },
   {
     label: 'predict_linear',
     detail: 'function',
@@ -262,6 +322,12 @@ export const functionIdentifierTerms = [
     info: 'Calculate value quantiles over time for input series',
     type: 'function',
   },
+  {
+    label: 'rad',
+    detail: 'function',
+    info: 'Convert degrees to radians for input series',
+    type: 'function',
+  },
   {
     label: 'rate',
     detail: 'function',
@@ -292,6 +358,18 @@ export const functionIdentifierTerms = [
     info: 'Returns the sign of the instant vector',
     type: 'function',
   },
+  {
+    label: 'sin',
+    detail: 'function',
+    info: 'Calculate the sine, in radians, for input series',
+    type: 'function',
+  },
+  {
+    label: 'sinh',
+    detail: 'function',
+    info: 'Calculate the hyperbolic sine, in radians, for input series',
+    type: 'function',
+  },
   {
     label: 'sort',
     detail: 'function',
@@ -328,6 +406,18 @@ export const functionIdentifierTerms = [
     info: 'Calculate the sum over the values of input series over time',
     type: 'function',
   },
+  {
+    label: 'tan',
+    detail: 'function',
+    info: 'Calculate the tangent, in radians, for input series',
+    type: 'function',
+  },
+  {
+    label: 'tanh',
+    detail: 'function',
+    info: 'Calculate the hyperbolic tangent, in radians, for input series',
+    type: 'function',
+  },
   {
     label: 'time',
     detail: 'function',
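Each of the new entries follows the Completion shape that CodeMirror's autocompletion consumes; a minimal standalone sketch (field values copied from the diff above):

import { Completion } from '@codemirror/autocomplete';

// One of the new trigonometric completion terms, written out as a value.
const sinTerm: Completion = {
  label: 'sin',
  detail: 'function',
  info: 'Calculate the sine, in radians, for input series',
  type: 'function',
};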
@@ -121,16 +121,25 @@ FunctionIdentifier {
   AbsentOverTime |
   Absent |
   Abs |
+  Acos |
+  Acosh |
+  Asin |
+  Asinh |
+  Atan |
+  Atanh |
   AvgOverTime |
   Ceil |
   Changes |
   Clamp |
   ClampMax |
   ClampMin |
+  Cos |
+  Cosh |
   CountOverTime |
   DaysInMonth |
   DayOfMonth |
   DayOfWeek |
+  Deg |
   Delta |
   Deriv |
   Exp |
@@ -151,20 +160,26 @@ FunctionIdentifier {
   MinOverTime |
   Minute |
   Month |
+  Pi |
   PredictLinear |
   PresentOverTime |
   QuantileOverTime |
+  Rad |
   Rate |
   Resets |
   Round |
   Scalar |
   Sgn |
+  Sin |
+  Sinh |
   Sort |
   SortDesc |
   Sqrt |
   StddevOverTime |
   StdvarOverTime |
   SumOverTime |
+  Tan |
+  Tanh |
   Timestamp |
   Time |
   Vector |
@@ -343,16 +358,25 @@ NumberLiteral {
 Abs { condFn<"abs"> }
 Absent { condFn<"absent"> }
 AbsentOverTime { condFn<"absent_over_time"> }
+Acos { condFn<"acos"> }
+Acosh { condFn<"acosh"> }
+Asin { condFn<"asin"> }
+Asinh { condFn<"asinh"> }
+Atan { condFn<"atan"> }
+Atanh { condFn<"atanh"> }
 AvgOverTime { condFn<"avg_over_time"> }
 Ceil { condFn<"ceil"> }
 Changes { condFn<"changes"> }
 Clamp { condFn<"clamp"> }
 ClampMax { condFn<"clamp_max"> }
 ClampMin { condFn<"clamp_min"> }
+Cos { condFn<"cos"> }
+Cosh { condFn<"cosh"> }
 CountOverTime { condFn<"count_over_time"> }
 DaysInMonth { condFn<"days_in_month"> }
 DayOfMonth { condFn<"day_of_month"> }
 DayOfWeek { condFn<"day_of_week"> }
+Deg { condFn<"deg"> }
 Delta { condFn<"delta"> }
 Deriv { condFn<"deriv"> }
 Exp { condFn<"exp"> }
@@ -373,20 +397,26 @@ NumberLiteral {
 MinOverTime { condFn<"min_over_time"> }
 Minute { condFn<"minute"> }
 Month { condFn<"month"> }
+Pi { condFn<"pi"> }
 PredictLinear { condFn<"predict_linear"> }
 PresentOverTime { condFn<"present_over_time"> }
 QuantileOverTime { condFn<"quantile_over_time"> }
+Rad { condFn<"rad"> }
 Rate { condFn<"rate"> }
 Resets { condFn<"resets"> }
 Round { condFn<"round"> }
 Scalar { condFn<"scalar"> }
 Sgn { condFn<"sgn"> }
+Sin { condFn<"sin"> }
+Sinh { condFn<"sinh"> }
 Sort { condFn<"sort"> }
 SortDesc { condFn<"sort_desc"> }
 Sqrt { condFn<"sqrt"> }
 StddevOverTime { condFn<"stddev_over_time"> }
 StdvarOverTime { condFn<"stdvar_over_time"> }
 SumOverTime { condFn<"sum_over_time"> }
+Tan { condFn<"tan"> }
+Tanh { condFn<"tanh"> }
 Time { condFn<"time"> }
 Timestamp { condFn<"timestamp"> }
 Vector { condFn<"vector"> }
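A quick way to exercise the new grammar tokens is to parse a query that uses them; a hedged smoke-test sketch (the query itself is arbitrary):

import { parser } from './grammar/parser';

// After rebuilding the grammar above, a query using the new trigonometric
// functions should parse without error nodes.
const tree = parser.parse('sin(rad(deg(pi())))');
console.log(tree.toString()); // inspect the tree; no error nodes expected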
@@ -4,7 +4,7 @@ import { fileTests } from 'lezer-generator/dist/test';
 import * as fs from 'fs';
 import * as path from 'path';

-const caseDir = './src/lang-promql/grammar/test';
+const caseDir = './src/grammar/test';
 for (const file of fs.readdirSync(caseDir)) {
   if (!/\.txt$/.test(file)) continue;
@@ -14,6 +14,7 @@
 import { EditorView } from '@codemirror/view';
 import { Diagnostic, linter } from '@codemirror/lint';
 import { HybridLint } from './hybrid';
+import { Extension } from '@codemirror/state';

 type lintFunc = (view: EditorView) => readonly Diagnostic[] | Promise<readonly Diagnostic[]>;

@@ -27,6 +28,6 @@ export function newLintStrategy(): LintStrategy {
   return new HybridLint();
 }

-export function promQLLinter(callbackFunc: (this: LintStrategy) => lintFunc, thisArg: LintStrategy) {
+export function promQLLinter(callbackFunc: (this: LintStrategy) => lintFunc, thisArg: LintStrategy): Extension {
   return linter(callbackFunc.call(thisArg));
 }
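With the explicit Extension return type, the linter drops straight into an editor configuration; a sketch under the assumption that the strategy exposes a `promQL` method (that name is not shown in this diff):

import { EditorState } from '@codemirror/state';
import { newLintStrategy, promQLLinter } from './lint';

// The strategy method name `promQL` is assumed for illustration; only
// promQLLinter's signature is taken from this diff.
const lintStrategy = newLintStrategy();
const state = EditorState.create({
  extensions: [promQLLinter(lintStrategy.promQL, lintStrategy)],
});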
@@ -14,7 +14,7 @@

 import chai from 'chai';
 import { Parser } from './parser';
 import { Diagnostic } from '@codemirror/lint';
-import { createEditorState } from '../../test/utils';
+import { createEditorState } from '../test/utils.test';
 import { syntaxTree } from '@codemirror/language';
 import { ValueType } from '../types';
@@ -73,7 +73,7 @@ export class Parser {
     });
   }

-  analyze() {
+  analyze(): void {
     // When you are at the root of the tree, the first node is not `Expr` but a node with no name.
     // So to be able to iterate over the nodes relative to the promql node, we have to get the first child at the beginning.
     this.checkAST(this.tree.topNode.firstChild);
@@ -33,7 +33,7 @@ import {
   Sub,
   VectorSelector,
 } from '../grammar/parser.terms';
-import { createEditorState } from '../../test/utils';
+import { createEditorState } from '../test/utils.test';
 import { containsAtLeastOneChild, containsChild, retrieveAllRecursiveNodes, walkBackward, walkThrough } from './path-finder';
 import { SyntaxNode } from 'lezer-tree';
 import { syntaxTree } from '@codemirror/language';
@@ -12,7 +12,7 @@
 // limitations under the License.

 import { buildVectorMatching } from './vector';
-import { createEditorState } from '../../test/utils';
+import { createEditorState } from '../test/utils.test';
 import { walkThrough } from './path-finder';
 import { BinaryExpr, Expr } from '../grammar/parser.terms';
 import chai from 'chai';
@@ -30,7 +30,7 @@ import {
 import { VectorMatchCardinality, VectorMatching } from '../types';
 import { containsAtLeastOneChild, retrieveAllRecursiveNodes } from './path-finder';

-export function buildVectorMatching(state: EditorState, binaryNode: SyntaxNode) {
+export function buildVectorMatching(state: EditorState, binaryNode: SyntaxNode): VectorMatching | null {
   if (!binaryNode || binaryNode.type.id !== BinaryExpr) {
     return null;
   }
@@ -24,7 +24,7 @@ export enum LanguageType {
   MetricName = 'MetricName',
 }

-export function promQLLanguage(top: LanguageType) {
+export function promQLLanguage(top: LanguageType): LezerLanguage {
   return LezerLanguage.define({
     parser: parser.configure({
       top: top,
@@ -35,9 +35,8 @@ export function promQLLanguage(top: LanguageType) {
         StringLiteral: tags.string,
         NumberLiteral: tags.number,
         Duration: tags.number,
-        'Abs Absent AbsentOverTime AvgOverTime Ceil Changes Clamp ClampMax ClampMin CountOverTime DaysInMonth DayOfMonth DayOfWeek Delta Deriv Exp Floor HistogramQuantile HoltWinters Hour Idelta Increase Irate LabelReplace LabelJoin LastOverTime Ln Log10 Log2 MaxOverTime MinOverTime Minute Month PredictLinear PresentOverTime QuantileOverTime Rate Resets Round Scalar Sgn Sort SortDesc Sqrt StddevOverTime StdvarOverTime SumOverTime Time Timestamp Vector Year': tags.function(
-          tags.variableName
-        ),
+        'Abs Absent AbsentOverTime Acos Acosh Asin Asinh Atan Atanh AvgOverTime Ceil Changes Clamp ClampMax ClampMin Cos Cosh CountOverTime DaysInMonth DayOfMonth DayOfWeek Deg Delta Deriv Exp Floor HistogramQuantile HoltWinters Hour Idelta Increase Irate LabelReplace LabelJoin LastOverTime Ln Log10 Log2 MaxOverTime MinOverTime Minute Month Pi PredictLinear PresentOverTime QuantileOverTime Rad Rate Resets Round Scalar Sgn Sin Sinh Sort SortDesc Sqrt StddevOverTime StdvarOverTime SumOverTime Tan Tanh Time Timestamp Vector Year':
+          tags.function(tags.variableName),
         'Avg Bottomk Count Count_values Group Max Min Quantile Stddev Stdvar Sum Topk': tags.operatorKeyword,
         'By Without Bool On Ignoring GroupLeft GroupRight Offset Start End': tags.modifier,
         'And Unless Or': tags.logicOperator,
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-import { parser } from '../lang-promql/grammar/parser';
+import { parser } from '../grammar/parser';
 import { EditorState } from '@codemirror/state';
 import { LezerLanguage } from '@codemirror/language';
 import nock from 'nock';
@@ -28,7 +28,7 @@ export function createEditorState(expr: string): EditorState {
   });
 }

-export function mockPrometheusServer() {
+export function mockPrometheusServer(): void {
   nock('http://localhost:8080')
     .get('/api/v1/label/__name__/values')
     .query(true)
@@ -15,16 +15,25 @@ import {
   Abs,
   Absent,
   AbsentOverTime,
+  Acos,
+  Acosh,
+  Asin,
+  Asinh,
+  Atan,
+  Atanh,
   AvgOverTime,
   Ceil,
   Changes,
   Clamp,
   ClampMax,
   ClampMin,
+  Cos,
+  Cosh,
   CountOverTime,
   DayOfMonth,
   DayOfWeek,
   DaysInMonth,
+  Deg,
   Delta,
   Deriv,
   Exp,
@@ -45,20 +54,26 @@ import {
   MinOverTime,
   Minute,
   Month,
+  Pi,
   PredictLinear,
   PresentOverTime,
   QuantileOverTime,
+  Rad,
   Rate,
   Resets,
   Round,
   Scalar,
   Sgn,
+  Sin,
+  Sinh,
   Sort,
   SortDesc,
   Sqrt,
   StddevOverTime,
   StdvarOverTime,
   SumOverTime,
+  Tan,
+  Tanh,
   Time,
   Timestamp,
   Vector,
@@ -101,6 +116,42 @@ const promqlFunctions: { [key: number]: PromQLFunction } = {
     variadic: 0,
     returnType: ValueType.vector,
   },
+  [Acos]: {
+    name: 'acos',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
+  [Acosh]: {
+    name: 'acosh',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
+  [Asin]: {
+    name: 'asin',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
+  [Asinh]: {
+    name: 'asinh',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
+  [Atan]: {
+    name: 'atan',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
+  [Atanh]: {
+    name: 'atanh',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
   [AvgOverTime]: {
     name: 'avg_over_time',
     argTypes: [ValueType.matrix],
@@ -137,6 +188,18 @@ const promqlFunctions: { [key: number]: PromQLFunction } = {
     variadic: 0,
     returnType: ValueType.vector,
   },
+  [Cos]: {
+    name: 'cos',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
+  [Cosh]: {
+    name: 'cosh',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
   [CountOverTime]: {
     name: 'count_over_time',
     argTypes: [ValueType.matrix],
@@ -161,6 +224,12 @@ const promqlFunctions: { [key: number]: PromQLFunction } = {
     variadic: 1,
     returnType: ValueType.vector,
   },
+  [Deg]: {
+    name: 'deg',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
   [Delta]: {
     name: 'delta',
     argTypes: [ValueType.matrix],
@@ -281,6 +350,12 @@ const promqlFunctions: { [key: number]: PromQLFunction } = {
     variadic: 1,
     returnType: ValueType.vector,
   },
+  [Pi]: {
+    name: 'pi',
+    argTypes: [],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
   [PredictLinear]: {
     name: 'predict_linear',
     argTypes: [ValueType.matrix, ValueType.scalar],
@@ -299,6 +374,12 @@ const promqlFunctions: { [key: number]: PromQLFunction } = {
     variadic: 0,
     returnType: ValueType.vector,
   },
+  [Rad]: {
+    name: 'rad',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
   [Rate]: {
     name: 'rate',
     argTypes: [ValueType.matrix],
@@ -329,6 +410,18 @@ const promqlFunctions: { [key: number]: PromQLFunction } = {
     variadic: 0,
     returnType: ValueType.vector,
   },
+  [Sin]: {
+    name: 'sin',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
+  [Sinh]: {
+    name: 'sinh',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
   [Sort]: {
     name: 'sort',
     argTypes: [ValueType.vector],
@@ -365,6 +458,18 @@ const promqlFunctions: { [key: number]: PromQLFunction } = {
     variadic: 0,
     returnType: ValueType.vector,
   },
+  [Tan]: {
+    name: 'tan',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
+  [Tanh]: {
+    name: 'tanh',
+    argTypes: [ValueType.vector],
+    variadic: 0,
+    returnType: ValueType.vector,
+  },
   [Time]: {
     name: 'time',
     argTypes: [],
@@ -8,19 +8,19 @@
       "dom"
     ],
     "declaration": true,
-    "outDir": "lib",
+    "outDir": "dist",
     "strict": true,
     "sourceMap": true,
     "moduleResolution": "node",
     "esModuleInterop": true,
     "allowSyntheticDefaultImports": true,
-    "allowJs": true
+    "allowJs": true,
+    "skipLibCheck": true
   },
   "include": [
-    "src/lang-promql"
+    "src/"
   ],
   "exclude": [
-    "node_modules",
     "src/**/*.test.ts"
   ]
 }
@@ -1,40 +0,0 @@
-// eslint-disable-next-line @typescript-eslint/no-var-requires
-const path = require('path');
-// eslint-disable-next-line @typescript-eslint/no-var-requires
-const HtmlWebpackPlugin = require('html-webpack-plugin');
-// eslint-disable-next-line @typescript-eslint/no-var-requires
-const { CleanWebpackPlugin } = require('clean-webpack-plugin');
-
-module.exports = {
-  mode: 'development',
-  entry: path.join(__dirname, '/src/app/app.ts'),
-  output: {
-    filename: '[name].bundle.js',
-    path: path.resolve(__dirname, 'dist'),
-  },
-  devtool: 'inline-source-map',
-  module: {
-    rules: [
-      {
-        test: /\.tsx?$/,
-        loader: 'ts-loader',
-        exclude: /node_modules/,
-      },
-    ],
-  },
-  plugins: [
-    new CleanWebpackPlugin({ cleanStaleWebpackAssets: false }),
-    new HtmlWebpackPlugin({
-      hash: true,
-      filename: 'index.html', // relative to root of the application
-      path: path.resolve(__dirname, 'dist'),
-      template: './src/app/app.html',
-    }),
-  ],
-  resolve: {
-    extensions: ['.tsx', '.ts', '.js'],
-  },
-  devServer: {
-    contentBase: './dist',
-  },
-};
web/ui/package-lock.json (generated, new file, 45,605 lines): file diff suppressed because it is too large.

web/ui/package.json (new file, 16 lines):
@@ -0,0 +1,16 @@
+{
+  "name": "prometheus",
+  "private": true,
+  "scripts": {
+    "build": "bash build_ui.sh --all",
+    "build:module": "bash build_ui.sh --build-module",
+    "start": "npm run start -w react-app",
+    "test": "npm run test --workspaces",
+    "test:coverage": "npm run test:coverage --workspaces",
+    "lint": "npm run lint --workspaces"
+  },
+  "workspaces": [
+    "react-app",
+    "module/*"
+  ]
+}
@@ -25,6 +25,8 @@
       }
     ]
   },
-  "plugins": ["prettier"],
+  "plugins": [
+    "prettier"
+  ],
   "ignorePatterns": ["src/vendor/**"]
 }
web/ui/react-app/package-lock.json (generated, 48,650 lines): file diff suppressed because it is too large.
@@ -21,33 +21,32 @@
     "@fortawesome/react-fontawesome": "^0.1.4",
     "@nexucis/fuzzy": "^0.3.0",
     "bootstrap": "^4.6.0",
-    "codemirror-promql": "^0.17.0",
+    "codemirror-promql": "0.18.0",
     "css.escape": "^1.5.1",
     "downshift": "^3.4.8",
     "i": "^0.3.6",
     "jquery": "^3.5.1",
     "jquery.flot.tooltip": "^0.9.0",
-    "jsdom": "^16.4.0",
     "moment": "^2.24.0",
     "moment-timezone": "^0.5.23",
     "popper.js": "^1.14.3",
-    "react": "^16.7.0",
-    "react-copy-to-clipboard": "^5.0.1",
-    "react-dom": "^16.7.0",
-    "react-resize-detector": "^5.0.7",
+    "react": "^17.0.2",
+    "react-copy-to-clipboard": "^5.0.4",
+    "react-dom": "^17.0.2",
+    "react-resize-detector": "^6.7.6",
     "react-router-dom": "^5.2.1",
-    "react-test-renderer": "^16.9.0",
+    "react-test-renderer": "^17.0.2",
     "reactstrap": "^8.9.0",
     "sanitize-html": "^2.3.3",
     "sass": "1.39.0",
     "tempusdominus-bootstrap-4": "^5.1.2",
-    "tempusdominus-core": "^5.0.3",
-    "use-media": "^1.4.0"
+    "tempusdominus-core": "^5.0.3"
   },
   "scripts": {
     "start": "react-scripts start",
     "build": "react-scripts build",
     "test": "react-scripts test --runInBand --resetMocks=false",
+    "test:coverage": "react-scripts test --runInBand --resetMocks=false --no-watch --coverage",
     "test:debug": "react-scripts --inspect-brk test --runInBand --no-cache",
     "eject": "react-scripts eject",
     "lint:ci": "eslint --quiet \"src/**/*.{ts,tsx}\"",
@@ -65,33 +64,31 @@
     "not op_mini all"
   ],
   "devDependencies": {
-    "@testing-library/react-hooks": "^3.1.1",
-    "@types/enzyme": "^3.10.3",
-    "@types/enzyme-adapter-react-16": "^1.0.5",
+    "@testing-library/react-hooks": "^7.0.1",
+    "@types/enzyme": "^3.10.9",
     "@types/flot": "0.0.32",
-    "@types/jest": "^27.0.0",
+    "@types/jest": "^27.0.1",
     "@types/jquery": "^3.5.1",
-    "@types/moment-timezone": "^0.5.10",
-    "@types/node": "^12.11.1",
-    "@types/react": "^16.8.2",
-    "@types/react-copy-to-clipboard": "^5.0.0",
-    "@types/react-dom": "^16.8.0",
+    "@types/node": "^16.7.6",
+    "@types/react": "^17.0.19",
+    "@types/react-copy-to-clipboard": "^5.0.1",
+    "@types/react-dom": "^17.0.9",
     "@types/react-resize-detector": "^5.0.0",
     "@types/react-router-dom": "^5.1.8",
-    "@types/reactstrap": "^8.7.2",
     "@types/sanitize-html": "^1.20.2",
-    "@types/sinon": "^9.0.4",
-    "enzyme": "^3.10.0",
-    "enzyme-adapter-react-16": "^1.15.1",
-    "enzyme-to-json": "^3.4.3",
+    "@types/sinon": "^10.0.2",
+    "@wojtekmaj/enzyme-adapter-react-17": "^0.6.3",
+    "enzyme": "^3.11.0",
+    "enzyme-to-json": "^3.6.2",
     "eslint-config-prettier": "^8.3.0",
     "eslint-config-react-app": "^6.0.0",
     "eslint-plugin-prettier": "^4.0.0",
+    "jest-canvas-mock": "^2.3.1",
     "jest-fetch-mock": "^3.0.3",
     "mutationobserver-shim": "^0.3.7",
     "prettier": "^2.3.2",
     "react-scripts": "4.0.3",
-    "sinon": "^9.0.3",
+    "sinon": "^11.1.2",
     "typescript": "^4.4.2"
   },
   "proxy": "http://localhost:9090",
@@ -3,7 +3,6 @@ import Navigation from './Navbar';
 import { Container } from 'reactstrap';

 import { BrowserRouter as Router, Redirect, Switch, Route } from 'react-router-dom';
-import useMedia from 'use-media';
 import {
   AlertsPage,
   ConfigPage,
@@ -19,6 +18,7 @@ import { PathPrefixContext } from './contexts/PathPrefixContext';
 import { ThemeContext, themeName, themeSetting } from './contexts/ThemeContext';
 import { Theme, themeLocalStorageKey } from './Theme';
 import { useLocalStorage } from './hooks/useLocalStorage';
+import useMedia from './hooks/useMedia';

 interface AppProps {
   consolesLink: string | null;
web/ui/react-app/src/hooks/useMedia.ts (new file, 17 lines):
@@ -0,0 +1,17 @@
+import { useEffect, useState } from 'react';
+
+// A hook to determine whether a CSS media query finds any matches.
+const useMedia = (query: string): boolean => {
+  const mediaQuery = window.matchMedia(query);
+  const [matches, setMatches] = useState(mediaQuery.matches);
+
+  useEffect(() => {
+    const handler = () => setMatches(mediaQuery.matches);
+    mediaQuery.addEventListener('change', handler);
+    return () => mediaQuery.removeEventListener('change', handler);
+  }, [mediaQuery]);
+
+  return matches;
+};
+
+export default useMedia;
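Usage mirrors the `use-media` package this hook replaces; a minimal sketch with a hypothetical component and breakpoint:

import React, { FC } from 'react';
import useMedia from './hooks/useMedia';

// Hypothetical consumer: re-renders whenever the media query flips.
const Layout: FC = () => {
  const isBigScreen = useMedia('(min-width: 768px)');
  return <div>{isBigScreen ? 'wide layout' : 'narrow layout'}</div>;
};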
@@ -11,7 +11,6 @@ import { closeBrackets, closeBracketsKeymap } from '@codemirror/closebrackets';
 import { highlightSelectionMatches } from '@codemirror/search';
 import { commentKeymap } from '@codemirror/comment';
 import { lintKeymap } from '@codemirror/lint';
-import { PromQLExtension, CompleteStrategy } from 'codemirror-promql';
 import { autocompletion, completionKeymap, CompletionContext, CompletionResult } from '@codemirror/autocomplete';
 import { baseTheme, lightTheme, darkTheme, promqlHighlighter } from './CMTheme';

@@ -19,8 +18,9 @@ import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
 import { faSearch, faSpinner, faGlobeEurope } from '@fortawesome/free-solid-svg-icons';
 import MetricsExplorer from './MetricsExplorer';
 import { usePathPrefix } from '../../contexts/PathPrefixContext';
-import { newCompleteStrategy } from 'codemirror-promql/cjs/complete';
 import { useTheme } from '../../contexts/ThemeContext';
+import { CompleteStrategy, PromQLExtension } from 'codemirror-promql';
+import { newCompleteStrategy } from 'codemirror-promql/dist/cjs/complete';

 const promqlExtension = new PromQLExtension();

@@ -9,6 +9,20 @@ describe('Graph', () => {
   beforeAll(() => {
     jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb: any) => cb());
   });
+
+  // Source: https://github.com/maslianok/react-resize-detector#testing-with-enzyme-and-jest
+  beforeEach(() => {
+    window.ResizeObserver = jest.fn().mockImplementation(() => ({
+      observe: jest.fn(),
+      unobserve: jest.fn(),
+      disconnect: jest.fn(),
+    }));
+  });
+
+  afterEach(() => {
+    window.ResizeObserver = ResizeObserver;
+  });
+
   describe('data is returned', () => {
     const props: any = {
       queryParams: {
@@ -213,7 +213,7 @@ class Panel extends Component<PanelProps, PanelState> {
     }
   };

-  setOptions(opts: Record<string, unknown>): void {
+  setOptions(opts: Partial<PanelOptions>): void {
     const newOpts = { ...this.props.options, ...opts };
     this.props.onOptionsChanged(newOpts);
   }
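Narrowing the parameter from Record<string, unknown> to Partial<PanelOptions> moves option typos to compile time; a self-contained sketch where the Options interface stands in for PanelOptions:

// Stand-in for PanelOptions, for illustration only.
interface Options {
  range: number;
  stacked: boolean;
}

function setOptions(opts: Partial<Options>): void {
  // merge with existing options, as Panel.setOptions does
}

setOptions({ stacked: true }); // OK: known key, correct type
// setOptions({ stackd: true }); // rejected by the compiler under Partial<Options>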
@@ -6,7 +6,7 @@ import moment from 'moment-timezone';

 import 'tempusdominus-core';
 import 'tempusdominus-bootstrap-4';
-import '../../../node_modules/tempusdominus-bootstrap-4/build/css/tempusdominus-bootstrap-4.min.css';
+import 'tempusdominus-bootstrap-4/build/css/tempusdominus-bootstrap-4.min.css';

 import { dom, library } from '@fortawesome/fontawesome-svg-core';
 import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
@@ -1,14 +1,31 @@
 import { configure } from 'enzyme';
-import Adapter from 'enzyme-adapter-react-16';
+import Adapter from '@wojtekmaj/enzyme-adapter-react-17';
 import { GlobalWithFetchMock } from 'jest-fetch-mock';
 import 'mutationobserver-shim'; // Needed for CodeMirror.
 import './globals';
+import 'jest-canvas-mock';

 configure({ adapter: new Adapter() });
 const customGlobal: GlobalWithFetchMock = global as GlobalWithFetchMock;
 customGlobal.fetch = require('jest-fetch-mock');
 customGlobal.fetchMock = customGlobal.fetch;
+
+// https://stackoverflow.com/questions/39830580/jest-test-fails-typeerror-window-matchmedia-is-not-a-function
+// https://jestjs.io/docs/manual-mocks#mocking-methods-which-are-not-implemented-in-jsdom
+Object.defineProperty(window, 'matchMedia', {
+  writable: true,
+  value: jest.fn().mockImplementation((query) => ({
+    matches: false,
+    media: query,
+    onchange: null,
+    addListener: jest.fn(), // Deprecated
+    removeListener: jest.fn(), // Deprecated
+    addEventListener: jest.fn(),
+    removeEventListener: jest.fn(),
+    dispatchEvent: jest.fn(),
+  })),
+});
+
 // CodeMirror in the expression input requires this DOM API. When we upgrade react-scripts
 // and the associated Jest deps, hopefully this won't be needed anymore.
 document.getSelection = function () {
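With the stub above in place, code that calls window.matchMedia (such as the new useMedia hook) runs under jsdom; every query reports no match by default:

// Behavior under the mock defined above (illustrative):
const mql = window.matchMedia('(min-width: 768px)');
console.log(mql.matches); // false: the mock always reports no match
console.log(mql.media); // '(min-width: 768px)'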
@@ -5,14 +5,14 @@
    and https://github.com/ForEvolve/bootstrap-dark/issues/49
 */

-@import 'node_modules/bootstrap/scss/functions';
-@import 'node_modules/bootstrap/scss/variables';
+@import '~bootstrap/scss/functions';
+@import '~bootstrap/scss/variables';

-@import 'node_modules/@forevolve/bootstrap-dark/scss/dark-variables';
+@import '~@forevolve/bootstrap-dark/scss/dark-variables';

-@import 'node_modules/bootstrap/scss/mixins';
-@import 'node_modules/@forevolve/bootstrap-dark/scss/mixins-overrides';
-@import 'node_modules/@forevolve/bootstrap-dark/scss/dark-mixins';
+@import '~bootstrap/scss/mixins';
+@import '~@forevolve/bootstrap-dark/scss/mixins-overrides';
+@import '~@forevolve/bootstrap-dark/scss/dark-mixins';

 html {
   font-family: sans-serif; // 2
@@ -33,51 +33,51 @@ body.bootstrap-dark {
 }

 .bootstrap-dark {
-  @import 'node_modules/bootstrap/scss/root';
-  @import 'node_modules/bootstrap/scss/type';
-  @import 'node_modules/bootstrap/scss/images';
-  @import 'node_modules/bootstrap/scss/code';
-  @import 'node_modules/bootstrap/scss/grid';
+  @import '~bootstrap/scss/root';
+  @import '~bootstrap/scss/type';
+  @import '~bootstrap/scss/images';
+  @import '~bootstrap/scss/code';
+  @import '~bootstrap/scss/grid';

-  @import 'node_modules/bootstrap/scss/tables';
-  @import 'node_modules/@forevolve/bootstrap-dark/scss/dark-tables';
+  @import '~bootstrap/scss/tables';
+  @import '~@forevolve/bootstrap-dark/scss/dark-tables';

-  @import 'node_modules/bootstrap/scss/forms';
-  @import 'node_modules/@forevolve/bootstrap-dark/scss/form-overrides';
+  @import '~bootstrap/scss/forms';
+  @import '~@forevolve/bootstrap-dark/scss/form-overrides';

-  @import 'node_modules/bootstrap/scss/buttons';
-  @import 'node_modules/bootstrap/scss/transitions';
-  @import 'node_modules/bootstrap/scss/dropdown';
-  @import 'node_modules/bootstrap/scss/button-group';
+  @import '~bootstrap/scss/buttons';
+  @import '~bootstrap/scss/transitions';
+  @import '~bootstrap/scss/dropdown';
+  @import '~bootstrap/scss/button-group';

-  @import 'node_modules/bootstrap/scss/input-group';
-  @import 'node_modules/@forevolve/bootstrap-dark/scss/dark-input-group';
+  @import '~bootstrap/scss/input-group';
+  @import '~@forevolve/bootstrap-dark/scss/dark-input-group';

-  @import 'node_modules/bootstrap/scss/custom-forms';
-  @import 'node_modules/bootstrap/scss/nav';
-  @import 'node_modules/bootstrap/scss/navbar';
-  @import 'node_modules/bootstrap/scss/card';
-  @import 'node_modules/bootstrap/scss/breadcrumb';
-  @import 'node_modules/bootstrap/scss/pagination';
-  @import 'node_modules/bootstrap/scss/badge';
-  @import 'node_modules/bootstrap/scss/jumbotron';
-  @import 'node_modules/bootstrap/scss/alert';
-  @import 'node_modules/bootstrap/scss/progress';
-  @import 'node_modules/bootstrap/scss/media';
-  @import 'node_modules/bootstrap/scss/list-group';
-  @import 'node_modules/bootstrap/scss/close';
-  @import 'node_modules/bootstrap/scss/toasts';
-  @import 'node_modules/bootstrap/scss/modal';
-  @import 'node_modules/bootstrap/scss/tooltip';
-  @import 'node_modules/bootstrap/scss/popover';
-  @import 'node_modules/bootstrap/scss/carousel';
-  @import 'node_modules/bootstrap/scss/spinners';
-  @import 'node_modules/bootstrap/scss/utilities';
+  @import '~bootstrap/scss/custom-forms';
+  @import '~bootstrap/scss/nav';
+  @import '~bootstrap/scss/navbar';
+  @import '~bootstrap/scss/card';
+  @import '~bootstrap/scss/breadcrumb';
+  @import '~bootstrap/scss/pagination';
+  @import '~bootstrap/scss/badge';
+  @import '~bootstrap/scss/jumbotron';
+  @import '~bootstrap/scss/alert';
+  @import '~bootstrap/scss/progress';
+  @import '~bootstrap/scss/media';
+  @import '~bootstrap/scss/list-group';
+  @import '~bootstrap/scss/close';
+  @import '~bootstrap/scss/toasts';
+  @import '~bootstrap/scss/modal';
+  @import '~bootstrap/scss/tooltip';
+  @import '~bootstrap/scss/popover';
+  @import '~bootstrap/scss/carousel';
+  @import '~bootstrap/scss/spinners';
+  @import '~bootstrap/scss/utilities';

   .navbar-themed {
     @extend .bg-dark;
     @extend .navbar-dark;
   }

-  @import 'node_modules/@forevolve/bootstrap-dark/scss/dark-styles';
+  @import '~@forevolve/bootstrap-dark/scss/dark-styles';
 }
@@ -6,14 +6,14 @@
    and https://github.com/ForEvolve/bootstrap-dark/issues/49
 */

-@import 'node_modules/bootstrap/scss/functions';
-@import 'node_modules/bootstrap/scss/variables';
+@import '~bootstrap/scss/functions';
+@import '~bootstrap/scss/variables';

-@import 'node_modules/bootstrap/scss/mixins';
-@import 'node_modules/@forevolve/bootstrap-dark/scss/mixins-overrides';
+@import '~bootstrap/scss/mixins';
+@import '~@forevolve/bootstrap-dark/scss/mixins-overrides';

 /* Add reboot styles using light theme variables */
-@import 'node_modules/bootstrap/scss/reboot';
+@import '~bootstrap/scss/reboot';

 body.bootstrap {
   margin: 0; // 1
@@ -27,41 +27,41 @@ body.bootstrap {
 }

 .bootstrap {
-  @import 'node_modules/bootstrap/scss/root';
-  @import 'node_modules/bootstrap/scss/type';
-  @import 'node_modules/bootstrap/scss/images';
-  @import 'node_modules/bootstrap/scss/code';
-  @import 'node_modules/bootstrap/scss/grid';
-  @import 'node_modules/bootstrap/scss/tables';
+  @import '~bootstrap/scss/root';
+  @import '~bootstrap/scss/type';
+  @import '~bootstrap/scss/images';
+  @import '~bootstrap/scss/code';
+  @import '~bootstrap/scss/grid';
+  @import '~bootstrap/scss/tables';

-  @import 'node_modules/bootstrap/scss/forms';
-  @import 'node_modules/@forevolve/bootstrap-dark/scss/form-overrides';
+  @import '~bootstrap/scss/forms';
+  @import '~@forevolve/bootstrap-dark/scss/form-overrides';

-  @import 'node_modules/bootstrap/scss/buttons';
-  @import 'node_modules/bootstrap/scss/transitions';
-  @import 'node_modules/bootstrap/scss/dropdown';
-  @import 'node_modules/bootstrap/scss/button-group';
-  @import 'node_modules/bootstrap/scss/input-group';
-  @import 'node_modules/bootstrap/scss/custom-forms';
-  @import 'node_modules/bootstrap/scss/nav';
-  @import 'node_modules/bootstrap/scss/navbar';
-  @import 'node_modules/bootstrap/scss/card';
-  @import 'node_modules/bootstrap/scss/breadcrumb';
-  @import 'node_modules/bootstrap/scss/pagination';
-  @import 'node_modules/bootstrap/scss/badge';
-  @import 'node_modules/bootstrap/scss/jumbotron';
-  @import 'node_modules/bootstrap/scss/alert';
-  @import 'node_modules/bootstrap/scss/progress';
-  @import 'node_modules/bootstrap/scss/media';
-  @import 'node_modules/bootstrap/scss/list-group';
-  @import 'node_modules/bootstrap/scss/close';
-  @import 'node_modules/bootstrap/scss/toasts';
-  @import 'node_modules/bootstrap/scss/modal';
-  @import 'node_modules/bootstrap/scss/tooltip';
-  @import 'node_modules/bootstrap/scss/popover';
-  @import 'node_modules/bootstrap/scss/carousel';
-  @import 'node_modules/bootstrap/scss/spinners';
-  @import 'node_modules/bootstrap/scss/utilities';
+  @import '~bootstrap/scss/buttons';
+  @import '~bootstrap/scss/transitions';
+  @import '~bootstrap/scss/dropdown';
+  @import '~bootstrap/scss/button-group';
+  @import '~bootstrap/scss/input-group';
+  @import '~bootstrap/scss/custom-forms';
+  @import '~bootstrap/scss/nav';
+  @import '~bootstrap/scss/navbar';
+  @import '~bootstrap/scss/card';
+  @import '~bootstrap/scss/breadcrumb';
+  @import '~bootstrap/scss/pagination';
+  @import '~bootstrap/scss/badge';
+  @import '~bootstrap/scss/jumbotron';
+  @import '~bootstrap/scss/alert';
+  @import '~bootstrap/scss/progress';
+  @import '~bootstrap/scss/media';
+  @import '~bootstrap/scss/list-group';
+  @import '~bootstrap/scss/close';
+  @import '~bootstrap/scss/toasts';
+  @import '~bootstrap/scss/modal';
+  @import '~bootstrap/scss/tooltip';
+  @import '~bootstrap/scss/popover';
+  @import '~bootstrap/scss/carousel';
+  @import '~bootstrap/scss/spinners';
+  @import '~bootstrap/scss/utilities';

   .navbar-themed {
     @extend .bg-light;