Mirror of https://github.com/prometheus/prometheus.git (synced 2024-12-24 05:04:05 -08:00)

move codemirror-promql as a prometheus web module (#9188)

* move codemirror-promql as a prometheus web module
* remove unnecessary file for the codemirror module
* change the license to Apache
* fix codemirror build

Signed-off-by: Augustin Husson <husson.augustin@gmail.com>

Commit 54dfee02b2 (parent 05dba96ebf)
.circleci/config.yml

@@ -65,6 +65,14 @@ jobs:
       paths:
         - ~/.npm

+  test_web_module:
+    executor: golang
+    steps:
+      - checkout
+      - run: make web-module-install
+      - run: make web-module-test
+      - run: make web-module-lint
+
   test_windows:
     executor:
       name: win/default

@@ -137,6 +145,10 @@ workflows:
          filters:
            tags:
              only: /.*/
+      - test_web_module:
+          filters:
+            tags:
+              only: /.*/
       - test_tsdb_go115:
           filters:
             tags:
MAINTAINERS.md

@@ -11,6 +11,7 @@ Julien Pivotto (<roidelapluie@prometheus.io> / @roidelapluie) is the main/default maintainer.
 * `tsdb`: Ganesh Vernekar (<ganesh@grafana.com> / @codesome), Bartłomiej Płotka (<bwplotka@gmail.com> / @bwplotka)
 * `web`
   * `ui`: Julius Volz (<julius.volz@gmail.com> / @juliusv)
+  * `module`: Augustin Husson (<husson.augustin@gmail.com> @nexucis)
 * `Makefile` and related build configuration: Simon Pasquier (<pasquier.simon@gmail.com> / @simonpasquier), Ben Kochie (<superq@gmail.com> / @SuperQ)

 For the sake of brevity, not all subtrees are explicitly listed. Due to the
21	Makefile

@@ -14,6 +14,7 @@
 # Needs to be defined before including Makefile.common to auto-generate targets
 DOCKER_ARCHS ?= amd64 armv7 arm64 ppc64le s390x

+WEB_MODULE_PATH = web/ui/module
 REACT_APP_PATH = web/ui/react-app
 REACT_APP_SOURCE_FILES = $(shell find $(REACT_APP_PATH)/public/ $(REACT_APP_PATH)/src/ $(REACT_APP_PATH)/tsconfig.json)
 REACT_APP_OUTPUT_DIR = web/ui/static/react

@@ -40,7 +41,7 @@ $(REACT_APP_OUTPUT_DIR): $(REACT_APP_NODE_MODULES_PATH) $(REACT_APP_SOURCE_FILES)
 	@$(REACT_APP_BUILD_SCRIPT)

 .PHONY: assets
-assets: $(REACT_APP_OUTPUT_DIR)
+assets: web-module-install web-module-build $(REACT_APP_OUTPUT_DIR)
 	@echo ">> writing assets"
 # Un-setting GOOS and GOARCH here because the generated Go code is always the same,
 # but the cached object code is incompatible between architectures and OSes (which

@@ -63,13 +64,29 @@ react-app-test: | $(REACT_APP_NODE_MODULES_PATH) react-app-lint
 	@echo ">> running React app tests"
 	cd $(REACT_APP_PATH) && npm run test --no-watch --coverage

+.PHONY: web-module-build
+web-module-build:
+	@cd ${WEB_MODULE_PATH} && ./build.sh --build
+
+.PHONY: web-module-lint
+web-module-lint:
+	@cd ${WEB_MODULE_PATH} && ./build.sh --lint
+
+.PHONY: web-module-test
+web-module-test:
+	@cd ${WEB_MODULE_PATH} && ./build.sh --test
+
+.PHONY: web-module-install
+web-module-install:
+	@cd ${WEB_MODULE_PATH} && ./build.sh --install
+
 .PHONY: test
 # If we only want to only test go code we have to change the test target
 # which is called by all.
 ifeq ($(GO_ONLY),1)
 test: common-test
 else
-test: common-test react-app-test
+test: common-test react-app-test web-module-test web-module-lint
 endif
60	web/ui/module/build.sh (new executable file)

@@ -0,0 +1,60 @@
#!/bin/bash

set -e

MODULE_LIST=(codemirror-promql)

build-module() {
  for module in "${MODULE_LIST[@]}"; do
    cd "${module}"
    echo "building ${module}"
    npm run build
    cd ../
  done
}

lint-module() {
  for module in "${MODULE_LIST[@]}"; do
    cd "${module}"
    echo "running linter for ${module}"
    npm run lint
    cd ../
  done
}

test-module() {
  for module in "${MODULE_LIST[@]}"; do
    cd "${module}"
    echo "running all tests for ${module}"
    npm run test
    cd ../
  done
}

install-module() {
  for module in "${MODULE_LIST[@]}"; do
    cd "${module}"
    echo "install deps for ${module}"
    npm ci
    cd ../
  done
}

for i in "$@"; do
  case ${i} in
    --build)
      build-module
      shift
      ;;
    --lint)
      lint-module
      shift
      ;;
    --test)
      test-module
      ;;
    --install)
      install-module
      ;;
  esac
done
35	web/ui/module/codemirror-promql/.eslintrc.json (new file)

@@ -0,0 +1,35 @@
{
  "parser": "@typescript-eslint/parser",
  "extends": [
    "plugin:@typescript-eslint/recommended",
    "plugin:prettier/recommended"
  ],
  "rules": {
    "@typescript-eslint/camelcase": "warn",
    "@typescript-eslint/explicit-function-return-type": ["off"],
    "eol-last": [
      "error",
      "always"
    ],
    "object-curly-spacing": [
      "error",
      "always"
    ],
    "prefer-const": "warn",
    "comma-dangle": [
      "error",
      {
        "arrays": "always-multiline",
        "objects": "always-multiline",
        "imports": "always-multiline"
      }
    ]
  },
  "plugins": [
    "prettier"
  ],
  "ignorePatterns": ["node_modules/"],
  "parserOptions": {
    "sourceType": "module"
  }
}
8	web/ui/module/codemirror-promql/.gitignore (vendored, new file)

@@ -0,0 +1,8 @@
.idea/

node_modules/
dist/
lib/
src/**/codemirror_grammar.js

/.nyc_output
13	web/ui/module/codemirror-promql/.npmignore (new file)

@@ -0,0 +1,13 @@
/.vscode/
/release/**/*.test.js
/release/**/test/
/scripts/
/.circleci/
/src/
/test/
/examples/
/gulpfile.js
/tsconfig.json
/.npmignore
/.gitignore
/.eslintrc.js
99	web/ui/module/codemirror-promql/CHANGELOG.md (new file)

@@ -0,0 +1,99 @@
0.17.0 / 2021-08-10
===================

* **[Feature]**: Support `present_over_time`
* **[Feature]**: The HTTP method used to contact Prometheus is now configurable.

0.16.0 / 2021-05-20
===================

* **[Feature]**: Support a partial PromQL language called `MetricName`. It can be used to autocomplete only the metric name. (#142)
* **[Feature]**: Autocomplete `NaN` and `Inf` (#141)
* **[Enhancement]**: Fetch series using the HTTP `POST` method (#139)
* **[Enhancement]**: Upgrade lezer-promql, which fixed the parsing of metric names starting with `Inf`/`NaN`, such as `infra` (#142)
* **[BreakingChange]**: The constant `promQLLanguage` has been changed to be a function. It takes a `LanguageType` as a parameter (#142)

0.15.0 / 2021-04-13
===================

* **[Feature]**: Provide a way to inject an initial metric list for the autocompletion (#134)
* **[Enhancement]**: Autocomplete metrics/functions/aggregations when the editor is empty (#133)
* **[Enhancement]**: Improve the documentation to reflect what the lib is providing. (#134)
* **[Change]**: Export the essential interfaces in the root index of the lib. (#132)
* **[Change]**: Downgrade the required NodeJS version (from 14 to 12) (#112)
* **[BreakingChange]**: Support CommonJS modules. (#130)

Note that this requires changing the import path if you are using something not exported by the root index of the lib. For
example: `import { labelMatchersToString } from 'codemirror-promql/parser/matcher';`
becomes `import { labelMatchersToString } from 'codemirror-promql/esm/parser/matcher';`
or `import { labelMatchersToString } from 'codemirror-promql/cjs/parser/matcher';`

0.14.1 / 2021-04-07
===================

* **[Enhancement]**: Provide getters and setters to easily manipulate the different objects exposed by the lib
* **[BugFix]**: Fix the autocompletion of labels after a comma (in a label matcher list or in a grouping label list)

0.14.0 / 2021-03-26
===================

* **[Feature]**: Support negative offsets, through the update of [lezer-promql](https://github.com/promlabs/lezer-promql/releases/tag/0.18.0)
* **[Enhancement]**: Add snippets to ease the usage of the aggregations `topk`, `bottomk` and `count_values`
* **[Enhancement]**: Autocomplete the second half of the subquery time selector

0.13.0 / 2021-03-22
===================
* **[Feature]**: Linter and autocompletion support 3 new PromQL functions: `clamp`, `last_over_time`, `sgn`
* **[Feature]**: Linter and autocompletion support the `@` expression.
* **[Enhancement]**: The signature of `CompleteStrategy.promQL` has been updated to support the type `Promise<null>`
* **[BreakingChange]**: Support the latest version of CodeMirror.next (v0.18.0)
* **[BreakingChange]**: Remove the function `enricher`

0.12.0 / 2021-01-12
===================

* **[Enhancement]**: Improve the parsing of `BinExpr` thanks to the changes provided by lezer-promql (v0.15.0)
* **[BreakingChange]**: Support the new version of CodeMirror, v0.17.x

0.11.0 / 2020-12-08
===================

* **[Feature]**: Add completion of the keyword `bool`. (#89)
* **[Feature]**: Add a function `enricher` that can be used to enrich the completion with a custom one.
* **[Feature]**: Add an LRU caching system. (#71)
* **[Feature]**: You can now configure the maximum number of metrics in Prometheus for which metadata is fetched.
* **[Feature]**: Allow injecting a custom `CompleteStrategy`. (#83)
* **[Feature]**: Provide the Matchers in the PrometheusClient for the methods `labelValues` and `series`. (#84)
* **[Feature]**: Add the method `metricName` in the PrometheusClient that supports a prefix of the metric searched. (#84)
* **[Enhancement]**: The caching mechanism and the PrometheusClient have been split. (#71)
* **[Enhancement]**: Optimize the code of the PrometheusClient when no cache is used.
* **[Enhancement]**: General improvement of the code thanks to CodeMirror.next v0.14.0 (for the new tree management) and v0.15.0 (for the new tags/highlight management)
* **[Enhancement]**: Improve the code coverage of the parser concerning the parsing of functions / aggregations.
* **[BugFix]**: In certain cases, the linter didn't ignore comments. (#78)
* **[BreakingChange]**: Use an object instead of a map when querying the metrics metadata.
* **[BreakingChange]**: Support the latest version of CodeMirror.next (v0.15.0).
* **[BreakingChange]**: Change the way the completion configuration is structured.

0.10.2 / 2020-10-18
===================

* **[BugFix]**: Fixed missing autocompletion of binary operators after aggregations

0.10.1 / 2020-10-16
===================

* **[Enhancement]**: Caching of series label names and values for autocompletion is now optimized to be much faster
* **[BugFix]**: Fixed incorrect linter errors around binary operator arguments not separated from the operator by a space

0.10.0 / 2020-10-14
===================

* **[Enhancement]**: The linter now checks many-to-many, one-to-one, many-to-one and one-to-many operations
* **[Enhancement]**: The autocompletion now shows the type of a metric if the type is the same for every possible definition of that metric
* **[Enhancement]**: The autocompletion supports completion of durations
* **[Enhancement]**: Descriptions have been added for the snippets, the binary operator modifiers and the aggregation operator modifiers
* **[Enhancement]**: Coverage of the code has been increased (a lot).
* **[BreakingChange]**: Remove LSP support
265	web/ui/module/codemirror-promql/README.md (new file)

@@ -0,0 +1,265 @@
CodeMirror-promql
=================
[![CircleCI](https://circleci.com/gh/prometheus-community/codemirror-promql.svg?style=shield)](https://circleci.com/gh/prometheus-community/codemirror-promql) [![GitHub license](https://img.shields.io/badge/license-MIT-blue.svg)](./LICENSE)
[![NPM version](https://img.shields.io/npm/v/codemirror-promql.svg)](https://www.npmjs.org/package/codemirror-promql) [![codecov](https://codecov.io/gh/prometheus-community/codemirror-promql/branch/master/graph/badge.svg?token=1OSVPBDKZC)](https://codecov.io/gh/prometheus-community/codemirror-promql)

## Overview

This project provides a mode for [CodeMirror Next](https://codemirror.net/6) that handles syntax highlighting, linting
and autocompletion for PromQL ([Prometheus Query Language](https://prometheus.io/docs/introduction/overview/)).

### Installation

This mode is available as an npm package:

```bash
npm install --save codemirror-promql
```

**Note:** You will have to manually install the different packages that are part
of [CodeMirror Next](https://codemirror.net/6), as they are peer dependencies of this package. Here are the packages
you need to install:

* **@codemirror/autocomplete**
* **@codemirror/highlight**
* **@codemirror/language**
* **@codemirror/lint**
* **@codemirror/state**
* **@codemirror/view**

```bash
npm install --save @codemirror/autocomplete @codemirror/highlight @codemirror/language @codemirror/lint @codemirror/state @codemirror/view
```

**Note 2**: that is the minimum required to install the lib. You will probably also want to install the dependency
**@codemirror/basic-setup** to ease the setup of CodeMirror itself:

```bash
npm install --save @codemirror/basic-setup
```

### Playground

[Here](https://codemirror-promql.netlify.app/) you have a playground available that is deployed from the latest commit
available on the `master` branch.

Here is a short preview of what it currently looks like:

![preview](https://user-images.githubusercontent.com/4548045/95660829-d5e4b680-0b2a-11eb-9ecb-41dca6396273.gif)

## Usage

As the setup of the PromQL language can be a bit tricky in CodeMirror Next, this lib provides a class `PromQLExtension`
that helps you configure the different extensions we aim to provide.

### Default setup

If you want to use the different features provided without spending too much time understanding how to configure
them, the easiest way is this one:

```typescript
import { PromQLExtension } from 'codemirror-promql';
import { basicSetup } from '@codemirror/basic-setup';
import { EditorState } from '@codemirror/state';
import { EditorView } from '@codemirror/view';

const promQL = new PromQLExtension();
new EditorView({
  state: EditorState.create({
    extensions: [basicSetup, promQL.asExtension()],
  }),
  // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
  // tslint:disable-next-line:no-non-null-assertion
  parent: document.getElementById('editor')!,
});
```

Using the default setup will activate:

* syntax highlighting
* an offline autocompletion that will suggest PromQL keywords such as functions / aggregations, depending on the
  context.
* an offline linter that will display PromQL syntax errors (which is close to what Prometheus returns)

### Deactivate autocompletion - linter

In case you would like to deactivate the linter and/or the autocompletion, it is as simple as this:

```typescript
const promQL = new PromQLExtension().activateLinter(false).activateCompletion(false) // here the linter and the autocompletion are deactivated
```

### Linter

There is no particular configuration today for the linter. Feel free to file an issue if you have some use cases that
would require configurability.

### Autocompletion

The autocompletion feature provides multiple parameters that can be used to adapt this lib to your
environment.

#### maxMetricsMetadata

`maxMetricsMetadata` is the maximum number of metrics in Prometheus for which metadata is fetched. If the number of
metrics exceeds this limit, no metric metadata is fetched at all.

By default, the limit is 10 000 metrics.

Use it cautiously: a high limit can crash your browser because of the amount of data fetched.

```typescript
const promQL = new PromQLExtension().setComplete({ maxMetricsMetadata: 10000 })
```

#### Connect the autocompletion extension to a remote Prometheus server

Connecting the autocompletion extension to a remote Prometheus server will provide autocompletion of metric names, label
names, and label values.

##### Use the default Prometheus client

###### Prometheus URL

If you want to use the default Prometheus client provided by this lib, you have to provide the URL used to contact the
Prometheus server.

Note: this is the only mandatory parameter in case you want to use the default Prometheus client. Without this
parameter, the rest of the config will be ignored, and the Prometheus client won't be initialized.

```typescript
const promQL = new PromQLExtension().setComplete({ remote: { url: 'https://prometheus.land' } })
```

###### Override FetchFn

In case your Prometheus server is protected and requires a special HTTP client, you can override the function `fetchFn`
that is used to perform any required HTTP request.

```typescript
const promQL = new PromQLExtension().setComplete({ remote: { fetchFn: myHTTPClient } })
```

###### Duration to use for looking back when retrieving metrics / labels

If you are a bit familiar with the Prometheus API, you know that you can pass a time interval that is used to tell
Prometheus which period of time you are interested in when retrieving metadata (like metrics / labels).

In case you would like to provide your own duration, you can override the variable `lookbackInterval`. By default, the
value is `12 * 60 * 60 * 1000` (12h). The value must be defined in **milliseconds**.

```typescript
const promQL = new PromQLExtension().setComplete({ remote: { lookbackInterval: 12 * 60 * 60 * 1000 } })
```

###### Error Handling

You can set up your own error handler to catch any HTTP error that can occur when the PrometheusClient is contacting
Prometheus.

```typescript
const promQL = new PromQLExtension().setComplete({ remote: { httpErrorHandler: (error: any) => console.error(error) } })
```

###### HTTP method used

By default, the Prometheus client will use the HTTP method `POST` when contacting Prometheus for the
endpoints `/api/v1/labels` and `/api/v1/series`.

You can change it to use the HTTP method `GET` if you prefer.

```typescript
const promQL = new PromQLExtension().setComplete({ remote: { httpMethod: 'GET' } })
```

###### Cache

The default client has an embedded cache that is used to store the different metrics and labels retrieved from a remote
Prometheus server.

###### Max Age

Data is stored in the cache for a limited amount of time defined by the variable `maxAge`, which is 5 minutes by
default. The value must be defined in **milliseconds**.

```typescript
const promQL = new PromQLExtension().setComplete({ remote: { cache: { maxAge: 5 * 60 * 1000 } } })
```

###### Initial Metric List

The cache can be initialized with a list of metric names. This is useful when you already have the list of metrics
somewhere else in your application and you would like to share this list with the embedded Prometheus client
of `codemirror-promql`.

Note: keep in mind that this list will be kept in the embedded Prometheus client until the time defined by `maxAge` has passed.

```typescript
const promQL = new PromQLExtension().setComplete({
  remote: {
    cache: {
      initialMetricList: [
        'ALERTS',
        'ALERTS_FOR_STATE',
        'alertmanager_alerts',
        'alertmanager_alerts_invalid_total',
        'alertmanager_alerts_received_total',
      ]
    }
  }
})
```

##### Override the default Prometheus client

In case you are not satisfied with the default Prometheus client, you can still provide your own. It has to implement the
interface [PrometheusClient](https://github.com/prometheus-community/codemirror-promql/blob/master/src/lang-promql/client/prometheus.ts#L111-L117).

```typescript
const promQL = new PromQLExtension().setComplete({ remote: { prometheusClient: MyPrometheusClient } })
```
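For illustration only, here is a minimal sketch of such a client. It assumes the interface shape added later in this commit (`labelNames`, `labelValues`, `metricMetadata`, `series`, `metricNames`); the static metric and label values are purely illustrative and not part of the lib:

```typescript
import { PromQLExtension } from 'codemirror-promql';

// Hypothetical client answering from a static list instead of a live Prometheus server.
// Each property mirrors one method of the PrometheusClient interface (see client/prometheus.ts below).
const staticMetrics = ['up', 'node_cpu_seconds_total'];
const staticPrometheusClient = {
  labelNames: () => Promise.resolve(['instance', 'job']),
  labelValues: (labelName: string) => Promise.resolve(labelName === '__name__' ? staticMetrics : []),
  metricMetadata: () => Promise.resolve({}),
  series: () => Promise.resolve([]),
  metricNames: () => Promise.resolve(staticMetrics),
};

const promQL = new PromQLExtension().setComplete({ remote: { prometheusClient: staticPrometheusClient } });
```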
#### Provide your own implementation of the autocompletion

In case you would like to provide your own implementation of the autocompletion, you can simply do it like this:

```typescript
const promQL = new PromQLExtension().setComplete({ completeStrategy: myCustomImpl })
```

Note: In case this parameter is provided, the rest of the configuration is ignored.
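As a sketch only: assuming `CompleteStrategy` boils down to a single `promQL(context)` method returning a `CompletionResult` or a promise of one (as the changelog entry about `CompleteStrategy.promQL` above suggests), a strategy that completes a fixed word list could look like the following. The word list and the strategy object are purely illustrative:

```typescript
import { PromQLExtension } from 'codemirror-promql';
import { CompletionContext, CompletionResult } from '@codemirror/autocomplete';

// Hypothetical strategy that ignores the PromQL syntax tree and only completes a fixed word list.
const myWords = ['up', 'rate', 'sum'];
const myCustomImpl = {
  promQL(context: CompletionContext): Promise<CompletionResult | null> {
    const word = context.matchBefore(/\w*/);
    if (!word || (word.from === word.to && !context.explicit)) {
      return Promise.resolve(null);
    }
    return Promise.resolve({ from: word.from, options: myWords.map((label) => ({ label })) });
  },
};

const promQL = new PromQLExtension().setComplete({ completeStrategy: myCustomImpl });
```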
### Example

* The development [app](./src/app) can give you an example of how to use it with no TS framework.
* [ReactJS example](https://github.com/prometheus/prometheus/blob/431ea75a11ca165dad9dd5d629b3cf975f4c186b/web/ui/react-app/src/pages/graph/CMExpressionInput.tsx)
* [Angular example](https://github.com/perses/perses/blob/28b3bdac88b0ed7a4602f9c91106442eafcb6c34/internal/api/front/perses/src/app/project/prometheusrule/promql-editor/promql-editor.component.ts)

## Contributions

Any contribution or suggestion would be really appreciated. Feel free
to [file an issue](https://github.com/prometheus-community/codemirror-promql/issues)
or [send a pull request](https://github.com/prometheus-community/codemirror-promql/pulls).

## Development

In case you want to contribute and change the code by yourself, run the following commands:

To install all dependencies:

```
npm install
```

To start the web server:

```
npm start
```

This should open a tab in your browser with the development app that contains CodeMirror Next with the PromQL plugin.

## License

[MIT](./LICENSE)
29	web/ui/module/codemirror-promql/build.sh (new file)

@@ -0,0 +1,29 @@
#!/bin/bash

# Copyright 2021 The Prometheus Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -ex

# build the lib (both ES2015 and CommonJS)
tsc --module ES2015 --target ES2015 --outDir lib/esm
tsc --module commonjs --target es5 --outDir lib/cjs --downlevelIteration

# Finally, copy some useful files into the distribution folder for documentation purposes.
cp ./README.md ./lib/README.md
cp ./CHANGELOG.md ./lib/CHANGELOG.md
cp ./package.json ./lib/package.json

if [ -f "./LICENSE" ]; then
  cp ./LICENSE ./lib/LICENSE
fi
22154	web/ui/module/codemirror-promql/package-lock.json (generated, new file; diff suppressed because it is too large)
90
web/ui/module/codemirror-promql/package.json
Normal file
90
web/ui/module/codemirror-promql/package.json
Normal file
|
@ -0,0 +1,90 @@
|
|||
{
|
||||
"name": "codemirror-promql",
|
||||
"version": "0.17.0",
|
||||
"description": "a CodeMirror mode for the PromQL language",
|
||||
"main": "cjs/index.js",
|
||||
"module": "esm/index.js",
|
||||
"scripts": {
|
||||
"start": "webpack-dev-server --config webpack.config.cjs --open",
|
||||
"build": "npm run build-lib && npm run build-app",
|
||||
"build-lib": "bash ./build.sh",
|
||||
"build-app": "webpack --config webpack.config.cjs",
|
||||
"test": "ts-mocha -p tsconfig.json src/**/*.test.ts",
|
||||
"test-coverage": "nyc ts-mocha -p ./tsconfig.json ./**/*.test.ts",
|
||||
"codecov": "nyc report --reporter=text-lcov > coverage.lcov && codecov",
|
||||
"lint": "eslint src/ --ext .ts",
|
||||
"lint:fix": "eslint --fix src/ --ext .ts"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/prometheus-community/codemirror-promql.git"
|
||||
},
|
||||
"keywords": [
|
||||
"promql",
|
||||
"codemirror",
|
||||
"mode",
|
||||
"prometheus"
|
||||
],
|
||||
"author": "Prometheus Authors <prometheus-developers@googlegroups.com>",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/prometheus-community/codemirror-promql/issues"
|
||||
},
|
||||
"homepage": "https://github.com/prometheus-community/codemirror-promql/blob/master/README.md",
|
||||
"dependencies": {
|
||||
"lezer-promql": "^0.20.0",
|
||||
"lru-cache": "^6.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@codemirror/autocomplete": "^0.18.3",
|
||||
"@codemirror/basic-setup": "^0.18.0",
|
||||
"@codemirror/highlight": "^0.18.3",
|
||||
"@codemirror/language": "^0.18.0",
|
||||
"@codemirror/lint": "^0.18.1",
|
||||
"@codemirror/state": "^0.18.2",
|
||||
"@codemirror/view": "^0.18.1",
|
||||
"@types/chai": "^4.2.12",
|
||||
"@types/lru-cache": "^5.1.0",
|
||||
"@types/mocha": "^8.0.3",
|
||||
"@types/node": "^14.0.13",
|
||||
"@typescript-eslint/eslint-plugin": "^2.22.0",
|
||||
"@typescript-eslint/parser": "^2.22.0",
|
||||
"chai": "^4.2.0",
|
||||
"clean-webpack-plugin": "^3.0.0",
|
||||
"codecov": "^3.8.1",
|
||||
"eslint": "^6.8.0",
|
||||
"eslint-config-prettier": "^6.11.0",
|
||||
"eslint-plugin-flowtype": "^5.2.0",
|
||||
"eslint-plugin-import": "^2.22.0",
|
||||
"eslint-plugin-prettier": "^3.1.4",
|
||||
"html-webpack-plugin": "^4.3.0",
|
||||
"isomorphic-fetch": "^3.0.0",
|
||||
"mocha": "^8.1.2",
|
||||
"nock": "^13.0.11",
|
||||
"nyc": "^15.1.0",
|
||||
"prettier": "^2.0.5",
|
||||
"ts-loader": "^7.0.4",
|
||||
"ts-mocha": "^8.0.0",
|
||||
"ts-node": "^9.0.0",
|
||||
"typescript": "^4.2.3",
|
||||
"webpack": "^4.43.0",
|
||||
"webpack-cli": "^3.3.11",
|
||||
"webpack-dev-server": "^3.11.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@codemirror/autocomplete": "^0.18.3",
|
||||
"@codemirror/highlight": "^0.18.3",
|
||||
"@codemirror/language": "^0.18.0",
|
||||
"@codemirror/lint": "^0.18.1",
|
||||
"@codemirror/state": "^0.18.2",
|
||||
"@codemirror/view": "^0.18.1"
|
||||
},
|
||||
"prettier": {
|
||||
"singleQuote": true,
|
||||
"trailingComma": "es5",
|
||||
"printWidth": 150
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
}
|
||||
}
|
48
web/ui/module/codemirror-promql/src/app/app.html
Normal file
48
web/ui/module/codemirror-promql/src/app/app.html
Normal file
|
@ -0,0 +1,48 @@
|
|||
<!-- The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2020 The Prometheus Authors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
-->
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>PromQL</title>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h3>CodeMirror Mode PromQL</h3>
|
||||
<label for="completion">choose the completion mode:</label>
|
||||
<select name="completion" id="completion">
|
||||
<option selected value="offline">Offline</option>
|
||||
<option value="prometheus">Prometheus</option>
|
||||
</select>
|
||||
<br>
|
||||
<label for="languageType">Language to complete</label>
|
||||
<select name="languageType" id="languageType">
|
||||
<option selected value="promql">Full PromQL</option>
|
||||
<option value="metricName">Metric names</option>
|
||||
</select>
|
||||
|
||||
<button id="apply">apply</button>
|
||||
|
||||
<div id=editor></div>
|
||||
</body>
|
||||
</html>
|
85
web/ui/module/codemirror-promql/src/app/app.ts
Normal file
85
web/ui/module/codemirror-promql/src/app/app.ts
Normal file
|
@ -0,0 +1,85 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { basicSetup } from '@codemirror/basic-setup';
|
||||
import { EditorState } from '@codemirror/state';
|
||||
import { EditorView } from '@codemirror/view';
|
||||
import { LanguageType, PromQLExtension } from '../lang-promql';
|
||||
import { customTheme, promQLHighlightMaterialTheme } from './theme';
|
||||
|
||||
const promqlExtension = new PromQLExtension();
|
||||
let editor: EditorView;
|
||||
|
||||
function getLanguageType(): LanguageType {
|
||||
const completionSelect = document.getElementById('languageType') as HTMLSelectElement;
|
||||
const completionValue = completionSelect.options[completionSelect.selectedIndex].value;
|
||||
switch (completionValue) {
|
||||
case 'promql':
|
||||
return LanguageType.PromQL;
|
||||
case 'metricName':
|
||||
return LanguageType.MetricName;
|
||||
default:
|
||||
return LanguageType.PromQL;
|
||||
}
|
||||
}
|
||||
|
||||
function setCompletion() {
|
||||
const completionSelect = document.getElementById('completion') as HTMLSelectElement;
|
||||
const completionValue = completionSelect.options[completionSelect.selectedIndex].value;
|
||||
switch (completionValue) {
|
||||
case 'offline':
|
||||
promqlExtension.setComplete();
|
||||
break;
|
||||
case 'prometheus':
|
||||
promqlExtension.setComplete({
|
||||
remote: {
|
||||
url: 'https://prometheus.demo.do.prometheus.io',
|
||||
},
|
||||
});
|
||||
break;
|
||||
default:
|
||||
promqlExtension.setComplete();
|
||||
}
|
||||
}
|
||||
|
||||
function createEditor() {
|
||||
let doc = '';
|
||||
if (editor) {
|
||||
// When the linter is changed, the editor has to be reloaded completely.
// So the first thing to do is to delete the previous editor entirely and recreate it from scratch.
// We should preserve the current text entered as well.
|
||||
doc = editor.state.sliceDoc(0, editor.state.doc.length);
|
||||
editor.destroy();
|
||||
}
|
||||
editor = new EditorView({
|
||||
state: EditorState.create({
|
||||
extensions: [basicSetup, promqlExtension.asExtension(getLanguageType()), promQLHighlightMaterialTheme, customTheme],
|
||||
doc: doc,
|
||||
}),
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
parent: document.querySelector('#editor')!,
|
||||
});
|
||||
}
|
||||
|
||||
function applyConfiguration(): void {
|
||||
setCompletion();
|
||||
createEditor();
|
||||
}
|
||||
|
||||
createEditor();
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion,@typescript-eslint/ban-ts-ignore
|
||||
// @ts-ignore
|
||||
document.getElementById('apply').addEventListener('click', function () {
|
||||
applyConfiguration();
|
||||
});
|
105
web/ui/module/codemirror-promql/src/app/theme.ts
Normal file
105
web/ui/module/codemirror-promql/src/app/theme.ts
Normal file
|
@ -0,0 +1,105 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { EditorView } from '@codemirror/view';
|
||||
import { HighlightStyle, tags } from '@codemirror/highlight';
|
||||
|
||||
// promQLHighlightMaterialTheme is based on the material theme defined here:
|
||||
// https://codemirror.net/theme/material.css
|
||||
export const promQLHighlightMaterialTheme = HighlightStyle.define([
|
||||
{
|
||||
tag: tags.deleted,
|
||||
textDecoration: 'line-through',
|
||||
},
|
||||
{
|
||||
tag: tags.inserted,
|
||||
textDecoration: 'underline',
|
||||
},
|
||||
{
|
||||
tag: tags.link,
|
||||
textDecoration: 'underline',
|
||||
},
|
||||
{
|
||||
tag: tags.strong,
|
||||
fontWeight: 'bold',
|
||||
},
|
||||
{
|
||||
tag: tags.emphasis,
|
||||
fontStyle: 'italic',
|
||||
},
|
||||
{
|
||||
tag: tags.invalid,
|
||||
color: '#f00',
|
||||
},
|
||||
{
|
||||
tag: tags.keyword,
|
||||
color: '#C792EA',
|
||||
},
|
||||
{
|
||||
tag: tags.operator,
|
||||
color: '#89DDFF',
|
||||
},
|
||||
{
|
||||
tag: tags.atom,
|
||||
color: '#F78C6C',
|
||||
},
|
||||
{
|
||||
tag: tags.number,
|
||||
color: '#FF5370',
|
||||
},
|
||||
{
|
||||
tag: tags.string,
|
||||
color: '#99b867',
|
||||
},
|
||||
{
|
||||
tag: [tags.escape, tags.regexp],
|
||||
color: '#e40',
|
||||
},
|
||||
{
|
||||
tag: tags.definition(tags.variableName),
|
||||
color: '#f07178',
|
||||
},
|
||||
{
|
||||
tag: tags.labelName,
|
||||
color: '#f07178',
|
||||
},
|
||||
{
|
||||
tag: tags.typeName,
|
||||
color: '#085',
|
||||
},
|
||||
{
|
||||
tag: tags.function(tags.variableName),
|
||||
color: '#C792EA',
|
||||
},
|
||||
{
|
||||
tag: tags.definition(tags.propertyName),
|
||||
color: '#00c',
|
||||
},
|
||||
{
|
||||
tag: tags.comment,
|
||||
color: '#546E7A',
|
||||
},
|
||||
]);
|
||||
|
||||
export const customTheme = EditorView.theme({
|
||||
$completionDetail: {
|
||||
marginLeft: '0.5em',
|
||||
float: 'right',
|
||||
color: '#9d4040',
|
||||
},
|
||||
$completionMatchedText: {
|
||||
color: '#83080a',
|
||||
textDecoration: 'none',
|
||||
fontWeight: 'bold',
|
||||
},
|
||||
});
|
@@ -0,0 +1,16 @@
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

export { PrometheusClient, PrometheusConfig, CacheConfig } from './prometheus';

export type FetchFn = (input: RequestInfo, init?: RequestInit) => Promise<Response>;
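As a usage note, this `FetchFn` type is what the `remote.fetchFn` option documented in the README accepts. A minimal sketch of a custom implementation that adds an authorization header might look like this; the token constant is a hypothetical credential and, in a consumer application, `FetchFn` would be imported from the lib rather than redeclared:

```typescript
import { PromQLExtension } from 'codemirror-promql';

const myToken = 'my-secret-token'; // hypothetical credential provided by your application

// Wraps the browser's fetch and injects an Authorization header on every request.
const authenticatedFetch = (input: RequestInfo, init?: RequestInit): Promise<Response> => {
  const headers = new Headers(init ? init.headers : undefined);
  headers.set('Authorization', `Bearer ${myToken}`);
  return fetch(input, { ...init, headers });
};

const promQL = new PromQLExtension().setComplete({ remote: { url: 'https://prometheus.land', fetchFn: authenticatedFetch } });
```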
@ -0,0 +1,391 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { FetchFn } from '.';
|
||||
import { Matcher } from '../types';
|
||||
import { labelMatchersToString } from '../parser';
|
||||
import LRUCache from 'lru-cache';
|
||||
|
||||
const apiPrefix = '/api/v1';
|
||||
const labelsEndpoint = apiPrefix + '/labels';
|
||||
const labelValuesEndpoint = apiPrefix + '/label/:name/values';
|
||||
const seriesEndpoint = apiPrefix + '/series';
|
||||
const metricMetadataEndpoint = apiPrefix + '/metadata';
|
||||
|
||||
export interface MetricMetadata {
|
||||
type: string;
|
||||
help: string;
|
||||
}
|
||||
|
||||
export interface PrometheusClient {
|
||||
labelNames(metricName?: string): Promise<string[]>;
|
||||
|
||||
// labelValues returns the list of values associated with the given labelName.
// If a metric name is provided, the values are restricted to the pair <MetricName, LabelName>.
|
||||
labelValues(labelName: string, metricName?: string, matchers?: Matcher[]): Promise<string[]>;
|
||||
|
||||
metricMetadata(): Promise<Record<string, MetricMetadata[]>>;
|
||||
|
||||
series(metricName: string, matchers?: Matcher[], labelName?: string): Promise<Map<string, string>[]>;
|
||||
|
||||
// metricNames returns a list of suggestions for the metric name given the `prefix`.
|
||||
// Note that the returned list can be a superset of those suggestions for the prefix (i.e., including ones without the
|
||||
// prefix), as codemirror will filter these out when displaying suggestions to the user.
|
||||
metricNames(prefix?: string): Promise<string[]>;
|
||||
}
|
||||
|
||||
export interface CacheConfig {
|
||||
// maxAge is the maximum amount of time that a cached completion item is valid before it needs to be refreshed.
|
||||
// It is in milliseconds. Default value: 300 000 (5min)
|
||||
maxAge?: number;
|
||||
// the cache can be initialized with a list of metrics
|
||||
initialMetricList?: string[];
|
||||
}
|
||||
|
||||
export interface PrometheusConfig {
|
||||
url: string;
|
||||
lookbackInterval?: number;
|
||||
httpErrorHandler?: (error: any) => void;
|
||||
fetchFn?: FetchFn;
|
||||
// cache allows the user to change the configuration of the cached Prometheus client (which is used by default)
|
||||
cache?: CacheConfig;
|
||||
httpMethod?: 'POST' | 'GET';
|
||||
}
|
||||
|
||||
interface APIResponse<T> {
|
||||
status: 'success' | 'error';
|
||||
data?: T;
|
||||
error?: string;
|
||||
warnings?: string[];
|
||||
}
|
||||
|
||||
// These are status codes where the Prometheus API still returns a valid JSON body,
|
||||
// with an error encoded within the JSON.
|
||||
const badRequest = 400;
|
||||
const unprocessableEntity = 422;
|
||||
const serviceUnavailable = 503;
|
||||
|
||||
// HTTPPrometheusClient is the HTTP client that should be used to retrieve information from the different endpoints provided by Prometheus.
|
||||
export class HTTPPrometheusClient implements PrometheusClient {
|
||||
private readonly lookbackInterval = 60 * 60 * 1000 * 12; //12 hours
|
||||
private readonly url: string;
|
||||
private readonly errorHandler?: (error: any) => void;
|
||||
private readonly httpMethod: 'POST' | 'GET' = 'POST';
|
||||
// For some reason, just assigning via "= fetch" here does not end up executing fetch correctly
|
||||
// when calling it, thus the indirection via another function wrapper.
|
||||
private readonly fetchFn: FetchFn = (input: RequestInfo, init?: RequestInit): Promise<Response> => fetch(input, init);
|
||||
|
||||
constructor(config: PrometheusConfig) {
|
||||
this.url = config.url;
|
||||
this.errorHandler = config.httpErrorHandler;
|
||||
if (config.lookbackInterval) {
|
||||
this.lookbackInterval = config.lookbackInterval;
|
||||
}
|
||||
if (config.fetchFn) {
|
||||
this.fetchFn = config.fetchFn;
|
||||
}
|
||||
if (config.httpMethod) {
|
||||
this.httpMethod = config.httpMethod;
|
||||
}
|
||||
}
|
||||
|
||||
labelNames(metricName?: string): Promise<string[]> {
|
||||
const end = new Date();
|
||||
const start = new Date(end.getTime() - this.lookbackInterval);
|
||||
if (metricName === undefined || metricName === '') {
|
||||
const request = this.buildRequest(
|
||||
labelsEndpoint,
|
||||
new URLSearchParams({
|
||||
start: start.toISOString(),
|
||||
end: end.toISOString(),
|
||||
})
|
||||
);
|
||||
// See https://prometheus.io/docs/prometheus/latest/querying/api/#getting-label-names
|
||||
return this.fetchAPI<string[]>(request.uri, {
|
||||
method: this.httpMethod,
|
||||
body: request.body,
|
||||
}).catch((error) => {
|
||||
if (this.errorHandler) {
|
||||
this.errorHandler(error);
|
||||
}
|
||||
return [];
|
||||
});
|
||||
}
|
||||
|
||||
return this.series(metricName).then((series) => {
|
||||
const labelNames = new Set<string>();
|
||||
for (const labelSet of series) {
|
||||
for (const [key] of Object.entries(labelSet)) {
|
||||
if (key === '__name__') {
|
||||
continue;
|
||||
}
|
||||
labelNames.add(key);
|
||||
}
|
||||
}
|
||||
return Array.from(labelNames);
|
||||
});
|
||||
}
|
||||
|
||||
// labelValues returns the list of values associated with the given labelName.
// If a metric name is provided, the values are restricted to the pair <MetricName, LabelName>.
|
||||
labelValues(labelName: string, metricName?: string, matchers?: Matcher[]): Promise<string[]> {
|
||||
const end = new Date();
|
||||
const start = new Date(end.getTime() - this.lookbackInterval);
|
||||
|
||||
if (!metricName || metricName.length === 0) {
|
||||
const params: URLSearchParams = new URLSearchParams({
|
||||
start: start.toISOString(),
|
||||
end: end.toISOString(),
|
||||
});
|
||||
// See https://prometheus.io/docs/prometheus/latest/querying/api/#querying-label-values
|
||||
return this.fetchAPI<string[]>(`${labelValuesEndpoint.replace(/:name/gi, labelName)}?${params}`).catch((error) => {
|
||||
if (this.errorHandler) {
|
||||
this.errorHandler(error);
|
||||
}
|
||||
return [];
|
||||
});
|
||||
}
|
||||
|
||||
return this.series(metricName, matchers, labelName).then((series) => {
|
||||
const labelValues = new Set<string>();
|
||||
for (const labelSet of series) {
|
||||
for (const [key, value] of Object.entries(labelSet)) {
|
||||
if (key === '__name__') {
|
||||
continue;
|
||||
}
|
||||
if (key === labelName) {
|
||||
labelValues.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return Array.from(labelValues);
|
||||
});
|
||||
}
|
||||
|
||||
metricMetadata(): Promise<Record<string, MetricMetadata[]>> {
|
||||
return this.fetchAPI<Record<string, MetricMetadata[]>>(metricMetadataEndpoint).catch((error) => {
|
||||
if (this.errorHandler) {
|
||||
this.errorHandler(error);
|
||||
}
|
||||
return {};
|
||||
});
|
||||
}
|
||||
|
||||
series(metricName: string, matchers?: Matcher[], labelName?: string): Promise<Map<string, string>[]> {
|
||||
const end = new Date();
|
||||
const start = new Date(end.getTime() - this.lookbackInterval);
|
||||
const request = this.buildRequest(
|
||||
seriesEndpoint,
|
||||
new URLSearchParams({
|
||||
start: start.toISOString(),
|
||||
end: end.toISOString(),
|
||||
'match[]': labelMatchersToString(metricName, matchers, labelName),
|
||||
})
|
||||
);
|
||||
// See https://prometheus.io/docs/prometheus/latest/querying/api/#finding-series-by-label-matchers
|
||||
return this.fetchAPI<Map<string, string>[]>(request.uri, {
|
||||
method: this.httpMethod,
|
||||
body: request.body,
|
||||
}).catch((error) => {
|
||||
if (this.errorHandler) {
|
||||
this.errorHandler(error);
|
||||
}
|
||||
return [];
|
||||
});
|
||||
}
|
||||
|
||||
metricNames(): Promise<string[]> {
|
||||
return this.labelValues('__name__');
|
||||
}
|
||||
|
||||
private fetchAPI<T>(resource: string, init?: RequestInit): Promise<T> {
|
||||
return this.fetchFn(this.url + resource, init)
|
||||
.then((res) => {
|
||||
if (!res.ok && ![badRequest, unprocessableEntity, serviceUnavailable].includes(res.status)) {
|
||||
throw new Error(res.statusText);
|
||||
}
|
||||
return res;
|
||||
})
|
||||
.then((res) => res.json())
|
||||
.then((apiRes: APIResponse<T>) => {
|
||||
if (apiRes.status === 'error') {
|
||||
throw new Error(apiRes.error !== undefined ? apiRes.error : 'missing "error" field in response JSON');
|
||||
}
|
||||
if (apiRes.data === undefined) {
|
||||
throw new Error('missing "data" field in response JSON');
|
||||
}
|
||||
return apiRes.data;
|
||||
});
|
||||
}
|
||||
|
||||
private buildRequest(endpoint: string, params: URLSearchParams) {
|
||||
let uri = endpoint;
|
||||
let body: URLSearchParams | null = params;
|
||||
if (this.httpMethod === 'GET') {
|
||||
uri = `${uri}?${params}`;
|
||||
body = null;
|
||||
}
|
||||
return { uri, body };
|
||||
}
|
||||
}
|
||||
|
||||
class Cache {
|
||||
// completeAssociation is the association between a metric name, a label name and the possible label values
|
||||
private readonly completeAssociation: LRUCache<string, Map<string, Set<string>>>;
|
||||
// metricMetadata is the association between a metric name and the associated metadata
|
||||
private metricMetadata: Record<string, MetricMetadata[]>;
|
||||
private labelValues: LRUCache<string, string[]>;
|
||||
private labelNames: string[];
|
||||
|
||||
constructor(config?: CacheConfig) {
|
||||
const maxAge = config && config.maxAge ? config.maxAge : 5 * 60 * 1000;
|
||||
this.completeAssociation = new LRUCache<string, Map<string, Set<string>>>(maxAge);
|
||||
this.metricMetadata = {};
|
||||
this.labelValues = new LRUCache<string, string[]>(maxAge);
|
||||
this.labelNames = [];
|
||||
if (config?.initialMetricList) {
|
||||
this.setLabelValues('__name__', config.initialMetricList);
|
||||
}
|
||||
}
|
||||
|
||||
setAssociations(metricName: string, series: Map<string, string>[]): void {
|
||||
series.forEach((labelSet: Map<string, string>) => {
|
||||
let currentAssociation = this.completeAssociation.get(metricName);
|
||||
if (!currentAssociation) {
|
||||
currentAssociation = new Map<string, Set<string>>();
|
||||
this.completeAssociation.set(metricName, currentAssociation);
|
||||
}
|
||||
|
||||
for (const [key, value] of Object.entries(labelSet)) {
|
||||
if (key === '__name__') {
|
||||
continue;
|
||||
}
|
||||
const labelValues = currentAssociation.get(key);
|
||||
if (labelValues === undefined) {
|
||||
currentAssociation.set(
|
||||
key,
|
||||
new Set<string>([value])
|
||||
);
|
||||
} else {
|
||||
labelValues.add(value);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
setMetricMetadata(metadata: Record<string, MetricMetadata[]>): void {
|
||||
this.metricMetadata = metadata;
|
||||
}
|
||||
|
||||
getMetricMetadata(): Record<string, MetricMetadata[]> {
|
||||
return this.metricMetadata;
|
||||
}
|
||||
|
||||
setLabelNames(labelNames: string[]): void {
|
||||
this.labelNames = labelNames;
|
||||
}
|
||||
|
||||
getLabelNames(metricName?: string): string[] {
|
||||
if (!metricName || metricName.length === 0) {
|
||||
return this.labelNames;
|
||||
}
|
||||
const labelSet = this.completeAssociation.get(metricName);
|
||||
return labelSet ? Array.from(labelSet.keys()) : [];
|
||||
}
|
||||
|
||||
setLabelValues(labelName: string, labelValues: string[]): void {
|
||||
this.labelValues.set(labelName, labelValues);
|
||||
}
|
||||
|
||||
getLabelValues(labelName: string, metricName?: string): string[] {
|
||||
if (!metricName || metricName.length === 0) {
|
||||
const result = this.labelValues.get(labelName);
|
||||
return result ? result : [];
|
||||
}
|
||||
|
||||
const labelSet = this.completeAssociation.get(metricName);
|
||||
if (labelSet) {
|
||||
const labelValues = labelSet.get(labelName);
|
||||
return labelValues ? Array.from(labelValues) : [];
|
||||
}
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export class CachedPrometheusClient implements PrometheusClient {
|
||||
private readonly cache: Cache;
|
||||
private readonly client: PrometheusClient;
|
||||
|
||||
constructor(client: PrometheusClient, config?: CacheConfig) {
|
||||
this.client = client;
|
||||
this.cache = new Cache(config);
|
||||
}
|
||||
|
||||
labelNames(metricName?: string): Promise<string[]> {
|
||||
const cachedLabel = this.cache.getLabelNames(metricName);
|
||||
if (cachedLabel && cachedLabel.length > 0) {
|
||||
return Promise.resolve(cachedLabel);
|
||||
}
|
||||
|
||||
if (metricName === undefined || metricName === '') {
|
||||
return this.client.labelNames().then((labelNames) => {
|
||||
this.cache.setLabelNames(labelNames);
|
||||
return labelNames;
|
||||
});
|
||||
}
|
||||
return this.series(metricName).then(() => {
|
||||
return this.cache.getLabelNames(metricName);
|
||||
});
|
||||
}
|
||||
|
||||
labelValues(labelName: string, metricName?: string): Promise<string[]> {
|
||||
const cachedLabel = this.cache.getLabelValues(labelName, metricName);
|
||||
if (cachedLabel && cachedLabel.length > 0) {
|
||||
return Promise.resolve(cachedLabel);
|
||||
}
|
||||
|
||||
if (metricName === undefined || metricName === '') {
|
||||
return this.client.labelValues(labelName).then((labelValues) => {
|
||||
this.cache.setLabelValues(labelName, labelValues);
|
||||
return labelValues;
|
||||
});
|
||||
}
|
||||
|
||||
return this.series(metricName).then(() => {
|
||||
return this.cache.getLabelValues(labelName, metricName);
|
||||
});
|
||||
}
|
||||
|
||||
metricMetadata(): Promise<Record<string, MetricMetadata[]>> {
|
||||
const cachedMetadata = this.cache.getMetricMetadata();
|
||||
if (cachedMetadata && Object.keys(cachedMetadata).length > 0) {
|
||||
return Promise.resolve(cachedMetadata);
|
||||
}
|
||||
|
||||
return this.client.metricMetadata().then((metadata) => {
|
||||
this.cache.setMetricMetadata(metadata);
|
||||
return this.cache.getMetricMetadata();
|
||||
});
|
||||
}
|
||||
|
||||
series(metricName: string): Promise<Map<string, string>[]> {
|
||||
return this.client.series(metricName).then((series) => {
|
||||
this.cache.setAssociations(metricName, series);
|
||||
return series;
|
||||
});
|
||||
}
|
||||
|
||||
metricNames(): Promise<string[]> {
|
||||
return this.labelValues('__name__');
|
||||
}
|
||||
}
|
(File diff suppressed because it is too large.)
|
@ -0,0 +1,640 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { CompleteStrategy } from './index';
|
||||
import { SyntaxNode } from 'lezer-tree';
|
||||
import { PrometheusClient } from '../client';
|
||||
import {
|
||||
Add,
|
||||
AggregateExpr,
|
||||
And,
|
||||
BinaryExpr,
|
||||
BinModifiers,
|
||||
Bool,
|
||||
Div,
|
||||
Duration,
|
||||
Eql,
|
||||
EqlRegex,
|
||||
EqlSingle,
|
||||
Expr,
|
||||
FunctionCallArgs,
|
||||
FunctionCallBody,
|
||||
GroupingLabel,
|
||||
GroupingLabels,
|
||||
Gte,
|
||||
Gtr,
|
||||
Identifier,
|
||||
LabelMatcher,
|
||||
LabelMatchers,
|
||||
LabelMatchList,
|
||||
LabelName,
|
||||
Lss,
|
||||
Lte,
|
||||
MatchOp,
|
||||
MatrixSelector,
|
||||
MetricIdentifier,
|
||||
Mod,
|
||||
Mul,
|
||||
Neq,
|
||||
NeqRegex,
|
||||
NumberLiteral,
|
||||
OffsetExpr,
|
||||
Or,
|
||||
Pow,
|
||||
PromQL,
|
||||
StepInvariantExpr,
|
||||
StringLiteral,
|
||||
Sub,
|
||||
SubqueryExpr,
|
||||
Unless,
|
||||
VectorSelector,
|
||||
} from 'lezer-promql';
|
||||
import { Completion, CompletionContext, CompletionResult } from '@codemirror/autocomplete';
|
||||
import { EditorState } from '@codemirror/state';
|
||||
import { buildLabelMatchers, containsAtLeastOneChild, containsChild, retrieveAllRecursiveNodes, walkBackward, walkThrough } from '../parser';
|
||||
import {
|
||||
aggregateOpModifierTerms,
|
||||
aggregateOpTerms,
|
||||
atModifierTerms,
|
||||
binOpModifierTerms,
|
||||
binOpTerms,
|
||||
durationTerms,
|
||||
functionIdentifierTerms,
|
||||
matchOpTerms,
|
||||
numberTerms,
|
||||
snippets,
|
||||
} from './promql.terms';
|
||||
import { Matcher } from '../types';
|
||||
import { syntaxTree } from '@codemirror/language';
|
||||
|
||||
const autocompleteNodes: { [key: string]: Completion[] } = {
|
||||
matchOp: matchOpTerms,
|
||||
binOp: binOpTerms,
|
||||
duration: durationTerms,
|
||||
binOpModifier: binOpModifierTerms,
|
||||
atModifier: atModifierTerms,
|
||||
functionIdentifier: functionIdentifierTerms,
|
||||
aggregateOp: aggregateOpTerms,
|
||||
aggregateOpModifier: aggregateOpModifierTerms,
|
||||
number: numberTerms,
|
||||
};
|
||||
|
||||
// ContextKind enumerates the different kinds of context that can be determined by the autocompletion.
|
||||
export enum ContextKind {
|
||||
// dynamic autocompletion (requires a remote server)
|
||||
MetricName,
|
||||
LabelName,
|
||||
LabelValue,
|
||||
// static autocompletion
|
||||
Function,
|
||||
Aggregation,
|
||||
BinOpModifier,
|
||||
BinOp,
|
||||
MatchOp,
|
||||
AggregateOpModifier,
|
||||
Duration,
|
||||
Offset,
|
||||
Bool,
|
||||
AtModifiers,
|
||||
Number,
|
||||
}
|
||||
|
||||
export interface Context {
|
||||
kind: ContextKind;
|
||||
metricName?: string;
|
||||
labelName?: string;
|
||||
matchers?: Matcher[];
|
||||
}
|
||||
|
||||
function getMetricNameInVectorSelector(tree: SyntaxNode, state: EditorState): string {
|
||||
// Find if there is a defined metric name. Should be used to autocomplete a labelValue or a labelName
|
||||
// First find the parent "VectorSelector" to then be able to find the subChild "MetricIdentifier" if it exists.
|
||||
let currentNode: SyntaxNode | null = walkBackward(tree, VectorSelector);
|
||||
if (!currentNode) {
|
||||
// Weird case that shouldn't happen, because "VectorSelector" is by definition the parent of the LabelMatchers.
|
||||
return '';
|
||||
}
|
||||
currentNode = walkThrough(currentNode, MetricIdentifier, Identifier);
|
||||
if (!currentNode) {
|
||||
return '';
|
||||
}
|
||||
return state.sliceDoc(currentNode.from, currentNode.to);
|
||||
}
|
||||
|
||||
function arrayToCompletionResult(data: Completion[], from: number, to: number, includeSnippet = false, span = true): CompletionResult {
|
||||
const options = data;
|
||||
if (includeSnippet) {
|
||||
options.push(...snippets);
|
||||
}
|
||||
return {
|
||||
from: from,
|
||||
to: to,
|
||||
options: options,
|
||||
span: span ? /^[a-zA-Z0-9_:]+$/ : undefined,
|
||||
} as CompletionResult;
|
||||
}
|
||||
|
||||
// computeStartCompleteLabelPositionInLabelMatcherOrInGroupingLabel calculates the start position only when the node is a LabelMatchers or a GroupingLabels
|
||||
function computeStartCompleteLabelPositionInLabelMatcherOrInGroupingLabel(node: SyntaxNode, pos: number): number {
|
||||
// Here we can have two different situations:
|
||||
// 1. `metric{}` or `sum by()` with the cursor between the brackets
|
||||
// and so we have to increment the starting position to avoid considering the open bracket when filtering the autocompletion list.
|
||||
// 2. `metric{foo="bar",}` or `sum by(foo,)` with the cursor after the comma.
|
||||
// Then the start position should be the current position to avoid considering the previous labelMatcher/groupingLabel when filtering the autocompletion list.
|
||||
let start = node.from + 1;
|
||||
if (node.firstChild !== null) {
|
||||
// here it means the LabelMatchers / GroupingLabels has a child, which is not possible with the expression `metric{}`. So we are likely trying to autocomplete the label list after a comma
|
||||
start = pos;
|
||||
}
|
||||
return start;
|
||||
}
|
||||
|
||||
// computeStartCompletePosition calculates the start position of the autocompletion.
|
||||
// It is an important step because the start position will be used by CodeMirror to find the string and then use it to filter the CompletionResult.
|
||||
// A wrong `start` position will lead to the completion not working.
|
||||
// Note: this method is exported only for testing purposes.
|
||||
export function computeStartCompletePosition(node: SyntaxNode, pos: number): number {
|
||||
let start = node.from;
|
||||
if (node.type.id === LabelMatchers || node.type.id === GroupingLabels) {
|
||||
start = computeStartCompleteLabelPositionInLabelMatcherOrInGroupingLabel(node, pos);
|
||||
} else if (node.type.id === FunctionCallBody || (node.type.id === StringLiteral && node.parent?.type.id === LabelMatcher)) {
|
||||
// When the cursor is between brackets or quotes, we need to increment the starting position to avoid considering the opening bracket / the opening quote of the string.
|
||||
start++;
|
||||
} else if (
|
||||
node.type.id === OffsetExpr ||
|
||||
(node.type.id === NumberLiteral && node.parent?.type.id === 0 && node.parent.parent?.type.id === SubqueryExpr) ||
|
||||
(node.type.id === 0 &&
|
||||
(node.parent?.type.id === OffsetExpr ||
|
||||
node.parent?.type.id === MatrixSelector ||
|
||||
(node.parent?.type.id === SubqueryExpr && containsAtLeastOneChild(node.parent, Duration))))
|
||||
) {
|
||||
start = pos;
|
||||
}
|
||||
return start;
|
||||
}
|
||||
|
||||
// analyzeCompletion determines what should be autocompleted.
|
||||
// The value of the autocompletion is then calculated by the function buildCompletion.
|
||||
// Note: this method is exported for testing purposes only. Do not use it directly.
|
||||
export function analyzeCompletion(state: EditorState, node: SyntaxNode): Context[] {
|
||||
const result: Context[] = [];
|
||||
switch (node.type.id) {
|
||||
case 0: // 0 is the id of the error node
|
||||
if (node.parent?.type.id === OffsetExpr) {
|
||||
// we are likely in the given situation:
|
||||
// `metric_name offset 5` that leads to this tree:
|
||||
// `Expr(OffsetExpr(Expr(VectorSelector(MetricIdentifier(Identifier))),Offset,⚠))`
|
||||
// Here we can just autocomplete a duration.
|
||||
result.push({ kind: ContextKind.Duration });
|
||||
break;
|
||||
}
|
||||
if (node.parent?.type.id === LabelMatcher) {
|
||||
// In this case the current token is not itself a valid match op yet:
|
||||
// metric_name{labelName!}
|
||||
result.push({ kind: ContextKind.MatchOp });
|
||||
break;
|
||||
}
|
||||
if (node.parent?.type.id === MatrixSelector) {
|
||||
// we are likely in the given situation:
|
||||
// `metric_name{}[5]`
|
||||
// We can also just autocomplete a duration
|
||||
result.push({ kind: ContextKind.Duration });
|
||||
break;
|
||||
}
|
||||
if (node.parent?.type.id === SubqueryExpr && containsAtLeastOneChild(node.parent, Duration)) {
|
||||
// we are likely in the given situation:
|
||||
// `rate(foo[5d:5])`
|
||||
// so we should autocomplete a duration
|
||||
result.push({ kind: ContextKind.Duration });
|
||||
break;
|
||||
}
|
||||
// when we are in the situation 'metric_name !', we have the following tree
|
||||
// Expr(VectorSelector(MetricIdentifier(Identifier),⚠))
|
||||
// We should try to know if the char '!' is part of a binOp.
|
||||
// Note: as it is quite experimental, it may require more conditions and checks on the current tree (parent, other children at the same level, etc.).
|
||||
const operator = state.sliceDoc(node.from, node.to);
|
||||
if (binOpTerms.filter((term) => term.label.includes(operator)).length > 0) {
|
||||
result.push({ kind: ContextKind.BinOp });
|
||||
}
|
||||
break;
|
||||
case Identifier:
|
||||
// sometimes an Identifier has an error as parent. This should be treated with priority
|
||||
if (node.parent?.type.id === 0) {
|
||||
const parent = node.parent;
|
||||
if (parent.parent?.type.id === StepInvariantExpr) {
|
||||
// we are likely in the given situation:
|
||||
// `expr @ s`
|
||||
// we can autocomplete start / end
|
||||
result.push({ kind: ContextKind.AtModifiers });
|
||||
break;
|
||||
}
|
||||
if (parent.parent?.type.id === AggregateExpr) {
|
||||
// it matches 'sum() b'. So here we can autocomplete:
|
||||
// - the aggregate operation modifier
|
||||
// - the binary operation (since it's not mandatory to have an aggregate operation modifier)
|
||||
result.push({ kind: ContextKind.AggregateOpModifier }, { kind: ContextKind.BinOp });
|
||||
break;
|
||||
}
|
||||
if (parent.parent?.type.id === VectorSelector) {
|
||||
// it matches 'sum b'. So here we also have to autocomplete the aggregate operation modifier only
|
||||
// if the associated metricIdentifier is matching an aggregation operation.
|
||||
// Note: here is the corresponding tree in order to understand the situation:
|
||||
// Expr(
|
||||
// VectorSelector(
|
||||
// MetricIdentifier(Identifier),
|
||||
// ⚠(Identifier)
|
||||
// )
|
||||
// )
|
||||
const operator = getMetricNameInVectorSelector(node, state);
|
||||
if (aggregateOpTerms.filter((term) => term.label === operator).length > 0) {
|
||||
result.push({ kind: ContextKind.AggregateOpModifier });
|
||||
}
|
||||
// It's possible it also matches the expr 'metric_name unle'.
|
||||
// It's also possible that the operator is also a metric even if it matches the list of aggregation functions.
|
||||
// So we also have to autocomplete the binary operator.
|
||||
//
|
||||
// The expr `metric_name off` leads to the same tree. So we have to provide the offset keyword too here.
|
||||
result.push({ kind: ContextKind.BinOp }, { kind: ContextKind.Offset });
|
||||
break;
|
||||
}
|
||||
}
|
||||
// As the leaf Identifier can come from a lot of different cases, we have to take a bit of time to analyze the tree
|
||||
// in order to know what we have to autocomplete exactly.
|
||||
// Here are some cases:
|
||||
// 1. metric_name / ignor --> we should autocomplete the BinOpModifier + metric/function/aggregation
|
||||
// 2. http_requests_total{method="GET"} off --> offset or binOp should be autocompleted here
|
||||
// 3. rate(foo[5m]) un --> offset or binOp should be autocompleted
|
||||
// 4. sum(http_requests_total{method="GET"} off) --> offset or binOp should be autocompleted
|
||||
// 5. sum(http_requests_total{method="GET"} / o) --> BinOpModifier + metric/function/aggregation
|
||||
// All examples above give a different tree each time but end up being treated in this case.
|
||||
// But they all have the following common tree pattern:
|
||||
// Parent( Expr(...),
|
||||
// ... ,
|
||||
// Expr(VectorSelector(MetricIdentifier(Identifier)))
|
||||
// )
|
||||
//
|
||||
// So the first thing to do is to get the `Parent` and to determine whether we are in this configuration.
|
||||
// Otherwise we would just have to autocomplete the metric / function / aggregation.
|
||||
|
||||
const parent = node.parent?.parent?.parent?.parent;
|
||||
if (!parent) {
|
||||
// this case can occur if the topNode is no longer PromQL but MetricName.
|
||||
// In this particular case, we just want to autocomplete the metric
|
||||
result.push({ kind: ContextKind.MetricName, metricName: state.sliceDoc(node.from, node.to) });
|
||||
break;
|
||||
}
|
||||
// now we have to know if we have two Expr in the direct children of the `parent`
|
||||
const containExprTwice = containsChild(parent, Expr, Expr);
|
||||
if (containExprTwice) {
|
||||
if (parent.type.id === BinaryExpr && !containsAtLeastOneChild(parent, 0)) {
|
||||
// We are likely in the case 1 or 5
|
||||
result.push(
|
||||
{ kind: ContextKind.MetricName, metricName: state.sliceDoc(node.from, node.to) },
|
||||
{ kind: ContextKind.Function },
|
||||
{ kind: ContextKind.Aggregation },
|
||||
{ kind: ContextKind.BinOpModifier },
|
||||
{ kind: ContextKind.Number }
|
||||
);
|
||||
// in case the BinaryExpr is a comparison, we should autocomplete the `bool` keyword. But only if it is not present.
|
||||
// When the `bool` keyword is NOT present, then the expression looks like this:
|
||||
// BinaryExpr( Expr(...), Gtr , BinModifiers, Expr(...) )
|
||||
// When the `bool` keyword is present, then the expression looks like this:
|
||||
// BinaryExpr( Expr(...), Gtr , BinModifiers(Bool), Expr(...) )
|
||||
// To know if it is not present, we just have to check if the Bool is not present as a child of the BinModifiers.
|
||||
if (containsAtLeastOneChild(parent, Eql, Gte, Gtr, Lte, Lss, Neq) && !walkThrough(parent, BinModifiers, Bool)) {
|
||||
result.push({ kind: ContextKind.Bool });
|
||||
}
|
||||
} else if (parent.type.id !== BinaryExpr || (parent.type.id === BinaryExpr && containsAtLeastOneChild(parent, 0))) {
|
||||
result.push({ kind: ContextKind.BinOp }, { kind: ContextKind.Offset });
|
||||
}
|
||||
} else {
|
||||
result.push(
|
||||
{ kind: ContextKind.MetricName, metricName: state.sliceDoc(node.from, node.to) },
|
||||
{ kind: ContextKind.Function },
|
||||
{ kind: ContextKind.Aggregation }
|
||||
);
|
||||
if (parent.type.id !== FunctionCallArgs && parent.type.id !== MatrixSelector) {
|
||||
// it's to avoid autocompleting a number in situations where it shouldn't be suggested.
|
||||
// Like with `sum by(rat)`
|
||||
result.push({ kind: ContextKind.Number });
|
||||
}
|
||||
}
|
||||
break;
|
||||
case PromQL:
|
||||
if (!node.firstChild) {
|
||||
// this situation can happen when there is nothing in the text area and the user is explicitly triggering the autocompletion (with ctrl + space)
|
||||
result.push(
|
||||
{ kind: ContextKind.MetricName, metricName: '' },
|
||||
{ kind: ContextKind.Function },
|
||||
{ kind: ContextKind.Aggregation },
|
||||
{ kind: ContextKind.Number }
|
||||
);
|
||||
}
|
||||
break;
|
||||
case GroupingLabels:
|
||||
// In this case we are in the given situation:
|
||||
// sum by ()
|
||||
// So we have to autocomplete any labelName
|
||||
result.push({ kind: ContextKind.LabelName });
|
||||
break;
|
||||
case LabelMatchers:
|
||||
// In that case we are in the given situation:
|
||||
// metric_name{} or {}
|
||||
// so we either autocomplete any kind of labelName or only the labelNames associated with the metric
|
||||
result.push({ kind: ContextKind.LabelName, metricName: getMetricNameInVectorSelector(node, state) });
|
||||
break;
|
||||
case LabelName:
|
||||
if (node.parent?.type.id === GroupingLabel) {
|
||||
// In this case we are in the given situation:
|
||||
// sum by (myL)
|
||||
// So we have to continue to autocomplete any kind of labelName
|
||||
result.push({ kind: ContextKind.LabelName });
|
||||
} else if (node.parent?.type.id === LabelMatcher) {
|
||||
// In that case we are in the given situation:
|
||||
// metric_name{myL} or {myL}
|
||||
// so we either continue to autocomplete any kind of labelName or
|
||||
// continue to autocomplete only the labelNames associated with the metric
|
||||
result.push({ kind: ContextKind.LabelName, metricName: getMetricNameInVectorSelector(node, state) });
|
||||
}
|
||||
break;
|
||||
case StringLiteral:
|
||||
if (node.parent?.type.id === LabelMatcher) {
|
||||
// In this case we are in the given situation:
|
||||
// metric_name{labelName=""}
|
||||
// So we can autocomplete the labelValue
|
||||
|
||||
// Get the labelName.
|
||||
// By definition it's the firstChild: https://github.com/promlabs/lezer-promql/blob/0ef65e196a8db6a989ff3877d57fd0447d70e971/src/promql.grammar#L250
|
||||
let labelName = '';
|
||||
if (node.parent.firstChild?.type.id === LabelName) {
|
||||
labelName = state.sliceDoc(node.parent.firstChild.from, node.parent.firstChild.to);
|
||||
}
|
||||
// then find the metricName if it exists
|
||||
const metricName = getMetricNameInVectorSelector(node, state);
|
||||
// finally get the full matcher available
|
||||
const labelMatchers = buildLabelMatchers(retrieveAllRecursiveNodes(walkBackward(node, LabelMatchList), LabelMatchList, LabelMatcher), state);
|
||||
result.push({
|
||||
kind: ContextKind.LabelValue,
|
||||
metricName: metricName,
|
||||
labelName: labelName,
|
||||
matchers: labelMatchers,
|
||||
});
|
||||
}
|
||||
break;
|
||||
case NumberLiteral:
|
||||
if (node.parent?.type.id === 0 && node.parent.parent?.type.id === SubqueryExpr) {
|
||||
// Here we are likely in this situation:
|
||||
// `go[5d:4]`
|
||||
// and we have the given tree:
|
||||
// Expr( SubqueryExpr(
|
||||
// Expr(VectorSelector(MetricIdentifier(Identifier))),
|
||||
// Duration, Duration, ⚠(NumberLiteral)
|
||||
// ))
|
||||
// So we should continue to autocomplete a duration
|
||||
result.push({ kind: ContextKind.Duration });
|
||||
} else {
|
||||
result.push({ kind: ContextKind.Number });
|
||||
}
|
||||
break;
|
||||
case Duration:
|
||||
case OffsetExpr:
|
||||
result.push({ kind: ContextKind.Duration });
|
||||
break;
|
||||
case FunctionCallBody:
|
||||
// In this case we are in the given situation:
|
||||
// sum() or in rate()
|
||||
// with the cursor between the brackets. So we can autocomplete the metric, the function and the aggregation.
|
||||
result.push({ kind: ContextKind.MetricName, metricName: '' }, { kind: ContextKind.Function }, { kind: ContextKind.Aggregation });
|
||||
break;
|
||||
case Neq:
|
||||
if (node.parent?.type.id === MatchOp) {
|
||||
result.push({ kind: ContextKind.MatchOp });
|
||||
} else if (node.parent?.type.id === BinaryExpr) {
|
||||
result.push({ kind: ContextKind.BinOp });
|
||||
}
|
||||
break;
|
||||
case EqlSingle:
|
||||
case EqlRegex:
|
||||
case NeqRegex:
|
||||
case MatchOp:
|
||||
result.push({ kind: ContextKind.MatchOp });
|
||||
break;
|
||||
case Pow:
|
||||
case Mul:
|
||||
case Div:
|
||||
case Mod:
|
||||
case Add:
|
||||
case Sub:
|
||||
case Eql:
|
||||
case Gte:
|
||||
case Gtr:
|
||||
case Lte:
|
||||
case Lss:
|
||||
case And:
|
||||
case Unless:
|
||||
case Or:
|
||||
case BinaryExpr:
|
||||
result.push({ kind: ContextKind.BinOp });
|
||||
break;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// HybridComplete provides a full completion result with or without a remote prometheus.
|
||||
export class HybridComplete implements CompleteStrategy {
|
||||
private readonly prometheusClient: PrometheusClient | undefined;
|
||||
private readonly maxMetricsMetadata: number;
|
||||
|
||||
constructor(prometheusClient?: PrometheusClient, maxMetricsMetadata = 10000) {
|
||||
this.prometheusClient = prometheusClient;
|
||||
this.maxMetricsMetadata = maxMetricsMetadata;
|
||||
}
|
||||
|
||||
getPrometheusClient(): PrometheusClient | undefined {
|
||||
return this.prometheusClient;
|
||||
}
|
||||
|
||||
promQL(context: CompletionContext): Promise<CompletionResult | null> | CompletionResult | null {
|
||||
const { state, pos } = context;
|
||||
const tree = syntaxTree(state).resolve(pos, -1);
|
||||
const contexts = analyzeCompletion(state, tree);
|
||||
let asyncResult: Promise<Completion[]> = Promise.resolve([]);
|
||||
let completeSnippet = false;
|
||||
let span = true;
|
||||
for (const context of contexts) {
|
||||
switch (context.kind) {
|
||||
case ContextKind.Aggregation:
|
||||
completeSnippet = true;
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return result.concat(autocompleteNodes.aggregateOp);
|
||||
});
|
||||
break;
|
||||
case ContextKind.Function:
|
||||
completeSnippet = true;
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return result.concat(autocompleteNodes.functionIdentifier);
|
||||
});
|
||||
break;
|
||||
case ContextKind.BinOpModifier:
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return result.concat(autocompleteNodes.binOpModifier);
|
||||
});
|
||||
break;
|
||||
case ContextKind.BinOp:
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return result.concat(autocompleteNodes.binOp);
|
||||
});
|
||||
break;
|
||||
case ContextKind.MatchOp:
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return result.concat(autocompleteNodes.matchOp);
|
||||
});
|
||||
break;
|
||||
case ContextKind.AggregateOpModifier:
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return result.concat(autocompleteNodes.aggregateOpModifier);
|
||||
});
|
||||
break;
|
||||
case ContextKind.Duration:
|
||||
span = false;
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return result.concat(autocompleteNodes.duration);
|
||||
});
|
||||
break;
|
||||
case ContextKind.Offset:
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return result.concat([{ label: 'offset' }]);
|
||||
});
|
||||
break;
|
||||
case ContextKind.Bool:
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return result.concat([{ label: 'bool' }]);
|
||||
});
|
||||
break;
|
||||
case ContextKind.AtModifiers:
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return result.concat(autocompleteNodes.atModifier);
|
||||
});
|
||||
break;
|
||||
case ContextKind.Number:
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return result.concat(autocompleteNodes.number);
|
||||
});
|
||||
break;
|
||||
case ContextKind.MetricName:
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return this.autocompleteMetricName(result, context);
|
||||
});
|
||||
break;
|
||||
case ContextKind.LabelName:
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return this.autocompleteLabelName(result, context);
|
||||
});
|
||||
break;
|
||||
case ContextKind.LabelValue:
|
||||
asyncResult = asyncResult.then((result) => {
|
||||
return this.autocompleteLabelValue(result, context);
|
||||
});
|
||||
}
|
||||
}
|
||||
return asyncResult.then((result) => {
|
||||
return arrayToCompletionResult(result, computeStartCompletePosition(tree, pos), pos, completeSnippet, span);
|
||||
});
|
||||
}
|
||||
|
||||
private autocompleteMetricName(result: Completion[], context: Context): Completion[] | Promise<Completion[]> {
|
||||
if (!this.prometheusClient) {
|
||||
return result;
|
||||
}
|
||||
const metricCompletion = new Map<string, Completion>();
|
||||
return this.prometheusClient
|
||||
.metricNames(context.metricName)
|
||||
.then((metricNames: string[]) => {
|
||||
for (const metricName of metricNames) {
|
||||
metricCompletion.set(metricName, { label: metricName, type: 'constant' });
|
||||
}
|
||||
|
||||
// avoid fetching all metric metadata if the prometheus server has too many metrics
|
||||
if (metricNames.length <= this.maxMetricsMetadata) {
|
||||
// in order to enrich the completion list of the metric,
|
||||
// we are trying to find the associated metadata
|
||||
return this.prometheusClient?.metricMetadata();
|
||||
}
|
||||
})
|
||||
.then((metricMetadata) => {
|
||||
if (metricMetadata) {
|
||||
for (const [metricName, node] of metricCompletion) {
|
||||
// For histograms and summaries, the metadata is only exposed for the base metric name,
|
||||
// not separately for the _count, _sum, and _bucket time series.
|
||||
const metadata = metricMetadata[metricName.replace(/(_count|_sum|_bucket)$/, '')];
|
||||
if (metadata) {
|
||||
if (metadata.length > 1) {
|
||||
// it means the metricName has different possible help texts and types
|
||||
for (const m of metadata) {
|
||||
if (node.detail === '') {
|
||||
node.detail = m.type;
|
||||
} else if (node.detail !== m.type) {
|
||||
node.detail = 'unknown';
|
||||
node.info = 'multiple different definitions for this metric';
|
||||
}
|
||||
|
||||
if (node.info === '') {
|
||||
node.info = m.help;
|
||||
} else if (node.info !== m.help) {
|
||||
node.info = 'multiple different definitions for this metric';
|
||||
}
|
||||
}
|
||||
} else if (metadata.length === 1) {
|
||||
let { type, help } = metadata[0];
|
||||
if (type === 'histogram' || type === 'summary') {
|
||||
if (metricName.endsWith('_count')) {
|
||||
type = 'counter';
|
||||
help = `The total number of observations for: ${help}`;
|
||||
}
|
||||
if (metricName.endsWith('_sum')) {
|
||||
type = 'counter';
|
||||
help = `The total sum of observations for: ${help}`;
|
||||
}
|
||||
if (metricName.endsWith('_bucket')) {
|
||||
type = 'counter';
|
||||
help = `The total count of observations for a bucket in the histogram: ${help}`;
|
||||
}
|
||||
}
|
||||
node.detail = type;
|
||||
node.info = help;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.concat(Array.from(metricCompletion.values()));
|
||||
});
|
||||
}
|
||||
|
||||
private autocompleteLabelName(result: Completion[], context: Context): Completion[] | Promise<Completion[]> {
|
||||
if (!this.prometheusClient) {
|
||||
return result;
|
||||
}
|
||||
return this.prometheusClient.labelNames(context.metricName).then((labelNames: string[]) => {
|
||||
return result.concat(labelNames.map((value) => ({ label: value, type: 'constant' })));
|
||||
});
|
||||
}
|
||||
|
||||
private autocompleteLabelValue(result: Completion[], context: Context): Completion[] | Promise<Completion[]> {
|
||||
if (!this.prometheusClient || !context.labelName) {
|
||||
return result;
|
||||
}
|
||||
return this.prometheusClient.labelValues(context.labelName, context.metricName, context.matchers).then((labelValues: string[]) => {
|
||||
return result.concat(labelValues.map((value) => ({ label: value, type: 'text' })));
|
||||
});
|
||||
}
|
||||
}
|
|
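A hedged sketch of plugging the HybridComplete class above into CodeMirror's autocompletion. The `override` option of `autocompletion()` from `@codemirror/autocomplete` is assumed here; without a PrometheusClient only static completion (functions, aggregations, keywords, snippets) is offered.

```typescript
import { autocompletion, CompletionContext } from '@codemirror/autocomplete';
import { HybridComplete } from './hybrid';

// No PrometheusClient: static completion only. Pass a client for remote-backed completion.
const complete = new HybridComplete();

// Delegate every completion request to HybridComplete.promQL().
const autocompleteExtension = autocompletion({
  override: [(context: CompletionContext) => complete.promQL(context)],
});
```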
@ -0,0 +1,49 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { HybridComplete } from './hybrid';
|
||||
import { CachedPrometheusClient, HTTPPrometheusClient, PrometheusClient, PrometheusConfig } from '../client/prometheus';
|
||||
import { CompletionContext, CompletionResult } from '@codemirror/autocomplete';
|
||||
|
||||
// CompleteStrategy is the interface that defines the single method that returns a CompletionResult.
|
||||
// Every different completion mode must implement this interface.
|
||||
export interface CompleteStrategy {
|
||||
promQL(context: CompletionContext): Promise<CompletionResult | null> | CompletionResult | null;
|
||||
}
|
||||
|
||||
// CompleteConfiguration should be used to customize the autocompletion.
|
||||
export interface CompleteConfiguration {
|
||||
remote?: PrometheusConfig | PrometheusClient;
|
||||
// maxMetricsMetadata is the maximum number of metrics in Prometheus for which metadata is fetched.
|
||||
// If the number of metrics exceeds this limit, no metric metadata is fetched at all.
|
||||
maxMetricsMetadata?: number;
|
||||
// When providing this custom CompleteStrategy, the settings above will not be used.
|
||||
completeStrategy?: CompleteStrategy;
|
||||
}
|
||||
|
||||
function isPrometheusConfig(remoteConfig: PrometheusConfig | PrometheusClient): remoteConfig is PrometheusConfig {
|
||||
return (remoteConfig as PrometheusConfig).url !== undefined;
|
||||
}
|
||||
|
||||
export function newCompleteStrategy(conf?: CompleteConfiguration): CompleteStrategy {
|
||||
if (conf?.completeStrategy) {
|
||||
return conf.completeStrategy;
|
||||
}
|
||||
if (conf?.remote) {
|
||||
if (!isPrometheusConfig(conf.remote)) {
|
||||
return new HybridComplete(conf.remote, conf.maxMetricsMetadata);
|
||||
}
|
||||
return new HybridComplete(new CachedPrometheusClient(new HTTPPrometheusClient(conf.remote), conf.remote.cache), conf.maxMetricsMetadata);
|
||||
}
|
||||
return new HybridComplete();
|
||||
}
|
|
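A short usage sketch of `newCompleteStrategy()` above: with a remote PrometheusConfig it wraps an HTTP client in a cache and builds a HybridComplete, otherwise it falls back to purely static completion. The URL is a placeholder and the config is assumed to only require a `url` here.

```typescript
import { newCompleteStrategy } from './index';

// Static completion only (no remote Prometheus).
const offline = newCompleteStrategy();

// Remote-backed completion: metric names, label names and label values are
// fetched from the given server; metadata is skipped above 10000 metrics.
const online = newCompleteStrategy({
  remote: { url: 'http://localhost:9090' },
  maxMetricsMetadata: 10000,
});
```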
@ -0,0 +1,493 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { Completion, snippet } from '@codemirror/autocomplete';
|
||||
|
||||
export const durationTerms = [{ label: 'y' }, { label: 'w' }, { label: 'd' }, { label: 'h' }, { label: 'm' }, { label: 's' }, { label: 'ms' }];
|
||||
export const matchOpTerms = [{ label: '=' }, { label: '!=' }, { label: '=~' }, { label: '!~' }];
|
||||
export const binOpTerms = [
|
||||
{ label: '^' },
|
||||
{ label: '*' },
|
||||
{ label: '/' },
|
||||
{ label: '%' },
|
||||
{ label: '+' },
|
||||
{ label: '-' },
|
||||
{ label: '==' },
|
||||
{ label: '>=' },
|
||||
{ label: '>' },
|
||||
{ label: '<' },
|
||||
{ label: '<=' },
|
||||
{ label: '!=' },
|
||||
{ label: 'and' },
|
||||
{ label: 'or' },
|
||||
{ label: 'unless' },
|
||||
];
|
||||
|
||||
export const binOpModifierTerms = [
|
||||
{ label: 'on', info: 'Match only on specified labels', type: 'keyword' },
|
||||
{ label: 'ignoring', info: 'Ignore specified labels for matching', type: 'keyword' },
|
||||
{ label: 'group_left', info: 'Allow many-to-one matching', type: 'keyword' },
|
||||
{ label: 'group_right', info: 'Allow one-to-many matching', type: 'keyword' },
|
||||
];
|
||||
|
||||
export const atModifierTerms = [
|
||||
{ label: 'start()', info: 'resolve to the start of the query', type: 'keyword' },
|
||||
{ label: 'end()', info: 'resolve to the end of the query', type: 'keyword' },
|
||||
];
|
||||
|
||||
export const functionIdentifierTerms = [
|
||||
{
|
||||
label: 'abs',
|
||||
detail: 'function',
|
||||
info: 'Return absolute values of input series',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'absent',
|
||||
detail: 'function',
|
||||
info: 'Determine whether input vector is empty',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'absent_over_time',
|
||||
detail: 'function',
|
||||
info: 'Determine whether input range vector is empty',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'avg_over_time',
|
||||
detail: 'function',
|
||||
info: 'Average series values over time',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'ceil',
|
||||
detail: 'function',
|
||||
info: 'Round up values of input series to nearest integer',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'changes',
|
||||
detail: 'function',
|
||||
info: 'Return number of value changes in input series over time',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'clamp',
|
||||
detail: 'function',
|
||||
info: 'Limit the value of input series between a minimum and a maximum',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'clamp_max',
|
||||
detail: 'function',
|
||||
info: 'Limit the value of input series to a maximum',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'clamp_min',
|
||||
detail: 'function',
|
||||
info: 'Limit the value of input series to a minimum',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'count_over_time',
|
||||
detail: 'function',
|
||||
info: 'Count the number of values for each input series',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'days_in_month',
|
||||
detail: 'function',
|
||||
info: 'Return the number of days in current month for provided timestamps',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'day_of_month',
|
||||
detail: 'function',
|
||||
info: 'Return the day of the month for provided timestamps',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'day_of_week',
|
||||
detail: 'function',
|
||||
info: 'Return the day of the week for provided timestamps',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'delta',
|
||||
detail: 'function',
|
||||
info: 'Calculate the difference between beginning and end of a range vector (for gauges)',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'deriv',
|
||||
detail: 'function',
|
||||
info: 'Calculate the per-second derivative over series in a range vector (for gauges)',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'exp',
|
||||
detail: 'function',
|
||||
info: 'Calculate exponential function for input vector values',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'floor',
|
||||
detail: 'function',
|
||||
info: 'Round down values of input series to nearest integer',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'histogram_quantile',
|
||||
detail: 'function',
|
||||
info: 'Calculate quantiles from histogram buckets',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'holt_winters',
|
||||
detail: 'function',
|
||||
info: 'Calculate smoothed value of input series',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'hour',
|
||||
detail: 'function',
|
||||
info: 'Return the hour of the day for provided timestamps',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'idelta',
|
||||
detail: 'function',
|
||||
info: 'Calculate the difference between the last two samples of a range vector (for counters)',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'increase',
|
||||
detail: 'function',
|
||||
info: 'Calculate the increase in value over a range of time (for counters)',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'irate',
|
||||
detail: 'function',
|
||||
info: 'Calculate the per-second increase over the last two samples of a range vector (for counters)',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'label_replace',
|
||||
detail: 'function',
|
||||
info: 'Set or replace label values',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'label_join',
|
||||
detail: 'function',
|
||||
info: 'Join together label values into new label',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'last_over_time',
|
||||
detail: 'function',
|
||||
info: 'The most recent point value in specified interval.',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'ln',
|
||||
detail: 'function',
|
||||
info: 'Calculate natural logarithm of input series',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'log10',
|
||||
detail: 'function',
|
||||
info: 'Calculate base-10 logarithm of input series',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'log2',
|
||||
detail: 'function',
|
||||
info: 'Calculate base-2 logarithm of input series',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'max_over_time',
|
||||
detail: 'function',
|
||||
info: 'Return the maximum value over time for input series',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'min_over_time',
|
||||
detail: 'function',
|
||||
info: 'Return the minimum value over time for input series',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'minute',
|
||||
detail: 'function',
|
||||
info: 'Return the minute of the hour for provided timestamps',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'month',
|
||||
detail: 'function',
|
||||
info: 'Return the month for provided timestamps',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'predict_linear',
|
||||
detail: 'function',
|
||||
info: 'Predict the value of a gauge into the future',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'present_over_time',
|
||||
detail: 'function',
|
||||
info: 'Return the value 1 for any series in the specified interval',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'quantile_over_time',
|
||||
detail: 'function',
|
||||
info: 'Calculate value quantiles over time for input series',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'rate',
|
||||
detail: 'function',
|
||||
info: 'Calculate per-second increase over a range vector (for counters)',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'resets',
|
||||
detail: 'function',
|
||||
info: 'Return number of value decreases (resets) in input series of time',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'round',
|
||||
detail: 'function',
|
||||
info: 'Round values of input series to nearest integer',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'scalar',
|
||||
detail: 'function',
|
||||
info: 'Convert single-element series vector into scalar value',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'sgn',
|
||||
detail: 'function',
|
||||
info: 'Returns the sign of the instant vector',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'sort',
|
||||
detail: 'function',
|
||||
info: 'Sort input series ascendingly by value',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'sort_desc',
|
||||
detail: 'function',
|
||||
info: 'Sort input series descendingly by value',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'sqrt',
|
||||
detail: 'function',
|
||||
info: 'Return the square root for input series',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'stddev_over_time',
|
||||
detail: 'function',
|
||||
info: 'Calculate the standard deviation within input series over time',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'stdvar_over_time',
|
||||
detail: 'function',
|
||||
info: 'Calculate the standard variation within input series over time',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'sum_over_time',
|
||||
detail: 'function',
|
||||
info: 'Calculate the sum over the values of input series over time',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'time',
|
||||
detail: 'function',
|
||||
info: 'Return the Unix timestamp at the current evaluation time',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'timestamp',
|
||||
detail: 'function',
|
||||
info: 'Return the Unix timestamp for the samples in the input vector',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'vector',
|
||||
detail: 'function',
|
||||
info: 'Convert a scalar value into a single-element series vector',
|
||||
type: 'function',
|
||||
},
|
||||
{
|
||||
label: 'year',
|
||||
detail: 'function',
|
||||
info: 'Return the year for provided timestamps',
|
||||
type: 'function',
|
||||
},
|
||||
];
|
||||
|
||||
export const aggregateOpTerms = [
|
||||
{
|
||||
label: 'avg',
|
||||
detail: 'aggregation',
|
||||
info: 'Calculate the average over dimensions',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'bottomk',
|
||||
detail: 'aggregation',
|
||||
info: 'Smallest k elements by sample value',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'count',
|
||||
detail: 'aggregation',
|
||||
info: 'Count number of elements in the vector',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'count_values',
|
||||
detail: 'aggregation',
|
||||
info: 'Count number of elements with the same value',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'group',
|
||||
detail: 'aggregation',
|
||||
info: 'Group series, while setting the sample value to 1',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'max',
|
||||
detail: 'aggregation',
|
||||
info: 'Select maximum over dimensions',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'min',
|
||||
detail: 'aggregation',
|
||||
info: 'Select minimum over dimensions',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'quantile',
|
||||
detail: 'aggregation',
|
||||
info: 'Calculate φ-quantile (0 ≤ φ ≤ 1) over dimensions',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'stddev',
|
||||
detail: 'aggregation',
|
||||
info: 'Calculate population standard deviation over dimensions',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'stdvar',
|
||||
detail: 'aggregation',
|
||||
info: 'Calculate population standard variance over dimensions',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'sum',
|
||||
detail: 'aggregation',
|
||||
info: 'Calculate sum over dimensions',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'topk',
|
||||
detail: 'aggregation',
|
||||
info: 'Largest k elements by sample value',
|
||||
type: 'keyword',
|
||||
},
|
||||
];
|
||||
|
||||
export const aggregateOpModifierTerms = [
|
||||
{
|
||||
label: 'by',
|
||||
info: 'Keep the listed labels, remove all others.',
|
||||
type: 'keyword',
|
||||
},
|
||||
{
|
||||
label: 'without',
|
||||
info: 'Remove the listed labels, preserve all others.',
|
||||
type: 'keyword',
|
||||
},
|
||||
];
|
||||
|
||||
export const numberTerms = [
|
||||
{ label: 'nan', info: 'Floating-point NaN value', type: 'constant' },
|
||||
{ label: 'inf', info: 'Floating-point infinity', type: 'constant' },
|
||||
];
|
||||
|
||||
export const snippets: readonly Completion[] = [
|
||||
{
|
||||
label: 'sum(rate(__input_vector__[5m]))',
|
||||
type: 'function',
|
||||
detail: 'snippet',
|
||||
info: 'Sum over rates of increase',
|
||||
apply: snippet('sum(rate(${__input_vector__}[5m]))'),
|
||||
},
|
||||
{
|
||||
label: 'histogram_quantile(__quantile__, sum by(le) (rate(__histogram_metric__[5m])))',
|
||||
type: 'function',
|
||||
detail: 'snippet',
|
||||
info: 'Approximate a quantile value from an aggregated histogram',
|
||||
apply: snippet('histogram_quantile(${__quantile__}, sum by(le) (rate(${__histogram_metric__}[5m])))'),
|
||||
},
|
||||
{
|
||||
label: 'label_replace(__input_vector__, "__dst__", "__replacement__", "__src__", "__regex__")',
|
||||
type: 'function',
|
||||
detail: 'snippet',
|
||||
info: 'Set or replace a label value in an input vector',
|
||||
apply: snippet('label_replace(${__input_vector__}, "${__dst__}", "${__replacement__}", "${__src__}", "${__regex__}")'),
|
||||
},
|
||||
{
|
||||
label: 'topk(__rank_number__, __input_vector__)',
|
||||
type: 'function',
|
||||
detail: 'snippet',
|
||||
info: 'Largest k elements by sample value',
|
||||
apply: snippet('topk(${__rank_number__}, ${__input_vector__})'),
|
||||
},
|
||||
{
|
||||
label: 'bottomk(__rank_number__, __input_vector__)',
|
||||
type: 'function',
|
||||
detail: 'snippet',
|
||||
info: 'Smallest k elements by sample value',
|
||||
apply: snippet('bottomk(${__rank_number__}, ${__input_vector__})'),
|
||||
},
|
||||
{
|
||||
label: 'count_values("__label_name__", __input_vector__)',
|
||||
type: 'function',
|
||||
detail: 'snippet',
|
||||
info: 'Count the number of series per distinct sample value',
|
||||
apply: snippet('count_values("${__label_name__}", ${__metric__})'),
|
||||
},
|
||||
];
|
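For illustration, a hypothetical extra entry shaped like the snippets above: the `snippet()` helper from `@codemirror/autocomplete` turns the `${...}` placeholders into tab stops when the completion is applied. The label and info text are made up for this sketch.

```typescript
import { Completion, snippet } from '@codemirror/autocomplete';

// Hypothetical additional snippet, following the same structure as the list above.
const customSnippet: Completion = {
  label: 'rate(__input_vector__[5m])',
  type: 'function',
  detail: 'snippet',
  info: 'Per-second rate over a 5m window',
  apply: snippet('rate(${__input_vector__}[5m])'),
};
```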
17
web/ui/module/codemirror-promql/src/lang-promql/index.ts
Normal file
|
@ -0,0 +1,17 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
export { PrometheusClient } from './client';
|
||||
export { CompleteConfiguration, CompleteStrategy } from './complete';
|
||||
export { LintStrategy } from './lint';
|
||||
export { PromQLExtension, LanguageType, promQLLanguage } from './promql';
|
|
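A hedged sketch of using this entry point in an editor. The `asExtension()` call and the `@codemirror/basic-setup` import are assumptions based on the upstream codemirror-promql usage and are not confirmed by this diff.

```typescript
import { basicSetup } from '@codemirror/basic-setup';
import { EditorState } from '@codemirror/state';
import { EditorView } from '@codemirror/view';
import { PromQLExtension } from './lang-promql'; // assumed path to this module

const promQL = new PromQLExtension();

new EditorView({
  state: EditorState.create({
    doc: 'rate(node_cpu_seconds_total[5m])',
    // asExtension() is assumed to bundle the language, completion and lint extensions.
    extensions: [basicSetup, promQL.asExtension()],
  }),
  parent: document.body,
});
```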
@ -0,0 +1,28 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { LintStrategy } from './index';
|
||||
import { EditorView } from '@codemirror/view';
|
||||
import { Diagnostic } from '@codemirror/lint';
|
||||
import { Parser } from '../parser';
|
||||
|
||||
// HybridLint provides a PromQL linter based on static analysis
|
||||
export class HybridLint implements LintStrategy {
|
||||
public promQL(this: HybridLint): (view: EditorView) => readonly Diagnostic[] {
|
||||
return (view: EditorView) => {
|
||||
const parser = new Parser(view.state);
|
||||
parser.analyze();
|
||||
return parser.getDiagnostics();
|
||||
};
|
||||
}
|
||||
}
|
|
@ -0,0 +1,32 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { EditorView } from '@codemirror/view';
|
||||
import { Diagnostic, linter } from '@codemirror/lint';
|
||||
import { HybridLint } from './hybrid';
|
||||
|
||||
type lintFunc = (view: EditorView) => readonly Diagnostic[] | Promise<readonly Diagnostic[]>;
|
||||
|
||||
// LintStrategy is the interface that defines the single method that returns the lint Diagnostics.
|
||||
// Every different lint mode must implement this interface.
|
||||
export interface LintStrategy {
|
||||
promQL(this: LintStrategy): lintFunc;
|
||||
}
|
||||
|
||||
export function newLintStrategy(): LintStrategy {
|
||||
return new HybridLint();
|
||||
}
|
||||
|
||||
export function promQLLinter(callbackFunc: (this: LintStrategy) => lintFunc, thisArg: LintStrategy) {
|
||||
return linter(callbackFunc.call(thisArg));
|
||||
}
|
|
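A minimal sketch of wiring the linter defined above into an editor extension: `newLintStrategy()` returns a HybridLint, and `promQLLinter()` binds its `promQL()` callback to that strategy.

```typescript
import { newLintStrategy, promQLLinter } from './index';

// Build the default (hybrid) lint strategy and turn it into a CodeMirror lint extension.
const lintStrategy = newLintStrategy();
const lintExtension = promQLLinter(lintStrategy.promQL, lintStrategy);
```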
@ -0,0 +1,16 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
export { buildLabelMatchers, labelMatchersToString } from './matcher';
|
||||
export { Parser } from './parser';
|
||||
export { walkBackward, walkThrough, containsAtLeastOneChild, containsChild, retrieveAllRecursiveNodes } from './path-finder';
|
|
@ -0,0 +1,137 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { EqlRegex, EqlSingle, Neq, NeqRegex } from 'lezer-promql';
|
||||
import { labelMatchersToString } from './matcher';
|
||||
import { Matcher } from '../types';
|
||||
import chai from 'chai';
|
||||
|
||||
describe('labelMatchersToString test', () => {
|
||||
const testCases = [
|
||||
{
|
||||
title: 'metric_name',
|
||||
metricName: 'metric_name',
|
||||
labelName: undefined,
|
||||
matchers: [] as Matcher[],
|
||||
result: 'metric_name',
|
||||
},
|
||||
{
|
||||
title: 'metric_name 2',
|
||||
metricName: 'metric_name',
|
||||
labelName: undefined,
|
||||
matchers: undefined,
|
||||
result: 'metric_name',
|
||||
},
|
||||
{
|
||||
title: 'metric_name{}',
|
||||
metricName: 'metric_name',
|
||||
labelName: undefined,
|
||||
matchers: [
|
||||
{
|
||||
type: EqlSingle,
|
||||
name: 'LabelName',
|
||||
value: '',
|
||||
},
|
||||
] as Matcher[],
|
||||
result: 'metric_name{}',
|
||||
},
|
||||
{
|
||||
title: 'sum{LabelName!="LabelValue"}',
|
||||
metricName: 'sum',
|
||||
labelName: undefined,
|
||||
matchers: [
|
||||
{
|
||||
type: Neq,
|
||||
name: 'LabelName',
|
||||
value: 'LabelValue',
|
||||
},
|
||||
] as Matcher[],
|
||||
result: 'sum{LabelName!="LabelValue"}',
|
||||
},
|
||||
{
|
||||
title: 'rate{LabelName=~"label.+"}',
|
||||
metricName: 'rate',
|
||||
labelName: undefined,
|
||||
matchers: [
|
||||
{
|
||||
type: EqlSingle,
|
||||
name: 'LabelName',
|
||||
value: '',
|
||||
},
|
||||
{
|
||||
type: EqlRegex,
|
||||
name: 'LabelName',
|
||||
value: 'label.+',
|
||||
},
|
||||
] as Matcher[],
|
||||
result: 'rate{LabelName=~"label.+"}',
|
||||
},
|
||||
{
|
||||
title: 'rate{LabelName="l1",labelName2=~"label.+",labelName3!~"label.+"}',
|
||||
metricName: 'rate',
|
||||
labelName: undefined,
|
||||
matchers: [
|
||||
{
|
||||
type: EqlSingle,
|
||||
name: 'LabelName',
|
||||
value: 'l1',
|
||||
},
|
||||
{
|
||||
type: EqlRegex,
|
||||
name: 'labelName2',
|
||||
value: 'label.+',
|
||||
},
|
||||
{
|
||||
type: NeqRegex,
|
||||
name: 'labelName3',
|
||||
value: 'label.+',
|
||||
},
|
||||
] as Matcher[],
|
||||
result: 'rate{LabelName="l1",labelName2=~"label.+",labelName3!~"label.+"}',
|
||||
},
|
||||
{
|
||||
title: 'rate{LabelName="l1",labelName2=~"label.+",labelName3!~"label.+"}',
|
||||
metricName: 'rate',
|
||||
labelName: 'LabelName',
|
||||
matchers: [
|
||||
{
|
||||
type: EqlSingle,
|
||||
name: 'LabelName',
|
||||
value: 'l1',
|
||||
},
|
||||
{
|
||||
type: EqlRegex,
|
||||
name: 'labelName2',
|
||||
value: 'label.+',
|
||||
},
|
||||
{
|
||||
type: NeqRegex,
|
||||
name: 'labelName3',
|
||||
value: 'label.+',
|
||||
},
|
||||
{
|
||||
type: Neq,
|
||||
name: 'labelName4',
|
||||
value: '',
|
||||
},
|
||||
] as Matcher[],
|
||||
result: 'rate{labelName2=~"label.+",labelName3!~"label.+"}',
|
||||
},
|
||||
];
|
||||
|
||||
testCases.forEach((value) => {
|
||||
it(value.title, () => {
|
||||
chai.expect(labelMatchersToString(value.metricName, value.matchers, value.labelName)).to.equal(value.result);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,88 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { SyntaxNode } from 'lezer-tree';
|
||||
import { EqlRegex, EqlSingle, LabelName, MatchOp, Neq, NeqRegex, StringLiteral } from 'lezer-promql';
|
||||
import { EditorState } from '@codemirror/state';
|
||||
import { Matcher } from '../types';
|
||||
|
||||
function createMatcher(labelMatcher: SyntaxNode, state: EditorState): Matcher {
|
||||
const matcher = new Matcher(0, '', '');
|
||||
const cursor = labelMatcher.cursor;
|
||||
if (!cursor.next()) {
|
||||
// weird case, that would mean the labelMatcher doesn't have any child.
|
||||
return matcher;
|
||||
}
|
||||
do {
|
||||
switch (cursor.type.id) {
|
||||
case LabelName:
|
||||
matcher.name = state.sliceDoc(cursor.from, cursor.to);
|
||||
break;
|
||||
case MatchOp:
|
||||
const ope = cursor.node.firstChild;
|
||||
if (ope) {
|
||||
matcher.type = ope.type.id;
|
||||
}
|
||||
break;
|
||||
case StringLiteral:
|
||||
matcher.value = state.sliceDoc(cursor.from, cursor.to).slice(1, -1);
|
||||
break;
|
||||
}
|
||||
} while (cursor.nextSibling());
|
||||
return matcher;
|
||||
}
|
||||
|
||||
export function buildLabelMatchers(labelMatchers: SyntaxNode[], state: EditorState): Matcher[] {
|
||||
const matchers: Matcher[] = [];
|
||||
labelMatchers.forEach((value) => {
|
||||
matchers.push(createMatcher(value, state));
|
||||
});
|
||||
return matchers;
|
||||
}
|
||||
|
||||
export function labelMatchersToString(metricName: string, matchers?: Matcher[], labelName?: string): string {
|
||||
if (!matchers || matchers.length === 0) {
|
||||
return metricName;
|
||||
}
|
||||
|
||||
let matchersAsString = '';
|
||||
for (const matcher of matchers) {
|
||||
if (matcher.name === labelName || matcher.value === '') {
|
||||
continue;
|
||||
}
|
||||
let type = '';
|
||||
switch (matcher.type) {
|
||||
case EqlSingle:
|
||||
type = '=';
|
||||
break;
|
||||
case Neq:
|
||||
type = '!=';
|
||||
break;
|
||||
case NeqRegex:
|
||||
type = '!~';
|
||||
break;
|
||||
case EqlRegex:
|
||||
type = '=~';
|
||||
break;
|
||||
default:
|
||||
type = '=';
|
||||
}
|
||||
const m = `${matcher.name}${type}"${matcher.value}"`;
|
||||
if (matchersAsString === '') {
|
||||
matchersAsString = m;
|
||||
} else {
|
||||
matchersAsString = `${matchersAsString},${m}`;
|
||||
}
|
||||
}
|
||||
return `${metricName}{${matchersAsString}}`;
|
||||
}
|
|
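A small usage sketch mirroring the test cases above: matchers whose name equals the label currently being completed, or whose value is empty, are dropped from the rendered selector. The metric and label names below are placeholders.

```typescript
import { EqlRegex, EqlSingle } from 'lezer-promql';
import { labelMatchersToString } from './matcher';
import { Matcher } from '../types';

const matchers = [
  { type: EqlSingle, name: 'job', value: 'prometheus' },
  { type: EqlRegex, name: 'instance', value: 'localhost.+' },
] as Matcher[];

// => 'up{job="prometheus",instance=~"localhost.+"}'
console.log(labelMatchersToString('up', matchers, undefined));
```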
@ -0,0 +1,759 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import chai from 'chai';
|
||||
import { Parser } from './parser';
|
||||
import { Diagnostic } from '@codemirror/lint';
|
||||
import { createEditorState } from '../../test/utils';
|
||||
import { syntaxTree } from '@codemirror/language';
|
||||
import { ValueType } from '../types';
|
||||
|
||||
describe('promql operations', () => {
|
||||
const testCases = [
|
||||
{
|
||||
expr: '1',
|
||||
expectedValueType: ValueType.scalar,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: '2 * 3',
|
||||
expectedValueType: ValueType.scalar,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: '1 unless 1',
|
||||
expectedValueType: ValueType.scalar,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 10,
|
||||
message: 'set operator not allowed in binary scalar expression',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'metric_name * "string"',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 14,
|
||||
to: 22,
|
||||
message: 'binary expression must contain only scalar and instant vector types',
|
||||
severity: 'error',
|
||||
},
|
||||
] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'metric_name_1 > bool metric_name_2',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'metric_name_1 + bool metric_name_2',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 34,
|
||||
message: 'bool modifier can only be used on comparison operators',
|
||||
severity: 'error',
|
||||
},
|
||||
] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'metric_name offset 1d',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'metric_name[5m] offset 1d',
|
||||
expectedValueType: ValueType.matrix,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'rate(metric_name[5m])[1h:] offset 1m',
|
||||
expectedValueType: ValueType.matrix,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'sum(metric_name offset 1m)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'rate(metric_name[5m] offset 1d)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'max_over_time(rate(metric_name[5m])[1h:] offset 1m)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo * bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo*bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo* bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo *bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo==bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo * sum',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo == 1',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo == bool 1',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: '2.5 / bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo and bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo or bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo unless bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
// Test and/or precedence and reassigning of operands.
|
||||
// Here it will test only the first VectorMatching so (a + b) or (c and d) ==> ManyToMany
|
||||
expr: 'foo + bar or bla and blub',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
// Test and/or/unless precedence.
|
||||
// Here it will test only the first VectorMatching so ((a and b) unless c) or d ==> ManyToMany
|
||||
expr: 'foo and bar unless baz or qux',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo * on(test,blub) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo*on(test,blub)bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo * on(test,blub) group_left bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo*on(test,blub)group_left()bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo and on(test,blub) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo and on() bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo and ignoring(test,blub) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo and ignoring() bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo unless on(bar) baz',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo / on(test,blub) group_left(bar) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo / ignoring(test,blub) group_left(blub) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo / ignoring(test,blub) group_left(bar) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo - on(test,blub) group_right(bar,foo) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo - ignoring(test,blub) group_right(bar,foo) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [] as Diagnostic[],
|
||||
},
|
||||
{
|
||||
expr: 'foo and 1',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 9,
|
||||
message: 'set operator not allowed in binary scalar expression',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: '1 and foo',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 9,
|
||||
message: 'set operator not allowed in binary scalar expression',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo or 1',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 8,
|
||||
message: 'set operator not allowed in binary scalar expression',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: '1 or foo',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 8,
|
||||
message: 'set operator not allowed in binary scalar expression',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo unless 1',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 12,
|
||||
message: 'set operator not allowed in binary scalar expression',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: '1 unless foo',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 12,
|
||||
message: 'set operator not allowed in binary scalar expression',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: '1 or on(bar) foo',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 16,
|
||||
message: 'vector matching only allowed between instant vectors',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
from: 0,
|
||||
to: 16,
|
||||
message: 'set operator not allowed in binary scalar expression',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo == on(bar) 10',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 17,
|
||||
message: 'vector matching only allowed between instant vectors',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo and on(bar) group_left(baz) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 35,
|
||||
message: 'no grouping allowed for set operations',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
from: 0,
|
||||
to: 35,
|
||||
message: 'set operations must always be many-to-many',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo and on(bar) group_right(baz) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 36,
|
||||
message: 'no grouping allowed for set operations',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
from: 0,
|
||||
to: 36,
|
||||
message: 'set operations must always be many-to-many',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo or on(bar) group_left(baz) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 34,
|
||||
message: 'no grouping allowed for set operations',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
from: 0,
|
||||
to: 34,
|
||||
message: 'set operations must always be many-to-many',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo or on(bar) group_right(baz) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 35,
|
||||
message: 'no grouping allowed for set operations',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
from: 0,
|
||||
to: 35,
|
||||
message: 'set operations must always be many-to-many',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo unless on(bar) group_left(baz) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 38,
|
||||
message: 'no grouping allowed for set operations',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
from: 0,
|
||||
to: 38,
|
||||
message: 'set operations must always be many-to-many',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo unless on(bar) group_right(baz) bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 39,
|
||||
message: 'no grouping allowed for set operations',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
from: 0,
|
||||
to: 39,
|
||||
message: 'set operations must always be many-to-many',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'http_requests{group="production"} + on(instance) group_left(job,instance) cpu_count{type="smp"}',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 95,
|
||||
message: 'label "instance" must not occur in ON and GROUP clause at once',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo + bool bar',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 14,
|
||||
message: 'bool modifier can only be used on comparison operators',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo + bool 10',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 13,
|
||||
message: 'bool modifier can only be used on comparison operators',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo and bool 10',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 15,
|
||||
message: 'bool modifier can only be used on comparison operators',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
from: 0,
|
||||
to: 15,
|
||||
message: 'set operator not allowed in binary scalar expression',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
// Test aggregation.
|
||||
{
|
||||
expr: 'sum by (foo)(some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'avg by (foo)(some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'max by (foo)(some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'sum without (foo) (some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'sum (some_metric) without (foo)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'stddev(some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'stdvar by (foo)(some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'sum by ()(some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'sum by (foo,bar,)(some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'sum by (foo,)(some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'topk(5, some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'topk( # my awesome comment\n' + '5, some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'count_values("value", some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'sum without(and, by, avg, count, alert, annotations)(some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'sum some_metric by (test)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 25,
|
||||
message: 'unable to find the parameter for the expression',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
// Test function calls.
|
||||
{
|
||||
expr: 'time()',
|
||||
expectedValueType: ValueType.scalar,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'floor(some_metric{foo!="bar"})',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'rate(some_metric[5m])',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'round(some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'round(some_metric, 5)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'floor()',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 7,
|
||||
message: 'expected 1 argument(s) in call to "floor", got 0',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'floor(some_metric, other_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 32,
|
||||
message: 'expected 1 argument(s) in call to "floor", got 2',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'floor(some_metric, 1)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 21,
|
||||
message: 'expected 1 argument(s) in call to "floor", got 2',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'floor(1)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 6,
|
||||
to: 7,
|
||||
message: 'expected type vector in call to function "floor", got scalar',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'hour(some_metric, some_metric, some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 43,
|
||||
message: 'expected at most 1 argument(s) in call to "hour", got 3',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'time(some_metric)',
|
||||
expectedValueType: ValueType.scalar,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 17,
|
||||
message: 'expected 0 argument(s) in call to "time", got 1',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'rate(some_metric)',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 5,
|
||||
to: 16,
|
||||
message: 'expected type matrix in call to function "rate", got vector',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr:
|
||||
'histogram_quantile( # Root of the query, final result, approximates a quantile.\n' +
|
||||
' 0.9, # 1st argument to histogram_quantile(), the target quantile.\n' +
|
||||
' sum by(le, method, path) ( # 2nd argument to histogram_quantile(), an aggregated histogram.\n' +
|
||||
' rate( # Argument to sum(), the per-second increase of a histogram over 5m.\n' +
|
||||
' demo_api_request_duration_seconds_bucket{job="demo"}[5m] # Argument to rate(), the raw histogram series over the last 5m.\n' +
|
||||
' )\n' +
|
||||
' )\n' +
|
||||
')',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: '1 @ start()',
|
||||
expectedValueType: ValueType.scalar,
|
||||
expectedDiag: [
|
||||
{
|
||||
from: 0,
|
||||
to: 11,
|
||||
message: '@ modifier must be preceded by an instant selector vector or range vector selector or a subquery',
|
||||
severity: 'error',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
expr: 'foo @ 879',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'food @ start()',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'food @ end()',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
{
|
||||
expr: 'sum (rate(foo[5m])) @ 456',
|
||||
expectedValueType: ValueType.vector,
|
||||
expectedDiag: [],
|
||||
},
|
||||
];
|
||||
testCases.forEach((value) => {
|
||||
const state = createEditorState(value.expr);
|
||||
const parser = new Parser(state);
|
||||
it(value.expr, () => {
|
||||
chai.expect(parser.checkAST(syntaxTree(state).topNode.firstChild)).to.equal(value.expectedValueType);
|
||||
chai.expect(parser.getDiagnostics()).to.deep.equal(value.expectedDiag);
|
||||
});
|
||||
});
|
||||
});
|
345
web/ui/module/codemirror-promql/src/lang-promql/parser/parser.ts
Normal file
345
web/ui/module/codemirror-promql/src/lang-promql/parser/parser.ts
Normal file
|
@@ -0,0 +1,345 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { Diagnostic } from '@codemirror/lint';
|
||||
import { SyntaxNode, Tree } from 'lezer-tree';
|
||||
import {
|
||||
AggregateExpr,
|
||||
And,
|
||||
BinaryExpr,
|
||||
BinModifiers,
|
||||
Bool,
|
||||
Bottomk,
|
||||
CountValues,
|
||||
Eql,
|
||||
EqlSingle,
|
||||
Expr,
|
||||
FunctionCall,
|
||||
FunctionCallArgs,
|
||||
FunctionCallBody,
|
||||
Gte,
|
||||
Gtr,
|
||||
Identifier,
|
||||
LabelMatcher,
|
||||
LabelMatchers,
|
||||
LabelMatchList,
|
||||
Lss,
|
||||
Lte,
|
||||
MatrixSelector,
|
||||
MetricIdentifier,
|
||||
Neq,
|
||||
Or,
|
||||
ParenExpr,
|
||||
Quantile,
|
||||
StepInvariantExpr,
|
||||
SubqueryExpr,
|
||||
Topk,
|
||||
UnaryExpr,
|
||||
Unless,
|
||||
VectorSelector,
|
||||
} from 'lezer-promql';
|
||||
import { containsAtLeastOneChild, retrieveAllRecursiveNodes, walkThrough } from './path-finder';
|
||||
import { getType } from './type';
|
||||
import { buildLabelMatchers } from './matcher';
|
||||
import { EditorState } from '@codemirror/state';
|
||||
import { syntaxTree } from '@codemirror/language';
|
||||
import { getFunction, Matcher, VectorMatchCardinality, ValueType } from '../types';
|
||||
import { buildVectorMatching } from './vector';
|
||||
|
||||
export class Parser {
|
||||
private readonly tree: Tree;
|
||||
private readonly state: EditorState;
|
||||
private readonly diagnostics: Diagnostic[];
|
||||
|
||||
constructor(state: EditorState) {
|
||||
this.tree = syntaxTree(state);
|
||||
this.state = state;
|
||||
this.diagnostics = [];
|
||||
}
|
||||
|
||||
getDiagnostics(): Diagnostic[] {
|
||||
return this.diagnostics.sort((a, b) => {
|
||||
return a.from - b.from;
|
||||
});
|
||||
}
|
||||
|
||||
analyze() {
|
||||
// when you are at the root of the tree, the first node is not `Expr` but a node with no name.
|
||||
// So to be able to iterate over the nodes relative to the PromQL node, we have to get the first child at the beginning.
|
||||
this.checkAST(this.tree.topNode.firstChild);
|
||||
this.diagnoseAllErrorNodes();
|
||||
}
|
||||
|
||||
private diagnoseAllErrorNodes() {
|
||||
const cursor = this.tree.cursor();
|
||||
while (cursor.next()) {
|
||||
// Usually there is an error node at the end of the expression while the user is typing,
|
||||
// so it's not really useful information to report that the expression is wrong.
|
||||
// Hopefully, if there is an error node at the end of the tree, checkAST will complain more precisely.
|
||||
if (cursor.type.id === 0 && cursor.to !== this.tree.topNode.to) {
|
||||
const node = cursor.node.parent;
|
||||
this.diagnostics.push({
|
||||
severity: 'error',
|
||||
message: 'unexpected expression',
|
||||
from: node ? node.from : cursor.from,
|
||||
to: node ? node.to : cursor.to,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkAST is inspired by the method of the same name from prometheus/prometheus:
|
||||
// https://github.com/prometheus/prometheus/blob/3470ee1fbf9d424784eb2613bab5ab0f14b4d222/promql/parser/parse.go#L433
|
||||
checkAST(node: SyntaxNode | null): ValueType {
|
||||
if (!node) {
|
||||
return ValueType.none;
|
||||
}
|
||||
switch (node.type.id) {
|
||||
case Expr:
|
||||
return this.checkAST(node.firstChild);
|
||||
case AggregateExpr:
|
||||
this.checkAggregationExpr(node);
|
||||
break;
|
||||
case BinaryExpr:
|
||||
this.checkBinaryExpr(node);
|
||||
break;
|
||||
case FunctionCall:
|
||||
this.checkCallFunction(node);
|
||||
break;
|
||||
case ParenExpr:
|
||||
this.checkAST(walkThrough(node, Expr));
|
||||
break;
|
||||
case UnaryExpr:
|
||||
const unaryExprType = this.checkAST(walkThrough(node, Expr));
|
||||
if (unaryExprType !== ValueType.scalar && unaryExprType !== ValueType.vector) {
|
||||
this.addDiagnostic(node, `unary expression only allowed on expressions of type scalar or instant vector, got ${unaryExprType}`);
|
||||
}
|
||||
break;
|
||||
case SubqueryExpr:
|
||||
const subQueryExprType = this.checkAST(walkThrough(node, Expr));
|
||||
if (subQueryExprType !== ValueType.vector) {
|
||||
this.addDiagnostic(node, `subquery is only allowed on instant vector, got ${subQueryExprType} in ${node.name} instead`);
|
||||
}
|
||||
break;
|
||||
case MatrixSelector:
|
||||
this.checkAST(walkThrough(node, Expr));
|
||||
break;
|
||||
case VectorSelector:
|
||||
this.checkVectorSelector(node);
|
||||
break;
|
||||
case StepInvariantExpr:
|
||||
const exprValue = this.checkAST(walkThrough(node, Expr));
|
||||
if (exprValue !== ValueType.vector && exprValue !== ValueType.matrix) {
|
||||
this.addDiagnostic(node, `@ modifier must be preceded by an instant selector vector or range vector selector or a subquery`);
|
||||
}
|
||||
// if you are looking at the Prometheus code, you will likely find that some checks are missing here.
|
||||
// Especially the one checking whether the timestamp after the `@` is valid: https://github.com/prometheus/prometheus/blob/ad5ed416ba635834370bfa06139258b31f8c33f9/promql/parser/parse.go#L722-L725
|
||||
// Since JavaScript handles numbers as float64 (so 53 bits of mantissa), we cannot validate that a maxInt64 value is valid.
|
||||
// To handle this issue properly, we would need to use BigInt, either via ES2020.BigInt or via the lib: https://github.com/GoogleChromeLabs/jsbi.
|
||||
// * Introducing a lib just for these checks is quite overkill.
|
||||
// * Using ES2020 would be the way to go. Unfortunately, moving to ES2020 breaks the build of the lib.
|
||||
// So far I haven't found a way to fix it. It is likely because we are building an ESM package, which is now stable in Node.js/JavaScript but still experimental in TypeScript.
|
||||
// For the above reason, we decided to drop these checks.
|
||||
break;
|
||||
}
|
||||
|
||||
return getType(node);
|
||||
}
|
||||
|
||||
private checkAggregationExpr(node: SyntaxNode): void {
|
||||
// according to https://github.com/promlabs/lezer-promql/blob/master/src/promql.grammar#L26
|
||||
// the name of the aggregator function is stored in the first child
|
||||
const aggregateOp = node.firstChild?.firstChild;
|
||||
if (!aggregateOp) {
|
||||
this.addDiagnostic(node, 'aggregation operator expected in aggregation expression but got nothing');
|
||||
return;
|
||||
}
|
||||
const expr = walkThrough(node, FunctionCallBody, FunctionCallArgs, Expr);
|
||||
if (!expr) {
|
||||
this.addDiagnostic(node, 'unable to find the parameter for the expression');
|
||||
return;
|
||||
}
|
||||
this.expectType(expr, ValueType.vector, 'aggregation expression');
|
||||
// get the parameter of the aggregation operator
|
||||
const params = walkThrough(node, FunctionCallBody, FunctionCallArgs, FunctionCallArgs, Expr);
|
||||
if (aggregateOp.type.id === Topk || aggregateOp.type.id === Bottomk || aggregateOp.type.id === Quantile) {
|
||||
if (!params) {
|
||||
this.addDiagnostic(node, 'no parameter found');
|
||||
return;
|
||||
}
|
||||
this.expectType(params, ValueType.scalar, 'aggregation parameter');
|
||||
}
|
||||
if (aggregateOp.type.id === CountValues) {
|
||||
if (!params) {
|
||||
this.addDiagnostic(node, 'no parameter found');
|
||||
return;
|
||||
}
|
||||
this.expectType(params, ValueType.string, 'aggregation parameter');
|
||||
}
|
||||
}
|
||||
|
||||
private checkBinaryExpr(node: SyntaxNode): void {
|
||||
// Following the definition of the BinaryExpr, the left and the right
|
||||
// expressions are respectively the first and the last child:
|
||||
// https://github.com/promlabs/lezer-promql/blob/master/src/promql.grammar#L52
|
||||
const lExpr = node.firstChild;
|
||||
const rExpr = node.lastChild;
|
||||
if (!lExpr || !rExpr) {
|
||||
this.addDiagnostic(node, 'left or right expression is missing in binary expression');
|
||||
return;
|
||||
}
|
||||
const lt = this.checkAST(lExpr);
|
||||
const rt = this.checkAST(rExpr);
|
||||
const boolModifierUsed = walkThrough(node, BinModifiers, Bool);
|
||||
const isComparisonOperator = containsAtLeastOneChild(node, Eql, Neq, Lte, Lss, Gte, Gtr);
|
||||
const isSetOperator = containsAtLeastOneChild(node, And, Or, Unless);
|
||||
|
||||
// BOOL modifier check
|
||||
if (boolModifierUsed) {
|
||||
if (!isComparisonOperator) {
|
||||
this.addDiagnostic(node, 'bool modifier can only be used on comparison operators');
|
||||
}
|
||||
} else {
|
||||
if (isComparisonOperator && lt === ValueType.scalar && rt === ValueType.scalar) {
|
||||
this.addDiagnostic(node, 'comparisons between scalars must use BOOL modifier');
|
||||
}
|
||||
}
|
||||
|
||||
const vectorMatching = buildVectorMatching(this.state, node);
|
||||
if (vectorMatching !== null && vectorMatching.on) {
|
||||
for (const l1 of vectorMatching.matchingLabels) {
|
||||
for (const l2 of vectorMatching.include) {
|
||||
if (l1 === l2) {
|
||||
this.addDiagnostic(node, `label "${l1}" must not occur in ON and GROUP clause at once`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (lt !== ValueType.scalar && lt !== ValueType.vector) {
|
||||
this.addDiagnostic(lExpr, 'binary expression must contain only scalar and instant vector types');
|
||||
}
|
||||
if (rt !== ValueType.scalar && rt !== ValueType.vector) {
|
||||
this.addDiagnostic(rExpr, 'binary expression must contain only scalar and instant vector types');
|
||||
}
|
||||
|
||||
if ((lt !== ValueType.vector || rt !== ValueType.vector) && vectorMatching !== null) {
|
||||
if (vectorMatching.matchingLabels.length > 0) {
|
||||
this.addDiagnostic(node, 'vector matching only allowed between instant vectors');
|
||||
}
|
||||
} else {
|
||||
if (isSetOperator) {
|
||||
if (vectorMatching?.card === VectorMatchCardinality.CardOneToMany || vectorMatching?.card === VectorMatchCardinality.CardManyToOne) {
|
||||
this.addDiagnostic(node, 'no grouping allowed for set operations');
|
||||
}
|
||||
if (vectorMatching?.card !== VectorMatchCardinality.CardManyToMany) {
|
||||
this.addDiagnostic(node, 'set operations must always be many-to-many');
|
||||
}
|
||||
}
|
||||
}
|
||||
if ((lt === ValueType.scalar || rt === ValueType.scalar) && isSetOperator) {
|
||||
this.addDiagnostic(node, 'set operator not allowed in binary scalar expression');
|
||||
}
|
||||
}
|
||||
|
||||
private checkCallFunction(node: SyntaxNode): void {
|
||||
const funcID = node.firstChild?.firstChild;
|
||||
if (!funcID) {
|
||||
this.addDiagnostic(node, 'function not defined');
|
||||
return;
|
||||
}
|
||||
|
||||
const args = retrieveAllRecursiveNodes(walkThrough(node, FunctionCallBody), FunctionCallArgs, Expr);
|
||||
const funcSignature = getFunction(funcID.type.id);
|
||||
const nargs = funcSignature.argTypes.length;
|
||||
|
||||
if (funcSignature.variadic === 0) {
|
||||
if (args.length !== nargs) {
|
||||
this.addDiagnostic(node, `expected ${nargs} argument(s) in call to "${funcSignature.name}", got ${args.length}`);
|
||||
}
|
||||
} else {
|
||||
const na = nargs - 1;
|
||||
if (na > args.length) {
|
||||
this.addDiagnostic(node, `expected at least ${na} argument(s) in call to "${funcSignature.name}", got ${args.length}`);
|
||||
} else {
|
||||
const nargsmax = na + funcSignature.variadic;
|
||||
if (funcSignature.variadic > 0 && nargsmax < args.length) {
|
||||
this.addDiagnostic(node, `expected at most ${nargsmax} argument(s) in call to "${funcSignature.name}", got ${args.length}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let j = 0;
|
||||
for (let i = 0; i < args.length; i++) {
|
||||
j = i;
|
||||
if (j >= funcSignature.argTypes.length) {
|
||||
if (funcSignature.variadic === 0) {
|
||||
// This is not a vararg function so we should not check the
|
||||
// type of the extra arguments.
|
||||
break;
|
||||
}
|
||||
j = funcSignature.argTypes.length - 1;
|
||||
}
|
||||
this.expectType(args[i], funcSignature.argTypes[j], `call to function "${funcSignature.name}"`);
|
||||
}
|
||||
}
|
||||
|
||||
private checkVectorSelector(node: SyntaxNode): void {
|
||||
const labelMatchers = buildLabelMatchers(
|
||||
retrieveAllRecursiveNodes(walkThrough(node, LabelMatchers, LabelMatchList), LabelMatchList, LabelMatcher),
|
||||
this.state
|
||||
);
|
||||
let vectorSelectorName = '';
|
||||
// VectorSelector ( MetricIdentifier ( Identifier ) )
|
||||
// https://github.com/promlabs/lezer-promql/blob/71e2f9fa5ae6f5c5547d5738966cd2512e6b99a8/src/promql.grammar#L200
|
||||
const vectorSelectorNodeName = walkThrough(node, MetricIdentifier, Identifier);
|
||||
if (vectorSelectorNodeName) {
|
||||
vectorSelectorName = this.state.sliceDoc(vectorSelectorNodeName.from, vectorSelectorNodeName.to);
|
||||
}
|
||||
if (vectorSelectorName !== '') {
|
||||
// In this case the last LabelMatcher is checking for the metric name
|
||||
// set outside the braces. This checks if the name has already been set
|
||||
// previously
|
||||
const labelMatcherMetricName = labelMatchers.find((lm) => lm.name === '__name__');
|
||||
if (labelMatcherMetricName) {
|
||||
this.addDiagnostic(node, `metric name must not be set twice: ${vectorSelectorName} or ${labelMatcherMetricName.value}`);
|
||||
}
|
||||
// adding the metric name as a Matcher to avoid a false positive for this kind of expression:
|
||||
// foo{bare=''}
|
||||
labelMatchers.push(new Matcher(EqlSingle, '__name__', vectorSelectorName));
|
||||
}
|
||||
|
||||
// A Vector selector must contain at least one non-empty matcher to prevent
|
||||
// implicit selection of all metrics (e.g. by a typo).
|
||||
const empty = labelMatchers.every((lm) => lm.matchesEmpty());
|
||||
if (empty) {
|
||||
this.addDiagnostic(node, 'vector selector must contain at least one non-empty matcher');
|
||||
}
|
||||
}
|
||||
|
||||
private expectType(node: SyntaxNode, want: ValueType, context: string): void {
|
||||
const t = this.checkAST(node);
|
||||
if (t !== want) {
|
||||
this.addDiagnostic(node, `expected type ${want} in ${context}, got ${t}`);
|
||||
}
|
||||
}
|
||||
|
||||
private addDiagnostic(node: SyntaxNode, msg: string): void {
|
||||
this.diagnostics.push({
|
||||
severity: 'error',
|
||||
message: msg,
|
||||
from: node.from,
|
||||
to: node.to,
|
||||
});
|
||||
}
|
||||
}
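As a quick illustration of how the Parser above is driven, here is a minimal sketch (not part of the commit) that reuses the createEditorState helper the spec files in this directory already import from '../../test/utils': build an editor state, run analyze(), then read the sorted diagnostics.

import { createEditorState } from '../../test/utils';
import { Parser } from './parser';

const state = createEditorState('rate(foo[5m]) + on(instance) bar');
const parser = new Parser(state);
parser.analyze(); // walks the syntax tree and collects diagnostics
// Diagnostics come back sorted by their starting position.
for (const d of parser.getDiagnostics()) {
  console.log(`${d.from}-${d.to}: ${d.message}`);
}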
|
|
@@ -0,0 +1,209 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import chai from 'chai';
|
||||
import {
|
||||
Add,
|
||||
AggregateExpr,
|
||||
BinaryExpr,
|
||||
Div,
|
||||
Eql,
|
||||
Expr,
|
||||
FunctionCall,
|
||||
FunctionCallArgs,
|
||||
FunctionCallBody,
|
||||
Gte,
|
||||
Gtr,
|
||||
Lss,
|
||||
Lte,
|
||||
Mod,
|
||||
Mul,
|
||||
Neq,
|
||||
NumberLiteral,
|
||||
Sub,
|
||||
VectorSelector,
|
||||
} from 'lezer-promql';
|
||||
import { createEditorState } from '../../test/utils';
|
||||
import { containsAtLeastOneChild, containsChild, retrieveAllRecursiveNodes, walkBackward, walkThrough } from './path-finder';
|
||||
import { SyntaxNode } from 'lezer-tree';
|
||||
import { syntaxTree } from '@codemirror/language';
|
||||
|
||||
describe('walkThrough test', () => {
|
||||
const testCases = [
|
||||
{
|
||||
title: 'should return the node when no path is given',
|
||||
expr: '1 > bool 2',
|
||||
pos: 0,
|
||||
expectedNode: 'PromQL',
|
||||
path: [] as number[],
|
||||
expectedDoc: '1 > bool 2',
|
||||
},
|
||||
{
|
||||
title: 'should find the path',
|
||||
expr: "100 * (1 - avg by(instance)(irate(node_cpu{mode='idle'}[5m])))",
|
||||
pos: 11,
|
||||
path: [Expr, NumberLiteral],
|
||||
// for the moment the function walkThrough is not able to find the following path.
|
||||
// That's because the function is iterating through the tree by searching for the first possible node that matches
|
||||
// the node ID path[i].
|
||||
// So for the current expression and the given position, we are in the sub-expression (1 - avg ...).
|
||||
// Expr is matching 1 and not avg.
|
||||
// TODO fix this issue
|
||||
// path: [Expr, AggregateExpr, AggregateOp, Avg],
|
||||
expectedNode: NumberLiteral,
|
||||
expectedDoc: '1',
|
||||
},
|
||||
{
|
||||
title: 'should not find the path',
|
||||
expr: 'topk(10, count by (job)({__name__=~".+"}))',
|
||||
pos: 12,
|
||||
path: [Expr, BinaryExpr],
|
||||
expectedNode: undefined,
|
||||
expectedDoc: undefined,
|
||||
},
|
||||
{
|
||||
title: 'should find a node in a recursive node definition',
|
||||
expr: 'rate(1, 2, 3)',
|
||||
pos: 0,
|
||||
path: [Expr, FunctionCall, FunctionCallBody, FunctionCallArgs, FunctionCallArgs, Expr, NumberLiteral],
|
||||
expectedNode: NumberLiteral,
|
||||
expectedDoc: '2',
|
||||
},
|
||||
];
|
||||
testCases.forEach((value) => {
|
||||
it(value.title, () => {
|
||||
const state = createEditorState(value.expr);
|
||||
const subTree = syntaxTree(state).resolve(value.pos, -1);
|
||||
const node = walkThrough(subTree, ...value.path);
|
||||
if (typeof value.expectedNode === 'number') {
|
||||
chai.expect(value.expectedNode).to.equal(node?.type.id);
|
||||
} else {
|
||||
chai.expect(value.expectedNode).to.equal(node?.type.name);
|
||||
}
|
||||
if (node) {
|
||||
chai.expect(value.expectedDoc).to.equal(state.sliceDoc(node.from, node.to));
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('containsAtLeastOneChild test', () => {
|
||||
const testCases = [
|
||||
{
|
||||
title: 'should not find a node if none is defined',
|
||||
expr: '1 > 2',
|
||||
pos: 3,
|
||||
expectedResult: false,
|
||||
walkThrough: [],
|
||||
child: [],
|
||||
},
|
||||
{
|
||||
title: 'should find a node in the given list',
|
||||
expr: '1 > 2',
|
||||
pos: 0,
|
||||
walkThrough: [Expr, BinaryExpr],
|
||||
child: [Eql, Neq, Lte, Lss, Gte, Gtr],
|
||||
expectedResult: true,
|
||||
},
|
||||
{
|
||||
title: 'should not find a node in the given list',
|
||||
expr: '1 > 2',
|
||||
pos: 0,
|
||||
walkThrough: [Expr, BinaryExpr],
|
||||
child: [Mul, Div, Mod, Add, Sub],
|
||||
expectedResult: false,
|
||||
},
|
||||
];
|
||||
testCases.forEach((value) => {
|
||||
it(value.title, () => {
|
||||
const state = createEditorState(value.expr);
|
||||
const subTree = syntaxTree(state).resolve(value.pos, -1);
|
||||
const node = walkThrough(subTree, ...value.walkThrough);
|
||||
chai.expect(node).to.not.null;
|
||||
if (node) {
|
||||
chai.expect(value.expectedResult).to.equal(containsAtLeastOneChild(node, ...value.child));
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('containsChild test', () => {
|
||||
const testCases = [
|
||||
{
|
||||
title: 'Should find all expr in a subtree',
|
||||
expr: 'metric_name / ignor',
|
||||
pos: 0,
|
||||
expectedResult: true,
|
||||
walkThrough: [Expr, BinaryExpr],
|
||||
child: [Expr, Expr],
|
||||
},
|
||||
{
|
||||
title: 'Should find all expr in a subtree 2',
|
||||
expr: 'http_requests_total{method="GET"} off',
|
||||
pos: 0,
|
||||
expectedResult: true,
|
||||
walkThrough: [Expr, BinaryExpr],
|
||||
child: [Expr, Expr],
|
||||
},
|
||||
{
|
||||
title: 'Should not find all child required',
|
||||
expr: 'sum(ra)',
|
||||
pos: 0,
|
||||
expectedResult: false,
|
||||
walkThrough: [Expr, AggregateExpr, FunctionCallBody, FunctionCallArgs],
|
||||
child: [Expr, Expr],
|
||||
},
|
||||
];
|
||||
testCases.forEach((value) => {
|
||||
it(value.title, () => {
|
||||
const state = createEditorState(value.expr);
|
||||
const subTree = syntaxTree(state).resolve(value.pos, -1);
|
||||
const node: SyntaxNode | null = walkThrough(subTree, ...value.walkThrough);
|
||||
|
||||
chai.expect(node).to.not.null;
|
||||
if (node) {
|
||||
chai.expect(value.expectedResult).to.equal(containsChild(node, ...value.child));
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('retrieveAllRecursiveNodes test', () => {
|
||||
it('should find every occurrence', () => {
|
||||
const state = createEditorState('rate(1,2,3)');
|
||||
const tree = syntaxTree(state).topNode.firstChild;
|
||||
chai.expect(tree).to.not.null;
|
||||
if (tree) {
|
||||
chai.expect(3).to.equal(retrieveAllRecursiveNodes(walkThrough(tree, FunctionCall, FunctionCallBody), FunctionCallArgs, Expr).length);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('walkbackward test', () => {
|
||||
const testCases = [
|
||||
{
|
||||
title: 'should find the parent',
|
||||
expr: 'metric_name{}',
|
||||
pos: 12,
|
||||
exit: VectorSelector,
|
||||
expectedResult: VectorSelector,
|
||||
},
|
||||
];
|
||||
testCases.forEach((value) => {
|
||||
it(value.title, () => {
|
||||
const state = createEditorState(value.expr);
|
||||
const tree = syntaxTree(state).resolve(value.pos, -1);
|
||||
chai.expect(value.expectedResult).to.equal(walkBackward(tree, value.exit)?.type.id);
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,100 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { SyntaxNode } from 'lezer-tree';
|
||||
|
||||
// walkBackward iterates over the tree from the leaf to the root until it finds the given `exit` node.
|
||||
// It returns null if the exit is not found.
|
||||
export function walkBackward(node: SyntaxNode, exit: number): SyntaxNode | null {
|
||||
const cursor = node.cursor;
|
||||
let cursorIsMoving = true;
|
||||
while (cursorIsMoving && cursor.type.id !== exit) {
|
||||
cursorIsMoving = cursor.parent();
|
||||
}
|
||||
return cursor.type.id === exit ? cursor.node : null;
|
||||
}
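For illustration, a small sketch of walkBackward in use, borrowing the expression and position from the walkbackward spec above (createEditorState is the module's test helper, imported as in the spec files):

import { syntaxTree } from '@codemirror/language';
import { VectorSelector } from 'lezer-promql';
import { createEditorState } from '../../test/utils';

const state = createEditorState('metric_name{}');
const leaf = syntaxTree(state).resolve(12, -1);
// Climbs from the leaf towards the root and stops at the enclosing VectorSelector (or returns null).
const selector = walkBackward(leaf, VectorSelector);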
|
||||
|
||||
// walkThrough follows the path passed as a parameter.
|
||||
// If it succeeds in reaching the last id/name of the path, it returns the corresponding node.
|
||||
// Otherwise, if it is not possible to reach the last id/name of the path, it returns `null`.
|
||||
// Note: while searching for the given path, the tree is only traversed from the root to the leaf.
|
||||
export function walkThrough(node: SyntaxNode, ...path: (number | string)[]): SyntaxNode | null {
|
||||
const cursor = node.cursor;
|
||||
let i = 0;
|
||||
let cursorIsMoving = true;
|
||||
path.unshift(cursor.type.id);
|
||||
while (i < path.length && cursorIsMoving) {
|
||||
if (cursor.type.id === path[i] || cursor.type.name === path[i]) {
|
||||
i++;
|
||||
if (i < path.length) {
|
||||
cursorIsMoving = cursor.next();
|
||||
}
|
||||
} else {
|
||||
cursorIsMoving = cursor.nextSibling();
|
||||
}
|
||||
}
|
||||
if (i >= path.length) {
|
||||
return cursor.node;
|
||||
}
|
||||
return null;
|
||||
}
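Continuing the previous sketch, walkThrough can follow a full path from the root; this mirrors the recursive-node case in the spec above and, for this expression, lands on the literal '2'.

import { Expr, FunctionCall, FunctionCallArgs, FunctionCallBody, NumberLiteral } from 'lezer-promql';

const state2 = createEditorState('rate(1, 2, 3)');
const root = syntaxTree(state2).resolve(0, -1);
// Follows Expr > FunctionCall > FunctionCallBody > FunctionCallArgs > FunctionCallArgs > Expr > NumberLiteral.
const found = walkThrough(root, Expr, FunctionCall, FunctionCallBody, FunctionCallArgs, FunctionCallArgs, Expr, NumberLiteral);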
|
||||
|
||||
export function containsAtLeastOneChild(node: SyntaxNode, ...child: (number | string)[]): boolean {
|
||||
const cursor = node.cursor;
|
||||
if (!cursor.next()) {
|
||||
// let's try to move directly to the children level and
|
||||
// return false immediately if the current node doesn't have any child
|
||||
return false;
|
||||
}
|
||||
let result = false;
|
||||
do {
|
||||
result = child.some((n) => cursor.type.id === n || cursor.type.name === n);
|
||||
} while (!result && cursor.nextSibling());
|
||||
return result;
|
||||
}
|
||||
|
||||
export function containsChild(node: SyntaxNode, ...child: (number | string)[]): boolean {
|
||||
const cursor = node.cursor;
|
||||
if (!cursor.next()) {
|
||||
// let's try to move directly to the children level and
|
||||
// return false immediately if the current node doesn't have any child
|
||||
return false;
|
||||
}
|
||||
let i = 0;
|
||||
|
||||
do {
|
||||
if (cursor.type.id === child[i] || cursor.type.name === child[i]) {
|
||||
i++;
|
||||
}
|
||||
} while (i < child.length && cursor.nextSibling());
|
||||
|
||||
return i >= child.length;
|
||||
}
|
||||
|
||||
export function retrieveAllRecursiveNodes(parentNode: SyntaxNode | null, recursiveNode: number, leaf: number): SyntaxNode[] {
|
||||
const nodes: SyntaxNode[] = [];
|
||||
|
||||
function recursiveRetrieveNode(node: SyntaxNode | null, nodes: SyntaxNode[]) {
|
||||
const subNode = node?.getChild(recursiveNode);
|
||||
const le = node?.lastChild;
|
||||
if (subNode && subNode.type.id === recursiveNode) {
|
||||
recursiveRetrieveNode(subNode, nodes);
|
||||
}
|
||||
if (le && le.type.id === leaf) {
|
||||
nodes.push(le);
|
||||
}
|
||||
}
|
||||
|
||||
recursiveRetrieveNode(parentNode, nodes);
|
||||
return nodes;
|
||||
}
|
|
@@ -0,0 +1,77 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { SyntaxNode } from 'lezer-tree';
|
||||
import {
|
||||
AggregateExpr,
|
||||
BinaryExpr,
|
||||
Expr,
|
||||
FunctionCall,
|
||||
MatrixSelector,
|
||||
NumberLiteral,
|
||||
OffsetExpr,
|
||||
ParenExpr,
|
||||
StepInvariantExpr,
|
||||
StringLiteral,
|
||||
SubqueryExpr,
|
||||
UnaryExpr,
|
||||
VectorSelector,
|
||||
} from 'lezer-promql';
|
||||
import { walkThrough } from './path-finder';
|
||||
import { getFunction, ValueType } from '../types';
|
||||
|
||||
// Based on https://github.com/prometheus/prometheus/blob/d668a7efe3107dbdcc67bf4e9f12430ed8e2b396/promql/parser/ast.go#L191
|
||||
export function getType(node: SyntaxNode | null): ValueType {
|
||||
if (!node) {
|
||||
return ValueType.none;
|
||||
}
|
||||
switch (node.type.id) {
|
||||
case Expr:
|
||||
return getType(node.firstChild);
|
||||
case AggregateExpr:
|
||||
return ValueType.vector;
|
||||
case VectorSelector:
|
||||
return ValueType.vector;
|
||||
case OffsetExpr:
|
||||
return getType(node.firstChild);
|
||||
case StringLiteral:
|
||||
return ValueType.string;
|
||||
case NumberLiteral:
|
||||
return ValueType.scalar;
|
||||
case MatrixSelector:
|
||||
return ValueType.matrix;
|
||||
case SubqueryExpr:
|
||||
return ValueType.matrix;
|
||||
case ParenExpr:
|
||||
return getType(walkThrough(node, Expr));
|
||||
case UnaryExpr:
|
||||
return getType(walkThrough(node, Expr));
|
||||
case BinaryExpr:
|
||||
const lt = getType(node.firstChild);
|
||||
const rt = getType(node.lastChild);
|
||||
if (lt === ValueType.scalar && rt === ValueType.scalar) {
|
||||
return ValueType.scalar;
|
||||
}
|
||||
return ValueType.vector;
|
||||
case FunctionCall:
|
||||
const funcNode = node.firstChild?.firstChild;
|
||||
if (!funcNode) {
|
||||
return ValueType.none;
|
||||
}
|
||||
return getFunction(funcNode.type.id).returnType;
|
||||
case StepInvariantExpr:
|
||||
return getType(walkThrough(node, Expr));
|
||||
default:
|
||||
return ValueType.none;
|
||||
}
|
||||
}
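A brief sketch of getType at the top of a parsed expression (again assuming the createEditorState test helper used in the spec files):

import { syntaxTree } from '@codemirror/language';
import { createEditorState } from '../../test/utils';
import { getType } from './type';
import { ValueType } from '../types';

const state = createEditorState('rate(some_metric[5m])');
// Expr -> FunctionCall -> getFunction(rate).returnType, i.e. ValueType.vector.
const valueType: ValueType = getType(syntaxTree(state).topNode.firstChild);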
|
|
@@ -0,0 +1,213 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { buildVectorMatching } from './vector';
|
||||
import { createEditorState } from '../../test/utils';
|
||||
import { walkThrough } from './path-finder';
|
||||
import { BinaryExpr, Expr } from 'lezer-promql';
|
||||
import chai from 'chai';
|
||||
import { syntaxTree } from '@codemirror/language';
|
||||
import { VectorMatchCardinality } from '../types';
|
||||
|
||||
describe('buildVectorMatching test', () => {
|
||||
const testCases = [
|
||||
{
|
||||
binaryExpr: 'foo * bar',
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] },
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo * sum',
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] },
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo == 1',
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] },
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo == bool 1',
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] },
|
||||
},
|
||||
{
|
||||
binaryExpr: '2.5 / bar',
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] },
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo and bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToMany,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo or bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToMany,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo unless bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToMany,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
// Test and/or precedence and reassigning of operands.
|
||||
// Here it will test only the first VectorMatching so (a + b) or (c and d) ==> ManyToMany
|
||||
binaryExpr: 'foo + bar or bla and blub',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToMany,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
// Test and/or/unless precedence.
|
||||
// Here it will test only the first VectorMatching so ((a and b) unless c) or d ==> ManyToMany
|
||||
binaryExpr: 'foo and bar unless baz or qux',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToMany,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo * on(test,blub) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardOneToOne,
|
||||
matchingLabels: ['test', 'blub'],
|
||||
on: true,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo * on(test,blub) group_left bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToOne,
|
||||
matchingLabels: ['test', 'blub'],
|
||||
on: true,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo and on(test,blub) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToMany,
|
||||
matchingLabels: ['test', 'blub'],
|
||||
on: true,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo and on() bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToMany,
|
||||
matchingLabels: [],
|
||||
on: true,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo and ignoring(test,blub) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToMany,
|
||||
matchingLabels: ['test', 'blub'],
|
||||
on: false,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo and ignoring() bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToMany,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo unless on(bar) baz',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToMany,
|
||||
matchingLabels: ['bar'],
|
||||
on: true,
|
||||
include: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo / on(test,blub) group_left(bar) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToOne,
|
||||
matchingLabels: ['test', 'blub'],
|
||||
on: true,
|
||||
include: ['bar'],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo / ignoring(test,blub) group_left(blub) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToOne,
|
||||
matchingLabels: ['test', 'blub'],
|
||||
on: false,
|
||||
include: ['blub'],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo / ignoring(test,blub) group_left(bar) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardManyToOne,
|
||||
matchingLabels: ['test', 'blub'],
|
||||
on: false,
|
||||
include: ['bar'],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo - on(test,blub) group_right(bar,foo) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardOneToMany,
|
||||
matchingLabels: ['test', 'blub'],
|
||||
on: true,
|
||||
include: ['bar', 'foo'],
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo - ignoring(test,blub) group_right(bar,foo) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardOneToMany,
|
||||
matchingLabels: ['test', 'blub'],
|
||||
on: false,
|
||||
include: ['bar', 'foo'],
|
||||
},
|
||||
},
|
||||
];
|
||||
testCases.forEach((value) => {
|
||||
it(value.binaryExpr, () => {
|
||||
const state = createEditorState(value.binaryExpr);
|
||||
const node = walkThrough(syntaxTree(state).topNode, Expr, BinaryExpr);
|
||||
chai.expect(node).to.not.null;
|
||||
chai.expect(node).to.not.undefined;
|
||||
if (node) {
|
||||
chai.expect(value.expectedVectorMatching).to.deep.equal(buildVectorMatching(state, node));
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,74 @@
|
|||
// Copyright 2021 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import { EditorState } from '@codemirror/state';
|
||||
import { SyntaxNode } from 'lezer-tree';
|
||||
import {
|
||||
And,
|
||||
BinaryExpr,
|
||||
BinModifiers,
|
||||
GroupingLabel,
|
||||
GroupingLabelList,
|
||||
GroupingLabels,
|
||||
GroupLeft,
|
||||
GroupRight,
|
||||
On,
|
||||
OnOrIgnoring,
|
||||
Or,
|
||||
Unless,
|
||||
} from 'lezer-promql';
|
||||
import { VectorMatchCardinality, VectorMatching } from '../types';
|
||||
import { containsAtLeastOneChild, retrieveAllRecursiveNodes } from './path-finder';
|
||||
|
||||
export function buildVectorMatching(state: EditorState, binaryNode: SyntaxNode) {
|
||||
if (!binaryNode || binaryNode.type.id !== BinaryExpr) {
|
||||
return null;
|
||||
}
|
||||
const result: VectorMatching = {
|
||||
card: VectorMatchCardinality.CardOneToOne,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
};
|
||||
const binModifiers = binaryNode.getChild(BinModifiers);
|
||||
if (binModifiers) {
|
||||
const onOrIgnoring = binModifiers.getChild(OnOrIgnoring);
|
||||
if (onOrIgnoring) {
|
||||
result.on = onOrIgnoring.getChild(On) !== null;
|
||||
const labels = retrieveAllRecursiveNodes(onOrIgnoring.getChild(GroupingLabels), GroupingLabelList, GroupingLabel);
|
||||
if (labels.length > 0) {
|
||||
for (const label of labels) {
|
||||
result.matchingLabels.push(state.sliceDoc(label.from, label.to));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const groupLeft = binModifiers.getChild(GroupLeft);
|
||||
const groupRight = binModifiers.getChild(GroupRight);
|
||||
if (groupLeft || groupRight) {
|
||||
result.card = groupLeft ? VectorMatchCardinality.CardManyToOne : VectorMatchCardinality.CardOneToMany;
|
||||
const includeLabels = retrieveAllRecursiveNodes(binModifiers.getChild(GroupingLabels), GroupingLabelList, GroupingLabel);
|
||||
if (includeLabels.length > 0) {
|
||||
for (const label of includeLabels) {
|
||||
result.include.push(state.sliceDoc(label.from, label.to));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const isSetOperator = containsAtLeastOneChild(binaryNode, And, Or, Unless);
|
||||
if (isSetOperator && result.card === VectorMatchCardinality.CardOneToOne) {
|
||||
result.card = VectorMatchCardinality.CardManyToMany;
|
||||
}
|
||||
return result;
|
||||
}
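As a short usage sketch (the same pattern the spec above uses): locate the BinaryExpr node with walkThrough and pass it together with the editor state.

import { syntaxTree } from '@codemirror/language';
import { BinaryExpr, Expr } from 'lezer-promql';
import { walkThrough } from './path-finder';
import { createEditorState } from '../../test/utils';

const state = createEditorState('foo / on(test,blub) group_left(bar) bar');
const binaryNode = walkThrough(syntaxTree(state).topNode, Expr, BinaryExpr);
// => { card: CardManyToOne, matchingLabels: ['test', 'blub'], on: true, include: ['bar'] }
const matching = binaryNode && buildVectorMatching(state, binaryNode);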
|
120
web/ui/module/codemirror-promql/src/lang-promql/promql.ts
Normal file
120
web/ui/module/codemirror-promql/src/lang-promql/promql.ts
Normal file
|
@@ -0,0 +1,120 @@
|
|||
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import { parser } from 'lezer-promql';
import { styleTags, tags } from '@codemirror/highlight';
import { Extension } from '@codemirror/state';
import { CompleteConfiguration, CompleteStrategy, newCompleteStrategy } from './complete';
import { LintStrategy, newLintStrategy, promQLLinter } from './lint';
import { CompletionContext } from '@codemirror/autocomplete';
import { LezerLanguage } from '@codemirror/language';

export enum LanguageType {
  PromQL = 'PromQL',
  MetricName = 'MetricName',
}

export function promQLLanguage(top: LanguageType) {
  return LezerLanguage.define({
    parser: parser.configure({
      top: top,
      props: [
        styleTags({
          LineComment: tags.comment,
          LabelName: tags.labelName,
          StringLiteral: tags.string,
          NumberLiteral: tags.number,
          Duration: tags.number,
          'Abs Absent AbsentOverTime AvgOverTime Ceil Changes Clamp ClampMax ClampMin CountOverTime DaysInMonth DayOfMonth DayOfWeek Delta Deriv Exp Floor HistogramQuantile HoltWinters Hour Idelta Increase Irate LabelReplace LabelJoin LastOverTime Ln Log10 Log2 MaxOverTime MinOverTime Minute Month PredictLinear PresentOverTime QuantileOverTime Rate Resets Round Scalar Sgn Sort SortDesc Sqrt StddevOverTime StdvarOverTime SumOverTime Time Timestamp Vector Year': tags.function(
            tags.variableName
          ),
          'Avg Bottomk Count Count_values Group Max Min Quantile Stddev Stdvar Sum Topk': tags.operatorKeyword,
          'By Without Bool On Ignoring GroupLeft GroupRight Offset Start End': tags.modifier,
          'And Unless Or': tags.logicOperator,
          'Sub Add Mul Mod Div Eql Neq Lte Lss Gte Gtr EqlRegex EqlSingle NeqRegex Pow At': tags.operator,
          UnaryOp: tags.arithmeticOperator,
          '( )': tags.paren,
          '[ ]': tags.squareBracket,
          '{ }': tags.brace,
          '⚠': tags.invalid,
        }),
      ],
    }),
    languageData: {
      closeBrackets: { brackets: ['(', '[', '{', "'", '"', '`'] },
      commentTokens: { line: '#' },
    },
  });
}

/**
 * This class holds the state of the completion extension for CodeMirror and allows hot-swapping the complete strategy.
 */
export class PromQLExtension {
  private complete: CompleteStrategy;
  private lint: LintStrategy;
  private enableCompletion: boolean;
  private enableLinter: boolean;

  constructor() {
    this.complete = newCompleteStrategy();
    this.lint = newLintStrategy();
    this.enableLinter = true;
    this.enableCompletion = true;
  }

  setComplete(conf?: CompleteConfiguration): PromQLExtension {
    this.complete = newCompleteStrategy(conf);
    return this;
  }

  getComplete(): CompleteStrategy {
    return this.complete;
  }

  activateCompletion(activate: boolean): PromQLExtension {
    this.enableCompletion = activate;
    return this;
  }

  setLinter(linter: LintStrategy): PromQLExtension {
    this.lint = linter;
    return this;
  }

  getLinter(): LintStrategy {
    return this.lint;
  }

  activateLinter(activate: boolean): PromQLExtension {
    this.enableLinter = activate;
    return this;
  }

  asExtension(languageType = LanguageType.PromQL): Extension {
    const language = promQLLanguage(languageType);
    let extension: Extension = [language];
    if (this.enableCompletion) {
      const completion = language.data.of({
        autocomplete: (context: CompletionContext) => {
          return this.complete.promQL(context);
        },
      });
      extension = extension.concat(completion);
    }
    if (this.enableLinter) {
      extension = extension.concat(promQLLinter(this.lint.promQL, this.lint));
    }
    return extension;
  }
}
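
A minimal usage sketch (not part of the diff) showing how a consumer could wire the extension into a CodeMirror 6 editor state; the import path and the sample document string are assumptions, the method calls come from the class above:

import { EditorState } from '@codemirror/state';
import { PromQLExtension, LanguageType } from './promql'; // hypothetical relative path

// Build the extension with the default completion and lint strategies enabled.
const promqlExtension = new PromQLExtension().activateCompletion(true).activateLinter(true);

// Hand the combined language/completion/linter extension to an editor state.
const state = EditorState.create({
  doc: 'rate(node_cpu_seconds_total[5m])', // illustrative PromQL expression
  extensions: [promqlExtension.asExtension(LanguageType.PromQL)],
});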

@@ -0,0 +1,396 @@
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import {
  Abs,
  Absent,
  AbsentOverTime,
  AvgOverTime,
  Ceil,
  Changes,
  Clamp,
  ClampMax,
  ClampMin,
  CountOverTime,
  DayOfMonth,
  DayOfWeek,
  DaysInMonth,
  Delta,
  Deriv,
  Exp,
  Floor,
  HistogramQuantile,
  HoltWinters,
  Hour,
  Idelta,
  Increase,
  Irate,
  LabelJoin,
  LabelReplace,
  LastOverTime,
  Ln,
  Log10,
  Log2,
  MaxOverTime,
  MinOverTime,
  Minute,
  Month,
  PredictLinear,
  PresentOverTime,
  QuantileOverTime,
  Rate,
  Resets,
  Round,
  Scalar,
  Sgn,
  Sort,
  SortDesc,
  Sqrt,
  StddevOverTime,
  StdvarOverTime,
  SumOverTime,
  Time,
  Timestamp,
  Vector,
  Year,
} from 'lezer-promql';

export enum ValueType {
  none = 'none',
  vector = 'vector',
  scalar = 'scalar',
  matrix = 'matrix',
  string = 'string',
}

export interface PromQLFunction {
  name: string;
  argTypes: ValueType[];
  variadic: number;
  returnType: ValueType;
}

// promqlFunctions is a list of all functions supported by PromQL, including their types.
// Based on https://github.com/prometheus/prometheus/blob/master/promql/parser/functions.go#L26
const promqlFunctions: { [key: number]: PromQLFunction } = {
  [Abs]: {
    name: 'abs',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Absent]: {
    name: 'absent',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [AbsentOverTime]: {
    name: 'absent_over_time',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [AvgOverTime]: {
    name: 'avg_over_time',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Ceil]: {
    name: 'ceil',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Changes]: {
    name: 'changes',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Clamp]: {
    name: 'clamp',
    argTypes: [ValueType.vector, ValueType.scalar, ValueType.scalar],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [ClampMax]: {
    name: 'clamp_max',
    argTypes: [ValueType.vector, ValueType.scalar],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [ClampMin]: {
    name: 'clamp_min',
    argTypes: [ValueType.vector, ValueType.scalar],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [CountOverTime]: {
    name: 'count_over_time',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [DaysInMonth]: {
    name: 'days_in_month',
    argTypes: [ValueType.vector],
    variadic: 1,
    returnType: ValueType.vector,
  },
  [DayOfMonth]: {
    name: 'day_of_month',
    argTypes: [ValueType.vector],
    variadic: 1,
    returnType: ValueType.vector,
  },
  [DayOfWeek]: {
    name: 'day_of_week',
    argTypes: [ValueType.vector],
    variadic: 1,
    returnType: ValueType.vector,
  },
  [Delta]: {
    name: 'delta',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Deriv]: {
    name: 'deriv',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Exp]: {
    name: 'exp',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Floor]: {
    name: 'floor',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [HistogramQuantile]: {
    name: 'histogram_quantile',
    argTypes: [ValueType.scalar, ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [HoltWinters]: {
    name: 'holt_winters',
    argTypes: [ValueType.matrix, ValueType.scalar, ValueType.scalar],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Hour]: {
    name: 'hour',
    argTypes: [ValueType.vector],
    variadic: 1,
    returnType: ValueType.vector,
  },
  [Idelta]: {
    name: 'idelta',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Increase]: {
    name: 'increase',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Irate]: {
    name: 'irate',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [LabelReplace]: {
    name: 'label_replace',
    argTypes: [ValueType.vector, ValueType.string, ValueType.string, ValueType.string, ValueType.string],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [LabelJoin]: {
    name: 'label_join',
    argTypes: [ValueType.vector, ValueType.string, ValueType.string, ValueType.string],
    variadic: -1,
    returnType: ValueType.vector,
  },
  [LastOverTime]: {
    name: 'last_over_time',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Ln]: {
    name: 'ln',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Log10]: {
    name: 'log10',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Log2]: {
    name: 'log2',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [MaxOverTime]: {
    name: 'max_over_time',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [MinOverTime]: {
    name: 'min_over_time',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Minute]: {
    name: 'minute',
    argTypes: [ValueType.vector],
    variadic: 1,
    returnType: ValueType.vector,
  },
  [Month]: {
    name: 'month',
    argTypes: [ValueType.vector],
    variadic: 1,
    returnType: ValueType.vector,
  },
  [PredictLinear]: {
    name: 'predict_linear',
    argTypes: [ValueType.matrix, ValueType.scalar],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [PresentOverTime]: {
    name: 'present_over_time',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [QuantileOverTime]: {
    name: 'quantile_over_time',
    argTypes: [ValueType.scalar, ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Rate]: {
    name: 'rate',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Resets]: {
    name: 'resets',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Round]: {
    name: 'round',
    argTypes: [ValueType.vector, ValueType.scalar],
    variadic: 1,
    returnType: ValueType.vector,
  },
  [Scalar]: {
    name: 'scalar',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.scalar,
  },
  [Sgn]: {
    name: 'sgn',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Sort]: {
    name: 'sort',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [SortDesc]: {
    name: 'sort_desc',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Sqrt]: {
    name: 'sqrt',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [StddevOverTime]: {
    name: 'stddev_over_time',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [StdvarOverTime]: {
    name: 'stdvar_over_time',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [SumOverTime]: {
    name: 'sum_over_time',
    argTypes: [ValueType.matrix],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Time]: {
    name: 'time',
    argTypes: [],
    variadic: 0,
    returnType: ValueType.scalar,
  },
  [Timestamp]: {
    name: 'timestamp',
    argTypes: [ValueType.vector],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Vector]: {
    name: 'vector',
    argTypes: [ValueType.scalar],
    variadic: 0,
    returnType: ValueType.vector,
  },
  [Year]: {
    name: 'year',
    argTypes: [ValueType.vector],
    variadic: 1,
    returnType: ValueType.vector,
  },
};

export function getFunction(id: number): PromQLFunction {
  return promqlFunctions[id];
}
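
A brief sketch of how the lookup is meant to be used from parser code; the values in the comments are read directly from the map above:

import { Rate } from 'lezer-promql';

// Resolve the signature of rate() from its lezer node-type id.
const rateFn = getFunction(Rate);
// rateFn.name === 'rate'
// rateFn.argTypes deep-equals [ValueType.matrix]
// rateFn.variadic === 0 and rateFn.returnType === ValueType.vector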

@@ -0,0 +1,16 @@
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

export { ValueType, PromQLFunction, getFunction } from './function';
export { Matcher } from './matcher';
export { VectorMatchCardinality, VectorMatching } from './vector';

@@ -0,0 +1,37 @@
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import { EqlSingle, Neq } from 'lezer-promql';

export class Matcher {
  type: number;
  name: string;
  value: string;

  constructor(type: number, name: string, value: string) {
    this.type = type;
    this.name = name;
    this.value = value;
  }

  matchesEmpty(): boolean {
    switch (this.type) {
      case EqlSingle:
        return this.value === '';
      case Neq:
        return this.value !== '';
      default:
        return false;
    }
  }
}
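
A short sketch of the empty-match check, using the node-type ids imported above; the label names are illustrative:

// job="" matches series without a job label (or with an empty one).
new Matcher(EqlSingle, 'job', '').matchesEmpty(); // true
// job!="prometheus" also matches the empty label value.
new Matcher(Neq, 'job', 'prometheus').matchesEmpty(); // true
// job="prometheus" does not.
new Matcher(EqlSingle, 'job', 'prometheus').matchesEmpty(); // false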

@@ -0,0 +1,33 @@
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

export enum VectorMatchCardinality {
  CardOneToOne = 'one-to-one',
  CardManyToOne = 'many-to-one',
  CardOneToMany = 'one-to-many',
  CardManyToMany = 'many-to-many',
}

export interface VectorMatching {
  // The cardinality of the two Vectors.
  card: VectorMatchCardinality;
  // MatchingLabels contains the labels which define equality of a pair of
  // elements from the Vectors.
  matchingLabels: string[];
  // On includes the given label names from matching,
  // rather than excluding them.
  on: boolean;
  // Include contains additional labels that should be included in
  // the result from the side with the lower cardinality.
  include: string[];
}
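
For reference, a hedged sketch of the default value a consumer would start from before any matching modifiers are applied, mirroring PromQL's one-to-one default; the variable name is illustrative:

const defaultMatching: VectorMatching = {
  card: VectorMatchCardinality.CardOneToOne,
  matchingLabels: [],
  on: false,
  include: [],
};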

@@ -0,0 +1,19 @@
{
  "status": "success",
  "data": [
    {
      "__name__": "alertmanager_alerts",
      "env": "demo",
      "instance": "demo.do.prometheus.io:9093",
      "job": "alertmanager",
      "state": "active"
    },
    {
      "__name__": "alertmanager_alerts",
      "env": "demo",
      "instance": "demo.do.prometheus.io:9093",
      "job": "alertmanager",
      "state": "suppressed"
    }
  ]
}

89  web/ui/module/codemirror-promql/src/test/metadata.json  (Normal file)
@@ -0,0 +1,89 @@
{
  "status": "success",
  "data": {
    "alertmanager_alerts": [
      {
        "type": "gauge",
        "help": "How many alerts by state.",
        "unit": ""
      }
    ],
    "alertmanager_alerts_invalid_total": [
      {
        "type": "counter",
        "help": "The total number of received alerts that were invalid.",
        "unit": ""
      }
    ],
    "alertmanager_alerts_received_total": [
      {
        "type": "counter",
        "help": "The total number of received alerts.",
        "unit": ""
      }
    ],
    "alertmanager_build_info": [
      {
        "type": "gauge",
        "help": "A metric with a constant '1' value labeled by version, revision, branch, and goversion from which alertmanager was built.",
        "unit": ""
      }
    ],
    "alertmanager_cluster_enabled": [
      {
        "type": "gauge",
        "help": "Indicates whether the clustering is enabled or not.",
        "unit": ""
      }
    ],
    "alertmanager_config_hash": [
      {
        "type": "gauge",
        "help": "Hash of the currently loaded alertmanager configuration.",
        "unit": ""
      }
    ],
    "alertmanager_config_last_reload_success_timestamp_seconds": [
      {
        "type": "gauge",
        "help": "Timestamp of the last successful configuration reload.",
        "unit": ""
      }
    ],
    "alertmanager_config_last_reload_successful": [
      {
        "type": "gauge",
        "help": "Whether the last configuration reload attempt was successful.",
        "unit": ""
      }
    ],
    "alertmanager_dispatcher_aggregation_groups": [
      {
        "type": "gauge",
        "help": "Number of active aggregation groups",
        "unit": ""
      }
    ],
    "alertmanager_dispatcher_alert_processing_duration_seconds": [
      {
        "type": "summary",
        "help": "Summary of latencies for the processing of alerts.",
        "unit": ""
      }
    ],
    "alertmanager_http_concurrency_limit_exceeded_total": [
      {
        "type": "counter",
        "help": "Total number of times an HTTP request failed because the concurrency limit was reached.",
        "unit": ""
      }
    ],
    "alertmanager_http_request_duration_seconds": [
      {
        "type": "histogram",
        "help": "Histogram of latencies for HTTP requests.",
        "unit": ""
      }
    ]
  }
}

22  web/ui/module/codemirror-promql/src/test/metric_name.json  (Normal file)
@@ -0,0 +1,22 @@
{
  "status": "success",
  "data": [
    "ALERTS",
    "ALERTS_FOR_STATE",
    "alertmanager_alerts",
    "alertmanager_alerts_invalid_total",
    "alertmanager_alerts_received_total",
    "alertmanager_build_info",
    "alertmanager_cluster_enabled",
    "alertmanager_config_hash",
    "alertmanager_config_last_reload_success_timestamp_seconds",
    "alertmanager_config_last_reload_successful",
    "alertmanager_dispatcher_aggregation_groups",
    "alertmanager_dispatcher_alert_processing_duration_seconds_count",
    "alertmanager_dispatcher_alert_processing_duration_seconds_sum",
    "alertmanager_http_concurrency_limit_exceeded_total",
    "alertmanager_http_request_duration_seconds_bucket",
    "alertmanager_http_request_duration_seconds_count",
    "alertmanager_http_request_duration_seconds_sum"
  ]
}

144  web/ui/module/codemirror-promql/src/test/utils.ts  (Normal file)
@@ -0,0 +1,144 @@
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import { parser } from 'lezer-promql';
import { EditorState } from '@codemirror/state';
import { LezerLanguage } from '@codemirror/language';
import nock from 'nock';

// used to inject an implementation of fetch in NodeJS
require('isomorphic-fetch');

const lightPromQLSyntax = LezerLanguage.define({ parser: parser });

export function createEditorState(expr: string): EditorState {
  return EditorState.create({
    doc: expr,
    extensions: lightPromQLSyntax,
  });
}

export function mockPrometheusServer() {
  nock('http://localhost:8080')
    .get('/api/v1/label/__name__/values')
    .query(true)
    .replyWithFile(200, __dirname + '/metric_name.json')
    .get('/api/v1/metadata')
    .replyWithFile(200, __dirname + '/metadata.json')
    .get('/api/v1/series')
    .query(true)
    .replyWithFile(200, __dirname + '/alertmanager_alerts_series.json')
    .post('/api/v1/series')
    .replyWithFile(200, __dirname + '/alertmanager_alerts_series.json');
}

export const mockedMetricsTerms = [
  {
    label: 'ALERTS',
    type: 'constant',
  },
  {
    label: 'ALERTS_FOR_STATE',
    type: 'constant',
  },
  {
    detail: 'gauge',
    info: 'How many alerts by state.',
    label: 'alertmanager_alerts',
    type: 'constant',
  },
  {
    detail: 'counter',
    info: 'The total number of received alerts that were invalid.',
    label: 'alertmanager_alerts_invalid_total',
    type: 'constant',
  },
  {
    detail: 'counter',
    info: 'The total number of received alerts.',
    label: 'alertmanager_alerts_received_total',
    type: 'constant',
  },
  {
    detail: 'gauge',
    info: "A metric with a constant '1' value labeled by version, revision, branch, and goversion from which alertmanager was built.",
    label: 'alertmanager_build_info',
    type: 'constant',
  },
  {
    detail: 'gauge',
    info: 'Indicates whether the clustering is enabled or not.',
    label: 'alertmanager_cluster_enabled',
    type: 'constant',
  },
  {
    detail: 'gauge',
    info: 'Hash of the currently loaded alertmanager configuration.',
    label: 'alertmanager_config_hash',
    type: 'constant',
  },
  {
    detail: 'gauge',
    info: 'Timestamp of the last successful configuration reload.',
    label: 'alertmanager_config_last_reload_success_timestamp_seconds',
    type: 'constant',
  },
  {
    detail: 'gauge',
    info: 'Whether the last configuration reload attempt was successful.',
    label: 'alertmanager_config_last_reload_successful',
    type: 'constant',
  },
  {
    detail: 'gauge',
    info: 'Number of active aggregation groups',
    label: 'alertmanager_dispatcher_aggregation_groups',
    type: 'constant',
  },
  {
    detail: 'counter',
    info: 'The total number of observations for: Summary of latencies for the processing of alerts.',
    label: 'alertmanager_dispatcher_alert_processing_duration_seconds_count',
    type: 'constant',
  },
  {
    detail: 'counter',
    info: 'The total sum of observations for: Summary of latencies for the processing of alerts.',
    label: 'alertmanager_dispatcher_alert_processing_duration_seconds_sum',
    type: 'constant',
  },
  {
    detail: 'counter',
    info: 'Total number of times an HTTP request failed because the concurrency limit was reached.',
    label: 'alertmanager_http_concurrency_limit_exceeded_total',
    type: 'constant',
  },
  {
    detail: 'counter',
    info: 'The total count of observations for a bucket in the histogram: Histogram of latencies for HTTP requests.',
    label: 'alertmanager_http_request_duration_seconds_bucket',
    type: 'constant',
  },
  {
    detail: 'counter',
    info: 'The total number of observations for: Histogram of latencies for HTTP requests.',
    label: 'alertmanager_http_request_duration_seconds_count',
    type: 'constant',
  },
  {
    detail: 'counter',
    info: 'The total sum of observations for: Histogram of latencies for HTTP requests.',
    label: 'alertmanager_http_request_duration_seconds_sum',
    type: 'constant',
  },
];
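
A minimal test sketch, assuming a Jest-style test file in the same directory; the expression and the way the state is consumed afterwards are illustrative only:

// Hypothetical test setup (not part of the diff):
mockPrometheusServer(); // nock intercepts the Prometheus HTTP API on localhost:8080
const state = createEditorState('alertmanager_alerts{env="demo"}');
// `state` can then be handed to whatever completion or lint strategy the test exercises,
// and `mockedMetricsTerms` provides the expected metric-name completion items.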

25  web/ui/module/codemirror-promql/tsconfig.json  (Normal file)
@@ -0,0 +1,25 @@
{
  "compileOnSave": false,
  "compilerOptions": {
    "target": "ES2015",
    "module": "commonjs",
    "lib": [
      "es6",
      "dom"
    ],
    "declaration": true,
    "outDir": "lib",
    "strict": true,
    "sourceMap": true,
    "moduleResolution": "node",
    "esModuleInterop": true,
    "allowSyntheticDefaultImports": true
  },
  "include": [
    "src/lang-promql"
  ],
  "exclude": [
    "node_modules",
    "src/**/*.test.ts"
  ]
}

40  web/ui/module/codemirror-promql/webpack.config.cjs  (Normal file)
@@ -0,0 +1,40 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires
const path = require('path');
// eslint-disable-next-line @typescript-eslint/no-var-requires
const HtmlWebpackPlugin = require('html-webpack-plugin');
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { CleanWebpackPlugin } = require('clean-webpack-plugin');

module.exports = {
  mode: 'development',
  entry: path.join(__dirname, '/src/app/app.ts'),
  output: {
    filename: '[name].bundle.js',
    path: path.resolve(__dirname, 'dist'),
  },
  devtool: 'inline-source-map',
  module: {
    rules: [
      {
        test: /\.tsx?$/,
        loader: 'ts-loader',
        exclude: /node_modules/,
      },
    ],
  },
  plugins: [
    new CleanWebpackPlugin({ cleanStaleWebpackAssets: false }),
    new HtmlWebpackPlugin({
      hash: true,
      filename: 'index.html', // relative to root of the application
      path: path.resolve(__dirname, 'dist'),
      template: './src/app/app.html',
    }),
  ],
  resolve: {
    extensions: ['.tsx', '.ts', '.js'],
  },
  devServer: {
    contentBase: './dist',
  },
};