Mirror of https://github.com/prometheus/prometheus.git (synced 2024-12-25 05:34:05 -08:00)

Commit a9008f5423: Merge branch 'main' into sparsehistogram
@@ -116,6 +116,7 @@ This vulnerability has been reported by Aaron Devaney from MDSec.
 
 ## 2.27.0 / 2021-05-12
 
+* [CHANGE] Remote write: Metric `prometheus_remote_storage_samples_bytes_total` renamed to `prometheus_remote_storage_bytes_total`. #8296
 * [FEATURE] Promtool: Retroactive rule evaluation functionality. #7675
 * [FEATURE] Configuration: Environment variable expansion for external labels. Behind `--enable-feature=expand-external-labels` flag. #8649
 * [FEATURE] TSDB: Add a flag(`--storage.tsdb.max-block-chunk-segment-size`) to control the max chunks file size of the blocks for small Prometheus instances. #8478
@@ -55,10 +55,10 @@ Prometheus will now be reachable at http://localhost:9090/.
 
 ### Building from source
 
-To build Prometheus from source code, first ensure that you have a working
-Go environment with [version 1.14 or greater installed](https://golang.org/doc/install).
-You also need [Node.js](https://nodejs.org/) and [npm](https://www.npmjs.com/)
-installed in order to build the frontend assets.
+To build Prometheus from source code, You need:
+* Go [version 1.14 or greater](https://golang.org/doc/install).
+* NodeJS [version 16 or greater](https://nodejs.org/).
+* npm [version 7 or greater](https://www.npmjs.com/).
 
 You can directly use the `go` tool to download and install the `prometheus`
 and `promtool` binaries into your `GOPATH`:
RELEASE.md (21 changed lines)

@@ -95,24 +95,13 @@ git commit -m "Update dependencies"
 
 #### Updating React dependencies
 
-Either upgrade the dependencies within their existing version constraints as specified in the `package.json` file (see https://docs.npmjs.com/files/package.json#dependencies):
+The React application recently moved to a monorepo system with multiple internal npm packages. Dependency upgrades are
+quite sensitive for the time being and should be done manually with caution.
 
-```
-cd web/ui/react-app
-npm update
-git add package.json package-lock.json
-```
+When you want to update a dependency, you have to go to every internal npm package where the dependency is used and
+manually change the version. Once you have taken care of that, you need to go back to `web/ui` and run `npm install`
 
-Or alternatively, update all dependencies to their latest major versions. This is potentially more disruptive and will require more follow-up fixes, but should be done from time to time (use your best judgement):
+**NOTE**: We are researching ways to automate and improve this.
 
-```
-cd web/ui/react-app
-npx npm-check-updates -u
-npm install
-git add package.json package-lock.json
-```
-
-You can find more details on managing npm dependencies and updates [in this blog post](https://www.carlrippon.com/upgrading-npm-dependencies/).
-
 ### 1. Prepare your release
 
@@ -147,12 +147,18 @@ func (importer *ruleImporter) importRule(ctx context.Context, ruleExpr, ruleName
 	matrix = val.(model.Matrix)
 
 	for _, sample := range matrix {
-		lb := labels.NewBuilder(ruleLabels)
+		lb := labels.NewBuilder(labels.Labels{})
 
 		for name, value := range sample.Metric {
 			lb.Set(string(name), string(value))
 		}
 
+		// Setting the rule labels after the output of the query,
+		// so they can override query output.
+		for _, l := range ruleLabels {
+			lb.Set(l.Name, l.Value)
+		}
+
 		lb.Set(labels.MetricName, ruleName)
 
 		for _, value := range sample.Values {
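The reordering above means the rule's own labels are written into the builder after the query output's labels, so on a name clash the rule label wins. A standalone sketch of that precedence, using plain maps as an analogy rather than the `labels.Builder` API:

```go
package main

import "fmt"

func main() {
	// Labels coming back from the query API, including __name__.
	queryOutput := map[string]string{"__name__": "override-me-too", "name1": "override-me"}
	// Labels declared on the recording rule.
	ruleLabels := map[string]string{"name1": "value-from-rule"}

	// Apply query output first, then rule labels, so rule labels win on conflict.
	merged := map[string]string{}
	for k, v := range queryOutput {
		merged[k] = v
	}
	for k, v := range ruleLabels {
		merged[k] = v
	}
	merged["__name__"] = "rulename" // the rule name always wins

	fmt.Println(merged) // map[__name__:rulename name1:value-from-rule]
}
```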
@@ -207,3 +207,67 @@ func createMultiRuleTestFiles(path string) error {
 `
 	return ioutil.WriteFile(path, []byte(recordingRules), 0777)
 }
+
+// TestBackfillLabels confirms that the labels in the rule file override the labels from the metrics
+// received from Prometheus Query API, including the __name__ label.
+func TestBackfillLabels(t *testing.T) {
+	tmpDir, err := ioutil.TempDir("", "backfilldata")
+	require.NoError(t, err)
+	defer func() {
+		require.NoError(t, os.RemoveAll(tmpDir))
+	}()
+	ctx := context.Background()
+
+	start := time.Date(2009, time.November, 10, 6, 34, 0, 0, time.UTC)
+	mockAPISamples := []*model.SampleStream{
+		{
+			Metric: model.Metric{"name1": "override-me", "__name__": "override-me-too"},
+			Values: []model.SamplePair{{Timestamp: model.TimeFromUnixNano(start.UnixNano()), Value: 123}},
+		},
+	}
+	ruleImporter, err := newTestRuleImporter(ctx, start, tmpDir, mockAPISamples)
+	require.NoError(t, err)
+
+	path := filepath.Join(tmpDir, "test.file")
+	recordingRules := `groups:
+- name: group0
+  rules:
+  - record: rulename
+    expr: ruleExpr
+    labels:
+      name1: value-from-rule
+`
+	require.NoError(t, ioutil.WriteFile(path, []byte(recordingRules), 0777))
+	errs := ruleImporter.loadGroups(ctx, []string{path})
+	for _, err := range errs {
+		require.NoError(t, err)
+	}
+
+	errs = ruleImporter.importAll(ctx)
+	for _, err := range errs {
+		require.NoError(t, err)
+	}
+
+	opts := tsdb.DefaultOptions()
+	opts.AllowOverlappingBlocks = true
+	db, err := tsdb.Open(tmpDir, nil, nil, opts, nil)
+	require.NoError(t, err)
+
+	q, err := db.Querier(context.Background(), math.MinInt64, math.MaxInt64)
+	require.NoError(t, err)
+
+	t.Run("correct-labels", func(t *testing.T) {
+		selectedSeries := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))
+		for selectedSeries.Next() {
+			series := selectedSeries.At()
+			expectedLabels := labels.Labels{
+				labels.Label{Name: "__name__", Value: "rulename"},
+				labels.Label{Name: "name1", Value: "value-from-rule"},
+			}
+			require.Equal(t, expectedLabels, series.Labels())
+		}
+		require.NoError(t, selectedSeries.Err())
+		require.NoError(t, q.Close())
+		require.NoError(t, db.Close())
+	})
+}
@@ -49,6 +49,7 @@ import (
 	"github.com/prometheus/prometheus/discovery/scaleway"
 	"github.com/prometheus/prometheus/discovery/targetgroup"
 	"github.com/prometheus/prometheus/discovery/triton"
+	"github.com/prometheus/prometheus/discovery/uyuni"
 	"github.com/prometheus/prometheus/discovery/xds"
 	"github.com/prometheus/prometheus/discovery/zookeeper"
 	"github.com/prometheus/prometheus/pkg/labels"
@@ -934,6 +935,26 @@ var expectedConf = &Config{
 			},
 		},
 	},
+	{
+		JobName: "uyuni",
+
+		HonorTimestamps:  true,
+		ScrapeInterval:   model.Duration(15 * time.Second),
+		ScrapeTimeout:    DefaultGlobalConfig.ScrapeTimeout,
+		HTTPClientConfig: config.HTTPClientConfig{FollowRedirects: true},
+		MetricsPath:      DefaultScrapeConfig.MetricsPath,
+		Scheme:           DefaultScrapeConfig.Scheme,
+		ServiceDiscoveryConfigs: discovery.Configs{
+			&uyuni.SDConfig{
+				Server:          kubernetesSDHostURL(),
+				Username:        "gopher",
+				Password:        "hole",
+				Entitlement:     "monitoring_entitled",
+				Separator:       ",",
+				RefreshInterval: model.Duration(60 * time.Second),
+			},
+		},
+	},
 	AlertingConfig: AlertingConfig{
 		AlertmanagerConfigs: []*AlertmanagerConfig{
@@ -1018,7 +1039,7 @@ func TestElideSecrets(t *testing.T) {
 	yamlConfig := string(config)
 
 	matches := secretRe.FindAllStringIndex(yamlConfig, -1)
-	require.Equal(t, 15, len(matches), "wrong number of secret matches found")
+	require.Equal(t, 16, len(matches), "wrong number of secret matches found")
 	require.NotContains(t, yamlConfig, "mysecret",
 		"yaml marshal reveals authentication credentials.")
 }
config/testdata/conf.good.yml (6 changed lines, vendored)

@@ -349,6 +349,12 @@ scrape_configs:
 - authorization:
     credentials: abcdef
 
+- job_name: uyuni
+  uyuni_sd_configs:
+  - server: https://localhost:1234
+    username: gopher
+    password: hole
+
 alerting:
   alertmanagers:
   - scheme: https
@@ -34,6 +34,7 @@ import (
 	_ "github.com/prometheus/prometheus/discovery/puppetdb"  // register puppetdb
 	_ "github.com/prometheus/prometheus/discovery/scaleway"  // register scaleway
 	_ "github.com/prometheus/prometheus/discovery/triton"    // register triton
+	_ "github.com/prometheus/prometheus/discovery/uyuni"     // register uyuni
 	_ "github.com/prometheus/prometheus/discovery/xds"       // register xds
 	_ "github.com/prometheus/prometheus/discovery/zookeeper" // register zookeeper
 )
@@ -308,6 +308,14 @@ func (e *Endpoints) buildEndpoints(eps *apiv1.Endpoints) *targetgroup.Group {
 		}
 	}
 
+	v := eps.Labels[apiv1.EndpointsOverCapacity]
+	if v == "truncated" {
+		level.Warn(e.logger).Log("msg", "Number of endpoints in one Endpoints object exceeds 1000 and has been truncated, please use \"role: endpointslice\" instead", "endpoint", eps.Name)
+	}
+	if v == "warning" {
+		level.Warn(e.logger).Log("msg", "Number of endpoints in one Endpoints object exceeds 1000, please use \"role: endpointslice\" instead", "endpoint", eps.Name)
+	}
+
 	// For all seen pods, check all container ports. If they were not covered
 	// by one of the service endpoints, generate targets for them.
 	for _, pe := range seenPods {
discovery/uyuni/uyuni.go (new file, 341 lines)

@@ -0,0 +1,341 @@
+// Copyright 2020 The Prometheus Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package uyuni
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+	"net/url"
+	"path"
+	"strings"
+	"time"
+
+	"github.com/go-kit/log"
+	"github.com/go-kit/log/level"
+	"github.com/kolo/xmlrpc"
+	"github.com/pkg/errors"
+	"github.com/prometheus/common/config"
+	"github.com/prometheus/common/model"
+
+	"github.com/prometheus/prometheus/discovery"
+	"github.com/prometheus/prometheus/discovery/refresh"
+	"github.com/prometheus/prometheus/discovery/targetgroup"
+)
+
+const (
+	uyuniXMLRPCAPIPath = "/rpc/api"
+
+	uyuniMetaLabelPrefix     = model.MetaLabelPrefix + "uyuni_"
+	uyuniLabelMinionHostname = uyuniMetaLabelPrefix + "minion_hostname"
+	uyuniLabelPrimaryFQDN    = uyuniMetaLabelPrefix + "primary_fqdn"
+	uyuniLablelSystemID      = uyuniMetaLabelPrefix + "system_id"
+	uyuniLablelGroups        = uyuniMetaLabelPrefix + "groups"
+	uyuniLablelEndpointName  = uyuniMetaLabelPrefix + "endpoint_name"
+	uyuniLablelExporter      = uyuniMetaLabelPrefix + "exporter"
+	uyuniLabelProxyModule    = uyuniMetaLabelPrefix + "proxy_module"
+	uyuniLabelMetricsPath    = uyuniMetaLabelPrefix + "metrics_path"
+	uyuniLabelScheme         = uyuniMetaLabelPrefix + "scheme"
+)
+
+// DefaultSDConfig is the default Uyuni SD configuration.
+var DefaultSDConfig = SDConfig{
+	Entitlement:     "monitoring_entitled",
+	Separator:       ",",
+	RefreshInterval: model.Duration(1 * time.Minute),
+}
+
+func init() {
+	discovery.RegisterConfig(&SDConfig{})
+}
+
+// SDConfig is the configuration for Uyuni based service discovery.
+type SDConfig struct {
+	Server           config.URL              `yaml:"server"`
+	Username         string                  `yaml:"username"`
+	Password         config.Secret           `yaml:"password"`
+	HTTPClientConfig config.HTTPClientConfig `yaml:",inline"`
+	Entitlement      string                  `yaml:"entitlement,omitempty"`
+	Separator        string                  `yaml:"separator,omitempty"`
+	RefreshInterval  model.Duration          `yaml:"refresh_interval,omitempty"`
+}
+
+// Uyuni API Response structures
+type systemGroupID struct {
+	GroupID   int    `xmlrpc:"id"`
+	GroupName string `xmlrpc:"name"`
+}
+
+type networkInfo struct {
+	SystemID    int    `xmlrpc:"system_id"`
+	Hostname    string `xmlrpc:"hostname"`
+	PrimaryFQDN string `xmlrpc:"primary_fqdn"`
+	IP          string `xmlrpc:"ip"`
+}
+
+type endpointInfo struct {
+	SystemID     int    `xmlrpc:"system_id"`
+	EndpointName string `xmlrpc:"endpoint_name"`
+	Port         int    `xmlrpc:"port"`
+	Path         string `xmlrpc:"path"`
+	Module       string `xmlrpc:"module"`
+	ExporterName string `xmlrpc:"exporter_name"`
+	TLSEnabled   bool   `xmlrpc:"tls_enabled"`
+}
+
+// Discovery periodically performs Uyuni API requests. It implements the Discoverer interface.
+type Discovery struct {
+	*refresh.Discovery
+	apiURL       *url.URL
+	roundTripper http.RoundTripper
+	username     string
+	password     string
+	entitlement  string
+	separator    string
+	interval     time.Duration
+	logger       log.Logger
+}
+
+// Name returns the name of the Config.
+func (*SDConfig) Name() string { return "uyuni" }
+
+// NewDiscoverer returns a Discoverer for the Config.
+func (c *SDConfig) NewDiscoverer(opts discovery.DiscovererOptions) (discovery.Discoverer, error) {
+	return NewDiscovery(c, opts.Logger)
+}
+
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
+func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
+	*c = DefaultSDConfig
+	type plain SDConfig
+	err := unmarshal((*plain)(c))
+
+	if err != nil {
+		return err
+	}
+	if c.Server.URL == nil {
+		return errors.New("Uyuni SD configuration requires server host")
+	}
+
+	_, err = url.Parse(c.Server.String())
+	if err != nil {
+		return errors.Wrap(err, "Uyuni Server URL is not valid")
+	}
+
+	if c.Username == "" {
+		return errors.New("Uyuni SD configuration requires a username")
+	}
+	if c.Password == "" {
+		return errors.New("Uyuni SD configuration requires a password")
+	}
+	return nil
+}
+
+// Attempt to login in Uyuni Server and get an auth token
+func login(rpcclient *xmlrpc.Client, user string, pass string) (string, error) {
+	var result string
+	err := rpcclient.Call("auth.login", []interface{}{user, pass}, &result)
+	return result, err
+}
+
+// Logout from Uyuni API
+func logout(rpcclient *xmlrpc.Client, token string) error {
+	return rpcclient.Call("auth.logout", token, nil)
+}
+
+// Get the system groups information of monitored clients
+func getSystemGroupsInfoOfMonitoredClients(rpcclient *xmlrpc.Client, token string, entitlement string) (map[int][]systemGroupID, error) {
+	var systemGroupsInfos []struct {
+		SystemID     int             `xmlrpc:"id"`
+		SystemGroups []systemGroupID `xmlrpc:"system_groups"`
+	}
+
+	err := rpcclient.Call("system.listSystemGroupsForSystemsWithEntitlement", []interface{}{token, entitlement}, &systemGroupsInfos)
+	if err != nil {
+		return nil, err
+	}
+
+	result := make(map[int][]systemGroupID)
+	for _, systemGroupsInfo := range systemGroupsInfos {
+		result[systemGroupsInfo.SystemID] = systemGroupsInfo.SystemGroups
+	}
+	return result, nil
+}
+
+// GetSystemNetworkInfo lists client FQDNs.
+func getNetworkInformationForSystems(rpcclient *xmlrpc.Client, token string, systemIDs []int) (map[int]networkInfo, error) {
+	var networkInfos []networkInfo
+	err := rpcclient.Call("system.getNetworkForSystems", []interface{}{token, systemIDs}, &networkInfos)
+	if err != nil {
+		return nil, err
+	}
+
+	result := make(map[int]networkInfo)
+	for _, networkInfo := range networkInfos {
+		result[networkInfo.SystemID] = networkInfo
+	}
+	return result, nil
+}
+
+// Get endpoints information for given systems
+func getEndpointInfoForSystems(
+	rpcclient *xmlrpc.Client,
+	token string,
+	systemIDs []int,
+) ([]endpointInfo, error) {
+	var endpointInfos []endpointInfo
+	err := rpcclient.Call(
+		"system.monitoring.listEndpoints",
+		[]interface{}{token, systemIDs}, &endpointInfos)
+	if err != nil {
+		return nil, err
+	}
+	return endpointInfos, err
+}
+
+// NewDiscovery returns a uyuni discovery for the given configuration.
+func NewDiscovery(conf *SDConfig, logger log.Logger) (*Discovery, error) {
+	// Copy the configured server URL so the config's URL is not mutated below.
+	apiURL := &url.URL{}
+	*apiURL = *conf.Server.URL
+	apiURL.Path = path.Join(apiURL.Path, uyuniXMLRPCAPIPath)
+
+	rt, err := config.NewRoundTripperFromConfig(conf.HTTPClientConfig, "uyuni_sd", config.WithHTTP2Disabled())
+	if err != nil {
+		return nil, err
+	}
+
+	d := &Discovery{
+		apiURL:       apiURL,
+		roundTripper: rt,
+		username:     conf.Username,
+		password:     string(conf.Password),
+		entitlement:  conf.Entitlement,
+		separator:    conf.Separator,
+		interval:     time.Duration(conf.RefreshInterval),
+		logger:       logger,
+	}
+
+	d.Discovery = refresh.NewDiscovery(
+		logger,
+		"uyuni",
+		time.Duration(conf.RefreshInterval),
+		d.refresh,
+	)
+	return d, nil
+}
+
+func (d *Discovery) getEndpointLabels(
+	endpoint endpointInfo,
+	systemGroupIDs []systemGroupID,
+	networkInfo networkInfo,
+) model.LabelSet {
+
+	var addr, scheme string
+	managedGroupNames := getSystemGroupNames(systemGroupIDs)
+	addr = fmt.Sprintf("%s:%d", networkInfo.Hostname, endpoint.Port)
+	if endpoint.TLSEnabled {
+		scheme = "https"
+	} else {
+		scheme = "http"
+	}
+
+	result := model.LabelSet{
+		model.AddressLabel:       model.LabelValue(addr),
+		uyuniLabelMinionHostname: model.LabelValue(networkInfo.Hostname),
+		uyuniLabelPrimaryFQDN:    model.LabelValue(networkInfo.PrimaryFQDN),
+		uyuniLablelSystemID:      model.LabelValue(fmt.Sprintf("%d", endpoint.SystemID)),
+		uyuniLablelGroups:        model.LabelValue(strings.Join(managedGroupNames, d.separator)),
+		uyuniLablelEndpointName:  model.LabelValue(endpoint.EndpointName),
+		uyuniLablelExporter:      model.LabelValue(endpoint.ExporterName),
+		uyuniLabelProxyModule:    model.LabelValue(endpoint.Module),
+		uyuniLabelMetricsPath:    model.LabelValue(endpoint.Path),
+		uyuniLabelScheme:         model.LabelValue(scheme),
+	}
+
+	return result
+}
+
+func getSystemGroupNames(systemGroupsIDs []systemGroupID) []string {
+	managedGroupNames := make([]string, 0, len(systemGroupsIDs))
+	for _, systemGroupInfo := range systemGroupsIDs {
+		managedGroupNames = append(managedGroupNames, systemGroupInfo.GroupName)
+	}
+
+	return managedGroupNames
+}
+
+func (d *Discovery) getTargetsForSystems(
+	rpcClient *xmlrpc.Client,
+	token string,
+	entitlement string,
+) ([]model.LabelSet, error) {
+
+	result := make([]model.LabelSet, 0)
+
+	systemGroupIDsBySystemID, err := getSystemGroupsInfoOfMonitoredClients(rpcClient, token, entitlement)
+	if err != nil {
+		return nil, errors.Wrap(err, "unable to get the managed system groups information of monitored clients")
+	}
+
+	systemIDs := make([]int, 0, len(systemGroupIDsBySystemID))
+	for systemID := range systemGroupIDsBySystemID {
+		systemIDs = append(systemIDs, systemID)
+	}
+
+	endpointInfos, err := getEndpointInfoForSystems(rpcClient, token, systemIDs)
+	if err != nil {
+		return nil, errors.Wrap(err, "unable to get endpoints information")
+	}
+
+	networkInfoBySystemID, err := getNetworkInformationForSystems(rpcClient, token, systemIDs)
+	if err != nil {
+		return nil, errors.Wrap(err, "unable to get the systems network information")
+	}
+
+	for _, endpoint := range endpointInfos {
+		systemID := endpoint.SystemID
+		labels := d.getEndpointLabels(
+			endpoint,
+			systemGroupIDsBySystemID[systemID],
+			networkInfoBySystemID[systemID])
+		result = append(result, labels)
+	}
+
+	return result, nil
+}
+
+func (d *Discovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) {
+	rpcClient, err := xmlrpc.NewClient(d.apiURL.String(), d.roundTripper)
+	if err != nil {
+		return nil, err
+	}
+	defer rpcClient.Close()
+
+	token, err := login(rpcClient, d.username, d.password)
+	if err != nil {
+		return nil, errors.Wrap(err, "unable to login to Uyuni API")
+	}
+	defer func() {
+		if err := logout(rpcClient, token); err != nil {
+			level.Debug(d.logger).Log("msg", "Failed to log out from Uyuni API", "err", err)
+		}
+	}()
+
+	targetsForSystems, err := d.getTargetsForSystems(rpcClient, token, d.entitlement)
+	if err != nil {
+		return nil, err
+	}
+
+	return []*targetgroup.Group{{Targets: targetsForSystems, Source: d.apiURL.String()}}, nil
+}
@@ -288,6 +288,10 @@ serverset_sd_configs:
 triton_sd_configs:
   [ - <triton_sd_config> ... ]
 
+# List of Uyuni service discovery configurations.
+uyuni_sd_configs:
+  [ - <uyuni_sd_config> ... ]
+
 # List of labeled statically configured targets for this job.
 static_configs:
   [ - <static_config> ... ]
@@ -2256,6 +2260,79 @@
   [ <tls_config> ]
 ```
 
+### `<uyuni_sd_config>`
+
+Uyuni SD configurations allow retrieving scrape targets from managed systems
+via [Uyuni](https://www.uyuni-project.org/) API.
+
+The following meta labels are available on targets during [relabeling](#relabel_config):
+
+* `__meta_uyuni_endpoint_name`: the name of the application endpoint
+* `__meta_uyuni_exporter`: the exporter exposing metrics for the target
+* `__meta_uyuni_groups`: the system groups of the target
+* `__meta_uyuni_metrics_path`: metrics path for the target
+* `__meta_uyuni_minion_hostname`: hostname of the Uyuni client
+* `__meta_uyuni_primary_fqdn`: primary FQDN of the Uyuni client
+* `__meta_uyuni_proxy_module`: the module name if _Exporter Exporter_ proxy is
+  configured for the target
+* `__meta_uyuni_scheme`: the protocol scheme used for requests
+* `__meta_uyuni_system_id`: the system ID of the client
+
+See below for the configuration options for Uyuni discovery:
+
+```yaml
+# The URL to connect to the Uyuni server.
+server: <string>
+
+# Credentials are used to authenticate the requests to Uyuni API.
+username: <string>
+password: <secret>
+
+# The entitlement string to filter eligible systems.
+[ entitlement: <string> | default = monitoring_entitled ]
+
+# The string by which Uyuni group names are joined into the groups label.
+[ separator: <string> | default = , ]
+
+# Refresh interval to re-read the managed targets list.
+[ refresh_interval: <duration> | default = 60s ]
+
+# Optional HTTP basic authentication information, currently not supported by Uyuni.
+basic_auth:
+  [ username: <string> ]
+  [ password: <secret> ]
+  [ password_file: <string> ]
+
+# Optional `Authorization` header configuration, currently not supported by Uyuni.
+authorization:
+  # Sets the authentication type.
+  [ type: <string> | default: Bearer ]
+  # Sets the credentials. It is mutually exclusive with
+  # `credentials_file`.
+  [ credentials: <secret> ]
+  # Sets the credentials to the credentials read from the configured file.
+  # It is mutually exclusive with `credentials`.
+  [ credentials_file: <filename> ]
+
+# Optional OAuth 2.0 configuration, currently not supported by Uyuni.
+# Cannot be used at the same time as basic_auth or authorization.
+oauth2:
+  [ <oauth2> ]
+
+# Optional proxy URL.
+[ proxy_url: <string> ]
+
+# Configure whether HTTP requests follow HTTP 3xx redirects.
+[ follow_redirects: <bool> | default = true ]
+
+# TLS configuration.
+tls_config:
+  [ <tls_config> ]
+```
+
+See [the Prometheus uyuni-sd configuration file](/documentation/examples/prometheus-uyuni.yml)
+for a practical example on how to set up Uyuni Prometheus configuration.
+
 ### `<static_config>`
 
 A `static_config` allows specifying a list of targets and a common label set
@@ -2518,6 +2595,10 @@ serverset_sd_configs:
 triton_sd_configs:
   [ - <triton_sd_config> ... ]
 
+# List of Uyuni service discovery configurations.
+uyuni_sd_configs:
+  [ - <uyuni_sd_config> ... ]
+
 # List of labeled statically configured Alertmanagers.
 static_configs:
   [ - <static_config> ... ]
documentation/examples/prometheus-uyuni.yml (new file, 36 lines)

@@ -0,0 +1,36 @@
+# A example scrape configuration for running Prometheus with Uyuni.
+
+scrape_configs:
+
+  # Make Prometheus scrape itself for metrics.
+  - job_name: 'prometheus'
+    static_configs:
+      - targets: ['localhost:9090']
+
+  # Discover Uyuni managed targets to scrape.
+  - job_name: 'uyuni'
+
+    # Scrape Uyuni itself to discover new services.
+    uyuni_sd_configs:
+      - server: http://uyuni-project.org
+        username: gopher
+        password: hole
+    relabel_configs:
+      - source_labels: [__meta_uyuni_exporter]
+        target_label: exporter
+      - source_labels: [__meta_uyuni_groups]
+        target_label: groups
+      - source_labels: [__meta_uyuni_minion_hostname]
+        target_label: hostname
+      - source_labels: [__meta_uyuni_primary_fqdn]
+        regex: (.+)
+        target_label: hostname
+      - source_labels: [hostname, __address__]
+        regex: (.*);.*:(.*)
+        replacement: ${1}:${2}
+        target_label: __address__
+      - source_labels: [__meta_uyuni_metrics_path]
+        regex: (.+)
+        target_label: __metrics_path__
+      - source_labels: [__meta_uyuni_proxy_module]
+        target_label: __param_module
go.mod (1 changed line)

@@ -33,6 +33,7 @@ require (
 	github.com/hetznercloud/hcloud-go v1.32.0
 	github.com/influxdata/influxdb v1.9.3
 	github.com/json-iterator/go v1.1.11
+	github.com/kolo/xmlrpc v0.0.0-20201022064351-38db28db192b
 	github.com/linode/linodego v0.32.0
 	github.com/miekg/dns v1.1.43
 	github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 // indirect
go.sum (2 changed lines)

@@ -886,6 +886,8 @@ github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdY
 github.com/klauspost/cpuid v0.0.0-20170728055534-ae7887de9fa5/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
 github.com/klauspost/crc32 v0.0.0-20161016154125-cb6bfca970f6/go.mod h1:+ZoRqAPRLkC4NPOvfYeR5KNOrY6TD+/sAC3HXPZgDYg=
 github.com/klauspost/pgzip v1.0.2-0.20170402124221-0bf5dcad4ada/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
+github.com/kolo/xmlrpc v0.0.0-20201022064351-38db28db192b h1:iNjcivnc6lhbvJA3LD622NPrUponluJrBWPIwGG/3Bg=
+github.com/kolo/xmlrpc v0.0.0-20201022064351-38db28db192b/go.mod h1:pcaDhQK0/NJZEvtCO0qQPPropqV0sJOJ6YW7X+9kRwM=
 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
 github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
 github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@@ -83,7 +83,7 @@ func (g *RuleGroups) Validate(node ruleGroups) (errs []error) {
 		set[g.Name] = struct{}{}
 
 		for i, r := range g.Rules {
-			for _, node := range r.Validate() {
+			for _, node := range g.Rules[i].Validate() {
 				var ruleName yaml.Node
 				if r.Alert.Value != "" {
 					ruleName = r.Alert
@@ -156,5 +156,31 @@ groups:
 		passed := (tst.shouldPass && len(errs) == 0) || (!tst.shouldPass && len(errs) > 0)
 		require.True(t, passed, "Rule validation failed, rule=\n"+tst.ruleString)
 	}
+}
+
+func TestUniqueErrorNodes(t *testing.T) {
+	group := `
+groups:
+- name: example
+  rules:
+  - alert: InstanceDown
+    expr: up ===== 0
+    for: 5m
+    labels:
+      severity: "page"
+    annotations:
+      summary: "Instance {{ $labels.instance }} down"
+  - alert: InstanceUp
+    expr: up ===== 1
+    for: 5m
+    labels:
+      severity: "page"
+    annotations:
+      summary: "Instance {{ $labels.instance }} up"
+`
+	_, errs := Parse([]byte(group))
+	require.Len(t, errs, 2, "Expected two errors")
+	err0 := errs[0].(*Error).Err.node
+	err1 := errs[1].(*Error).Err.node
+	require.NotEqual(t, err0, err1, "Error nodes should not be the same")
 }
@@ -2085,6 +2085,8 @@ func scalarBinop(op parser.ItemType, lhs, rhs float64) float64 {
 		return btos(lhs >= rhs)
 	case parser.LTE:
 		return btos(lhs <= rhs)
+	case parser.ATAN2:
+		return math.Atan2(lhs, rhs)
 	}
 	panic(errors.Errorf("operator %q not allowed for Scalar operations", op))
 }
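The new `parser.ATAN2` scalar case simply delegates to Go's `math.Atan2`. A quick standalone check of the values the new test data further below expects:

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	// atan2(10, 20): the angle of the point (x=20, y=10) in radians.
	fmt.Println(math.Atan2(10, 20)) // ≈ 0.4636476090008061, as in operators.test
	// atan2 propagates NaN, matching the trigNaN test case.
	fmt.Println(math.Atan2(10, math.NaN())) // NaN
}
```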
@@ -2136,6 +2138,7 @@ type groupedAggregation struct {
 func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without bool, param interface{}, vec Vector, seriesHelper []EvalSeriesHelper, enh *EvalNodeHelper) Vector {
 
 	result := map[uint64]*groupedAggregation{}
+	orderedResult := []*groupedAggregation{}
 	var k int64
 	if op == parser.TOPK || op == parser.BOTTOMK {
 		f := param.(float64)
@@ -2204,12 +2207,16 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 			} else {
 				m = metric.WithLabels(grouping...)
 			}
-			result[groupingKey] = &groupedAggregation{
+			newAgg := &groupedAggregation{
 				labels:     m,
 				value:      s.V,
 				mean:       s.V,
 				groupCount: 1,
 			}
+
+			result[groupingKey] = newAgg
+			orderedResult = append(orderedResult, newAgg)
 
 			inputVecLen := int64(len(vec))
 			resultSize := k
 			if k > inputVecLen {
@@ -2331,7 +2338,7 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 	}
 
 	// Construct the result Vector from the aggregated groups.
-	for _, aggr := range result {
+	for _, aggr := range orderedResult {
 		switch op {
 		case parser.AVG:
 			aggr.value = aggr.mean
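The point of `orderedResult` is that Go deliberately randomizes map iteration order, so building the output vector by ranging over the `result` map made the aggregation output order non-deterministic. A standalone illustration of the difference:

```go
package main

import "fmt"

func main() {
	// Go randomizes map iteration order, so ranging over a result map
	// can yield the aggregated groups in a different order on every run.
	result := map[uint64]string{1: "group-a", 2: "group-b", 3: "group-c"}
	for key := range result {
		fmt.Print(key, " ") // order may vary between runs
	}
	fmt.Println()

	// Appending each group to a slice when it is created and ranging over
	// that slice (as orderedResult does) preserves insertion order.
	orderedResult := []string{"group-a", "group-b", "group-c"}
	for _, group := range orderedResult {
		fmt.Print(group, " ") // always group-a group-b group-c
	}
	fmt.Println()
}
```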
promql/testdata/aggregators.test (11 changed lines, vendored)

@@ -497,3 +497,14 @@ eval instant at 1m avg(data{test="-big"})
 
 eval instant at 1m avg(data{test="bigzero"})
   {} 0
+
+clear
+
+# Test that aggregations are deterministic.
+# Commented because it is flaky in range mode.
+#load 10s
+#  up{job="prometheus"} 1
+#  up{job="prometheus2"} 1
+#
+#eval instant at 1m count(topk(1,max(up) without()) == topk(1,max(up) without()) == topk(1,max(up) without()) == topk(1,max(up) without()) == topk(1,max(up) without()))
+#  {} 1
promql/testdata/operators.test (6 changed lines, vendored)

@@ -481,3 +481,9 @@ eval instant at 5m trigy atan2 trigx
 
 eval instant at 5m trigy atan2 trigNaN
   trigy{} NaN
+
+eval instant at 5m 10 atan2 20
+  0.4636476090008061
+
+eval instant at 5m 10 atan2 NaN
+  NaN
@@ -24,6 +24,7 @@ import (
 	"math"
 	"net/http"
 	"reflect"
+	"sort"
 	"strconv"
 	"sync"
 	"time"
@@ -641,23 +642,28 @@ func verifyLabelLimits(lset labels.Labels, limits *labelLimits) error {
 
 func mutateSampleLabels(lset labels.Labels, target *Target, honor bool, rc []*relabel.Config) labels.Labels {
 	lb := labels.NewBuilder(lset)
+	targetLabels := target.Labels()
 
 	if honor {
-		for _, l := range target.Labels() {
+		for _, l := range targetLabels {
 			if !lset.Has(l.Name) {
 				lb.Set(l.Name, l.Value)
 			}
 		}
 	} else {
-		for _, l := range target.Labels() {
-			// existingValue will be empty if l.Name doesn't exist.
+		var conflictingExposedLabels labels.Labels
+		for _, l := range targetLabels {
 			existingValue := lset.Get(l.Name)
 			if existingValue != "" {
-				lb.Set(model.ExportedLabelPrefix+l.Name, existingValue)
+				conflictingExposedLabels = append(conflictingExposedLabels, labels.Label{Name: l.Name, Value: existingValue})
 			}
 			// It is now safe to set the target label.
 			lb.Set(l.Name, l.Value)
 		}
+
+		if len(conflictingExposedLabels) > 0 {
+			resolveConflictingExposedLabels(lb, lset, targetLabels, conflictingExposedLabels)
+		}
 	}
 
 	res := lb.Labels()

@@ -669,6 +675,29 @@ func mutateSampleLabels(lset labels.Labels, target *Target, honor bool, rc []*re
 	return res
 }
 
+func resolveConflictingExposedLabels(lb *labels.Builder, exposedLabels, targetLabels, conflictingExposedLabels labels.Labels) {
+	sort.SliceStable(conflictingExposedLabels, func(i, j int) bool {
+		return len(conflictingExposedLabels[i].Name) < len(conflictingExposedLabels[j].Name)
+	})
+
+	for i, l := range conflictingExposedLabels {
+		newName := l.Name
+		for {
+			newName = model.ExportedLabelPrefix + newName
+			if !exposedLabels.Has(newName) &&
+				!targetLabels.Has(newName) &&
+				!conflictingExposedLabels[:i].Has(newName) {
+				conflictingExposedLabels[i].Name = newName
+				break
+			}
+		}
+	}
+
+	for _, l := range conflictingExposedLabels {
+		lb.Set(l.Name, l.Value)
+	}
+}
+
 func mutateReportSampleLabels(lset labels.Labels, target *Target) labels.Labels {
 	lb := labels.NewBuilder(lset)
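A standalone sketch of the renaming rule `resolveConflictingExposedLabels` applies, reduced to a single set of taken names (the real function works on `labels.Labels` and also checks target labels and the other conflicting labels, shortest names first):

```go
package main

import "fmt"

// resolveName keeps prepending "exported_" until the name no longer collides
// with any name already taken, mirroring the inner loop of
// resolveConflictingExposedLabels in simplified form.
func resolveName(name string, taken map[string]bool) string {
	newName := name
	for {
		newName = "exported_" + newName
		if !taken[newName] {
			return newName
		}
	}
}

func main() {
	// The scrape exposes foo and exported_foo, and the target also sets foo,
	// so the exposed foo must be renamed past both taken names.
	taken := map[string]bool{"foo": true, "exported_foo": true}
	fmt.Println(resolveName("foo", taken)) // exported_exported_foo
}
```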
@@ -1379,6 +1379,81 @@ func TestScrapeLoopAppend(t *testing.T) {
 	}
 }
 
+func TestScrapeLoopAppendForConflictingPrefixedLabels(t *testing.T) {
+	testcases := map[string]struct {
+		targetLabels  []string
+		exposedLabels string
+		expected      []string
+	}{
+		"One target label collides with existing label": {
+			targetLabels:  []string{"foo", "2"},
+			exposedLabels: `metric{foo="1"} 0`,
+			expected:      []string{"__name__", "metric", "exported_foo", "1", "foo", "2"},
+		},
+
+		"One target label collides with existing label, plus target label already with prefix 'exported'": {
+			targetLabels:  []string{"foo", "2", "exported_foo", "3"},
+			exposedLabels: `metric{foo="1"} 0`,
+			expected:      []string{"__name__", "metric", "exported_exported_foo", "1", "exported_foo", "3", "foo", "2"},
+		},
+		"One target label collides with existing label, plus existing label already with prefix 'exported": {
+			targetLabels:  []string{"foo", "3"},
+			exposedLabels: `metric{foo="1" exported_foo="2"} 0`,
+			expected:      []string{"__name__", "metric", "exported_exported_foo", "1", "exported_foo", "2", "foo", "3"},
+		},
+		"One target label collides with existing label, both already with prefix 'exported'": {
+			targetLabels:  []string{"exported_foo", "2"},
+			exposedLabels: `metric{exported_foo="1"} 0`,
+			expected:      []string{"__name__", "metric", "exported_exported_foo", "1", "exported_foo", "2"},
+		},
+		"Two target labels collide with existing labels, both with and without prefix 'exported'": {
+			targetLabels:  []string{"foo", "3", "exported_foo", "4"},
+			exposedLabels: `metric{foo="1" exported_foo="2"} 0`,
+			expected: []string{"__name__", "metric", "exported_exported_foo", "1", "exported_exported_exported_foo",
+				"2", "exported_foo", "4", "foo", "3"},
+		},
+		"Extreme example": {
+			targetLabels:  []string{"foo", "0", "exported_exported_foo", "1", "exported_exported_exported_foo", "2"},
+			exposedLabels: `metric{foo="3" exported_foo="4" exported_exported_exported_foo="5"} 0`,
+			expected: []string{
+				"__name__", "metric",
+				"exported_exported_exported_exported_exported_foo", "5",
+				"exported_exported_exported_exported_foo", "3",
+				"exported_exported_exported_foo", "2",
+				"exported_exported_foo", "1",
+				"exported_foo", "4",
+				"foo", "0",
+			},
+		},
+	}
+
+	for name, tc := range testcases {
+		t.Run(name, func(t *testing.T) {
+			app := &collectResultAppender{}
+			sl := newScrapeLoop(context.Background(), nil, nil, nil,
+				func(l labels.Labels) labels.Labels {
+					return mutateSampleLabels(l, &Target{labels: labels.FromStrings(tc.targetLabels...)}, false, nil)
+				},
+				nil,
+				func(ctx context.Context) storage.Appender { return app }, nil, 0, true, 0, nil, 0, 0, false,
+			)
+			slApp := sl.appender(context.Background())
+			_, _, _, err := sl.append(slApp, []byte(tc.exposedLabels), "", time.Date(2000, 1, 1, 1, 0, 0, 0, time.UTC))
+			require.NoError(t, err)
+
+			require.NoError(t, slApp.Commit())
+
+			require.Equal(t, []sample{
+				{
+					metric: labels.FromStrings(tc.expected...),
+					t:      timestamp.FromTime(time.Date(2000, 1, 1, 1, 0, 0, 0, time.UTC)),
+					v:      0,
+				},
+			}, app.result)
+		})
+	}
+}
+
 func TestScrapeLoopAppendCacheEntryButErrNotFound(t *testing.T) {
 	// collectResultAppender's AddFast always returns ErrNotFound if we don't give it a next.
 	app := &collectResultAppender{}
@@ -432,7 +432,7 @@ func ChainedSeriesMerge(series ...Series) Series {
 			for _, s := range series {
 				iterators = append(iterators, s.Iterator())
 			}
-			return newChainSampleIterator(iterators)
+			return NewChainSampleIterator(iterators)
 		},
 	}
 }
@@ -448,7 +448,10 @@
 	lastt int64
 }
 
-func newChainSampleIterator(iterators []chunkenc.Iterator) chunkenc.Iterator {
+// NewChainSampleIterator returns a single iterator that iterates over the samples from the given iterators in a sorted
+// fashion. If samples overlap, one sample from overlapped ones is kept (randomly) and all others with the same
+// timestamp are dropped.
+func NewChainSampleIterator(iterators []chunkenc.Iterator) chunkenc.Iterator {
 	return &chainSampleIterator{
 		iterators: iterators,
 		h:         nil,
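As an illustration of the behavior the new doc comment describes, here is a toy k-way merge over plain timestamped samples (an analogy only; the real iterator works on `chunkenc.Iterator`s and uses a heap):

```go
package main

import "fmt"

type samplePair struct {
	t int64
	v float64
}

// mergeSorted merges already-sorted series and keeps only one sample per
// timestamp, mirroring how the chained sample iterator handles overlaps.
func mergeSorted(series ...[]samplePair) []samplePair {
	var out []samplePair
	idx := make([]int, len(series))
	for {
		best := -1
		for i, s := range series {
			if idx[i] < len(s) && (best == -1 || s[idx[i]].t < series[best][idx[best]].t) {
				best = i
			}
		}
		if best == -1 {
			return out // every input is exhausted
		}
		next := series[best][idx[best]]
		idx[best]++
		// Drop duplicates: keep only the first sample seen for a timestamp.
		if len(out) == 0 || out[len(out)-1].t != next.t {
			out = append(out, next)
		}
	}
}

func main() {
	a := []samplePair{{0, 0}, {2, 2}}
	b := []samplePair{{1, 1}, {2, 20}, {3, 3}}
	fmt.Println(mergeSorted(a, b)) // [{0 0} {1 1} {2 2} {3 3}]
}
```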
@@ -631,7 +631,7 @@ func TestChainSampleIterator(t *testing.T) {
 			expected: []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}, sample{3, 3}},
 		},
 	} {
-		merged := newChainSampleIterator(tc.input)
+		merged := NewChainSampleIterator(tc.input)
 		actual, err := ExpandSamples(merged, nil)
 		require.NoError(t, err)
 		require.Equal(t, tc.expected, actual)
@@ -677,7 +677,7 @@ func TestChainSampleIteratorSeek(t *testing.T) {
 			expected: []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}, sample{3, 3}},
 		},
 	} {
-		merged := newChainSampleIterator(tc.input)
+		merged := NewChainSampleIterator(tc.input)
 		actual := []tsdbutil.Sample{}
 		if merged.Seek(tc.seek) {
 			t, v := merged.At()
@@ -70,6 +70,22 @@
 	DefaultWriteBufferSize = 4 * 1024 * 1024 // 4 MiB.
 )
 
+// ChunkDiskMapperRef represents the location of a head chunk on disk.
+// The upper 4 bytes hold the index of the head chunk file and
+// the lower 4 bytes hold the byte offset in the head chunk file where the chunk starts.
+type ChunkDiskMapperRef uint64
+
+func newChunkDiskMapperRef(seq, offset uint64) ChunkDiskMapperRef {
+	return ChunkDiskMapperRef((seq << 32) | offset)
+}
+
+func (ref ChunkDiskMapperRef) Unpack() (sgmIndex, chkStart int) {
+	sgmIndex = int(ref >> 32)
+	chkStart = int((ref << 32) >> 32)
+	return sgmIndex, chkStart
+}
+
 // CorruptionErr is an error that's returned when corruption is encountered.
 type CorruptionErr struct {
 	Dir string
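A quick standalone round trip of the packing scheme used by `newChunkDiskMapperRef` and `Unpack`:

```go
package main

import "fmt"

func main() {
	const seq, offset = uint64(3), uint64(123456)

	// Pack: file sequence in the upper 4 bytes, byte offset in the lower 4 bytes.
	ref := (seq << 32) | offset

	// Unpack: shift the two halves back out.
	sgmIndex := int(ref >> 32)
	chkStart := int((ref << 32) >> 32)

	fmt.Println(sgmIndex, chkStart) // 3 123456
}
```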
@@ -272,7 +288,7 @@ func repairLastChunkFile(files map[int]string) (_ map[int]string, returnErr erro
 
 // WriteChunk writes the chunk to the disk.
 // The returned chunk ref is the reference from where the chunk encoding starts for the chunk.
-func (cdm *ChunkDiskMapper) WriteChunk(seriesRef uint64, mint, maxt int64, chk chunkenc.Chunk) (chkRef uint64, err error) {
+func (cdm *ChunkDiskMapper) WriteChunk(seriesRef uint64, mint, maxt int64, chk chunkenc.Chunk) (chkRef ChunkDiskMapperRef, err error) {
 	cdm.writePathMtx.Lock()
 	defer cdm.writePathMtx.Unlock()
@@ -297,9 +313,7 @@ func (cdm *ChunkDiskMapper) WriteChunk(seriesRef uint64, mint, maxt int64, chk c
 	cdm.crc32.Reset()
 	bytesWritten := 0
 
-	// The upper 4 bytes are for the head chunk file index and
-	// the lower 4 bytes are for the head chunk file offset where to start reading this chunk.
-	chkRef = chunkRef(uint64(cdm.curFileSequence), uint64(cdm.curFileSize()))
+	chkRef = newChunkDiskMapperRef(uint64(cdm.curFileSequence), uint64(cdm.curFileSize()))
 
 	binary.BigEndian.PutUint64(cdm.byteBuf[bytesWritten:], seriesRef)
 	bytesWritten += SeriesRefSize
@@ -339,10 +353,6 @@ func (cdm *ChunkDiskMapper) WriteChunk(seriesRef uint64, mint, maxt int64, chk c
 	return chkRef, nil
 }
 
-func chunkRef(seq, offset uint64) (chunkRef uint64) {
-	return (seq << 32) | offset
-}
-
 // shouldCutNewFile decides the cutting of a new file based on time and size retention.
 // Size retention: because depending on the system architecture, there is a limit on how big of a file we can m-map.
 // Time retention: so that we can delete old chunks with some time guarantee in low load environments.
@ -456,28 +466,22 @@ func (cdm *ChunkDiskMapper) flushBuffer() error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Chunk returns a chunk from a given reference.
|
// Chunk returns a chunk from a given reference.
|
||||||
func (cdm *ChunkDiskMapper) Chunk(ref uint64) (chunkenc.Chunk, error) {
|
func (cdm *ChunkDiskMapper) Chunk(ref ChunkDiskMapperRef) (chunkenc.Chunk, error) {
|
||||||
cdm.readPathMtx.RLock()
|
cdm.readPathMtx.RLock()
|
||||||
// We hold this read lock for the entire duration because if the Close()
|
// We hold this read lock for the entire duration because if the Close()
|
||||||
// is called, the data in the byte slice will get corrupted as the mmapped
|
// is called, the data in the byte slice will get corrupted as the mmapped
|
||||||
// file will be closed.
|
// file will be closed.
|
||||||
defer cdm.readPathMtx.RUnlock()
|
defer cdm.readPathMtx.RUnlock()
|
||||||
|
|
||||||
var (
|
|
||||||
// Get the upper 4 bytes.
|
|
||||||
// These contain the head chunk file index.
|
|
||||||
sgmIndex = int(ref >> 32)
|
|
||||||
// Get the lower 4 bytes.
|
|
||||||
// These contain the head chunk file offset where the chunk starts.
|
|
||||||
// We skip the series ref and the mint/maxt beforehand.
|
|
||||||
chkStart = int((ref<<32)>>32) + SeriesRefSize + (2 * MintMaxtSize)
|
|
||||||
chkCRC32 = newCRC32()
|
|
||||||
)
|
|
||||||
|
|
||||||
if cdm.closed {
|
if cdm.closed {
|
||||||
return nil, ErrChunkDiskMapperClosed
|
return nil, ErrChunkDiskMapperClosed
|
||||||
}
|
}
|
||||||
|
|
||||||
|
sgmIndex, chkStart := ref.Unpack()
|
||||||
|
// We skip the series ref and the mint/maxt beforehand.
|
||||||
|
chkStart += SeriesRefSize + (2 * MintMaxtSize)
|
||||||
|
chkCRC32 := newCRC32()
|
||||||
|
|
||||||
// If it is the current open file, then the chunks can be in the buffer too.
|
// If it is the current open file, then the chunks can be in the buffer too.
|
||||||
if sgmIndex == cdm.curFileSequence {
|
if sgmIndex == cdm.curFileSequence {
|
||||||
chunk := cdm.chunkBuffer.get(ref)
|
chunk := cdm.chunkBuffer.get(ref)
|
||||||
|
@ -578,7 +582,7 @@ func (cdm *ChunkDiskMapper) Chunk(ref uint64) (chunkenc.Chunk, error) {
|
||||||
// and runs the provided function on each chunk. It returns on the first error encountered.
|
// and runs the provided function on each chunk. It returns on the first error encountered.
|
||||||
// NOTE: This method needs to be called at least once after creating ChunkDiskMapper
|
// NOTE: This method needs to be called at least once after creating ChunkDiskMapper
|
||||||
// to set the maxt of all the file.
|
// to set the maxt of all the file.
|
||||||
func (cdm *ChunkDiskMapper) IterateAllChunks(f func(seriesRef, chunkRef uint64, mint, maxt int64, numSamples uint16) error) (err error) {
|
func (cdm *ChunkDiskMapper) IterateAllChunks(f func(seriesRef uint64, chunkRef ChunkDiskMapperRef, mint, maxt int64, numSamples uint16) error) (err error) {
|
||||||
cdm.writePathMtx.Lock()
|
cdm.writePathMtx.Lock()
|
||||||
defer cdm.writePathMtx.Unlock()
|
defer cdm.writePathMtx.Unlock()
|
||||||
|
|
||||||
|
@ -623,7 +627,7 @@ func (cdm *ChunkDiskMapper) IterateAllChunks(f func(seriesRef, chunkRef uint64,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
chkCRC32.Reset()
|
chkCRC32.Reset()
|
||||||
chunkRef := chunkRef(uint64(segID), uint64(idx))
|
chunkRef := newChunkDiskMapperRef(uint64(segID), uint64(idx))
|
||||||
|
|
||||||
startIdx := idx
|
startIdx := idx
|
||||||
seriesRef := binary.BigEndian.Uint64(mmapFile.byteSlice.Range(idx, idx+SeriesRefSize))
|
seriesRef := binary.BigEndian.Uint64(mmapFile.byteSlice.Range(idx, idx+SeriesRefSize))
|
||||||
|
@ -826,19 +830,19 @@ const inBufferShards = 128 // 128 is a randomly chosen number.
|
||||||
|
|
||||||
// chunkBuffer is a thread safe buffer for chunks.
|
// chunkBuffer is a thread safe buffer for chunks.
|
||||||
type chunkBuffer struct {
|
type chunkBuffer struct {
|
||||||
inBufferChunks [inBufferShards]map[uint64]chunkenc.Chunk
|
inBufferChunks [inBufferShards]map[ChunkDiskMapperRef]chunkenc.Chunk
|
||||||
inBufferChunksMtxs [inBufferShards]sync.RWMutex
|
inBufferChunksMtxs [inBufferShards]sync.RWMutex
|
||||||
}
|
}
|
||||||
|
|
||||||
func newChunkBuffer() *chunkBuffer {
|
func newChunkBuffer() *chunkBuffer {
|
||||||
cb := &chunkBuffer{}
|
cb := &chunkBuffer{}
|
||||||
for i := 0; i < inBufferShards; i++ {
|
for i := 0; i < inBufferShards; i++ {
|
||||||
cb.inBufferChunks[i] = make(map[uint64]chunkenc.Chunk)
|
cb.inBufferChunks[i] = make(map[ChunkDiskMapperRef]chunkenc.Chunk)
|
||||||
}
|
}
|
||||||
return cb
|
return cb
|
||||||
}
|
}
|
||||||
|
|
||||||
func (cb *chunkBuffer) put(ref uint64, chk chunkenc.Chunk) {
|
func (cb *chunkBuffer) put(ref ChunkDiskMapperRef, chk chunkenc.Chunk) {
|
||||||
shardIdx := ref % inBufferShards
|
shardIdx := ref % inBufferShards
|
||||||
|
|
||||||
cb.inBufferChunksMtxs[shardIdx].Lock()
|
cb.inBufferChunksMtxs[shardIdx].Lock()
|
||||||
|
@ -846,7 +850,7 @@ func (cb *chunkBuffer) put(ref uint64, chk chunkenc.Chunk) {
|
||||||
cb.inBufferChunksMtxs[shardIdx].Unlock()
|
cb.inBufferChunksMtxs[shardIdx].Unlock()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (cb *chunkBuffer) get(ref uint64) chunkenc.Chunk {
|
func (cb *chunkBuffer) get(ref ChunkDiskMapperRef) chunkenc.Chunk {
|
||||||
shardIdx := ref % inBufferShards
|
shardIdx := ref % inBufferShards
|
||||||
|
|
||||||
cb.inBufferChunksMtxs[shardIdx].RLock()
|
cb.inBufferChunksMtxs[shardIdx].RLock()
|
||||||
|
@ -858,7 +862,7 @@ func (cb *chunkBuffer) get(ref uint64) chunkenc.Chunk {
|
||||||
func (cb *chunkBuffer) clear() {
|
func (cb *chunkBuffer) clear() {
|
||||||
for i := 0; i < inBufferShards; i++ {
|
for i := 0; i < inBufferShards; i++ {
|
||||||
cb.inBufferChunksMtxs[i].Lock()
|
cb.inBufferChunksMtxs[i].Lock()
|
||||||
cb.inBufferChunks[i] = make(map[uint64]chunkenc.Chunk)
|
cb.inBufferChunks[i] = make(map[ChunkDiskMapperRef]chunkenc.Chunk)
|
||||||
cb.inBufferChunksMtxs[i].Unlock()
|
cb.inBufferChunksMtxs[i].Unlock()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
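The hunks above replace the raw `uint64` chunk references with a dedicated `ChunkDiskMapperRef` type. The bit layout is unchanged: the upper 4 bytes carry the head chunk file index and the lower 4 bytes carry the chunk's byte offset within that file, which is why expressions like `ref % inBufferShards` keep working for shard selection. A minimal sketch of how such a type packs and unpacks, consistent with the `newChunkDiskMapperRef` and `Unpack` calls in the diff (the exact method bodies here are an assumption, not quoted from the commit):

```go
package chunks

// ChunkDiskMapperRef packs a head chunk file index (upper 4 bytes)
// and the chunk's byte offset within that file (lower 4 bytes).
type ChunkDiskMapperRef uint64

func newChunkDiskMapperRef(seq, offset uint64) ChunkDiskMapperRef {
	return ChunkDiskMapperRef((seq << 32) | offset)
}

// Unpack splits the reference back into file index and in-file offset.
func (ref ChunkDiskMapperRef) Unpack() (sgmIndex, chkStart int) {
	sgmIndex = int(ref >> 32)
	chkStart = int((ref << 32) >> 32)
	return sgmIndex, chkStart
}
```

Because the named type keeps `uint64` as its underlying type, arithmetic such as the shard selection in `chunkBuffer` compiles unchanged; the gain is that signatures no longer conflate series refs and chunk refs.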
@@ -38,7 +38,8 @@ func TestChunkDiskMapper_WriteChunk_Chunk_IterateChunks(t *testing.T) {
 	chkCRC32 := newCRC32()
 
 	type expectedDataType struct {
-		seriesRef, chunkRef uint64
+		seriesRef           uint64
+		chunkRef            ChunkDiskMapperRef
 		mint, maxt          int64
 		numSamples          uint16
 		chunk               chunkenc.Chunk
@@ -69,7 +70,7 @@ func TestChunkDiskMapper_WriteChunk_Chunk_IterateChunks(t *testing.T) {
 
 			// Calculating expected bytes written on disk for first file.
 			firstFileName = hrw.curFile.Name()
-			require.Equal(t, chunkRef(1, nextChunkOffset), chkRef)
+			require.Equal(t, newChunkDiskMapperRef(1, nextChunkOffset), chkRef)
 
 			bytesWritten := 0
 			chkCRC32.Reset()
@@ -132,7 +133,7 @@ func TestChunkDiskMapper_WriteChunk_Chunk_IterateChunks(t *testing.T) {
 	require.NoError(t, err)
 
 	idx := 0
-	require.NoError(t, hrw.IterateAllChunks(func(seriesRef, chunkRef uint64, mint, maxt int64, numSamples uint16) error {
+	require.NoError(t, hrw.IterateAllChunks(func(seriesRef uint64, chunkRef ChunkDiskMapperRef, mint, maxt int64, numSamples uint16) error {
 		t.Helper()
 
 		expData := expectedData[idx]
@@ -220,7 +221,7 @@ func TestChunkDiskMapper_Truncate(t *testing.T) {
 	require.NoError(t, err)
 
 	require.False(t, hrw.fileMaxtSet)
-	require.NoError(t, hrw.IterateAllChunks(func(_, _ uint64, _, _ int64, _ uint16) error { return nil }))
+	require.NoError(t, hrw.IterateAllChunks(func(_ uint64, _ ChunkDiskMapperRef, _, _ int64, _ uint16) error { return nil }))
 	require.True(t, hrw.fileMaxtSet)
 
 	verifyFiles([]int{3, 4, 5, 6, 7, 8})
@@ -334,7 +335,7 @@ func TestHeadReadWriter_TruncateAfterFailedIterateChunks(t *testing.T) {
 	require.NoError(t, err)
 
 	// Forcefully failing IterateAllChunks.
-	require.Error(t, hrw.IterateAllChunks(func(_, _ uint64, _, _ int64, _ uint16) error {
+	require.Error(t, hrw.IterateAllChunks(func(_ uint64, _ ChunkDiskMapperRef, _, _ int64, _ uint16) error {
 		return errors.New("random error")
 	}))
 
@@ -390,7 +391,7 @@ func TestHeadReadWriter_ReadRepairOnEmptyLastFile(t *testing.T) {
 	hrw, err = NewChunkDiskMapper(dir, chunkenc.NewPool(), DefaultWriteBufferSize)
 	require.NoError(t, err)
 	require.False(t, hrw.fileMaxtSet)
-	require.NoError(t, hrw.IterateAllChunks(func(_, _ uint64, _, _ int64, _ uint16) error { return nil }))
+	require.NoError(t, hrw.IterateAllChunks(func(_ uint64, _ ChunkDiskMapperRef, _, _ int64, _ uint16) error { return nil }))
 	require.True(t, hrw.fileMaxtSet)
 
 	// Removed from memory.
@@ -421,7 +422,7 @@ func testChunkDiskMapper(t *testing.T) *ChunkDiskMapper {
 	hrw, err := NewChunkDiskMapper(tmpdir, chunkenc.NewPool(), DefaultWriteBufferSize)
 	require.NoError(t, err)
 	require.False(t, hrw.fileMaxtSet)
-	require.NoError(t, hrw.IterateAllChunks(func(_, _ uint64, _, _ int64, _ uint16) error { return nil }))
+	require.NoError(t, hrw.IterateAllChunks(func(_ uint64, _ ChunkDiskMapperRef, _, _ int64, _ uint16) error { return nil }))
 	require.True(t, hrw.fileMaxtSet)
 	return hrw
 }
@@ -437,7 +438,7 @@ func randomChunk(t *testing.T) chunkenc.Chunk {
 	return chunk
 }
 
-func createChunk(t *testing.T, idx int, hrw *ChunkDiskMapper) (seriesRef uint64, chunkRef uint64, mint, maxt int64, chunk chunkenc.Chunk) {
+func createChunk(t *testing.T, idx int, hrw *ChunkDiskMapper) (seriesRef uint64, chunkRef ChunkDiskMapperRef, mint, maxt int64, chunk chunkenc.Chunk) {
 	var err error
 	seriesRef = uint64(rand.Int63())
 	mint = int64((idx)*1000 + 1)
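Given the packing described above, a natural extra assertion (a hypothetical addition for illustration, not part of this commit) is that a reference survives a pack/unpack round trip. It would sit alongside the existing tests, which already import `testing` and `require`:

```go
func TestChunkDiskMapperRef_Unpack(t *testing.T) {
	// Pack a file index and offset, then verify Unpack returns them.
	ref := newChunkDiskMapperRef(3, 12345)
	sgmIndex, chkStart := ref.Unpack()
	require.Equal(t, 3, sgmIndex)
	require.Equal(t, 12345, chkStart)
}
```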
@@ -636,7 +636,7 @@ func (h *Head) Init(minValidTime int64) error {
 
 func (h *Head) loadMmappedChunks(refSeries map[uint64]*memSeries) (map[uint64][]*mmappedChunk, error) {
 	mmappedChunks := map[uint64][]*mmappedChunk{}
-	if err := h.chunkDiskMapper.IterateAllChunks(func(seriesRef, chunkRef uint64, mint, maxt int64, numSamples uint16) error {
+	if err := h.chunkDiskMapper.IterateAllChunks(func(seriesRef uint64, chunkRef chunks.ChunkDiskMapperRef, mint, maxt int64, numSamples uint16) error {
 		if maxt < h.minValidTime.Load() {
 			return nil
 		}
@@ -1612,8 +1612,9 @@ func overlapsClosedInterval(mint1, maxt1, mint2, maxt2 int64) bool {
 	return mint1 <= maxt2 && mint2 <= maxt1
 }
 
+// mappedChunks describes chunk data on disk that can be mmapped
 type mmappedChunk struct {
-	ref              uint64
+	ref              chunks.ChunkDiskMapperRef
 	numSamples       uint16
 	minTime, maxTime int64
 }
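The callback now hands the typed ref straight to the head, which stores it in `mmappedChunk` and can later pass it back to `ChunkDiskMapper.Chunk` when the series data is read. A condensed sketch of that flow, with surrounding setup and error handling elided (names follow the diff above; this is not the full method body):

```go
// Collect per-series mmapped chunk metadata, keyed by series ref.
bySeries := map[uint64][]*mmappedChunk{}
err := h.chunkDiskMapper.IterateAllChunks(func(seriesRef uint64, chunkRef chunks.ChunkDiskMapperRef, mint, maxt int64, numSamples uint16) error {
	// Skip chunks that fall entirely before the valid time range.
	if maxt < h.minValidTime.Load() {
		return nil
	}
	bySeries[seriesRef] = append(bySeries[seriesRef], &mmappedChunk{
		ref:        chunkRef,
		numSamples: numSamples,
		minTime:    mint,
		maxTime:    maxt,
	})
	return nil
})
```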
@@ -65,7 +65,7 @@ func newTestHead(t testing.TB, chunkRange int64, compressWAL bool) (*Head, *wal.
 	h, err := NewHead(nil, nil, wlog, opts, nil)
 	require.NoError(t, err)
 
-	require.NoError(t, h.chunkDiskMapper.IterateAllChunks(func(_, _ uint64, _, _ int64, _ uint16) error { return nil }))
+	require.NoError(t, h.chunkDiskMapper.IterateAllChunks(func(_ uint64, _ chunks.ChunkDiskMapperRef, _, _ int64, _ uint16) error { return nil }))
 
 	t.Cleanup(func() {
 		require.NoError(t, os.RemoveAll(dir))
@@ -311,8 +311,8 @@ func (api *API) Register(r *route.Router) {
 	r.Get("/status/flags", wrap(api.serveFlags))
 	r.Get("/status/tsdb", wrap(api.serveTSDBStatus))
 	r.Get("/status/walreplay", api.serveWALReplayStatus)
-	r.Post("/read", api.ready(http.HandlerFunc(api.remoteRead)))
-	r.Post("/write", api.ready(http.HandlerFunc(api.remoteWrite)))
+	r.Post("/read", api.ready(api.remoteRead))
+	r.Post("/write", api.ready(api.remoteWrite))
 
 	r.Get("/alerts", wrap(api.alerts))
 	r.Get("/rules", wrap(api.rules))
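Dropping the explicit `http.HandlerFunc(...)` conversions works because a method value such as `api.remoteRead` has type `func(http.ResponseWriter, *http.Request)`, which is directly assignable to a parameter of the named type `http.HandlerFunc`. A sketch of the wrapper shape this change implies; the readiness check shown is illustrative only, not the actual implementation:

```go
// ready gates a handler behind a readiness check. Accepting
// http.HandlerFunc lets callers pass method values like
// api.remoteRead without an explicit conversion.
func (api *API) ready(f http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if !api.isReady() { // isReady is a placeholder; the real check differs.
			http.Error(w, http.StatusText(http.StatusServiceUnavailable), http.StatusServiceUnavailable)
			return
		}
		f(w, r)
	}
}
```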
116	web/ui/README.md
@@ -1,12 +1,110 @@
-The `ui` directory contains static files and templates used in the web UI. For
-easier distribution they are statically compiled into the Prometheus binary
-using the vfsgen library (c.f. Makefile).
-
-During development it is more convenient to always use the files on disk to
-directly see changes without recompiling.
-To make this work, remove the `builtinassets` build tag in the `flags` entry
-in `.promu.yml`, and then `make build` (or build Prometheus using
+## Overview
+
+The `ui` directory contains static files and templates used in the web UI. For easier distribution they are statically
+compiled into the Prometheus binary using the vfsgen library (c.f. Makefile).
+
+During development it is more convenient to always use the files on disk to directly see changes without recompiling. To
+make this work, remove the `builtinassets` build tag in the `flags` entry in `.promu.yml`, and then `make build` (or
+build Prometheus using
 `go build ./cmd/prometheus`).
 
-This will serve all files from your local filesystem.
-This is for development purposes only.
+This will serve all files from your local filesystem. This is for development purposes only.
+
+## React-app
+
+### Introduction
+
+The React application is a monorepo composed of multiple npm packages. The main one is `react-app`, which
+contains the code of the React application.
+
+In addition, there are several npm packages located in the `modules` folder. These packages are meant to be used by the
+react-app and also by other consumers (such as Thanos).
+
+### Prerequisites
+
+To be able to build the React application, you need:
+
+* npm >= v7
+* node >= v16
+
+### Installing npm dependencies
+
+The React UI depends on a large number of [npm](https://www.npmjs.com/) packages. These are not checked in, so you will
+need to move to the directory `web/ui` and then download and install them locally via the npm package manager:
+
+    npm install
+
+npm consults the `package.json` and `package-lock.json` files for dependencies to install. It creates a `node_modules`
+directory with all installed dependencies.
+
+**NOTE**: Do not run `npm install` in the `react-app` folder or in any sub folder of the `module` directory.
+
+### Upgrading npm dependencies
+
+As it is a monorepo, when upgrading a dependency you have to upgrade it in every package that composes this monorepo
+(that is, in every sub folder of `module` and in `react-app`).
+
+Then run `npm install` in `web/ui`, not in a sub folder or sub package; it simply won't work otherwise.
+
+### Running a local development server
+
+You can start a development server for the React UI outside of a running Prometheus server by running:
+
+    npm start
+
+This will open a browser window with the React app running on http://localhost:3000/. The page will reload if you make
+edits to the source code. You will also see any lint errors in the console.
+
+**NOTE**: It will reload only if you change code in the `react-app` folder. Code changes in the `module` folder are not
+picked up by `npm start`. In order to see those changes in the react-app, you will have to run `npm run build:module`.
+
+Due to a `"proxy": "http://localhost:9090"` setting in the `package.json` file, any API requests from the React UI are
+proxied to `localhost` on port `9090` by the development server. This allows you to run a normal Prometheus server to
+handle API requests, while iterating separately on the UI.
+
+    [browser] ----> [localhost:3000 (dev server)] --(proxy API requests)--> [localhost:9090 (Prometheus)]
+
+### Running tests
+
+To run the tests for the react-app and for all modules, you can simply run:
+
+```bash
+npm test
+```
+
+If you want to run the tests for a specific module only, go to that module's folder and run `npm test` again.
+
+For example, to run only the react-app tests, go to `web/ui/react-app` and run `npm test`.
+
+To generate an HTML-based test coverage report, run:
+
+    CI=true npm test:coverage
+
+This creates a `coverage` subdirectory with the generated report. Open `coverage/lcov-report/index.html` in the browser
+to view it.
+
+The `CI=true` environment variable prevents the tests from being run in interactive / watching mode.
+
+See the [Create React App documentation](https://create-react-app.dev/docs/running-tests/) for more information about
+running tests.
+
+### Building the app for production
+
+To build a production-optimized version of the React app to a `build` subdirectory, run:
+
+    npm run build
+
+**NOTE:** You will likely not need to do this directly. Instead, this is taken care of by the `build` target in the main
+Prometheus `Makefile` when building the full binary.
+
+### Integration into Prometheus
+
+To build a Prometheus binary that includes a compiled-in version of the production build of the React app, change to the
+root of the repository and run:
+
+    make build
+
+This installs dependencies via npm, builds a production build of the React app, and then finally compiles in all web
+assets into the Prometheus binary.
@@ -1,3 +1,14 @@
+0.18.0 / 2021-10-20
+===================
+
+* **[Feature]**: Allow overriding the API prefix used to contact a remote Prometheus.
+* **[Feature]**: Add linter and autocompletion support for trigonometric functions (like `sin`, `cos`)
+* **[BreakingChange]**: The lib is now exposed under the `dist` folder. When importing `codemirror-promql`, it means you
+  will need to add `dist` in the import. For example `import { newCompleteStrategy } from 'codemirror-promql/cjs/complete';`
+  becomes `import { newCompleteStrategy } from 'codemirror-promql/dist/cjs/complete';`
+* **[BreakingChange]**: lezer-promql has been migrated into codemirror-promql in the `grammar` folder
+* **[BreakingChange]**: Support the latest version of Codemirror.next (v0.19.0).
+
 0.17.0 / 2021-08-10
 ===================
 
@@ -1,83 +0,0 @@
-# Working with the React UI
-
-This file explains how to work with the React-based Prometheus UI.
-
-## Introduction
-
-The [React-based](https://reactjs.org/) Prometheus UI was bootstrapped using [Create React App](https://github.com/facebook/create-react-app), a popular toolkit for generating React application setups. You can find general information about Create React App on [their documentation site](https://create-react-app.dev/).
-
-Instead of plain JavaScript, we use [TypeScript](https://www.typescriptlang.org/) to ensure typed code.
-
-## Development environment
-
-To work with the React UI code, you will need to have the following tools installed:
-
-* The [Node.js](https://nodejs.org/) JavaScript runtime.
-* The [npm](https://www.npmjs.com/) package manager. Once you installed Node, npm should already be available.
-* *Recommended:* An editor with TypeScript, React, and [ESLint](https://eslint.org/) linting support. See e.g. [Create React App's editor setup instructions](https://create-react-app.dev/docs/setting-up-your-editor/). If you are not sure which editor to use, we recommend using [Visual Studio Code](https://code.visualstudio.com/docs/languages/typescript). Make sure that [the editor uses the project's TypeScript version rather than its own](https://code.visualstudio.com/docs/typescript/typescript-compiling#_using-the-workspace-version-of-typescript).
-
-**NOTE**: When using Visual Studio Code, be sure to open the `web/ui/react-app` directory in the editor instead of the root of the repository. This way, the right ESLint and TypeScript configuration will be picked up from the React workspace.
-
-## Installing npm dependencies
-
-The React UI depends on a large number of [npm](https://www.npmjs.com/) packages. These are not checked in, so you will need to download and install them locally via the npm package manager:
-
-    npm install
-
-npm consults the `package.json` and `package-lock.json` files for dependencies to install. It creates a `node_modules` directory with all installed dependencies.
-
-**NOTE**: Remember to change directory to `web/ui/react-app` before running this command and the following commands.
-
-## Running a local development server
-
-You can start a development server for the React UI outside of a running Prometheus server by running:
-
-    npm start
-
-This will open a browser window with the React app running on http://localhost:3000/. The page will reload if you make edits to the source code. You will also see any lint errors in the console.
-
-Due to a `"proxy": "http://localhost:9090"` setting in the `package.json` file, any API requests from the React UI are proxied to `localhost` on port `9090` by the development server. This allows you to run a normal Prometheus server to handle API requests, while iterating separately on the UI.
-
-    [browser] ----> [localhost:3000 (dev server)] --(proxy API requests)--> [localhost:9090 (Prometheus)]
-
-## Running tests
-
-Create React App uses the [Jest](https://jestjs.io/) framework for running tests. To run tests in interactive watch mode:
-
-    npm test
-
-To generate an HTML-based test coverage report, run:
-
-    CI=true npm test --coverage
-
-This creates a `coverage` subdirectory with the generated report. Open `coverage/lcov-report/index.html` in the browser to view it.
-
-The `CI=true` environment variable prevents the tests from being run in interactive / watching mode.
-
-See the [Create React App documentation](https://create-react-app.dev/docs/running-tests/) for more information about running tests.
-
-## Linting
-
-We define linting rules for the [ESLint](https://eslint.org/) linter. We recommend integrating automated linting and fixing into your editor (e.g. upon save), but you can also run the linter separately from the command-line.
-
-To detect and automatically fix lint errors, run:
-
-    npm run lint
-
-This is also available via the `react-app-lint-fix` target in the main Prometheus `Makefile`.
-
-## Building the app for production
-
-To build a production-optimized version of the React app to a `build` subdirectory, run:
-
-    npm run build
-
-**NOTE:** You will likely not need to do this directly. Instead, this is taken care of by the `build` target in the main Prometheus `Makefile` when building the full binary.
-
-## Integration into Prometheus
-
-To build a Prometheus binary that includes a compiled-in version of the production build of the React app, change to the root of the repository and run:
-
-    make build
-
-This installs dependencies via npm, builds a production build of the React app, and then finally compiles in all web assets into the Prometheus binary.
@@ -1,69 +0,0 @@
-import * as React from 'react';
-import { mount, ReactWrapper } from 'enzyme';
-import CMExpressionInput from './CMExpressionInput';
-import { Button, InputGroup, InputGroupAddon } from 'reactstrap';
-import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
-import { faSearch, faSpinner } from '@fortawesome/free-solid-svg-icons';
-
-describe('CMExpressionInput', () => {
-  const expressionInputProps = {
-    value: 'node_cpu',
-    queryHistory: [],
-    metricNames: [],
-    executeQuery: (): void => {
-      // Do nothing.
-    },
-    onExpressionChange: (): void => {
-      // Do nothing.
-    },
-    loading: false,
-    enableAutocomplete: true,
-    enableHighlighting: true,
-    enableLinter: true,
-  };
-
-  let expressionInput: ReactWrapper;
-  beforeEach(() => {
-    expressionInput = mount(<CMExpressionInput {...expressionInputProps} />);
-  });
-
-  it('renders an InputGroup', () => {
-    const inputGroup = expressionInput.find(InputGroup);
-    expect(inputGroup.prop('className')).toEqual('expression-input');
-  });
-
-  it('renders a search icon when it is not loading', () => {
-    const addon = expressionInput.find(InputGroupAddon).filterWhere((addon) => addon.prop('addonType') === 'prepend');
-    const icon = addon.find(FontAwesomeIcon);
-    expect(icon.prop('icon')).toEqual(faSearch);
-  });
-
-  it('renders a loading icon when it is loading', () => {
-    const expressionInput = mount(<CMExpressionInput {...expressionInputProps} loading={true} />);
-    const addon = expressionInput.find(InputGroupAddon).filterWhere((addon) => addon.prop('addonType') === 'prepend');
-    const icon = addon.find(FontAwesomeIcon);
-    expect(icon.prop('icon')).toEqual(faSpinner);
-    expect(icon.prop('spin')).toBe(true);
-  });
-
-  it('renders a CodeMirror expression input', () => {
-    const input = expressionInput.find('div.cm-expression-input');
-    expect(input.text()).toContain('node_cpu');
-  });
-
-  it('renders an execute button', () => {
-    const addon = expressionInput.find(InputGroupAddon).filterWhere((addon) => addon.prop('addonType') === 'append');
-    const button = addon.find(Button).find('.execute-btn').first();
-    expect(button.prop('color')).toEqual('primary');
-    expect(button.text()).toEqual('Execute');
-  });
-
-  it('executes the query when clicking the execute button', () => {
-    const spyExecuteQuery = jest.fn();
-    const props = { ...expressionInputProps, executeQuery: spyExecuteQuery };
-    const wrapper = mount(<CMExpressionInput {...props} />);
-    const btn = wrapper.find(Button).filterWhere((btn) => btn.hasClass('execute-btn'));
-    btn.simulate('click');
-    expect(spyExecuteQuery).toHaveBeenCalledTimes(1);
-  });
-});
@@ -1,249 +0,0 @@
-import React, { FC, useState, useEffect, useRef } from 'react';
-import { Button, InputGroup, InputGroupAddon, InputGroupText } from 'reactstrap';
-
-import { EditorView, highlightSpecialChars, keymap, ViewUpdate, placeholder } from '@codemirror/view';
-import { EditorState, Prec, Compartment } from '@codemirror/state';
-import { indentOnInput, syntaxTree } from '@codemirror/language';
-import { history, historyKeymap } from '@codemirror/history';
-import { defaultKeymap, insertNewlineAndIndent } from '@codemirror/commands';
-import { bracketMatching } from '@codemirror/matchbrackets';
-import { closeBrackets, closeBracketsKeymap } from '@codemirror/closebrackets';
-import { highlightSelectionMatches } from '@codemirror/search';
-import { commentKeymap } from '@codemirror/comment';
-import { lintKeymap } from '@codemirror/lint';
-import { autocompletion, completionKeymap, CompletionContext, CompletionResult } from '@codemirror/autocomplete';
-import { baseTheme, lightTheme, darkTheme, promqlHighlighter } from './CMTheme';
-
-import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
-import { faSearch, faSpinner, faGlobeEurope } from '@fortawesome/free-solid-svg-icons';
-import MetricsExplorer from './MetricsExplorer';
-import { usePathPrefix } from '../../contexts/PathPrefixContext';
-import { useTheme } from '../../contexts/ThemeContext';
-import { CompleteStrategy, PromQLExtension } from 'codemirror-promql';
-import { newCompleteStrategy } from 'codemirror-promql/dist/cjs/complete';
-
-const promqlExtension = new PromQLExtension();
-
-interface CMExpressionInputProps {
-  value: string;
-  onExpressionChange: (expr: string) => void;
-  queryHistory: string[];
-  metricNames: string[];
-  executeQuery: () => void;
-  loading: boolean;
-  enableAutocomplete: boolean;
-  enableHighlighting: boolean;
-  enableLinter: boolean;
-}
-
-const dynamicConfigCompartment = new Compartment();
-
-// Autocompletion strategy that wraps the main one and enriches
-// it with past query items.
-export class HistoryCompleteStrategy implements CompleteStrategy {
-  private complete: CompleteStrategy;
-  private queryHistory: string[];
-  constructor(complete: CompleteStrategy, queryHistory: string[]) {
-    this.complete = complete;
-    this.queryHistory = queryHistory;
-  }
-
-  promQL(context: CompletionContext): Promise<CompletionResult | null> | CompletionResult | null {
-    return Promise.resolve(this.complete.promQL(context)).then((res) => {
-      const { state, pos } = context;
-      const tree = syntaxTree(state).resolve(pos, -1);
-      const start = res != null ? res.from : tree.from;
-
-      if (start !== 0) {
-        return res;
-      }
-
-      const historyItems: CompletionResult = {
-        from: start,
-        to: pos,
-        options: this.queryHistory.map((q) => ({
-          label: q.length < 80 ? q : q.slice(0, 76).concat('...'),
-          detail: 'past query',
-          apply: q,
-          info: q.length < 80 ? undefined : q,
-        })),
-        span: /^[a-zA-Z0-9_:]+$/,
-      };
-
-      if (res !== null) {
-        historyItems.options = historyItems.options.concat(res.options);
-      }
-      return historyItems;
-    });
-  }
-}
-
-const CMExpressionInput: FC<CMExpressionInputProps> = ({
-  value,
-  onExpressionChange,
-  queryHistory,
-  metricNames,
-  executeQuery,
-  loading,
-  enableAutocomplete,
-  enableHighlighting,
-  enableLinter,
-}) => {
-  const containerRef = useRef<HTMLDivElement>(null);
-  const viewRef = useRef<EditorView | null>(null);
-  const [showMetricsExplorer, setShowMetricsExplorer] = useState<boolean>(false);
-  const pathPrefix = usePathPrefix();
-  const { theme } = useTheme();
-
-  // (Re)initialize editor based on settings / setting changes.
-  useEffect(() => {
-    // Build the dynamic part of the config.
-    promqlExtension
-      .activateCompletion(enableAutocomplete)
-      .activateLinter(enableLinter)
-      .setComplete({
-        completeStrategy: new HistoryCompleteStrategy(
-          newCompleteStrategy({
-            remote: { url: pathPrefix, cache: { initialMetricList: metricNames } },
-          }),
-          queryHistory
-        ),
-      });
-    const dynamicConfig = [
-      enableHighlighting ? promqlHighlighter : [],
-      promqlExtension.asExtension(),
-      theme === 'dark' ? darkTheme : lightTheme,
-    ];
-
-    // Create or reconfigure the editor.
-    const view = viewRef.current;
-    if (view === null) {
-      // If the editor does not exist yet, create it.
-      if (!containerRef.current) {
-        throw new Error('expected CodeMirror container element to exist');
-      }
-
-      const startState = EditorState.create({
-        doc: value,
-        extensions: [
-          baseTheme,
-          highlightSpecialChars(),
-          history(),
-          EditorState.allowMultipleSelections.of(true),
-          indentOnInput(),
-          bracketMatching(),
-          closeBrackets(),
-          autocompletion(),
-          highlightSelectionMatches(),
-          EditorView.lineWrapping,
-          keymap.of([
-            ...closeBracketsKeymap,
-            ...defaultKeymap,
-            ...historyKeymap,
-            ...commentKeymap,
-            ...completionKeymap,
-            ...lintKeymap,
-          ]),
-          placeholder('Expression (press Shift+Enter for newlines)'),
-          dynamicConfigCompartment.of(dynamicConfig),
-          // This keymap is added without precedence so that closing the autocomplete dropdown
-          // via Escape works without blurring the editor.
-          keymap.of([
-            {
-              key: 'Escape',
-              run: (v: EditorView): boolean => {
-                v.contentDOM.blur();
-                return false;
-              },
-            },
-          ]),
-          Prec.override(
-            keymap.of([
-              {
-                key: 'Enter',
-                run: (v: EditorView): boolean => {
-                  executeQuery();
-                  return true;
-                },
-              },
-              {
-                key: 'Shift-Enter',
-                run: insertNewlineAndIndent,
-              },
-            ])
-          ),
-          EditorView.updateListener.of((update: ViewUpdate): void => {
-            onExpressionChange(update.state.doc.toString());
-          }),
-        ],
-      });
-
-      const view = new EditorView({
-        state: startState,
-        parent: containerRef.current,
-      });
-
-      viewRef.current = view;
-
-      view.focus();
-    } else {
-      // The editor already exists, just reconfigure the dynamically configured parts.
-      view.dispatch(
-        view.state.update({
-          effects: dynamicConfigCompartment.reconfigure(dynamicConfig),
-        })
-      );
-    }
-    // "value" is only used in the initial render, so we don't want to
-    // re-run this effect every time that "value" changes.
-    //
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [enableAutocomplete, enableHighlighting, enableLinter, executeQuery, onExpressionChange, queryHistory, theme]);
-
-  const insertAtCursor = (value: string) => {
-    const view = viewRef.current;
-    if (view === null) {
-      return;
-    }
-    const { from, to } = view.state.selection.ranges[0];
-    view.dispatch(
-      view.state.update({
-        changes: { from, to, insert: value },
-      })
-    );
-  };
-
-  return (
-    <>
-      <InputGroup className="expression-input">
-        <InputGroupAddon addonType="prepend">
-          <InputGroupText>
-            {loading ? <FontAwesomeIcon icon={faSpinner} spin /> : <FontAwesomeIcon icon={faSearch} />}
-          </InputGroupText>
-        </InputGroupAddon>
-        <div ref={containerRef} className="cm-expression-input" />
-        <InputGroupAddon addonType="append">
-          <Button
-            className="metrics-explorer-btn"
-            title="Open metrics explorer"
-            onClick={() => setShowMetricsExplorer(true)}
-          >
-            <FontAwesomeIcon icon={faGlobeEurope} />
-          </Button>
-          <Button className="execute-btn" color="primary" onClick={executeQuery}>
-            Execute
-          </Button>
-        </InputGroupAddon>
-      </InputGroup>
-
-      <MetricsExplorer
-        show={showMetricsExplorer}
-        updateShow={setShowMetricsExplorer}
-        metrics={metricNames}
-        insertAtCursor={insertAtCursor}
-      />
-    </>
-  );
-};
-
-export default CMExpressionInput;
@@ -1,26 +1,15 @@
 import * as React from 'react';
 import { mount, ReactWrapper } from 'enzyme';
 import ExpressionInput from './ExpressionInput';
-import Downshift from 'downshift';
-import { Button, InputGroup, InputGroupAddon, Input } from 'reactstrap';
+import { Button, InputGroup, InputGroupAddon } from 'reactstrap';
 import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
 import { faSearch, faSpinner } from '@fortawesome/free-solid-svg-icons';
 
-const getKeyEvent = (key: string): React.KeyboardEvent<HTMLInputElement> =>
-  ({
-    key,
-    nativeEvent: {},
-    preventDefault: () => {
-      // Do nothing.
-    },
-  } as React.KeyboardEvent<HTMLInputElement>);
-
 describe('ExpressionInput', () => {
-  const metricNames = ['instance:node_cpu_utilisation:rate1m', 'node_cpu_guest_seconds_total', 'node_cpu_seconds_total'];
   const expressionInputProps = {
     value: 'node_cpu',
     queryHistory: [],
-    metricNames,
+    metricNames: [],
     executeQuery: (): void => {
       // Do nothing.
     },
@@ -29,6 +18,8 @@ describe('ExpressionInput', () => {
     },
     loading: false,
     enableAutocomplete: true,
+    enableHighlighting: true,
+    enableLinter: true,
   };
 
   let expressionInput: ReactWrapper;
@@ -36,11 +27,6 @@ describe('ExpressionInput', () => {
     expressionInput = mount(<ExpressionInput {...expressionInputProps} />);
   });
 
-  it('renders a downshift component', () => {
-    const downshift = expressionInput.find(Downshift);
-    expect(downshift).toHaveLength(1);
-  });
-
   it('renders an InputGroup', () => {
     const inputGroup = expressionInput.find(InputGroup);
     expect(inputGroup.prop('className')).toEqual('expression-input');
@@ -60,65 +46,19 @@ describe('ExpressionInput', () => {
     expect(icon.prop('spin')).toBe(true);
   });
 
-  it('renders an Input', () => {
-    const input = expressionInput.find(Input);
-    expect(input.prop('style')).toEqual({ height: 0 });
-    expect(input.prop('autoFocus')).toEqual(true);
-    expect(input.prop('type')).toEqual('textarea');
-    expect(input.prop('rows')).toEqual('1');
-    expect(input.prop('placeholder')).toEqual('Expression (press Shift+Enter for newlines)');
-    expect(input.prop('value')).toEqual('node_cpu');
+  it('renders a CodeMirror expression input', () => {
+    const input = expressionInput.find('div.cm-expression-input');
+    expect(input.text()).toContain('node_cpu');
   });
 
-  describe('when autosuggest is closed', () => {
-    it('prevents Downshift default on Home, End, Arrows', () => {
-      const downshift = expressionInput.find(Downshift);
-      const input = downshift.find(Input);
-      downshift.setState({ isOpen: false });
-      ['Home', 'End', 'ArrowUp', 'ArrowDown'].forEach((key) => {
-        const event = getKeyEvent(key);
-        input.simulate('keydown', event);
-        const nativeEvent = event.nativeEvent as any;
-        expect(nativeEvent.preventDownshiftDefault).toBe(true);
-      });
-    });
-
-    it('does not render an autosuggest', () => {
-      const downshift = expressionInput.find(Downshift);
-      downshift.setState({ isOpen: false });
-      const ul = downshift.find('ul');
-      expect(ul).toHaveLength(0);
-    });
-  });
-
-  describe('handleInput', () => {
-    it('should call setState', () => {
-      const instance: any = expressionInput.instance();
-      const stateSpy = jest.spyOn(instance, 'setState');
-      instance.handleInput();
-      expect(stateSpy).toHaveBeenCalled();
-    });
-    it('should call onExpressionChange', () => {
-      const spyOnExpressionChange = jest.fn();
-      const props = { ...expressionInputProps, onExpressionChange: spyOnExpressionChange };
-      const wrapper = mount(<ExpressionInput {...props} />);
-      const input = wrapper.find(Input);
-      input.simulate('input', { target: { value: 'prometheus_engine_' } });
-      expect(spyOnExpressionChange).toHaveBeenCalledTimes(1);
-    });
-  });
-
-  describe('onSelect', () => {
-    it('should call setState with selected value', () => {
-      const instance: any = expressionInput.instance();
-      const stateSpy = jest.spyOn(instance, 'setState');
-      instance.setValue('foo');
-      expect(stateSpy).toHaveBeenCalledWith({ height: 'auto' }, expect.anything());
-    });
-  });
-
-  describe('onClick', () => {
-    it('executes the query', () => {
+  it('renders an execute button', () => {
+    const addon = expressionInput.find(InputGroupAddon).filterWhere((addon) => addon.prop('addonType') === 'append');
+    const button = addon.find(Button).find('.execute-btn').first();
+    expect(button.prop('color')).toEqual('primary');
+    expect(button.text()).toEqual('Execute');
+  });
+
+  it('executes the query when clicking the execute button', () => {
     const spyExecuteQuery = jest.fn();
     const props = { ...expressionInputProps, executeQuery: spyExecuteQuery };
     const wrapper = mount(<ExpressionInput {...props} />);
@@ -127,138 +67,3 @@ describe('ExpressionInput', () => {
     expect(spyExecuteQuery).toHaveBeenCalledTimes(1);
   });
 });
-
-  describe('handleKeyPress', () => {
-    it('should call executeQuery on Enter key pressed', () => {
-      const spyExecuteQuery = jest.fn();
-      const props = { ...expressionInputProps, executeQuery: spyExecuteQuery };
-      const input = mount(<ExpressionInput {...props} />);
-      const instance: any = input.instance();
-      instance.handleKeyPress({ preventDefault: jest.fn, key: 'Enter' });
-      expect(spyExecuteQuery).toHaveBeenCalled();
-    });
-    it('should NOT call executeQuery on Enter + Shift', () => {
-      const spyExecuteQuery = jest.fn();
-      const props = { ...expressionInputProps, executeQuery: spyExecuteQuery };
-      const input = mount(<ExpressionInput {...props} />);
-      const instance: any = input.instance();
-      instance.handleKeyPress({ preventDefault: jest.fn, key: 'Enter', shiftKey: true });
-      expect(spyExecuteQuery).not.toHaveBeenCalled();
-    });
-  });
-
-  describe('getSearchMatches', () => {
-    it('should return matched value', () => {
-      const instance: any = expressionInput.instance();
-      expect(instance.getSearchMatches('foo', ['barfoobaz', 'bazasdbaz'])).toHaveLength(1);
-    });
-    it('should return empty array if no match found', () => {
-      const instance: any = expressionInput.instance();
-      expect(instance.getSearchMatches('foo', ['barbaz', 'bazasdbaz'])).toHaveLength(0);
-    });
-  });
-
-  describe('createAutocompleteSection', () => {
-    const props = {
-      ...expressionInputProps,
-      metricNames: ['foo', 'bar', 'baz'],
-    };
-
-    it('should close menu if no matches found', () => {
-      const input = mount(<ExpressionInput {...props} />);
-      const instance: any = input.instance();
-      const spyCloseMenu = jest.fn();
-      instance.createAutocompleteSection({ inputValue: 'qqqqqq', closeMenu: spyCloseMenu });
-      setTimeout(() => {
-        expect(spyCloseMenu).toHaveBeenCalled();
-      });
-    });
-    it('should not render list if inputValue not exist', () => {
-      const input = mount(<ExpressionInput {...props} />);
-      const instance: any = input.instance();
-      const spyCloseMenu = jest.fn();
-      instance.createAutocompleteSection({ closeMenu: spyCloseMenu });
-      setTimeout(() => expect(spyCloseMenu).toHaveBeenCalled());
-    });
-    it('should not render list if enableAutocomplete is false', () => {
-      const input = mount(<ExpressionInput {...props} enableAutocomplete={false} />);
-      const instance: any = input.instance();
-      const spyCloseMenu = jest.fn();
-      instance.createAutocompleteSection({ closeMenu: spyCloseMenu });
-      setTimeout(() => expect(spyCloseMenu).toHaveBeenCalled());
-    });
-    it('should render autosuggest-dropdown', () => {
-      const input = mount(<ExpressionInput {...props} enableAutocomplete={true} />);
-      const instance: any = input.instance();
-      const spyGetMenuProps = jest.fn();
-      const sections = instance.createAutocompleteSection({
-        inputValue: 'foo',
-        highlightedIndex: 0,
-        getMenuProps: spyGetMenuProps,
-        getItemProps: jest.fn,
-      });
-      expect(sections.props.className).toEqual('autosuggest-dropdown');
-    });
-  });
-
-  describe('when downshift is open', () => {
-    it('closes the menu on "Enter"', () => {
-      const downshift = expressionInput.find(Downshift);
-      const input = downshift.find(Input);
-      downshift.setState({ isOpen: true });
-      const event = getKeyEvent('Enter');
-      input.simulate('keydown', event);
-      expect(downshift.state('isOpen')).toBe(false);
-    });
-
-    it('should blur input on escape', () => {
-      const downshift = expressionInput.find(Downshift);
-      const instance: any = expressionInput.instance();
-      const spyBlur = jest.spyOn(instance.exprInputRef.current, 'blur');
-      const input = downshift.find(Input);
-      downshift.setState({ isOpen: false });
-      const event = getKeyEvent('Escape');
-      input.simulate('keydown', event);
-      expect(spyBlur).toHaveBeenCalled();
-    });
-
-    it('noops on ArrowUp or ArrowDown', () => {
-      const downshift = expressionInput.find(Downshift);
-      const input = downshift.find(Input);
-      downshift.setState({ isOpen: true });
-      ['ArrowUp', 'ArrowDown'].forEach((key) => {
-        const event = getKeyEvent(key);
-        input.simulate('keydown', event);
-        const nativeEvent = event.nativeEvent as any;
-        expect(nativeEvent.preventDownshiftDefault).toBeUndefined();
-      });
-    });
-
-    it('does not render an autosuggest if there are no matches', () => {
-      const downshift = expressionInput.find(Downshift);
-      downshift.setState({ isOpen: true });
-      const ul = downshift.find('ul');
-      expect(ul).toHaveLength(0);
-    });
-
-    it('renders an autosuggest if there are matches', () => {
-      const downshift = expressionInput.find(Downshift);
-      downshift.setState({ isOpen: true });
-      setTimeout(() => {
-        const ul = downshift.find('ul');
-        expect(ul.prop('className')).toEqual('card list-group');
-        const items = ul.find('li');
-        expect(items.map((item) => item.text()).join(', ')).toEqual(
-          'node_cpu_guest_seconds_total, node_cpu_seconds_total, instance:node_cpu_utilisation:rate1m'
-        );
-      });
-    });
-  });
-
-  it('renders an execute Button', () => {
-    const addon = expressionInput.find(InputGroupAddon).filterWhere((addon) => addon.prop('addonType') === 'append');
-    const button = addon.find(Button).find('.execute-btn').first();
-    expect(button.prop('color')).toEqual('primary');
-    expect(button.text()).toEqual('Execute');
-  });
-});
@ -1,15 +1,30 @@
|
||||||
import React, { Component } from 'react';
|
import React, { FC, useState, useEffect, useRef } from 'react';
|
||||||
import { Button, Input, InputGroup, InputGroupAddon, InputGroupText } from 'reactstrap';
|
import { Button, InputGroup, InputGroupAddon, InputGroupText } from 'reactstrap';
|
||||||
|
|
||||||
import Downshift, { ControllerStateAndHelpers } from 'downshift';
|
import { EditorView, highlightSpecialChars, keymap, ViewUpdate, placeholder } from '@codemirror/view';
|
||||||
import sanitizeHTML from 'sanitize-html';
|
import { EditorState, Prec, Compartment } from '@codemirror/state';
|
||||||
|
import { indentOnInput, syntaxTree } from '@codemirror/language';
|
||||||
|
import { history, historyKeymap } from '@codemirror/history';
|
||||||
|
import { defaultKeymap, insertNewlineAndIndent } from '@codemirror/commands';
|
||||||
|
import { bracketMatching } from '@codemirror/matchbrackets';
|
||||||
|
import { closeBrackets, closeBracketsKeymap } from '@codemirror/closebrackets';
|
||||||
|
import { highlightSelectionMatches } from '@codemirror/search';
|
||||||
|
import { commentKeymap } from '@codemirror/comment';
|
||||||
|
import { lintKeymap } from '@codemirror/lint';
|
||||||
|
import { autocompletion, completionKeymap, CompletionContext, CompletionResult } from '@codemirror/autocomplete';
|
||||||
|
import { baseTheme, lightTheme, darkTheme, promqlHighlighter } from './CMTheme';
|
||||||
|
|
||||||
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
|
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
|
||||||
import { faGlobeEurope, faSearch, faSpinner } from '@fortawesome/free-solid-svg-icons';
|
import { faSearch, faSpinner, faGlobeEurope } from '@fortawesome/free-solid-svg-icons';
|
||||||
import MetricsExplorer from './MetricsExplorer';
|
import MetricsExplorer from './MetricsExplorer';
|
||||||
import { Fuzzy, FuzzyResult } from '@nexucis/fuzzy';
|
import { usePathPrefix } from '../../contexts/PathPrefixContext';
|
||||||
|
import { useTheme } from '../../contexts/ThemeContext';
|
||||||
|
import { CompleteStrategy, PromQLExtension } from 'codemirror-promql';
|
||||||
|
import { newCompleteStrategy } from 'codemirror-promql/dist/cjs/complete';
|
||||||
|
|
||||||
interface ExpressionInputProps {
|
const promqlExtension = new PromQLExtension();
|
||||||
|
|
||||||
|
interface CMExpressionInputProps {
|
||||||
value: string;
|
value: string;
|
||||||
onExpressionChange: (expr: string) => void;
|
onExpressionChange: (expr: string) => void;
|
||||||
queryHistory: string[];
|
queryHistory: string[];
|
||||||
|
@ -17,235 +32,218 @@ interface ExpressionInputProps {
|
||||||
executeQuery: () => void;
|
executeQuery: () => void;
|
||||||
loading: boolean;
|
loading: boolean;
|
||||||
enableAutocomplete: boolean;
|
enableAutocomplete: boolean;
|
||||||
|
enableHighlighting: boolean;
|
||||||
|
enableLinter: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface ExpressionInputState {
|
const dynamicConfigCompartment = new Compartment();
|
||||||
height: number | string;
|
|
||||||
showMetricsExplorer: boolean;
|
// Autocompletion strategy that wraps the main one and enriches
|
||||||
|
+// it with past query items.
+export class HistoryCompleteStrategy implements CompleteStrategy {
+  private complete: CompleteStrategy;
+  private queryHistory: string[];
+
+  constructor(complete: CompleteStrategy, queryHistory: string[]) {
+    this.complete = complete;
+    this.queryHistory = queryHistory;
+  }
+
+  promQL(context: CompletionContext): Promise<CompletionResult | null> | CompletionResult | null {
+    return Promise.resolve(this.complete.promQL(context)).then((res) => {
+      const { state, pos } = context;
+      const tree = syntaxTree(state).resolve(pos, -1);
+      const start = res != null ? res.from : tree.from;
+
+      if (start !== 0) {
+        return res;
+      }
+
+      const historyItems: CompletionResult = {
+        from: start,
+        to: pos,
+        options: this.queryHistory.map((q) => ({
+          label: q.length < 80 ? q : q.slice(0, 76).concat('...'),
+          detail: 'past query',
+          apply: q,
+          info: q.length < 80 ? undefined : q,
+        })),
+        span: /^[a-zA-Z0-9_:]+$/,
+      };
+
+      if (res !== null) {
+        historyItems.options = historyItems.options.concat(res.options);
+      }
+      return historyItems;
+    });
+  }
+}
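For readers following along, here is a minimal sketch of how this wrapper composes with the base completion strategy. The `HistoryCompleteStrategy` class and the `newCompleteStrategy` call are taken from the diff; the import path, the URL, and the sample queries are assumptions for illustration only:

// Hypothetical standalone wiring, assuming newCompleteStrategy is exported by codemirror-promql:
import { newCompleteStrategy } from 'codemirror-promql/cjs/complete';

const strategy = new HistoryCompleteStrategy(
  newCompleteStrategy({ remote: { url: '/api/v1' } }), // base PromQL completion (URL is illustrative)
  ['rate(node_cpu_seconds_total[5m])', 'up == 0'] // sample past queries
);
// When a completion request starts at offset 0 (the very beginning of the expression),
// strategy.promQL() prepends these history entries to the base strategy's results.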
+const ExpressionInput: FC<CMExpressionInputProps> = ({
+  value,
+  onExpressionChange,
+  queryHistory,
+  metricNames,
+  executeQuery,
+  loading,
+  enableAutocomplete,
+  enableHighlighting,
+  enableLinter,
+}) => {
+  const containerRef = useRef<HTMLDivElement>(null);
+  const viewRef = useRef<EditorView | null>(null);
+  const [showMetricsExplorer, setShowMetricsExplorer] = useState<boolean>(false);
+  const pathPrefix = usePathPrefix();
+  const { theme } = useTheme();
+
+  // (Re)initialize editor based on settings / setting changes.
+  useEffect(() => {
+    // Build the dynamic part of the config.
+    promqlExtension
+      .activateCompletion(enableAutocomplete)
+      .activateLinter(enableLinter)
+      .setComplete({
+        completeStrategy: new HistoryCompleteStrategy(
+          newCompleteStrategy({
+            remote: { url: pathPrefix, cache: { initialMetricList: metricNames } },
+          }),
+          queryHistory
+        ),
+      });
+    const dynamicConfig = [
+      enableHighlighting ? promqlHighlighter : [],
+      promqlExtension.asExtension(),
+      theme === 'dark' ? darkTheme : lightTheme,
+    ];
+
+    // Create or reconfigure the editor.
+    const view = viewRef.current;
+    if (view === null) {
+      // If the editor does not exist yet, create it.
+      if (!containerRef.current) {
+        throw new Error('expected CodeMirror container element to exist');
+      }
+
+      const startState = EditorState.create({
+        doc: value,
+        extensions: [
+          baseTheme,
+          highlightSpecialChars(),
+          history(),
+          EditorState.allowMultipleSelections.of(true),
+          indentOnInput(),
+          bracketMatching(),
+          closeBrackets(),
+          autocompletion(),
+          highlightSelectionMatches(),
+          EditorView.lineWrapping,
+          keymap.of([
+            ...closeBracketsKeymap,
+            ...defaultKeymap,
+            ...historyKeymap,
+            ...commentKeymap,
+            ...completionKeymap,
+            ...lintKeymap,
+          ]),
+          placeholder('Expression (press Shift+Enter for newlines)'),
+          dynamicConfigCompartment.of(dynamicConfig),
+          // This keymap is added without precedence so that closing the autocomplete dropdown
+          // via Escape works without blurring the editor.
+          keymap.of([
+            {
+              key: 'Escape',
+              run: (v: EditorView): boolean => {
+                v.contentDOM.blur();
+                return false;
+              },
+            },
+          ]),
+          Prec.override(
+            keymap.of([
+              {
+                key: 'Enter',
+                run: (v: EditorView): boolean => {
+                  executeQuery();
+                  return true;
+                },
+              },
+              {
+                key: 'Shift-Enter',
+                run: insertNewlineAndIndent,
+              },
+            ])
+          ),
+          EditorView.updateListener.of((update: ViewUpdate): void => {
+            onExpressionChange(update.state.doc.toString());
+          }),
+        ],
+      });
+
+      const view = new EditorView({
+        state: startState,
+        parent: containerRef.current,
+      });
+
+      viewRef.current = view;
+
+      view.focus();
+    } else {
+      // The editor already exists, just reconfigure the dynamically configured parts.
+      view.dispatch(
+        view.state.update({
+          effects: dynamicConfigCompartment.reconfigure(dynamicConfig),
+        })
+      );
+    }
+    // "value" is only used in the initial render, so we don't want to
+    // re-run this effect every time that "value" changes.
+    //
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [enableAutocomplete, enableHighlighting, enableLinter, executeQuery, onExpressionChange, queryHistory, theme]);
+
+  const insertAtCursor = (value: string) => {
+    const view = viewRef.current;
+    if (view === null) {
+      return;
+    }
+    const { from, to } = view.state.selection.ranges[0];
+    view.dispatch(
+      view.state.update({
+        changes: { from, to, insert: value },
+      })
+    );
+  };
+
+  return (
+    <>
+      <InputGroup className="expression-input">
+        <InputGroupAddon addonType="prepend">
+          <InputGroupText>
+            {loading ? <FontAwesomeIcon icon={faSpinner} spin /> : <FontAwesomeIcon icon={faSearch} />}
+          </InputGroupText>
+        </InputGroupAddon>
+        <div ref={containerRef} className="cm-expression-input" />
+        <InputGroupAddon addonType="append">
+          <Button
+            className="metrics-explorer-btn"
+            title="Open metrics explorer"
+            onClick={() => setShowMetricsExplorer(true)}
+          >
+            <FontAwesomeIcon icon={faGlobeEurope} />
+          </Button>
+        </InputGroupAddon>
+        <InputGroupAddon addonType="append">
+          <Button className="execute-btn" color="primary" onClick={executeQuery}>
+            Execute
+          </Button>
+        </InputGroupAddon>
+      </InputGroup>
+
+      <MetricsExplorer
+        show={showMetricsExplorer}
+        updateShow={setShowMetricsExplorer}
+        metrics={metricNames}
+        insertAtCursor={insertAtCursor}
+      />
+    </>
+  );
+};
+
+export default ExpressionInput;
-}
-
-const fuz = new Fuzzy({ pre: '<strong>', post: '</strong>', shouldSort: true });
-
-class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputState> {
-  private exprInputRef = React.createRef<HTMLInputElement>();
-
-  constructor(props: ExpressionInputProps) {
-    super(props);
-    this.state = {
-      height: 'auto',
-      showMetricsExplorer: false,
-    };
-  }
-
-  componentDidMount(): void {
-    this.setHeight();
-  }
-
-  setHeight = (): void => {
-    if (this.exprInputRef.current) {
-      const { offsetHeight, clientHeight, scrollHeight } = this.exprInputRef.current;
-      const offset = offsetHeight - clientHeight; // Needed in order for the height to be more accurate.
-      this.setState({ height: scrollHeight + offset });
-    }
-  };
-
-  handleInput = (): void => {
-    if (this.exprInputRef.current) {
-      this.setValue(this.exprInputRef.current.value);
-    }
-  };
-
-  setValue = (value: string): void => {
-    const { onExpressionChange } = this.props;
-    onExpressionChange(value);
-    this.setState({ height: 'auto' }, this.setHeight);
-  };
-
-  componentDidUpdate(prevProps: ExpressionInputProps): void {
-    const { value } = this.props;
-    if (value !== prevProps.value) {
-      this.setValue(value);
-    }
-  }
-
-  handleKeyPress = (event: React.KeyboardEvent<HTMLInputElement>): void => {
-    const { executeQuery } = this.props;
-    if (event.key === 'Enter' && !event.shiftKey) {
-      executeQuery();
-      event.preventDefault();
-    }
-  };
-
-  getSearchMatches = (input: string, expressions: string[]): FuzzyResult[] => {
-    return fuz.filter(input.replace(/ /g, ''), expressions);
-  };
-
-  // eslint-disable-next-line @typescript-eslint/no-explicit-any
-  createAutocompleteSection = (downshift: ControllerStateAndHelpers<any>): JSX.Element | null => {
-    const { inputValue = '', closeMenu, highlightedIndex } = downshift;
-    const autocompleteSections = {
-      'Query History': this.props.queryHistory,
-      'Metric Names': this.props.metricNames,
-    };
-    let index = 0;
-    const sections =
-      inputValue?.length && this.props.enableAutocomplete
-        ? Object.entries(autocompleteSections).reduce((acc, [title, items]) => {
-            const matches = this.getSearchMatches(inputValue, items);
-            return !matches.length
-              ? acc
-              : [
-                  ...acc,
-                  <ul className="autosuggest-dropdown-list" key={title}>
-                    <li className="autosuggest-dropdown-header">{title}</li>
-                    {matches
-                      .slice(0, 100) // Limit DOM rendering to 100 results, as DOM rendering is slow.
-                      .map((result: FuzzyResult) => {
-                        const itemProps = downshift.getItemProps({
-                          key: result.original,
-                          index,
-                          item: result.original,
-                          style: {
-                            backgroundColor: highlightedIndex === index++ ? 'lightgray' : 'white',
-                          },
-                        });
-                        return (
-                          <li
-                            key={title}
-                            {...itemProps}
-                            dangerouslySetInnerHTML={{ __html: sanitizeHTML(result.rendered, { allowedTags: ['strong'] }) }}
-                          />
-                        );
-                      })}
-                  </ul>,
-                ];
-          }, [] as JSX.Element[])
-        : [];
-
-    if (!sections.length) {
-      // This is ugly but is needed in order to sync state updates.
-      // This way we force downshift to wait React render call to complete before closeMenu to be triggered.
-      setTimeout(closeMenu);
-      return null;
-    }
-
-    return (
-      <div {...downshift.getMenuProps()} className="autosuggest-dropdown">
-        {sections}
-      </div>
-    );
-  };
-
-  openMetricsExplorer = (): void => {
-    this.setState({
-      showMetricsExplorer: true,
-    });
-  };
-
-  updateShowMetricsExplorer = (show: boolean): void => {
-    this.setState({
-      showMetricsExplorer: show,
-    });
-  };
-
-  insertAtCursor = (value: string): void => {
-    if (!this.exprInputRef.current) return;
-
-    const startPosition = this.exprInputRef.current.selectionStart;
-    const endPosition = this.exprInputRef.current.selectionEnd;
-
-    const previousValue = this.exprInputRef.current.value;
-    let newValue: string;
-    if (startPosition && endPosition) {
-      newValue =
-        previousValue.substring(0, startPosition) + value + previousValue.substring(endPosition, previousValue.length);
-    } else {
-      newValue = previousValue + value;
-    }
-
-    this.setValue(newValue);
-  };
-
-  render(): JSX.Element {
-    const { executeQuery, value } = this.props;
-    const { height } = this.state;
-    return (
-      <>
-        <Downshift onSelect={this.setValue}>
-          {(downshift) => (
-            <div>
-              <InputGroup className="expression-input">
-                <InputGroupAddon addonType="prepend">
-                  <InputGroupText>
-                    {this.props.loading ? <FontAwesomeIcon icon={faSpinner} spin /> : <FontAwesomeIcon icon={faSearch} />}
-                  </InputGroupText>
-                </InputGroupAddon>
-                <Input
-                  onInput={this.handleInput}
-                  style={{ height }}
-                  autoFocus
-                  type="textarea"
-                  rows="1"
-                  onKeyPress={this.handleKeyPress}
-                  placeholder="Expression (press Shift+Enter for newlines)"
-                  innerRef={this.exprInputRef}
-                  {...downshift.getInputProps({
-                    onKeyDown: (event: React.KeyboardEvent): void => {
-                      switch (event.key) {
-                        case 'Home':
-                        case 'End':
-                          // We want to be able to jump to the beginning/end of the input field.
-                          // By default, Downshift otherwise jumps to the first/last suggestion item instead.
-                          // eslint-disable-next-line @typescript-eslint/no-explicit-any
-                          (event.nativeEvent as any).preventDownshiftDefault = true;
-                          break;
-                        case 'ArrowUp':
-                        case 'ArrowDown':
-                          if (!downshift.isOpen) {
-                            // eslint-disable-next-line @typescript-eslint/no-explicit-any
-                            (event.nativeEvent as any).preventDownshiftDefault = true;
-                          }
-                          break;
-                        case 'Enter':
-                          downshift.closeMenu();
-                          break;
-                        case 'Escape':
-                          if (!downshift.isOpen && this.exprInputRef.current) {
-                            this.exprInputRef.current.blur();
-                          }
-                          break;
-                        default:
-                      }
-                    },
-                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-                  } as any)}
-                  value={value}
-                />
-                <InputGroupAddon addonType="append">
-                  <Button className="metrics-explorer-btn" title="Open metrics explorer" onClick={this.openMetricsExplorer}>
-                    <FontAwesomeIcon icon={faGlobeEurope} />
-                  </Button>
-                </InputGroupAddon>
-                <InputGroupAddon addonType="append">
-                  <Button className="execute-btn" color="primary" onClick={executeQuery}>
-                    Execute
-                  </Button>
-                </InputGroupAddon>
-              </InputGroup>
-              {downshift.isOpen && this.createAutocompleteSection(downshift)}
-            </div>
-          )}
-        </Downshift>
-
-        <MetricsExplorer
-          show={this.state.showMetricsExplorer}
-          updateShow={this.updateShowMetricsExplorer}
-          metrics={this.props.metricNames}
-          insertAtCursor={this.insertAtCursor}
-        />
-      </>
-    );
-  }
-}
-
-export default ExpressionInput;
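The added component reconfigures a live editor through CodeMirror 6 compartments (`dynamicConfigCompartment.reconfigure(...)` in the code above). As a minimal standalone sketch of that pattern, assuming only the public `@codemirror/state` and `@codemirror/view` APIs (the compartment name matches the diff; everything else here is illustrative):

import { Compartment, EditorState, Extension } from '@codemirror/state';
import { EditorView } from '@codemirror/view';

const dynamicConfigCompartment = new Compartment();

// The compartment marks the slice of the configuration that can be swapped later.
const state = EditorState.create({
  doc: 'up',
  extensions: [dynamicConfigCompartment.of([])],
});
const view = new EditorView({ state, parent: document.body });

// Replacing only the compartment's contents avoids recreating the editor,
// which is what the "else" branch above does when a setting changes.
const reconfigure = (exts: Extension[]): void => {
  view.dispatch({ effects: dynamicConfigCompartment.reconfigure(exts) });
};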
@@ -1,7 +1,6 @@
 import * as React from 'react';
 import { mount, shallow } from 'enzyme';
 import Panel, { PanelOptions, PanelType } from './Panel';
-import ExpressionInput from './ExpressionInput';
 import GraphControls from './GraphControls';
 import { NavLink, TabPane } from 'reactstrap';
 import TimeInput from './TimeInput';
@@ -38,17 +37,6 @@ const defaultProps = {
 describe('Panel', () => {
   const panel = shallow(<Panel {...defaultProps} />);

-  it('renders an ExpressionInput', () => {
-    const input = panel.find(ExpressionInput);
-    expect(input.prop('value')).toEqual('prometheus_engine');
-    expect(input.prop('metricNames')).toEqual([
-      'prometheus_engine_queries',
-      'prometheus_engine_queries_concurrent_max',
-      'prometheus_engine_query_duration_seconds',
-    ]);
-    expect(input.prop('queryHistory')).toEqual([]);
-  });
-
   it('renders NavLinks', () => {
     const results: PanelOptions[] = [];
     const onOptionsChanged = (opts: PanelOptions): void => {
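If equivalent coverage is wanted for the CodeMirror-based component, a shallow-render assertion on the forwarded props would look roughly like the following (a hypothetical test, not part of this commit; prop values mirror the deleted test's fixtures):

it('renders an ExpressionInput', () => {
  const input = panel.find(ExpressionInput);
  // The panel should still forward its expression state and history to the editor.
  expect(input.prop('value')).toEqual('prometheus_engine');
  expect(input.prop('queryHistory')).toEqual([]);
});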
@@ -5,7 +5,6 @@ import { Alert, Button, Col, Nav, NavItem, NavLink, Row, TabContent, TabPane } f
 import moment from 'moment-timezone';

 import ExpressionInput from './ExpressionInput';
-import CMExpressionInput from './CMExpressionInput';
 import GraphControls from './GraphControls';
 import { GraphTabContent } from './GraphTabContent';
 import DataTable from './DataTable';
@@ -24,7 +23,6 @@ interface PanelProps {
   removePanel: () => void;
   onExecuteQuery: (query: string) => void;
   pathPrefix: string;
-  useExperimentalEditor: boolean;
   enableAutocomplete: boolean;
   enableHighlighting: boolean;
   enableLinter: boolean;
@@ -272,8 +270,7 @@ class Panel extends Component<PanelProps, PanelState> {
       <div className="panel">
         <Row>
           <Col>
-            {this.props.useExperimentalEditor ? (
-              <CMExpressionInput
+            <ExpressionInput
               value={this.state.exprInputValue}
               onExpressionChange={this.handleExpressionChange}
               executeQuery={this.executeQuery}
@@ -284,17 +281,6 @@ class Panel extends Component<PanelProps, PanelState> {
               queryHistory={pastQueries}
               metricNames={metricNames}
             />
-            ) : (
-              <ExpressionInput
-                value={this.state.exprInputValue}
-                onExpressionChange={this.handleExpressionChange}
-                executeQuery={this.executeQuery}
-                loading={this.state.loading}
-                enableAutocomplete={this.props.enableAutocomplete}
-                queryHistory={pastQueries}
-                metricNames={metricNames}
-              />
-            )}
           </Col>
         </Row>
         <Row>
@@ -11,7 +11,6 @@ describe('PanelList', () => {
       { id: 'use-local-time-checkbox', label: 'Use local time', default: false },
       { id: 'query-history-checkbox', label: 'Enable query history', default: false },
       { id: 'autocomplete-checkbox', label: 'Enable autocomplete', default: true },
-      { id: 'use-experimental-editor-checkbox', label: 'Use experimental editor', default: true },
       { id: 'highlighting-checkbox', label: 'Enable highlighting', default: true },
       { id: 'linter-checkbox', label: 'Enable linter', default: true },
     ].forEach((cb, idx) => {
@@ -20,7 +20,6 @@ interface PanelListContentProps {
   panels: PanelMeta[];
   metrics: string[];
   useLocalTime: boolean;
-  useExperimentalEditor: boolean;
   queryHistoryEnabled: boolean;
   enableAutocomplete: boolean;
   enableHighlighting: boolean;
@@ -30,7 +29,6 @@ interface PanelListContentProps {
 export const PanelListContent: FC<PanelListContentProps> = ({
   metrics = [],
   useLocalTime,
-  useExperimentalEditor,
   queryHistoryEnabled,
   enableAutocomplete,
   enableHighlighting,
@@ -105,7 +103,6 @@ export const PanelListContent: FC<PanelListContentProps> = ({
             )
           )
         }
-        useExperimentalEditor={useExperimentalEditor}
         useLocalTime={useLocalTime}
         metricNames={metrics}
         pastQueries={queryHistoryEnabled ? historyItems : []}
@@ -123,7 +120,6 @@ export const PanelListContent: FC<PanelListContentProps> = ({

 const PanelList: FC = () => {
   const [delta, setDelta] = useState(0);
-  const [useExperimentalEditor, setUseExperimentalEditor] = useLocalStorage('use-new-editor', true);
   const [useLocalTime, setUseLocalTime] = useLocalStorage('use-local-time', false);
   const [enableQueryHistory, setEnableQueryHistory] = useLocalStorage('enable-query-history', false);
   const [enableAutocomplete, setEnableAutocomplete] = useLocalStorage('enable-metric-autocomplete', true);
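These settings persist across reloads through a `useLocalStorage` hook. The project ships its own implementation, which is not shown in this diff; the following is only a minimal sketch of what such a hook typically does, under that assumption:

import { Dispatch, SetStateAction, useEffect, useState } from 'react';

export function useLocalStorage<S>(key: string, initialState: S): [S, Dispatch<SetStateAction<S>>] {
  const [value, setValue] = useState<S>(() => {
    // Fall back to the provided default when nothing is stored yet.
    const stored = localStorage.getItem(key);
    return stored !== null ? (JSON.parse(stored) as S) : initialState;
  });
  useEffect(() => {
    // Persist every change so the checkbox settings survive page reloads.
    localStorage.setItem(key, JSON.stringify(value));
  }, [key, value]);
  return [value, setValue];
}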
@@ -180,21 +176,11 @@ const PanelList: FC = () => {
           Enable autocomplete
         </Checkbox>
       </div>
-      <div className="float-right">
-        <Checkbox
-          wrapperStyles={{ display: 'inline-block' }}
-          id="use-experimental-editor-checkbox"
-          onChange={({ target }) => setUseExperimentalEditor(target.checked)}
-          defaultChecked={useExperimentalEditor}
-        >
-          Use experimental editor
-        </Checkbox>
       <Checkbox
         wrapperStyles={{ marginLeft: 20, display: 'inline-block' }}
         id="highlighting-checkbox"
         onChange={({ target }) => setEnableHighlighting(target.checked)}
         defaultChecked={enableHighlighting}
-        disabled={!useExperimentalEditor}
       >
         Enable highlighting
       </Checkbox>
@@ -203,12 +189,10 @@ const PanelList: FC = () => {
         id="linter-checkbox"
         onChange={({ target }) => setEnableLinter(target.checked)}
         defaultChecked={enableLinter}
-        disabled={!useExperimentalEditor}
       >
         Enable linter
       </Checkbox>
     </div>
-    </div>
     {(delta > 30 || timeErr) && (
       <Alert color="danger">
         <strong>Warning: </strong>
@@ -227,7 +211,6 @@ const PanelList: FC = () => {
       panels={decodePanelOptionsFromQueryString(window.location.search)}
       useLocalTime={useLocalTime}
       metrics={metricsRes.data}
-      useExperimentalEditor={useExperimentalEditor}
       queryHistoryEnabled={enableQueryHistory}
       enableAutocomplete={enableAutocomplete}
       enableHighlighting={enableHighlighting}