Merge pull request #7913 from prometheus/release-2.21
Merge release 2.21 into master
Commit: a6ee1f8517

CHANGELOG.md: 39 changed lines
CHANGELOG.md
@@ -1,3 +1,42 @@
+## 2.21.0-rc.1 / 2020-09-08
+
+This release is built with Go 1.15, which deprecates [X.509
+CommonName](https://golang.org/doc/go1.15#commonname) in TLS certificates
+validation.
+
+In the unlikely case that you use the gRPC API v2 (which is limited to TSDB
+admin commands), please note that we will remove this experimental API in the
+next minor release 2.22.
+
+* [CHANGE] Disable HTTP2 because of concerns with the Go HTTP/2 client. #7588 #7701
+* [CHANGE] PromQL: `query_log_file` path is now relative to the config file. #7701
+* [CHANGE] Promtool: Replace the tsdb command line tool by a promtool tsdb subcommand. #6088
+* [CHANGE] Rules: Label `rule_group_iterations` metric with group name. #7823
+* [FEATURE] Eureka SD: New service discovery. #3369
+* [FEATURE] Hetzner SD: New service discovery. #7822
+* [FEATURE] Kubernetes SD: Support Kubernetes EndpointSlices. #6838
+* [FEATURE] Scrape: Add per scrape-config targets limit. #7554
+* [ENHANCEMENT] Support composite durations in PromQL, config and UI, e.g. 1h30m. #7713 #7833
+* [ENHANCEMENT] DNS SD: Add SRV record target and port meta labels. #7678
+* [ENHANCEMENT] Docker Swarm SD: Support tasks and service without published ports. #7686
+* [ENHANCEMENT] PromQL: Reduce the amount of data queried by remote read when a subquery has an offset. #7667
+* [ENHANCEMENT] Promtool: Add `--time` option to query instant command. #7829
+* [ENHANCEMENT] UI: Respect the `--web.page-title` parameter in the React UI. #7607
+* [ENHANCEMENT] UI: Add duration, labels, annotations to alerts page in the React UI. #7605
+* [ENHANCEMENT] UI: Add duration on the React UI rules page, hide annotation and labels if empty. #7606
+* [BUGFIX] API: Deduplicate series in /api/v1/series. #7862
+* [BUGFIX] PromQL: Drop metric name in bool comparison between two instant vectors. #7819
+* [BUGFIX] PromQL: Exit with an error when time parameters can't be parsed. #7505
+* [BUGFIX] Rules: Detect extra fields in rule files. #7767
+* [BUGFIX] Rules: Disallow overwriting the metric name in the `labels` section of recording rules. #7787
+* [BUGFIX] Rules: Keep evaluation timestamp across reloads. #7775
+* [BUGFIX] Scrape: Do not stop scrapes in progress during reload. #7752
+* [BUGFIX] TSDB: Fix `chunks.HeadReadWriter: maxt of the files are not set` error. #7856
+* [BUGFIX] TSDB: Delete blocks atomically to prevent corruption when there is a panic/crash during deletion. #7772
+* [BUGFIX] Triton SD: Fix a panic when triton_sd_config is nil. #7671
+* [BUGFIX] UI: Fix react UI bug with series going on and off. #7804
+* [BUGFIX] Web: Stop CMUX and GRPC servers even with stale connections, preventing the server to stop on SIGTERM. #7810
+
 ## 2.20.1 / 2020-08-05
 
 * [BUGFIX] SD: Reduce the Consul watch timeout to 2m and adjust the request timeout accordingly. #7724
@@ -26,7 +26,8 @@ import (
     "time"

     "github.com/go-kit/kit/log"
-    config_util "github.com/prometheus/common/config"
+    "github.com/pkg/errors"
+    "github.com/prometheus/common/config"
     "github.com/prometheus/common/model"
     "github.com/prometheus/prometheus/discovery/refresh"
     "github.com/prometheus/prometheus/discovery/targetgroup"

@@ -54,7 +55,7 @@ func newRobotDiscovery(conf *SDConfig, logger log.Logger) (*robotDiscovery, erro
         endpoint: conf.robotEndpoint,
     }

-    rt, err := config_util.NewRoundTripperFromConfig(conf.HTTPClientConfig, "hetzner_sd", false, false)
+    rt, err := config.NewRoundTripperFromConfig(conf.HTTPClientConfig, "hetzner_sd", false, false)
     if err != nil {
         return nil, err
     }

@@ -70,10 +71,16 @@ func (d *robotDiscovery) refresh(ctx context.Context) ([]*targetgroup.Group, err
     if err != nil {
         return nil, err
     }
+
     defer func() {
         io.Copy(ioutil.Discard, resp.Body)
         resp.Body.Close()
     }()
+
+    if resp.StatusCode/100 != 2 {
+        return nil, errors.Errorf("non 2xx status '%d' response during hetzner service discovery with role robot", resp.StatusCode)
+    }
+
     var servers serversList
     err = json.NewDecoder(resp.Body).Decode(&servers)
     if err != nil {
@@ -37,7 +37,6 @@ func (s *robotSDTestSuite) SetupTest(t *testing.T) {
 func TestRobotSDRefresh(t *testing.T) {
     suite := &robotSDTestSuite{}
     suite.SetupTest(t)

     cfg := DefaultSDConfig
     cfg.HTTPClientConfig.BasicAuth = &config.BasicAuth{Username: robotTestUsername, Password: robotTestPassword}
     cfg.robotEndpoint = suite.Mock.Endpoint()

@@ -84,3 +83,19 @@ func TestRobotSDRefresh(t *testing.T) {
         })
     }
 }
+
+func TestRobotSDRefreshHandleError(t *testing.T) {
+    suite := &robotSDTestSuite{}
+    suite.SetupTest(t)
+    cfg := DefaultSDConfig
+    cfg.robotEndpoint = suite.Mock.Endpoint()
+
+    d, err := newRobotDiscovery(&cfg, log.NewNopLogger())
+    testutil.Ok(t, err)
+
+    targetGroups, err := d.refresh(context.Background())
+    testutil.NotOk(t, err)
+    testutil.Equals(t, "non 2xx status '401' response during hetzner service discovery with role robot", err.Error())
+
+    testutil.Equals(t, 0, len(targetGroups))
+}
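Taken together, the refresh change and the new test pin down the behaviour for non-2xx responses: the body is still drained and closed so the connection can be reused, and the status code is turned into an error. Below is a minimal, self-contained sketch of that pattern, not the Prometheus implementation itself; the httptest server stands in for the mock robot endpoint, and github.com/pkg/errors is used only because the patch itself imports it.

```go
// Illustrative sketch of the new non-2xx handling, against a stand-in
// endpoint that always answers 401, as the mock in the new test does.
package main

import (
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/http/httptest"

	"github.com/pkg/errors"
)

func main() {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusUnauthorized) // what an unauthenticated robot API call would return
	}))
	defer srv.Close()

	resp, err := http.Get(srv.URL)
	if err != nil {
		panic(err)
	}
	// Mirror the refresh function: always drain and close the body so the
	// underlying connection can be reused.
	defer func() {
		io.Copy(ioutil.Discard, resp.Body)
		resp.Body.Close()
	}()

	if resp.StatusCode/100 != 2 {
		fmt.Println(errors.Errorf("non 2xx status '%d' response during hetzner service discovery with role robot", resp.StatusCode))
		return
	}
	fmt.Println("2xx response; the real code would decode the server list here")
}
```

Run against the 401 handler, this prints the same message the new test asserts on.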
@@ -936,6 +936,7 @@ changed with relabeling, as demonstrated in [the Prometheus hetzner-sd
 configuration file](/documentation/examples/prometheus-hetzner.yml).

 The following meta labels are available on all targets during [relabeling](#relabel_config):

 * `__meta_hetzner_server_id`: the ID of the server
 * `__meta_hetzner_server_name`: the name of the server
 * `__meta_hetzner_server_status`: the status of the server

@@ -944,6 +945,7 @@ The following meta labels are available on all targets during [relabeling](#rela
 * `__meta_hetzner_datacenter`: the datacenter of the server

 The labels below are only available for targets with `role` set to `hcloud`:

 * `__meta_hetzner_hcloud_image_name`: the image name of the server
 * `__meta_hetzner_hcloud_image_description`: the description of the server image
 * `__meta_hetzner_hcloud_image_os_flavor`: the OS flavor of the server image

@@ -960,6 +962,7 @@ The labels below are only available for targets with `role` set to `hcloud`:
 * `__meta_hetzner_hcloud_label_<labelname>`: each label of the server

 The labels below are only available for targets with `role` set to `robot`:

 * `__meta_hetzner_robot_product`: the product of the server
 * `__meta_hetzner_robot_cancelled`: the server cancellation status
@@ -16,6 +16,7 @@ package storage
 import (
     "bytes"
     "container/heap"
+    "math"
     "sort"
     "strings"
     "sync"

@@ -331,7 +332,7 @@ func (c *genericMergeSeriesSet) Next() bool {
     // If, for the current label set, all the next series sets come from
     // failed remote storage sources, we want to keep trying with the next label set.
     for {
-        // Firstly advance all the current series sets. If any of them have run out
+        // Firstly advance all the current series sets. If any of them have run out,
         // we can drop them, otherwise they should be inserted back into the heap.
         for _, set := range c.currentSets {
             if set.Next() {

@@ -418,8 +419,7 @@ func (h *genericSeriesSetHeap) Pop() interface{} {
 // with "almost" the same data, e.g. from 2 Prometheus HA replicas. This is fine, since from the Prometheus perspective
 // this never happens.
 //
-// NOTE: Use this merge function only when you see potentially overlapping series, as this introduces a small overhead
-// to handle overlaps between series.
+// It's optimized for non-overlap cases as well.
 func ChainedSeriesMerge(series ...Series) Series {
     if len(series) == 0 {
         return nil
@@ -438,16 +438,20 @@ func ChainedSeriesMerge(series ...Series) Series {

 // chainSampleIterator is responsible to iterate over samples from different iterators of the same time series in timestamps
 // order. If one or more samples overlap, one sample from random overlapped ones is kept and all others with the same
-// timestamp are dropped.
+// timestamp are dropped. It's optimized for non-overlap cases as well.
 type chainSampleIterator struct {
     iterators []chunkenc.Iterator
     h         samplesIteratorHeap
+
+    curr  chunkenc.Iterator
+    lastt int64
 }

 func newChainSampleIterator(iterators []chunkenc.Iterator) chunkenc.Iterator {
     return &chainSampleIterator{
         iterators: iterators,
         h:         nil,
+        lastt:     math.MinInt64,
     }
 }

@@ -458,47 +462,74 @@ func (c *chainSampleIterator) Seek(t int64) bool {
             heap.Push(&c.h, iter)
         }
     }
-    return len(c.h) > 0
+    if len(c.h) > 0 {
+        c.curr = heap.Pop(&c.h).(chunkenc.Iterator)
+        return true
+    }
+    c.curr = nil
+    return false
 }

 func (c *chainSampleIterator) At() (t int64, v float64) {
-    if len(c.h) == 0 {
-        panic("chainSampleIterator.At() called after .Next() returned false.")
+    if c.curr == nil {
+        panic("chainSampleIterator.At() called before first .Next() or after .Next() returned false.")
     }
-
-    return c.h[0].At()
+    return c.curr.At()
 }

 func (c *chainSampleIterator) Next() bool {
     if c.h == nil {
-        for _, iter := range c.iterators {
+        c.h = samplesIteratorHeap{}
+        // We call c.curr.Next() as the first thing below.
+        // So, we don't call Next() on it here.
+        c.curr = c.iterators[0]
+        for _, iter := range c.iterators[1:] {
             if iter.Next() {
                 heap.Push(&c.h, iter)
             }
         }
-
-        return len(c.h) > 0
     }

-    if len(c.h) == 0 {
+    if c.curr == nil {
         return false
     }

-    currt, _ := c.At()
-    for len(c.h) > 0 {
-        nextt, _ := c.h[0].At()
-        // All but one of the overlapping samples will be dropped.
-        if nextt != currt {
-            break
+    var currt int64
+    for {
+        if c.curr.Next() {
+            currt, _ = c.curr.At()
+            if currt == c.lastt {
+                // Ignoring sample for the same timestamp.
+                continue
+            }
+            if len(c.h) == 0 {
+                // curr is the only iterator remaining,
+                // no need to check with the heap.
+                break
+            }
+
+            // Check current iterator with the top of the heap.
+            if nextt, _ := c.h[0].At(); currt < nextt {
+                // Current iterator has smaller timestamp than the heap.
+                break
+            }
+            // Current iterator does not hold the smallest timestamp.
+            heap.Push(&c.h, c.curr)
+        } else if len(c.h) == 0 {
+            // No iterator left to iterate.
+            c.curr = nil
+            return false
         }

-        iter := heap.Pop(&c.h).(chunkenc.Iterator)
-        if iter.Next() {
-            heap.Push(&c.h, iter)
+        c.curr = heap.Pop(&c.h).(chunkenc.Iterator)
+        currt, _ = c.curr.At()
+        if currt != c.lastt {
+            break
         }
     }

-    return len(c.h) > 0
+    c.lastt = currt
+    return true
 }

 func (c *chainSampleIterator) Err() error {
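The reworked Next keeps the iterator that produced the last sample out of the heap ("curr"), advances it first, and skips any sample whose timestamp equals the last one emitted, so overlapping inputs contribute each timestamp exactly once while the common non-overlapping case avoids most heap traffic. The sketch below is a simplified, self-contained rendering of that loop over plain slices; sample, sliceIter, iterHeap and mergeSamples are toy names invented for the example and are not the Prometheus types.

```go
// Simplified sketch of the reworked merge loop: "curr" stays out of the heap,
// is advanced first, and samples repeating the last emitted timestamp are dropped.
package main

import (
	"container/heap"
	"fmt"
	"math"
)

type sample struct {
	t int64
	v float64
}

// sliceIter iterates over a pre-sorted slice of samples, mimicking an iterator.
type sliceIter struct {
	samples []sample
	i       int
}

func (s *sliceIter) Next() bool { s.i++; return s.i < len(s.samples) }
func (s *sliceIter) At() sample { return s.samples[s.i] }

// iterHeap orders iterators by the timestamp of their current sample.
type iterHeap []*sliceIter

func (h iterHeap) Len() int            { return len(h) }
func (h iterHeap) Less(i, j int) bool  { return h[i].At().t < h[j].At().t }
func (h iterHeap) Swap(i, j int)       { h[i], h[j] = h[j], h[i] }
func (h *iterHeap) Push(x interface{}) { *h = append(*h, x.(*sliceIter)) }
func (h *iterHeap) Pop() interface{} {
	old := *h
	x := old[len(old)-1]
	*h = old[:len(old)-1]
	return x
}

// mergeSamples merges already-sorted inputs by timestamp, keeping one sample
// per timestamp (assumes at least one input).
func mergeSamples(inputs ...[]sample) []sample {
	iters := make([]*sliceIter, 0, len(inputs))
	for _, in := range inputs {
		iters = append(iters, &sliceIter{samples: in, i: -1})
	}
	// The first iterator becomes "curr"; it is advanced at the top of the
	// loop, so it is not pushed onto the heap here.
	curr := iters[0]
	h := iterHeap{}
	for _, it := range iters[1:] {
		if it.Next() {
			heap.Push(&h, it)
		}
	}

	var out []sample
	lastt := int64(math.MinInt64)
	for {
		if curr.Next() {
			s := curr.At()
			if s.t == lastt {
				continue // same timestamp as the last emitted sample, drop it
			}
			if len(h) == 0 || s.t < h[0].At().t {
				// curr still holds the smallest timestamp: emit without touching the heap.
				out = append(out, s)
				lastt = s.t
				continue
			}
			// Another iterator holds the smallest timestamp; park curr on the heap.
			heap.Push(&h, curr)
		} else if len(h) == 0 {
			return out // nothing left to iterate
		}
		curr = heap.Pop(&h).(*sliceIter)
		if s := curr.At(); s.t != lastt {
			out = append(out, s)
			lastt = s.t
		}
	}
}

func main() {
	// The three overlapping inputs from the "three in chained overlap" test case.
	fmt.Println(mergeSamples(
		[]sample{{1, 1}, {2, 2}, {3, 3}, {5, 5}},
		[]sample{{4, 4}, {6, 66}},
		[]sample{{6, 6}, {10, 10}},
	))
	// Each timestamp appears once; which t=6 value survives depends on heap order.
}
```

Fed the three overlapping series from the updated TestCompactingChunkSeriesMerger case, it keeps one sample per timestamp; which of the two t=6 samples survives depends on heap ordering, matching the documented "one sample from random overlapped ones is kept" behaviour.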
@@ -448,10 +448,10 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
             name: "three in chained overlap",
             input: []ChunkSeries{
                 NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 5}}),
-                NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{4, 4}, sample{6, 6}}),
+                NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{4, 4}, sample{6, 66}}),
                 NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{6, 6}, sample{10, 10}}),
             },
-            expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{4, 4}, sample{5, 5}, sample{6, 6}, sample{10, 10}}),
+            expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{4, 4}, sample{5, 5}, sample{6, 66}, sample{10, 10}}),
         },
         {
             name: "three in chained overlap complex",
@@ -603,7 +603,8 @@ func (api *API) series(r *http.Request) (result apiFuncResult) {

     var sets []storage.SeriesSet
     for _, mset := range matcherSets {
-        s := q.Select(false, nil, mset...)
+        // We need to sort this select results to merge (deduplicate) the series sets later.
+        s := q.Select(true, nil, mset...)
         sets = append(sets, s)
     }

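Requesting sorted Select results is what lets the downstream merge drop duplicate series in a single pass when several match[] selectors overlap. Below is a toy illustration of that idea, with plain sorted string slices standing in for label sets; mergeSortedDedup is invented for the example and is not a Prometheus API.

```go
// Toy illustration of streaming de-duplication over sorted inputs; the label
// strings echo the new test_metric4 fixtures.
package main

import "fmt"

// mergeSortedDedup merges two sorted slices and returns their union without
// duplicates, touching each element once.
func mergeSortedDedup(a, b []string) []string {
	var out []string
	emit := func(v string) {
		if len(out) == 0 || out[len(out)-1] != v {
			out = append(out, v)
		}
	}
	i, j := 0, 0
	for i < len(a) && j < len(b) {
		switch {
		case a[i] < b[j]:
			emit(a[i])
			i++
		case a[i] > b[j]:
			emit(b[j])
			j++
		default: // the same series matched by both selectors
			emit(a[i])
			i++
			j++
		}
	}
	for ; i < len(a); i++ {
		emit(a[i])
	}
	for ; j < len(b); j++ {
		emit(b[j])
	}
	return out
}

func main() {
	byDup := []string{
		`test_metric4{dup="1",foo="bar"}`,
		`test_metric4{dup="1",foo="boo"}`,
	}
	byFooRegex := []string{
		`test_metric4{dup="1",foo="boo"}`,
		`test_metric4{foo="boo"}`,
	}
	fmt.Println(mergeSortedDedup(byDup, byFooRegex))
	// [test_metric4{dup="1",foo="bar"} test_metric4{dup="1",foo="boo"} test_metric4{foo="boo"}]
}
```

The output lists each series once, which is what the new test_metric4 case expects from /api/v1/series.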
@@ -303,6 +303,11 @@ func TestEndpoints(t *testing.T) {
             test_metric1{foo="bar"} 0+100x100
             test_metric1{foo="boo"} 1+0x100
             test_metric2{foo="boo"} 1+0x100
+            test_metric3{foo="bar", dup="1"} 1+0x100
+            test_metric3{foo="boo", dup="1"} 1+0x100
+            test_metric4{foo="bar", dup="1"} 1+0x100
+            test_metric4{foo="boo", dup="1"} 1+0x100
+            test_metric4{foo="boo"} 1+0x100
     `)
     testutil.Ok(t, err)
     defer suite.Close()

@@ -737,6 +742,18 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
                 labels.FromStrings("__name__", "test_metric1", "foo", "boo"),
             },
         },
+        // Try to overlap the selected series set as much as possible to test the result de-duplication works well.
+        {
+            endpoint: api.series,
+            query: url.Values{
+                "match[]": []string{`test_metric4{foo=~".+o$"}`, `test_metric4{dup=~"^1"}`},
+            },
+            response: []labels.Labels{
+                labels.FromStrings("__name__", "test_metric4", "dup", "1", "foo", "bar"),
+                labels.FromStrings("__name__", "test_metric4", "dup", "1", "foo", "boo"),
+                labels.FromStrings("__name__", "test_metric4", "foo", "boo"),
+            },
+        },
         {
             endpoint: api.series,
             query: url.Values{

@@ -1449,6 +1466,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
             response: []string{
                 "test_metric1",
                 "test_metric2",
+                "test_metric3",
+                "test_metric4",
             },
         },
         {

@@ -1597,7 +1616,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
         // Label names.
         {
             endpoint: api.labelNames,
-            response: []string{"__name__", "foo"},
+            response: []string{"__name__", "dup", "foo"},
         },
         // Start and end before Label names starts.
         {

@@ -1615,7 +1634,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
                 "start": []string{"1"},
                 "end": []string{"100"},
             },
-            response: []string{"__name__", "foo"},
+            response: []string{"__name__", "dup", "foo"},
         },
         // Start before Label names, end within Label names.
         {

@@ -1624,7 +1643,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
                 "start": []string{"-1"},
                 "end": []string{"10"},
             },
-            response: []string{"__name__", "foo"},
+            response: []string{"__name__", "dup", "foo"},
         },

         // Start before Label names starts, end after Label names ends.

@@ -1634,7 +1653,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
                 "start": []string{"-1"},
                 "end": []string{"100000"},
             },
-            response: []string{"__name__", "foo"},
+            response: []string{"__name__", "dup", "foo"},
         },
         // Start with bad data for Label names, end within Label names.
         {

@@ -1652,7 +1671,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
                 "start": []string{"1"},
                 "end": []string{"1000000006"},
             },
-            response: []string{"__name__", "foo"},
+            response: []string{"__name__", "dup", "foo"},
         },
         // Start and end after Label names ends.
         {

@@ -1669,7 +1688,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
             query: url.Values{
                 "start": []string{"4"},
             },
-            response: []string{"__name__", "foo"},
+            response: []string{"__name__", "dup", "foo"},
         },
         // Only provide End within Label names, don't provide a start time.
         {

@@ -1677,7 +1696,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
             query: url.Values{
                 "end": []string{"20"},
             },
-            response: []string{"__name__", "foo"},
+            response: []string{"__name__", "dup", "foo"},
         },
     }...)
 }
@@ -31,8 +31,10 @@ describe('targetLabels', () => {
   it('wraps each label in a label badge', () => {
     const l: { [key: string]: string } = defaultProps.labels;
     Object.keys(l).forEach((labelName: string): void => {
-      const badge = targetLabels.find(Badge).filterWhere(badge => badge.hasClass(labelName));
-      expect(badge.children().text()).toEqual(`${labelName}="${l[labelName]}"`);
+      const badge = targetLabels
+        .find(Badge)
+        .filterWhere(badge => badge.children().text() === `${labelName}="${l[labelName]}"`);
+      expect(badge).toHaveLength(1);
     });
     expect(targetLabels.find(Badge)).toHaveLength(3);
   });
@@ -27,7 +27,7 @@ const TargetLabels: FC<TargetLabelsProps> = ({ discoveredLabels, labels, idx, sc
     <div id={id} className="series-labels-container">
       {Object.keys(labels).map(labelName => {
         return (
-          <Badge color="primary" className={`mr-1 ${labelName}`} key={labelName}>
+          <Badge color="primary" className="mr-1" key={labelName}>
             {`${labelName}="${labels[labelName]}"`}
           </Badge>
         );
@@ -7,7 +7,7 @@ exports[`targetLabels renders discovered labels 1`] = `
   id="series-labels-cortex/node-exporter_group/0-1"
 >
   <Badge
-    className="mr-1 instance"
+    className="mr-1"
     color="primary"
     key="instance"
     pill={false}

@@ -16,7 +16,7 @@ exports[`targetLabels renders discovered labels 1`] = `
     instance="localhost:9100"
   </Badge>
   <Badge
-    className="mr-1 job"
+    className="mr-1"
     color="primary"
     key="job"
     pill={false}

@@ -25,7 +25,7 @@ exports[`targetLabels renders discovered labels 1`] = `
     job="node_exporter"
   </Badge>
   <Badge
-    className="mr-1 foo"
+    className="mr-1"
     color="primary"
     key="foo"
     pill={false}
@@ -10,8 +10,8 @@
     <thead>
       <tr>
         <td colspan="3"><h2><a href="#{{reReplaceAll "([^a-zA-Z0-9])" "$1" .Name}}" id="{{reReplaceAll "([^a-zA-Z0-9])" "$1" .Name}}">{{.Name}}</a></h2></td>
-        <td><h2>{{if .GetEvaluationTimestamp.IsZero}}Never{{else}}{{since .GetEvaluationTimestamp}} ago{{end}}</h2></td>
-        <td><h2>{{humanizeDuration .GetEvaluationDuration.Seconds}}</h2></td>
+        <td><h2>{{if .GetLastEvaluation.IsZero}}Never{{else}}{{since .GetLastEvaluation}} ago{{end}}</h2></td>
+        <td><h2>{{humanizeDuration .GetEvaluationTime.Seconds}}</h2></td>
       </tr>
     </thead>
     <tbody>