Merge branch 'main' into update-react-and-ts

Signed-off-by: Augustin Husson <husson.augustin@gmail.com>
Augustin Husson 2021-09-07 11:51:39 +02:00
commit c827413755
63 changed files with 42548 additions and 1524 deletions

View file

@@ -1,3 +1,8 @@
+## 2.29.2 / 2021-08-27
+
+* [BUGFIX] Fix Kubernetes SD failing to discover Ingress in Kubernetes v1.22. #9205
+* [BUGFIX] Fix data race in loading write-ahead-log (WAL). #9259
+
 ## 2.29.1 / 2021-08-11
 
 * [BUGFIX] tsdb: align atomically accessed int64 to prevent panic in 32-bit

View file

@@ -1 +1 @@
-2.29.1
+2.29.2

View file

@@ -107,6 +107,7 @@ type flagConfig struct {
 	outageTolerance model.Duration
 	resendDelay     model.Duration
 	web             web.Options
+	scrape          scrape.Options
 	tsdb            tsdbOptions
 	lookbackDelta   model.Duration
 	webTimeout      model.Duration
@@ -152,6 +153,9 @@ func (c *flagConfig) setFeatureListOptions(logger log.Logger) error {
 		case "memory-snapshot-on-shutdown":
 			c.tsdb.EnableMemorySnapshotOnShutdown = true
 			level.Info(logger).Log("msg", "Experimental memory snapshot on shutdown enabled")
+		case "extra-scrape-metrics":
+			c.scrape.ExtraMetrics = true
+			level.Info(logger).Log("msg", "Experimental additional scrape metrics")
 		case "":
 			continue
 		default:
@@ -312,7 +316,7 @@ func main() {
 	a.Flag("query.max-samples", "Maximum number of samples a single query can load into memory. Note that queries will fail if they try to load more samples than this into memory, so this also limits the number of samples a query can return.").
 		Default("50000000").IntVar(&cfg.queryMaxSamples)
 
-	a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-at-modifier, promql-negative-offset, remote-write-receiver. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details.").
+	a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-at-modifier, promql-negative-offset, remote-write-receiver, extra-scrape-metrics. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details.").
 		Default("").StringsVar(&cfg.featureList)
 
 	promlogflag.AddFlags(a, &cfg.promlogConfig)
@@ -457,7 +461,7 @@ func main() {
 		ctxNotify, cancelNotify = context.WithCancel(context.Background())
 		discoveryManagerNotify  = discovery.NewManager(ctxNotify, log.With(logger, "component", "discovery manager notify"), discovery.Name("notify"))
 
-		scrapeManager = scrape.NewManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
+		scrapeManager = scrape.NewManager(&cfg.scrape, log.With(logger, "component", "scrape manager"), fanoutStorage)
 
 		opts = promql.EngineOpts{
 			Logger: log.With(logger, "component", "query engine"),

View file

@@ -24,7 +24,7 @@ import (
 	"net/url"
 	"os"
 	"path/filepath"
-	"reflect"
+	"sort"
 	"strconv"
 	"strings"
 	"time"
@@ -48,6 +48,7 @@ import (
 	_ "github.com/prometheus/prometheus/discovery/install" // Register service discovery implementations.
 	"github.com/prometheus/prometheus/discovery/kubernetes"
 	"github.com/prometheus/prometheus/discovery/targetgroup"
+	"github.com/prometheus/prometheus/pkg/labels"
 	"github.com/prometheus/prometheus/pkg/rulefmt"
 	"github.com/prometheus/prometheus/promql"
 )
@@ -471,8 +472,8 @@ func checkRules(filename string) (int, []error) {
 		fmt.Printf("%d duplicate rule(s) found.\n", len(dRules))
 		for _, n := range dRules {
 			fmt.Printf("Metric: %s\nLabel(s):\n", n.metric)
-			for i, l := range n.label {
-				fmt.Printf("\t%s: %s\n", i, l)
+			for _, l := range n.label {
+				fmt.Printf("\t%s: %s\n", l.Name, l.Value)
 			}
 		}
 		fmt.Println("Might cause inconsistency while recording expressions.")
@@ -483,29 +484,52 @@ func checkRules(filename string) (int, []error) {
 type compareRuleType struct {
 	metric string
-	label  map[string]string
+	label  labels.Labels
+}
+
+type compareRuleTypes []compareRuleType
+
+func (c compareRuleTypes) Len() int           { return len(c) }
+func (c compareRuleTypes) Swap(i, j int)      { c[i], c[j] = c[j], c[i] }
+func (c compareRuleTypes) Less(i, j int) bool { return compare(c[i], c[j]) < 0 }
+
+func compare(a, b compareRuleType) int {
+	if res := strings.Compare(a.metric, b.metric); res != 0 {
+		return res
+	}
+	return labels.Compare(a.label, b.label)
 }
 
 func checkDuplicates(groups []rulefmt.RuleGroup) []compareRuleType {
 	var duplicates []compareRuleType
+	var rules compareRuleTypes
 
 	for _, group := range groups {
-		for index, rule := range group.Rules {
-			inst := compareRuleType{
+		for _, rule := range group.Rules {
+			rules = append(rules, compareRuleType{
 				metric: ruleMetric(rule),
-				label:  rule.Labels,
-			}
-			for i := 0; i < index; i++ {
-				t := compareRuleType{
-					metric: ruleMetric(group.Rules[i]),
-					label:  group.Rules[i].Labels,
-				}
-				if reflect.DeepEqual(t, inst) {
-					duplicates = append(duplicates, t)
-				}
-			}
+				label:  labels.FromMap(rule.Labels),
+			})
 		}
 	}
+
+	if len(rules) < 2 {
+		return duplicates
+	}
+	sort.Sort(rules)
+
+	last := rules[0]
+	for i := 1; i < len(rules); i++ {
+		if compare(last, rules[i]) == 0 {
+			// Don't add a duplicated rule multiple times.
+			if len(duplicates) == 0 || compare(last, duplicates[len(duplicates)-1]) != 0 {
+				duplicates = append(duplicates, rules[i])
+			}
+		}
+		last = rules[i]
+	}
+
	return duplicates
 }
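The rewritten `checkDuplicates` drops the O(n²) `reflect.DeepEqual` scan (which only compared rules inside a single group) in favour of collecting every rule, sorting once, and comparing adjacent entries. A self-contained sketch of that sort-then-scan pattern, with plain strings standing in for `compareRuleType` (names here are illustrative, not part of the commit):

```go
package main

import (
	"fmt"
	"sort"
)

// findDuplicates reports each value that occurs more than once,
// listing it a single time no matter how often it repeats.
func findDuplicates(rules []string) []string {
	if len(rules) < 2 {
		return nil
	}
	sorted := append([]string(nil), rules...)
	sort.Strings(sorted)

	var dups []string
	last := sorted[0]
	for _, r := range sorted[1:] {
		// Equal neighbours after sorting are duplicates; only record
		// the first repetition of each value.
		if r == last && (len(dups) == 0 || dups[len(dups)-1] != r) {
			dups = append(dups, r)
		}
		last = r
	}
	return dups
}

func main() {
	fmt.Println(findDuplicates([]string{"b", "a", "b", "b", "c", "a"})) // [a b]
}
```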

View file

@@ -21,6 +21,8 @@ import (
 	"testing"
 	"time"
 
+	"github.com/prometheus/prometheus/pkg/labels"
+	"github.com/prometheus/prometheus/pkg/rulefmt"
 	"github.com/stretchr/testify/require"
 )
 
@@ -118,3 +120,46 @@ func TestCheckSDFile(t *testing.T) {
 		})
 	}
 }
func TestCheckDuplicates(t *testing.T) {
cases := []struct {
name string
ruleFile string
expectedDups []compareRuleType
}{
{
name: "no duplicates",
ruleFile: "./testdata/rules.yml",
},
{
name: "duplicate in other group",
ruleFile: "./testdata/rules_duplicates.yml",
expectedDups: []compareRuleType{
{
metric: "job:test:count_over_time1m",
label: labels.New(),
},
},
},
}
for _, test := range cases {
c := test
t.Run(c.name, func(t *testing.T) {
rgs, err := rulefmt.ParseFile(c.ruleFile)
require.Empty(t, err)
dups := checkDuplicates(rgs.Groups)
require.Equal(t, c.expectedDups, dups)
})
}
}
func BenchmarkCheckDuplicates(b *testing.B) {
rgs, err := rulefmt.ParseFile("./testdata/rules_large.yml")
require.Empty(b, err)
b.ResetTimer()
for i := 0; i < b.N; i++ {
checkDuplicates(rgs.Groups)
}
}
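The benchmark reads the vendored `cmd/promtool/testdata/rules_large.yml` fixture (its diff is suppressed below for size). The commit does not include a generator for it, but a file of that general shape could be produced with a sketch like this — the file name, rule pattern, and count are assumptions, not the actual fixture's contents:

```go
package main

import (
	"fmt"
	"os"
)

// Writes a large rule file with many similar recording rules, suitable
// as input for a benchmark like BenchmarkCheckDuplicates. The real
// rules_large.yml fixture may differ in shape.
func main() {
	f, err := os.Create("rules_large.yml")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	fmt.Fprintln(f, "groups:")
	fmt.Fprintln(f, "  - name: large")
	fmt.Fprintln(f, "    rules:")
	for i := 0; i < 10000; i++ {
		fmt.Fprintf(f, "      - record: job:test:count_over_time1m_%d\n", i)
		fmt.Fprintf(f, "        expr: sum without(instance) (count_over_time(test_%d[1m]))\n", i)
	}
}
```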

View file

@@ -0,0 +1,24 @@
# This is a rules file with duplicate expressions
groups:
- name: base
rules:
- record: job:test:count_over_time1m
expr: sum without(instance) (count_over_time(test[1m]))
# A recording rule that doesn't depend on input series.
- record: fixed_data
expr: 1
# Subquery with default resolution test.
- record: suquery_interval_test
expr: count_over_time(up[5m:])
# Duplicating
- record: job:test:count_over_time1m
expr: sum without(instance) (count_over_time(test[1m]))
- name: duplicate
rules:
- record: job:test:count_over_time1m
expr: sum without(instance) (count_over_time(test[1m]))

cmd/promtool/testdata/rules_large.yml (vendored, new file, 40011 added lines)

File diff suppressed because it is too large

View file

@@ -1302,6 +1302,10 @@ var expectedErrors = []struct {
 		filename: "http_url_bad_scheme.bad.yml",
 		errMsg:   "URL scheme must be 'http' or 'https'",
 	},
+	{
+		filename: "empty_scrape_config_action.bad.yml",
+		errMsg:   "relabel action cannot be empty",
+	},
 }
 
 func TestBadConfigs(t *testing.T) {

View file

@@ -0,0 +1,4 @@
scrape_configs:
- job_name: prometheus
relabel_configs:
- action: null

View file

@@ -220,9 +220,8 @@ func (i *Ingress) buildIngress(ingress ingressAdaptor) *targetgroup.Group {
 }
 
 // matchesHostnamePattern returns true if the host matches a wildcard DNS
-// pattern or pattern and host are equal
+// pattern or pattern and host are equal.
 func matchesHostnamePattern(pattern, host string) bool {
-	// check for exact match
 	if pattern == host {
 		return true
 	}
@@ -230,13 +229,13 @@ func matchesHostnamePattern(pattern, host string) bool {
 	patternParts := strings.Split(pattern, ".")
 	hostParts := strings.Split(host, ".")
 
-	// if they are not equal, we cna check if we need to match
-	// on a wildcard or else give up
+	// If the first element of the pattern is not a wildcard, give up.
 	if len(patternParts) == 0 || patternParts[0] != "*" {
 		return false
 	}
 
-	// to get a valid wildcard match the parts will need to be the same length
+	// A wildcard match requires the pattern to have the same length as the host
+	// path.
 	if len(patternParts) != len(hostParts) {
 		return false
 	}
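For reference, the semantics the rewritten comments describe: `*` stands for exactly one leading DNS label, so pattern and host must split into the same number of dot-separated parts. A standalone re-implementation (not the package's unexported function, and the tail comparison beyond the visible hunk is assumed) behaves like this:

```go
package main

import (
	"fmt"
	"strings"
)

// matches mirrors the logic of matchesHostnamePattern above: exact
// equality, or a single leading "*" matching exactly one DNS label.
func matches(pattern, host string) bool {
	if pattern == host {
		return true
	}
	p := strings.Split(pattern, ".")
	h := strings.Split(host, ".")
	// Only a leading wildcard can rescue a non-exact match.
	if len(p) == 0 || p[0] != "*" {
		return false
	}
	// The wildcard covers one label, so lengths must agree.
	if len(p) != len(h) {
		return false
	}
	for i := 1; i < len(p); i++ {
		if p[i] != h[i] {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(matches("*.example.com", "foo.example.com"))     // true
	fmt.Println(matches("*.example.com", "foo.bar.example.com")) // false: extra label
	fmt.Println(matches("*.example.com", "example.com"))         // false: nothing for "*"
}
```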

View file

@@ -19,11 +19,11 @@ import (
 	"testing"
 
 	"github.com/prometheus/common/model"
-	"github.com/prometheus/prometheus/discovery/targetgroup"
 	v1 "k8s.io/api/networking/v1"
 	"k8s.io/api/networking/v1beta1"
 	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+
+	"github.com/prometheus/prometheus/discovery/targetgroup"
 )
 
 type TLSMode int

View file

@@ -2172,6 +2172,10 @@ it was not set during relabeling. The `__scheme__` and `__metrics_path__` labels
 are set to the scheme and metrics path of the target respectively. The `__param_<name>`
 label is set to the value of the first passed URL parameter called `<name>`.
 
+The `__scrape_interval__` and `__scrape_timeout__` labels are set to the target's
+interval and timeout. This is **experimental** and could change in the future.
+
 Additional labels prefixed with `__meta_` may be available during the
 relabeling phase. They are set by the service discovery mechanism that provided
 the target and vary between mechanisms.
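Because these two labels take part in relabeling like any other, a relabel rule can override them per target (the scrape pool later reads them back via `Target.intervalAndTimeout`, as the `scrape` changes below show). A minimal sketch using the `pkg/relabel` API as it appears elsewhere in this commit — the concrete values are illustrative:

```go
package main

import (
	"fmt"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/prometheus/prometheus/pkg/relabel"
)

func main() {
	// Discovered labels before relabeling; the interval/timeout labels
	// are pre-populated from the scrape config's values.
	lset := labels.FromMap(map[string]string{
		model.AddressLabel:        "127.0.0.1:9090",
		model.ScrapeIntervalLabel: "1m",
		model.ScrapeTimeoutLabel:  "10s",
	})

	// Double the timeout for this target via relabeling.
	cfg := &relabel.Config{
		SourceLabels: model.LabelNames{model.ScrapeTimeoutLabel},
		Regex:        relabel.MustNewRegexp("10s"),
		Replacement:  "20s",
		TargetLabel:  model.ScrapeTimeoutLabel,
		Action:       relabel.Replace,
	}

	res := relabel.Process(lset, cfg)
	fmt.Println(res.Get(model.ScrapeTimeoutLabel)) // 20s
}
```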

View file

@@ -34,7 +34,7 @@ that PromQL does not look ahead of the evaluation time for samples.
 `--enable-feature=promql-negative-offset`
 
 In contrast to the positive offset modifier, the negative offset modifier lets
 one shift a vector selector into the future. An example in which one may want
-to use a negative offset is reviewing past data and making temporal comparisons
+to use a negative offset is reviewing past data and making temporal comparisons
 with more recent data.
@@ -59,5 +59,15 @@ Exemplar storage is implemented as a fixed size circular buffer that stores exem
 `--enable-feature=memory-snapshot-on-shutdown`
 
 This takes the snapshot of the chunks that are in memory along with the series information when shutting down and stores
 it on disk. This will reduce the startup time since the memory state can be restored with this snapshot and m-mapped
 chunks without the need of WAL replay.
+
+## Extra Scrape Metrics
+
+`--enable-feature=extra-scrape-metrics`
+
+When enabled, for each instance scrape, Prometheus stores a sample in the following additional time series:
+
+- `scrape_timeout_seconds`. The configured `scrape_timeout` for a target. This allows you to measure each target to find out how close they are to timing out with `scrape_duration_seconds / scrape_timeout_seconds`.
+- `scrape_sample_limit`. The configured `sample_limit` for a target. This allows you to measure each target to find out how close they are to reaching the limit with `scrape_samples_post_metric_relabeling / scrape_sample_limit`. Note that `scrape_sample_limit` can be zero if there is no limit configured, which means that the query above can return `+Inf` for targets with no limit (as we divide by zero). If you want to query only for targets that do have a sample limit, use this query: `scrape_samples_post_metric_relabeling / (scrape_sample_limit > 0)`.

View file

@@ -502,7 +502,9 @@ $ curl http://localhost:9090/api/v1/targets
         "lastError": "",
         "lastScrape": "2017-01-17T15:07:44.723715405+01:00",
         "lastScrapeDuration": 0.050688943,
-        "health": "up"
+        "health": "up",
+        "scrapeInterval": "1m",
+        "scrapeTimeout": "10s"
       }
     ],
     "droppedTargets": [
@@ -511,6 +513,8 @@ $ curl http://localhost:9090/api/v1/targets
           "__address__": "127.0.0.1:9100",
           "__metrics_path__": "/metrics",
           "__scheme__": "http",
+          "__scrape_interval__": "1m",
+          "__scrape_timeout__": "10s",
           "job": "node"
         },
       }

View file

@@ -100,6 +100,9 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
 	if c.Regex.Regexp == nil {
 		c.Regex = MustNewRegexp("")
 	}
+	if c.Action == "" {
+		return errors.Errorf("relabel action cannot be empty")
+	}
 	if c.Modulus == 0 && c.Action == HashMod {
 		return errors.Errorf("relabel configuration for hashmod requires non-zero modulus")
 	}
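The new testdata file uses `action: null` because an explicit YAML null bypasses the default action (`replace`) and leaves the field empty, which this check now rejects. A minimal sketch of triggering the error directly — assuming `gopkg.in/yaml.v2` zeroes the field on an explicit null, which is the behaviour the check guards against:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/pkg/relabel"
	"gopkg.in/yaml.v2"
)

func main() {
	var cfg relabel.Config
	// An explicit null zeroes the action instead of keeping the default.
	err := yaml.Unmarshal([]byte("action: null"), &cfg)
	fmt.Println(err) // relabel action cannot be empty
}
```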

View file

@@ -182,7 +182,10 @@ func (node *UnaryExpr) String() string {
 }
 
 func (node *VectorSelector) String() string {
-	labelStrings := make([]string, 0, len(node.LabelMatchers)-1)
+	var labelStrings []string
+	if len(node.LabelMatchers) > 1 {
+		labelStrings = make([]string, 0, len(node.LabelMatchers)-1)
+	}
 	for _, matcher := range node.LabelMatchers {
 		// Only include the __name__ label if its equality matching and matches the name.
 		if matcher.Name == labels.MetricName && matcher.Type == labels.MatchEqual && matcher.Value == node.Name {
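The guard matters because `make` panics at run time when handed a negative capacity, and with no label matchers the old code computed a capacity of -1 (the "empty value" case in the new test below). A quick illustration:

```go
package main

import "fmt"

func main() {
	defer func() {
		// Recovers the "makeslice: cap out of range" panic from the make below.
		fmt.Println("recovered:", recover())
	}()
	matchers := 0 // e.g. a zero-value VectorSelector
	_ = make([]string, 0, matchers-1)
}
```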

View file

@@ -16,6 +16,8 @@ package parser
 
 import (
 	"testing"
 
+	"github.com/prometheus/prometheus/pkg/labels"
+
 	"github.com/stretchr/testify/require"
 )
 
@@ -138,3 +140,76 @@ func TestExprString(t *testing.T) {
 		require.Equal(t, exp, expr.String())
 	}
 }
func TestVectorSelector_String(t *testing.T) {
for _, tc := range []struct {
name string
vs VectorSelector
expected string
}{
{
name: "empty value",
vs: VectorSelector{},
expected: ``,
},
{
name: "no matchers with name",
vs: VectorSelector{Name: "foobar"},
expected: `foobar`,
},
{
name: "one matcher with name",
vs: VectorSelector{
Name: "foobar",
LabelMatchers: []*labels.Matcher{
labels.MustNewMatcher(labels.MatchEqual, "a", "x"),
},
},
expected: `foobar{a="x"}`,
},
{
name: "two matchers with name",
vs: VectorSelector{
Name: "foobar",
LabelMatchers: []*labels.Matcher{
labels.MustNewMatcher(labels.MatchEqual, "a", "x"),
labels.MustNewMatcher(labels.MatchEqual, "b", "y"),
},
},
expected: `foobar{a="x",b="y"}`,
},
{
name: "two matchers without name",
vs: VectorSelector{
LabelMatchers: []*labels.Matcher{
labels.MustNewMatcher(labels.MatchEqual, "a", "x"),
labels.MustNewMatcher(labels.MatchEqual, "b", "y"),
},
},
expected: `{a="x",b="y"}`,
},
{
name: "name matcher and name",
vs: VectorSelector{
Name: "foobar",
LabelMatchers: []*labels.Matcher{
labels.MustNewMatcher(labels.MatchEqual, labels.MetricName, "foobar"),
},
},
expected: `foobar`,
},
{
name: "name matcher only",
vs: VectorSelector{
LabelMatchers: []*labels.Matcher{
labels.MustNewMatcher(labels.MatchEqual, labels.MetricName, "foobar"),
},
},
expected: `{__name__="foobar"}`,
},
} {
t.Run(tc.name, func(t *testing.T) {
require.Equal(t, tc.expected, tc.vs.String())
})
}
}

View file

@@ -99,12 +99,16 @@ func (mc *MetadataMetricsCollector) Collect(ch chan<- prometheus.Metric) {
 }
 
 // NewManager is the Manager constructor
-func NewManager(logger log.Logger, app storage.Appendable) *Manager {
+func NewManager(o *Options, logger log.Logger, app storage.Appendable) *Manager {
+	if o == nil {
+		o = &Options{}
+	}
 	if logger == nil {
 		logger = log.NewNopLogger()
 	}
 	m := &Manager{
 		append:        app,
+		opts:          o,
 		logger:        logger,
 		scrapeConfigs: make(map[string]*config.ScrapeConfig),
 		scrapePools:   make(map[string]*scrapePool),
@@ -116,9 +120,15 @@ func NewManager(logger log.Logger, app storage.Appendable) *Manager {
 	return m
 }
 
+// Options are the configuration parameters to the scrape manager.
+type Options struct {
+	ExtraMetrics bool
+}
+
 // Manager maintains a set of scrape pools and manages start/stop cycles
 // when receiving new target groups from the discovery manager.
 type Manager struct {
+	opts      *Options
 	logger    log.Logger
 	append    storage.Appendable
 	graceShut chan struct{}
@@ -181,7 +191,7 @@ func (m *Manager) reload() {
 			level.Error(m.logger).Log("msg", "error reloading target set", "err", "invalid config id:"+setName)
 			continue
 		}
-		sp, err := newScrapePool(scrapeConfig, m.append, m.jitterSeed, log.With(m.logger, "scrape_pool", setName))
+		sp, err := newScrapePool(scrapeConfig, m.append, m.jitterSeed, log.With(m.logger, "scrape_pool", setName), m.opts.ExtraMetrics)
 		if err != nil {
 			level.Error(m.logger).Log("msg", "error creating new scrape pool", "err", err, "scrape_pool", setName)
 			continue

View file

@@ -44,52 +44,66 @@ func TestPopulateLabels(t *testing.T) {
 			"custom": "value",
 		}),
 		cfg: &config.ScrapeConfig{
 			Scheme:         "https",
 			MetricsPath:    "/metrics",
 			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
 		},
 		res: labels.FromMap(map[string]string{
 			model.AddressLabel:     "1.2.3.4:1000",
 			model.InstanceLabel:    "1.2.3.4:1000",
 			model.SchemeLabel:      "https",
 			model.MetricsPathLabel: "/metrics",
 			model.JobLabel:         "job",
-			"custom":               "value",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
+			"custom":                  "value",
 		}),
 		resOrig: labels.FromMap(map[string]string{
 			model.AddressLabel:     "1.2.3.4:1000",
 			model.SchemeLabel:      "https",
 			model.MetricsPathLabel: "/metrics",
 			model.JobLabel:         "job",
 			"custom":               "value",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
 		}),
 	},
 	// Pre-define/overwrite scrape config labels.
 	// Leave out port and expect it to be defaulted to scheme.
 	{
 		in: labels.FromMap(map[string]string{
 			model.AddressLabel:     "1.2.3.4",
 			model.SchemeLabel:      "http",
 			model.MetricsPathLabel: "/custom",
 			model.JobLabel:         "custom-job",
+			model.ScrapeIntervalLabel: "2s",
+			model.ScrapeTimeoutLabel:  "2s",
 		}),
 		cfg: &config.ScrapeConfig{
 			Scheme:         "https",
 			MetricsPath:    "/metrics",
 			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
 		},
 		res: labels.FromMap(map[string]string{
 			model.AddressLabel:     "1.2.3.4:80",
 			model.InstanceLabel:    "1.2.3.4:80",
 			model.SchemeLabel:      "http",
 			model.MetricsPathLabel: "/custom",
 			model.JobLabel:         "custom-job",
+			model.ScrapeIntervalLabel: "2s",
+			model.ScrapeTimeoutLabel:  "2s",
 		}),
 		resOrig: labels.FromMap(map[string]string{
 			model.AddressLabel:     "1.2.3.4",
 			model.SchemeLabel:      "http",
 			model.MetricsPathLabel: "/custom",
 			model.JobLabel:         "custom-job",
+			model.ScrapeIntervalLabel: "2s",
+			model.ScrapeTimeoutLabel:  "2s",
 		}),
 	},
 	// Provide instance label. HTTPS port default for IPv6.
@@ -99,32 +113,40 @@ func TestPopulateLabels(t *testing.T) {
 			model.InstanceLabel: "custom-instance",
 		}),
 		cfg: &config.ScrapeConfig{
 			Scheme:         "https",
 			MetricsPath:    "/metrics",
 			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
 		},
 		res: labels.FromMap(map[string]string{
 			model.AddressLabel:     "[::1]:443",
 			model.InstanceLabel:    "custom-instance",
 			model.SchemeLabel:      "https",
 			model.MetricsPathLabel: "/metrics",
 			model.JobLabel:         "job",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
 		}),
 		resOrig: labels.FromMap(map[string]string{
 			model.AddressLabel:     "[::1]",
 			model.InstanceLabel:    "custom-instance",
 			model.SchemeLabel:      "https",
 			model.MetricsPathLabel: "/metrics",
 			model.JobLabel:         "job",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
 		}),
 	},
 	// Address label missing.
 	{
 		in: labels.FromStrings("custom", "value"),
 		cfg: &config.ScrapeConfig{
 			Scheme:         "https",
 			MetricsPath:    "/metrics",
 			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
 		},
 		res:     nil,
 		resOrig: nil,
@@ -134,9 +156,11 @@ func TestPopulateLabels(t *testing.T) {
 	{
 		in: labels.FromStrings("custom", "host:1234"),
 		cfg: &config.ScrapeConfig{
 			Scheme:         "https",
 			MetricsPath:    "/metrics",
 			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
 			RelabelConfigs: []*relabel.Config{
 				{
 					Action: relabel.Replace,
@@ -148,27 +172,33 @@ func TestPopulateLabels(t *testing.T) {
 			},
 		},
 		res: labels.FromMap(map[string]string{
 			model.AddressLabel:     "host:1234",
 			model.InstanceLabel:    "host:1234",
 			model.SchemeLabel:      "https",
 			model.MetricsPathLabel: "/metrics",
 			model.JobLabel:         "job",
-			"custom":               "host:1234",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
+			"custom":                  "host:1234",
 		}),
 		resOrig: labels.FromMap(map[string]string{
 			model.SchemeLabel:      "https",
 			model.MetricsPathLabel: "/metrics",
 			model.JobLabel:         "job",
-			"custom":               "host:1234",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
+			"custom":                  "host:1234",
 		}),
 	},
 	// Address label missing, but added in relabelling.
 	{
 		in: labels.FromStrings("custom", "host:1234"),
 		cfg: &config.ScrapeConfig{
 			Scheme:         "https",
 			MetricsPath:    "/metrics",
 			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
 			RelabelConfigs: []*relabel.Config{
 				{
 					Action: relabel.Replace,
@@ -180,18 +210,22 @@ func TestPopulateLabels(t *testing.T) {
 			},
 		},
 		res: labels.FromMap(map[string]string{
 			model.AddressLabel:     "host:1234",
 			model.InstanceLabel:    "host:1234",
 			model.SchemeLabel:      "https",
 			model.MetricsPathLabel: "/metrics",
 			model.JobLabel:         "job",
-			"custom":               "host:1234",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
+			"custom":                  "host:1234",
 		}),
 		resOrig: labels.FromMap(map[string]string{
 			model.SchemeLabel:      "https",
 			model.MetricsPathLabel: "/metrics",
 			model.JobLabel:         "job",
-			"custom":               "host:1234",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
+			"custom":                  "host:1234",
 		}),
 	},
 	// Invalid UTF-8 in label.
@@ -201,14 +235,102 @@ func TestPopulateLabels(t *testing.T) {
 			"custom": "\xbd",
 		}),
 		cfg: &config.ScrapeConfig{
 			Scheme:         "https",
 			MetricsPath:    "/metrics",
 			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
 		},
 		res:     nil,
 		resOrig: nil,
 		err:     "invalid label value for \"custom\": \"\\xbd\"",
 	},
// Invalid duration in interval label.
{
in: labels.FromMap(map[string]string{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "2notseconds",
}),
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
err: "error parsing scrape interval: not a valid duration string: \"2notseconds\"",
},
// Invalid duration in timeout label.
{
in: labels.FromMap(map[string]string{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeTimeoutLabel: "2notseconds",
}),
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
err: "error parsing scrape timeout: not a valid duration string: \"2notseconds\"",
},
// 0 interval in timeout label.
{
in: labels.FromMap(map[string]string{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "0s",
}),
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
err: "scrape interval cannot be 0",
},
// 0 duration in timeout label.
{
in: labels.FromMap(map[string]string{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeTimeoutLabel: "0s",
}),
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
err: "scrape timeout cannot be 0",
},
// Timeout less than interval.
{
in: labels.FromMap(map[string]string{
model.AddressLabel: "1.2.3.4:1000",
model.ScrapeIntervalLabel: "1s",
model.ScrapeTimeoutLabel: "2s",
}),
cfg: &config.ScrapeConfig{
Scheme: "https",
MetricsPath: "/metrics",
JobName: "job",
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
err: "scrape timeout cannot be greater than scrape interval (\"2s\" > \"1s\")",
},
 	}
 	for _, c := range cases {
 		in := c.in.Copy()
@@ -276,7 +398,8 @@ scrape_configs:
 		ch = make(chan struct{}, 1)
 	)
-	scrapeManager := NewManager(nil, nil)
+	opts := Options{}
+	scrapeManager := NewManager(&opts, nil, nil)
 	newLoop := func(scrapeLoopOptions) loop {
 		ch <- struct{}{}
 		return noopLoop()
@@ -338,7 +461,8 @@ scrape_configs:
 }
 
 func TestManagerTargetsUpdates(t *testing.T) {
-	m := NewManager(nil, nil)
+	opts := Options{}
+	m := NewManager(&opts, nil, nil)
 	ts := make(chan map[string][]*targetgroup.Group)
 	go m.Run(ts)
@@ -390,7 +514,8 @@ global:
 	return cfg
 }
 
-	scrapeManager := NewManager(nil, nil)
+	opts := Options{}
+	scrapeManager := NewManager(&opts, nil, nil)
 
 	// Load the first config.
 	cfg1 := getConfig("ha1")

View file

@@ -253,6 +253,8 @@ type scrapeLoopOptions struct {
 	labelLimits     *labelLimits
 	honorLabels     bool
 	honorTimestamps bool
+	interval        time.Duration
+	timeout         time.Duration
 	mrc             []*relabel.Config
 	cache           *scrapeCache
 }
@@ -261,7 +263,7 @@ const maxAheadTime = 10 * time.Minute
 
 type labelsMutator func(labels.Labels) labels.Labels
 
-func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed uint64, logger log.Logger) (*scrapePool, error) {
+func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed uint64, logger log.Logger, reportScrapeTimeout bool) (*scrapePool, error) {
 	targetScrapePools.Inc()
 	if logger == nil {
 		logger = log.NewNopLogger()
@@ -306,7 +308,11 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed
 			cache,
 			jitterSeed,
 			opts.honorTimestamps,
+			opts.sampleLimit,
 			opts.labelLimits,
+			opts.interval,
+			opts.timeout,
+			reportScrapeTimeout,
 		)
 	}
@@ -414,6 +420,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 		} else {
 			cache = newScrapeCache()
 		}
+
 		var (
 			t = sp.activeTargets[fp]
 			s = &targetScraper{Target: t, client: sp.client, timeout: timeout, bodySizeLimit: bodySizeLimit}
@@ -426,6 +433,8 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 				honorTimestamps: honorTimestamps,
 				mrc:             mrc,
 				cache:           cache,
+				interval:        interval,
+				timeout:         timeout,
 			})
 		)
 		wg.Add(1)
@@ -435,7 +444,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 			wg.Done()
 
 			newLoop.setForcedError(forcedErr)
-			newLoop.run(interval, timeout, nil)
+			newLoop.run(nil)
 		}(oldLoop, newLoop)
 
 		sp.loops[fp] = newLoop
@@ -509,6 +518,12 @@ func (sp *scrapePool) sync(targets []*Target) {
 		hash := t.hash()
 
 		if _, ok := sp.activeTargets[hash]; !ok {
+			// The scrape interval and timeout labels are set to the config's values initially,
+			// so whether changed via relabeling or not, they'll exist and hold the correct values
+			// for every target.
+			var err error
+			interval, timeout, err = t.intervalAndTimeout(interval, timeout)
+
 			s := &targetScraper{Target: t, client: sp.client, timeout: timeout, bodySizeLimit: bodySizeLimit}
 			l := sp.newLoop(scrapeLoopOptions{
 				target:          t,
@@ -518,7 +533,12 @@ func (sp *scrapePool) sync(targets []*Target) {
 				honorLabels:     honorLabels,
 				honorTimestamps: honorTimestamps,
 				mrc:             mrc,
+				interval:        interval,
+				timeout:         timeout,
 			})
+			if err != nil {
+				l.setForcedError(err)
+			}
 
 			sp.activeTargets[hash] = t
 			sp.loops[hash] = l
@@ -560,7 +580,7 @@ func (sp *scrapePool) sync(targets []*Target) {
 	}
 	for _, l := range uniqueLoops {
 		if l != nil {
-			go l.run(interval, timeout, nil)
+			go l.run(nil)
 		}
 	}
 	// Wait for all potentially stopped scrapers to terminate.
@@ -772,7 +792,7 @@ func (s *targetScraper) scrape(ctx context.Context, w io.Writer) (string, error)
 // A loop can run and be stopped again. It must not be reused after it was stopped.
 type loop interface {
-	run(interval, timeout time.Duration, errc chan<- error)
+	run(errc chan<- error)
 	setForcedError(err error)
 	stop()
 	getCache() *scrapeCache
@@ -796,7 +816,10 @@ type scrapeLoop struct {
 	honorTimestamps bool
 	forcedErr       error
 	forcedErrMtx    sync.Mutex
+	sampleLimit     int
 	labelLimits     *labelLimits
+	interval        time.Duration
+	timeout         time.Duration
 
 	appender      func(ctx context.Context) storage.Appender
 	sampleMutator labelsMutator
@@ -808,6 +831,8 @@ type scrapeLoop struct {
 	stopped chan struct{}
 
 	disabledEndOfRunStalenessMarkers bool
+
+	reportScrapeTimeout bool
 }
 
 // scrapeCache tracks mappings of exposed metric strings to label sets and
@@ -1064,7 +1089,11 @@ func newScrapeLoop(ctx context.Context,
 	cache *scrapeCache,
 	jitterSeed uint64,
 	honorTimestamps bool,
+	sampleLimit int,
 	labelLimits *labelLimits,
+	interval time.Duration,
+	timeout time.Duration,
+	reportScrapeTimeout bool,
 ) *scrapeLoop {
 	if l == nil {
 		l = log.NewNopLogger()
@@ -1087,16 +1116,20 @@ func newScrapeLoop(ctx context.Context,
 		l:               l,
 		parentCtx:       ctx,
 		honorTimestamps: honorTimestamps,
+		sampleLimit:     sampleLimit,
 		labelLimits:     labelLimits,
+		interval:        interval,
+		timeout:         timeout,
+		reportScrapeTimeout: reportScrapeTimeout,
 	}
 	sl.ctx, sl.cancel = context.WithCancel(ctx)
 
 	return sl
 }
 
-func (sl *scrapeLoop) run(interval, timeout time.Duration, errc chan<- error) {
+func (sl *scrapeLoop) run(errc chan<- error) {
 	select {
-	case <-time.After(sl.scraper.offset(interval, sl.jitterSeed)):
+	case <-time.After(sl.scraper.offset(sl.interval, sl.jitterSeed)):
 		// Continue after a scraping offset.
 	case <-sl.ctx.Done():
 		close(sl.stopped)
@@ -1106,7 +1139,7 @@ func (sl *scrapeLoop) run(errc chan<- error) {
 	var last time.Time
 
 	alignedScrapeTime := time.Now().Round(0)
-	ticker := time.NewTicker(interval)
+	ticker := time.NewTicker(sl.interval)
 	defer ticker.Stop()
 
 mainLoop:
@@ -1126,11 +1159,11 @@ mainLoop:
 		// Calling Round ensures the time used is the wall clock, as otherwise .Sub
 		// and .Add on time.Time behave differently (see time package docs).
 		scrapeTime := time.Now().Round(0)
-		if AlignScrapeTimestamps && interval > 100*scrapeTimestampTolerance {
+		if AlignScrapeTimestamps && sl.interval > 100*scrapeTimestampTolerance {
 			// For some reason, a tick might have been skipped, in which case we
 			// would call alignedScrapeTime.Add(interval) multiple times.
-			for scrapeTime.Sub(alignedScrapeTime) >= interval {
-				alignedScrapeTime = alignedScrapeTime.Add(interval)
+			for scrapeTime.Sub(alignedScrapeTime) >= sl.interval {
+				alignedScrapeTime = alignedScrapeTime.Add(sl.interval)
 			}
 			// Align the scrape time if we are in the tolerance boundaries.
 			if scrapeTime.Sub(alignedScrapeTime) <= scrapeTimestampTolerance {
@@ -1138,7 +1171,7 @@ mainLoop:
 			}
 		}
 
-		last = sl.scrapeAndReport(interval, timeout, last, scrapeTime, errc)
+		last = sl.scrapeAndReport(sl.interval, sl.timeout, last, scrapeTime, errc)
 
 		select {
 		case <-sl.parentCtx.Done():
@@ -1153,7 +1186,7 @@ mainLoop:
 	close(sl.stopped)
 
 	if !sl.disabledEndOfRunStalenessMarkers {
-		sl.endOfRunStaleness(last, ticker, interval)
+		sl.endOfRunStaleness(last, ticker, sl.interval)
 	}
 }
@@ -1192,7 +1225,7 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last, app
 	}()
 
 	defer func() {
-		if err = sl.report(app, appendTime, time.Since(start), total, added, seriesAdded, scrapeErr); err != nil {
+		if err = sl.report(app, appendTime, timeout, time.Since(start), total, added, seriesAdded, scrapeErr); err != nil {
 			level.Warn(sl.l).Log("msg", "Appending scrape report failed", "err", err)
 		}
 	}()
@@ -1580,9 +1613,11 @@ const (
 	scrapeSamplesMetricName      = "scrape_samples_scraped" + "\xff"
 	samplesPostRelabelMetricName = "scrape_samples_post_metric_relabeling" + "\xff"
 	scrapeSeriesAddedMetricName  = "scrape_series_added" + "\xff"
+	scrapeTimeoutMetricName      = "scrape_timeout_seconds" + "\xff"
+	scrapeSampleLimitMetricName  = "scrape_sample_limit" + "\xff"
 )
 
-func (sl *scrapeLoop) report(app storage.Appender, start time.Time, duration time.Duration, scraped, added, seriesAdded int, scrapeErr error) (err error) {
+func (sl *scrapeLoop) report(app storage.Appender, start time.Time, timeout, duration time.Duration, scraped, added, seriesAdded int, scrapeErr error) (err error) {
 	sl.scraper.Report(start, duration, scrapeErr)
 
 	ts := timestamp.FromTime(start)
@@ -1607,6 +1642,14 @@ func (sl *scrapeLoop) report(app storage.Appender, start time.Time, timeout, dur
 	if err = sl.addReportSample(app, scrapeSeriesAddedMetricName, ts, float64(seriesAdded)); err != nil {
 		return
 	}
+	if sl.reportScrapeTimeout {
+		if err = sl.addReportSample(app, scrapeTimeoutMetricName, ts, timeout.Seconds()); err != nil {
+			return
+		}
+		if err = sl.addReportSample(app, scrapeSampleLimitMetricName, ts, float64(sl.sampleLimit)); err != nil {
+			return
+		}
+	}
 	return
 }
@@ -1630,6 +1673,14 @@ func (sl *scrapeLoop) reportStale(app storage.Appender, start time.Time) (err er
 	if err = sl.addReportSample(app, scrapeSeriesAddedMetricName, ts, stale); err != nil {
 		return
 	}
+	if sl.reportScrapeTimeout {
+		if err = sl.addReportSample(app, scrapeTimeoutMetricName, ts, stale); err != nil {
+			return
+		}
+		if err = sl.addReportSample(app, scrapeSampleLimitMetricName, ts, stale); err != nil {
+			return
+		}
+	}
 	return
 }

View file

@@ -57,7 +57,7 @@ func TestNewScrapePool(t *testing.T) {
 	var (
 		app   = &nopAppendable{}
 		cfg   = &config.ScrapeConfig{}
-		sp, _ = newScrapePool(cfg, app, 0, nil)
+		sp, _ = newScrapePool(cfg, app, 0, nil, false)
 	)
 
 	if a, ok := sp.appendable.(*nopAppendable); !ok || a != app {
@@ -92,8 +92,8 @@ func TestDroppedTargetsList(t *testing.T) {
 			},
 		},
 	}
-		sp, _                  = newScrapePool(cfg, app, 0, nil)
-		expectedLabelSetString = "{__address__=\"127.0.0.1:9090\", job=\"dropMe\"}"
+		sp, _                  = newScrapePool(cfg, app, 0, nil, false)
+		expectedLabelSetString = "{__address__=\"127.0.0.1:9090\", __scrape_interval__=\"0s\", __scrape_timeout__=\"0s\", job=\"dropMe\"}"
 		expectedLength         = 1
 	)
 	sp.Sync(tgs)
@@ -146,14 +146,16 @@ type testLoop struct {
 	forcedErr    error
 	forcedErrMtx sync.Mutex
 	runOnce      bool
+	interval     time.Duration
+	timeout      time.Duration
 }
 
-func (l *testLoop) run(interval, timeout time.Duration, errc chan<- error) {
+func (l *testLoop) run(errc chan<- error) {
 	if l.runOnce {
 		panic("loop must be started only once")
 	}
 	l.runOnce = true
-	l.startFunc(interval, timeout, errc)
+	l.startFunc(l.interval, l.timeout, errc)
 }
 
 func (l *testLoop) disableEndOfRunStalenessMarkers() {
@@ -250,7 +252,7 @@ func TestScrapePoolReload(t *testing.T) {
 	// On starting to run, new loops created on reload check whether their preceding
 	// equivalents have been stopped.
 	newLoop := func(opts scrapeLoopOptions) loop {
-		l := &testLoop{}
+		l := &testLoop{interval: time.Duration(reloadCfg.ScrapeInterval), timeout: time.Duration(reloadCfg.ScrapeTimeout)}
 		l.startFunc = func(interval, timeout time.Duration, errc chan<- error) {
 			require.Equal(t, 3*time.Second, interval, "Unexpected scrape interval")
 			require.Equal(t, 2*time.Second, timeout, "Unexpected scrape timeout")
@@ -276,8 +278,10 @@ func TestScrapePoolReload(t *testing.T) {
 	// one terminated.
 
 	for i := 0; i < numTargets; i++ {
+		labels := labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i))
 		t := &Target{
-			labels: labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i)),
+			labels:           labels,
+			discoveredLabels: labels,
 		}
 		l := &testLoop{}
 		l.stopFunc = func() {
@@ -342,7 +346,7 @@ func TestScrapePoolTargetLimit(t *testing.T) {
 		activeTargets: map[uint64]*Target{},
 		loops:         map[uint64]loop{},
 		newLoop:       newLoop,
-		logger:        nil,
+		logger:        log.NewNopLogger(),
 		client:        http.DefaultClient,
 	}
@@ -452,7 +456,7 @@ func TestScrapePoolTargetLimit(t *testing.T) {
 func TestScrapePoolAppender(t *testing.T) {
 	cfg := &config.ScrapeConfig{}
 	app := &nopAppendable{}
-	sp, _ := newScrapePool(cfg, app, 0, nil)
+	sp, _ := newScrapePool(cfg, app, 0, nil, false)
 
 	loop := sp.newLoop(scrapeLoopOptions{
 		target: &Target{},
@@ -488,12 +492,12 @@ func TestScrapePoolAppender(t *testing.T) {
 }
 
 func TestScrapePoolRaces(t *testing.T) {
-	interval, _ := model.ParseDuration("500ms")
-	timeout, _ := model.ParseDuration("1s")
+	interval, _ := model.ParseDuration("1s")
+	timeout, _ := model.ParseDuration("500ms")
 	newConfig := func() *config.ScrapeConfig {
 		return &config.ScrapeConfig{ScrapeInterval: interval, ScrapeTimeout: timeout}
 	}
-	sp, _ := newScrapePool(newConfig(), &nopAppendable{}, 0, nil)
+	sp, _ := newScrapePool(newConfig(), &nopAppendable{}, 0, nil, false)
 	tgts := []*targetgroup.Group{
 		{
 			Targets: []model.LabelSet{
@@ -582,7 +586,11 @@ func TestScrapeLoopStopBeforeRun(t *testing.T) {
 		nopMutator,
 		nil, nil, 0,
 		true,
+		0,
 		nil,
+		1,
+		0,
+		false,
 	)
 
 	// The scrape pool synchronizes on stopping scrape loops. However, new scrape
@@ -611,7 +619,7 @@ func TestScrapeLoopStopBeforeRun(t *testing.T) {
 	runDone := make(chan struct{})
 	go func() {
-		sl.run(1, 0, nil)
+		sl.run(nil)
 		close(runDone)
 	}()
@@ -647,7 +655,11 @@ func TestScrapeLoopStop(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
 	)
 
 	// Terminate loop after 2 scrapes.
@@ -664,7 +676,7 @@ func TestScrapeLoopStop(t *testing.T) {
 	}
 
 	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
 		signal <- struct{}{}
 	}()
@@ -715,7 +727,11 @@ func TestScrapeLoopRun(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		time.Second,
+		time.Hour,
+		false,
 	)
 
 	// The loop must terminate during the initial offset if the context
@@ -723,7 +739,7 @@ func TestScrapeLoopRun(t *testing.T) {
 	scraper.offsetDur = time.Hour
 
 	go func() {
-		sl.run(time.Second, time.Hour, errc)
+		sl.run(errc)
 		signal <- struct{}{}
 	}()
@@ -763,11 +779,15 @@ func TestScrapeLoopRun(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		time.Second,
+		100*time.Millisecond,
+		false,
 	)
 
 	go func() {
-		sl.run(time.Second, 100*time.Millisecond, errc)
+		sl.run(errc)
 		signal <- struct{}{}
 	}()
@@ -815,7 +835,11 @@ func TestScrapeLoopForcedErr(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		time.Second,
+		time.Hour,
+		false,
 	)
 
 	forcedErr := fmt.Errorf("forced err")
@@ -827,7 +851,7 @@ func TestScrapeLoopForcedErr(t *testing.T) {
 	}
 
 	go func() {
-		sl.run(time.Second, time.Hour, errc)
+		sl.run(errc)
 		signal <- struct{}{}
 	}()
@@ -866,7 +890,11 @@ func TestScrapeLoopMetadata(t *testing.T) {
 		cache,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 	defer cancel()
@@ -916,7 +944,11 @@ func TestScrapeLoopSeriesAdded(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 	defer cancel()
@@ -955,7 +987,11 @@ func TestScrapeLoopRunCreatesStaleMarkersOnFailedScrape(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
 	)
 	// Succeed once, several failures, then stop.
 	numScrapes := 0
@@ -973,7 +1009,7 @@ func TestScrapeLoopRunCreatesStaleMarkersOnFailedScrape(t *testing.T) {
 	}
 
 	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
 		signal <- struct{}{}
 	}()
@@ -1010,7 +1046,11 @@ func TestScrapeLoopRunCreatesStaleMarkersOnParseFailure(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
 	)
 
 	// Succeed once, several failures, then stop.
@@ -1030,7 +1070,7 @@ func TestScrapeLoopRunCreatesStaleMarkersOnParseFailure(t *testing.T) {
 	}
 
 	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
 		signal <- struct{}{}
 	}()
@@ -1069,7 +1109,11 @@ func TestScrapeLoopCache(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
 	)
 
 	numScrapes := 0
@@ -1106,7 +1150,7 @@ func TestScrapeLoopCache(t *testing.T) {
 	}
 
 	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
 		signal <- struct{}{}
 	}()
@@ -1144,7 +1188,11 @@ func TestScrapeLoopCacheMemoryExhaustionProtection(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
 	)
 
 	numScrapes := 0
@@ -1164,7 +1212,7 @@ func TestScrapeLoopCacheMemoryExhaustionProtection(t *testing.T) {
 	}
 
 	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
 		signal <- struct{}{}
 	}()
@@ -1251,7 +1299,11 @@ func TestScrapeLoopAppend(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	now := time.Now()
@@ -1293,7 +1345,11 @@ func TestScrapeLoopAppendCacheEntryButErrNotFound(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	fakeRef := uint64(1)
@@ -1343,7 +1399,11 @@ func TestScrapeLoopAppendSampleLimit(t *testing.T) {
 		nil,
 		0,
 		true,
+		app.limit,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	// Get the value of the Counter before performing the append.
@@ -1413,7 +1473,11 @@ func TestScrapeLoop_ChangingMetricString(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	now := time.Now()
@@ -1454,7 +1518,11 @@ func TestScrapeLoopAppendStaleness(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	now := time.Now()
@@ -1498,7 +1566,11 @@ func TestScrapeLoopAppendNoStalenessIfTimestamp(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	now := time.Now()
@@ -1600,7 +1672,11 @@ metric_total{n="2"} 2 # {t="2"} 2.0 20000
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	now := time.Now()
@@ -1658,7 +1734,11 @@ func TestScrapeLoopAppendExemplarSeries(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	now := time.Now()
@@ -1703,7 +1783,11 @@ func TestScrapeLoopRunReportsTargetDownOnScrapeError(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
 	)
 
 	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error {
@@ -1711,7 +1795,7 @@ func TestScrapeLoopRunReportsTargetDownOnScrapeError(t *testing.T) {
 		return errors.New("scrape failed")
 	}
 
-	sl.run(10*time.Millisecond, time.Hour, nil)
+	sl.run(nil)
 	require.Equal(t, 0.0, appender.result[0].v, "bad 'up' value")
 }
@@ -1732,7 +1816,11 @@ func TestScrapeLoopRunReportsTargetDownOnInvalidUTF8(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
 	)
 
 	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error {
@@ -1741,7 +1829,7 @@ func TestScrapeLoopRunReportsTargetDownOnInvalidUTF8(t *testing.T) {
 		return nil
 	}
 
-	sl.run(10*time.Millisecond, time.Hour, nil)
+	sl.run(nil)
 	require.Equal(t, 0.0, appender.result[0].v, "bad 'up' value")
 }
@@ -1774,7 +1862,11 @@ func TestScrapeLoopAppendGracefullyIfAmendOrOutOfOrderOrOutOfBounds(t *testing.T
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	now := time.Unix(1, 0)
@@ -1812,7 +1904,11 @@ func TestScrapeLoopOutOfBoundsTimeError(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	now := time.Now().Add(20 * time.Minute)
@@ -2063,7 +2159,11 @@ func TestScrapeLoop_RespectTimestamps(t *testing.T) {
 		func(ctx context.Context) storage.Appender { return capp },
 		nil, 0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	now := time.Now()
@@ -2097,7 +2197,11 @@ func TestScrapeLoop_DiscardTimestamps(t *testing.T) {
 		func(ctx context.Context) storage.Appender { return capp },
 		nil, 0,
 		false,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 
 	now := time.Now()
@@ -2130,7 +2234,11 @@ func TestScrapeLoopDiscardDuplicateLabels(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 	defer cancel()
@@ -2181,7 +2289,11 @@ func TestScrapeLoopDiscardUnnamedMetrics(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 	defer cancel()
@@ -2274,7 +2386,7 @@ func TestReuseScrapeCache(t *testing.T) {
 			ScrapeInterval: model.Duration(5 * time.Second),
 			MetricsPath:    "/metrics",
 		}
-		sp, _ = newScrapePool(cfg, app, 0, nil)
+		sp, _ = newScrapePool(cfg, app, 0, nil, false)
 		t1    = &Target{
 			discoveredLabels: labels.Labels{
 				labels.Label{
@@ -2399,7 +2511,11 @@ func TestScrapeAddFast(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		0,
+		0,
+		false,
 	)
 	defer cancel()
@@ -2429,7 +2545,7 @@ func TestReuseCacheRace(t *testing.T) {
 			ScrapeInterval: model.Duration(5 * time.Second),
 			MetricsPath:    "/metrics",
 		}
-		sp, _ = newScrapePool(cfg, app, 0, nil)
+		sp, _ = newScrapePool(cfg, app, 0, nil, false)
 		t1    = &Target{
 			discoveredLabels: labels.Labels{
 				labels.Label{
@@ -2483,7 +2599,11 @@ func TestScrapeReportSingleAppender(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
 	)
 
 	numScrapes := 0
@@ -2498,7 +2618,7 @@ func TestScrapeReportSingleAppender(t *testing.T) {
 	}
 
 	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
 		signal <- struct{}{}
 	}()
@@ -2612,7 +2732,11 @@ func TestScrapeLoopLabelLimit(t *testing.T) {
 		nil,
 		0,
 		true,
+		0,
 		&test.labelLimits,
+		0,
+		0,
+		false,
 	)
 
 	slApp := sl.appender(context.Background())
@@ -2627,3 +2751,40 @@ func TestScrapeLoopLabelLimit(t *testing.T) {
 		}
 	}
 }
func TestTargetScrapeIntervalAndTimeoutRelabel(t *testing.T) {
interval, _ := model.ParseDuration("2s")
timeout, _ := model.ParseDuration("500ms")
config := &config.ScrapeConfig{
ScrapeInterval: interval,
ScrapeTimeout: timeout,
RelabelConfigs: []*relabel.Config{
{
SourceLabels: model.LabelNames{model.ScrapeIntervalLabel},
Regex: relabel.MustNewRegexp("2s"),
Replacement: "3s",
TargetLabel: model.ScrapeIntervalLabel,
Action: relabel.Replace,
},
{
SourceLabels: model.LabelNames{model.ScrapeTimeoutLabel},
Regex: relabel.MustNewRegexp("500ms"),
Replacement: "750ms",
TargetLabel: model.ScrapeTimeoutLabel,
Action: relabel.Replace,
},
},
}
sp, _ := newScrapePool(config, &nopAppendable{}, 0, nil, false)
tgts := []*targetgroup.Group{
{
Targets: []model.LabelSet{{model.AddressLabel: "127.0.0.1:9090"}},
},
}
sp.Sync(tgts)
defer sp.stop()
require.Equal(t, "3s", sp.ActiveTargets()[0].labels.Get(model.ScrapeIntervalLabel))
require.Equal(t, "750ms", sp.ActiveTargets()[0].labels.Get(model.ScrapeTimeoutLabel))
}

View file

@ -143,8 +143,18 @@ func (t *Target) SetMetadataStore(s MetricMetadataStore) {
// hash returns an identifying hash for the target. // hash returns an identifying hash for the target.
func (t *Target) hash() uint64 { func (t *Target) hash() uint64 {
h := fnv.New64a() h := fnv.New64a()
// We must build a label set without the scrape interval and timeout
// labels because those aren't defining attributes of a target
// and can be changed without turning the target into a new one;
// therefore they should not affect its unique hash.
l := t.labels.Map()
delete(l, model.ScrapeIntervalLabel)
delete(l, model.ScrapeTimeoutLabel)
lset := labels.FromMap(l)
//nolint: errcheck //nolint: errcheck
h.Write([]byte(fmt.Sprintf("%016d", t.labels.Hash()))) h.Write([]byte(fmt.Sprintf("%016d", lset.Hash())))
//nolint: errcheck //nolint: errcheck
h.Write([]byte(t.URL().String())) h.Write([]byte(t.URL().String()))
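
A minimal, self-contained sketch of the idea behind this hashing change (a plain map stands in for `labels.Labels`; the two label names are the real `__scrape_interval__`/`__scrape_timeout__` labels introduced below, but everything else is illustrative rather than the actual `Target.hash`):

```go
package main

import (
	"fmt"
	"hash/fnv"
	"sort"
)

// hashTarget mimics the new Target.hash: it hashes the target's label
// set minus the scrape interval/timeout labels, plus the target URL,
// so changing those two labels does not produce a "new" target.
func hashTarget(lbls map[string]string, url string) uint64 {
	// Drop the non-identifying labels before hashing.
	filtered := make(map[string]string, len(lbls))
	for k, v := range lbls {
		if k == "__scrape_interval__" || k == "__scrape_timeout__" {
			continue
		}
		filtered[k] = v
	}
	// Hash the remaining labels in a deterministic order.
	keys := make([]string, 0, len(filtered))
	for k := range filtered {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	h := fnv.New64a()
	for _, k := range keys {
		h.Write([]byte(k))
		h.Write([]byte{0})
		h.Write([]byte(filtered[k]))
		h.Write([]byte{0})
	}
	h.Write([]byte(url))
	return h.Sum64()
}

func main() {
	a := map[string]string{"job": "x", "__scrape_interval__": "15s"}
	b := map[string]string{"job": "x", "__scrape_interval__": "30s"}
	fmt.Println(hashTarget(a, "http://localhost:9090/metrics") ==
		hashTarget(b, "http://localhost:9090/metrics")) // true
}
```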
@ -273,6 +283,31 @@ func (t *Target) Health() TargetHealth {
return t.health return t.health
} }
// intervalAndTimeout returns the interval and timeout derived from
// the target's labels.
func (t *Target) intervalAndTimeout(defaultInterval, defaultDuration time.Duration) (time.Duration, time.Duration, error) {
t.mtx.RLock()
defer t.mtx.RUnlock()
intervalLabel := t.labels.Get(model.ScrapeIntervalLabel)
interval, err := model.ParseDuration(intervalLabel)
if err != nil {
return defaultInterval, defaultDuration, errors.Errorf("Error parsing interval label %q: %v", intervalLabel, err)
}
timeoutLabel := t.labels.Get(model.ScrapeTimeoutLabel)
timeout, err := model.ParseDuration(timeoutLabel)
if err != nil {
return defaultInterval, defaultDuration, errors.Errorf("Error parsing timeout label %q: %v", timeoutLabel, err)
}
return time.Duration(interval), time.Duration(timeout), nil
}
// GetValue gets a label value from the entire label set.
func (t *Target) GetValue(name string) string {
return t.labels.Get(name)
}
// Targets is a sortable list of targets. // Targets is a sortable list of targets.
type Targets []*Target type Targets []*Target
@ -329,6 +364,8 @@ func populateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig lab
// Copy labels into the labelset for the target if they are not set already. // Copy labels into the labelset for the target if they are not set already.
scrapeLabels := []labels.Label{ scrapeLabels := []labels.Label{
{Name: model.JobLabel, Value: cfg.JobName}, {Name: model.JobLabel, Value: cfg.JobName},
{Name: model.ScrapeIntervalLabel, Value: cfg.ScrapeInterval.String()},
{Name: model.ScrapeTimeoutLabel, Value: cfg.ScrapeTimeout.String()},
{Name: model.MetricsPathLabel, Value: cfg.MetricsPath}, {Name: model.MetricsPathLabel, Value: cfg.MetricsPath},
{Name: model.SchemeLabel, Value: cfg.Scheme}, {Name: model.SchemeLabel, Value: cfg.Scheme},
} }
@ -390,6 +427,34 @@ func populateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig lab
return nil, nil, err return nil, nil, err
} }
var interval string
var intervalDuration model.Duration
if interval = lset.Get(model.ScrapeIntervalLabel); interval != cfg.ScrapeInterval.String() {
intervalDuration, err = model.ParseDuration(interval)
if err != nil {
return nil, nil, errors.Errorf("error parsing scrape interval: %v", err)
}
if time.Duration(intervalDuration) == 0 {
return nil, nil, errors.New("scrape interval cannot be 0")
}
}
var timeout string
var timeoutDuration model.Duration
if timeout = lset.Get(model.ScrapeTimeoutLabel); timeout != cfg.ScrapeTimeout.String() {
timeoutDuration, err = model.ParseDuration(timeout)
if err != nil {
return nil, nil, errors.Errorf("error parsing scrape timeout: %v", err)
}
if time.Duration(timeoutDuration) == 0 {
return nil, nil, errors.New("scrape timeout cannot be 0")
}
}
if timeoutDuration > intervalDuration {
return nil, nil, errors.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
}
// Meta labels are deleted after relabelling. Other internal labels propagate to // Meta labels are deleted after relabelling. Other internal labels propagate to
// the target which decides whether they will be part of their label set. // the target which decides whether they will be part of their label set.
for _, l := range lset { for _, l := range lset {

View file

@ -382,3 +382,29 @@ func TestTargetsFromGroup(t *testing.T) {
t.Fatalf("Expected error %s, got %s", expectedError, failures[0]) t.Fatalf("Expected error %s, got %s", expectedError, failures[0])
} }
} }
func TestTargetHash(t *testing.T) {
target1 := &Target{
labels: labels.Labels{
{Name: model.AddressLabel, Value: "localhost"},
{Name: model.SchemeLabel, Value: "http"},
{Name: model.MetricsPathLabel, Value: "/metrics"},
{Name: model.ScrapeIntervalLabel, Value: "15s"},
{Name: model.ScrapeTimeoutLabel, Value: "500ms"},
},
}
hash1 := target1.hash()
target2 := &Target{
labels: labels.Labels{
{Name: model.AddressLabel, Value: "localhost"},
{Name: model.SchemeLabel, Value: "http"},
{Name: model.MetricsPathLabel, Value: "/metrics"},
{Name: model.ScrapeIntervalLabel, Value: "14s"},
{Name: model.ScrapeTimeoutLabel, Value: "600ms"},
},
}
hash2 := target2.hash()
require.Equal(t, hash1, hash2, "Scrape interval and timeout labels should not affect the hash.")
}

View file

@ -50,7 +50,7 @@ func TestTemplateExpansion(t *testing.T) {
// Non-ASCII space (not allowed in text/template, see https://github.com/golang/go/blob/master/src/text/template/parse/lex.go#L98) // Non-ASCII space (not allowed in text/template, see https://github.com/golang/go/blob/master/src/text/template/parse/lex.go#L98)
text: "{{ }}", text: "{{ }}",
shouldFail: true, shouldFail: true,
errorMsg: "error parsing template test: template: test:1: unexpected unrecognized character in action: U+00A0 in command", errorMsg: "error parsing template test: template: test:1: unrecognized character in action: U+00A0",
}, },
{ {
// HTML escaping. // HTML escaping.
@ -157,7 +157,7 @@ func TestTemplateExpansion(t *testing.T) {
// Unparsable template. // Unparsable template.
text: "{{", text: "{{",
shouldFail: true, shouldFail: true,
errorMsg: "error parsing template test: template: test:1: unexpected unclosed action in command", errorMsg: "error parsing template test: template: test:1: unclosed action",
}, },
{ {
// Error in function. // Error in function.
@ -194,7 +194,7 @@ func TestTemplateExpansion(t *testing.T) {
// Humanize - string with error. // Humanize - string with error.
text: `{{ humanize "one" }}`, text: `{{ humanize "one" }}`,
shouldFail: true, shouldFail: true,
errorMsg: `strconv.ParseFloat: parsing "one": invalid syntax`, errorMsg: `error executing template test: template: test:1:3: executing "test" at <humanize "one">: error calling humanize: strconv.ParseFloat: parsing "one": invalid syntax`,
}, },
{ {
// Humanize1024 - float64. // Humanize1024 - float64.
@ -212,7 +212,7 @@ func TestTemplateExpansion(t *testing.T) {
// Humanize1024 - string with error. // Humanize1024 - string with error.
text: `{{ humanize1024 "one" }}`, text: `{{ humanize1024 "one" }}`,
shouldFail: true, shouldFail: true,
errorMsg: `strconv.ParseFloat: parsing "one": invalid syntax`, errorMsg: `error executing template test: template: test:1:3: executing "test" at <humanize1024 "one">: error calling humanize1024: strconv.ParseFloat: parsing "one": invalid syntax`,
}, },
{ {
// HumanizeDuration - seconds - float64. // HumanizeDuration - seconds - float64.
@ -242,7 +242,7 @@ func TestTemplateExpansion(t *testing.T) {
// HumanizeDuration - string with error. // HumanizeDuration - string with error.
text: `{{ humanizeDuration "one" }}`, text: `{{ humanizeDuration "one" }}`,
shouldFail: true, shouldFail: true,
errorMsg: `strconv.ParseFloat: parsing "one": invalid syntax`, errorMsg: `error executing template test: template: test:1:3: executing "test" at <humanizeDuration "one">: error calling humanizeDuration: strconv.ParseFloat: parsing "one": invalid syntax`,
}, },
{ {
// Humanize* Inf and NaN - float64. // Humanize* Inf and NaN - float64.
@ -270,7 +270,7 @@ func TestTemplateExpansion(t *testing.T) {
// HumanizePercentage - model.SampleValue input - string with error. // HumanizePercentage - model.SampleValue input - string with error.
text: `{{ "one" | humanizePercentage }}`, text: `{{ "one" | humanizePercentage }}`,
shouldFail: true, shouldFail: true,
errorMsg: `strconv.ParseFloat: parsing "one": invalid syntax`, errorMsg: `error executing template test: template: test:1:11: executing "test" at <humanizePercentage>: error calling humanizePercentage: strconv.ParseFloat: parsing "one": invalid syntax`,
}, },
{ {
// HumanizeTimestamp - model.SampleValue input - float64. // HumanizeTimestamp - model.SampleValue input - float64.
@ -349,6 +349,7 @@ func TestTemplateExpansion(t *testing.T) {
} }
if s.shouldFail { if s.shouldFail {
require.Error(t, err, "%v", s.text) require.Error(t, err, "%v", s.text)
require.EqualError(t, err, s.errorMsg)
continue continue
} }

View file

@ -3,6 +3,11 @@
Memory snapshot uses the WAL package and writes each series as a WAL record. Memory snapshot uses the WAL package and writes each series as a WAL record.
Below are the formats of the individual records. Below are the formats of the individual records.
The order of records in the snapshot is always:
1. Series records, one per series, in no particular order.
2. After all series are written, a single tombstone record containing all the tombstones.
3. At the end, one or more exemplar records, each batching up multiple exemplars. Exemplars appear in the order they were written to the circular buffer (see the replay sketch below).
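
Replay code can therefore dispatch on the leading record-type byte and rely on all series being known before exemplars arrive. A hedged sketch of that dispatch (the 1/2/3 values match the `chunkSnapshotRecordType*` constants in the head code further down; the decode bodies are elided):

```go
package main

import "fmt"

const (
	recTypeSeries     byte = 1
	recTypeTombstones byte = 2
	recTypeExemplars  byte = 3
)

// replay dispatches snapshot records by their leading type byte.
// Because series records always precede tombstones and exemplars,
// all series are known by the time exemplars are decoded.
func replay(records [][]byte) error {
	for _, rec := range records {
		switch rec[0] {
		case recTypeSeries:
			// Decode one series from rec[1:].
		case recTypeTombstones:
			// Decode all tombstones from rec[1:].
		case recTypeExemplars:
			// Decode a batch of exemplars from rec[1:].
		default:
			// Unknown type: an older/newer format, abort the replay.
			return fmt.Errorf("unsupported snapshot record type 0b%b", rec[0])
		}
	}
	return nil
}

func main() {
	fmt.Println(replay([][]byte{{1}, {1}, {2}, {3}})) // <nil>
}
```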
### Series records ### Series records
This record is a snapshot of a single series. Only one series exists per record. This record is a snapshot of a single series. Only one series exists per record.
@ -60,3 +65,30 @@ as tombstone file in blocks.
│ len(Encoded Tombstones) <uvarint> │ Encoded Tombstones <bytes> │ len(Encoded Tombstones) <uvarint> │ Encoded Tombstones <bytes>
└───────────────────────────────────┴─────────────────────────────┘ └───────────────────────────────────┴─────────────────────────────┘
``` ```
### Exemplar record
A single exemplar record contains one or more exemplars, encoded in the same way as in the WAL, but with a different record type.
```
┌───────────────────────────────────────────────────────────────────┐
│ Record Type <byte>
├───────────────────────────────────────────────────────────────────┤
│ ┌────────────────────┬───────────────────────────┐ │
│ │ series ref <8b> │ timestamp <8b> │ │
│ └────────────────────┴───────────────────────────┘ │
│ ┌─────────────────────┬───────────────────────────┬─────────────┐ │
│ │ ref_delta <uvarint> │ timestamp_delta <uvarint> │ value <8b> │ │
│ ├─────────────────────┴───────────────────────────┴─────────────┤ │
│ │ n = len(labels) <uvarint> │ │
│ ├───────────────────────────────┬───────────────────────────────┤ │
│ │ len(str_1) <uvarint> │ str_1 <bytes> │ │
│ ├───────────────────────────────┴───────────────────────────────┤ │
│ │ ... │ │
│ ├───────────────────────────────┬───────────────────────────────┤ │
│ │ len(str_2n) <uvarint> │ str_2n <bytes> │ │
│ ├───────────────────────────────┴───────────────────────────────┤ │
│ . . . │
└───────────────────────────────────────────────────────────────────┘
```
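
A schematic encoder for that layout, assuming series refs and timestamps are non-decreasing relative to the first exemplar so the deltas fit in the unsigned varints the diagram shows (the type names and helpers here are invented for illustration; the real encoding lives in the `record` package):

```go
package main

import (
	"encoding/binary"
	"fmt"
	"math"
)

type label struct{ name, value string }

type refExemplar struct {
	ref    uint64
	ts     int64
	value  float64
	labels []label
}

// encode writes one exemplar snapshot record as diagrammed above: the
// record-type byte, the first exemplar's full series ref and timestamp,
// then per exemplar the ref/timestamp deltas, the value, and n label
// pairs as 2n length-prefixed strings.
func encode(recType byte, exs []refExemplar) []byte {
	buf := []byte{recType}
	first := exs[0]
	buf = binary.BigEndian.AppendUint64(buf, first.ref)
	buf = binary.BigEndian.AppendUint64(buf, uint64(first.ts))
	for _, e := range exs {
		buf = binary.AppendUvarint(buf, e.ref-first.ref)
		buf = binary.AppendUvarint(buf, uint64(e.ts-first.ts))
		buf = binary.BigEndian.AppendUint64(buf, math.Float64bits(e.value))
		buf = binary.AppendUvarint(buf, uint64(len(e.labels))) // n pairs
		for _, l := range e.labels {
			for _, s := range []string{l.name, l.value} { // str_1 .. str_2n
				buf = binary.AppendUvarint(buf, uint64(len(s)))
				buf = append(buf, s...)
			}
		}
	}
	return buf
}

func main() {
	rec := encode(3, []refExemplar{
		{ref: 100, ts: 1000, value: 1.5, labels: []label{{"traceID", "89620921"}}},
		{ref: 100, ts: 1010, value: 2.5, labels: []label{{"traceID", "89620922"}}},
	})
	fmt.Printf("%d bytes, record type %d\n", len(rec), rec[0])
}
```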

View file

@ -279,7 +279,7 @@ func (ce *CircularExemplarStorage) Resize(l int64) int {
migrated := 0 migrated := 0
if l > 0 { if l > 0 && len(oldBuffer) > 0 {
// Rewind previous next index by count with wrap-around. // Rewind previous next index by count with wrap-around.
// This math is essentially looking at nextIndex, where we would write the next exemplar to, // This math is essentially looking at nextIndex, where we would write the next exemplar to,
// and find the index in the old exemplar buffer that we should start migrating exemplars from. // and find the index in the old exemplar buffer that we should start migrating exemplars from.
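
The rewind arithmetic itself is small enough to show in isolation. A sketch of the wrap-around index computation, assuming `count` has already been clamped to the old buffer's length (a simplification of the code above, not the method itself):

```go
package main

import "fmt"

// startIndex rewinds next by count with wrap-around: it yields the
// index in the old buffer from which migration of the most recent
// count exemplars should start.
func startIndex(next, count, oldLen int) int {
	return ((next - count) + oldLen) % oldLen
}

func main() {
	// Old buffer of length 10 with the next write landing on index 3;
	// keeping the last 4 exemplars means starting at index 9 and
	// wrapping around: 9, 0, 1, 2.
	fmt.Println(startIndex(3, 4, 10)) // 9
}
```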
@ -400,3 +400,23 @@ func (ce *CircularExemplarStorage) computeMetrics() {
ce.metrics.lastExemplarsTs.Set(float64(ce.exemplars[0].exemplar.Ts) / 1000) ce.metrics.lastExemplarsTs.Set(float64(ce.exemplars[0].exemplar.Ts) / 1000)
} }
} }
// IterateExemplars iterates through all the exemplars from oldest to newest appended and calls
// the given function on each of them, until the end or until the first call that returns an error.
func (ce *CircularExemplarStorage) IterateExemplars(f func(seriesLabels labels.Labels, e exemplar.Exemplar) error) error {
ce.lock.RLock()
defer ce.lock.RUnlock()
idx := ce.nextIndex
l := len(ce.exemplars)
for i := 0; i < l; i, idx = i+1, (idx+1)%l {
if ce.exemplars[idx] == nil {
continue
}
err := f(ce.exemplars[idx].ref.seriesLabels, ce.exemplars[idx].exemplar)
if err != nil {
return err
}
}
return nil
}
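
Starting at `nextIndex` works because that is the slot the next append would overwrite, i.e. the oldest entry once the buffer has wrapped. A standalone sketch of the traversal over a plain ring buffer (ints stand in for exemplar entries):

```go
package main

import "fmt"

// iterate walks a ring buffer from oldest to newest. next is the index
// that the next append would overwrite; once the buffer has wrapped,
// that slot holds the oldest element, so the walk starts there.
func iterate(buf []int, next int, f func(v int) error) error {
	l := len(buf)
	for i, idx := 0, next; i < l; i, idx = i+1, (idx+1)%l {
		if buf[idx] == 0 {
			// Unfilled slot; the real storage skips nil entries.
			continue
		}
		if err := f(buf[idx]); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	// Capacity-4 buffer after appending 1..6: slots hold [5 6 3 4] and
	// the next append would overwrite index 2, the oldest value (3).
	buf := []int{5, 6, 3, 4}
	_ = iterate(buf, 2, func(v int) error {
		fmt.Println(v) // prints 3, 4, 5, 6
		return nil
	})
}
```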

View file

@ -413,7 +413,7 @@ func TestResize(t *testing.T) {
expectedMigrated: 50, expectedMigrated: 50,
}, },
{ {
name: "Zero", name: "ShrinkToZero",
startSize: 100, startSize: 100,
newCount: 0, newCount: 0,
expectedSeries: []int{}, expectedSeries: []int{},
@ -436,6 +436,14 @@ func TestResize(t *testing.T) {
notExpectedSeries: []int{}, notExpectedSeries: []int{},
expectedMigrated: 0, expectedMigrated: 0,
}, },
{
name: "GrowFromZero",
startSize: 0,
newCount: 10,
expectedSeries: []int{},
notExpectedSeries: []int{},
expectedMigrated: 0,
},
} }
for _, tc := range testCases { for _, tc := range testCases {
@ -477,16 +485,27 @@ func TestResize(t *testing.T) {
} }
} }
func BenchmarkAddExemplar(t *testing.B) { func BenchmarkAddExemplar(b *testing.B) {
exs, err := NewCircularExemplarStorage(int64(t.N), eMetrics) // We need to include these labels since we do length calculation
require.NoError(t, err) // before adding.
es := exs.(*CircularExemplarStorage) exLabels := labels.Labels{{Name: "traceID", Value: "89620921"}}
for i := 0; i < t.N; i++ { for _, n := range []int{10000, 100000, 1000000} {
l := labels.FromStrings("service", strconv.Itoa(i)) b.Run(fmt.Sprintf("%d", n), func(b *testing.B) {
exs, err := NewCircularExemplarStorage(int64(n), eMetrics)
require.NoError(b, err)
es := exs.(*CircularExemplarStorage)
err = es.AddExemplar(l, exemplar.Exemplar{Value: float64(i), Ts: int64(i)}) b.ResetTimer()
require.NoError(t, err) l := labels.Labels{{Name: "service", Value: strconv.Itoa(0)}}
for i := 0; i < n; i++ {
if i%100 == 0 {
l = labels.Labels{{Name: "service", Value: strconv.Itoa(i)}}
}
err = es.AddExemplar(l, exemplar.Exemplar{Value: float64(i), Ts: int64(i), Labels: exLabels})
require.NoError(b, err)
}
})
} }
} }

View file

@ -113,6 +113,7 @@ type ExemplarStorage interface {
storage.ExemplarQueryable storage.ExemplarQueryable
AddExemplar(labels.Labels, exemplar.Exemplar) error AddExemplar(labels.Labels, exemplar.Exemplar) error
ValidateExemplar(labels.Labels, exemplar.Exemplar) error ValidateExemplar(labels.Labels, exemplar.Exemplar) error
IterateExemplars(f func(seriesLabels labels.Labels, e exemplar.Exemplar) error) error
} }
// HeadOptions are parameters for the Head block. // HeadOptions are parameters for the Head block.
@ -454,7 +455,7 @@ const cardinalityCacheExpirationTime = time.Duration(30) * time.Second
// Init loads data from the write ahead log and prepares the head for writes. // Init loads data from the write ahead log and prepares the head for writes.
// It should be called before using an appender so that it // It should be called before using an appender so that it
// limits the ingested samples to the head min valid time. // limits the ingested samples to the head min valid time.
func (h *Head) Init(minValidTime int64) (err error) { func (h *Head) Init(minValidTime int64) error {
h.minValidTime.Store(minValidTime) h.minValidTime.Store(minValidTime)
defer h.postings.EnsureOrder() defer h.postings.EnsureOrder()
defer h.gc() // After loading the wal remove the obsolete data from the head. defer h.gc() // After loading the wal remove the obsolete data from the head.
@ -474,6 +475,7 @@ func (h *Head) Init(minValidTime int64) (err error) {
if h.opts.EnableMemorySnapshotOnShutdown { if h.opts.EnableMemorySnapshotOnShutdown {
level.Info(h.logger).Log("msg", "Chunk snapshot is enabled, replaying from the snapshot") level.Info(h.logger).Log("msg", "Chunk snapshot is enabled, replaying from the snapshot")
var err error
snapIdx, snapOffset, refSeries, err = h.loadChunkSnapshot() snapIdx, snapOffset, refSeries, err = h.loadChunkSnapshot()
if err != nil { if err != nil {
snapIdx, snapOffset = -1, 0 snapIdx, snapOffset = -1, 0

View file

@ -2496,9 +2496,62 @@ func TestChunkSnapshot(t *testing.T) {
require.NoError(t, head.Close()) require.NoError(t, head.Close())
}() }()
type ex struct {
seriesLabels labels.Labels
e exemplar.Exemplar
}
numSeries := 10 numSeries := 10
expSeries := make(map[string][]tsdbutil.Sample) expSeries := make(map[string][]tsdbutil.Sample)
expTombstones := make(map[uint64]tombstones.Intervals) expTombstones := make(map[uint64]tombstones.Intervals)
expExemplars := make([]ex, 0)
addExemplar := func(app storage.Appender, ref uint64, lbls labels.Labels, ts int64) {
e := ex{
seriesLabels: lbls,
e: exemplar.Exemplar{
Labels: labels.Labels{{Name: "traceID", Value: fmt.Sprintf("%d", rand.Int())}},
Value: rand.Float64(),
Ts: ts,
},
}
expExemplars = append(expExemplars, e)
_, err := app.AppendExemplar(ref, e.seriesLabels, e.e)
require.NoError(t, err)
}
checkSamples := func() {
q, err := NewBlockQuerier(head, math.MinInt64, math.MaxInt64)
require.NoError(t, err)
series := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", ".*"))
require.Equal(t, expSeries, series)
}
checkTombstones := func() {
tr, err := head.Tombstones()
require.NoError(t, err)
actTombstones := make(map[uint64]tombstones.Intervals)
require.NoError(t, tr.Iter(func(ref uint64, itvs tombstones.Intervals) error {
for _, itv := range itvs {
actTombstones[ref].Add(itv)
}
return nil
}))
require.Equal(t, expTombstones, actTombstones)
}
checkExemplars := func() {
actExemplars := make([]ex, 0, len(expExemplars))
err := head.exemplars.IterateExemplars(func(seriesLabels labels.Labels, e exemplar.Exemplar) error {
actExemplars = append(actExemplars, ex{
seriesLabels: seriesLabels,
e: e,
})
return nil
})
require.NoError(t, err)
// Verifies both the existence of the right exemplars and their order in the buffer.
require.Equal(t, expExemplars, actExemplars)
}
{ // Initial data that goes into snapshot. { // Initial data that goes into snapshot.
// Add some initial samples with >=1 m-map chunk. // Add some initial samples with >=1 m-map chunk.
app := head.Appender(context.Background()) app := head.Appender(context.Background())
@ -2509,11 +2562,12 @@ func TestChunkSnapshot(t *testing.T) {
for ts := int64(1); ts <= 200; ts++ { for ts := int64(1); ts <= 200; ts++ {
val := rand.Float64() val := rand.Float64()
expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val}) expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val})
_, err := app.Append(0, lbls, ts, val) ref, err := app.Append(0, lbls, ts, val)
require.NoError(t, err) require.NoError(t, err)
// To create multiple WAL records. // Add an exemplar, and commit to create multiple WAL records.
if ts%10 == 0 { if ts%10 == 0 {
addExemplar(app, ref, lbls, ts)
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
app = head.Appender(context.Background()) app = head.Appender(context.Background())
} }
@ -2538,6 +2592,7 @@ func TestChunkSnapshot(t *testing.T) {
}, nil)) }, nil))
require.NoError(t, err) require.NoError(t, err)
} }
} }
// These references should be the ones used for the snapshot. // These references should be the ones used for the snapshot.
@ -2563,22 +2618,9 @@ func TestChunkSnapshot(t *testing.T) {
require.NoError(t, head.Init(math.MinInt64)) require.NoError(t, head.Init(math.MinInt64))
// Test query for snapshot replay. // Test query for snapshot replay.
q, err := NewBlockQuerier(head, math.MinInt64, math.MaxInt64) checkSamples()
require.NoError(t, err) checkTombstones()
series := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", ".*")) checkExemplars()
require.Equal(t, expSeries, series)
// Check the tombstones.
tr, err := head.Tombstones()
require.NoError(t, err)
actTombstones := make(map[uint64]tombstones.Intervals)
require.NoError(t, tr.Iter(func(ref uint64, itvs tombstones.Intervals) error {
for _, itv := range itvs {
actTombstones[ref].Add(itv)
}
return nil
}))
require.Equal(t, expTombstones, actTombstones)
} }
{ // Additional data to only include in WAL and m-mapped chunks and not snapshot. This mimics having an old snapshot on disk. { // Additional data to only include in WAL and m-mapped chunks and not snapshot. This mimics having an old snapshot on disk.
@ -2592,11 +2634,12 @@ func TestChunkSnapshot(t *testing.T) {
for ts := int64(201); ts <= 400; ts++ { for ts := int64(201); ts <= 400; ts++ {
val := rand.Float64() val := rand.Float64()
expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val}) expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val})
_, err := app.Append(0, lbls, ts, val) ref, err := app.Append(0, lbls, ts, val)
require.NoError(t, err) require.NoError(t, err)
// To create multiple WAL records. // Add an exemplar, and commit to create multiple WAL records.
if ts%10 == 0 { if ts%10 == 0 {
addExemplar(app, ref, lbls, ts)
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
app = head.Appender(context.Background()) app = head.Appender(context.Background())
} }
@ -2643,22 +2686,9 @@ func TestChunkSnapshot(t *testing.T) {
require.NoError(t, head.Init(math.MinInt64)) require.NoError(t, head.Init(math.MinInt64))
// Test query when data is replayed from snapshot, m-map chunks, and WAL. // Test query when data is replayed from snapshot, m-map chunks, and WAL.
q, err := NewBlockQuerier(head, math.MinInt64, math.MaxInt64) checkSamples()
require.NoError(t, err) checkTombstones()
series := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", ".*")) checkExemplars()
require.Equal(t, expSeries, series)
// Check the tombstones.
tr, err := head.Tombstones()
require.NoError(t, err)
actTombstones := make(map[uint64]tombstones.Intervals)
require.NoError(t, tr.Iter(func(ref uint64, itvs tombstones.Intervals) error {
for _, itv := range itvs {
actTombstones[ref].Add(itv)
}
return nil
}))
require.Equal(t, expTombstones, actTombstones)
} }
} }

View file

@ -413,6 +413,7 @@ func (h *Head) processWALSamples(
const ( const (
chunkSnapshotRecordTypeSeries uint8 = 1 chunkSnapshotRecordTypeSeries uint8 = 1
chunkSnapshotRecordTypeTombstones uint8 = 2 chunkSnapshotRecordTypeTombstones uint8 = 2
chunkSnapshotRecordTypeExemplars uint8 = 3
) )
type chunkSnapshotRecord struct { type chunkSnapshotRecord struct {
@ -537,6 +538,10 @@ const chunkSnapshotPrefix = "chunk_snapshot."
// The chunk snapshot is stored in a directory named chunk_snapshot.N.M and is written // The chunk snapshot is stored in a directory named chunk_snapshot.N.M and is written
// using the WAL package. N is the last WAL segment present during snapshotting and // using the WAL package. N is the last WAL segment present during snapshotting and
// M is the offset in segment N up to which data was written. // M is the offset in segment N up to which data was written.
//
// The snapshot first contains all series (each in individual records and not sorted), followed by
// tombstones (a single record), and finally exemplars (>= 1 record). Exemplars are in the order they
// were written to the circular buffer.
func (h *Head) ChunkSnapshot() (*ChunkSnapshotStats, error) { func (h *Head) ChunkSnapshot() (*ChunkSnapshotStats, error) {
if h.wal == nil { if h.wal == nil {
// If we are not storing any WAL, it does not make sense to take a snapshot either. // If we are not storing any WAL, it does not make sense to take a snapshot either.
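
The `chunk_snapshot.N.M` naming convention is mechanical enough to sketch. A hypothetical parser for such directory names (for illustration only; this helper does not exist in the codebase under this name):

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseSnapshotName extracts N (last WAL segment) and M (offset) from a
// directory name like "chunk_snapshot.42.128".
func parseSnapshotName(name string) (idx, offset int, err error) {
	parts := strings.Split(name, ".")
	if len(parts) != 3 || parts[0] != "chunk_snapshot" {
		return 0, 0, fmt.Errorf("not a chunk snapshot dir: %q", name)
	}
	if idx, err = strconv.Atoi(parts[1]); err != nil {
		return 0, 0, err
	}
	if offset, err = strconv.Atoi(parts[2]); err != nil {
		return 0, 0, err
	}
	return idx, offset, nil
}

func main() {
	fmt.Println(parseSnapshotName("chunk_snapshot.42.128")) // 42 128 <nil>
}
```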
@ -587,6 +592,7 @@ func (h *Head) ChunkSnapshot() (*ChunkSnapshotStats, error) {
buf []byte buf []byte
recs [][]byte recs [][]byte
) )
// Add all series to the snapshot.
stripeSize := h.series.size stripeSize := h.series.size
for i := 0; i < stripeSize; i++ { for i := 0; i < stripeSize; i++ {
h.series.locks[i].RLock() h.series.locks[i].RLock()
@ -622,11 +628,61 @@ func (h *Head) ChunkSnapshot() (*ChunkSnapshotStats, error) {
return stats, errors.Wrap(err, "encode tombstones") return stats, errors.Wrap(err, "encode tombstones")
} }
recs = append(recs, rec) recs = append(recs, rec)
// Flush remaining series records and tombstones.
// Flush remaining records.
if err := cp.Log(recs...); err != nil { if err := cp.Log(recs...); err != nil {
return stats, errors.Wrap(err, "flush records") return stats, errors.Wrap(err, "flush records")
} }
buf = buf[:0]
// Add exemplars in the snapshot.
// We log in batches, with each record having up to 10000 exemplars.
// Assuming 100 bytes (overestimate) per exemplar, that's ~1MB.
maxExemplarsPerRecord := 10000
batch := make([]record.RefExemplar, 0, maxExemplarsPerRecord)
enc := record.Encoder{}
flushExemplars := func() error {
if len(batch) == 0 {
return nil
}
buf = buf[:0]
encbuf := encoding.Encbuf{B: buf}
encbuf.PutByte(chunkSnapshotRecordTypeExemplars)
enc.EncodeExemplarsIntoBuffer(batch, &encbuf)
if err := cp.Log(encbuf.Get()); err != nil {
return errors.Wrap(err, "log exemplars")
}
buf, batch = buf[:0], batch[:0]
return nil
}
err = h.exemplars.IterateExemplars(func(seriesLabels labels.Labels, e exemplar.Exemplar) error {
if len(batch) >= maxExemplarsPerRecord {
if err := flushExemplars(); err != nil {
return errors.Wrap(err, "flush exemplars")
}
}
ms := h.series.getByHash(seriesLabels.Hash(), seriesLabels)
if ms == nil {
// It is possible that the exemplar refers to an old series that no longer exists; we discard such exemplars.
return nil
}
batch = append(batch, record.RefExemplar{
Ref: ms.ref,
T: e.Ts,
V: e.Value,
Labels: e.Labels,
})
return nil
})
if err != nil {
return stats, errors.Wrap(err, "iterate exemplars")
}
// Flush remaining exemplars.
if err := flushExemplars(); err != nil {
return stats, errors.Wrap(err, "flush exemplars at the end")
}
if err := cp.Close(); err != nil { if err := cp.Close(); err != nil {
return stats, errors.Wrap(err, "close chunk snapshot") return stats, errors.Wrap(err, "close chunk snapshot")
} }
@ -766,6 +822,9 @@ func (h *Head) loadChunkSnapshot() (int, int, map[uint64]*memSeries, error) {
recordChan = make(chan chunkSnapshotRecord, 5*n) recordChan = make(chan chunkSnapshotRecord, 5*n)
shardedRefSeries = make([]map[uint64]*memSeries, n) shardedRefSeries = make([]map[uint64]*memSeries, n)
errChan = make(chan error, n) errChan = make(chan error, n)
refSeries map[uint64]*memSeries
exemplarBuf []record.RefExemplar
dec record.Decoder
) )
wg.Add(n) wg.Add(n)
@ -852,15 +911,58 @@ Outer:
loopErr = errors.Wrap(err, "iterate tombstones") loopErr = errors.Wrap(err, "iterate tombstones")
break Outer break Outer
} }
case chunkSnapshotRecordTypeExemplars:
// Exemplars are at the end of the snapshot, so all series are loaded at this point.
if len(refSeries) == 0 {
close(recordChan)
wg.Wait()
refSeries = make(map[uint64]*memSeries, numSeries)
for _, shard := range shardedRefSeries {
for k, v := range shard {
refSeries[k] = v
}
}
}
decbuf := encoding.Decbuf{B: rec[1:]}
exemplarBuf = exemplarBuf[:0]
exemplarBuf, err = dec.ExemplarsFromBuffer(&decbuf, exemplarBuf)
if err != nil {
loopErr = errors.Wrap(err, "exemplars from buffer")
break Outer
}
for _, e := range exemplarBuf {
ms, ok := refSeries[e.Ref]
if !ok {
unknownRefs++
continue
}
if err := h.exemplars.AddExemplar(ms.lset, exemplar.Exemplar{
Labels: e.Labels,
Value: e.V,
Ts: e.T,
}); err != nil {
loopErr = errors.Wrap(err, "append exemplar")
break Outer
}
}
default: default:
// This is a record type we don't understand. It is either an old format from earlier versions, // or a new format and the code was rolled back to an old version.
loopErr = errors.Errorf("unsupported snapshot record type 0b%b", rec[0]) loopErr = errors.Errorf("unsupported snapshot record type 0b%b", rec[0])
loopErr = errors.Errorf("unsuported snapshot record type 0b%b", rec[0]) loopErr = errors.Errorf("unsuported snapshot record type 0b%b", rec[0])
break Outer
} }
} }
close(recordChan) if len(refSeries) == 0 {
wg.Wait() close(recordChan)
wg.Wait()
}
close(errChan) close(errChan)
merr := tsdb_errors.NewMulti(errors.Wrap(loopErr, "decode loop")) merr := tsdb_errors.NewMulti(errors.Wrap(loopErr, "decode loop"))
@ -875,10 +977,13 @@ Outer:
return -1, -1, nil, errors.Wrap(r.Err(), "read records") return -1, -1, nil, errors.Wrap(r.Err(), "read records")
} }
refSeries := make(map[uint64]*memSeries, numSeries) if len(refSeries) == 0 {
for _, shard := range shardedRefSeries { // We had no exemplar record, so we have to build the map here.
for k, v := range shard { refSeries = make(map[uint64]*memSeries, numSeries)
refSeries[k] = v for _, shard := range shardedRefSeries {
for k, v := range shard {
refSeries[k] = v
}
} }
} }

View file

@ -182,6 +182,11 @@ func (d *Decoder) Exemplars(rec []byte, exemplars []RefExemplar) ([]RefExemplar,
if t != Exemplars { if t != Exemplars {
return nil, errors.New("invalid record type") return nil, errors.New("invalid record type")
} }
return d.ExemplarsFromBuffer(&dec, exemplars)
}
func (d *Decoder) ExemplarsFromBuffer(dec *encoding.Decbuf, exemplars []RefExemplar) ([]RefExemplar, error) {
if dec.Len() == 0 { if dec.Len() == 0 {
return exemplars, nil return exemplars, nil
} }
@ -287,6 +292,12 @@ func (e *Encoder) Exemplars(exemplars []RefExemplar, b []byte) []byte {
return buf.Get() return buf.Get()
} }
e.EncodeExemplarsIntoBuffer(exemplars, &buf)
return buf.Get()
}
func (e *Encoder) EncodeExemplarsIntoBuffer(exemplars []RefExemplar, buf *encoding.Encbuf) {
// Store base timestamp and base reference number of first sample. // Store base timestamp and base reference number of first sample.
// All samples encode their timestamp and ref as delta to those. // All samples encode their timestamp and ref as delta to those.
first := exemplars[0] first := exemplars[0]
@ -305,6 +316,4 @@ func (e *Encoder) Exemplars(exemplars []RefExemplar, b []byte) []byte {
buf.PutUvarintStr(l.Value) buf.PutUvarintStr(l.Value)
} }
} }
return buf.Get()
} }

View file

@ -760,6 +760,9 @@ type Target struct {
LastScrape time.Time `json:"lastScrape"` LastScrape time.Time `json:"lastScrape"`
LastScrapeDuration float64 `json:"lastScrapeDuration"` LastScrapeDuration float64 `json:"lastScrapeDuration"`
Health scrape.TargetHealth `json:"health"` Health scrape.TargetHealth `json:"health"`
ScrapeInterval string `json:"scrapeInterval"`
ScrapeTimeout string `json:"scrapeTimeout"`
} }
// DroppedTarget has the information for one target that was dropped during relabelling. // DroppedTarget has the information for one target that was dropped during relabelling.
@ -899,6 +902,8 @@ func (api *API) targets(r *http.Request) apiFuncResult {
LastScrape: target.LastScrape(), LastScrape: target.LastScrape(),
LastScrapeDuration: target.LastScrapeDuration().Seconds(), LastScrapeDuration: target.LastScrapeDuration().Seconds(),
Health: target.Health(), Health: target.Health(),
ScrapeInterval: target.GetValue(model.ScrapeIntervalLabel),
ScrapeTimeout: target.GetValue(model.ScrapeTimeoutLabel),
}) })
} }
} }
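
For illustration, a toy marshalling of just the two new fields (the struct is cut down to the fields added in this change; only the JSON tag names are taken from the diff, the rest of the real `Target` struct is omitted):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// target carries only the two fields added in this change; the real
// v1.Target struct in the API package has many more.
type target struct {
	ScrapeInterval string `json:"scrapeInterval"`
	ScrapeTimeout  string `json:"scrapeTimeout"`
}

func main() {
	b, _ := json.Marshal(target{ScrapeInterval: "15s", ScrapeTimeout: "5s"})
	fmt.Println(string(b)) // {"scrapeInterval":"15s","scrapeTimeout":"5s"}
}
```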

View file

@ -534,10 +534,12 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
{ {
Identifier: "test", Identifier: "test",
Labels: labels.FromMap(map[string]string{ Labels: labels.FromMap(map[string]string{
model.SchemeLabel: "http", model.SchemeLabel: "http",
model.AddressLabel: "example.com:8080", model.AddressLabel: "example.com:8080",
model.MetricsPathLabel: "/metrics", model.MetricsPathLabel: "/metrics",
model.JobLabel: "test", model.JobLabel: "test",
model.ScrapeIntervalLabel: "15s",
model.ScrapeTimeoutLabel: "5s",
}), }),
DiscoveredLabels: nil, DiscoveredLabels: nil,
Params: url.Values{}, Params: url.Values{},
@ -547,10 +549,12 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
{ {
Identifier: "blackbox", Identifier: "blackbox",
Labels: labels.FromMap(map[string]string{ Labels: labels.FromMap(map[string]string{
model.SchemeLabel: "http", model.SchemeLabel: "http",
model.AddressLabel: "localhost:9115", model.AddressLabel: "localhost:9115",
model.MetricsPathLabel: "/probe", model.MetricsPathLabel: "/probe",
model.JobLabel: "blackbox", model.JobLabel: "blackbox",
model.ScrapeIntervalLabel: "20s",
model.ScrapeTimeoutLabel: "10s",
}), }),
DiscoveredLabels: nil, DiscoveredLabels: nil,
Params: url.Values{"target": []string{"example.com"}}, Params: url.Values{"target": []string{"example.com"}},
@ -561,10 +565,12 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
Identifier: "blackbox", Identifier: "blackbox",
Labels: nil, Labels: nil,
DiscoveredLabels: labels.FromMap(map[string]string{ DiscoveredLabels: labels.FromMap(map[string]string{
model.SchemeLabel: "http", model.SchemeLabel: "http",
model.AddressLabel: "http://dropped.example.com:9115", model.AddressLabel: "http://dropped.example.com:9115",
model.MetricsPathLabel: "/probe", model.MetricsPathLabel: "/probe",
model.JobLabel: "blackbox", model.JobLabel: "blackbox",
model.ScrapeIntervalLabel: "30s",
model.ScrapeTimeoutLabel: "15s",
}), }),
Params: url.Values{}, Params: url.Values{},
Active: false, Active: false,
@ -951,6 +957,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
LastError: "failed: missing port in address", LastError: "failed: missing port in address",
LastScrape: scrapeStart, LastScrape: scrapeStart,
LastScrapeDuration: 0.1, LastScrapeDuration: 0.1,
ScrapeInterval: "20s",
ScrapeTimeout: "10s",
}, },
{ {
DiscoveredLabels: map[string]string{}, DiscoveredLabels: map[string]string{},
@ -964,15 +972,19 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
LastError: "", LastError: "",
LastScrape: scrapeStart, LastScrape: scrapeStart,
LastScrapeDuration: 0.07, LastScrapeDuration: 0.07,
ScrapeInterval: "15s",
ScrapeTimeout: "5s",
}, },
}, },
DroppedTargets: []*DroppedTarget{ DroppedTargets: []*DroppedTarget{
{ {
DiscoveredLabels: map[string]string{ DiscoveredLabels: map[string]string{
"__address__": "http://dropped.example.com:9115", "__address__": "http://dropped.example.com:9115",
"__metrics_path__": "/probe", "__metrics_path__": "/probe",
"__scheme__": "http", "__scheme__": "http",
"job": "blackbox", "job": "blackbox",
"__scrape_interval__": "30s",
"__scrape_timeout__": "15s",
}, },
}, },
}, },
@ -997,6 +1009,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
LastError: "failed: missing port in address", LastError: "failed: missing port in address",
LastScrape: scrapeStart, LastScrape: scrapeStart,
LastScrapeDuration: 0.1, LastScrapeDuration: 0.1,
ScrapeInterval: "20s",
ScrapeTimeout: "10s",
}, },
{ {
DiscoveredLabels: map[string]string{}, DiscoveredLabels: map[string]string{},
@ -1010,15 +1024,19 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
LastError: "", LastError: "",
LastScrape: scrapeStart, LastScrape: scrapeStart,
LastScrapeDuration: 0.07, LastScrapeDuration: 0.07,
ScrapeInterval: "15s",
ScrapeTimeout: "5s",
}, },
}, },
DroppedTargets: []*DroppedTarget{ DroppedTargets: []*DroppedTarget{
{ {
DiscoveredLabels: map[string]string{ DiscoveredLabels: map[string]string{
"__address__": "http://dropped.example.com:9115", "__address__": "http://dropped.example.com:9115",
"__metrics_path__": "/probe", "__metrics_path__": "/probe",
"__scheme__": "http", "__scheme__": "http",
"job": "blackbox", "job": "blackbox",
"__scrape_interval__": "30s",
"__scrape_timeout__": "15s",
}, },
}, },
}, },
@ -1043,6 +1061,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
LastError: "failed: missing port in address", LastError: "failed: missing port in address",
LastScrape: scrapeStart, LastScrape: scrapeStart,
LastScrapeDuration: 0.1, LastScrapeDuration: 0.1,
ScrapeInterval: "20s",
ScrapeTimeout: "10s",
}, },
{ {
DiscoveredLabels: map[string]string{}, DiscoveredLabels: map[string]string{},
@ -1056,6 +1076,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
LastError: "", LastError: "",
LastScrape: scrapeStart, LastScrape: scrapeStart,
LastScrapeDuration: 0.07, LastScrapeDuration: 0.07,
ScrapeInterval: "15s",
ScrapeTimeout: "5s",
}, },
}, },
DroppedTargets: []*DroppedTarget{}, DroppedTargets: []*DroppedTarget{},
@ -1071,10 +1093,12 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
DroppedTargets: []*DroppedTarget{ DroppedTargets: []*DroppedTarget{
{ {
DiscoveredLabels: map[string]string{ DiscoveredLabels: map[string]string{
"__address__": "http://dropped.example.com:9115", "__address__": "http://dropped.example.com:9115",
"__metrics_path__": "/probe", "__metrics_path__": "/probe",
"__scheme__": "http", "__scheme__": "http",
"job": "blackbox", "job": "blackbox",
"__scrape_interval__": "30s",
"__scrape_timeout__": "15s",
}, },
}, },
}, },

File diff suppressed because it is too large

View file

@ -45,7 +45,7 @@
"scripts": { "scripts": {
"start": "react-scripts start", "start": "react-scripts start",
"build": "react-scripts build", "build": "react-scripts build",
"test": "react-scripts test --runInBand", "test": "react-scripts test --runInBand --resetMocks=false",
"test:debug": "react-scripts --inspect-brk test --runInBand --no-cache", "test:debug": "react-scripts --inspect-brk test --runInBand --no-cache",
"eject": "react-scripts eject", "eject": "react-scripts eject",
"lint:ci": "eslint --quiet \"src/**/*.{ts,tsx}\"", "lint:ci": "eslint --quiet \"src/**/*.{ts,tsx}\"",
@ -83,7 +83,8 @@
"enzyme": "^3.11.0", "enzyme": "^3.11.0",
"enzyme-to-json": "^3.6.2", "enzyme-to-json": "^3.6.2",
"eslint-config-prettier": "^8.3.0", "eslint-config-prettier": "^8.3.0",
"eslint-plugin-prettier": "^3.4.1", "eslint-config-react-app": "^6.0.0",
"eslint-plugin-prettier": "^4.0.0",
"jest-fetch-mock": "^3.0.3", "jest-fetch-mock": "^3.0.3",
"mutationobserver-shim": "^0.3.7", "mutationobserver-shim": "^0.3.7",
"prettier": "^2.3.2", "prettier": "^2.3.2",

View file

@ -26,7 +26,7 @@ export const StartingContent: FC<StartingContentProps> = ({ status, isUnexpected
{status && status.max > 0 ? ( {status && status.max > 0 ? (
<div> <div>
<p> <p>
Replaying WAL ({status?.current}/{status?.max}) Replaying WAL ({status.current}/{status.max})
</p> </p>
<Progress <Progress
animated animated

View file

@ -11,7 +11,9 @@ interface StatusIndicatorProps {
} }
export const withStatusIndicator = export const withStatusIndicator =
<T extends Record<string, any>>(Component: ComponentType<T>): FC<StatusIndicatorProps & T> => <T extends Record<string, any>>( // eslint-disable-line @typescript-eslint/no-explicit-any
Component: ComponentType<T>
): FC<StatusIndicatorProps & T> =>
({ error, isLoading, customErrorMsg, componentTitle, ...rest }) => { ({ error, isLoading, customErrorMsg, componentTitle, ...rest }) => {
if (error) { if (error) {
return ( return (

View file

@ -1,4 +1,6 @@
import jquery from 'jquery'; import jquery from 'jquery';
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(window as any).jQuery = jquery; (window as any).jQuery = jquery;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(window as any).moment = require('moment'); (window as any).moment = require('moment');

View file

@ -10,19 +10,15 @@ export interface FetchState<T> {
isLoading: boolean; isLoading: boolean;
} }
export interface FetchStateReady {
ready: boolean;
isUnexpected: boolean;
isLoading: boolean;
}
export interface FetchStateReadyInterval { export interface FetchStateReadyInterval {
ready: boolean; ready: boolean;
isUnexpected: boolean; isUnexpected: boolean;
walReplayStatus: WALReplayStatus; walReplayStatus: WALReplayStatus;
} }
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const useFetch = <T extends Record<string, any>>(url: string, options?: RequestInit): FetchState<T> => { export const useFetch = <T extends Record<string, any>>(url: string, options?: RequestInit): FetchState<T> => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const [response, setResponse] = useState<APIResponse<T>>({ status: 'start fetching' } as any); const [response, setResponse] = useState<APIResponse<T>>({ status: 'start fetching' } as any);
const [error, setError] = useState<Error>(); const [error, setError] = useState<Error>();
const [isLoading, setIsLoading] = useState<boolean>(true); const [isLoading, setIsLoading] = useState<boolean>(true);
@ -38,8 +34,9 @@ export const useFetch = <T extends Record<string, any>>(url: string, options?: R
const json = (await res.json()) as APIResponse<T>; const json = (await res.json()) as APIResponse<T>;
setResponse(json); setResponse(json);
setIsLoading(false); setIsLoading(false);
} catch (error) { } catch (err: unknown) {
setError(error as Error); const error = err as Error;
setError(error);
} }
}; };
fetchData(); fetchData();
@ -54,6 +51,7 @@ let wasReady = false;
export const useFetchReadyInterval = (pathPrefix: string, options?: RequestInit): FetchStateReadyInterval => { export const useFetchReadyInterval = (pathPrefix: string, options?: RequestInit): FetchStateReadyInterval => {
const [ready, setReady] = useState<boolean>(false); const [ready, setReady] = useState<boolean>(false);
const [isUnexpected, setIsUnexpected] = useState<boolean>(false); const [isUnexpected, setIsUnexpected] = useState<boolean>(false);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const [walReplayStatus, setWALReplayStatus] = useState<WALReplayStatus>({} as any); const [walReplayStatus, setWALReplayStatus] = useState<WALReplayStatus>({} as any);
useEffect(() => { useEffect(() => {

View file

@ -6,6 +6,7 @@ import { isPresent } from '../../utils';
import { Rule } from '../../types/types'; import { Rule } from '../../types/types';
import { useLocalStorage } from '../../hooks/useLocalStorage'; import { useLocalStorage } from '../../hooks/useLocalStorage';
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type RuleState = keyof RuleStatus<any>; export type RuleState = keyof RuleStatus<any>;
export interface RuleStatus<T> { export interface RuleStatus<T> {
@ -108,6 +109,7 @@ interface GroupInfoProps {
} }
export const GroupInfo: FC<GroupInfoProps> = ({ rules, children }) => { export const GroupInfo: FC<GroupInfoProps> = ({ rules, children }) => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const statesCounter = rules.reduce<any>( const statesCounter = rules.reduce<any>(
(acc, r) => { (acc, r) => {
return { return {

View file

@ -27,7 +27,7 @@ export const ConfigContent: FC<ConfigContentProps> = ({ error, data }) => {
<h2> <h2>
Configuration&nbsp; Configuration&nbsp;
<CopyToClipboard <CopyToClipboard
text={config!} text={config ? config : ''}
onCopy={(_, result) => { onCopy={(_, result) => {
setCopied(result); setCopied(result);
setTimeout(setCopied, 1500); setTimeout(setCopied, 1500);

View file

@ -1,11 +1,11 @@
import React, { Component } from 'react'; import React, { Component } from 'react';
import { Button, InputGroup, InputGroupAddon, InputGroupText, Input } from 'reactstrap'; import { Button, Input, InputGroup, InputGroupAddon, InputGroupText } from 'reactstrap';
import Downshift, { ControllerStateAndHelpers } from 'downshift'; import Downshift, { ControllerStateAndHelpers } from 'downshift';
import sanitizeHTML from 'sanitize-html'; import sanitizeHTML from 'sanitize-html';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faSearch, faSpinner, faGlobeEurope } from '@fortawesome/free-solid-svg-icons'; import { faGlobeEurope, faSearch, faSpinner } from '@fortawesome/free-solid-svg-icons';
import MetricsExplorer from './MetricsExplorer'; import MetricsExplorer from './MetricsExplorer';
import { Fuzzy, FuzzyResult } from '@nexucis/fuzzy'; import { Fuzzy, FuzzyResult } from '@nexucis/fuzzy';
@ -42,13 +42,17 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputSta
} }
setHeight = (): void => { setHeight = (): void => {
const { offsetHeight, clientHeight, scrollHeight } = this.exprInputRef.current!; if (this.exprInputRef.current) {
const offset = offsetHeight - clientHeight; // Needed in order for the height to be more accurate. const { offsetHeight, clientHeight, scrollHeight } = this.exprInputRef.current;
this.setState({ height: scrollHeight + offset }); const offset = offsetHeight - clientHeight; // Needed in order for the height to be more accurate.
this.setState({ height: scrollHeight + offset });
}
}; };
handleInput = (): void => { handleInput = (): void => {
this.setValue(this.exprInputRef.current!.value); if (this.exprInputRef.current) {
this.setValue(this.exprInputRef.current.value);
}
}; };
setValue = (value: string): void => { setValue = (value: string): void => {
@ -76,7 +80,8 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputSta
return fuz.filter(input.replace(/ /g, ''), expressions); return fuz.filter(input.replace(/ /g, ''), expressions);
}; };
createAutocompleteSection = (downshift: ControllerStateAndHelpers<any>) => { // eslint-disable-next-line @typescript-eslint/no-explicit-any
createAutocompleteSection = (downshift: ControllerStateAndHelpers<any>): JSX.Element | null => {
const { inputValue = '', closeMenu, highlightedIndex } = downshift; const { inputValue = '', closeMenu, highlightedIndex } = downshift;
const autocompleteSections = { const autocompleteSections = {
'Query History': this.props.queryHistory, 'Query History': this.props.queryHistory,
@ -94,7 +99,7 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputSta
<ul className="autosuggest-dropdown-list" key={title}> <ul className="autosuggest-dropdown-list" key={title}>
<li className="autosuggest-dropdown-header">{title}</li> <li className="autosuggest-dropdown-header">{title}</li>
{matches {matches
.slice(0, 100) // Limit DOM rendering to 100 results, as DOM rendering is sloooow. .slice(0, 100) // Limit DOM rendering to 100 results, as DOM rendering is slow.
.map((result: FuzzyResult) => { .map((result: FuzzyResult) => {
const itemProps = downshift.getItemProps({ const itemProps = downshift.getItemProps({
key: result.original, key: result.original,
@ -161,7 +166,7 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputSta
this.setValue(newValue); this.setValue(newValue);
}; };
render() { render(): JSX.Element {
const { executeQuery, value } = this.props; const { executeQuery, value } = this.props;
const { height } = this.state; const { height } = this.state;
return ( return (
@ -191,11 +196,13 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputSta
case 'End': case 'End':
// We want to be able to jump to the beginning/end of the input field. // We want to be able to jump to the beginning/end of the input field.
// By default, Downshift otherwise jumps to the first/last suggestion item instead. // By default, Downshift otherwise jumps to the first/last suggestion item instead.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(event.nativeEvent as any).preventDownshiftDefault = true; (event.nativeEvent as any).preventDownshiftDefault = true;
break; break;
case 'ArrowUp': case 'ArrowUp':
case 'ArrowDown': case 'ArrowDown':
if (!downshift.isOpen) { if (!downshift.isOpen) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(event.nativeEvent as any).preventDownshiftDefault = true; (event.nativeEvent as any).preventDownshiftDefault = true;
} }
break; break;
@ -203,13 +210,14 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputSta
downshift.closeMenu(); downshift.closeMenu();
break; break;
case 'Escape': case 'Escape':
if (!downshift.isOpen) { if (!downshift.isOpen && this.exprInputRef.current) {
this.exprInputRef.current!.blur(); this.exprInputRef.current.blur();
} }
break; break;
default: default:
} }
}, },
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} as any)} } as any)}
value={value} value={value}
/> />

View file

@ -42,6 +42,7 @@ export interface GraphExemplar {
seriesLabels: { [key: string]: string }; seriesLabels: { [key: string]: string };
labels: { [key: string]: string }; labels: { [key: string]: string };
data: number[][]; data: number[][];
// eslint-disable-next-line @typescript-eslint/no-explicit-any
points: any; // This is used to specify the symbol. points: any; // This is used to specify the symbol.
color: string; color: string;
} }
@ -206,7 +207,7 @@ class Graph extends PureComponent<GraphProps, GraphState> {
} }
}; };
render() { render(): JSX.Element {
const { chartData, selectedExemplarLabels } = this.state; const { chartData, selectedExemplarLabels } = this.state;
const selectedLabels = selectedExemplarLabels as { const selectedLabels = selectedExemplarLabels as {
exemplar: { [key: string]: string }; exemplar: { [key: string]: string };

View file

@ -3,7 +3,7 @@ import { shallow } from 'enzyme';
import GraphControls from './GraphControls'; import GraphControls from './GraphControls';
import { Button, ButtonGroup, Form, InputGroup, InputGroupAddon, Input } from 'reactstrap'; import { Button, ButtonGroup, Form, InputGroup, InputGroupAddon, Input } from 'reactstrap';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faSquare, faPlus, faMinus, faChartArea, faChartLine } from '@fortawesome/free-solid-svg-icons'; import { faPlus, faMinus, faChartArea, faChartLine } from '@fortawesome/free-solid-svg-icons';
import TimeInput from './TimeInput'; import TimeInput from './TimeInput';
const defaultGraphControlProps = { const defaultGraphControlProps = {

View file

@ -1,10 +1,10 @@
import React, { Component } from 'react'; import React, { Component } from 'react';
import { Button, ButtonGroup, Form, InputGroup, InputGroupAddon, Input } from 'reactstrap'; import { Button, ButtonGroup, Form, Input, InputGroup, InputGroupAddon } from 'reactstrap';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faPlus, faMinus, faChartArea, faChartLine } from '@fortawesome/free-solid-svg-icons'; import { faChartArea, faChartLine, faMinus, faPlus } from '@fortawesome/free-solid-svg-icons';
import TimeInput from './TimeInput'; import TimeInput from './TimeInput';
import { parseDuration, formatDuration } from '../../utils'; import { formatDuration, parseDuration } from '../../utils';
interface GraphControlsProps { interface GraphControlsProps {
range: number; range: number;
@ -58,7 +58,7 @@ class GraphControls extends Component<GraphControlsProps> {
}; };
changeRangeInput = (range: number): void => { changeRangeInput = (range: number): void => {
this.rangeRef.current!.value = formatDuration(range); this.setCurrentRangeValue(formatDuration(range));
}; };
increaseRange = (): void => { increaseRange = (): void => {
@ -81,16 +81,22 @@ class GraphControls extends Component<GraphControlsProps> {
} }
}; };
componentDidUpdate(prevProps: GraphControlsProps) { componentDidUpdate(prevProps: GraphControlsProps): void {
if (prevProps.range !== this.props.range) { if (prevProps.range !== this.props.range) {
this.changeRangeInput(this.props.range); this.changeRangeInput(this.props.range);
} }
if (prevProps.resolution !== this.props.resolution) { if (prevProps.resolution !== this.props.resolution) {
this.resolutionRef.current!.value = this.props.resolution !== null ? this.props.resolution.toString() : ''; this.setCurrentRangeValue(this.props.resolution !== null ? this.props.resolution.toString() : '');
} }
} }
render() { setCurrentRangeValue(value: string): void {
if (this.rangeRef.current) {
this.rangeRef.current.value = value;
}
}
render(): JSX.Element {
return ( return (
<Form inline className="graph-controls" onSubmit={(e) => e.preventDefault()}> <Form inline className="graph-controls" onSubmit={(e) => e.preventDefault()}>
<InputGroup className="range-input" size="sm"> <InputGroup className="range-input" size="sm">
@ -103,9 +109,13 @@ class GraphControls extends Component<GraphControlsProps> {
<Input <Input
defaultValue={formatDuration(this.props.range)} defaultValue={formatDuration(this.props.range)}
innerRef={this.rangeRef} innerRef={this.rangeRef}
onBlur={() => this.onChangeRangeInput(this.rangeRef.current!.value)} onBlur={() => {
if (this.rangeRef.current) {
this.onChangeRangeInput(this.rangeRef.current.value);
}
}}
onKeyDown={(e: React.KeyboardEvent<HTMLInputElement>) => onKeyDown={(e: React.KeyboardEvent<HTMLInputElement>) =>
e.key === 'Enter' && this.onChangeRangeInput(this.rangeRef.current!.value) e.key === 'Enter' && this.rangeRef.current && this.onChangeRangeInput(this.rangeRef.current.value)
} }
/> />
@ -130,8 +140,10 @@ class GraphControls extends Component<GraphControlsProps> {
defaultValue={this.props.resolution !== null ? this.props.resolution.toString() : ''} defaultValue={this.props.resolution !== null ? this.props.resolution.toString() : ''}
innerRef={this.resolutionRef} innerRef={this.resolutionRef}
onBlur={() => { onBlur={() => {
const res = parseInt(this.resolutionRef.current!.value); if (this.resolutionRef.current) {
this.props.onChangeResolution(res ? res : null); const res = parseInt(this.resolutionRef.current.value);
this.props.onChangeResolution(res ? res : null);
}
}} }}
bsSize="sm" bsSize="sm"
/> />

View file

@@ -53,7 +53,7 @@ export const formatValue = (y: number | null): string => {
   throw Error("couldn't format a value, this is a bug");
 };
 
-export const getHoverColor = (color: string, opacity: number, stacked: boolean) => {
+export const getHoverColor = (color: string, opacity: number, stacked: boolean): string => {
   const { r, g, b } = $.color.parse(color);
   if (!stacked) {
     return `rgba(${r}, ${g}, ${b}, ${opacity})`;
@@ -67,10 +67,15 @@ export const getHoverColor = (color: string, opacity: number, stacked: boolean)
   return `rgb(${Math.round(base + opacity * r)},${Math.round(base + opacity * g)},${Math.round(base + opacity * b)})`;
 };
 
-export const toHoverColor = (index: number, stacked: boolean) => (series: GraphSeries, i: number) => ({
-  ...series,
-  color: getHoverColor(series.color, i !== index ? 0.3 : 1, stacked),
-});
+export const toHoverColor =
+  (index: number, stacked: boolean) =>
+  (
+    series: GraphSeries,
+    i: number
+  ): { color: string; data: (number | null)[][]; index: number; labels: { [p: string]: string } } => ({
+    ...series,
+    color: getHoverColor(series.color, i !== index ? 0.3 : 1, stacked),
+  });
 
 export const getOptions = (stacked: boolean, useLocalTime: boolean): jquery.flot.plotOptions => {
   return {
@@ -154,7 +159,10 @@ export const getOptions = (stacked: boolean, useLocalTime: boolean): jquery.flot
 };
 
 // This was adapted from Flot's color generation code.
-export const getColors = (data: { resultType: string; result: Array<{ metric: Metric; values: [number, string][] }> }) => {
+export const getColors = (data: {
+  resultType: string;
+  result: Array<{ metric: Metric; values: [number, string][] }>;
+}): Color[] => {
   const colorPool = ['#edc240', '#afd8f8', '#cb4b4b', '#4da74d', '#9440ed'];
   const colorPoolSize = colorPool.length;
   let variation = 0;
@@ -180,6 +188,7 @@ export const getColors = (data: { resultType: string; result: Array<{ metric: Me
 export const normalizeData = ({ queryParams, data, exemplars, stacked }: GraphProps): GraphData => {
   const colors = getColors(data);
+  // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
   const { startTime, endTime, resolution } = queryParams!;
   let sum = 0;
@@ -256,7 +265,7 @@ export const normalizeData = ({ queryParams, data, exemplars, stacked }: GraphPr
   };
 };
 
-export const parseValue = (value: string) => {
+export const parseValue = (value: string): null | number => {
   const val = parseFloat(value);
   // "+Inf", "-Inf", "+Inf" will be parsed into NaN by parseFloat(). They
   // can't be graphed, so show them as gaps (null).
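Most hunks in this file only add explicit return types to exported helpers — the fix that `@typescript-eslint/explicit-module-boundary-types` (disabled inline for `callAll` further down in this diff) asks for. A short sketch of the idea, with made-up function names:

    // Without an annotation the return type is inferred, which the rule
    // rejects at exported module boundaries: a refactor inside the body
    // could silently change the public contract.
    export const scale = (values: number[], factor: number): number[] =>
      values.map((v) => v * factor);

    // For curried functions the annotation goes on the innermost arrow,
    // where the final value is produced — as in toHoverColor above.
    export const prefixer =
      (prefix: string) =>
      (name: string): string =>
        `${prefix}${name}`;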
View file
@@ -5,6 +5,7 @@ import { QueryParams, ExemplarData } from '../../types/types';
 import { isPresent } from '../../utils';
 
 interface GraphTabContentProps {
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
   data: any;
   exemplars: ExemplarData;
   stacked: boolean;
View file
@@ -18,36 +18,38 @@ export class Legend extends PureComponent<LegendProps, LegendState> {
   state = {
     selectedIndexes: [] as number[],
   };
 
-  componentDidUpdate(prevProps: LegendProps) {
+  componentDidUpdate(prevProps: LegendProps): void {
     if (this.props.shouldReset && prevProps.shouldReset !== this.props.shouldReset) {
       this.setState({ selectedIndexes: [] });
     }
   }
 
-  handleSeriesSelect = (index: number) => (ev: React.MouseEvent<HTMLDivElement, MouseEvent>) => {
-    // TODO: add proper event type
-    const { selectedIndexes } = this.state;
+  handleSeriesSelect =
+    (index: number) =>
+    (ev: React.MouseEvent<HTMLDivElement, MouseEvent>): void => {
+      // TODO: add proper event type
+      const { selectedIndexes } = this.state;
       let selected = [index];
       if (ev.ctrlKey || ev.metaKey) {
         const { chartData } = this.props;
         if (selectedIndexes.includes(index)) {
           selected = selectedIndexes.filter((idx) => idx !== index);
         } else {
           selected =
             // Flip the logic - In case none is selected ctrl + click should deselect clicked series.
             selectedIndexes.length === 0
               ? chartData.reduce<number[]>((acc, _, i) => (i === index ? acc : [...acc, i]), [])
               : [...selectedIndexes, index]; // Select multiple.
         }
       } else if (selectedIndexes.length === 1 && selectedIndexes.includes(index)) {
         selected = [];
       }
       this.setState({ selectedIndexes: selected });
       this.props.onSeriesToggle(selected, index);
     };
 
-  render() {
+  render(): JSX.Element {
     const { chartData, onLegendMouseOut, onHover } = this.props;
     const { selectedIndexes } = this.state;
     const canUseHover = chartData.length > 1 && selectedIndexes.length === 0;
View file
@@ -9,16 +9,16 @@ interface Props {
 }
 
 class MetricsExplorer extends Component<Props> {
-  handleMetricClick = (query: string) => {
+  handleMetricClick = (query: string): void => {
     this.props.insertAtCursor(query);
     this.props.updateShow(false);
   };
 
-  toggle = () => {
+  toggle = (): void => {
     this.props.updateShow(!this.props.show);
   };
 
-  render() {
+  render(): JSX.Element {
     return (
       <Modal isOpen={this.props.show} toggle={this.toggle} className="metrics-explorer">
         <ModalHeader toggle={this.toggle}>Metrics Explorer</ModalHeader>
View file
@@ -31,6 +31,7 @@ interface PanelProps {
 }
 
 interface PanelState {
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
   data: any; // TODO: Type data.
   exemplars: ExemplarData;
   lastQueryParams: QueryParams | null;
@@ -101,6 +102,7 @@ class Panel extends Component<PanelProps, PanelState> {
     this.executeQuery();
   }
 
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
   executeQuery = async (): Promise<any> => {
     const { exprInputValue: expr } = this.state;
     const queryStart = Date.now();
@@ -198,7 +200,8 @@ class Panel extends Component<PanelProps, PanelState> {
         loading: false,
       });
       this.abortInFlightFetch = null;
-    } catch (error: any) {
+    } catch (err: unknown) {
+      const error = err as Error;
       if (error.name === 'AbortError') {
         // Aborts are expected, don't show an error for them.
         return;
@@ -259,7 +262,7 @@ class Panel extends Component<PanelProps, PanelState> {
     this.setOptions({ range: endTime - startTime, endTime: endTime });
   };
 
-  render() {
+  render(): JSX.Element {
     const { pastQueries, metricNames, options } = this.props;
     return (
       <div className="panel">
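The catch-clause change here is likely driven by TypeScript 4.4's useUnknownInCatchVariables (part of --strict), under which `catch (error: any)` gives way to `catch (err: unknown)` plus an explicit narrowing. A self-contained sketch of the same pattern, assuming an Error-like value as with aborted fetches:

    async function load(url: string): Promise<string | null> {
      try {
        const resp = await fetch(url);
        return await resp.text();
      } catch (err: unknown) {
        // The catch variable is `unknown`: cast (or check `instanceof Error`)
        // before reading `.name` or `.message`.
        const error = err as Error;
        if (error.name === 'AbortError') {
          return null; // aborted requests are expected, not an error
        }
        throw error;
      }
    }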
View file
@@ -11,7 +11,7 @@ import { API_PATH } from '../../constants/constants';
 
 export type PanelMeta = { key: string; options: PanelOptions; id: string };
 
-export const updateURL = (nextPanels: PanelMeta[]) => {
+export const updateURL = (nextPanels: PanelMeta[]): void => {
   const query = encodePanelOptionsToQueryString(nextPanels);
   window.history.pushState({}, '', query);
 };
View file
@@ -29,7 +29,7 @@ const SeriesName: FC<SeriesNameProps> = ({ labels, format }) => {
   return (
     <div>
-      <span className="legend-metric-name">{labels!.__name__ || ''}</span>
+      <span className="legend-metric-name">{labels ? labels.__name__ : ''}</span>
       <span className="legend-label-brace">{'{'}</span>
       {labelNodes}
       <span className="legend-label-brace">{'}'}</span>
@@ -46,7 +46,7 @@ const SeriesName: FC<SeriesNameProps> = ({ labels, format }) => {
   }
   // Return a simple text node. This is much faster to scroll through
   // for longer lists (hundreds of items).
-  return <>{metricToSeriesName(labels!)}</>;
+  return <>{metricToSeriesName(labels)}</>;
 };
 
 export default SeriesName;
View file
@@ -1,6 +1,6 @@
 import $ from 'jquery';
 import React, { Component } from 'react';
-import { Button, InputGroup, InputGroupAddon, Input } from 'reactstrap';
+import { Button, Input, InputGroup, InputGroupAddon } from 'reactstrap';
 
 import moment from 'moment-timezone';
@@ -11,11 +11,11 @@ import '../../../node_modules/tempusdominus-bootstrap-4/build/css/tempusdominus-
 import { dom, library } from '@fortawesome/fontawesome-svg-core';
 import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
 import {
+  faArrowDown,
+  faArrowUp,
+  faCalendarCheck,
   faChevronLeft,
   faChevronRight,
-  faCalendarCheck,
-  faArrowUp,
-  faArrowDown,
   faTimes,
 } from '@fortawesome/free-solid-svg-icons';
@@ -33,6 +33,7 @@ interface TimeInputProps {
 
 class TimeInput extends Component<TimeInputProps> {
   private timeInputRef = React.createRef<HTMLInputElement>();
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
   private $time: any = null;
 
   getBaseTime = (): number => {
@@ -60,7 +61,10 @@ class TimeInput extends Component<TimeInputProps> {
   };
 
   componentDidMount(): void {
-    this.$time = $(this.timeInputRef.current!);
+    if (!this.timeInputRef.current) {
+      return;
+    }
+    this.$time = $(this.timeInputRef.current);
     this.$time.datetimepicker({
       icons: {
@@ -78,6 +82,7 @@ class TimeInput extends Component<TimeInputProps> {
       defaultDate: this.props.time,
     });
 
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    this.$time.on('change.datetimepicker', (e: any) => {
       // The end time can also be set by dragging a section on the graph,
       // and that value will have decimal places.
@@ -101,7 +106,7 @@ class TimeInput extends Component<TimeInputProps> {
     }
   }
 
-  render() {
+  render(): JSX.Element {
     return (
       <InputGroup className="time-input" size="sm">
         <InputGroupAddon addonType="prepend">
View file
@@ -1,7 +1,7 @@
 import React, { FC } from 'react';
 import { useFetch } from '../../hooks/useFetch';
 import { LabelsTable } from './LabelsTable';
-import { Target, Labels, DroppedTarget } from '../targets/target';
+import { DroppedTarget, Labels, Target } from '../targets/target';
 
 import { withStatusIndicator } from '../../components/withStatusIndicator';
 import { mapObjEntries } from '../../utils';
@@ -19,7 +19,10 @@ export interface TargetLabels {
   isDropped: boolean;
 }
 
-export const processSummary = (activeTargets: Target[], droppedTargets: DroppedTarget[]) => {
+export const processSummary = (
+  activeTargets: Target[],
+  droppedTargets: DroppedTarget[]
+): Record<string, { active: number; total: number }> => {
   const targets: Record<string, { active: number; total: number }> = {};
 
   // Get targets of each type along with the total and active end points
View file
@@ -12,6 +12,7 @@ interface StatusPageProps {
 
 export const statusConfig: Record<
   string,
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
   { title?: string; customizeValue?: (v: any, key: string) => any; customRow?: boolean; skip?: boolean }
 > = {
   startTime: { title: 'Start time', customizeValue: (v: string) => new Date(v).toUTCString() },
@@ -57,7 +58,7 @@ export const StatusContent: FC<StatusPageProps> = ({ data, title }) => {
     <Table className="h-auto" size="sm" bordered striped>
       <tbody>
         {Object.entries(data).map(([k, v]) => {
-          const { title = k, customizeValue = (val: any) => val, customRow, skip } = statusConfig[k] || {};
+          const { title = k, customizeValue = (val: string) => val, customRow, skip } = statusConfig[k] || {};
           if (skip) {
             return null;
           }
View file
@@ -10,10 +10,11 @@ const EndpointLink: FC<EndpointLinkProps> = ({ endpoint, globalUrl }) => {
   let url: URL;
   try {
     url = new URL(endpoint);
-  } catch (e: any) {
+  } catch (err: unknown) {
+    const error = err as Error;
     return (
       <Alert color="danger">
-        <strong>Error:</strong> {e.message}
+        <strong>Error:</strong> {error.message}
       </Alert>
     );
   }
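For context, `new URL()` throws a TypeError on malformed input rather than returning null, which is why the component wraps construction in try/catch and renders the message instead. A tiny sketch of the same approach (the helper below is illustrative, not part of the diff):

    function describeEndpoint(endpoint: string): string {
      try {
        const url = new URL(endpoint);
        return `${url.protocol}//${url.host}${url.pathname}`;
      } catch (err: unknown) {
        // The URL constructor throws a TypeError for invalid URLs.
        return `invalid endpoint: ${(err as Error).message}`;
      }
    }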
View file
@@ -25,6 +25,9 @@ describe('ScrapePoolList', () => {
       const div = document.createElement('div');
       div.id = `series-labels-${pool}-${idx}`;
       document.body.appendChild(div);
+      const div2 = document.createElement('div');
+      div2.id = `scrape-duration-${pool}-${idx}`;
+      document.body.appendChild(div2);
     });
   });
   mock = fetchMock.mockResponse(JSON.stringify(sampleApiResponse));
View file
@@ -57,6 +57,9 @@ describe('ScrapePoolPanel', () => {
     const div = document.createElement('div');
     div.id = `series-labels-prometheus-0`;
     document.body.appendChild(div);
+    const div2 = document.createElement('div');
+    div2.id = `scrape-duration-prometheus-0`;
+    document.body.appendChild(div2);
 
     const scrapePoolPanel = mount(<ScrapePoolPanel {...props} />);
     const btn = scrapePoolPanel.find(Button);
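The extra `scrape-duration-*` fixtures exist because reactstrap's Tooltip resolves its `target` in the real DOM when it mounts; without a matching element the render throws. A minimal jest-style sketch of that constraint (names are made up):

    import React from 'react';
    import { mount } from 'enzyme';
    import { Tooltip } from 'reactstrap';

    it('mounts a tooltip against a pre-created DOM target', () => {
      // The target node must be in document.body before mount(), or
      // reactstrap cannot resolve target="tip-target" and errors out.
      const target = document.createElement('div');
      target.id = 'tip-target';
      document.body.appendChild(target);

      mount(
        <Tooltip isOpen target="tip-target" toggle={() => undefined}>
          hello
        </Tooltip>
      );
      expect(document.body.textContent).toContain('hello');
    });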
View file
@@ -5,9 +5,10 @@ import styles from './ScrapePoolPanel.module.css';
 import { Target } from './target';
 import EndpointLink from './EndpointLink';
 import TargetLabels from './TargetLabels';
+import TargetScrapeDuration from './TargetScrapeDuration';
 import { now } from 'moment';
 import { ToggleMoreLess } from '../../components/ToggleMoreLess';
-import { formatRelative, humanizeDuration } from '../../utils';
+import { formatRelative } from '../../utils';
 
 interface PanelProps {
   scrapePool: string;
@@ -54,6 +55,8 @@ const ScrapePoolPanel: FC<PanelProps> = ({ scrapePool, targetGroup, expanded, to
             lastScrape,
             lastScrapeDuration,
             health,
+            scrapeInterval,
+            scrapeTimeout,
           } = target;
           const color = getColor(health);
@@ -69,7 +72,15 @@
                 <TargetLabels discoveredLabels={discoveredLabels} labels={labels} scrapePool={scrapePool} idx={idx} />
               </td>
               <td className={styles['last-scrape']}>{formatRelative(lastScrape, now())}</td>
-              <td className={styles['scrape-duration']}>{humanizeDuration(lastScrapeDuration * 1000)}</td>
+              <td className={styles['scrape-duration']}>
+                <TargetScrapeDuration
+                  duration={lastScrapeDuration}
+                  scrapePool={scrapePool}
+                  idx={idx}
+                  interval={scrapeInterval}
+                  timeout={scrapeTimeout}
+                />
+              </td>
               <td className={styles.errors}>{lastError ? <span className="text-danger">{lastError}</span> : null}</td>
             </tr>
           );
View file
@@ -0,0 +1,41 @@
+import React, { FC, Fragment, useState } from 'react';
+import { Tooltip } from 'reactstrap';
+import 'css.escape';
+import { humanizeDuration } from '../../utils';
+
+export interface TargetScrapeDurationProps {
+  duration: number;
+  interval: string;
+  timeout: string;
+  idx: number;
+  scrapePool: string;
+}
+
+const TargetScrapeDuration: FC<TargetScrapeDurationProps> = ({ duration, interval, timeout, idx, scrapePool }) => {
+  const [scrapeTooltipOpen, setScrapeTooltipOpen] = useState<boolean>(false);
+  const id = `scrape-duration-${scrapePool}-${idx}`;
+
+  return (
+    <>
+      <div id={id} className="scrape-duration-container">
+        {humanizeDuration(duration * 1000)}
+      </div>
+      <Tooltip
+        isOpen={scrapeTooltipOpen}
+        toggle={() => setScrapeTooltipOpen(!scrapeTooltipOpen)}
+        target={CSS.escape(id)}
+        style={{ maxWidth: 'none', textAlign: 'left' }}
+      >
+        <Fragment>
+          <span>Interval: {interval}</span>
+          <br />
+        </Fragment>
+        <Fragment>
+          <span>Timeout: {timeout}</span>
+        </Fragment>
+      </Tooltip>
+    </>
+  );
+};
+
+export default TargetScrapeDuration;
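A note on the `css.escape` import in this new component: it polyfills the standard CSS.escape() where missing, and it matters because scrape pool names can contain characters (for example `/` or `.`) that would otherwise break the selector reactstrap derives from `target`. A quick illustration, with a hypothetical pool name:

    import 'css.escape'; // polyfill: defines CSS.escape where the browser lacks it

    const id = 'scrape-duration-node/exporter-0';
    // Raw, '#scrape-duration-node/exporter-0' is not a valid CSS selector;
    // escaped, it can be used safely in selector-based lookups:
    document.querySelector(`#${CSS.escape(id)}`);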
View file
@@ -23,6 +23,8 @@ export const targetGroups: ScrapePools = Object.freeze({
         lastScrape: '2019-11-04T11:52:14.759299-07:00',
         lastScrapeDuration: 36560147,
         health: 'up',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
       },
       {
         discoveredLabels: {
@@ -43,6 +45,8 @@ export const targetGroups: ScrapePools = Object.freeze({
         lastScrape: '2019-11-04T11:52:24.731096-07:00',
         lastScrapeDuration: 49448763,
         health: 'up',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
       },
       {
         discoveredLabels: {
@@ -63,6 +67,8 @@ export const targetGroups: ScrapePools = Object.freeze({
         lastScrape: '2019-11-04T11:52:13.516654-07:00',
         lastScrapeDuration: 120916592,
         health: 'down',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
       },
     ],
   },
@@ -87,6 +93,8 @@ export const targetGroups: ScrapePools = Object.freeze({
         lastScrape: '2019-11-04T11:52:14.145703-07:00',
         lastScrapeDuration: 3842307,
         health: 'up',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
       },
     ],
   },
@@ -111,6 +119,8 @@ export const targetGroups: ScrapePools = Object.freeze({
         lastScrape: '2019-11-04T11:52:18.479731-07:00',
         lastScrapeDuration: 4050976,
         health: 'up',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
       },
     ],
   },
View file
@@ -12,6 +12,8 @@ export interface Target {
   lastScrape: string;
   lastScrapeDuration: number;
   health: string;
+  scrapeInterval: string;
+  scrapeTimeout: string;
 }
 
 export interface DroppedTarget {
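With these two fields, every entry from the targets API carries its configured cadence next to its last-scrape stats, which is what the new tooltip displays. A hedged sketch of a consumer one could build on this shape (the helper and its duration parsing are illustrative, not part of the diff):

    interface TargetTiming {
      lastScrapeDuration: number; // seconds
      scrapeInterval: string; // e.g. '15s'
      scrapeTimeout: string; // e.g. '500ms'
    }

    // Naive stand-in for a real duration parser: handles only 's' and 'ms'.
    const toSeconds = (d: string): number => (d.endsWith('ms') ? parseFloat(d) / 1000 : parseFloat(d));

    // Flags targets whose last scrape used more than half its timeout budget.
    export const isScrapeSlow = (t: TargetTiming): boolean =>
      t.lastScrapeDuration > toSeconds(t.scrapeTimeout) / 2;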
View file
@@ -217,11 +217,13 @@ export const parseOption = (param: string): Partial<PanelOptions> => {
   return {};
 };
 
-export const formatParam = (key: string) => (paramName: string, value: number | string | boolean) => {
-  return `g${key}.${paramName}=${encodeURIComponent(value)}`;
-};
+export const formatParam =
+  (key: string) =>
+  (paramName: string, value: number | string | boolean): string => {
+    return `g${key}.${paramName}=${encodeURIComponent(value)}`;
+  };
 
-export const toQueryString = ({ key, options }: PanelMeta) => {
+export const toQueryString = ({ key, options }: PanelMeta): string => {
   const formatWithKey = formatParam(key);
   const { expr, type, stacked, range, endTime, resolution, showExemplars } = options;
   const time = isPresent(endTime) ? formatTime(endTime) : false;
@@ -247,16 +249,20 @@ export const createExpressionLink = (expr: string): string => {
 export const mapObjEntries = <T, key extends keyof T, Z>(
   o: T,
   cb: ([k, v]: [string, T[key]], i: number, arr: [string, T[key]][]) => Z
-) => Object.entries(o).map(cb);
+): Z[] => Object.entries(o).map(cb);
 
 export const callAll =
-  (...fns: Array<(...args: any) => void>) =>
-  (...args: any) => {
+  (
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    ...fns: Array<(...args: any) => void>
+  ) =>
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/explicit-module-boundary-types
+  (...args: any): void => {
     // eslint-disable-next-line prefer-spread
     fns.filter(Boolean).forEach((fn) => fn.apply(null, args));
   };
 
-export const parsePrometheusFloat = (value: string): number | string => {
+export const parsePrometheusFloat = (value: string): string | number => {
   if (isNaN(Number(value))) {
     return value;
   } else {