Mirror of https://github.com/prometheus/prometheus.git

Commit e8e9155a11: Merge branch 'main' into sparsehistogram
@@ -353,8 +353,10 @@ func checkConfig(agentMode bool, filename string) ([]string, error) {
 	}

 	for _, scfg := range cfg.ScrapeConfigs {
-		if err := checkFileExists(scfg.HTTPClientConfig.BearerTokenFile); err != nil {
-			return nil, errors.Wrapf(err, "error checking bearer token file %q", scfg.HTTPClientConfig.BearerTokenFile)
+		if scfg.HTTPClientConfig.Authorization != nil {
+			if err := checkFileExists(scfg.HTTPClientConfig.Authorization.CredentialsFile); err != nil {
+				return nil, errors.Wrapf(err, "error checking authorization credentials or bearer token file %q", scfg.HTTPClientConfig.Authorization.CredentialsFile)
+			}
 		}

 		if err := checkTLSConfig(scfg.HTTPClientConfig.TLSConfig); err != nil {
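The new check only runs when a scrape config carries an authorization block, and it validates the credentials_file path with the existing checkFileExists helper. As a rough stand-alone illustration of that behaviour (hypothetical type and helper names, not promtool's actual code):

    package main

    import (
    	"fmt"
    	"os"
    )

    // scrapeAuth is a simplified stand-in for the authorization section of a scrape config.
    type scrapeAuth struct {
    	CredentialsFile string
    }

    // checkCredentialsFile mirrors the behaviour added above: skip validation when no
    // file is configured, otherwise fail if the referenced file cannot be stat'ed.
    func checkCredentialsFile(auth *scrapeAuth) error {
    	if auth == nil || auth.CredentialsFile == "" {
    		return nil
    	}
    	if _, err := os.Stat(auth.CredentialsFile); err != nil {
    		return fmt.Errorf("error checking authorization credentials or bearer token file %q: %w", auth.CredentialsFile, err)
    	}
    	return nil
    }

    func main() {
    	err := checkCredentialsFile(&scrapeAuth{CredentialsFile: "/random/file/which/does/not/exist.yml"})
    	fmt.Println(err) // non-nil: the referenced file does not exist
    }

Run against the bad.yml fixture added below, this is exactly the failure mode the new promtool test asserts on.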
@@ -203,3 +203,33 @@ func TestCheckTargetConfig(t *testing.T) {
 		})
 	}
 }
+
+func TestAuthorizationConfig(t *testing.T) {
+	cases := []struct {
+		name string
+		file string
+		err  string
+	}{
+		{
+			name: "authorization_credentials_file.bad",
+			file: "authorization_credentials_file.bad.yml",
+			err:  "error checking authorization credentials or bearer token file",
+		},
+		{
+			name: "authorization_credentials_file.good",
+			file: "authorization_credentials_file.good.yml",
+			err:  "",
+		},
+	}
+
+	for _, test := range cases {
+		t.Run(test.name, func(t *testing.T) {
+			_, err := checkConfig(false, "testdata/"+test.file)
+			if test.err != "" {
+				require.Contains(t, err.Error(), test.err, "Expected error to contain %q, got %q", test.err, err.Error())
+				return
+			}
+			require.NoError(t, err)
+		})
+	}
+}
cmd/promtool/testdata/authorization_credentials_file.bad.yml (new file)
@@ -0,0 +1,4 @@
+scrape_configs:
+  - job_name: test
+    authorization:
+      credentials_file: "/random/file/which/does/not/exist.yml"
cmd/promtool/testdata/authorization_credentials_file.good.yml (new file)
@@ -0,0 +1,4 @@
+scrape_configs:
+  - job_name: test
+    authorization:
+      credentials_file: "."
go.mod
@@ -3,13 +3,13 @@ module github.com/prometheus/prometheus
 go 1.14

 require (
-	github.com/Azure/azure-sdk-for-go v58.3.0+incompatible
+	github.com/Azure/azure-sdk-for-go v59.4.0+incompatible
 	github.com/Azure/go-autorest/autorest v0.11.22
 	github.com/Azure/go-autorest/autorest/adal v0.9.17
 	github.com/Azure/go-autorest/autorest/to v0.4.0 // indirect
 	github.com/Azure/go-autorest/autorest/validation v0.3.1 // indirect
 	github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a
-	github.com/aws/aws-sdk-go v1.42.10
+	github.com/aws/aws-sdk-go v1.42.15
 	github.com/cespare/xxhash/v2 v2.1.2
 	github.com/containerd/containerd v1.5.7 // indirect
 	github.com/dennwc/varint v1.0.0
@@ -23,7 +23,7 @@ require (
 	github.com/fsnotify/fsnotify v1.5.1
 	github.com/go-kit/log v0.2.0
 	github.com/go-logfmt/logfmt v0.5.1
-	github.com/go-openapi/strfmt v0.21.0
+	github.com/go-openapi/strfmt v0.21.1
 	github.com/go-zookeeper/zk v1.0.2
 	github.com/gogo/protobuf v1.3.2
 	github.com/golang/snappy v0.0.4
go.sum
@@ -51,8 +51,8 @@ collectd.org v0.3.0/go.mod h1:A/8DzQBkF6abtvrT2j/AU/4tiBgJWYyh0y/oB/4MlWE=
 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
 github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
 github.com/Azure/azure-sdk-for-go v41.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
-github.com/Azure/azure-sdk-for-go v58.3.0+incompatible h1:lb9OWePNuJMiibdxg9XvdbiOldR0Yifge37L4LoOxIs=
-github.com/Azure/azure-sdk-for-go v58.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
+github.com/Azure/azure-sdk-for-go v59.4.0+incompatible h1:gDA8odnngdNd3KYHL2NoK1j9vpWBgEnFSjKKLpkC8Aw=
+github.com/Azure/azure-sdk-for-go v59.4.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
 github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=
 github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
 github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
@@ -187,8 +187,8 @@ github.com/aws/aws-sdk-go v1.30.12/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZve
 github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48=
 github.com/aws/aws-sdk-go v1.38.35/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
 github.com/aws/aws-sdk-go v1.40.11/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q=
-github.com/aws/aws-sdk-go v1.42.10 h1:PW9G/hnsuKttbFtOcgNKD0vQrp4yfNrtACA+X0p9mjM=
-github.com/aws/aws-sdk-go v1.42.10/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q=
+github.com/aws/aws-sdk-go v1.42.15 h1:RcUChuF7KzrrTqx9LAzJbLBX00LkUY7cH9T1VdxNdqk=
+github.com/aws/aws-sdk-go v1.42.15/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q=
 github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g=
 github.com/benbjohnson/immutable v0.2.1/go.mod h1:uc6OHo6PN2++n98KHLxW8ef4W42ylHiQSENghE1ezxI=
 github.com/benbjohnson/tmpl v1.0.0/go.mod h1:igT620JFIi44B6awvU9IsDhR77IXWtFigTLil/RPdps=
@@ -547,8 +547,8 @@ github.com/go-openapi/strfmt v0.19.5/go.mod h1:eftuHTlB/dI8Uq8JJOyRlieZf+WkkxUuk
 github.com/go-openapi/strfmt v0.19.11/go.mod h1:UukAYgTaQfqJuAFlNxxMWNvMYiwiXtLsF2VwmoFtbtc=
 github.com/go-openapi/strfmt v0.20.0/go.mod h1:UukAYgTaQfqJuAFlNxxMWNvMYiwiXtLsF2VwmoFtbtc=
 github.com/go-openapi/strfmt v0.20.1/go.mod h1:43urheQI9dNtE5lTZQfuFJvjYJKPrxicATpEfZwHUNk=
-github.com/go-openapi/strfmt v0.21.0 h1:hX2qEZKmYks+t0hKeb4VTJpUm2UYsdL3+DCid5swxIs=
-github.com/go-openapi/strfmt v0.21.0/go.mod h1:ZRQ409bWMj+SOgXofQAGTIo2Ebu72Gs+WaRADcS5iNg=
+github.com/go-openapi/strfmt v0.21.1 h1:G6s2t5V5kGCHLVbSdZ/6lI8Wm4OzoPFkc3/cjAsKQrM=
+github.com/go-openapi/strfmt v0.21.1/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k=
 github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I=
 github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
 github.com/go-openapi/swag v0.18.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
@@ -1351,8 +1351,8 @@ go.mongodb.org/mongo-driver v1.4.3/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4S
 go.mongodb.org/mongo-driver v1.4.4/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc=
 go.mongodb.org/mongo-driver v1.4.6/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc=
 go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw=
-go.mongodb.org/mongo-driver v1.7.3 h1:G4l/eYY9VrQAK/AUgkV0koQKzQnyddnWxrd/Etf0jIs=
-go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg=
+go.mongodb.org/mongo-driver v1.7.5 h1:ny3p0reEpgsR2cfA5cjgwFZg3Cv/ofFh/8jbhGtz9VI=
+go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng=
 go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk=
 go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
 go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
tsdb/agent/db.go
@@ -16,9 +16,11 @@ package agent
 import (
 	"context"
 	"fmt"
+	"math"
 	"path/filepath"
 	"sync"
 	"time"
+	"unicode/utf8"

 	"github.com/go-kit/log"
 	"github.com/go-kit/log/level"
@@ -94,6 +96,8 @@ type dbMetrics struct {
 	numActiveSeries             prometheus.Gauge
 	numWALSeriesPendingDeletion prometheus.Gauge
 	totalAppendedSamples        prometheus.Counter
+	totalAppendedExemplars      prometheus.Counter
+	totalOutOfOrderSamples      prometheus.Counter
 	walTruncateDuration         prometheus.Summary
 	walCorruptionsTotal         prometheus.Counter
 	walTotalReplayDuration      prometheus.Gauge
@@ -120,6 +124,16 @@ func newDBMetrics(r prometheus.Registerer) *dbMetrics {
 		Help: "Total number of samples appended to the storage",
 	})

+	m.totalAppendedExemplars = prometheus.NewCounter(prometheus.CounterOpts{
+		Name: "prometheus_agent_exemplars_appended_total",
+		Help: "Total number of exemplars appended to the storage",
+	})
+
+	m.totalOutOfOrderSamples = prometheus.NewCounter(prometheus.CounterOpts{
+		Name: "prometheus_agent_out_of_order_samples_total",
+		Help: "Total number of out of order samples ingestion failed attempts.",
+	})
+
 	m.walTruncateDuration = prometheus.NewSummary(prometheus.SummaryOpts{
 		Name: "prometheus_agent_truncate_duration_seconds",
 		Help: "Duration of WAL truncation.",
@@ -160,6 +174,8 @@ func newDBMetrics(r prometheus.Registerer) *dbMetrics {
 		m.numActiveSeries,
 		m.numWALSeriesPendingDeletion,
 		m.totalAppendedSamples,
+		m.totalAppendedExemplars,
+		m.totalOutOfOrderSamples,
 		m.walTruncateDuration,
 		m.walCorruptionsTotal,
 		m.walTotalReplayDuration,
@@ -181,6 +197,15 @@ func (m *dbMetrics) Unregister() {
 		m.numActiveSeries,
 		m.numWALSeriesPendingDeletion,
 		m.totalAppendedSamples,
+		m.totalAppendedExemplars,
+		m.totalOutOfOrderSamples,
+		m.walTruncateDuration,
+		m.walCorruptionsTotal,
+		m.walTotalReplayDuration,
+		m.checkpointDeleteFail,
+		m.checkpointDeleteTotal,
+		m.checkpointCreationFail,
+		m.checkpointCreationTotal,
 	}
 	for _, c := range cs {
 		m.r.Unregister(c)
@@ -261,6 +286,7 @@ func Open(l log.Logger, reg prometheus.Registerer, rs *remote.Storage, dir strin
 		DB:             db,
 		pendingSeries:  make([]record.RefSeries, 0, 100),
 		pendingSamples: make([]record.RefSample, 0, 100),
+		pendingExamplars: make([]record.RefExemplar, 0, 10),
 	}
 }

@@ -412,11 +438,8 @@ func (db *DB) loadWAL(r *wal.Reader, multiRef map[chunks.HeadSeriesRef]chunks.He
 				return
 			}
 			decoded <- samples
-		case record.Tombstones:
-			// We don't care about tombstones
-			continue
-		case record.Exemplars:
-			// We don't care about exemplars
+		case record.Tombstones, record.Exemplars:
+			// We don't care about tombstones or exemplars during replay.
 			continue
 		default:
 			errCh <- &wal.CorruptionErr{
@@ -668,23 +691,19 @@ type appender struct {

 	pendingSeries  []record.RefSeries
 	pendingSamples []record.RefSample
+	pendingExamplars []record.RefExemplar
+
+	// Pointers to the series referenced by each element of pendingSamples.
+	// Series lock is not held on elements.
+	sampleSeries []*memSeries
 }

 func (a *appender) Append(ref storage.SeriesRef, l labels.Labels, t int64, v float64) (storage.SeriesRef, error) {
-	if ref == 0 {
-		r, err := a.Add(l, t, v)
-		return storage.SeriesRef(r), err
-	}
-	return ref, a.AddFast(chunks.HeadSeriesRef(ref), t, v)
-}
-
-func (a *appender) Add(l labels.Labels, t int64, v float64) (chunks.HeadSeriesRef, error) {
-	hash := l.Hash()
-	series := a.series.GetByHash(hash, l)
-	if series != nil {
-		return series.ref, a.AddFast(series.ref, t, v)
-	}
-
+	// series references and chunk references are identical for agent mode.
+	headRef := chunks.HeadSeriesRef(ref)
+
+	series := a.series.GetByID(headRef)
+	if series == nil {
 		// Ensure no empty or duplicate labels have gotten through. This mirrors the
 		// equivalent validation code in the TSDB's headAppender.
 		l = l.WithoutEmpty()
@@ -696,52 +715,88 @@ func (a *appender) Add(l labels.Labels, t int64, v float64) (chunks.HeadSeriesRe
 			return 0, errors.Wrap(tsdb.ErrInvalidSample, fmt.Sprintf(`label name "%s" is not unique`, lbl))
 		}

-	ref := chunks.HeadSeriesRef(a.nextRef.Inc())
-	series = &memSeries{ref: ref, lset: l, lastTs: t}
+		var created bool
+		series, created = a.getOrCreate(l)
+		if created {
 			a.pendingSeries = append(a.pendingSeries, record.RefSeries{
-		Ref:    ref,
+				Ref:    series.ref,
 				Labels: l,
 			})
-	a.pendingSamples = append(a.pendingSamples, record.RefSample{
-		Ref: ref,
-		T:   t,
-		V:   v,
-	})
-
-	a.series.Set(hash, series)

 			a.metrics.numActiveSeries.Inc()
-	a.metrics.totalAppendedSamples.Inc()
-
-	return series.ref, nil
-}
-
-func (a *appender) AddFast(ref chunks.HeadSeriesRef, t int64, v float64) error {
-	series := a.series.GetByID(ref)
-	if series == nil {
-		return storage.ErrNotFound
 		}
+	}

 	series.Lock()
 	defer series.Unlock()

-	// Update last recorded timestamp. Used by Storage.gc to determine if a
-	// series is dead.
-	series.lastTs = t
+	if t < series.lastTs {
+		a.metrics.totalOutOfOrderSamples.Inc()
+		return 0, storage.ErrOutOfOrderSample
+	}

+	// NOTE: always modify pendingSamples and sampleSeries together
 	a.pendingSamples = append(a.pendingSamples, record.RefSample{
-		Ref: ref,
+		Ref: series.ref,
 		T:   t,
 		V:   v,
 	})
+	a.sampleSeries = append(a.sampleSeries, series)

 	a.metrics.totalAppendedSamples.Inc()
-	return nil
+	return storage.SeriesRef(series.ref), nil
+}
+
+func (a *appender) getOrCreate(l labels.Labels) (series *memSeries, created bool) {
+	hash := l.Hash()
+
+	series = a.series.GetByHash(hash, l)
+	if series != nil {
+		return series, false
+	}
+
+	ref := chunks.HeadSeriesRef(a.nextRef.Inc())
+	series = &memSeries{ref: ref, lset: l, lastTs: math.MinInt64}
+	a.series.Set(hash, series)
+	return series, true
 }

 func (a *appender) AppendExemplar(ref storage.SeriesRef, l labels.Labels, e exemplar.Exemplar) (storage.SeriesRef, error) {
-	// remote_write doesn't support exemplars yet, so do nothing here.
-	return 0, nil
+	// series references and chunk references are identical for agent mode.
+	headRef := chunks.HeadSeriesRef(ref)
+
+	s := a.series.GetByID(headRef)
+	if s == nil {
+		return 0, fmt.Errorf("unknown series ref when trying to add exemplar: %d", ref)
+	}
+
+	// Ensure no empty labels have gotten through.
+	e.Labels = e.Labels.WithoutEmpty()
+
+	if lbl, dup := e.Labels.HasDuplicateLabelNames(); dup {
+		return 0, errors.Wrap(tsdb.ErrInvalidExemplar, fmt.Sprintf(`label name "%s" is not unique`, lbl))
+	}
+
+	// Exemplar label length does not include chars involved in text rendering such as quotes
+	// equals sign, or commas. See definition of const ExemplarMaxLabelLength.
+	labelSetLen := 0
+	for _, l := range e.Labels {
+		labelSetLen += utf8.RuneCountInString(l.Name)
+		labelSetLen += utf8.RuneCountInString(l.Value)
+
+		if labelSetLen > exemplar.ExemplarMaxLabelSetLength {
+			return 0, storage.ErrExemplarLabelLength
+		}
+	}
+
+	a.pendingExamplars = append(a.pendingExamplars, record.RefExemplar{
+		Ref:    s.ref,
+		T:      e.Ts,
+		V:      e.Value,
+		Labels: e.Labels,
+	})
+
+	return storage.SeriesRef(s.ref), nil
 }

 func (a *appender) AppendHistogram(ref storage.SeriesRef, l labels.Labels, t int64, h *histogram.Histogram) (storage.SeriesRef, error) {
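Taken together, the appender changes mean a caller now gets a real series reference back from Append and can attach exemplars to it before committing; out-of-order samples are rejected up front. A minimal caller-side sketch (assumed usage against the generic storage.Appendable interface; names and structure are illustrative, not code from this commit):

    package agentexample

    import (
    	"context"

    	"github.com/prometheus/prometheus/model/exemplar"
    	"github.com/prometheus/prometheus/model/labels"
    	"github.com/prometheus/prometheus/storage"
    )

    // writeSample appends one sample plus an exemplar and commits them. Any
    // storage.Appendable works here, e.g. the agent DB returned by agent.Open.
    func writeSample(ctx context.Context, db storage.Appendable, ts int64, v float64) error {
    	app := db.Appender(ctx)

    	lset := labels.FromStrings("__name__", "demo_metric", "job", "test")
    	ref, err := app.Append(0, lset, ts, v)
    	if err != nil {
    		_ = app.Rollback()
    		return err
    	}

    	// Exemplars are now persisted too; before this change the agent dropped them.
    	e := exemplar.Exemplar{Labels: labels.FromStrings("trace_id", "abc123"), Ts: ts, Value: v, HasTs: true}
    	if _, err := app.AppendExemplar(ref, lset, e); err != nil {
    		_ = app.Rollback()
    		return err
    	}

    	return app.Commit()
    }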
@@ -773,6 +828,22 @@ func (a *appender) Commit() error {
 		buf = buf[:0]
 	}

+	if len(a.pendingExamplars) > 0 {
+		buf = encoder.Exemplars(a.pendingExamplars, buf)
+		if err := a.wal.Log(buf); err != nil {
+			return err
+		}
+		buf = buf[:0]
+	}
+
+	var series *memSeries
+	for i, s := range a.pendingSamples {
+		series = a.sampleSeries[i]
+		if !series.updateTimestamp(s.T) {
+			a.metrics.totalOutOfOrderSamples.Inc()
+		}
+	}
+
 	//nolint:staticcheck
 	a.bufPool.Put(buf)
 	return a.Rollback()
@@ -781,6 +852,8 @@
 func (a *appender) Rollback() error {
 	a.pendingSeries = a.pendingSeries[:0]
 	a.pendingSamples = a.pendingSamples[:0]
+	a.pendingExamplars = a.pendingExamplars[:0]
+	a.sampleSeries = a.sampleSeries[:0]
 	a.appenderPool.Put(a)
 	return nil
 }
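Commit batches pendingExamplars into a single WAL record via encoder.Exemplars before handing the buffer to a.wal.Log. A small round-trip sketch of that record encoding (assuming the tsdb/record Encoder/Decoder API that the tests below also use):

    package main

    import (
    	"fmt"

    	"github.com/prometheus/prometheus/model/labels"
    	"github.com/prometheus/prometheus/tsdb/chunks"
    	"github.com/prometheus/prometheus/tsdb/record"
    )

    func main() {
    	// One pending exemplar, shaped like the entries AppendExemplar queues up.
    	pending := []record.RefExemplar{{
    		Ref:    chunks.HeadSeriesRef(1),
    		T:      10,
    		V:      20,
    		Labels: labels.FromStrings("trace_id", "abc123"),
    	}}

    	var enc record.Encoder
    	rec := enc.Exemplars(pending, nil) // the buffer that Commit would pass to a.wal.Log

    	var dec record.Decoder
    	fmt.Println(dec.Type(rec) == record.Exemplars) // true
    	out, err := dec.Exemplars(rec, nil)
    	fmt.Println(len(out), err) // 1 <nil>
    }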
@@ -15,8 +15,8 @@ package agent

 import (
 	"context"
+	"path/filepath"
 	"strconv"
-	"sync"
 	"testing"
 	"time"

@@ -26,25 +26,70 @@ import (
 	dto "github.com/prometheus/client_model/go"
 	"github.com/stretchr/testify/require"

+	"github.com/prometheus/prometheus/model/exemplar"
 	"github.com/prometheus/prometheus/model/labels"
+	"github.com/prometheus/prometheus/storage"
 	"github.com/prometheus/prometheus/storage/remote"
+	"github.com/prometheus/prometheus/tsdb"
 	"github.com/prometheus/prometheus/tsdb/record"
 	"github.com/prometheus/prometheus/tsdb/tsdbutil"
 	"github.com/prometheus/prometheus/tsdb/wal"
 	"github.com/prometheus/prometheus/util/testutil"
 )

-func TestUnsupported(t *testing.T) {
-	promAgentDir := t.TempDir()
-
-	opts := DefaultOptions()
-	logger := log.NewNopLogger()
-
-	s, err := Open(logger, prometheus.NewRegistry(), nil, promAgentDir, opts)
+func TestDB_InvalidSeries(t *testing.T) {
+	s := createTestAgentDB(t, nil, DefaultOptions())
+	defer s.Close()
+
+	app := s.Appender(context.Background())
+
+	t.Run("Samples", func(t *testing.T) {
+		_, err := app.Append(0, labels.Labels{}, 0, 0)
+		require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject empty labels")
+
+		_, err = app.Append(0, labels.Labels{{Name: "a", Value: "1"}, {Name: "a", Value: "2"}}, 0, 0)
+		require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject duplicate labels")
+	})
+
+	t.Run("Exemplars", func(t *testing.T) {
+		sRef, err := app.Append(0, labels.Labels{{Name: "a", Value: "1"}}, 0, 0)
+		require.NoError(t, err, "should not reject valid series")
+
+		_, err = app.AppendExemplar(0, nil, exemplar.Exemplar{})
+		require.EqualError(t, err, "unknown series ref when trying to add exemplar: 0")
+
+		e := exemplar.Exemplar{Labels: labels.Labels{{Name: "a", Value: "1"}, {Name: "a", Value: "2"}}}
+		_, err = app.AppendExemplar(sRef, nil, e)
+		require.ErrorIs(t, err, tsdb.ErrInvalidExemplar, "should reject duplicate labels")
+
+		e = exemplar.Exemplar{Labels: labels.Labels{{Name: "a_somewhat_long_trace_id", Value: "nYJSNtFrFTY37VR7mHzEE/LIDt7cdAQcuOzFajgmLDAdBSRHYPDzrxhMA4zz7el8naI/AoXFv9/e/G0vcETcIoNUi3OieeLfaIRQci2oa"}}}
+		_, err = app.AppendExemplar(sRef, nil, e)
+		require.ErrorIs(t, err, storage.ErrExemplarLabelLength, "should reject too long label length")
+
+		// Inverse check
+		e = exemplar.Exemplar{Labels: labels.Labels{{Name: "a", Value: "1"}}, Value: 20, Ts: 10, HasTs: true}
+		_, err = app.AppendExemplar(sRef, nil, e)
+		require.NoError(t, err, "should not reject valid exemplars")
+	})
+}
+
+func createTestAgentDB(t *testing.T, reg prometheus.Registerer, opts *Options) *DB {
+	t.Helper()
+
+	dbDir := t.TempDir()
+	rs := remote.NewStorage(log.NewNopLogger(), reg, startTime, dbDir, time.Second*30, nil)
+	t.Cleanup(func() {
+		require.NoError(t, rs.Close())
+	})
+
+	db, err := Open(log.NewNopLogger(), reg, rs, dbDir, opts)
 	require.NoError(t, err)
-	defer func() {
-		require.NoError(t, s.Close())
-	}()
+	return db
+}
+
+func TestUnsupportedFunctions(t *testing.T) {
+	s := createTestAgentDB(t, nil, DefaultOptions())
+	defer s.Close()

 	t.Run("Querier", func(t *testing.T) {
 		_, err := s.Querier(context.TODO(), 0, 0)
@@ -68,93 +113,74 @@ func TestCommit(t *testing.T) {
 		numSeries     = 8
 	)

-	promAgentDir := t.TempDir()
+	s := createTestAgentDB(t, nil, DefaultOptions())
+	app := s.Appender(context.TODO())

 	lbls := labelsForTest(t.Name(), numSeries)
-	opts := DefaultOptions()
-	logger := log.NewNopLogger()
-	reg := prometheus.NewRegistry()
-	remoteStorage := remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
-	defer func(rs *remote.Storage) {
-		require.NoError(t, rs.Close())
-	}(remoteStorage)
-
-	s, err := Open(logger, reg, remoteStorage, promAgentDir, opts)
-	require.NoError(t, err)
-
-	a := s.Appender(context.TODO())
-
 	for _, l := range lbls {
 		lset := labels.New(l...)

 		for i := 0; i < numDatapoints; i++ {
 			sample := tsdbutil.GenerateSamples(0, 1)
-			_, err := a.Append(0, lset, sample[0].T(), sample[0].V())
+			ref, err := app.Append(0, lset, sample[0].T(), sample[0].V())
+			require.NoError(t, err)
+
+			e := exemplar.Exemplar{
+				Labels: lset,
+				Ts:     sample[0].T(),
+				Value:  sample[0].V(),
+				HasTs:  true,
+			}
+			_, err = app.AppendExemplar(ref, lset, e)
 			require.NoError(t, err)
 		}
 	}

-	require.NoError(t, a.Commit())
+	require.NoError(t, app.Commit())
 	require.NoError(t, s.Close())

-	// Read records from WAL and check for expected count of series and samples.
-	walSeriesCount := 0
-	walSamplesCount := 0
-
-	reg = prometheus.NewRegistry()
-	remoteStorage = remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
-	defer func() {
-		require.NoError(t, remoteStorage.Close())
-	}()
-
-	s1, err := Open(logger, nil, remoteStorage, promAgentDir, opts)
-	require.NoError(t, err)
-	defer func() {
-		require.NoError(t, s1.Close())
-	}()
-
-	var dec record.Decoder
-
-	if err == nil {
-		sr, err := wal.NewSegmentsReader(s1.wal.Dir())
+	sr, err := wal.NewSegmentsReader(s.wal.Dir())
 	require.NoError(t, err)
 	defer func() {
 		require.NoError(t, sr.Close())
 	}()

-		r := wal.NewReader(sr)
-		seriesPool := sync.Pool{
-			New: func() interface{} {
-				return []record.RefSeries{}
-			},
-		}
-		samplesPool := sync.Pool{
-			New: func() interface{} {
-				return []record.RefSample{}
-			},
-		}
+	// Read records from WAL and check for expected count of series, samples, and exemplars.
+	var (
+		r   = wal.NewReader(sr)
+		dec record.Decoder

+		walSeriesCount, walSamplesCount, walExemplarsCount int
+	)
 	for r.Next() {
 		rec := r.Record()
 		switch dec.Type(rec) {
 		case record.Series:
-			series := seriesPool.Get().([]record.RefSeries)[:0]
-			series, _ = dec.Series(rec, series)
+			var series []record.RefSeries
+			series, err = dec.Series(rec, series)
+			require.NoError(t, err)
 			walSeriesCount += len(series)

 		case record.Samples:
-			samples := samplesPool.Get().([]record.RefSample)[:0]
-			samples, _ = dec.Samples(rec, samples)
+			var samples []record.RefSample
+			samples, err = dec.Samples(rec, samples)
+			require.NoError(t, err)
 			walSamplesCount += len(samples)

+		case record.Exemplars:
+			var exemplars []record.RefExemplar
+			exemplars, err = dec.Exemplars(rec, exemplars)
+			require.NoError(t, err)
+			walExemplarsCount += len(exemplars)
+
 		default:
 		}
 	}
-	}

-	// Retrieved series count from WAL should match the count of series been added to the WAL.
-	require.Equal(t, walSeriesCount, numSeries)
-
-	// Retrieved samples count from WAL should match the count of samples been added to the WAL.
-	require.Equal(t, walSamplesCount, numSeries*numDatapoints)
+	// Check that the WAL contained the same number of commited series/samples/exemplars.
+	require.Equal(t, numSeries, walSeriesCount, "unexpected number of series")
+	require.Equal(t, numSeries*numDatapoints, walSamplesCount, "unexpected number of samples")
+	require.Equal(t, numSeries*numDatapoints, walExemplarsCount, "unexpected number of exemplars")
 }

 func TestRollback(t *testing.T) {
@@ -163,93 +189,68 @@ func TestRollback(t *testing.T) {
 		numSeries     = 8
 	)

-	promAgentDir := t.TempDir()
+	s := createTestAgentDB(t, nil, DefaultOptions())
+	app := s.Appender(context.TODO())

 	lbls := labelsForTest(t.Name(), numSeries)
-	opts := DefaultOptions()
-	logger := log.NewNopLogger()
-	reg := prometheus.NewRegistry()
-	remoteStorage := remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
-	defer func(rs *remote.Storage) {
-		require.NoError(t, rs.Close())
-	}(remoteStorage)
-
-	s, err := Open(logger, reg, remoteStorage, promAgentDir, opts)
-	require.NoError(t, err)
-
-	a := s.Appender(context.TODO())
-
 	for _, l := range lbls {
 		lset := labels.New(l...)

 		for i := 0; i < numDatapoints; i++ {
 			sample := tsdbutil.GenerateSamples(0, 1)
-			_, err := a.Append(0, lset, sample[0].T(), sample[0].V())
+			_, err := app.Append(0, lset, sample[0].T(), sample[0].V())
 			require.NoError(t, err)
 		}
 	}

-	require.NoError(t, a.Rollback())
+	// Do a rollback, which should clear uncommitted data. A followup call to
+	// commit should persist nothing to the WAL.
+	require.NoError(t, app.Rollback())
+	require.NoError(t, app.Commit())
 	require.NoError(t, s.Close())

-	// Read records from WAL and check for expected count of series and samples.
-	walSeriesCount := 0
-	walSamplesCount := 0
-
-	reg = prometheus.NewRegistry()
-	remoteStorage = remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
-	defer func() {
-		require.NoError(t, remoteStorage.Close())
-	}()
-
-	s1, err := Open(logger, nil, remoteStorage, promAgentDir, opts)
-	require.NoError(t, err)
-	defer func() {
-		require.NoError(t, s1.Close())
-	}()
-
-	var dec record.Decoder
-
-	if err == nil {
-		sr, err := wal.NewSegmentsReader(s1.wal.Dir())
+	sr, err := wal.NewSegmentsReader(s.wal.Dir())
 	require.NoError(t, err)
 	defer func() {
 		require.NoError(t, sr.Close())
 	}()

-		r := wal.NewReader(sr)
-		seriesPool := sync.Pool{
-			New: func() interface{} {
-				return []record.RefSeries{}
-			},
-		}
-		samplesPool := sync.Pool{
-			New: func() interface{} {
-				return []record.RefSample{}
-			},
-		}
+	// Read records from WAL and check for expected count of series and samples.
+	var (
+		r   = wal.NewReader(sr)
+		dec record.Decoder

+		walSeriesCount, walSamplesCount, walExemplarsCount int
+	)
 	for r.Next() {
 		rec := r.Record()
 		switch dec.Type(rec) {
 		case record.Series:
-			series := seriesPool.Get().([]record.RefSeries)[:0]
-			series, _ = dec.Series(rec, series)
+			var series []record.RefSeries
+			series, err = dec.Series(rec, series)
+			require.NoError(t, err)
 			walSeriesCount += len(series)

 		case record.Samples:
-			samples := samplesPool.Get().([]record.RefSample)[:0]
-			samples, _ = dec.Samples(rec, samples)
+			var samples []record.RefSample
+			samples, err = dec.Samples(rec, samples)
+			require.NoError(t, err)
 			walSamplesCount += len(samples)

+		case record.Exemplars:
+			var exemplars []record.RefExemplar
+			exemplars, err = dec.Exemplars(rec, exemplars)
+			require.NoError(t, err)
+			walExemplarsCount += len(exemplars)
+
 		default:
 		}
 	}
-	}

-	// Retrieved series count from WAL should be zero.
-	require.Equal(t, walSeriesCount, 0)
-
-	// Retrieved samples count from WAL should be zero.
-	require.Equal(t, walSamplesCount, 0)
+	// Check that the rollback ensured nothing got stored.
+	require.Equal(t, 0, walSeriesCount, "series should not have been written to WAL")
+	require.Equal(t, 0, walSamplesCount, "samples should not have been written to WAL")
+	require.Equal(t, 0, walExemplarsCount, "exemplars should not have been written to WAL")
 }

 func TestFullTruncateWAL(t *testing.T) {
@@ -259,34 +260,25 @@ func TestFullTruncateWAL(t *testing.T) {
 		lastTs        = 500
 	)

-	promAgentDir := t.TempDir()
-
-	lbls := labelsForTest(t.Name(), numSeries)
+	reg := prometheus.NewRegistry()
 	opts := DefaultOptions()
 	opts.TruncateFrequency = time.Minute * 2
-	logger := log.NewNopLogger()
-	reg := prometheus.NewRegistry()
-	remoteStorage := remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
-	defer func() {
-		require.NoError(t, remoteStorage.Close())
-	}()
-
-	s, err := Open(logger, reg, remoteStorage, promAgentDir, opts)
-	require.NoError(t, err)
+	s := createTestAgentDB(t, reg, opts)
 	defer func() {
 		require.NoError(t, s.Close())
 	}()
+	app := s.Appender(context.TODO())

-	a := s.Appender(context.TODO())
-
+	lbls := labelsForTest(t.Name(), numSeries)
 	for _, l := range lbls {
 		lset := labels.New(l...)

 		for i := 0; i < numDatapoints; i++ {
-			_, err := a.Append(0, lset, int64(lastTs), 0)
+			_, err := app.Append(0, lset, int64(lastTs), 0)
 			require.NoError(t, err)
 		}
-		require.NoError(t, a.Commit())
+		require.NoError(t, app.Commit())
 	}

 	// Truncate WAL with mint to GC all the samples.
@@ -302,52 +294,40 @@ func TestPartialTruncateWAL(t *testing.T) {
 		numSeries     = 800
 	)

-	promAgentDir := t.TempDir()
-
 	opts := DefaultOptions()
 	opts.TruncateFrequency = time.Minute * 2
-	logger := log.NewNopLogger()
-	reg := prometheus.NewRegistry()
-	remoteStorage := remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
-	defer func() {
-		require.NoError(t, remoteStorage.Close())
-	}()
-
-	s, err := Open(logger, reg, remoteStorage, promAgentDir, opts)
-	require.NoError(t, err)
+	reg := prometheus.NewRegistry()
+	s := createTestAgentDB(t, reg, opts)
 	defer func() {
 		require.NoError(t, s.Close())
 	}()
+	app := s.Appender(context.TODO())

-	a := s.Appender(context.TODO())
-
-	var lastTs int64
-
 	// Create first batch of 800 series with 1000 data-points with a fixed lastTs as 500.
-	lastTs = 500
+	var lastTs int64 = 500
 	lbls := labelsForTest(t.Name()+"batch-1", numSeries)
 	for _, l := range lbls {
 		lset := labels.New(l...)

 		for i := 0; i < numDatapoints; i++ {
-			_, err := a.Append(0, lset, lastTs, 0)
+			_, err := app.Append(0, lset, lastTs, 0)
 			require.NoError(t, err)
 		}
-		require.NoError(t, a.Commit())
+		require.NoError(t, app.Commit())
 	}

 	// Create second batch of 800 series with 1000 data-points with a fixed lastTs as 600.
 	lastTs = 600
 	lbls = labelsForTest(t.Name()+"batch-2", numSeries)
 	for _, l := range lbls {
 		lset := labels.New(l...)

 		for i := 0; i < numDatapoints; i++ {
-			_, err := a.Append(0, lset, lastTs, 0)
+			_, err := app.Append(0, lset, lastTs, 0)
 			require.NoError(t, err)
 		}
-		require.NoError(t, a.Commit())
+		require.NoError(t, app.Commit())
 	}

 	// Truncate WAL with mint to GC only the first batch of 800 series and retaining 2nd batch of 800 series.
|
||||||
lastTs = 500
|
lastTs = 500
|
||||||
)
|
)
|
||||||
|
|
||||||
promAgentDir := t.TempDir()
|
s := createTestAgentDB(t, nil, DefaultOptions())
|
||||||
|
app := s.Appender(context.TODO())
|
||||||
|
|
||||||
lbls := labelsForTest(t.Name(), numSeries)
|
lbls := labelsForTest(t.Name(), numSeries)
|
||||||
opts := DefaultOptions()
|
|
||||||
|
|
||||||
logger := log.NewNopLogger()
|
|
||||||
reg := prometheus.NewRegistry()
|
|
||||||
remoteStorage := remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
|
|
||||||
defer func() {
|
|
||||||
require.NoError(t, remoteStorage.Close())
|
|
||||||
}()
|
|
||||||
|
|
||||||
s, err := Open(logger, reg, remoteStorage, promAgentDir, opts)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
a := s.Appender(context.TODO())
|
|
||||||
|
|
||||||
for _, l := range lbls {
|
for _, l := range lbls {
|
||||||
lset := labels.New(l...)
|
lset := labels.New(l...)
|
||||||
|
|
||||||
for i := 0; i < numDatapoints; i++ {
|
for i := 0; i < numDatapoints; i++ {
|
||||||
_, err := a.Append(0, lset, lastTs, 0)
|
_, err := app.Append(0, lset, lastTs, 0)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
require.NoError(t, a.Commit())
|
require.NoError(t, app.Commit())
|
||||||
require.NoError(t, s.Close())
|
require.NoError(t, s.Close())
|
||||||
|
|
||||||
restartOpts := DefaultOptions()
|
// Hack: s.wal.Dir() is the /wal subdirectory of the original storage path.
|
||||||
restartLogger := log.NewNopLogger()
|
// We need the original directory so we can recreate the storage for replay.
|
||||||
restartReg := prometheus.NewRegistry()
|
storageDir := filepath.Dir(s.wal.Dir())
|
||||||
|
|
||||||
// Open a new DB with the same WAL to check that series from the previous DB
|
reg := prometheus.NewRegistry()
|
||||||
// get replayed.
|
replayStorage, err := Open(s.logger, reg, nil, storageDir, s.opts)
|
||||||
replayDB, err := Open(restartLogger, restartReg, nil, promAgentDir, restartOpts)
|
if err != nil {
|
||||||
require.NoError(t, err)
|
t.Fatalf("unable to create storage for the agent: %v", err)
|
||||||
|
}
|
||||||
defer func() {
|
defer func() {
|
||||||
require.NoError(t, replayDB.Close())
|
require.NoError(t, replayStorage.Close())
|
||||||
}()
|
}()
|
||||||
|
|
||||||
// Check if all the series are retrieved back from the WAL.
|
// Check if all the series are retrieved back from the WAL.
|
||||||
m := gatherFamily(t, restartReg, "prometheus_agent_active_series")
|
m := gatherFamily(t, reg, "prometheus_agent_active_series")
|
||||||
require.Equal(t, float64(numSeries), m.Metric[0].Gauge.GetValue(), "agent wal replay mismatch of active series count")
|
require.Equal(t, float64(numSeries), m.Metric[0].Gauge.GetValue(), "agent wal replay mismatch of active series count")
|
||||||
|
|
||||||
// Check if lastTs of the samples retrieved from the WAL is retained.
|
// Check if lastTs of the samples retrieved from the WAL is retained.
|
||||||
metrics := replayDB.series.series
|
metrics := replayStorage.series.series
|
||||||
for i := 0; i < len(metrics); i++ {
|
for i := 0; i < len(metrics); i++ {
|
||||||
mp := metrics[i]
|
mp := metrics[i]
|
||||||
for _, v := range mp {
|
for _, v := range mp {
|
||||||
|
|
|
@@ -26,9 +26,24 @@ type memSeries struct {

 	ref  chunks.HeadSeriesRef
 	lset labels.Labels
+
+	// Last recorded timestamp. Used by Storage.gc to determine if a series is
+	// stale.
 	lastTs int64
 }

+// updateTimestamp obtains the lock on s and will attempt to update lastTs.
+// fails if newTs < lastTs.
+func (m *memSeries) updateTimestamp(newTs int64) bool {
+	m.Lock()
+	defer m.Unlock()
+	if newTs >= m.lastTs {
+		m.lastTs = newTs
+		return true
+	}
+	return false
+}
+
 // seriesHashmap is a simple hashmap for memSeries by their label set.
 // It is built on top of a regular hashmap and holds a slice of series to
 // resolve hash collisions. Its methods require the hash to be submitted
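updateTimestamp is what Commit uses above to detect out-of-order samples after the WAL write. A stand-alone sketch of the same contract (simplified stand-in type, not the agent's actual memSeries):

    package main

    import (
    	"fmt"
    	"math"
    	"sync"
    )

    // memSeries here is a minimal stand-in: a lock plus the last accepted timestamp.
    type memSeries struct {
    	sync.Mutex
    	lastTs int64
    }

    // updateTimestamp only moves lastTs forward; equal timestamps are accepted,
    // older ones are rejected so the caller can count them as out of order.
    func (m *memSeries) updateTimestamp(newTs int64) bool {
    	m.Lock()
    	defer m.Unlock()
    	if newTs >= m.lastTs {
    		m.lastTs = newTs
    		return true
    	}
    	return false
    }

    func main() {
    	s := &memSeries{lastTs: math.MinInt64} // new series start at MinInt64, as in getOrCreate
    	fmt.Println(s.updateTimestamp(100))    // true: first sample
    	fmt.Println(s.updateTimestamp(100))    // true: equal timestamp is still accepted
    	fmt.Println(s.updateTimestamp(50))     // false: out of order
    }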
@@ -46,7 +46,7 @@
 		"@types/chai": "^4.2.22",
 		"@types/lru-cache": "^5.1.0",
 		"@types/mocha": "^9.0.0",
-		"@types/node": "^16.11.9",
+		"@types/node": "^16.11.10",
 		"@typescript-eslint/eslint-plugin": "^5.3.1",
 		"@typescript-eslint/parser": "^5.3.1",
 		"chai": "^4.2.0",
web/ui/package-lock.json (generated)
@@ -32,7 +32,7 @@
 		"@types/chai": "^4.2.22",
 		"@types/lru-cache": "^5.1.0",
 		"@types/mocha": "^9.0.0",
-		"@types/node": "^16.11.9",
+		"@types/node": "^16.11.10",
 		"@typescript-eslint/eslint-plugin": "^5.3.1",
 		"@typescript-eslint/parser": "^5.3.1",
 		"chai": "^4.2.0",
@@ -1775,9 +1775,9 @@
 			"license": "MIT"
 		},
 		"node_modules/@types/node": {
-			"version": "16.11.9",
-			"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.9.tgz",
-			"integrity": "sha512-MKmdASMf3LtPzwLyRrFjtFFZ48cMf8jmX5VRYrDQiJa8Ybu5VAmkqBWqKU8fdCwD8ysw4mQ9nrEHvzg6gunR7A==",
+			"version": "16.11.10",
+			"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.10.tgz",
+			"integrity": "sha512-3aRnHa1KlOEEhJ6+CvyHKK5vE9BcLGjtUpwvqYLRvYNQKMfabu3BwfJaA/SLW8dxe28LsNDjtHwePTuzn3gmOA==",
 			"dev": true
 		},
 		"node_modules/@types/prop-types": {
@@ -7252,7 +7252,7 @@
 			"@types/flot": "0.0.32",
 			"@types/jest": "^27.0.3",
 			"@types/jquery": "^3.5.9",
-			"@types/node": "^16.11.9",
+			"@types/node": "^16.11.10",
 			"@types/react": "^17.0.36",
 			"@types/react-copy-to-clipboard": "^5.0.2",
 			"@types/react-dom": "^17.0.11",
@@ -27808,9 +27808,9 @@
 			"dev": true
 		},
 		"@types/node": {
-			"version": "16.11.9",
-			"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.9.tgz",
-			"integrity": "sha512-MKmdASMf3LtPzwLyRrFjtFFZ48cMf8jmX5VRYrDQiJa8Ybu5VAmkqBWqKU8fdCwD8ysw4mQ9nrEHvzg6gunR7A==",
+			"version": "16.11.10",
+			"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.10.tgz",
+			"integrity": "sha512-3aRnHa1KlOEEhJ6+CvyHKK5vE9BcLGjtUpwvqYLRvYNQKMfabu3BwfJaA/SLW8dxe28LsNDjtHwePTuzn3gmOA==",
 			"dev": true
 		},
 		"@types/prop-types": {
@@ -28413,7 +28413,7 @@
 		"@types/chai": "^4.2.22",
 		"@types/lru-cache": "^5.1.0",
 		"@types/mocha": "^9.0.0",
-		"@types/node": "^16.11.9",
+		"@types/node": "^16.11.10",
 		"@typescript-eslint/eslint-plugin": "^5.3.1",
 		"@typescript-eslint/parser": "^5.3.1",
 		"chai": "^4.2.0",
@@ -29645,7 +29645,7 @@
 		"@types/flot": "0.0.32",
 		"@types/jest": "^27.0.3",
 		"@types/jquery": "^3.5.9",
-		"@types/node": "^16.11.9",
+		"@types/node": "^16.11.10",
 		"@types/react": "^17.0.36",
 		"@types/react-copy-to-clipboard": "^5.0.2",
 		"@types/react-dom": "^17.0.11",
@@ -69,7 +69,7 @@
 		"@types/flot": "0.0.32",
 		"@types/jest": "^27.0.3",
 		"@types/jquery": "^3.5.9",
-		"@types/node": "^16.11.9",
+		"@types/node": "^16.11.10",
 		"@types/react": "^17.0.36",
 		"@types/react-copy-to-clipboard": "^5.0.2",
 		"@types/react-dom": "^17.0.11",
@@ -15,10 +15,11 @@
       It will render a "Consoles" link in the navbar when it is non-empty.
     - PROMETHEUS_AGENT_MODE is replaced by a boolean indicating if Prometheus is running in agent mode.
       It true, it will disable querying capacities in the UI and generally adapt the UI to the agent mode.
+      It has to be represented as a string, because booleans can be mangled to !1 in production builds.
    -->
    <script>
      const GLOBAL_CONSOLES_LINK='CONSOLES_LINK_PLACEHOLDER';
-      const GLOBAL_PROMETHEUS_AGENT_MODE=PROMETHEUS_AGENT_MODE_PLACEHOLDER;
+      const GLOBAL_AGENT_MODE='AGENT_MODE_PLACEHOLDER';
    </script>

    <!--
@@ -5,6 +5,7 @@ import Navigation from './Navbar';
 import { Container } from 'reactstrap';
 import { Route } from 'react-router-dom';
 import {
+  AgentPage,
   AlertsPage,
   ConfigPage,
   FlagsPage,
@@ -24,6 +25,7 @@ describe('App', () => {
   });
   it('routes', () => {
     [
+      AgentPage,
      AlertsPage,
      ConfigPage,
      FlagsPage,
@@ -37,7 +39,7 @@ describe('App', () => {
       const c = app.find(component);
       expect(c).toHaveLength(1);
     });
-    expect(app.find(Route)).toHaveLength(9);
+    expect(app.find(Route)).toHaveLength(10);
     expect(app.find(Container)).toHaveLength(1);
   });
 });
@@ -4,6 +4,7 @@ import { Container } from 'reactstrap';

 import { BrowserRouter as Router, Redirect, Switch, Route } from 'react-router-dom';
 import {
+  AgentPage,
   AlertsPage,
   ConfigPage,
   FlagsPage,
@@ -22,14 +23,16 @@ import useMedia from './hooks/useMedia';

 interface AppProps {
   consolesLink: string | null;
+  agentMode: boolean;
 }

-const App: FC<AppProps> = ({ consolesLink }) => {
+const App: FC<AppProps> = ({ consolesLink, agentMode }) => {
   // This dynamically/generically determines the pathPrefix by stripping the first known
   // endpoint suffix from the window location path. It works out of the box for both direct
   // hosting and reverse proxy deployments with no additional configurations required.
   let basePath = window.location.pathname;
   const paths = [
+    '/agent',
     '/graph',
     '/alerts',
     '/status',
@@ -70,14 +73,17 @@ const App: FC<AppProps> = ({ consolesLink }) => {
       <Theme />
       <PathPrefixContext.Provider value={basePath}>
         <Router basename={basePath}>
-          <Navigation consolesLink={consolesLink} />
+          <Navigation consolesLink={consolesLink} agentMode={agentMode} />
           <Container fluid style={{ paddingTop: 70 }}>
             <Switch>
-              <Redirect exact from="/" to={`graph`} />
+              <Redirect exact from="/" to={agentMode ? '/agent' : '/graph'} />
               {/*
                 NOTE: Any route added here needs to also be added to the list of
                 React-handled router paths ("reactRouterPaths") in /web/web.go.
               */}
+              <Route path="/agent">
+                <AgentPage />
+              </Route>
               <Route path="/graph">
                 <PanelListPage />
               </Route>
@@ -17,17 +17,18 @@ import { ThemeToggle } from './Theme';
 
 interface NavbarProps {
   consolesLink: string | null;
+  agentMode: boolean;
 }
 
-const Navigation: FC<NavbarProps> = ({ consolesLink }) => {
+const Navigation: FC<NavbarProps> = ({ consolesLink, agentMode }) => {
   const [isOpen, setIsOpen] = useState(false);
   const toggle = () => setIsOpen(!isOpen);
   const pathPrefix = usePathPrefix();
   return (
     <Navbar className="mb-3" dark color="dark" expand="md" fixed="top">
       <NavbarToggler onClick={toggle} className="mr-2" />
-      <Link className="pt-0 navbar-brand" to="/graph">
-        Prometheus
+      <Link className="pt-0 navbar-brand" to={agentMode ? '/agent' : '/graph'}>
+        Prometheus{agentMode && ' Agent'}
       </Link>
       <Collapse isOpen={isOpen} navbar style={{ justifyContent: 'space-between' }}>
         <Nav className="ml-0" navbar>
@@ -36,6 +37,8 @@ const Navigation: FC<NavbarProps> = ({ consolesLink }) => {
              <NavLink href={consolesLink}>Consoles</NavLink>
            </NavItem>
          )}
+          {!agentMode && (
+            <>
              <NavItem>
                <NavLink tag={Link} to="/alerts">
                  Alerts
@@ -46,6 +49,8 @@ const Navigation: FC<NavbarProps> = ({ consolesLink }) => {
                  Graph
                </NavLink>
              </NavItem>
+            </>
+          )}
          <UncontrolledDropdown nav inNavbar>
            <DropdownToggle nav caret>
              Status
@@ -54,18 +59,22 @@ const Navigation: FC<NavbarProps> = ({ consolesLink }) => {
              <DropdownItem tag={Link} to="/status">
                Runtime & Build Information
              </DropdownItem>
+              {!agentMode && (
                <DropdownItem tag={Link} to="/tsdb-status">
                  TSDB Status
                </DropdownItem>
+              )}
              <DropdownItem tag={Link} to="/flags">
                Command-Line Flags
              </DropdownItem>
              <DropdownItem tag={Link} to="/config">
                Configuration
              </DropdownItem>
+              {!agentMode && (
                <DropdownItem tag={Link} to="/rules">
                  Rules
                </DropdownItem>
+              )}
              <DropdownItem tag={Link} to="/targets">
                Targets
              </DropdownItem>
@@ -77,9 +86,11 @@ const Navigation: FC<NavbarProps> = ({ consolesLink }) => {
            <NavItem>
              <NavLink href="https://prometheus.io/docs/prometheus/latest/getting_started/">Help</NavLink>
            </NavItem>
+            {!agentMode && (
              <NavItem>
                <NavLink href={`${pathPrefix}/classic/graph${window.location.search}`}>Classic UI</NavLink>
              </NavItem>
+            )}
          </Nav>
        </Collapse>
        <ThemeToggle />
@@ -10,8 +10,10 @@ import { isPresent } from './utils';
 
 // Declared/defined in public/index.html, value replaced by Prometheus when serving bundle.
 declare const GLOBAL_CONSOLES_LINK: string;
+declare const GLOBAL_AGENT_MODE: string;
 
 let consolesLink: string | null = GLOBAL_CONSOLES_LINK;
+const agentMode: string | null = GLOBAL_AGENT_MODE;
 
 if (
   GLOBAL_CONSOLES_LINK === 'CONSOLES_LINK_PLACEHOLDER' ||
@@ -21,4 +23,4 @@ if (
   consolesLink = null;
 }
 
-ReactDOM.render(<App consolesLink={consolesLink} />, document.getElementById('root'));
+ReactDOM.render(<App consolesLink={consolesLink} agentMode={agentMode === 'true'} />, document.getElementById('root'));
web/ui/react-app/src/pages/agent/Agent.tsx (new file, 16 added lines)
@@ -0,0 +1,16 @@
+import React, { FC } from 'react';
+
+const Agent: FC = () => {
+  return (
+    <>
+      <h2>Prometheus Agent</h2>
+      <p>
+        This Prometheus instance is running in <strong>agent mode</strong>. In this mode, Prometheus is only used to scrape
+        discovered targets and forward them to remote write endpoints.
+      </p>
+      <p>Some features are not available in this mode, such as querying and alerting.</p>
+    </>
+  );
+};
+
+export default Agent;
@@ -1,3 +1,4 @@
+import Agent from './agent/Agent';
 import Alerts from './alerts/Alerts';
 import Config from './config/Config';
 import Flags from './flags/Flags';
@@ -9,6 +10,7 @@ import PanelList from './graph/PanelList';
 import TSDBStatus from './tsdbStatus/TSDBStatus';
 import { withStartingIndicator } from '../components/withStartingIndicator';
 
+const AgentPage = withStartingIndicator(Agent);
 const AlertsPage = withStartingIndicator(Alerts);
 const ConfigPage = withStartingIndicator(Config);
 const FlagsPage = withStartingIndicator(Flags);
@@ -21,6 +23,7 @@ const PanelListPage = withStartingIndicator(PanelList);
 
 // prettier-ignore
 export {
+  AgentPage,
   AlertsPage,
   ConfigPage,
   FlagsPage,
@@ -1,6 +1,6 @@
 import React from 'react';
 import { shallow, mount } from 'enzyme';
-import { Badge, Alert } from 'reactstrap';
+import { Badge } from 'reactstrap';
 import EndpointLink from './EndpointLink';
 
 describe('EndpointLink', () => {
@@ -29,11 +29,24 @@ describe('EndpointLink', () => {
     const targetLabel = badges.filterWhere((badge) => badge.children().text() === 'target="http://some-service"');
     expect(targetLabel.length).toEqual(1);
   });
-  it('renders an alert if url is invalid', () => {
-    const endpointLink = shallow(<EndpointLink endpoint={'afdsacas'} globalUrl={'afdsacas'} />);
-    const err = endpointLink.find(Alert);
-    expect(err.render().text()).toEqual('Error: Invalid URL: afdsacas');
+  // In cases of IPv6 addresses with a Zone ID, URL may not be parseable.
+  // See https://github.com/prometheus/prometheus/issues/9760
+  it('renders an anchor for IPv6 link with zone ID including labels for query params', () => {
+    const endpoint =
+      'http://[fe80::f1ee:adeb:371d:983%eth1]:9100/stats/prometheus?module=http_2xx&target=http://some-service';
+    const globalURL =
+      'http://[fe80::f1ee:adeb:371d:983%eth1]:9100/stats/prometheus?module=http_2xx&target=http://some-service';
+    const endpointLink = shallow(<EndpointLink endpoint={endpoint} globalUrl={globalURL} />);
+    const anchor = endpointLink.find('a');
+    const badges = endpointLink.find(Badge);
+    expect(anchor.prop('href')).toEqual(globalURL);
+    expect(anchor.children().text()).toEqual('http://[fe80::f1ee:adeb:371d:983%eth1]:9100/stats/prometheus');
+    expect(endpointLink.find('br')).toHaveLength(1);
+    expect(badges).toHaveLength(2);
+    const moduleLabel = badges.filterWhere((badge) => badge.children().text() === 'module="http_2xx"');
+    expect(moduleLabel.length).toEqual(1);
+    const targetLabel = badges.filterWhere((badge) => badge.children().text() === 'target="http://some-service"');
+    expect(targetLabel.length).toEqual(1);
   });
 
   it('handles params with multiple values correctly', () => {
@@ -1,5 +1,5 @@
 import React, { FC } from 'react';
-import { Badge, Alert } from 'reactstrap';
+import { Badge } from 'reactstrap';
 
 export interface EndpointLinkProps {
   endpoint: string;
@@ -8,23 +8,28 @@ export interface EndpointLinkProps {
 
 const EndpointLink: FC<EndpointLinkProps> = ({ endpoint, globalUrl }) => {
   let url: URL;
+  let search = '';
+  let invalidURL = false;
   try {
     url = new URL(endpoint);
   } catch (err: unknown) {
-    const error = err as Error;
-    return (
-      <Alert color="danger">
-        <strong>Error:</strong> {error.message}
-      </Alert>
-    );
+    // In cases of IPv6 addresses with a Zone ID, URL may not be parseable.
+    // See https://github.com/prometheus/prometheus/issues/9760
+    // In this case, we attempt to prepare a synthetic URL with the
+    // same query parameters, for rendering purposes.
+    invalidURL = true;
+    if (endpoint.indexOf('?') > -1) {
+      search = endpoint.substring(endpoint.indexOf('?'));
+    }
+    url = new URL('http://0.0.0.0' + search);
   }
 
   const { host, pathname, protocol, searchParams }: URL = url;
   const params = Array.from(searchParams.entries());
+  const displayLink = invalidURL ? endpoint.replace(search, '') : `${protocol}//${host}${pathname}`;
   return (
     <>
-      <a href={globalUrl}>{`${protocol}//${host}${pathname}`}</a>
+      <a href={globalUrl}>{displayLink}</a>
      {params.length > 0 ? <br /> : null}
      {params.map(([labelName, labelValue]: [string, string]) => {
        return (
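The rewritten catch branch above works around the fact that the WHATWG URL parser rejects IPv6 hosts that carry a zone ID (the %eth1 part), so new URL() throws for such scrape endpoints. Below is a rough, self-contained TypeScript sketch of the same idea outside the component; the helper name splitEndpointForDisplay is invented for the example and the splitting mirrors the '?'-based logic shown in the diff.

```ts
// Rough sketch of the fallback used above (hypothetical helper, not the component itself).
// Assumption: endpoints with an IPv6 zone ID (e.g. [fe80::1%eth1]) make `new URL()` throw,
// so we fall back to splitting on '?' and parsing only the query string.
function splitEndpointForDisplay(endpoint: string): { display: string; params: [string, string][] } {
  try {
    const url = new URL(endpoint);
    return {
      display: `${url.protocol}//${url.host}${url.pathname}`,
      params: Array.from(url.searchParams.entries()),
    };
  } catch {
    // Synthetic URL with a dummy host, kept only for its query parameters.
    const qIndex = endpoint.indexOf('?');
    const search = qIndex > -1 ? endpoint.substring(qIndex) : '';
    const synthetic = new URL('http://0.0.0.0' + search);
    return {
      display: endpoint.replace(search, ''),
      params: Array.from(synthetic.searchParams.entries()),
    };
  }
}

// Example: an IPv6 endpoint with a zone ID.
const endpoint =
  'http://[fe80::f1ee:adeb:371d:983%eth1]:9100/stats/prometheus?module=http_2xx&target=http://some-service';
const { display, params } = splitEndpointForDisplay(endpoint);
console.log(display); // http://[fe80::f1ee:adeb:371d:983%eth1]:9100/stats/prometheus
console.log(params);  // [['module', 'http_2xx'], ['target', 'http://some-service']]
```

Using a dummy host such as 0.0.0.0 keeps URLSearchParams usable for rendering the badges, while the visible link text falls back to the raw endpoint with its query string stripped.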
web/web.go (36 changed lines)
@@ -71,18 +71,27 @@ import (
 
 // Paths that are handled by the React / Reach router that should all be served the main React app's index.html.
 var reactRouterPaths = []string{
-    "/alerts",
     "/config",
     "/flags",
-    "/graph",
-    "/rules",
     "/service-discovery",
     "/status",
     "/targets",
-    "/tsdb-status",
     "/starting",
 }
 
+// Paths that are handled by the React router when the Agent mode is set.
+var reactRouterAgentPaths = []string{
+    "/agent",
+}
+
+// Paths that are handled by the React router when the Agent mode is not set.
+var reactRouterServerPaths = []string{
+    "/alerts",
+    "/graph",
+    "/rules",
+    "/tsdb-status",
+}
 
 // withStackTrace logs the stack trace in case the request panics. The function
 // will re-raise the error which will then be handled by the net/http package.
 // It is needed because the go-kit log package doesn't manage properly the
@@ -346,10 +355,15 @@ func New(logger log.Logger, o *Options) *Handler {
     router = router.WithPrefix(o.RoutePrefix)
 }
 
+    homePage := "/graph"
+    if o.IsAgent {
+        homePage = "/agent"
+    }
+
     readyf := h.testReady
 
     router.Get("/", func(w http.ResponseWriter, r *http.Request) {
-        http.Redirect(w, r, path.Join(o.ExternalURL.Path, "/graph"), http.StatusFound)
+        http.Redirect(w, r, path.Join(o.ExternalURL.Path, homePage), http.StatusFound)
     })
     router.Get("/classic/", func(w http.ResponseWriter, r *http.Request) {
         http.Redirect(w, r, path.Join(o.ExternalURL.Path, "/classic/graph"), http.StatusFound)
@@ -409,7 +423,7 @@ func New(logger log.Logger, o *Options) *Handler {
     }
     replacedIdx := bytes.ReplaceAll(idx, []byte("CONSOLES_LINK_PLACEHOLDER"), []byte(h.consolesPath()))
     replacedIdx = bytes.ReplaceAll(replacedIdx, []byte("TITLE_PLACEHOLDER"), []byte(h.options.PageTitle))
-    replacedIdx = bytes.ReplaceAll(replacedIdx, []byte("PROMETHEUS_AGENT_MODE_PLACEHOLDER"), []byte(strconv.FormatBool(h.options.IsAgent)))
+    replacedIdx = bytes.ReplaceAll(replacedIdx, []byte("AGENT_MODE_PLACEHOLDER"), []byte(strconv.FormatBool(h.options.IsAgent)))
     w.Write(replacedIdx)
     }
 
@@ -418,6 +432,16 @@ func New(logger log.Logger, o *Options) *Handler {
         router.Get(p, serveReactApp)
     }
 
+    if h.options.IsAgent {
+        for _, p := range reactRouterAgentPaths {
+            router.Get(p, serveReactApp)
+        }
+    } else {
+        for _, p := range reactRouterServerPaths {
+            router.Get(p, serveReactApp)
+        }
+    }
+
     // The favicon and manifest are bundled as part of the React app, but we want to serve
     // them on the root.
     for _, p := range []string{"/favicon.ico", "/manifest.json"} {
@@ -585,6 +585,8 @@ func TestAgentAPIEndPoints(t *testing.T) {
     "/query",
     "/query_range",
     "/query_exemplars",
+    "/graph",
+    "/rules",
 } {
     w := httptest.NewRecorder()
     req, err := http.NewRequest("GET", baseURL+u, nil)
@@ -595,6 +597,7 @@ func TestAgentAPIEndPoints(t *testing.T) {
 
 // Test for available endpoints in the Agent mode.
 for _, u := range []string{
+    "/agent",
     "/targets",
     "/status",
 } {