Merge branch 'main' into sparsehistogram

This commit is contained in:
beorn7 2021-11-30 18:22:37 +01:00
commit e8e9155a11
23 changed files with 522 additions and 340 deletions

View file

@ -353,8 +353,10 @@ func checkConfig(agentMode bool, filename string) ([]string, error) {
}
for _, scfg := range cfg.ScrapeConfigs {
if err := checkFileExists(scfg.HTTPClientConfig.BearerTokenFile); err != nil {
return nil, errors.Wrapf(err, "error checking bearer token file %q", scfg.HTTPClientConfig.BearerTokenFile)
if scfg.HTTPClientConfig.Authorization != nil {
if err := checkFileExists(scfg.HTTPClientConfig.Authorization.CredentialsFile); err != nil {
return nil, errors.Wrapf(err, "error checking authorization credentials or bearer token file %q", scfg.HTTPClientConfig.Authorization.CredentialsFile)
}
}
if err := checkTLSConfig(scfg.HTTPClientConfig.TLSConfig); err != nil {

View file

@ -203,3 +203,33 @@ func TestCheckTargetConfig(t *testing.T) {
})
}
}
// TestAuthorizationConfig verifies that checkConfig validates the
// authorization credentials file referenced by a scrape config: a
// missing file must produce an error, an existing one must not.
func TestAuthorizationConfig(t *testing.T) {
	cases := []struct {
		name string
		file string
		err  string
	}{
		{
			name: "authorization_credentials_file.bad",
			file: "authorization_credentials_file.bad.yml",
			err:  "error checking authorization credentials or bearer token file",
		},
		{
			name: "authorization_credentials_file.good",
			file: "authorization_credentials_file.good.yml",
			err:  "",
		},
	}

	for _, test := range cases {
		t.Run(test.name, func(t *testing.T) {
			_, err := checkConfig(false, "testdata/"+test.file)
			if test.err != "" {
				// Guard against a nil error before calling err.Error();
				// otherwise an unexpected success would panic with a nil
				// dereference instead of failing with a useful message.
				require.Error(t, err, "Expected error containing %q, got none", test.err)
				require.Contains(t, err.Error(), test.err, "Expected error to contain %q, got %q", test.err, err.Error())
				return
			}
			require.NoError(t, err)
		})
	}
}

View file

@ -0,0 +1,4 @@
# Test fixture: credentials_file points at a path that does not exist,
# so checkConfig is expected to reject this configuration.
scrape_configs:
- job_name: test
authorization:
credentials_file: "/random/file/which/does/not/exist.yml"

View file

@ -0,0 +1,4 @@
scrape_configs:
- job_name: test
authorization:
credentials_file: "."

6
go.mod
View file

@ -3,13 +3,13 @@ module github.com/prometheus/prometheus
go 1.14
require (
github.com/Azure/azure-sdk-for-go v58.3.0+incompatible
github.com/Azure/azure-sdk-for-go v59.4.0+incompatible
github.com/Azure/go-autorest/autorest v0.11.22
github.com/Azure/go-autorest/autorest/adal v0.9.17
github.com/Azure/go-autorest/autorest/to v0.4.0 // indirect
github.com/Azure/go-autorest/autorest/validation v0.3.1 // indirect
github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a
github.com/aws/aws-sdk-go v1.42.10
github.com/aws/aws-sdk-go v1.42.15
github.com/cespare/xxhash/v2 v2.1.2
github.com/containerd/containerd v1.5.7 // indirect
github.com/dennwc/varint v1.0.0
@ -23,7 +23,7 @@ require (
github.com/fsnotify/fsnotify v1.5.1
github.com/go-kit/log v0.2.0
github.com/go-logfmt/logfmt v0.5.1
github.com/go-openapi/strfmt v0.21.0
github.com/go-openapi/strfmt v0.21.1
github.com/go-zookeeper/zk v1.0.2
github.com/gogo/protobuf v1.3.2
github.com/golang/snappy v0.0.4

16
go.sum
View file

@ -51,8 +51,8 @@ collectd.org v0.3.0/go.mod h1:A/8DzQBkF6abtvrT2j/AU/4tiBgJWYyh0y/oB/4MlWE=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v41.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v58.3.0+incompatible h1:lb9OWePNuJMiibdxg9XvdbiOldR0Yifge37L4LoOxIs=
github.com/Azure/azure-sdk-for-go v58.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v59.4.0+incompatible h1:gDA8odnngdNd3KYHL2NoK1j9vpWBgEnFSjKKLpkC8Aw=
github.com/Azure/azure-sdk-for-go v59.4.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
@ -187,8 +187,8 @@ github.com/aws/aws-sdk-go v1.30.12/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZve
github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48=
github.com/aws/aws-sdk-go v1.38.35/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
github.com/aws/aws-sdk-go v1.40.11/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q=
github.com/aws/aws-sdk-go v1.42.10 h1:PW9G/hnsuKttbFtOcgNKD0vQrp4yfNrtACA+X0p9mjM=
github.com/aws/aws-sdk-go v1.42.10/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q=
github.com/aws/aws-sdk-go v1.42.15 h1:RcUChuF7KzrrTqx9LAzJbLBX00LkUY7cH9T1VdxNdqk=
github.com/aws/aws-sdk-go v1.42.15/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q=
github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g=
github.com/benbjohnson/immutable v0.2.1/go.mod h1:uc6OHo6PN2++n98KHLxW8ef4W42ylHiQSENghE1ezxI=
github.com/benbjohnson/tmpl v1.0.0/go.mod h1:igT620JFIi44B6awvU9IsDhR77IXWtFigTLil/RPdps=
@ -547,8 +547,8 @@ github.com/go-openapi/strfmt v0.19.5/go.mod h1:eftuHTlB/dI8Uq8JJOyRlieZf+WkkxUuk
github.com/go-openapi/strfmt v0.19.11/go.mod h1:UukAYgTaQfqJuAFlNxxMWNvMYiwiXtLsF2VwmoFtbtc=
github.com/go-openapi/strfmt v0.20.0/go.mod h1:UukAYgTaQfqJuAFlNxxMWNvMYiwiXtLsF2VwmoFtbtc=
github.com/go-openapi/strfmt v0.20.1/go.mod h1:43urheQI9dNtE5lTZQfuFJvjYJKPrxicATpEfZwHUNk=
github.com/go-openapi/strfmt v0.21.0 h1:hX2qEZKmYks+t0hKeb4VTJpUm2UYsdL3+DCid5swxIs=
github.com/go-openapi/strfmt v0.21.0/go.mod h1:ZRQ409bWMj+SOgXofQAGTIo2Ebu72Gs+WaRADcS5iNg=
github.com/go-openapi/strfmt v0.21.1 h1:G6s2t5V5kGCHLVbSdZ/6lI8Wm4OzoPFkc3/cjAsKQrM=
github.com/go-openapi/strfmt v0.21.1/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k=
github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I=
github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
github.com/go-openapi/swag v0.18.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
@ -1351,8 +1351,8 @@ go.mongodb.org/mongo-driver v1.4.3/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4S
go.mongodb.org/mongo-driver v1.4.4/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc=
go.mongodb.org/mongo-driver v1.4.6/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc=
go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw=
go.mongodb.org/mongo-driver v1.7.3 h1:G4l/eYY9VrQAK/AUgkV0koQKzQnyddnWxrd/Etf0jIs=
go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg=
go.mongodb.org/mongo-driver v1.7.5 h1:ny3p0reEpgsR2cfA5cjgwFZg3Cv/ofFh/8jbhGtz9VI=
go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng=
go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk=
go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=

View file

@ -16,9 +16,11 @@ package agent
import (
"context"
"fmt"
"math"
"path/filepath"
"sync"
"time"
"unicode/utf8"
"github.com/go-kit/log"
"github.com/go-kit/log/level"
@ -94,6 +96,8 @@ type dbMetrics struct {
numActiveSeries prometheus.Gauge
numWALSeriesPendingDeletion prometheus.Gauge
totalAppendedSamples prometheus.Counter
totalAppendedExemplars prometheus.Counter
totalOutOfOrderSamples prometheus.Counter
walTruncateDuration prometheus.Summary
walCorruptionsTotal prometheus.Counter
walTotalReplayDuration prometheus.Gauge
@ -120,6 +124,16 @@ func newDBMetrics(r prometheus.Registerer) *dbMetrics {
Help: "Total number of samples appended to the storage",
})
m.totalAppendedExemplars = prometheus.NewCounter(prometheus.CounterOpts{
Name: "prometheus_agent_exemplars_appended_total",
Help: "Total number of exemplars appended to the storage",
})
m.totalOutOfOrderSamples = prometheus.NewCounter(prometheus.CounterOpts{
Name: "prometheus_agent_out_of_order_samples_total",
Help: "Total number of out of order samples ingestion failed attempts.",
})
m.walTruncateDuration = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "prometheus_agent_truncate_duration_seconds",
Help: "Duration of WAL truncation.",
@ -160,6 +174,8 @@ func newDBMetrics(r prometheus.Registerer) *dbMetrics {
m.numActiveSeries,
m.numWALSeriesPendingDeletion,
m.totalAppendedSamples,
m.totalAppendedExemplars,
m.totalOutOfOrderSamples,
m.walTruncateDuration,
m.walCorruptionsTotal,
m.walTotalReplayDuration,
@ -181,6 +197,15 @@ func (m *dbMetrics) Unregister() {
m.numActiveSeries,
m.numWALSeriesPendingDeletion,
m.totalAppendedSamples,
m.totalAppendedExemplars,
m.totalOutOfOrderSamples,
m.walTruncateDuration,
m.walCorruptionsTotal,
m.walTotalReplayDuration,
m.checkpointDeleteFail,
m.checkpointDeleteTotal,
m.checkpointCreationFail,
m.checkpointCreationTotal,
}
for _, c := range cs {
m.r.Unregister(c)
@ -258,9 +283,10 @@ func Open(l log.Logger, reg prometheus.Registerer, rs *remote.Storage, dir strin
db.appenderPool.New = func() interface{} {
return &appender{
DB: db,
pendingSeries: make([]record.RefSeries, 0, 100),
pendingSamples: make([]record.RefSample, 0, 100),
DB: db,
pendingSeries: make([]record.RefSeries, 0, 100),
pendingSamples: make([]record.RefSample, 0, 100),
pendingExamplars: make([]record.RefExemplar, 0, 10),
}
}
@ -412,11 +438,8 @@ func (db *DB) loadWAL(r *wal.Reader, multiRef map[chunks.HeadSeriesRef]chunks.He
return
}
decoded <- samples
case record.Tombstones:
// We don't care about tombstones
continue
case record.Exemplars:
// We don't care about exemplars
case record.Tombstones, record.Exemplars:
// We don't care about tombstones or exemplars during replay.
continue
default:
errCh <- &wal.CorruptionErr{
@ -666,82 +689,114 @@ func (db *DB) Close() error {
type appender struct {
*DB
pendingSeries []record.RefSeries
pendingSamples []record.RefSample
pendingSeries []record.RefSeries
pendingSamples []record.RefSample
pendingExamplars []record.RefExemplar
// Pointers to the series referenced by each element of pendingSamples.
// Series lock is not held on elements.
sampleSeries []*memSeries
}
func (a *appender) Append(ref storage.SeriesRef, l labels.Labels, t int64, v float64) (storage.SeriesRef, error) {
if ref == 0 {
r, err := a.Add(l, t, v)
return storage.SeriesRef(r), err
}
return ref, a.AddFast(chunks.HeadSeriesRef(ref), t, v)
}
// series references and chunk references are identical for agent mode.
headRef := chunks.HeadSeriesRef(ref)
func (a *appender) Add(l labels.Labels, t int64, v float64) (chunks.HeadSeriesRef, error) {
hash := l.Hash()
series := a.series.GetByHash(hash, l)
if series != nil {
return series.ref, a.AddFast(series.ref, t, v)
}
// Ensure no empty or duplicate labels have gotten through. This mirrors the
// equivalent validation code in the TSDB's headAppender.
l = l.WithoutEmpty()
if len(l) == 0 {
return 0, errors.Wrap(tsdb.ErrInvalidSample, "empty labelset")
}
if lbl, dup := l.HasDuplicateLabelNames(); dup {
return 0, errors.Wrap(tsdb.ErrInvalidSample, fmt.Sprintf(`label name "%s" is not unique`, lbl))
}
ref := chunks.HeadSeriesRef(a.nextRef.Inc())
series = &memSeries{ref: ref, lset: l, lastTs: t}
a.pendingSeries = append(a.pendingSeries, record.RefSeries{
Ref: ref,
Labels: l,
})
a.pendingSamples = append(a.pendingSamples, record.RefSample{
Ref: ref,
T: t,
V: v,
})
a.series.Set(hash, series)
a.metrics.numActiveSeries.Inc()
a.metrics.totalAppendedSamples.Inc()
return series.ref, nil
}
func (a *appender) AddFast(ref chunks.HeadSeriesRef, t int64, v float64) error {
series := a.series.GetByID(ref)
series := a.series.GetByID(headRef)
if series == nil {
return storage.ErrNotFound
// Ensure no empty or duplicate labels have gotten through. This mirrors the
// equivalent validation code in the TSDB's headAppender.
l = l.WithoutEmpty()
if len(l) == 0 {
return 0, errors.Wrap(tsdb.ErrInvalidSample, "empty labelset")
}
if lbl, dup := l.HasDuplicateLabelNames(); dup {
return 0, errors.Wrap(tsdb.ErrInvalidSample, fmt.Sprintf(`label name "%s" is not unique`, lbl))
}
var created bool
series, created = a.getOrCreate(l)
if created {
a.pendingSeries = append(a.pendingSeries, record.RefSeries{
Ref: series.ref,
Labels: l,
})
a.metrics.numActiveSeries.Inc()
}
}
series.Lock()
defer series.Unlock()
// Update last recorded timestamp. Used by Storage.gc to determine if a
// series is dead.
series.lastTs = t
if t < series.lastTs {
a.metrics.totalOutOfOrderSamples.Inc()
return 0, storage.ErrOutOfOrderSample
}
// NOTE: always modify pendingSamples and sampleSeries together
a.pendingSamples = append(a.pendingSamples, record.RefSample{
Ref: ref,
Ref: series.ref,
T: t,
V: v,
})
a.sampleSeries = append(a.sampleSeries, series)
a.metrics.totalAppendedSamples.Inc()
return nil
return storage.SeriesRef(series.ref), nil
}
// getOrCreate returns the in-memory series matching the given label set,
// creating and registering a fresh one when no match exists. The second
// return value reports whether a new series was created, so the caller
// can record it in the WAL and bump the active-series metric.
func (a *appender) getOrCreate(l labels.Labels) (series *memSeries, created bool) {
	hash := l.Hash()
	if existing := a.series.GetByHash(hash, l); existing != nil {
		return existing, false
	}
	// New series: assign the next head reference and start with the
	// minimum timestamp so the first appended sample is never rejected
	// as out of order.
	series = &memSeries{
		ref:    chunks.HeadSeriesRef(a.nextRef.Inc()),
		lset:   l,
		lastTs: math.MinInt64,
	}
	a.series.Set(hash, series)
	return series, true
}
func (a *appender) AppendExemplar(ref storage.SeriesRef, l labels.Labels, e exemplar.Exemplar) (storage.SeriesRef, error) {
// remote_write doesn't support exemplars yet, so do nothing here.
return 0, nil
// series references and chunk references are identical for agent mode.
headRef := chunks.HeadSeriesRef(ref)
s := a.series.GetByID(headRef)
if s == nil {
return 0, fmt.Errorf("unknown series ref when trying to add exemplar: %d", ref)
}
// Ensure no empty labels have gotten through.
e.Labels = e.Labels.WithoutEmpty()
if lbl, dup := e.Labels.HasDuplicateLabelNames(); dup {
return 0, errors.Wrap(tsdb.ErrInvalidExemplar, fmt.Sprintf(`label name "%s" is not unique`, lbl))
}
// Exemplar label length does not include chars involved in text rendering such as quotes
// equals sign, or commas. See definition of const ExemplarMaxLabelLength.
labelSetLen := 0
for _, l := range e.Labels {
labelSetLen += utf8.RuneCountInString(l.Name)
labelSetLen += utf8.RuneCountInString(l.Value)
if labelSetLen > exemplar.ExemplarMaxLabelSetLength {
return 0, storage.ErrExemplarLabelLength
}
}
a.pendingExamplars = append(a.pendingExamplars, record.RefExemplar{
Ref: s.ref,
T: e.Ts,
V: e.Value,
Labels: e.Labels,
})
return storage.SeriesRef(s.ref), nil
}
func (a *appender) AppendHistogram(ref storage.SeriesRef, l labels.Labels, t int64, h *histogram.Histogram) (storage.SeriesRef, error) {
@ -773,6 +828,22 @@ func (a *appender) Commit() error {
buf = buf[:0]
}
if len(a.pendingExamplars) > 0 {
buf = encoder.Exemplars(a.pendingExamplars, buf)
if err := a.wal.Log(buf); err != nil {
return err
}
buf = buf[:0]
}
var series *memSeries
for i, s := range a.pendingSamples {
series = a.sampleSeries[i]
if !series.updateTimestamp(s.T) {
a.metrics.totalOutOfOrderSamples.Inc()
}
}
//nolint:staticcheck
a.bufPool.Put(buf)
return a.Rollback()
@ -781,6 +852,8 @@ func (a *appender) Commit() error {
// Rollback discards all buffered, uncommitted data and returns the
// appender to the pool for reuse. It never fails.
func (a *appender) Rollback() error {
	// Truncate in place (length 0, capacity kept) so the next appender
	// borrowed from the pool reuses the backing arrays.
	a.pendingSamples = a.pendingSamples[:0]
	a.pendingSeries = a.pendingSeries[:0]
	a.sampleSeries = a.sampleSeries[:0]
	a.pendingExamplars = a.pendingExamplars[:0]
	a.appenderPool.Put(a)
	return nil
}

View file

@ -15,8 +15,8 @@ package agent
import (
"context"
"path/filepath"
"strconv"
"sync"
"testing"
"time"
@ -26,25 +26,70 @@ import (
dto "github.com/prometheus/client_model/go"
"github.com/stretchr/testify/require"
"github.com/prometheus/prometheus/model/exemplar"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/remote"
"github.com/prometheus/prometheus/tsdb"
"github.com/prometheus/prometheus/tsdb/record"
"github.com/prometheus/prometheus/tsdb/tsdbutil"
"github.com/prometheus/prometheus/tsdb/wal"
"github.com/prometheus/prometheus/util/testutil"
)
func TestUnsupported(t *testing.T) {
promAgentDir := t.TempDir()
func TestDB_InvalidSeries(t *testing.T) {
s := createTestAgentDB(t, nil, DefaultOptions())
defer s.Close()
opts := DefaultOptions()
logger := log.NewNopLogger()
app := s.Appender(context.Background())
s, err := Open(logger, prometheus.NewRegistry(), nil, promAgentDir, opts)
t.Run("Samples", func(t *testing.T) {
_, err := app.Append(0, labels.Labels{}, 0, 0)
require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject empty labels")
_, err = app.Append(0, labels.Labels{{Name: "a", Value: "1"}, {Name: "a", Value: "2"}}, 0, 0)
require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject duplicate labels")
})
t.Run("Exemplars", func(t *testing.T) {
sRef, err := app.Append(0, labels.Labels{{Name: "a", Value: "1"}}, 0, 0)
require.NoError(t, err, "should not reject valid series")
_, err = app.AppendExemplar(0, nil, exemplar.Exemplar{})
require.EqualError(t, err, "unknown series ref when trying to add exemplar: 0")
e := exemplar.Exemplar{Labels: labels.Labels{{Name: "a", Value: "1"}, {Name: "a", Value: "2"}}}
_, err = app.AppendExemplar(sRef, nil, e)
require.ErrorIs(t, err, tsdb.ErrInvalidExemplar, "should reject duplicate labels")
e = exemplar.Exemplar{Labels: labels.Labels{{Name: "a_somewhat_long_trace_id", Value: "nYJSNtFrFTY37VR7mHzEE/LIDt7cdAQcuOzFajgmLDAdBSRHYPDzrxhMA4zz7el8naI/AoXFv9/e/G0vcETcIoNUi3OieeLfaIRQci2oa"}}}
_, err = app.AppendExemplar(sRef, nil, e)
require.ErrorIs(t, err, storage.ErrExemplarLabelLength, "should reject too long label length")
// Inverse check
e = exemplar.Exemplar{Labels: labels.Labels{{Name: "a", Value: "1"}}, Value: 20, Ts: 10, HasTs: true}
_, err = app.AppendExemplar(sRef, nil, e)
require.NoError(t, err, "should not reject valid exemplars")
})
}
func createTestAgentDB(t *testing.T, reg prometheus.Registerer, opts *Options) *DB {
t.Helper()
dbDir := t.TempDir()
rs := remote.NewStorage(log.NewNopLogger(), reg, startTime, dbDir, time.Second*30, nil)
t.Cleanup(func() {
require.NoError(t, rs.Close())
})
db, err := Open(log.NewNopLogger(), reg, rs, dbDir, opts)
require.NoError(t, err)
defer func() {
require.NoError(t, s.Close())
}()
return db
}
func TestUnsupportedFunctions(t *testing.T) {
s := createTestAgentDB(t, nil, DefaultOptions())
defer s.Close()
t.Run("Querier", func(t *testing.T) {
_, err := s.Querier(context.TODO(), 0, 0)
@ -68,93 +113,74 @@ func TestCommit(t *testing.T) {
numSeries = 8
)
promAgentDir := t.TempDir()
s := createTestAgentDB(t, nil, DefaultOptions())
app := s.Appender(context.TODO())
lbls := labelsForTest(t.Name(), numSeries)
opts := DefaultOptions()
logger := log.NewNopLogger()
reg := prometheus.NewRegistry()
remoteStorage := remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
defer func(rs *remote.Storage) {
require.NoError(t, rs.Close())
}(remoteStorage)
s, err := Open(logger, reg, remoteStorage, promAgentDir, opts)
require.NoError(t, err)
a := s.Appender(context.TODO())
for _, l := range lbls {
lset := labels.New(l...)
for i := 0; i < numDatapoints; i++ {
sample := tsdbutil.GenerateSamples(0, 1)
_, err := a.Append(0, lset, sample[0].T(), sample[0].V())
ref, err := app.Append(0, lset, sample[0].T(), sample[0].V())
require.NoError(t, err)
e := exemplar.Exemplar{
Labels: lset,
Ts: sample[0].T(),
Value: sample[0].V(),
HasTs: true,
}
_, err = app.AppendExemplar(ref, lset, e)
require.NoError(t, err)
}
}
require.NoError(t, a.Commit())
require.NoError(t, app.Commit())
require.NoError(t, s.Close())
// Read records from WAL and check for expected count of series and samples.
walSeriesCount := 0
walSamplesCount := 0
reg = prometheus.NewRegistry()
remoteStorage = remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
defer func() {
require.NoError(t, remoteStorage.Close())
}()
s1, err := Open(logger, nil, remoteStorage, promAgentDir, opts)
sr, err := wal.NewSegmentsReader(s.wal.Dir())
require.NoError(t, err)
defer func() {
require.NoError(t, s1.Close())
require.NoError(t, sr.Close())
}()
var dec record.Decoder
// Read records from WAL and check for expected count of series, samples, and exemplars.
var (
r = wal.NewReader(sr)
dec record.Decoder
if err == nil {
sr, err := wal.NewSegmentsReader(s1.wal.Dir())
require.NoError(t, err)
defer func() {
require.NoError(t, sr.Close())
}()
walSeriesCount, walSamplesCount, walExemplarsCount int
)
for r.Next() {
rec := r.Record()
switch dec.Type(rec) {
case record.Series:
var series []record.RefSeries
series, err = dec.Series(rec, series)
require.NoError(t, err)
walSeriesCount += len(series)
r := wal.NewReader(sr)
seriesPool := sync.Pool{
New: func() interface{} {
return []record.RefSeries{}
},
}
samplesPool := sync.Pool{
New: func() interface{} {
return []record.RefSample{}
},
}
case record.Samples:
var samples []record.RefSample
samples, err = dec.Samples(rec, samples)
require.NoError(t, err)
walSamplesCount += len(samples)
for r.Next() {
rec := r.Record()
switch dec.Type(rec) {
case record.Series:
series := seriesPool.Get().([]record.RefSeries)[:0]
series, _ = dec.Series(rec, series)
walSeriesCount += len(series)
case record.Samples:
samples := samplesPool.Get().([]record.RefSample)[:0]
samples, _ = dec.Samples(rec, samples)
walSamplesCount += len(samples)
default:
}
case record.Exemplars:
var exemplars []record.RefExemplar
exemplars, err = dec.Exemplars(rec, exemplars)
require.NoError(t, err)
walExemplarsCount += len(exemplars)
default:
}
}
// Retrieved series count from WAL should match the count of series been added to the WAL.
require.Equal(t, walSeriesCount, numSeries)
// Retrieved samples count from WAL should match the count of samples been added to the WAL.
require.Equal(t, walSamplesCount, numSeries*numDatapoints)
// Check that the WAL contained the same number of commited series/samples/exemplars.
require.Equal(t, numSeries, walSeriesCount, "unexpected number of series")
require.Equal(t, numSeries*numDatapoints, walSamplesCount, "unexpected number of samples")
require.Equal(t, numSeries*numDatapoints, walExemplarsCount, "unexpected number of exemplars")
}
func TestRollback(t *testing.T) {
@ -163,93 +189,68 @@ func TestRollback(t *testing.T) {
numSeries = 8
)
promAgentDir := t.TempDir()
s := createTestAgentDB(t, nil, DefaultOptions())
app := s.Appender(context.TODO())
lbls := labelsForTest(t.Name(), numSeries)
opts := DefaultOptions()
logger := log.NewNopLogger()
reg := prometheus.NewRegistry()
remoteStorage := remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
defer func(rs *remote.Storage) {
require.NoError(t, rs.Close())
}(remoteStorage)
s, err := Open(logger, reg, remoteStorage, promAgentDir, opts)
require.NoError(t, err)
a := s.Appender(context.TODO())
for _, l := range lbls {
lset := labels.New(l...)
for i := 0; i < numDatapoints; i++ {
sample := tsdbutil.GenerateSamples(0, 1)
_, err := a.Append(0, lset, sample[0].T(), sample[0].V())
_, err := app.Append(0, lset, sample[0].T(), sample[0].V())
require.NoError(t, err)
}
}
require.NoError(t, a.Rollback())
// Do a rollback, which should clear uncommitted data. A followup call to
// commit should persist nothing to the WAL.
require.NoError(t, app.Rollback())
require.NoError(t, app.Commit())
require.NoError(t, s.Close())
// Read records from WAL and check for expected count of series and samples.
walSeriesCount := 0
walSamplesCount := 0
reg = prometheus.NewRegistry()
remoteStorage = remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
defer func() {
require.NoError(t, remoteStorage.Close())
}()
s1, err := Open(logger, nil, remoteStorage, promAgentDir, opts)
sr, err := wal.NewSegmentsReader(s.wal.Dir())
require.NoError(t, err)
defer func() {
require.NoError(t, s1.Close())
require.NoError(t, sr.Close())
}()
var dec record.Decoder
// Read records from WAL and check for expected count of series and samples.
var (
r = wal.NewReader(sr)
dec record.Decoder
if err == nil {
sr, err := wal.NewSegmentsReader(s1.wal.Dir())
require.NoError(t, err)
defer func() {
require.NoError(t, sr.Close())
}()
walSeriesCount, walSamplesCount, walExemplarsCount int
)
for r.Next() {
rec := r.Record()
switch dec.Type(rec) {
case record.Series:
var series []record.RefSeries
series, err = dec.Series(rec, series)
require.NoError(t, err)
walSeriesCount += len(series)
r := wal.NewReader(sr)
seriesPool := sync.Pool{
New: func() interface{} {
return []record.RefSeries{}
},
}
samplesPool := sync.Pool{
New: func() interface{} {
return []record.RefSample{}
},
}
case record.Samples:
var samples []record.RefSample
samples, err = dec.Samples(rec, samples)
require.NoError(t, err)
walSamplesCount += len(samples)
for r.Next() {
rec := r.Record()
switch dec.Type(rec) {
case record.Series:
series := seriesPool.Get().([]record.RefSeries)[:0]
series, _ = dec.Series(rec, series)
walSeriesCount += len(series)
case record.Samples:
samples := samplesPool.Get().([]record.RefSample)[:0]
samples, _ = dec.Samples(rec, samples)
walSamplesCount += len(samples)
default:
}
case record.Exemplars:
var exemplars []record.RefExemplar
exemplars, err = dec.Exemplars(rec, exemplars)
require.NoError(t, err)
walExemplarsCount += len(exemplars)
default:
}
}
// Retrieved series count from WAL should be zero.
require.Equal(t, walSeriesCount, 0)
// Retrieved samples count from WAL should be zero.
require.Equal(t, walSamplesCount, 0)
// Check that the rollback ensured nothing got stored.
require.Equal(t, 0, walSeriesCount, "series should not have been written to WAL")
require.Equal(t, 0, walSamplesCount, "samples should not have been written to WAL")
require.Equal(t, 0, walExemplarsCount, "exemplars should not have been written to WAL")
}
func TestFullTruncateWAL(t *testing.T) {
@ -259,34 +260,25 @@ func TestFullTruncateWAL(t *testing.T) {
lastTs = 500
)
promAgentDir := t.TempDir()
lbls := labelsForTest(t.Name(), numSeries)
reg := prometheus.NewRegistry()
opts := DefaultOptions()
opts.TruncateFrequency = time.Minute * 2
logger := log.NewNopLogger()
reg := prometheus.NewRegistry()
remoteStorage := remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
defer func() {
require.NoError(t, remoteStorage.Close())
}()
s, err := Open(logger, reg, remoteStorage, promAgentDir, opts)
require.NoError(t, err)
s := createTestAgentDB(t, reg, opts)
defer func() {
require.NoError(t, s.Close())
}()
app := s.Appender(context.TODO())
a := s.Appender(context.TODO())
lbls := labelsForTest(t.Name(), numSeries)
for _, l := range lbls {
lset := labels.New(l...)
for i := 0; i < numDatapoints; i++ {
_, err := a.Append(0, lset, int64(lastTs), 0)
_, err := app.Append(0, lset, int64(lastTs), 0)
require.NoError(t, err)
}
require.NoError(t, a.Commit())
require.NoError(t, app.Commit())
}
// Truncate WAL with mint to GC all the samples.
@ -302,52 +294,40 @@ func TestPartialTruncateWAL(t *testing.T) {
numSeries = 800
)
promAgentDir := t.TempDir()
opts := DefaultOptions()
opts.TruncateFrequency = time.Minute * 2
logger := log.NewNopLogger()
reg := prometheus.NewRegistry()
remoteStorage := remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
defer func() {
require.NoError(t, remoteStorage.Close())
}()
s, err := Open(logger, reg, remoteStorage, promAgentDir, opts)
require.NoError(t, err)
reg := prometheus.NewRegistry()
s := createTestAgentDB(t, reg, opts)
defer func() {
require.NoError(t, s.Close())
}()
a := s.Appender(context.TODO())
var lastTs int64
app := s.Appender(context.TODO())
// Create first batch of 800 series with 1000 data-points with a fixed lastTs as 500.
lastTs = 500
var lastTs int64 = 500
lbls := labelsForTest(t.Name()+"batch-1", numSeries)
for _, l := range lbls {
lset := labels.New(l...)
for i := 0; i < numDatapoints; i++ {
_, err := a.Append(0, lset, lastTs, 0)
_, err := app.Append(0, lset, lastTs, 0)
require.NoError(t, err)
}
require.NoError(t, a.Commit())
require.NoError(t, app.Commit())
}
// Create second batch of 800 series with 1000 data-points with a fixed lastTs as 600.
lastTs = 600
lbls = labelsForTest(t.Name()+"batch-2", numSeries)
for _, l := range lbls {
lset := labels.New(l...)
for i := 0; i < numDatapoints; i++ {
_, err := a.Append(0, lset, lastTs, 0)
_, err := app.Append(0, lset, lastTs, 0)
require.NoError(t, err)
}
require.NoError(t, a.Commit())
require.NoError(t, app.Commit())
}
// Truncate WAL with mint to GC only the first batch of 800 series and retaining 2nd batch of 800 series.
@ -364,53 +344,41 @@ func TestWALReplay(t *testing.T) {
lastTs = 500
)
promAgentDir := t.TempDir()
s := createTestAgentDB(t, nil, DefaultOptions())
app := s.Appender(context.TODO())
lbls := labelsForTest(t.Name(), numSeries)
opts := DefaultOptions()
logger := log.NewNopLogger()
reg := prometheus.NewRegistry()
remoteStorage := remote.NewStorage(log.With(logger, "component", "remote"), reg, startTime, promAgentDir, time.Second*30, nil)
defer func() {
require.NoError(t, remoteStorage.Close())
}()
s, err := Open(logger, reg, remoteStorage, promAgentDir, opts)
require.NoError(t, err)
a := s.Appender(context.TODO())
for _, l := range lbls {
lset := labels.New(l...)
for i := 0; i < numDatapoints; i++ {
_, err := a.Append(0, lset, lastTs, 0)
_, err := app.Append(0, lset, lastTs, 0)
require.NoError(t, err)
}
}
require.NoError(t, a.Commit())
require.NoError(t, app.Commit())
require.NoError(t, s.Close())
restartOpts := DefaultOptions()
restartLogger := log.NewNopLogger()
restartReg := prometheus.NewRegistry()
// Hack: s.wal.Dir() is the /wal subdirectory of the original storage path.
// We need the original directory so we can recreate the storage for replay.
storageDir := filepath.Dir(s.wal.Dir())
// Open a new DB with the same WAL to check that series from the previous DB
// get replayed.
replayDB, err := Open(restartLogger, restartReg, nil, promAgentDir, restartOpts)
require.NoError(t, err)
reg := prometheus.NewRegistry()
replayStorage, err := Open(s.logger, reg, nil, storageDir, s.opts)
if err != nil {
t.Fatalf("unable to create storage for the agent: %v", err)
}
defer func() {
require.NoError(t, replayDB.Close())
require.NoError(t, replayStorage.Close())
}()
// Check if all the series are retrieved back from the WAL.
m := gatherFamily(t, restartReg, "prometheus_agent_active_series")
m := gatherFamily(t, reg, "prometheus_agent_active_series")
require.Equal(t, float64(numSeries), m.Metric[0].Gauge.GetValue(), "agent wal replay mismatch of active series count")
// Check if lastTs of the samples retrieved from the WAL is retained.
metrics := replayDB.series.series
metrics := replayStorage.series.series
for i := 0; i < len(metrics); i++ {
mp := metrics[i]
for _, v := range mp {

View file

@ -24,11 +24,26 @@ import (
type memSeries struct {
sync.Mutex
ref chunks.HeadSeriesRef
lset labels.Labels
ref chunks.HeadSeriesRef
lset labels.Labels
// Last recorded timestamp. Used by Storage.gc to determine if a series is
// stale.
lastTs int64
}
// updateTimestamp takes the series lock and advances lastTs to newTs.
// It returns false (leaving lastTs untouched) when newTs is older than
// the currently recorded timestamp, i.e. the sample is out of order.
func (m *memSeries) updateTimestamp(newTs int64) bool {
	m.Lock()
	defer m.Unlock()
	if newTs < m.lastTs {
		return false
	}
	m.lastTs = newTs
	return true
}
// seriesHashmap is a simple hashmap for memSeries by their label set.
// It is built on top of a regular hashmap and holds a slice of series to
// resolve hash collisions. Its methods require the hash to be submitted

View file

@ -46,7 +46,7 @@
"@types/chai": "^4.2.22",
"@types/lru-cache": "^5.1.0",
"@types/mocha": "^9.0.0",
"@types/node": "^16.11.9",
"@types/node": "^16.11.10",
"@typescript-eslint/eslint-plugin": "^5.3.1",
"@typescript-eslint/parser": "^5.3.1",
"chai": "^4.2.0",

View file

@ -32,7 +32,7 @@
"@types/chai": "^4.2.22",
"@types/lru-cache": "^5.1.0",
"@types/mocha": "^9.0.0",
"@types/node": "^16.11.9",
"@types/node": "^16.11.10",
"@typescript-eslint/eslint-plugin": "^5.3.1",
"@typescript-eslint/parser": "^5.3.1",
"chai": "^4.2.0",
@ -1775,9 +1775,9 @@
"license": "MIT"
},
"node_modules/@types/node": {
"version": "16.11.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.9.tgz",
"integrity": "sha512-MKmdASMf3LtPzwLyRrFjtFFZ48cMf8jmX5VRYrDQiJa8Ybu5VAmkqBWqKU8fdCwD8ysw4mQ9nrEHvzg6gunR7A==",
"version": "16.11.10",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.10.tgz",
"integrity": "sha512-3aRnHa1KlOEEhJ6+CvyHKK5vE9BcLGjtUpwvqYLRvYNQKMfabu3BwfJaA/SLW8dxe28LsNDjtHwePTuzn3gmOA==",
"dev": true
},
"node_modules/@types/prop-types": {
@ -7252,7 +7252,7 @@
"@types/flot": "0.0.32",
"@types/jest": "^27.0.3",
"@types/jquery": "^3.5.9",
"@types/node": "^16.11.9",
"@types/node": "^16.11.10",
"@types/react": "^17.0.36",
"@types/react-copy-to-clipboard": "^5.0.2",
"@types/react-dom": "^17.0.11",
@ -27808,9 +27808,9 @@
"dev": true
},
"@types/node": {
"version": "16.11.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.9.tgz",
"integrity": "sha512-MKmdASMf3LtPzwLyRrFjtFFZ48cMf8jmX5VRYrDQiJa8Ybu5VAmkqBWqKU8fdCwD8ysw4mQ9nrEHvzg6gunR7A==",
"version": "16.11.10",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.10.tgz",
"integrity": "sha512-3aRnHa1KlOEEhJ6+CvyHKK5vE9BcLGjtUpwvqYLRvYNQKMfabu3BwfJaA/SLW8dxe28LsNDjtHwePTuzn3gmOA==",
"dev": true
},
"@types/prop-types": {
@ -28413,7 +28413,7 @@
"@types/chai": "^4.2.22",
"@types/lru-cache": "^5.1.0",
"@types/mocha": "^9.0.0",
"@types/node": "^16.11.9",
"@types/node": "^16.11.10",
"@typescript-eslint/eslint-plugin": "^5.3.1",
"@typescript-eslint/parser": "^5.3.1",
"chai": "^4.2.0",
@ -29645,7 +29645,7 @@
"@types/flot": "0.0.32",
"@types/jest": "^27.0.3",
"@types/jquery": "^3.5.9",
"@types/node": "^16.11.9",
"@types/node": "^16.11.10",
"@types/react": "^17.0.36",
"@types/react-copy-to-clipboard": "^5.0.2",
"@types/react-dom": "^17.0.11",

View file

@ -69,7 +69,7 @@
"@types/flot": "0.0.32",
"@types/jest": "^27.0.3",
"@types/jquery": "^3.5.9",
"@types/node": "^16.11.9",
"@types/node": "^16.11.10",
"@types/react": "^17.0.36",
"@types/react-copy-to-clipboard": "^5.0.2",
"@types/react-dom": "^17.0.11",

View file

@ -15,10 +15,11 @@
It will render a "Consoles" link in the navbar when it is non-empty.
- PROMETHEUS_AGENT_MODE is replaced by a boolean indicating if Prometheus is running in agent mode.
It true, it will disable querying capacities in the UI and generally adapt the UI to the agent mode.
It has to be represented as a string, because booleans can be mangled to !1 in production builds.
-->
<script>
const GLOBAL_CONSOLES_LINK='CONSOLES_LINK_PLACEHOLDER';
const GLOBAL_PROMETHEUS_AGENT_MODE=PROMETHEUS_AGENT_MODE_PLACEHOLDER;
const GLOBAL_AGENT_MODE='AGENT_MODE_PLACEHOLDER';
</script>
<!--

View file

@ -5,6 +5,7 @@ import Navigation from './Navbar';
import { Container } from 'reactstrap';
import { Route } from 'react-router-dom';
import {
AgentPage,
AlertsPage,
ConfigPage,
FlagsPage,
@ -24,6 +25,7 @@ describe('App', () => {
});
it('routes', () => {
[
AgentPage,
AlertsPage,
ConfigPage,
FlagsPage,
@ -37,7 +39,7 @@ describe('App', () => {
const c = app.find(component);
expect(c).toHaveLength(1);
});
expect(app.find(Route)).toHaveLength(9);
expect(app.find(Route)).toHaveLength(10);
expect(app.find(Container)).toHaveLength(1);
});
});

View file

@ -4,6 +4,7 @@ import { Container } from 'reactstrap';
import { BrowserRouter as Router, Redirect, Switch, Route } from 'react-router-dom';
import {
AgentPage,
AlertsPage,
ConfigPage,
FlagsPage,
@ -22,14 +23,16 @@ import useMedia from './hooks/useMedia';
interface AppProps {
consolesLink: string | null;
agentMode: boolean;
}
const App: FC<AppProps> = ({ consolesLink }) => {
const App: FC<AppProps> = ({ consolesLink, agentMode }) => {
// This dynamically/generically determines the pathPrefix by stripping the first known
// endpoint suffix from the window location path. It works out of the box for both direct
// hosting and reverse proxy deployments with no additional configurations required.
let basePath = window.location.pathname;
const paths = [
'/agent',
'/graph',
'/alerts',
'/status',
@ -70,14 +73,17 @@ const App: FC<AppProps> = ({ consolesLink }) => {
<Theme />
<PathPrefixContext.Provider value={basePath}>
<Router basename={basePath}>
<Navigation consolesLink={consolesLink} />
<Navigation consolesLink={consolesLink} agentMode={agentMode} />
<Container fluid style={{ paddingTop: 70 }}>
<Switch>
<Redirect exact from="/" to={`graph`} />
<Redirect exact from="/" to={agentMode ? '/agent' : '/graph'} />
{/*
NOTE: Any route added here needs to also be added to the list of
React-handled router paths ("reactRouterPaths") in /web/web.go.
*/}
<Route path="/agent">
<AgentPage />
</Route>
<Route path="/graph">
<PanelListPage />
</Route>

View file

@ -17,17 +17,18 @@ import { ThemeToggle } from './Theme';
interface NavbarProps {
consolesLink: string | null;
agentMode: boolean;
}
const Navigation: FC<NavbarProps> = ({ consolesLink }) => {
const Navigation: FC<NavbarProps> = ({ consolesLink, agentMode }) => {
const [isOpen, setIsOpen] = useState(false);
const toggle = () => setIsOpen(!isOpen);
const pathPrefix = usePathPrefix();
return (
<Navbar className="mb-3" dark color="dark" expand="md" fixed="top">
<NavbarToggler onClick={toggle} className="mr-2" />
<Link className="pt-0 navbar-brand" to="/graph">
Prometheus
<Link className="pt-0 navbar-brand" to={agentMode ? '/agent' : '/graph'}>
Prometheus{agentMode && ' Agent'}
</Link>
<Collapse isOpen={isOpen} navbar style={{ justifyContent: 'space-between' }}>
<Nav className="ml-0" navbar>
@ -36,16 +37,20 @@ const Navigation: FC<NavbarProps> = ({ consolesLink }) => {
<NavLink href={consolesLink}>Consoles</NavLink>
</NavItem>
)}
<NavItem>
<NavLink tag={Link} to="/alerts">
Alerts
</NavLink>
</NavItem>
<NavItem>
<NavLink tag={Link} to="/graph">
Graph
</NavLink>
</NavItem>
{!agentMode && (
<>
<NavItem>
<NavLink tag={Link} to="/alerts">
Alerts
</NavLink>
</NavItem>
<NavItem>
<NavLink tag={Link} to="/graph">
Graph
</NavLink>
</NavItem>
</>
)}
<UncontrolledDropdown nav inNavbar>
<DropdownToggle nav caret>
Status
@ -54,18 +59,22 @@ const Navigation: FC<NavbarProps> = ({ consolesLink }) => {
<DropdownItem tag={Link} to="/status">
Runtime & Build Information
</DropdownItem>
<DropdownItem tag={Link} to="/tsdb-status">
TSDB Status
</DropdownItem>
{!agentMode && (
<DropdownItem tag={Link} to="/tsdb-status">
TSDB Status
</DropdownItem>
)}
<DropdownItem tag={Link} to="/flags">
Command-Line Flags
</DropdownItem>
<DropdownItem tag={Link} to="/config">
Configuration
</DropdownItem>
<DropdownItem tag={Link} to="/rules">
Rules
</DropdownItem>
{!agentMode && (
<DropdownItem tag={Link} to="/rules">
Rules
</DropdownItem>
)}
<DropdownItem tag={Link} to="/targets">
Targets
</DropdownItem>
@ -77,9 +86,11 @@ const Navigation: FC<NavbarProps> = ({ consolesLink }) => {
<NavItem>
<NavLink href="https://prometheus.io/docs/prometheus/latest/getting_started/">Help</NavLink>
</NavItem>
<NavItem>
<NavLink href={`${pathPrefix}/classic/graph${window.location.search}`}>Classic UI</NavLink>
</NavItem>
{!agentMode && (
<NavItem>
<NavLink href={`${pathPrefix}/classic/graph${window.location.search}`}>Classic UI</NavLink>
</NavItem>
)}
</Nav>
</Collapse>
<ThemeToggle />

View file

@ -10,8 +10,10 @@ import { isPresent } from './utils';
// Declared/defined in public/index.html, value replaced by Prometheus when serving bundle.
declare const GLOBAL_CONSOLES_LINK: string;
declare const GLOBAL_AGENT_MODE: string;
let consolesLink: string | null = GLOBAL_CONSOLES_LINK;
const agentMode: string | null = GLOBAL_AGENT_MODE;
if (
GLOBAL_CONSOLES_LINK === 'CONSOLES_LINK_PLACEHOLDER' ||
@ -21,4 +23,4 @@ if (
consolesLink = null;
}
ReactDOM.render(<App consolesLink={consolesLink} />, document.getElementById('root'));
ReactDOM.render(<App consolesLink={consolesLink} agentMode={agentMode === 'true'} />, document.getElementById('root'));

View file

@ -0,0 +1,16 @@
import React, { FC } from 'react';
const Agent: FC = () => {
return (
<>
<h2>Prometheus Agent</h2>
<p>
This Prometheus instance is running in <strong>agent mode</strong>. In this mode, Prometheus is only used to scrape
discovered targets and forward them to remote write endpoints.
</p>
<p>Some features are not available in this mode, such as querying and alerting.</p>
</>
);
};
export default Agent;

View file

@ -1,3 +1,4 @@
import Agent from './agent/Agent';
import Alerts from './alerts/Alerts';
import Config from './config/Config';
import Flags from './flags/Flags';
@ -9,6 +10,7 @@ import PanelList from './graph/PanelList';
import TSDBStatus from './tsdbStatus/TSDBStatus';
import { withStartingIndicator } from '../components/withStartingIndicator';
const AgentPage = withStartingIndicator(Agent);
const AlertsPage = withStartingIndicator(Alerts);
const ConfigPage = withStartingIndicator(Config);
const FlagsPage = withStartingIndicator(Flags);
@ -21,6 +23,7 @@ const PanelListPage = withStartingIndicator(PanelList);
// prettier-ignore
export {
AgentPage,
AlertsPage,
ConfigPage,
FlagsPage,

View file

@ -1,6 +1,6 @@
import React from 'react';
import { shallow, mount } from 'enzyme';
import { Badge, Alert } from 'reactstrap';
import { Badge } from 'reactstrap';
import EndpointLink from './EndpointLink';
describe('EndpointLink', () => {
@ -29,11 +29,24 @@ describe('EndpointLink', () => {
const targetLabel = badges.filterWhere((badge) => badge.children().text() === 'target="http://some-service"');
expect(targetLabel.length).toEqual(1);
});
it('renders an alert if url is invalid', () => {
const endpointLink = shallow(<EndpointLink endpoint={'afdsacas'} globalUrl={'afdsacas'} />);
const err = endpointLink.find(Alert);
expect(err.render().text()).toEqual('Error: Invalid URL: afdsacas');
// In cases of IPv6 addresses with a Zone ID, URL may not be parseable.
// See https://github.com/prometheus/prometheus/issues/9760
it('renders an anchor for IPv6 link with zone ID including labels for query params', () => {
const endpoint =
'http://[fe80::f1ee:adeb:371d:983%eth1]:9100/stats/prometheus?module=http_2xx&target=http://some-service';
const globalURL =
'http://[fe80::f1ee:adeb:371d:983%eth1]:9100/stats/prometheus?module=http_2xx&target=http://some-service';
const endpointLink = shallow(<EndpointLink endpoint={endpoint} globalUrl={globalURL} />);
const anchor = endpointLink.find('a');
const badges = endpointLink.find(Badge);
expect(anchor.prop('href')).toEqual(globalURL);
expect(anchor.children().text()).toEqual('http://[fe80::f1ee:adeb:371d:983%eth1]:9100/stats/prometheus');
expect(endpointLink.find('br')).toHaveLength(1);
expect(badges).toHaveLength(2);
const moduleLabel = badges.filterWhere((badge) => badge.children().text() === 'module="http_2xx"');
expect(moduleLabel.length).toEqual(1);
const targetLabel = badges.filterWhere((badge) => badge.children().text() === 'target="http://some-service"');
expect(targetLabel.length).toEqual(1);
});
it('handles params with multiple values correctly', () => {

View file

@ -1,5 +1,5 @@
import React, { FC } from 'react';
import { Badge, Alert } from 'reactstrap';
import { Badge } from 'reactstrap';
export interface EndpointLinkProps {
endpoint: string;
@ -8,23 +8,28 @@ export interface EndpointLinkProps {
const EndpointLink: FC<EndpointLinkProps> = ({ endpoint, globalUrl }) => {
let url: URL;
let search = '';
let invalidURL = false;
try {
url = new URL(endpoint);
} catch (err: unknown) {
const error = err as Error;
return (
<Alert color="danger">
<strong>Error:</strong> {error.message}
</Alert>
);
// In cases of IPv6 addresses with a Zone ID, URL may not be parseable.
// See https://github.com/prometheus/prometheus/issues/9760
// In this case, we attempt to prepare a synthetic URL with the
// same query parameters, for rendering purposes.
invalidURL = true;
if (endpoint.indexOf('?') > -1) {
search = endpoint.substring(endpoint.indexOf('?'));
}
url = new URL('http://0.0.0.0' + search);
}
const { host, pathname, protocol, searchParams }: URL = url;
const params = Array.from(searchParams.entries());
const displayLink = invalidURL ? endpoint.replace(search, '') : `${protocol}//${host}${pathname}`;
return (
<>
<a href={globalUrl}>{`${protocol}//${host}${pathname}`}</a>
<a href={globalUrl}>{displayLink}</a>
{params.length > 0 ? <br /> : null}
{params.map(([labelName, labelValue]: [string, string]) => {
return (

View file

@ -71,18 +71,27 @@ import (
// Paths that are handled by the React / Reach router that should all be served the main React app's index.html.
var reactRouterPaths = []string{
"/alerts",
"/config",
"/flags",
"/graph",
"/rules",
"/service-discovery",
"/status",
"/targets",
"/tsdb-status",
"/starting",
}
// Paths that are handled by the React router when the Agent mode is set.
var reactRouterAgentPaths = []string{
"/agent",
}
// Paths that are handled by the React router when the Agent mode is not set.
var reactRouterServerPaths = []string{
"/alerts",
"/graph",
"/rules",
"/tsdb-status",
}
// withStackTrace logs the stack trace in case the request panics. The function
// will re-raise the error which will then be handled by the net/http package.
// It is needed because the go-kit log package doesn't manage properly the
@ -346,10 +355,15 @@ func New(logger log.Logger, o *Options) *Handler {
router = router.WithPrefix(o.RoutePrefix)
}
homePage := "/graph"
if o.IsAgent {
homePage = "/agent"
}
readyf := h.testReady
router.Get("/", func(w http.ResponseWriter, r *http.Request) {
http.Redirect(w, r, path.Join(o.ExternalURL.Path, "/graph"), http.StatusFound)
http.Redirect(w, r, path.Join(o.ExternalURL.Path, homePage), http.StatusFound)
})
router.Get("/classic/", func(w http.ResponseWriter, r *http.Request) {
http.Redirect(w, r, path.Join(o.ExternalURL.Path, "/classic/graph"), http.StatusFound)
@ -409,7 +423,7 @@ func New(logger log.Logger, o *Options) *Handler {
}
replacedIdx := bytes.ReplaceAll(idx, []byte("CONSOLES_LINK_PLACEHOLDER"), []byte(h.consolesPath()))
replacedIdx = bytes.ReplaceAll(replacedIdx, []byte("TITLE_PLACEHOLDER"), []byte(h.options.PageTitle))
replacedIdx = bytes.ReplaceAll(replacedIdx, []byte("PROMETHEUS_AGENT_MODE_PLACEHOLDER"), []byte(strconv.FormatBool(h.options.IsAgent)))
replacedIdx = bytes.ReplaceAll(replacedIdx, []byte("AGENT_MODE_PLACEHOLDER"), []byte(strconv.FormatBool(h.options.IsAgent)))
w.Write(replacedIdx)
}
@ -418,6 +432,16 @@ func New(logger log.Logger, o *Options) *Handler {
router.Get(p, serveReactApp)
}
if h.options.IsAgent {
for _, p := range reactRouterAgentPaths {
router.Get(p, serveReactApp)
}
} else {
for _, p := range reactRouterServerPaths {
router.Get(p, serveReactApp)
}
}
// The favicon and manifest are bundled as part of the React app, but we want to serve
// them on the root.
for _, p := range []string{"/favicon.ico", "/manifest.json"} {

View file

@ -585,6 +585,8 @@ func TestAgentAPIEndPoints(t *testing.T) {
"/query",
"/query_range",
"/query_exemplars",
"/graph",
"/rules",
} {
w := httptest.NewRecorder()
req, err := http.NewRequest("GET", baseURL+u, nil)
@ -595,6 +597,7 @@ func TestAgentAPIEndPoints(t *testing.T) {
// Test for available endpoints in the Agent mode.
for _, u := range []string{
"/agent",
"/targets",
"/status",
} {