feat: Append created timestamp as metadata

Signed-off-by: Arthur Silva Sens <arthursens2005@gmail.com>
Arthur Silva Sens, 2024-08-25 12:25:07 -03:00
commit 65dc484078 (parent 1b86d54c7f)
10 changed files with 207 additions and 151 deletions

View file

@ -17,7 +17,8 @@ import "github.com/prometheus/common/model"
// Metadata stores a series' metadata information.
type Metadata struct {
Type model.MetricType `json:"type"`
Unit string `json:"unit"`
Help string `json:"help"`
Type model.MetricType `json:"type"`
Unit string `json:"unit"`
Help string `json:"help"`
CreatedTimestamp int64 `json:"created_timestamp"`
}
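
Aside (not part of this commit): a minimal sketch of how the extended struct serializes once the new field is present. The model/metadata import path and the example timestamp are assumptions for illustration only.

package main

import (
    "encoding/json"
    "fmt"

    "github.com/prometheus/common/model"
    "github.com/prometheus/prometheus/model/metadata"
)

func main() {
    m := metadata.Metadata{
        Type:             model.MetricTypeCounter,
        Unit:             "seconds",
        Help:             "Total CPU time.",
        CreatedTimestamp: 1724594707000, // hypothetical value in milliseconds
    }
    out, err := json.Marshal(m)
    if err != nil {
        panic(err)
    }
    // Prints:
    // {"type":"counter","unit":"seconds","help":"Total CPU time.","created_timestamp":1724594707000}
    fmt.Println(string(out))
}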

View file

@ -1080,6 +1080,23 @@ func (c *scrapeCache) setUnit(metric, unit []byte) {
c.metaMtx.Unlock()
}
func (c *scrapeCache) setCreatedTimestamp(metric []byte, ct int64) {
c.metaMtx.Lock()
e, ok := c.metadata[string(metric)]
if !ok {
e = &metaEntry{Metadata: metadata.Metadata{Type: model.MetricTypeUnknown}}
c.metadata[string(metric)] = e
}
if e.CreatedTimestamp != ct {
e.CreatedTimestamp = ct
e.lastIterChange = c.iter
}
e.lastIter = c.iter
c.metaMtx.Unlock()
}
func (c *scrapeCache) GetMetadata(metric string) (MetricMetadata, bool) {
c.metaMtx.Lock()
defer c.metaMtx.Unlock()
@ -1089,10 +1106,11 @@ func (c *scrapeCache) GetMetadata(metric string) (MetricMetadata, bool) {
return MetricMetadata{}, false
}
return MetricMetadata{
Metric: metric,
Type: m.Type,
Help: m.Help,
Unit: m.Unit,
Metric: metric,
Type: m.Type,
Help: m.Help,
Unit: m.Unit,
CreatedTimestamp: m.CreatedTimestamp,
}, true
}
@ -1104,10 +1122,11 @@ func (c *scrapeCache) ListMetadata() []MetricMetadata {
for m, e := range c.metadata {
res = append(res, MetricMetadata{
Metric: m,
Type: e.Type,
Help: e.Help,
Unit: e.Unit,
Metric: m,
Type: e.Type,
Help: e.Help,
Unit: e.Unit,
CreatedTimestamp: e.CreatedTimestamp,
})
}
return res
@ -1528,6 +1547,7 @@ func (sl *scrapeLoop) append(app storage.Appender, b []byte, contentType string,
meta.Type = metaEntry.Type
meta.Unit = metaEntry.Unit
meta.Help = metaEntry.Help
meta.CreatedTimestamp = metaEntry.CreatedTimestamp
return true
}
return false
@ -1610,9 +1630,6 @@ loop:
ref = ce.ref
lset = ce.lset
hash = ce.hash
// Update metadata only if it changed in the current iteration.
updateMetadata(lset, false)
} else {
p.Metric(&lset)
hash = lset.Hash()
@ -1641,15 +1658,13 @@ loop:
sl.metrics.targetScrapePoolExceededLabelLimits.Inc()
break loop
}
// Append metadata for new series if they were present.
updateMetadata(lset, true)
}
if seriesAlreadyScraped {
err = storage.ErrDuplicateSampleForTimestamp
} else {
if ctMs := p.CreatedTimestamp(); sl.enableCTZeroIngestion && ctMs != nil {
ctMs := p.CreatedTimestamp()
if sl.enableCTZeroIngestion && ctMs != nil {
ref, err = app.AppendCTZeroSample(ref, lset, t, *ctMs)
if err != nil && !errors.Is(err, storage.ErrOutOfOrderCT) { // OOO is a common case, ignoring completely for now.
// CT is an experimental feature. For now, we don't need to fail the
@ -1657,6 +1672,9 @@ loop:
level.Debug(sl.l).Log("msg", "Error when appending CT in scrape loop", "series", string(met), "ct", *ctMs, "t", t, "err", err)
}
}
if ctMs != nil {
sl.cache.setCreatedTimestamp(met, *ctMs)
}
if isHistogram && sl.enableNativeHistogramIngestion {
if h != nil {
@ -1669,6 +1687,10 @@ loop:
}
}
// If they were present, append metadata for new series
// or for existing series if the metadata has changed.
updateMetadata(lset, !ok)
if err == nil {
if (parsedTimestamp == nil || sl.trackTimestampsStaleness) && ce != nil {
sl.cache.trackStaleness(ce.hash, ce.lset)
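
Aside (not part of this commit): a hedged sketch of where the created timestamp comes from on the scrape path. A _created line in an OpenMetrics exposition surfaces through p.CreatedTimestamp(), which the loop above forwards to the metadata cache via setCreatedTimestamp. The NewOpenMetricsParser signature with a symbol-table argument is an assumption for the textparse package of this vintage.

package main

import (
    "errors"
    "fmt"
    "io"

    "github.com/prometheus/prometheus/model/labels"
    "github.com/prometheus/prometheus/model/textparse"
)

func main() {
    exposition := []byte(`# TYPE test_metric counter
# HELP test_metric some help text
test_metric 1
test_metric_created 1000
# EOF
`)
    p := textparse.NewOpenMetricsParser(exposition, labels.NewSymbolTable())
    for {
        entry, err := p.Next()
        if errors.Is(err, io.EOF) {
            break
        }
        if err != nil {
            panic(err)
        }
        if entry != textparse.EntrySeries {
            continue
        }
        var lset labels.Labels
        p.Metric(&lset)
        // A non-nil pointer means a matching _created line was seen for the series.
        if ct := p.CreatedTimestamp(); ct != nil {
            fmt.Println(lset.Get(labels.MetricName), "created timestamp:", *ct)
        }
    }
}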

View file

@ -962,7 +962,7 @@ func TestScrapeLoopMetadata(t *testing.T) {
0,
false,
false,
false,
true, // EnableCTZeroIngestion
false,
false,
nil,
@ -977,30 +977,36 @@ func TestScrapeLoopMetadata(t *testing.T) {
# HELP test_metric some help text
# UNIT test_metric metric
test_metric 1
test_metric_created 1000
# TYPE test_metric_no_help gauge
# HELP test_metric_no_type other help text
# EOF`), "application/openmetrics-text", time.Now())
require.NoError(t, err)
require.NoError(t, slApp.Commit())
require.Equal(t, 1, total)
// total is 2 below because _created lines are still ingested as a metric when using OpenMetrics.
// TODO: _created lines should be ingested as metadata only, not as a metric.
require.Equal(t, 2, total)
md, ok := cache.GetMetadata("test_metric")
require.True(t, ok, "expected metadata to be present")
require.Equal(t, model.MetricTypeCounter, md.Type, "unexpected metric type")
require.Equal(t, "some help text", md.Help)
require.Equal(t, "metric", md.Unit)
require.Equal(t, int64(1000), md.CreatedTimestamp)
md, ok = cache.GetMetadata("test_metric_no_help")
require.True(t, ok, "expected metadata to be present")
require.Equal(t, model.MetricTypeGauge, md.Type, "unexpected metric type")
require.Equal(t, "", md.Help)
require.Equal(t, "", md.Unit)
require.Equal(t, int64(0), md.CreatedTimestamp)
md, ok = cache.GetMetadata("test_metric_no_type")
require.True(t, ok, "expected metadata to be present")
require.Equal(t, model.MetricTypeUnknown, md.Type, "unexpected metric type")
require.Equal(t, "other help text", md.Help)
require.Equal(t, "", md.Unit)
require.Equal(t, int64(0), md.CreatedTimestamp)
}
func simpleTestScrapeLoop(t testing.TB) (context.Context, *scrapeLoop) {

View file

@ -85,10 +85,11 @@ type MetricMetadataStore interface {
// MetricMetadata is a piece of metadata for a metric.
type MetricMetadata struct {
Metric string
Type model.MetricType
Help string
Unit string
Metric string
Type model.MetricType
Help string
Unit string
CreatedTimestamp int64
}
func (t *Target) ListMetadata() []MetricMetadata {

View file

@ -4268,9 +4268,9 @@ func TestMetadataInWAL(t *testing.T) {
// Add a first round of metadata to the first three series.
// Re-take the Appender, as the previous Commit will have it closed.
m1 := metadata.Metadata{Type: "gauge", Unit: "unit_1", Help: "help_1"}
m2 := metadata.Metadata{Type: "gauge", Unit: "unit_2", Help: "help_2"}
m3 := metadata.Metadata{Type: "gauge", Unit: "unit_3", Help: "help_3"}
m1 := metadata.Metadata{Type: "gauge", Unit: "unit_1", Help: "help_1", CreatedTimestamp: 1234}
m2 := metadata.Metadata{Type: "gauge", Unit: "unit_2", Help: "help_2", CreatedTimestamp: 1000}
m3 := metadata.Metadata{Type: "gauge", Unit: "unit_3", Help: "help_3"} // Last one without Created Timestamp.
app = db.Appender(ctx)
updateMetadata(t, app, s1, m1)
updateMetadata(t, app, s2, m2)
@ -4280,8 +4280,8 @@ func TestMetadataInWAL(t *testing.T) {
// Add a replicated metadata entry to the first series,
// a completely new metadata entry for the fourth series,
// and a changed metadata entry to the second series.
m4 := metadata.Metadata{Type: "counter", Unit: "unit_4", Help: "help_4"}
m5 := metadata.Metadata{Type: "counter", Unit: "unit_5", Help: "help_5"}
m4 := metadata.Metadata{Type: "counter", Unit: "unit_4", Help: "help_4", CreatedTimestamp: 2000}
m5 := metadata.Metadata{Type: "counter", Unit: "unit_5", Help: "help_5", CreatedTimestamp: 3000}
app = db.Appender(ctx)
updateMetadata(t, app, s1, m1)
updateMetadata(t, app, s4, m4)
@ -4298,11 +4298,11 @@ func TestMetadataInWAL(t *testing.T) {
}
expectedMetadata := []record.RefMetadata{
{Ref: 1, Type: record.GetMetricType(m1.Type), Unit: m1.Unit, Help: m1.Help},
{Ref: 2, Type: record.GetMetricType(m2.Type), Unit: m2.Unit, Help: m2.Help},
{Ref: 3, Type: record.GetMetricType(m3.Type), Unit: m3.Unit, Help: m3.Help},
{Ref: 4, Type: record.GetMetricType(m4.Type), Unit: m4.Unit, Help: m4.Help},
{Ref: 2, Type: record.GetMetricType(m5.Type), Unit: m5.Unit, Help: m5.Help},
{Ref: 1, Type: record.GetMetricType(m1.Type), Unit: m1.Unit, Help: m1.Help, CreatedTimestamp: m1.CreatedTimestamp},
{Ref: 2, Type: record.GetMetricType(m2.Type), Unit: m2.Unit, Help: m2.Help, CreatedTimestamp: m2.CreatedTimestamp},
{Ref: 3, Type: record.GetMetricType(m3.Type), Unit: m3.Unit, Help: m3.Help, CreatedTimestamp: 0},
{Ref: 4, Type: record.GetMetricType(m4.Type), Unit: m4.Unit, Help: m4.Help, CreatedTimestamp: m4.CreatedTimestamp},
{Ref: 2, Type: record.GetMetricType(m5.Type), Unit: m5.Unit, Help: m5.Help, CreatedTimestamp: m5.CreatedTimestamp},
}
require.Len(t, gotMetadataBlocks, 2)
require.Equal(t, expectedMetadata[:3], gotMetadataBlocks[0])

View file

@ -693,10 +693,11 @@ func (a *headAppender) UpdateMetadata(ref storage.SeriesRef, lset labels.Labels,
if hasNewMetadata {
a.metadata = append(a.metadata, record.RefMetadata{
Ref: s.ref,
Type: record.GetMetricType(meta.Type),
Unit: meta.Unit,
Help: meta.Help,
Ref: s.ref,
Type: record.GetMetricType(meta.Type),
Unit: meta.Unit,
Help: meta.Help,
CreatedTimestamp: meta.CreatedTimestamp,
})
a.metadataSeries = append(a.metadataSeries, s)
}
@ -1056,7 +1057,7 @@ func (a *headAppender) Commit() (err error) {
for i, m := range a.metadata {
series = a.metadataSeries[i]
series.Lock()
series.meta = &metadata.Metadata{Type: record.ToMetricType(m.Type), Unit: m.Unit, Help: m.Help}
series.meta = &metadata.Metadata{Type: record.ToMetricType(m.Type), Unit: m.Unit, Help: m.Help, CreatedTimestamp: m.CreatedTimestamp}
series.Unlock()
}
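
Aside (not part of this commit): a minimal end-to-end sketch of how metadata carrying a created timestamp reaches the WAL through the public appender API shown above. The tsdb.Open arguments and the temp-dir setup are illustrative assumptions.

package main

import (
    "context"
    "log"
    "os"

    "github.com/prometheus/prometheus/model/labels"
    "github.com/prometheus/prometheus/model/metadata"
    "github.com/prometheus/prometheus/tsdb"
)

func main() {
    dir, _ := os.MkdirTemp("", "tsdb-ct-metadata")
    defer os.RemoveAll(dir)

    db, err := tsdb.Open(dir, nil, nil, tsdb.DefaultOptions(), nil)
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    lset := labels.FromStrings("__name__", "http_requests_total", "job", "demo")
    app := db.Appender(context.Background())

    ref, err := app.Append(0, lset, 1000, 1)
    if err != nil {
        log.Fatal(err)
    }
    // The metadata record written to the WAL now carries the created timestamp.
    if _, err := app.UpdateMetadata(ref, lset, metadata.Metadata{
        Type:             "counter",
        Help:             "Total HTTP requests.",
        CreatedTimestamp: 1234, // example value in milliseconds
    }); err != nil {
        log.Fatal(err)
    }
    if err := app.Commit(); err != nil {
        log.Fatal(err)
    }
}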

View file

@ -134,8 +134,9 @@ func ToMetricType(m uint8) model.MetricType {
}
const (
unitMetaName = "UNIT"
helpMetaName = "HELP"
unitMetaName = "UNIT"
helpMetaName = "HELP"
createdTimestampName = "CREATED_TIMESTAMP"
)
// ErrNotFound is returned if a looked up resource was not found. Duplicate ErrNotFound from head.go.
@ -157,10 +158,11 @@ type RefSample struct {
// RefMetadata is the metadata associated with a series ID.
type RefMetadata struct {
Ref chunks.HeadSeriesRef
Type uint8
Unit string
Help string
Ref chunks.HeadSeriesRef
Type uint8
Unit string
Help string
CreatedTimestamp int64
}
// RefExemplar is an exemplar with the labels, timestamp, value the exemplar was collected/observed with, and a reference to a series.
@ -253,23 +255,31 @@ func (d *Decoder) Metadata(rec []byte, metadata []RefMetadata) ([]RefMetadata, e
// Besides TYPE, we're currently aware of three more metadata fields: UNIT, HELP, and CREATED_TIMESTAMP.
// We can skip the rest of the fields (if we encounter any), but we must decode them anyway
// so we can correctly align with the start of the next metadata record.
var unit, help string
var unit, help, fieldValueStr string
var ct, fieldValueInt int64
for i := 0; i < numFields; i++ {
fieldName := dec.UvarintStr()
fieldValue := dec.UvarintStr()
switch fieldName {
case unitMetaName:
unit = fieldValue
fieldValueStr = dec.UvarintStr()
unit = fieldValueStr
case helpMetaName:
help = fieldValue
fieldValueStr = dec.UvarintStr()
help = fieldValueStr
case createdTimestampName:
fieldValueInt = dec.Varint64()
ct = fieldValueInt
default:
_ = dec.UvarintStr() // To be skipped
}
}
metadata = append(metadata, RefMetadata{
Ref: chunks.HeadSeriesRef(ref),
Type: typ,
Unit: unit,
Help: help,
Ref: chunks.HeadSeriesRef(ref),
Type: typ,
Unit: unit,
Help: help,
CreatedTimestamp: ct,
})
}
if dec.Err() != nil {
@ -615,11 +625,13 @@ func (e *Encoder) Metadata(metadata []RefMetadata, b []byte) []byte {
buf.PutByte(m.Type)
buf.PutUvarint(2) // num_fields: We currently have two more metadata fields, UNIT and HELP.
buf.PutUvarint(3) // num_fields: We currently have three more metadata fields, UNIT, HELP, and CREATED_TIMESTAMP.
buf.PutUvarintStr(unitMetaName)
buf.PutUvarintStr(m.Unit)
buf.PutUvarintStr(helpMetaName)
buf.PutUvarintStr(m.Help)
buf.PutUvarintStr(createdTimestampName)
buf.PutVarint64(m.CreatedTimestamp)
}
return buf.Get()
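
Aside (not part of this commit): a short round-trip sketch of the extended metadata record. The NewDecoder constructor taking a symbol table is assumed for this vintage of the record package; older records without a CREATED_TIMESTAMP field simply decode with CreatedTimestamp set to 0.

package main

import (
    "fmt"

    "github.com/prometheus/prometheus/model/labels"
    "github.com/prometheus/prometheus/tsdb/record"
)

func main() {
    var enc record.Encoder
    dec := record.NewDecoder(labels.NewSymbolTable()) // assumed constructor; metadata decoding does not use labels

    in := []record.RefMetadata{{
        Ref:              100,
        Type:             uint8(record.Counter),
        Unit:             "seconds",
        Help:             "CPU time counter",
        CreatedTimestamp: 1000, // example value, matching the record tests below
    }}

    out, err := dec.Metadata(enc.Metadata(in, nil), nil)
    if err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", out[0]) // round-trips Unit, Help, and CreatedTimestamp
}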

View file

@ -49,22 +49,25 @@ func TestRecord_EncodeDecode(t *testing.T) {
metadata := []RefMetadata{
{
Ref: 100,
Type: uint8(Counter),
Unit: "",
Help: "some magic counter",
Ref: 100,
Type: uint8(Counter),
Unit: "",
Help: "some magic counter",
CreatedTimestamp: 1234,
},
{
Ref: 1,
Type: uint8(Counter),
Unit: "seconds",
Help: "CPU time counter",
Ref: 1,
Type: uint8(Counter),
Unit: "seconds",
Help: "CPU time counter",
CreatedTimestamp: 1000,
},
{
Ref: 147741,
Type: uint8(Gauge),
Unit: "percentage",
Help: "current memory usage",
Ref: 147741,
Type: uint8(Gauge),
Unit: "percentage",
Help: "current memory usage",
CreatedTimestamp: 1020,
},
}
decMetadata, err := dec.Metadata(enc.Metadata(metadata, nil), nil)
@ -329,21 +332,25 @@ func TestRecord_MetadataDecodeUnknownExtraFields(t *testing.T) {
// Write first metadata entry, all known fields.
enc.PutUvarint64(101)
enc.PutByte(byte(Counter))
enc.PutUvarint(2)
enc.PutUvarint(3)
enc.PutUvarintStr(unitMetaName)
enc.PutUvarintStr("")
enc.PutUvarintStr(helpMetaName)
enc.PutUvarintStr("some magic counter")
enc.PutUvarintStr(createdTimestampName)
enc.PutVarint64(1234)
// Write second metadata entry, known fields + unknown fields.
enc.PutUvarint64(99)
enc.PutByte(byte(Counter))
enc.PutUvarint(3)
enc.PutUvarint(4)
// Known fields.
enc.PutUvarintStr(unitMetaName)
enc.PutUvarintStr("seconds")
enc.PutUvarintStr(helpMetaName)
enc.PutUvarintStr("CPU time counter")
enc.PutUvarintStr(createdTimestampName)
enc.PutVarint64(1000)
// Unknown fields.
enc.PutUvarintStr("an extra field name to be skipped")
enc.PutUvarintStr("with its value")
@ -351,7 +358,7 @@ func TestRecord_MetadataDecodeUnknownExtraFields(t *testing.T) {
// Write third metadata entry, with unknown fields and different order.
enc.PutUvarint64(47250)
enc.PutByte(byte(Gauge))
enc.PutUvarint(4)
enc.PutUvarint(5)
enc.PutUvarintStr("extra name one")
enc.PutUvarintStr("extra value one")
enc.PutUvarintStr(helpMetaName)
@ -360,6 +367,8 @@ func TestRecord_MetadataDecodeUnknownExtraFields(t *testing.T) {
enc.PutUvarintStr("extra value two")
enc.PutUvarintStr(unitMetaName)
enc.PutUvarintStr("percentage")
enc.PutUvarintStr(createdTimestampName)
enc.PutVarint64(1020)
// Should yield known fields for all entries and skip over unknown fields.
expectedMetadata := []RefMetadata{
@ -368,16 +377,19 @@ func TestRecord_MetadataDecodeUnknownExtraFields(t *testing.T) {
Type: uint8(Counter),
Unit: "",
Help: "some magic counter",
CreatedTimestamp: 1234,
}, {
Ref: 99,
Type: uint8(Counter),
Unit: "seconds",
Help: "CPU time counter",
CreatedTimestamp: 1000,
}, {
Ref: 47250,
Type: uint8(Gauge),
Unit: "percentage",
Help: "current memory usage",
CreatedTimestamp: 1020,
},
}

View file

@ -1288,7 +1288,7 @@ func (api *API) metricMetadata(r *http.Request) apiFuncResult {
for _, t := range tt {
if metric == "" {
for _, mm := range t.ListMetadata() {
m := metadata.Metadata{Type: mm.Type, Help: mm.Help, Unit: mm.Unit}
m := metadata.Metadata{Type: mm.Type, Help: mm.Help, Unit: mm.Unit, CreatedTimestamp: mm.CreatedTimestamp}
ms, ok := metrics[mm.Metric]
if limitPerMetric > 0 && len(ms) >= limitPerMetric {

View file

@ -1137,7 +1137,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
}
tests := []test{
{
{ // 0
endpoint: api.query,
query: url.Values{
"query": []string{"2"},
@ -1151,7 +1151,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
},
{
{ // 1
endpoint: api.query,
query: url.Values{
"query": []string{"0.333"},
@ -1165,7 +1165,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
},
{
{ // 2
endpoint: api.query,
query: url.Values{
"query": []string{"0.333"},
@ -1179,7 +1179,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
},
{
{ // 3
endpoint: api.query,
query: url.Values{
"query": []string{"0.333"},
@ -1192,7 +1192,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
},
{
{ // 4
endpoint: api.queryRange,
query: url.Values{
"query": []string{"time()"},
@ -1215,7 +1215,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// Test empty vector result
{
{ // 5
endpoint: api.query,
query: url.Values{
"query": []string{"bottomk(2, notExists)"},
@ -1223,7 +1223,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
responseAsJSON: `{"resultType":"vector","result":[]}`,
},
// Test empty matrix result
{
{ // 6
endpoint: api.queryRange,
query: url.Values{
"query": []string{"bottomk(2, notExists)"},
@ -1234,7 +1234,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
responseAsJSON: `{"resultType":"matrix","result":[]}`,
},
// Missing query params in range queries.
{
{ // 7
endpoint: api.queryRange,
query: url.Values{
"query": []string{"time()"},
@ -1243,7 +1243,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
errType: errorBadData,
},
{
{ // 8
endpoint: api.queryRange,
query: url.Values{
"query": []string{"time()"},
@ -1252,7 +1252,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
errType: errorBadData,
},
{
{ // 9
endpoint: api.queryRange,
query: url.Values{
"query": []string{"time()"},
@ -1262,7 +1262,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
errType: errorBadData,
},
// Bad query expression.
{
{ // 10
endpoint: api.query,
query: url.Values{
"query": []string{"invalid][query"},
@ -1270,7 +1270,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
errType: errorBadData,
},
{
{ // 11
endpoint: api.queryRange,
query: url.Values{
"query": []string{"invalid][query"},
@ -1281,7 +1281,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
errType: errorBadData,
},
// Invalid step.
{
{ // 12
endpoint: api.queryRange,
query: url.Values{
"query": []string{"time()"},
@ -1292,7 +1292,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
errType: errorBadData,
},
// Start after end.
{
{ // 13
endpoint: api.queryRange,
query: url.Values{
"query": []string{"time()"},
@ -1303,7 +1303,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
errType: errorBadData,
},
// Start overflows int64 internally.
{
{ // 14
endpoint: api.queryRange,
query: url.Values{
"query": []string{"time()"},
@ -1313,21 +1313,21 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
errType: errorBadData,
},
{
{ // 15
endpoint: api.formatQuery,
query: url.Values{
"query": []string{"foo+bar"},
},
response: "foo + bar",
},
{
{ // 16
endpoint: api.formatQuery,
query: url.Values{
"query": []string{"invalid_expression/"},
},
errType: errorBadData,
},
{
{ // 17
endpoint: api.series,
query: url.Values{
"match[]": []string{`test_metric2`},
@ -1336,14 +1336,14 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
},
},
{
{ // 18
endpoint: api.series,
query: url.Values{
"match[]": []string{`{foo=""}`},
},
errType: errorBadData,
},
{
{ // 19
endpoint: api.series,
query: url.Values{
"match[]": []string{`test_metric1{foo=~".+o"}`},
@ -1352,7 +1352,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
labels.FromStrings("__name__", "test_metric1", "foo", "boo"),
},
},
{
{ // 20
endpoint: api.series,
query: url.Values{
"match[]": []string{`test_metric1{foo=~".+o$"}`, `test_metric1{foo=~".+o"}`},
@ -1362,7 +1362,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// Try to overlap the selected series set as much as possible to test that result de-duplication works well.
{
{ // 21
endpoint: api.series,
query: url.Values{
"match[]": []string{`test_metric4{foo=~".+o$"}`, `test_metric4{dup=~"^1"}`},
@ -1373,7 +1373,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
labels.FromStrings("__name__", "test_metric4", "foo", "boo"),
},
},
{
{ // 22
endpoint: api.series,
query: url.Values{
"match[]": []string{`test_metric1{foo=~".+o"}`, `none`},
@ -1383,7 +1383,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// Start and end before series starts.
{
{ // 23
endpoint: api.series,
query: url.Values{
"match[]": []string{`test_metric2`},
@ -1393,7 +1393,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
response: []labels.Labels{},
},
// Start and end after series ends.
{
{ // 24
endpoint: api.series,
query: url.Values{
"match[]": []string{`test_metric2`},
@ -1403,7 +1403,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
response: []labels.Labels{},
},
// Start before series starts, end after series ends.
{
{ // 25
endpoint: api.series,
query: url.Values{
"match[]": []string{`test_metric2`},
@ -1415,7 +1415,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// Start and end within series.
{
{ // 26
endpoint: api.series,
query: url.Values{
"match[]": []string{`test_metric2`},
@ -1427,7 +1427,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// Start within series, end after.
{
{ // 27
endpoint: api.series,
query: url.Values{
"match[]": []string{`test_metric2`},
@ -1439,7 +1439,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// Start before series, end within series.
{
{ // 28
endpoint: api.series,
query: url.Values{
"match[]": []string{`test_metric2`},
@ -1451,7 +1451,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// Series request with limit.
{
{ // 29
endpoint: api.series,
query: url.Values{
"match[]": []string{"test_metric1"},
@ -1460,7 +1460,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
responseLen: 1, // API does not specify which particular value will come back.
warningsCount: 1,
},
{
{ // 30
endpoint: api.series,
query: url.Values{
"match[]": []string{"test_metric1"},
@ -1469,7 +1469,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
responseLen: 2, // API does not specify which particular value will come back.
warningsCount: 0, // No warnings if limit isn't exceeded.
},
{
{ // 31
endpoint: api.series,
query: url.Values{
"match[]": []string{"test_metric1"},
@ -1479,15 +1479,15 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
warningsCount: 0, // No warnings if limit isn't exceeded.
},
// Missing match[] query params in series requests.
{
{ // 32
endpoint: api.series,
errType: errorBadData,
},
{
{ // 33
endpoint: api.dropSeries,
errType: errorInternal,
},
{
{ // 34
endpoint: api.targets,
response: &TargetDiscovery{
ActiveTargets: []*Target{
@ -1533,7 +1533,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
DroppedTargetCounts: map[string]int{"blackbox": 1},
},
},
{
{ // 35
endpoint: api.targets,
query: url.Values{
"state": []string{"any"},
@ -1582,7 +1582,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
DroppedTargetCounts: map[string]int{"blackbox": 1},
},
},
{
{ // 36
endpoint: api.targets,
query: url.Values{
"state": []string{"active"},
@ -1619,7 +1619,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
DroppedTargets: []*DroppedTarget{},
},
},
{
{ // 37
endpoint: api.targets,
query: url.Values{
"state": []string{"Dropped"},
@ -1642,7 +1642,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// With a matching metric.
{
{ // 38
endpoint: api.targetMetadata,
query: url.Values{
"metric": []string{"go_threads"},
@ -1672,7 +1672,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// With a matching target.
{
{ // 39
endpoint: api.targetMetadata,
query: url.Values{
"match_target": []string{"{job=\"blackbox\"}"},
@ -1703,7 +1703,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// Without a target or metric.
{
{ // 40
endpoint: api.targetMetadata,
metadata: []targetMetadata{
{
@ -1757,14 +1757,14 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// Without a matching metric.
{
{ // 41
endpoint: api.targetMetadata,
query: url.Values{
"match_target": []string{"{job=\"non-existentblackbox\"}"},
},
response: []metricMetadata{},
},
{
{ // 42
endpoint: api.alertmanagers,
response: &AlertmanagerDiscovery{
ActiveAlertmanagers: []*AlertmanagerTarget{
@ -1780,7 +1780,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// With metadata available.
{
{ // 43
endpoint: api.metricMetadata,
metadata: []targetMetadata{
{
@ -1797,20 +1797,21 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
Type: model.MetricTypeGauge,
Help: "Information about the Go environment.",
Unit: "",
CreatedTimestamp: 1000,
},
},
},
},
response: map[string][]metadata.Metadata{
"prometheus_engine_query_duration_seconds": {{Type: model.MetricTypeSummary, Help: "Query timings", Unit: ""}},
"go_info": {{Type: model.MetricTypeGauge, Help: "Information about the Go environment.", Unit: ""}},
"go_info": {{Type: model.MetricTypeGauge, Help: "Information about the Go environment.", Unit: "", CreatedTimestamp: 1000}},
},
responseAsJSON: `{"prometheus_engine_query_duration_seconds":[{"type":"summary","unit":"",
"help":"Query timings"}], "go_info":[{"type":"gauge","unit":"",
"help":"Information about the Go environment."}]}`,
"help":"Query timings", "created_timestamp": 0}], "go_info":[{"type":"gauge","unit":"",
"help":"Information about the Go environment.", "created_timestamp": 1000}]}`,
},
// With duplicate metadata for a metric that comes from different targets.
{
{ // 44
endpoint: api.metricMetadata,
metadata: []targetMetadata{
{
@ -1840,10 +1841,10 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
"go_threads": {{Type: model.MetricTypeGauge, Help: "Number of OS threads created"}},
},
responseAsJSON: `{"go_threads": [{"type":"gauge","unit":"",
"help":"Number of OS threads created"}]}`,
"help":"Number of OS threads created", "created_timestamp": 0}]}`,
},
// With non-duplicate metadata for the same metric from different targets.
{
{ // 45
endpoint: api.metricMetadata,
metadata: []targetMetadata{
{
@ -1876,8 +1877,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
responseAsJSON: `{"go_threads": [{"type":"gauge","unit":"",
"help":"Number of OS threads created"},{"type":"gauge","unit":"",
"help":"Number of OS threads that were created."}]}`,
"help":"Number of OS threads created", "created_timestamp": 0},{"type":"gauge","unit":"",
"help":"Number of OS threads that were created.", "created_timestamp": 0}]}`,
sorter: func(m interface{}) {
v := m.(map[string][]metadata.Metadata)["go_threads"]
@ -1887,7 +1888,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// With a limit for the number of metrics returned.
{
{ // 46
endpoint: api.metricMetadata,
query: url.Values{
"limit": []string{"2"},
@ -1925,7 +1926,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
responseLen: 2,
},
// With a limit for the number of metadata per metric.
{
{ // 47
endpoint: api.metricMetadata,
query: url.Values{"limit_per_metric": []string{"1"}},
metadata: []targetMetadata{
@ -1961,10 +1962,10 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
{Type: model.MetricTypeSummary, Help: "A summary of the GC invocation durations."},
},
},
responseAsJSON: `{"go_gc_duration_seconds":[{"help":"A summary of the GC invocation durations.","type":"summary","unit":""}],"go_threads": [{"type":"gauge","unit":"","help":"Number of OS threads created"}]}`,
responseAsJSON: `{"go_gc_duration_seconds":[{"help":"A summary of the GC invocation durations.","type":"summary","unit":"", "created_timestamp": 0}],"go_threads": [{"type":"gauge","unit":"","help":"Number of OS threads created", "created_timestamp": 0}]}`,
},
// With a limit for the number of metadata per metric and a limit for the number of metrics.
{
{ // 48
endpoint: api.metricMetadata,
query: url.Values{"limit_per_metric": []string{"1"}, "limit": []string{"1"}},
metadata: []targetMetadata{
@ -1997,7 +1998,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
// With a limit for the number of metadata per metric and a limit for the number of metrics, while having multiple targets.
{
{ // 49
endpoint: api.metricMetadata,
query: url.Values{"limit_per_metric": []string{"1"}, "limit": []string{"1"}},
metadata: []targetMetadata{
@ -2046,7 +2047,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
responseMetadataTotal: 1,
},
// When requesting a specific metric that is present.
{
{ // 50
endpoint: api.metricMetadata,
query: url.Values{"metric": []string{"go_threads"}},
metadata: []targetMetadata{
@ -2085,7 +2086,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
{Type: model.MetricTypeGauge, Help: "Number of OS threads that were created."},
},
},
responseAsJSON: `{"go_threads": [{"type":"gauge","unit":"","help":"Number of OS threads created"},{"type":"gauge","unit":"","help":"Number of OS threads that were created."}]}`,
responseAsJSON: `{"go_threads": [{"type":"gauge","unit":"","help":"Number of OS threads created", "created_timestamp": 0},{"type":"gauge","unit":"","help":"Number of OS threads that were created.", "created_timestamp": 0}]}`,
sorter: func(m interface{}) {
v := m.(map[string][]metadata.Metadata)["go_threads"]
@ -2095,7 +2096,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// With a specific metric that is not present.
{
{ // 51
endpoint: api.metricMetadata,
query: url.Values{"metric": []string{"go_gc_duration_seconds"}},
metadata: []targetMetadata{
@ -2114,21 +2115,21 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
response: map[string][]metadata.Metadata{},
},
// With no available metadata.
{
{ // 52
endpoint: api.metricMetadata,
response: map[string][]metadata.Metadata{},
},
{
{ // 53
endpoint: api.serveConfig,
response: &prometheusConfig{
YAML: samplePrometheusCfg.String(),
},
},
{
{ // 54
endpoint: api.serveFlags,
response: sampleFlagMap,
},
{
{ // 55
endpoint: api.alerts,
response: &AlertDiscovery{
Alerts: []*Alert{
@ -2149,7 +2150,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
}
},
},
{
{ // 56
endpoint: api.rules,
response: &RuleDiscovery{
RuleGroups: []*RuleGroup{
@ -2241,7 +2242,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
zeroFunc: rulesZeroFunc,
},
{
{ // 57
endpoint: api.rules,
query: url.Values{
"exclude_alerts": []string{"true"},
@ -2329,7 +2330,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
zeroFunc: rulesZeroFunc,
},
{
{ // 58
endpoint: api.rules,
query: url.Values{
"type": []string{"alert"},
@ -2410,7 +2411,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
zeroFunc: rulesZeroFunc,
},
{
{ // 59
endpoint: api.rules,
query: url.Values{
"type": []string{"record"},
@ -2443,7 +2444,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
zeroFunc: rulesZeroFunc,
},
{
{ // 60
endpoint: api.rules,
query: url.Values{"rule_name[]": []string{"test_metric4"}},
response: &RuleDiscovery{
@ -2471,12 +2472,12 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
zeroFunc: rulesZeroFunc,
},
{
{ // 61
endpoint: api.rules,
query: url.Values{"rule_group[]": []string{"respond-with-nothing"}},
response: &RuleDiscovery{RuleGroups: []*RuleGroup{}},
},
{
{ // 62
endpoint: api.rules,
query: url.Values{"file[]": []string{"/path/to/file"}, "rule_name[]": []string{"test_metric4"}},
response: &RuleDiscovery{
@ -2504,7 +2505,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
zeroFunc: rulesZeroFunc,
},
{
{ // 63
endpoint: api.rules,
query: url.Values{
"match[]": []string{`{testlabel="rule"}`},
@ -2541,7 +2542,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
zeroFunc: rulesZeroFunc,
},
{
{ // 64
endpoint: api.rules,
query: url.Values{
"type": []string{"alert"},
@ -2572,7 +2573,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
zeroFunc: rulesZeroFunc,
},
{
{ // 65
endpoint: api.rules,
query: url.Values{
"match[]": []string{`{testlabel="abc"}`},
@ -2582,7 +2583,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
// This tests the OR condition: the API response should return a rule if it matches at least one of the label selectors.
{
{ // 66
endpoint: api.rules,
query: url.Values{
"match[]": []string{`{testlabel="abc"}`, `{testlabel="rule"}`},
@ -2619,7 +2620,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
zeroFunc: rulesZeroFunc,
},
{
{ // 67
endpoint: api.rules,
query: url.Values{
"type": []string{"record"},
@ -2646,7 +2647,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
zeroFunc: rulesZeroFunc,
},
{
{ // 68
endpoint: api.rules,
query: url.Values{
"type": []string{"alert"},
@ -2677,7 +2678,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
zeroFunc: rulesZeroFunc,
},
{
{ // 69
endpoint: api.queryExemplars,
query: url.Values{
"query": []string{`test_metric3{foo="boo"} - test_metric4{foo="bar"}`},
@ -2710,7 +2711,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
},
{
{ // 70
endpoint: api.queryExemplars,
query: url.Values{
"query": []string{`{foo="boo"}`},
@ -2730,7 +2731,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
},
{
{ // 71
endpoint: api.queryExemplars,
query: url.Values{
"query": []string{`{foo="boo"}`},
@ -2753,7 +2754,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
},
},
},
{
{ // 72
endpoint: api.queryExemplars,
query: url.Values{
"query": []string{`{__name__="test_metric5"}`},