Standardise exemplar label as "trace_id"

This is consistent with the OpenTelemetry specification and with the example given in OpenMetrics.

https://github.com/open-telemetry/opentelemetry-specification/blob/89aa01348139/specification/metrics/data-model.md#exemplars
https://github.com/OpenObservability/OpenMetrics/blob/138654493130/specification/OpenMetrics.md#exemplars-1
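
For illustration only (not part of this commit), here is a minimal sketch of how an instrumented Go application would attach an exemplar under the standardised label, assuming the `ExemplarObserver` interface from `prometheus/client_golang`; the names `requestDuration` and `observeWithTrace` are placeholders.

```go
package example

import "github.com/prometheus/client_golang/prometheus"

var requestDuration = prometheus.NewHistogram(prometheus.HistogramOpts{
	Name:    "http_request_duration_seconds",
	Help:    "Request latency in seconds.",
	Buckets: prometheus.DefBuckets,
})

// observeWithTrace records a latency observation and, when the histogram
// supports exemplars, attaches one keyed by the standardised "trace_id" label.
func observeWithTrace(seconds float64, traceID string) {
	if eo, ok := requestDuration.(prometheus.ExemplarObserver); ok {
		eo.ObserveWithExemplar(seconds, prometheus.Labels{"trace_id": traceID})
		return
	}
	requestDuration.Observe(seconds)
}
```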

Signed-off-by: Bryan Boreham <bjboreham@gmail.com>
Bryan Boreham 2024-02-15 14:19:54 +00:00
parent d595f5a9b1
commit c0e36e6bb3
11 changed files with 33 additions and 33 deletions

@@ -34,7 +34,7 @@ Activating the remote write receiver via a feature flag is deprecated. Use `--we
[OpenMetrics](https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#exemplars) introduces the ability for scrape targets to add exemplars to certain metrics. Exemplars are references to data outside of the MetricSet. A common use case are IDs of program traces.
-Exemplar storage is implemented as a fixed size circular buffer that stores exemplars in memory for all series. Enabling this feature will enable the storage of exemplars scraped by Prometheus. The config file block [storage](configuration/configuration.md#configuration-file)/[exemplars](configuration/configuration.md#exemplars) can be used to control the size of circular buffer by # of exemplars. An exemplar with just a `traceID=<jaeger-trace-id>` uses roughly 100 bytes of memory via the in-memory exemplar storage. If the exemplar storage is enabled, we will also append the exemplars to WAL for local persistence (for WAL duration).
+Exemplar storage is implemented as a fixed size circular buffer that stores exemplars in memory for all series. Enabling this feature will enable the storage of exemplars scraped by Prometheus. The config file block [storage](configuration/configuration.md#configuration-file)/[exemplars](configuration/configuration.md#exemplars) can be used to control the size of circular buffer by # of exemplars. An exemplar with just a `trace_id=<jaeger-trace-id>` uses roughly 100 bytes of memory via the in-memory exemplar storage. If the exemplar storage is enabled, we will also append the exemplars to WAL for local persistence (for WAL duration).
## Memory snapshot on shutdown
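
The documentation hunk above describes what exemplar storage keeps per exemplar. As a rough illustration of the "one `trace_id` label plus value and timestamp" shape that the ~100-byte estimate refers to, here is a sketch using Prometheus' own model packages (the same ones touched elsewhere in this commit); `newTraceExemplar` is a hypothetical helper, not code from the repository:

```go
package example

import (
	"github.com/prometheus/prometheus/model/exemplar"
	"github.com/prometheus/prometheus/model/labels"
)

// newTraceExemplar builds an exemplar carrying only the standardised
// "trace_id" label alongside its value and millisecond timestamp.
func newTraceExemplar(traceID string, value float64, tsMillis int64) exemplar.Exemplar {
	return exemplar.Exemplar{
		Labels: labels.FromStrings("trace_id", traceID),
		Value:  value,
		Ts:     tsMillis,
	}
}
```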

@@ -404,7 +404,7 @@ $ curl -g 'http://localhost:9090/api/v1/query_exemplars?query=test_exemplar_metr
"exemplars": [
{
"labels": {
"traceID": "EpTxMJ40fUus7aGY"
"trace_id": "EpTxMJ40fUus7aGY"
},
"value": "6",
"timestamp": 1600096945.479
@@ -421,14 +421,14 @@ $ curl -g 'http://localhost:9090/api/v1/query_exemplars?query=test_exemplar_metr
"exemplars": [
{
"labels": {
"traceID": "Olp9XHlq763ccsfa"
"trace_id": "Olp9XHlq763ccsfa"
},
"value": "19",
"timestamp": 1600096955.479
},
{
"labels": {
"traceID": "hCtjygkIHwAN9vs4"
"trace_id": "hCtjygkIHwAN9vs4"
},
"value": "20",
"timestamp": 1600096965.489

@@ -464,7 +464,7 @@ func (g *Group) Eval(ctx context.Context, ts time.Time) {
}(time.Now())
if sp.SpanContext().IsSampled() && sp.SpanContext().HasTraceID() {
logger = log.WithPrefix(logger, "traceID", sp.SpanContext().TraceID())
logger = log.WithPrefix(logger, "trace_id", sp.SpanContext().TraceID())
}
g.metrics.EvalTotal.WithLabelValues(GroupKey(g.File(), g.Name())).Inc()

@@ -585,7 +585,7 @@ func createExemplars(numExemplars, numSeries int) ([]record.RefExemplar, []recor
Ref: chunks.HeadSeriesRef(i),
T: int64(j),
V: float64(i),
Labels: labels.FromStrings("traceID", fmt.Sprintf("trace-%d", i)),
Labels: labels.FromStrings("trace_id", fmt.Sprintf("trace-%d", i)),
}
exemplars = append(exemplars, e)
}

@@ -40,7 +40,7 @@ func TestValidateExemplar(t *testing.T) {
l := labels.FromStrings("service", "asdf")
e := exemplar.Exemplar{
Labels: labels.FromStrings("traceID", "qwerty"),
Labels: labels.FromStrings("trace_id", "qwerty"),
Value: 0.1,
Ts: 1,
}
@@ -49,7 +49,7 @@ func TestValidateExemplar(t *testing.T) {
require.NoError(t, es.AddExemplar(l, e))
e2 := exemplar.Exemplar{
Labels: labels.FromStrings("traceID", "zxcvb"),
Labels: labels.FromStrings("trace_id", "zxcvb"),
Value: 0.1,
Ts: 2,
}
@@ -82,7 +82,7 @@ func TestAddExemplar(t *testing.T) {
l := labels.FromStrings("service", "asdf")
e := exemplar.Exemplar{
Labels: labels.FromStrings("traceID", "qwerty"),
Labels: labels.FromStrings("trace_id", "qwerty"),
Value: 0.1,
Ts: 1,
}
@@ -91,7 +91,7 @@ func TestAddExemplar(t *testing.T) {
require.Equal(t, 0, es.index[string(l.Bytes(nil))].newest, "exemplar was not stored correctly")
e2 := exemplar.Exemplar{
Labels: labels.FromStrings("traceID", "zxcvb"),
Labels: labels.FromStrings("trace_id", "zxcvb"),
Value: 0.1,
Ts: 2,
}
@@ -132,7 +132,7 @@ func TestStorageOverflow(t *testing.T) {
var eList []exemplar.Exemplar
for i := 0; i < len(es.exemplars)+1; i++ {
e := exemplar.Exemplar{
Labels: labels.FromStrings("traceID", "a"),
Labels: labels.FromStrings("trace_id", "a"),
Value: float64(i+1) / 10,
Ts: int64(101 + i),
}
@@ -158,7 +158,7 @@ func TestSelectExemplar(t *testing.T) {
lName, lValue := "service", "asdf"
l := labels.FromStrings(lName, lValue)
e := exemplar.Exemplar{
Labels: labels.FromStrings("traceID", "querty"),
Labels: labels.FromStrings("trace_id", "querty"),
Value: 0.1,
Ts: 12,
}
@@ -189,7 +189,7 @@ func TestSelectExemplar_MultiSeries(t *testing.T) {
for i := 0; i < len(es.exemplars); i++ {
e1 := exemplar.Exemplar{
Labels: labels.FromStrings("traceID", "a"),
Labels: labels.FromStrings("trace_id", "a"),
Value: float64(i+1) / 10,
Ts: int64(101 + i),
}
@@ -197,7 +197,7 @@ func TestSelectExemplar_MultiSeries(t *testing.T) {
require.NoError(t, err)
e2 := exemplar.Exemplar{
Labels: labels.FromStrings("traceID", "b"),
Labels: labels.FromStrings("trace_id", "b"),
Value: float64(i+1) / 10,
Ts: int64(101 + i),
}
@@ -231,7 +231,7 @@ func TestSelectExemplar_TimeRange(t *testing.T) {
for i := 0; int64(i) < lenEs; i++ {
err := es.AddExemplar(l, exemplar.Exemplar{
Labels: labels.FromStrings("traceID", strconv.Itoa(i)),
Labels: labels.FromStrings("trace_id", strconv.Itoa(i)),
Value: 0.1,
Ts: int64(101 + i),
})
@@ -255,7 +255,7 @@ func TestSelectExemplar_DuplicateSeries(t *testing.T) {
es := exs.(*CircularExemplarStorage)
e := exemplar.Exemplar{
Labels: labels.FromStrings("traceID", "qwerty"),
Labels: labels.FromStrings("trace_id", "qwerty"),
Value: 0.1,
Ts: 12,
}
@@ -413,7 +413,7 @@ func TestResize(t *testing.T) {
func BenchmarkAddExemplar(b *testing.B) {
// We need to include these labels since we do length calculation
// before adding.
exLabels := labels.FromStrings("traceID", "89620921")
exLabels := labels.FromStrings("trace_id", "89620921")
for _, n := range []int{10000, 100000, 1000000} {
b.Run(fmt.Sprintf("%d", n), func(b *testing.B) {

@@ -375,7 +375,7 @@ func BenchmarkLoadWLs(b *testing.B) {
Ref: chunks.HeadSeriesRef(k) * 101,
T: int64(i) * 10,
V: float64(i) * 100,
Labels: labels.FromStrings("traceID", fmt.Sprintf("trace-%d", i)),
Labels: labels.FromStrings("trace_id", fmt.Sprintf("trace-%d", i)),
})
}
populateTestWL(b, wal, []interface{}{refExemplars})
@@ -660,7 +660,7 @@ func TestHead_ReadWAL(t *testing.T) {
{Ref: 0, Intervals: []tombstones.Interval{{Mint: 99, Maxt: 101}}},
},
[]record.RefExemplar{
-{Ref: 10, T: 100, V: 1, Labels: labels.FromStrings("traceID", "asdf")},
+{Ref: 10, T: 100, V: 1, Labels: labels.FromStrings("trace_id", "asdf")},
},
}
@@ -709,7 +709,7 @@ func TestHead_ReadWAL(t *testing.T) {
require.NoError(t, err)
e, err := q.Select(0, 1000, []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "1")})
require.NoError(t, err)
-require.True(t, exemplar.Exemplar{Ts: 100, Value: 1, Labels: labels.FromStrings("traceID", "asdf")}.Equals(e[0].Exemplars[0]))
+require.True(t, exemplar.Exemplar{Ts: 100, Value: 1, Labels: labels.FromStrings("trace_id", "asdf")}.Equals(e[0].Exemplars[0]))
})
}
}
@@ -3049,7 +3049,7 @@ func TestHeadExemplars(t *testing.T) {
head, _ := newTestHead(t, chunkRange, wlog.CompressionNone, false)
app := head.Appender(context.Background())
l := labels.FromStrings("traceId", "123")
l := labels.FromStrings("trace_id", "123")
// It is perfectly valid to add Exemplars before the current start time -
// histogram buckets that haven't been update in a while could still be
// exported exemplars from an hour ago.
@@ -3694,7 +3694,7 @@ func TestChunkSnapshot(t *testing.T) {
e := ex{
seriesLabels: lbls,
e: exemplar.Exemplar{
Labels: labels.FromStrings("traceID", fmt.Sprintf("%d", rand.Int())),
Labels: labels.FromStrings("trace_id", fmt.Sprintf("%d", rand.Int())),
Value: rand.Float64(),
Ts: ts,
},

@@ -102,9 +102,9 @@ func TestRecord_EncodeDecode(t *testing.T) {
}, decTstones)
exemplars := []RefExemplar{
-{Ref: 0, T: 12423423, V: 1.2345, Labels: labels.FromStrings("traceID", "qwerty")},
-{Ref: 123, T: -1231, V: -123, Labels: labels.FromStrings("traceID", "asdf")},
-{Ref: 2, T: 0, V: 99999, Labels: labels.FromStrings("traceID", "zxcv")},
+{Ref: 0, T: 12423423, V: 1.2345, Labels: labels.FromStrings("trace_id", "qwerty")},
+{Ref: 123, T: -1231, V: -123, Labels: labels.FromStrings("trace_id", "asdf")},
+{Ref: 2, T: 0, V: 99999, Labels: labels.FromStrings("trace_id", "zxcv")},
}
decExemplars, err := dec.Exemplars(enc.Exemplars(exemplars, nil), nil)
require.NoError(t, err)
@@ -227,7 +227,7 @@ func TestRecord_Corrupted(t *testing.T) {
t.Run("Test corrupted exemplar record", func(t *testing.T) {
exemplars := []RefExemplar{
-{Ref: 0, T: 12423423, V: 1.2345, Labels: labels.FromStrings("traceID", "asdf")},
+{Ref: 0, T: 12423423, V: 1.2345, Labels: labels.FromStrings("trace_id", "asdf")},
}
corrupted := enc.Exemplars(exemplars, nil)[:8]

@@ -202,7 +202,7 @@ func TestCheckpoint(t *testing.T) {
histogramsInWAL += 4
b = enc.Exemplars([]record.RefExemplar{
-{Ref: 1, T: last, V: float64(i), Labels: labels.FromStrings("traceID", fmt.Sprintf("trace-%d", i))},
+{Ref: 1, T: last, V: float64(i), Labels: labels.FromStrings("trace_id", fmt.Sprintf("trace-%d", i))},
}, nil)
require.NoError(t, w.Log(b))

@@ -169,7 +169,7 @@ func TestTailSamples(t *testing.T) {
Ref: chunks.HeadSeriesRef(inner),
T: now.UnixNano() + 1,
V: float64(i),
Labels: labels.FromStrings("traceID", fmt.Sprintf("trace-%d", inner)),
Labels: labels.FromStrings("trace_id", fmt.Sprintf("trace-%d", inner)),
},
}, nil)
require.NoError(t, w.Log(exemplar))

@@ -134,14 +134,14 @@ func TestJsonCodec_Encode(t *testing.T) {
SeriesLabels: labels.FromStrings("foo", "bar"),
Exemplars: []exemplar.Exemplar{
{
Labels: labels.FromStrings("traceID", "abc"),
Labels: labels.FromStrings("trace_id", "abc"),
Value: 100.123,
Ts: 1234,
},
},
},
},
expected: `{"status":"success","data":[{"seriesLabels":{"foo":"bar"},"exemplars":[{"labels":{"traceID":"abc"},"value":"100.123","timestamp":1.234}]}]}`,
expected: `{"status":"success","data":[{"seriesLabels":{"foo":"bar"},"exemplars":[{"labels":{"trace_id":"abc"},"value":"100.123","timestamp":1.234}]}]}`,
},
{
response: []exemplar.QueryResult{
@@ -149,14 +149,14 @@ func TestJsonCodec_Encode(t *testing.T) {
SeriesLabels: labels.FromStrings("foo", "bar"),
Exemplars: []exemplar.Exemplar{
{
Labels: labels.FromStrings("traceID", "abc"),
Labels: labels.FromStrings("trace_id", "abc"),
Value: math.Inf(1),
Ts: 1234,
},
},
},
},
expected: `{"status":"success","data":[{"seriesLabels":{"foo":"bar"},"exemplars":[{"labels":{"traceID":"abc"},"value":"+Inf","timestamp":1.234}]}]}`,
expected: `{"status":"success","data":[{"seriesLabels":{"foo":"bar"},"exemplars":[{"labels":{"trace_id":"abc"},"value":"+Inf","timestamp":1.234}]}]}`,
},
}

@@ -80,7 +80,7 @@ describe('Graph', () => {
exemplars: [
{
labels: {
-traceID: '12345',
+trace_id: '12345',
},
timestamp: 1572130580,
value: '9',