feat: initial implementation of CreatedTimestamp() with tests

Signed-off-by: Manik Rana <manikrana54@gmail.com>
Manik Rana 2024-06-27 19:24:47 +05:30
parent cb7306155b
commit 7e8899241a
3 changed files with 104 additions and 4 deletions


@@ -222,9 +222,82 @@ func (p *OpenMetricsParser) Exemplar(e *exemplar.Exemplar) bool {
// CreatedTimestamp returns nil as it's not implemented yet.
// TODO(bwplotka): https://github.com/prometheus/prometheus/issues/12980
func (p *OpenMetricsParser) CreatedTimestamp() *int64 {
	return nil
}
	newParser := deepCopyParser(p)
	switch t, _ := newParser.Next(); t {
	case EntrySeries:
		for {
			// Check for the _created suffix.
			var lbs labels.Labels
			newParser.Metric(&lbs)
			name := lbs.Get(model.MetricNameLabel)
			if len(name) < 8 || name[len(name)-8:] != "_created" {
				return nil
			}
			var labelsPrev labels.Labels
			p.Metric(&labelsPrev)
			namePrev := labelsPrev.Get(model.MetricNameLabel)
			// A case like foo_total != foo_created fails here even if the underlying metric is the same.
			if namePrev != name {
				return nil
			}
			// TODO: handle histograms.
			if t, _ := newParser.Next(); t != EntrySeries {
				return nil
			}
		}
	default:
		// If it's not a series, we don't care.
	}
	return nil
}
// We need this because we want the offsets of the original parser to stay
// unchanged while we work with a new parser in CreatedTimestamp().
func deepCopyParser(p *OpenMetricsParser) OpenMetricsParser {
	newB := make([]byte, len(p.l.b))
	copy(newB, p.l.b)
	newLexer := &openMetricsLexer{
		b:     newB,
		i:     p.l.i,
		start: p.l.start,
		err:   p.l.err,
		state: p.l.state,
	}
	newSeries := make([]byte, len(p.series))
	copy(newSeries, p.series)
	newText := make([]byte, len(p.text))
	copy(newText, p.text)
	newOffsets := make([]int, len(p.offsets))
	copy(newOffsets, p.offsets)
	newEOffsets := p.eOffsets
	newExemplar := p.exemplar
	newParser := OpenMetricsParser{
		l:             newLexer,
		builder:       p.builder,
		series:        newSeries,
		text:          newText,
		mtype:         p.mtype,
		val:           p.val,
		ts:            p.ts,
		hasTS:         p.hasTS,
		start:         p.start,
		offsets:       newOffsets,
		eOffsets:      newEOffsets,
		exemplar:      newExemplar,
		exemplarVal:   p.exemplarVal,
		exemplarTs:    p.exemplarTs,
		hasExemplarTs: p.hasExemplarTs,
	}
	return newParser
}
// nextToken returns the next token from the openMetricsLexer.
func (p *OpenMetricsParser) nextToken() token {
	tok := p.l.Lex()
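Why the deep copy matters: Next() advances the lexer and rewrites the parser's offsets, so CreatedTimestamp() needs a scratch parser it can move forward without disturbing the caller's position. A minimal sketch of that property, assuming a NewOpenMetricsParser(buf, symbolTable) constructor and that the snippet lives inside the textparse package (deepCopyParser is unexported); the input and comments are illustrative, not part of this commit:

// Illustrative only: advancing a deep copy must not move the original parser.
input := []byte(`# TYPE foo counter
foo_total 17.0
foo_created 1000
# EOF
`)
p := NewOpenMetricsParser(input, labels.NewSymbolTable()).(*OpenMetricsParser)
_, _ = p.Next() // EntryType for foo
_, _ = p.Next() // EntrySeries for foo_total

cp := deepCopyParser(p)
_, _ = cp.Next() // advance only the copy; it is now positioned on foo_created

var orig, peeked labels.Labels
p.Metric(&orig)    // still foo_total: the original's buffers and offsets are untouched
cp.Metric(&peeked) // foo_created, read from the copied lexer buffer and offsets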


@@ -64,7 +64,10 @@ _metric_starting_with_underscore 1
testmetric{_label_starting_with_underscore="foo"} 1
testmetric{label="\"bar\""} 1
# TYPE foo counter
foo_total 17.0 1520879607.789 # {id="counter-test"} 5`
foo_total 17.0 1520879607.789 # {id="counter-test"} 5
foo_created 1000
foo_total{a="b"} 17.0 1520879607.789 # {id="counter-test"} 5
foo_created{a="b"} 1000`
	input += "\n# HELP metric foo\x00bar"
	input += "\nnull_byte_metric{a=\"abc\x00\"} 1"
@@ -225,6 +228,24 @@ foo_total 17.0 1520879607.789 # {id="counter-test"} 5`
			lset: labels.FromStrings("__name__", "foo_total"),
			t:    int64p(1520879607789),
			e:    &exemplar.Exemplar{Labels: labels.FromStrings("id", "counter-test"), Value: 5},
			ct:   int64p(1000),
		}, {
			m:    `foo_total{a="b"}`,
			v:    17.0,
			lset: labels.FromStrings("__name__", "foo_total", "a", "b"),
			t:    int64p(1520879607789),
			e:    &exemplar.Exemplar{Labels: labels.FromStrings("id", "counter-test"), Value: 5},
			ct:   int64p(1000),
		}, {
			m:  "rpc_durations_histogram_seconds_bucket",
			ct: int64p(1000),
		},
		{
			m:    "foo_total",
			v:    17,
			lset: labels.FromStrings("__name__", "foo_total"),
			t:    int64p(1520879607789),
			e:    &exemplar.Exemplar{Labels: labels.FromStrings("id", "counter-test"), Value: 5},
		}, {
			m:    "metric",
			help: "foo\x00bar",
@@ -683,4 +704,4 @@ func TestOMNullByteHandling(t *testing.T) {
			require.Equal(t, c.err, err.Error(), "test %d", i)
		}
	}
}
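For orientation, this is roughly how a consumer of the textparse API would observe what the new test input above exercises: iterate entries and call CreatedTimestamp() on each series. The constructor signature and the printed fields are assumptions made for this sketch, not something the commit adds:

package main

import (
	"fmt"
	"io"
	"log"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/textparse"
)

func main() {
	input := []byte(`# TYPE foo counter
foo_total 17.0 1520879607.789
foo_created 1000
# EOF
`)
	// Assumed constructor shape for this sketch.
	p := textparse.NewOpenMetricsParser(input, labels.NewSymbolTable())
	for {
		et, err := p.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		if et != textparse.EntrySeries {
			continue
		}
		var lset labels.Labels
		p.Metric(&lset)
		_, _, v := p.Series()
		// CreatedTimestamp is expected to return nil when no matching
		// *_created series is found for the current sample.
		if ct := p.CreatedTimestamp(); ct != nil {
			fmt.Printf("%s value=%v created=%d\n", lset.Get(labels.MetricName), v, *ct)
		}
	}
}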


@@ -41,6 +41,7 @@ type expectedParse struct {
	unit    string
	comment string
	e       *exemplar.Exemplar
	ct      *int64
}
func TestPromParse(t *testing.T) {
@@ -218,6 +219,11 @@ func checkParseResults(t *testing.T, p Parser, exp []expectedParse) {
				require.True(t, found)
				testutil.RequireEqual(t, *exp[i].e, e)
			}
			if ct := p.CreatedTimestamp(); ct != nil {
				require.Equal(t, *exp[i].ct, *ct)
			} else {
				require.Nil(t, exp[i].ct)
			}
		case EntryType:
			m, typ := p.Type()
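One more note on the TODO in the parser change above: the plain namePrev != name comparison means foo_total never matches foo_created. A hypothetical direction, assuming only the standard counter/histogram/summary suffixes need stripping; this helper does not exist in the commit:

package textparse // hypothetical placement next to CreatedTimestamp

import "strings"

// sameUnderlyingMetric is a hypothetical helper: it reports whether a sample
// series name (foo_total, foo_bucket, foo_count, foo_sum, or a bare name) and
// a *_created series name (foo_created) refer to the same underlying metric.
func sameUnderlyingMetric(seriesName, createdName string) bool {
	base, ok := strings.CutSuffix(createdName, "_created")
	if !ok {
		return false
	}
	for _, suffix := range []string{"_total", "_bucket", "_count", "_sum", ""} {
		if strings.HasSuffix(seriesName, suffix) && strings.TrimSuffix(seriesName, suffix) == base {
			return true
		}
	}
	return false
}

With a helper like this, the namePrev != name check could become !sameUnderlyingMetric(namePrev, name), which is roughly the direction the in-code comment points at.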