Update package tsdb tests for new labels.Labels type

Signed-off-by: Bryan Boreham <bjboreham@gmail.com>
Bryan Boreham 2022-03-09 22:17:29 +00:00
parent ce2cfad0cb
commit 4b6a4d1425
6 changed files with 72 additions and 61 deletions
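Every file below follows the same pattern: test code stops constructing labels.Labels as a slice/struct literal (which bakes in the concrete representation and relies on the entries already being sorted) and goes through the package's constructors instead. A minimal before/after sketch of that core change, with illustrative values and names, assuming the usual import of github.com/prometheus/prometheus/model/labels:

// Assumes: import "github.com/prometheus/prometheus/model/labels"
func exampleLabelsConstruction() labels.Labels {
	// Old style: build the Labels slice directly; callers had to keep
	// the entries in sorted order themselves.
	old := labels.Labels{
		{Name: "instance", Value: "localhost:8080"},
		{Name: "job", Value: "test"},
	}
	_ = old

	// New style: FromStrings takes name/value pairs and returns an
	// equivalent Labels value without exposing the representation.
	return labels.FromStrings("instance", "localhost:8080", "job", "test")
}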

View file

@@ -215,10 +215,10 @@ func TestLabelValuesWithMatchers(t *testing.T) {
var seriesEntries []storage.Series
for i := 0; i < 100; i++ {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"tens", fmt.Sprintf("value%d", i/10),
"unique", fmt.Sprintf("value%d", i),
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
}
blockDir := createBlock(t, tmpdir, seriesEntries)
@@ -372,11 +372,11 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) {
for i := 0; i < metricCount; i++ {
// Note these series are not created in sort order: 'value2' sorts after 'value10'.
// This makes a big difference to the benchmark timing.
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "a_unique", Value: fmt.Sprintf("value%d", i)},
{Name: "b_tens", Value: fmt.Sprintf("value%d", i/(metricCount/10))},
{Name: "c_ninety", Value: fmt.Sprintf("value%d", i/(metricCount/10)/9)}, // "0" for the first 90%, then "1"
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"a_unique", fmt.Sprintf("value%d", i),
"b_tens", fmt.Sprintf("value%d", i/(metricCount/10)),
"c_ninety", fmt.Sprintf("value%d", i/(metricCount/10)/9), // "0" for the first 90%, then "1"
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
}
blockDir := createBlock(b, tmpdir, seriesEntries)
@@ -410,23 +410,23 @@ func TestLabelNamesWithMatchers(t *testing.T) {
var seriesEntries []storage.Series
for i := 0; i < 100; i++ {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"unique", fmt.Sprintf("value%d", i),
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
if i%10 == 0 {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"tens", fmt.Sprintf("value%d", i/10),
"unique", fmt.Sprintf("value%d", i),
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
}
if i%20 == 0 {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
{Name: "twenties", Value: fmt.Sprintf("value%d", i/20)},
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"tens", fmt.Sprintf("value%d", i/10),
"twenties", fmt.Sprintf("value%d", i/20),
"unique", fmt.Sprintf("value%d", i),
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
}
}

View file

@@ -1478,11 +1478,11 @@ func TestSparseHistogramSpaceSavings(t *testing.T) {
for sid, schema := range allSchemas {
for i := 0; i < c.numSeriesPerSchema; i++ {
lbls := labels.Labels{
{Name: "__name__", Value: fmt.Sprintf("rpc_durations_%d_histogram_seconds", i)},
{Name: "instance", Value: "localhost:8080"},
{Name: "job", Value: fmt.Sprintf("sparse_histogram_schema_%s", schemaDescription[sid])},
}
lbls := labels.FromStrings(
"__name__", fmt.Sprintf("rpc_durations_%d_histogram_seconds", i),
"instance", "localhost:8080",
"job", fmt.Sprintf("sparse_histogram_schema_%s", schemaDescription[sid]),
)
allSparseSeries = append(allSparseSeries, struct {
baseLabels labels.Labels
hists []*histogram.Histogram
@@ -1546,21 +1546,20 @@ func TestSparseHistogramSpaceSavings(t *testing.T) {
for it.Next() {
numOldSeriesPerHistogram++
b := it.At()
lbls := append(ah.baseLabels, labels.Label{Name: "le", Value: fmt.Sprintf("%.16f", b.Upper)})
lbls := labels.NewBuilder(ah.baseLabels).Set("le", fmt.Sprintf("%.16f", b.Upper)).Labels(labels.EmptyLabels())
refs[itIdx], err = oldApp.Append(refs[itIdx], lbls, ts, float64(b.Count))
require.NoError(t, err)
itIdx++
}
baseName := ah.baseLabels.Get(labels.MetricName)
// _count metric.
countLbls := ah.baseLabels.Copy()
countLbls[0].Value = countLbls[0].Value + "_count"
countLbls := labels.NewBuilder(ah.baseLabels).Set(labels.MetricName, baseName+"_count").Labels(labels.EmptyLabels())
_, err = oldApp.Append(0, countLbls, ts, float64(h.Count))
require.NoError(t, err)
numOldSeriesPerHistogram++
// _sum metric.
sumLbls := ah.baseLabels.Copy()
sumLbls[0].Value = sumLbls[0].Value + "_sum"
sumLbls := labels.NewBuilder(ah.baseLabels).Set(labels.MetricName, baseName+"_sum").Labels(labels.EmptyLabels())
_, err = oldApp.Append(0, sumLbls, ts, h.Sum)
require.NoError(t, err)
numOldSeriesPerHistogram++
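
The _count/_sum rewrite above is needed because the new type can no longer be mutated by index (the old code overwrote countLbls[0].Value directly). Derived label sets are built with labels.Builder instead; roughly, for an illustrative base label set:

// Assumes: import "github.com/prometheus/prometheus/model/labels"
func deriveCountLabels(base labels.Labels) labels.Labels {
	// Copy base, overwrite __name__, then materialise a fresh Labels.
	// Labels() takes an optional buffer to reuse; EmptyLabels() passes none.
	return labels.NewBuilder(base).
		Set(labels.MetricName, base.Get(labels.MetricName)+"_count").
		Labels(labels.EmptyLabels())
}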

View file

@@ -478,9 +478,9 @@ func TestAmendDatapointCausesError(t *testing.T) {
require.NoError(t, app.Commit())
app = db.Appender(ctx)
_, err = app.Append(0, labels.Labels{{Name: "a", Value: "b"}}, 0, 0)
_, err = app.Append(0, labels.FromStrings("a", "b"), 0, 0)
require.NoError(t, err)
_, err = app.Append(0, labels.Labels{{Name: "a", Value: "b"}}, 0, 1)
_, err = app.Append(0, labels.FromStrings("a", "b"), 0, 1)
require.Equal(t, storage.ErrDuplicateSampleForTimestamp, err)
require.NoError(t, app.Rollback())
@@ -498,15 +498,15 @@ func TestAmendDatapointCausesError(t *testing.T) {
}
app = db.Appender(ctx)
_, err = app.AppendHistogram(0, labels.Labels{{Name: "a", Value: "c"}}, 0, h.Copy())
_, err = app.AppendHistogram(0, labels.FromStrings("a", "c"), 0, h.Copy())
require.NoError(t, err)
require.NoError(t, app.Commit())
app = db.Appender(ctx)
_, err = app.AppendHistogram(0, labels.Labels{{Name: "a", Value: "c"}}, 0, h.Copy())
_, err = app.AppendHistogram(0, labels.FromStrings("a", "c"), 0, h.Copy())
require.NoError(t, err)
h.Schema = 2
_, err = app.AppendHistogram(0, labels.Labels{{Name: "a", Value: "c"}}, 0, h.Copy())
_, err = app.AppendHistogram(0, labels.FromStrings("a", "c"), 0, h.Copy())
require.Equal(t, storage.ErrDuplicateSampleForTimestamp, err)
require.NoError(t, app.Rollback())
}

View file

@@ -388,7 +388,12 @@ func TestHead_HighConcurrencyReadAndWrite(t *testing.T) {
querySeriesRef = (querySeriesRef + 1) % seriesCnt
lbls := labelSets[querySeriesRef]
samples, err := queryHead(ts-qryRange, ts, lbls[0])
// lbls has a single entry; extract it so we can run a query.
var lbl labels.Label
lbls.Range(func(l labels.Label) {
lbl = l
})
samples, err := queryHead(ts-qryRange, ts, lbl)
if err != nil {
return false, err
}
@@ -1133,8 +1138,9 @@ func TestDelete_e2e(t *testing.T) {
require.NoError(t, hb.Delete(r.Mint, r.Maxt, del.ms...))
}
matched := labels.Slice{}
for _, ls := range lbls {
for _, l := range lbls {
s := labels.Selector(del.ms)
ls := labels.New(l...)
if s.Matches(ls) {
matched = append(matched, ls)
}
@@ -2808,7 +2814,7 @@ func TestWaitForPendingReadersInTimeRange(t *testing.T) {
}
func TestAppendHistogram(t *testing.T) {
l := labels.Labels{{Name: "a", Value: "b"}}
l := labels.FromStrings("a", "b")
for _, numHistograms := range []int{1, 10, 150, 200, 250, 300} {
t.Run(fmt.Sprintf("%d", numHistograms), func(t *testing.T) {
head, _ := newTestHead(t, 1000, false, false)
@@ -2863,7 +2869,7 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
require.NoError(t, head.Init(0))
// Series with only histograms.
s1 := labels.Labels{{Name: "a", Value: "b1"}}
s1 := labels.FromStrings("a", "b1")
k1 := s1.String()
numHistograms := 450
exp := map[string][]tsdbutil.Sample{}
@@ -2895,7 +2901,7 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
require.Greater(t, expHeadChunkSamples, 0)
// Series with mix of histograms and float.
s2 := labels.Labels{{Name: "a", Value: "b2"}}
s2 := labels.FromStrings("a", "b2")
k2 := s2.String()
app = head.Appender(context.Background())
ts := 0
@@ -3256,7 +3262,7 @@ func TestHistogramMetrics(t *testing.T) {
for x := 0; x < 5; x++ {
expHSeries++
l := labels.Labels{{Name: "a", Value: fmt.Sprintf("b%d", x)}}
l := labels.FromStrings("a", fmt.Sprintf("b%d", x))
for i, h := range GenerateTestHistograms(10) {
app := head.Appender(context.Background())
_, err := app.AppendHistogram(0, l, int64(i), h)
@@ -3279,7 +3285,7 @@ func TestHistogramMetrics(t *testing.T) {
}
func TestHistogramStaleSample(t *testing.T) {
l := labels.Labels{{Name: "a", Value: "b"}}
l := labels.FromStrings("a", "b")
numHistograms := 20
head, _ := newTestHead(t, 100000, false, false)
t.Cleanup(func() {
@@ -3374,7 +3380,7 @@ func TestHistogramStaleSample(t *testing.T) {
}
func TestHistogramCounterResetHeader(t *testing.T) {
l := labels.Labels{{Name: "a", Value: "b"}}
l := labels.FromStrings("a", "b")
head, _ := newTestHead(t, 1000, false, false)
t.Cleanup(func() {
require.NoError(t, head.Close())
@@ -3486,7 +3492,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
db.DisableCompactions()
hists := GenerateTestHistograms(10)
lbls := labels.Labels{{Name: "a", Value: "b"}}
lbls := labels.FromStrings("a", "b")
type result struct {
t int64

View file

@@ -358,12 +358,13 @@ func TestOOOHeadIndexReader_Series(t *testing.T) {
var chks []chunks.Meta
var respLset labels.Labels
err := ir.Series(storage.SeriesRef(s1ID), &respLset, &chks)
var b labels.ScratchBuilder
err := ir.Series(storage.SeriesRef(s1ID), &b, &respLset, &chks)
require.NoError(t, err)
require.Equal(t, s1Lset, respLset)
require.Equal(t, expChunks, chks)
err = ir.Series(storage.SeriesRef(s1ID+1), &respLset, &chks)
err = ir.Series(storage.SeriesRef(s1ID+1), &b, &respLset, &chks)
require.Equal(t, storage.ErrNotFound, err)
})
}
@@ -841,7 +842,8 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
ir := NewOOOHeadIndexReader(db.head, tc.queryMinT, tc.queryMaxT)
var chks []chunks.Meta
var respLset labels.Labels
err := ir.Series(s1Ref, &respLset, &chks)
var b labels.ScratchBuilder
err := ir.Series(s1Ref, &b, &respLset, &chks)
require.NoError(t, err)
require.Equal(t, len(tc.expChunksSamples), len(chks))
@@ -1004,7 +1006,8 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
ir := NewOOOHeadIndexReader(db.head, tc.queryMinT, tc.queryMaxT)
var chks []chunks.Meta
var respLset labels.Labels
err := ir.Series(s1Ref, &respLset, &chks)
var b labels.ScratchBuilder
err := ir.Series(s1Ref, &b, &respLset, &chks)
require.NoError(t, err)
require.Equal(t, len(tc.expChunksSamples), len(chks))
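
The OOO-head changes reflect the new IndexReader.Series signature, which takes a *labels.ScratchBuilder the reader can use as scratch space while decoding the series' labels. A rough sketch of the call shape, assuming (as in these tests) code inside package tsdb with an IndexReader and series ref already in hand:

// Assumes: imports of model/labels, storage and tsdb/chunks from
// github.com/prometheus/prometheus; the helper name is illustrative.
func readSeries(ir IndexReader, ref storage.SeriesRef) (labels.Labels, []chunks.Meta, error) {
	var b labels.ScratchBuilder // reusable decode buffer
	var lset labels.Labels
	var chks []chunks.Meta
	if err := ir.Series(ref, &b, &lset, &chks); err != nil {
		return labels.EmptyLabels(), nil, err
	}
	return lset, chks, nil
}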

View file

@@ -142,14 +142,14 @@ func createIdxChkReaders(t *testing.T, tc []seriesSamples) (IndexReader, ChunkRe
postings.Add(storage.SeriesRef(i), ls)
for _, l := range ls {
ls.Range(func(l labels.Label) {
vs, present := lblIdx[l.Name]
if !present {
vs = map[string]struct{}{}
lblIdx[l.Name] = vs
}
vs[l.Value] = struct{}{}
}
})
}
require.NoError(t, postings.Iter(func(l labels.Label, p index.Postings) error {
@@ -1168,10 +1168,10 @@ func (m *mockIndex) AddSeries(ref storage.SeriesRef, l labels.Labels, chunks ...
if _, ok := m.series[ref]; ok {
return errors.Errorf("series with reference %d already added", ref)
}
for _, lbl := range l {
l.Range(func(lbl labels.Label) {
m.symbols[lbl.Name] = struct{}{}
m.symbols[lbl.Value] = struct{}{}
}
})
s := series{l: l}
// Actual chunk data is not stored in the index.
@@ -1238,9 +1238,9 @@ func (m mockIndex) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) {
func (m mockIndex) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) {
namesMap := make(map[string]bool)
for _, id := range ids {
for _, lbl := range m.series[id].l {
m.series[id].l.Range(func(lbl labels.Label) {
namesMap[lbl.Name] = true
}
})
}
names := make([]string, 0, len(namesMap))
for name := range namesMap {
@@ -1275,7 +1275,7 @@ func (m mockIndex) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder,
if !ok {
return storage.ErrNotFound
}
*lset = append((*lset)[:0], s.l...)
lset.CopyFrom(s.l)
*chks = append((*chks)[:0], s.chunks...)
return nil
@@ -1297,9 +1297,9 @@ func (m mockIndex) LabelNames(matchers ...*labels.Matcher) ([]string, error) {
}
}
if matches {
for _, lbl := range series.l {
series.l.Range(func(lbl labels.Label) {
names[lbl.Name] = struct{}{}
}
})
}
}
}
@@ -1974,7 +1974,7 @@ func BenchmarkQueries(b *testing.B) {
// Add some common labels to make the matchers select these series.
{
var commonLbls labels.Labels
var commonLbls []labels.Label
for _, selector := range selectors {
switch selector.Type {
case labels.MatchEqual:
@@ -1985,8 +1985,11 @@ func BenchmarkQueries(b *testing.B) {
}
for i := range commonLbls {
s := series[i].(*storage.SeriesEntry)
allLabels := append(commonLbls, s.Labels()...)
newS := storage.NewListSeries(allLabels, nil)
allLabels := commonLbls
s.Labels().Range(func(l labels.Label) {
allLabels = append(allLabels, l)
})
newS := storage.NewListSeries(labels.New(allLabels...), nil)
newS.SampleIteratorFn = s.SampleIteratorFn
series[i] = newS
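
mockIndex and BenchmarkQueries show the remaining idioms: copying a stored label set with CopyFrom rather than re-slicing, and accumulating a plain []labels.Label while appending, converting with labels.New only at the end. A sketch with illustrative values and names:

// Assumes: import "github.com/prometheus/prometheus/model/labels"
func exampleAccumulate(base labels.Labels) labels.Labels {
	// Build up a plain slice while we still want append semantics...
	extra := []labels.Label{{Name: "region", Value: "eu"}}
	base.Range(func(l labels.Label) {
		extra = append(extra, l)
	})
	merged := labels.New(extra...) // ...then convert back to a sorted Labels.

	// CopyFrom copies one Labels value into another without assuming
	// a slice layout underneath.
	var dst labels.Labels
	dst.CopyFrom(merged)
	return dst
}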