Merge pull request #404 from grafana/fixup-labels-usage2

Fixup labels usage
Bryan Boreham, 2023-03-28 13:21:11 +01:00 (committed by GitHub)
commit 174b26a5c0
7 changed files with 22 additions and 29 deletions


@@ -3114,17 +3114,11 @@ func TestRangeQuery(t *testing.T) {
 			Result: Matrix{
 				Series{
 					Points: []Point{{V: 1, T: 0}, {V: 3, T: 60000}, {V: 5, T: 120000}},
-					Metric: labels.Labels{
-						labels.Label{Name: "__name__", Value: "bar"},
-						labels.Label{Name: "job", Value: "2"},
-					},
+					Metric: labels.FromStrings("__name__", "bar", "job", "2"),
 				},
 				Series{
 					Points: []Point{{V: 3, T: 60000}, {V: 5, T: 120000}},
-					Metric: labels.Labels{
-						labels.Label{Name: "__name__", Value: "foo"},
-						labels.Label{Name: "job", Value: "1"},
-					},
+					Metric: labels.FromStrings("__name__", "foo", "job", "1"),
 				},
 			},
 			Start: time.Unix(0, 0),
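The hunk above replaces hand-built labels.Labels slices with labels.FromStrings. As a minimal sketch (assuming this repo's usual github.com/prometheus/prometheus/model/labels import path), the constructor takes alternating name/value pairs and returns the label set without assuming its underlying representation:

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	// Old style relied on labels.Labels being a plain slice of labels.Label:
	//   labels.Labels{{Name: "__name__", Value: "bar"}, {Name: "job", Value: "2"}}
	// FromStrings builds the same set through the package API instead.
	ls := labels.FromStrings("__name__", "bar", "job", "2")
	fmt.Println(ls.Get("__name__")) // bar
	fmt.Println(ls.Get("job"))      // 2
}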


@@ -831,7 +831,7 @@ func TestUpdate_AlwaysRestore(t *testing.T) {
 	ruleManager.start()
 	defer ruleManager.Stop()
 
-	err := ruleManager.Update(10*time.Second, []string{"fixtures/rules_alerts.yaml"}, nil, "", nil)
+	err := ruleManager.Update(10*time.Second, []string{"fixtures/rules_alerts.yaml"}, labels.EmptyLabels(), "", nil)
 	require.NoError(t, err)
 
 	for _, g := range ruleManager.groups {
@@ -840,7 +840,7 @@ func TestUpdate_AlwaysRestore(t *testing.T) {
 	}
 
 	// Use different file, so groups haven't changed, therefore, we expect state restoration
-	err = ruleManager.Update(10*time.Second, []string{"fixtures/rules_alerts2.yaml"}, nil, "", nil)
+	err = ruleManager.Update(10*time.Second, []string{"fixtures/rules_alerts2.yaml"}, labels.EmptyLabels(), "", nil)
 	for _, g := range ruleManager.groups {
 		require.True(t, g.shouldRestore)
 	}
@@ -863,7 +863,7 @@ func TestUpdate_AlwaysRestoreDoesntAffectUnchangedGroups(t *testing.T) {
 	ruleManager.start()
 	defer ruleManager.Stop()
 
-	err := ruleManager.Update(10*time.Second, files, nil, "", nil)
+	err := ruleManager.Update(10*time.Second, files, labels.EmptyLabels(), "", nil)
 	require.NoError(t, err)
 
 	for _, g := range ruleManager.groups {
@@ -872,7 +872,7 @@ func TestUpdate_AlwaysRestoreDoesntAffectUnchangedGroups(t *testing.T) {
 	}
 
 	// Use the same file, so groups haven't changed, therefore, we don't expect state restoration
-	err = ruleManager.Update(10*time.Second, files, nil, "", nil)
+	err = ruleManager.Update(10*time.Second, files, labels.EmptyLabels(), "", nil)
 	for _, g := range ruleManager.groups {
 		require.False(t, g.shouldRestore)
 	}
@@ -1090,7 +1090,7 @@ func reloadRules(rgs *rulefmt.RuleGroups, t *testing.T, tmpFile *os.File, ruleMa
 	_, _ = tmpFile.Seek(0, 0)
 	_, err = tmpFile.Write(bs)
 	require.NoError(t, err)
-	err = ruleManager.Update(interval, []string{tmpFile.Name()}, nil, "", nil)
+	err = ruleManager.Update(interval, []string{tmpFile.Name()}, labels.EmptyLabels(), "", nil)
 	require.NoError(t, err)
 }
@@ -1686,7 +1686,7 @@ groups:
 		},
 	})
 	m.start()
-	err = m.Update(time.Second, []string{fname}, nil, "", nil)
+	err = m.Update(time.Second, []string{fname}, labels.EmptyLabels(), "", nil)
 	require.NoError(t, err)
 
 	rgs := m.RuleGroups()
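The Update calls above now pass labels.EmptyLabels() where they previously passed nil for the external-labels argument. A small sketch of the distinction, assuming the IsEmpty and Len helpers on labels.Labels (the surrounding names are illustrative, not the rule manager's):

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	// nil only worked as a "no labels" value while labels.Labels was a slice;
	// EmptyLabels() works regardless of the underlying representation.
	external := labels.EmptyLabels()
	fmt.Println(external.IsEmpty()) // true
	fmt.Println(external.Len())     // 0
}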


@@ -29,7 +29,7 @@ import (
 type unknownRule struct{}
 
 func (u unknownRule) Name() string          { return "" }
-func (u unknownRule) Labels() labels.Labels { return nil }
+func (u unknownRule) Labels() labels.Labels { return labels.EmptyLabels() }
 func (u unknownRule) Eval(ctx context.Context, evalDelay time.Duration, time time.Time, queryFunc QueryFunc, url *url.URL, i int) (promql.Vector, error) {
 	return nil, nil
 }


@@ -1176,13 +1176,17 @@ func (c *LeveledCompactor) populateSymbols(sets []storage.ChunkSeriesSet, outBlo
 			obIx = labels.StableHash(s.Labels()) % uint64(len(outBlocks))
 		}
 
-		for _, l := range s.Labels() {
+		err := s.Labels().Validate(func(l labels.Label) error {
 			if err := batchers[obIx].addSymbol(l.Name); err != nil {
 				return errors.Wrap(err, "addSymbol to batcher")
 			}
 			if err := batchers[obIx].addSymbol(l.Value); err != nil {
 				return errors.Wrap(err, "addSymbol to batcher")
 			}
+			return nil
+		})
+		if err != nil {
+			return err
 		}
 	}
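The compactor change swaps a for-range over the label slice for Labels.Validate, which walks every label through an error-returning callback and stops at the first non-nil error. A rough, self-contained sketch of that shape (the empty-value check is only an example, not the compactor's logic):

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	ls := labels.FromStrings("job", "prometheus", "instance", "localhost:9090")

	// Validate calls the function once per label and returns the first error.
	err := ls.Validate(func(l labels.Label) error {
		if l.Value == "" {
			return fmt.Errorf("label %q has an empty value", l.Name)
		}
		return nil
	})
	fmt.Println(err) // <nil>
}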


@@ -593,13 +593,13 @@ func TestCompaction_CompactWithSplitting(t *testing.T) {
 				ref := p.At()
 				require.NoError(t, idxr.Series(ref, &lbls, nil))
-				require.Equal(t, uint64(shardIndex), lbls.Labels().Hash()%shardCount)
+				require.Equal(t, uint64(shardIndex), labels.StableHash(lbls.Labels())%shardCount)
 
 				// Collect all symbols used by series.
-				for _, l := range lbls.Labels() {
+				lbls.Labels().Range(func(l labels.Label) {
 					seriesSymbols[l.Name] = struct{}{}
 					seriesSymbols[l.Value] = struct{}{}
-				}
+				})
 			}
 			require.NoError(t, p.Err())
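Two related substitutions appear in this test: read-only iteration moves from for-range to Labels.Range, and shard assignment moves from Labels.Hash to labels.StableHash, whose value is meant to stay fixed across versions. A brief sketch of both calls, with made-up label values:

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	ls := labels.FromStrings("__name__", "up", "job", "node")

	// Range visits every label without exposing the underlying storage.
	ls.Range(func(l labels.Label) {
		fmt.Printf("%s=%s\n", l.Name, l.Value)
	})

	// StableHash is used for shard selection, as in the test above.
	const shardCount = 4
	fmt.Println(labels.StableHash(ls) % shardCount)
}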


@@ -2448,10 +2448,7 @@ func TestHeadShardedPostings(t *testing.T) {
 	// Append some series.
 	app := head.Appender(context.Background())
 	for i := 0; i < 100; i++ {
-		_, err := app.Append(0, labels.Labels{
-			{Name: "unique", Value: fmt.Sprintf("value%d", i)},
-			{Name: "const", Value: "1"},
-		}, 100, 0)
+		_, err := app.Append(0, labels.FromStrings("unique", fmt.Sprintf("value%d", i), "const", "1"), 100, 0)
 		require.NoError(t, err)
 	}
 	require.NoError(t, app.Commit())
@@ -2498,7 +2495,7 @@ func TestHeadShardedPostings(t *testing.T) {
 			var lbls labels.ScratchBuilder
 			require.NoError(t, ir.Series(id, &lbls, nil))
-			require.Equal(t, shardIndex, lbls.Labels().Hash()%shardCount)
+			require.Equal(t, shardIndex, labels.StableHash(lbls.Labels())%shardCount)
 		}
 	}
 }
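The shard checks above read series labels into a labels.ScratchBuilder, which the index reader fills and which is then turned into a Labels value via Labels(). A minimal sketch of building a set by hand with the same type (the label names here are illustrative):

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	var b labels.ScratchBuilder
	b.Add("unique", "value1")
	b.Add("const", "1")
	b.Sort() // Add does not keep the set ordered; sort before reading it out.
	ls := b.Labels()
	fmt.Println(ls.Get("unique")) // value1
}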


@@ -292,7 +292,7 @@ func TestIndexRW_Postings(t *testing.T) {
 				var lbls labels.ScratchBuilder
 				require.NoError(t, ir.Series(id, &lbls, nil))
-				require.Equal(t, shardIndex, lbls.Labels().Hash()%shardCount)
+				require.Equal(t, shardIndex, labels.StableHash(lbls.Labels())%shardCount)
 			}
 		}
 	})
@@ -630,10 +630,8 @@ func BenchmarkReader_ShardedPostings(b *testing.B) {
 	require.NoError(b, iw.AddSymbol("unique"))
 
 	for i := 1; i <= numSeries; i++ {
-		require.NoError(b, iw.AddSeries(storage.SeriesRef(i), labels.Labels{
-			{Name: "const", Value: fmt.Sprintf("%10d", 1)},
-			{Name: "unique", Value: fmt.Sprintf("%10d", i)},
-		}))
+		require.NoError(b, iw.AddSeries(storage.SeriesRef(i),
+			labels.FromStrings("const", fmt.Sprintf("%10d", 1), "unique", fmt.Sprintf("%10d", i))))
 	}
 	require.NoError(b, iw.Close())