Merge pull request #11717 from bboreham/labels-abstraction

Add and use abstractions over labels.Labels
This commit is contained in:
Bryan Boreham 2022-12-20 17:23:39 +00:00 committed by GitHub
commit ccea61c7bf
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
66 changed files with 806 additions and 686 deletions

View file

@ -631,9 +631,9 @@ func checkRules(filename string, lintSettings lintConfig) (int, []error) {
errMessage := fmt.Sprintf("%d duplicate rule(s) found.\n", len(dRules))
for _, n := range dRules {
errMessage += fmt.Sprintf("Metric: %s\nLabel(s):\n", n.metric)
for _, l := range n.label {
n.label.Range(func(l labels.Label) {
errMessage += fmt.Sprintf("\t%s: %s\n", l.Name, l.Value)
}
})
}
errMessage += "Might cause inconsistency while recording expressions"
return 0, []error{fmt.Errorf("%w %s", lintError, errMessage)}

View file

@ -158,14 +158,15 @@ func (importer *ruleImporter) importRule(ctx context.Context, ruleExpr, ruleName
// Setting the rule labels after the output of the query,
// so they can override query output.
for _, l := range ruleLabels {
ruleLabels.Range(func(l labels.Label) {
lb.Set(l.Name, l.Value)
}
})
lb.Set(labels.MetricName, ruleName)
lbls := lb.Labels(labels.EmptyLabels())
for _, value := range sample.Values {
if err := app.add(ctx, lb.Labels(nil), timestamp.FromTime(value.Timestamp.Time()), float64(value.Value)); err != nil {
if err := app.add(ctx, lbls, timestamp.FromTime(value.Timestamp.Time()), float64(value.Value)); err != nil {
return fmt.Errorf("add: %w", err)
}
}

View file

@ -100,7 +100,7 @@ func TestBackfillRuleIntegration(t *testing.T) {
require.Equal(t, 1, len(gRules))
require.Equal(t, "rule1", gRules[0].Name())
require.Equal(t, "ruleExpr", gRules[0].Query().String())
require.Equal(t, 1, len(gRules[0].Labels()))
require.Equal(t, 1, gRules[0].Labels().Len())
group2 := ruleImporter.groups[path2+";group2"]
require.NotNil(t, group2)
@ -109,7 +109,7 @@ func TestBackfillRuleIntegration(t *testing.T) {
require.Equal(t, 2, len(g2Rules))
require.Equal(t, "grp2_rule1", g2Rules[0].Name())
require.Equal(t, "grp2_rule1_expr", g2Rules[0].Query().String())
require.Equal(t, 0, len(g2Rules[0].Labels()))
require.Equal(t, 0, g2Rules[0].Labels().Len())
// Backfill all recording rules then check the blocks to confirm the correct data was created.
errs = ruleImporter.importAll(ctx)
@ -132,12 +132,12 @@ func TestBackfillRuleIntegration(t *testing.T) {
for selectedSeries.Next() {
seriesCount++
series := selectedSeries.At()
if len(series.Labels()) != 3 {
require.Equal(t, 2, len(series.Labels()))
if series.Labels().Len() != 3 {
require.Equal(t, 2, series.Labels().Len())
x := labels.FromStrings("__name__", "grp2_rule1", "name1", "val1")
require.Equal(t, x, series.Labels())
} else {
require.Equal(t, 3, len(series.Labels()))
require.Equal(t, 3, series.Labels().Len())
}
it := series.Iterator(nil)
for it.Next() == chunkenc.ValFloat {

View file

@ -315,7 +315,7 @@ func readPrometheusLabels(r io.Reader, n int) ([]labels.Labels, error) {
i := 0
for scanner.Scan() && i < n {
m := make(labels.Labels, 0, 10)
m := make([]labels.Label, 0, 10)
r := strings.NewReplacer("\"", "", "{", "", "}", "")
s := r.Replace(scanner.Text())
@ -325,13 +325,12 @@ func readPrometheusLabels(r io.Reader, n int) ([]labels.Labels, error) {
split := strings.Split(labelChunk, ":")
m = append(m, labels.Label{Name: split[0], Value: split[1]})
}
// Order of the k/v labels matters, don't assume we'll always receive them already sorted.
sort.Sort(m)
h := m.Hash()
ml := labels.New(m...) // This sorts by name - order of the k/v labels matters, don't assume we'll always receive them already sorted.
h := ml.Hash()
if _, ok := hashes[h]; ok {
continue
}
mets = append(mets, m)
mets = append(mets, ml)
hashes[h] = struct{}{}
i++
}
@ -470,21 +469,21 @@ func analyzeBlock(path, blockID string, limit int, runExtended bool) error {
if err != nil {
return err
}
lbls := labels.Labels{}
chks := []chunks.Meta{}
builder := labels.ScratchBuilder{}
for p.Next() {
if err = ir.Series(p.At(), &lbls, &chks); err != nil {
if err = ir.Series(p.At(), &builder, &chks); err != nil {
return err
}
// Amount of the block time range not covered by this series.
uncovered := uint64(meta.MaxTime-meta.MinTime) - uint64(chks[len(chks)-1].MaxTime-chks[0].MinTime)
for _, lbl := range lbls {
builder.Labels().Range(func(lbl labels.Label) {
key := lbl.Name + "=" + lbl.Value
labelsUncovered[lbl.Name] += uncovered
labelpairsUncovered[key] += uncovered
labelpairsCount[key]++
entries++
}
})
}
if p.Err() != nil {
return p.Err()
@ -589,10 +588,10 @@ func analyzeCompaction(block tsdb.BlockReader, indexr tsdb.IndexReader) (err err
nBuckets := 10
histogram := make([]int, nBuckets)
totalChunks := 0
var builder labels.ScratchBuilder
for postingsr.Next() {
lbsl := labels.Labels{}
var chks []chunks.Meta
if err := indexr.Series(postingsr.At(), &lbsl, &chks); err != nil {
if err := indexr.Series(postingsr.At(), &builder, &chks); err != nil {
return err
}

View file

@ -284,8 +284,8 @@ func (tg *testGroup) test(evalInterval time.Duration, groupOrderMap map[string]i
for _, a := range ar.ActiveAlerts() {
if a.State == rules.StateFiring {
alerts = append(alerts, labelAndAnnotation{
Labels: append(labels.Labels{}, a.Labels...),
Annotations: append(labels.Labels{}, a.Annotations...),
Labels: a.Labels.Copy(),
Annotations: a.Annotations.Copy(),
})
}
}

View file

@ -80,7 +80,8 @@ func Load(s string, expandExternalLabels bool, logger log.Logger) (*Config, erro
return cfg, nil
}
for i, v := range cfg.GlobalConfig.ExternalLabels {
b := labels.ScratchBuilder{}
cfg.GlobalConfig.ExternalLabels.Range(func(v labels.Label) {
newV := os.Expand(v.Value, func(s string) string {
if s == "$" {
return "$"
@ -93,10 +94,10 @@ func Load(s string, expandExternalLabels bool, logger log.Logger) (*Config, erro
})
if newV != v.Value {
level.Debug(logger).Log("msg", "External label replaced", "label", v.Name, "input", v.Value, "output", newV)
v.Value = newV
cfg.GlobalConfig.ExternalLabels[i] = v
}
}
b.Add(v.Name, newV)
})
cfg.GlobalConfig.ExternalLabels = b.Labels()
return cfg, nil
}
@ -361,13 +362,16 @@ func (c *GlobalConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
return err
}
for _, l := range gc.ExternalLabels {
if err := gc.ExternalLabels.Validate(func(l labels.Label) error {
if !model.LabelName(l.Name).IsValid() {
return fmt.Errorf("%q is not a valid label name", l.Name)
}
if !model.LabelValue(l.Value).IsValid() {
return fmt.Errorf("%q is not a valid label value", l.Value)
}
return nil
}); err != nil {
return err
}
// First set the correct scrape interval, then check that the timeout
@ -394,7 +398,7 @@ func (c *GlobalConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
// isZero returns true iff the global config is the zero value.
func (c *GlobalConfig) isZero() bool {
return c.ExternalLabels == nil &&
return c.ExternalLabels.IsEmpty() &&
c.ScrapeInterval == 0 &&
c.ScrapeTimeout == 0 &&
c.EvaluationInterval == 0 &&

View file

@ -357,9 +357,7 @@ func EmptyLabels() Labels {
// The caller has to guarantee that all label names are unique.
func New(ls ...Label) Labels {
set := make(Labels, 0, len(ls))
for _, l := range ls {
set = append(set, l)
}
set = append(set, ls...)
sort.Sort(set)
return set
@ -414,6 +412,49 @@ func Compare(a, b Labels) int {
return len(a) - len(b)
}
// CopyFrom copies labels from b on top of whatever was in ls previously,
// reusing ls's memory where possible or expanding it if needed.
func (ls *Labels) CopyFrom(b Labels) {
(*ls) = append((*ls)[:0], b...)
}
// IsEmpty returns true if ls represents an empty set of labels.
// Both a nil and a zero-length Labels count as empty.
func (ls Labels) IsEmpty() bool {
return len(ls) == 0
}
// Range invokes f once for every label in ls, in order.
func (ls Labels) Range(f func(l Label)) {
	for i := range ls {
		f(ls[i])
	}
}
// Validate applies f to each label in turn. The first non-nil error returned
// by f cancels the iteration and is passed back to the caller.
func (ls Labels) Validate(f func(l Label) error) error {
	for i := range ls {
		if err := f(ls[i]); err != nil {
			return err
		}
	}
	return nil
}
// InternStrings replaces every name and value string in ls with whatever
// intern returns for it, letting callers deduplicate string storage.
func (ls *Labels) InternStrings(intern func(string) string) {
	for i := range *ls {
		lbl := &(*ls)[i]
		lbl.Name = intern(lbl.Name)
		lbl.Value = intern(lbl.Value)
	}
}
// ReleaseStrings calls release on every name and value string held in ls,
// the counterpart to InternStrings.
func (ls Labels) ReleaseStrings(release func(string)) {
	for i := range ls {
		release(ls[i].Name)
		release(ls[i].Value)
	}
}
// Builder allows modifying Labels.
type Builder struct {
base Labels
@ -470,7 +511,7 @@ Outer:
return b
}
// Set the name/value pair as a label.
// Set the name/value pair as a label. A value of "" means delete that label.
func (b *Builder) Set(n, v string) *Builder {
if v == "" {
// Empty labels are the same as missing labels.
@ -525,3 +566,40 @@ Outer:
}
return res
}
// ScratchBuilder allows efficient construction of a Labels from scratch.
// Unlike Builder it has no base label set; pairs are accumulated with Add
// and materialized with Labels. The zero value is ready to use.
type ScratchBuilder struct {
// add holds the accumulated name/value pairs; Reset truncates it to
// length zero but keeps its capacity, so a reused builder avoids allocations.
add Labels
}
// NewScratchBuilder creates a ScratchBuilder initialized for Labels with n entries.
func NewScratchBuilder(n int) ScratchBuilder {
	var b ScratchBuilder
	b.add = make(Labels, 0, n)
	return b
}
// Reset clears the builder for reuse. The underlying slice keeps its
// capacity, so subsequent Add calls avoid reallocating.
func (b *ScratchBuilder) Reset() {
b.add = b.add[:0]
}
// Add appends a name/value pair to the builder.
// Note if you Add the same name twice you will get a duplicate label, which is invalid.
func (b *ScratchBuilder) Add(name, value string) {
	lbl := Label{Name: name, Value: value}
	b.add = append(b.add, lbl)
}
// Sort sorts the labels added so far by name, restoring the sorted-by-name
// invariant that Labels consumers expect.
func (b *ScratchBuilder) Sort() {
sort.Sort(b.add)
}
// Assign is for when you already have a Labels which you want this ScratchBuilder to return.
func (b *ScratchBuilder) Assign(ls Labels) {
b.add = append(b.add[:0], ls...) // Copy on top of our slice, so we don't retain the input slice.
}
// Labels returns the name/value pairs added so far as a Labels object.
// Note: if you want them sorted, call Sort() first.
func (b *ScratchBuilder) Labels() Labels {
	// Copy into an exactly-sized slice so the next use of the ScratchBuilder
	// cannot overwrite the returned Labels, and no spare capacity is wasted.
	res := make(Labels, len(b.add))
	copy(res, b.add)
	return res
}

View file

@ -36,10 +36,6 @@ func TestLabels_String(t *testing.T) {
lables: Labels{},
expected: "{}",
},
{
lables: nil,
expected: "{}",
},
}
for _, c := range cases {
str := c.lables.String()
@ -316,18 +312,18 @@ func TestLabels_Equal(t *testing.T) {
func TestLabels_FromStrings(t *testing.T) {
labels := FromStrings("aaa", "111", "bbb", "222")
expected := Labels{
{
Name: "aaa",
Value: "111",
},
{
Name: "bbb",
Value: "222",
},
}
require.Equal(t, expected, labels, "unexpected labelset")
x := 0
labels.Range(func(l Label) {
switch x {
case 0:
require.Equal(t, Label{Name: "aaa", Value: "111"}, l, "unexpected value")
case 1:
require.Equal(t, Label{Name: "bbb", Value: "222"}, l, "unexpected value")
default:
t.Fatalf("unexpected labelset value %d: %v", x, l)
}
x++
})
require.Panics(t, func() { FromStrings("aaa", "111", "bbb") }) //nolint:staticcheck // Ignore SA5012, error is intentional test.
}
@ -539,7 +535,6 @@ func TestBuilder(t *testing.T) {
want: FromStrings("aaa", "111", "ccc", "333"),
},
{
base: nil,
set: []Label{{"aaa", "111"}, {"bbb", "222"}, {"ccc", "333"}},
del: []string{"bbb"},
want: FromStrings("aaa", "111", "ccc", "333"),
@ -601,11 +596,49 @@ func TestBuilder(t *testing.T) {
}
}
// TestScratchBuilder checks that Add+Sort+Labels produces the expected
// sorted label set, and that Assign makes the builder return exactly
// the assigned Labels.
func TestScratchBuilder(t *testing.T) {
	cases := []struct {
		add  []Label
		want Labels
	}{
		{add: []Label{}, want: EmptyLabels()},
		{add: []Label{{"aaa", "111"}}, want: FromStrings("aaa", "111")},
		{add: []Label{{"aaa", "111"}, {"bbb", "222"}, {"ccc", "333"}}, want: FromStrings("aaa", "111", "bbb", "222", "ccc", "333")},
		{add: []Label{{"bbb", "222"}, {"aaa", "111"}, {"ccc", "333"}}, want: FromStrings("aaa", "111", "bbb", "222", "ccc", "333")},
		{add: []Label{{"ddd", "444"}}, want: FromStrings("ddd", "444")},
	}
	for i, tc := range cases {
		t.Run(fmt.Sprint(i), func(t *testing.T) {
			var b ScratchBuilder
			for _, lbl := range tc.add {
				b.Add(lbl.Name, lbl.Value)
			}
			b.Sort()
			require.Equal(t, tc.want, b.Labels())
			// Assigning an existing Labels must make the builder hand it back.
			b.Assign(tc.want)
			require.Equal(t, tc.want, b.Labels())
		})
	}
}
// TestLabels_Hash checks that Hash is deterministic and sensitive to
// label order, label count, and label content.
func TestLabels_Hash(t *testing.T) {
	lbls := FromStrings("foo", "bar", "baz", "qux")
	// Hashing the same set twice must give the same result.
	require.Equal(t, lbls.Hash(), lbls.Hash())
	// Any change to order, count, or content must change the hash.
	reordered := Labels{lbls[1], lbls[0]}
	require.NotEqual(t, lbls.Hash(), reordered.Hash(), "unordered labels match.")
	truncated := Labels{lbls[0]}
	require.NotEqual(t, lbls.Hash(), truncated.Hash(), "different labels match.")
	require.NotEqual(t, lbls.Hash(), FromStrings("foo", "bar").Hash(), "different labels match.")
}
var benchmarkLabelsResult uint64
@ -623,7 +656,7 @@ func BenchmarkLabels_Hash(b *testing.B) {
// Label ~20B name, 50B value.
b.Set(fmt.Sprintf("abcdefghijabcdefghijabcdefghij%d", i), fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i))
}
return b.Labels(nil)
return b.Labels(EmptyLabels())
}(),
},
{
@ -634,7 +667,7 @@ func BenchmarkLabels_Hash(b *testing.B) {
// Label ~50B name, 50B value.
b.Set(fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i), fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i))
}
return b.Labels(nil)
return b.Labels(EmptyLabels())
}(),
},
{

View file

@ -17,7 +17,6 @@ import (
"bufio"
"fmt"
"os"
"sort"
"strings"
)
@ -51,13 +50,14 @@ func ReadLabels(fn string, n int) ([]Labels, error) {
defer f.Close()
scanner := bufio.NewScanner(f)
b := ScratchBuilder{}
var mets []Labels
hashes := map[uint64]struct{}{}
i := 0
for scanner.Scan() && i < n {
m := make(Labels, 0, 10)
b.Reset()
r := strings.NewReplacer("\"", "", "{", "", "}", "")
s := r.Replace(scanner.Text())
@ -65,10 +65,11 @@ func ReadLabels(fn string, n int) ([]Labels, error) {
labelChunks := strings.Split(s, ",")
for _, labelChunk := range labelChunks {
split := strings.Split(labelChunk, ":")
m = append(m, Label{Name: split[0], Value: split[1]})
b.Add(split[0], split[1])
}
// Order of the k/v labels matters, don't assume we'll always receive them already sorted.
sort.Sort(m)
b.Sort()
m := b.Labels()
h := m.Hash()
if _, ok := hashes[h]; ok {

View file

@ -203,20 +203,20 @@ func (re Regexp) String() string {
// Process returns a relabeled copy of the given label set. The relabel configurations
// are applied in order of input.
// If a label set is dropped, nil is returned.
// If a label set is dropped, EmptyLabels and false is returned.
// May return the input labelSet modified.
func Process(lbls labels.Labels, cfgs ...*Config) labels.Labels {
lb := labels.NewBuilder(nil)
func Process(lbls labels.Labels, cfgs ...*Config) (ret labels.Labels, keep bool) {
lb := labels.NewBuilder(labels.EmptyLabels())
for _, cfg := range cfgs {
lbls = relabel(lbls, cfg, lb)
if lbls == nil {
return nil
lbls, keep = relabel(lbls, cfg, lb)
if !keep {
return labels.EmptyLabels(), false
}
}
return lbls
return lbls, true
}
func relabel(lset labels.Labels, cfg *Config, lb *labels.Builder) labels.Labels {
func relabel(lset labels.Labels, cfg *Config, lb *labels.Builder) (ret labels.Labels, keep bool) {
var va [16]string
values := va[:0]
if len(cfg.SourceLabels) > cap(values) {
@ -232,19 +232,19 @@ func relabel(lset labels.Labels, cfg *Config, lb *labels.Builder) labels.Labels
switch cfg.Action {
case Drop:
if cfg.Regex.MatchString(val) {
return nil
return labels.EmptyLabels(), false
}
case Keep:
if !cfg.Regex.MatchString(val) {
return nil
return labels.EmptyLabels(), false
}
case DropEqual:
if lset.Get(cfg.TargetLabel) == val {
return nil
return labels.EmptyLabels(), false
}
case KeepEqual:
if lset.Get(cfg.TargetLabel) != val {
return nil
return labels.EmptyLabels(), false
}
case Replace:
indexes := cfg.Regex.FindStringSubmatchIndex(val)
@ -271,29 +271,29 @@ func relabel(lset labels.Labels, cfg *Config, lb *labels.Builder) labels.Labels
mod := sum64(md5.Sum([]byte(val))) % cfg.Modulus
lb.Set(cfg.TargetLabel, fmt.Sprintf("%d", mod))
case LabelMap:
for _, l := range lset {
lset.Range(func(l labels.Label) {
if cfg.Regex.MatchString(l.Name) {
res := cfg.Regex.ReplaceAllString(l.Name, cfg.Replacement)
lb.Set(res, l.Value)
}
}
})
case LabelDrop:
for _, l := range lset {
lset.Range(func(l labels.Label) {
if cfg.Regex.MatchString(l.Name) {
lb.Del(l.Name)
}
}
})
case LabelKeep:
for _, l := range lset {
lset.Range(func(l labels.Label) {
if !cfg.Regex.MatchString(l.Name) {
lb.Del(l.Name)
}
}
})
default:
panic(fmt.Errorf("relabel: unknown relabel action type %q", cfg.Action))
}
return lb.Labels(lset)
return lb.Labels(lset), true
}
// sum64 sums the md5 hash to an uint64.

View file

@ -28,6 +28,7 @@ func TestRelabel(t *testing.T) {
input labels.Labels
relabel []*Config
output labels.Labels
drop bool
}{
{
input: labels.FromMap(map[string]string{
@ -101,7 +102,7 @@ func TestRelabel(t *testing.T) {
Action: Replace,
},
},
output: nil,
drop: true,
},
{
input: labels.FromMap(map[string]string{
@ -115,7 +116,7 @@ func TestRelabel(t *testing.T) {
Action: Drop,
},
},
output: nil,
drop: true,
},
{
input: labels.FromMap(map[string]string{
@ -177,7 +178,7 @@ func TestRelabel(t *testing.T) {
Action: Keep,
},
},
output: nil,
drop: true,
},
{
input: labels.FromMap(map[string]string{
@ -483,7 +484,7 @@ func TestRelabel(t *testing.T) {
TargetLabel: "__port1",
},
},
output: nil,
drop: true,
},
{
input: labels.FromMap(map[string]string{
@ -517,7 +518,7 @@ func TestRelabel(t *testing.T) {
TargetLabel: "__port2",
},
},
output: nil,
drop: true,
},
}
@ -538,8 +539,11 @@ func TestRelabel(t *testing.T) {
}
}
res := Process(test.input, test.relabel...)
require.Equal(t, test.output, res)
res, keep := Process(test.input, test.relabel...)
require.Equal(t, !test.drop, keep)
if keep {
require.Equal(t, test.output, res)
}
}
}
@ -721,7 +725,7 @@ func BenchmarkRelabel(b *testing.B) {
for _, tt := range tests {
b.Run(tt.name, func(b *testing.B) {
for i := 0; i < b.N; i++ {
_ = Process(tt.lbls, tt.cfgs...)
_, _ = Process(tt.lbls, tt.cfgs...)
}
})
}

View file

@ -22,7 +22,6 @@ import (
"fmt"
"io"
"math"
"sort"
"strings"
"unicode/utf8"
@ -82,6 +81,7 @@ func (l *openMetricsLexer) Error(es string) {
// This is based on the working draft https://docs.google.com/document/u/1/d/1KwV0mAXwwbvvifBvDKH_LU1YjyXE_wxCkHNoCGq1GX0/edit
type OpenMetricsParser struct {
l *openMetricsLexer
builder labels.ScratchBuilder
series []byte
text []byte
mtype MetricType
@ -158,14 +158,11 @@ func (p *OpenMetricsParser) Comment() []byte {
// Metric writes the labels of the current sample into the passed labels.
// It returns the string from which the metric was parsed.
func (p *OpenMetricsParser) Metric(l *labels.Labels) string {
// Allocate the full immutable string immediately, so we just
// have to create references on it below.
// Copy the buffer to a string: this is only necessary for the return value.
s := string(p.series)
*l = append(*l, labels.Label{
Name: labels.MetricName,
Value: s[:p.offsets[0]-p.start],
})
p.builder.Reset()
p.builder.Add(labels.MetricName, s[:p.offsets[0]-p.start])
for i := 1; i < len(p.offsets); i += 4 {
a := p.offsets[i] - p.start
@ -173,16 +170,16 @@ func (p *OpenMetricsParser) Metric(l *labels.Labels) string {
c := p.offsets[i+2] - p.start
d := p.offsets[i+3] - p.start
value := s[c:d]
// Replacer causes allocations. Replace only when necessary.
if strings.IndexByte(s[c:d], byte('\\')) >= 0 {
*l = append(*l, labels.Label{Name: s[a:b], Value: lvalReplacer.Replace(s[c:d])})
continue
value = lvalReplacer.Replace(value)
}
*l = append(*l, labels.Label{Name: s[a:b], Value: s[c:d]})
p.builder.Add(s[a:b], value)
}
// Sort labels.
sort.Sort(*l)
p.builder.Sort()
*l = p.builder.Labels()
return s
}
@ -204,17 +201,18 @@ func (p *OpenMetricsParser) Exemplar(e *exemplar.Exemplar) bool {
e.Ts = p.exemplarTs
}
p.builder.Reset()
for i := 0; i < len(p.eOffsets); i += 4 {
a := p.eOffsets[i] - p.start
b := p.eOffsets[i+1] - p.start
c := p.eOffsets[i+2] - p.start
d := p.eOffsets[i+3] - p.start
e.Labels = append(e.Labels, labels.Label{Name: s[a:b], Value: s[c:d]})
p.builder.Add(s[a:b], s[c:d])
}
// Sort the labels.
sort.Sort(e.Labels)
p.builder.Sort()
e.Labels = p.builder.Labels()
return true
}

View file

@ -246,7 +246,6 @@ foo_total 17.0 1520879607.789 # {xx="yy"} 5`
require.Equal(t, true, found)
require.Equal(t, *exp[i].e, e)
}
res = res[:0]
case EntryType:
m, typ := p.Type()

View file

@ -21,7 +21,6 @@ import (
"fmt"
"io"
"math"
"sort"
"strconv"
"strings"
"unicode/utf8"
@ -144,6 +143,7 @@ func (l *promlexer) Error(es string) {
// Prometheus text exposition format.
type PromParser struct {
l *promlexer
builder labels.ScratchBuilder
series []byte
text []byte
mtype MetricType
@ -212,14 +212,11 @@ func (p *PromParser) Comment() []byte {
// Metric writes the labels of the current sample into the passed labels.
// It returns the string from which the metric was parsed.
func (p *PromParser) Metric(l *labels.Labels) string {
// Allocate the full immutable string immediately, so we just
// have to create references on it below.
// Copy the buffer to a string: this is only necessary for the return value.
s := string(p.series)
*l = append(*l, labels.Label{
Name: labels.MetricName,
Value: s[:p.offsets[0]-p.start],
})
p.builder.Reset()
p.builder.Add(labels.MetricName, s[:p.offsets[0]-p.start])
for i := 1; i < len(p.offsets); i += 4 {
a := p.offsets[i] - p.start
@ -227,16 +224,16 @@ func (p *PromParser) Metric(l *labels.Labels) string {
c := p.offsets[i+2] - p.start
d := p.offsets[i+3] - p.start
value := s[c:d]
// Replacer causes allocations. Replace only when necessary.
if strings.IndexByte(s[c:d], byte('\\')) >= 0 {
*l = append(*l, labels.Label{Name: s[a:b], Value: lvalReplacer.Replace(s[c:d])})
continue
value = lvalReplacer.Replace(value)
}
*l = append(*l, labels.Label{Name: s[a:b], Value: s[c:d]})
p.builder.Add(s[a:b], value)
}
// Sort labels to maintain the sorted labels invariant.
sort.Sort(*l)
p.builder.Sort()
*l = p.builder.Labels()
return s
}

View file

@ -192,7 +192,6 @@ testmetric{label="\"bar\""} 1`
require.Equal(t, exp[i].t, ts)
require.Equal(t, exp[i].v, v)
require.Equal(t, exp[i].lset, res)
res = res[:0]
case EntryType:
m, typ := p.Type()
@ -414,7 +413,7 @@ func BenchmarkParse(b *testing.B) {
case EntrySeries:
m, _, _ := p.Series()
res := make(labels.Labels, 0, 5)
var res labels.Labels
p.Metric(&res)
total += len(m)
@ -426,7 +425,7 @@ func BenchmarkParse(b *testing.B) {
})
b.Run(parserName+"/decode-metric-reuse/"+fn, func(b *testing.B) {
total := 0
res := make(labels.Labels, 0, 5)
var res labels.Labels
b.SetBytes(int64(len(buf) / promtestdataSampleCount))
b.ReportAllocs()
@ -451,7 +450,6 @@ func BenchmarkParse(b *testing.B) {
total += len(m)
i++
res = res[:0]
}
}
}

View file

@ -19,7 +19,6 @@ import (
"fmt"
"io"
"math"
"sort"
"strings"
"unicode/utf8"
@ -59,6 +58,8 @@ type ProtobufParser struct {
// that we have to decode the next MetricFamily.
state Entry
builder labels.ScratchBuilder // held here to reduce allocations when building Labels
mf *dto.MetricFamily
// The following are just shenanigans to satisfy the Parser interface.
@ -245,23 +246,19 @@ func (p *ProtobufParser) Comment() []byte {
// Metric writes the labels of the current sample into the passed labels.
// It returns the string from which the metric was parsed.
func (p *ProtobufParser) Metric(l *labels.Labels) string {
*l = append(*l, labels.Label{
Name: labels.MetricName,
Value: p.getMagicName(),
})
p.builder.Reset()
p.builder.Add(labels.MetricName, p.getMagicName())
for _, lp := range p.mf.GetMetric()[p.metricPos].GetLabel() {
*l = append(*l, labels.Label{
Name: lp.GetName(),
Value: lp.GetValue(),
})
p.builder.Add(lp.GetName(), lp.GetValue())
}
if needed, name, value := p.getMagicLabel(); needed {
*l = append(*l, labels.Label{Name: name, Value: value})
p.builder.Add(name, value)
}
// Sort labels to maintain the sorted labels invariant.
sort.Sort(*l)
p.builder.Sort()
*l = p.builder.Labels()
return p.metricBytes.String()
}
@ -305,12 +302,12 @@ func (p *ProtobufParser) Exemplar(ex *exemplar.Exemplar) bool {
ex.HasTs = true
ex.Ts = ts.GetSeconds()*1000 + int64(ts.GetNanos()/1_000_000)
}
p.builder.Reset()
for _, lp := range exProto.GetLabel() {
ex.Labels = append(ex.Labels, labels.Label{
Name: lp.GetName(),
Value: lp.GetValue(),
})
p.builder.Add(lp.GetName(), lp.GetValue())
}
p.builder.Sort()
ex.Labels = p.builder.Labels()
return true
}

View file

@ -630,7 +630,6 @@ metric: <
require.Equal(t, true, found)
require.Equal(t, exp[i].e[0], e)
}
res = res[:0]
case EntryHistogram:
m, ts, shs, fhs := p.Histogram()
@ -642,7 +641,6 @@ metric: <
require.Equal(t, exp[i].t, int64(0))
}
require.Equal(t, exp[i].lset, res)
res = res[:0]
require.Equal(t, exp[i].m, string(m))
if shs != nil {
require.Equal(t, exp[i].shs, shs)

View file

@ -353,11 +353,11 @@ func (n *Manager) Send(alerts ...*Alert) {
for _, a := range alerts {
lb := labels.NewBuilder(a.Labels)
for _, l := range n.opts.ExternalLabels {
n.opts.ExternalLabels.Range(func(l labels.Label) {
if a.Labels.Get(l.Name) == "" {
lb.Set(l.Name, l.Value)
}
}
})
a.Labels = lb.Labels(a.Labels)
}
@ -394,8 +394,8 @@ func (n *Manager) relabelAlerts(alerts []*Alert) []*Alert {
var relabeledAlerts []*Alert
for _, alert := range alerts {
labels := relabel.Process(alert.Labels, n.opts.RelabelConfigs...)
if labels != nil {
labels, keep := relabel.Process(alert.Labels, n.opts.RelabelConfigs...)
if keep {
alert.Labels = labels
relabeledAlerts = append(relabeledAlerts, alert)
}
@ -570,9 +570,9 @@ func alertsToOpenAPIAlerts(alerts []*Alert) models.PostableAlerts {
func labelsToOpenAPILabelSet(modelLabelSet labels.Labels) models.LabelSet {
apiLabelSet := models.LabelSet{}
for _, label := range modelLabelSet {
modelLabelSet.Range(func(label labels.Label) {
apiLabelSet[label.Name] = label.Value
}
})
return apiLabelSet
}
@ -719,9 +719,9 @@ func AlertmanagerFromGroup(tg *targetgroup.Group, cfg *config.AlertmanagerConfig
}
}
lset := relabel.Process(labels.New(lbls...), cfg.RelabelConfigs...)
if lset == nil {
droppedAlertManagers = append(droppedAlertManagers, alertmanagerLabels{lbls})
lset, keep := relabel.Process(labels.New(lbls...), cfg.RelabelConfigs...)
if !keep {
droppedAlertManagers = append(droppedAlertManagers, alertmanagerLabels{labels.New(lbls...)})
continue
}

View file

@ -1567,7 +1567,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) {
case *parser.NumberLiteral:
return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
return append(enh.Out, Sample{Point: Point{V: e.Val}}), nil
return append(enh.Out, Sample{Point: Point{V: e.Val}, Metric: labels.EmptyLabels()}), nil
})
case *parser.StringLiteral:
@ -2190,7 +2190,7 @@ func resultMetric(lhs, rhs labels.Labels, op parser.ItemType, matching *parser.V
}
}
ret := enh.lb.Labels(nil)
ret := enh.lb.Labels(labels.EmptyLabels())
enh.resultMetric[str] = ret
return ret
}
@ -2230,7 +2230,7 @@ func (ev *evaluator) VectorscalarBinop(op parser.ItemType, lhs Vector, rhs Scala
}
func dropMetricName(l labels.Labels) labels.Labels {
return labels.NewBuilder(l).Del(labels.MetricName).Labels(nil)
return labels.NewBuilder(l).Del(labels.MetricName).Labels(labels.EmptyLabels())
}
// scalarBinop evaluates a binary operation between two Scalars.
@ -2357,7 +2357,7 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
}
}
lb := labels.NewBuilder(nil)
lb := labels.NewBuilder(labels.EmptyLabels())
var buf []byte
for si, s := range vec {
metric := s.Metric
@ -2365,7 +2365,7 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
if op == parser.COUNT_VALUES {
lb.Reset(metric)
lb.Set(valueLabel, strconv.FormatFloat(s.V, 'f', -1, 64))
metric = lb.Labels(nil)
metric = lb.Labels(labels.EmptyLabels())
// We've changed the metric so we have to recompute the grouping key.
recomputeGroupingKey = true
@ -2389,7 +2389,7 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
} else {
lb.Keep(grouping...)
}
m := lb.Labels(nil)
m := lb.Labels(labels.EmptyLabels())
newAgg := &groupedAggregation{
labels: m,
value: s.V,

View file

@ -684,6 +684,7 @@ load 10s
Result: Matrix{
Series{
Points: []Point{{V: 1, T: 0}, {V: 1, T: 1000}, {V: 1, T: 2000}},
Metric: labels.EmptyLabels(),
},
},
Start: time.Unix(0, 0),
@ -4008,7 +4009,7 @@ func TestSparseHistogram_Sum_Count_AddOperator(t *testing.T) {
// sum().
queryString := fmt.Sprintf("sum(%s)", seriesName)
queryAndCheck(queryString, []Sample{
{Point{T: ts, H: &c.expected}, labels.Labels{}},
{Point{T: ts, H: &c.expected}, labels.EmptyLabels()},
})
// + operator.
@ -4017,13 +4018,13 @@ func TestSparseHistogram_Sum_Count_AddOperator(t *testing.T) {
queryString += fmt.Sprintf(` + ignoring(idx) %s{idx="%d"}`, seriesName, idx)
}
queryAndCheck(queryString, []Sample{
{Point{T: ts, H: &c.expected}, labels.Labels{}},
{Point{T: ts, H: &c.expected}, labels.EmptyLabels()},
})
// count().
queryString = fmt.Sprintf("count(%s)", seriesName)
queryAndCheck(queryString, []Sample{
{Point{T: ts, V: 3}, labels.Labels{}},
{Point{T: ts, V: 3}, labels.EmptyLabels()},
})
})
}

View file

@ -957,7 +957,7 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev
if !ok {
sample.Metric = labels.NewBuilder(sample.Metric).
Del(excludedLabels...).
Labels(nil)
Labels(labels.EmptyLabels())
mb = &metricWithBuckets{sample.Metric, nil}
enh.signatureToMetricWithBuckets[string(enh.lblBuf)] = mb
@ -1077,7 +1077,7 @@ func funcLabelReplace(vals []parser.Value, args parser.Expressions, enh *EvalNod
if len(res) > 0 {
lb.Set(dst, string(res))
}
outMetric = lb.Labels(nil)
outMetric = lb.Labels(labels.EmptyLabels())
enh.Dmn[h] = outMetric
}
}
@ -1145,7 +1145,7 @@ func funcLabelJoin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHe
lb.Set(dst, strval)
}
outMetric = lb.Labels(nil)
outMetric = lb.Labels(labels.EmptyLabels())
enh.Dmn[h] = outMetric
}
@ -1383,7 +1383,7 @@ func (s *vectorByReverseValueHeap) Pop() interface{} {
// createLabelsForAbsentFunction returns the labels that are uniquely and exactly matched
// in a given expression. It is used in the absent functions.
func createLabelsForAbsentFunction(expr parser.Expr) labels.Labels {
m := labels.Labels{}
b := labels.NewBuilder(labels.EmptyLabels())
var lm []*labels.Matcher
switch n := expr.(type) {
@ -1392,25 +1392,26 @@ func createLabelsForAbsentFunction(expr parser.Expr) labels.Labels {
case *parser.MatrixSelector:
lm = n.VectorSelector.(*parser.VectorSelector).LabelMatchers
default:
return m
return labels.EmptyLabels()
}
empty := []string{}
// The 'has' map implements backwards-compatibility for historic behaviour:
// e.g. in `absent(x{job="a",job="b",foo="bar"})` then `job` is removed from the output.
// Note this gives arguably wrong behaviour for `absent(x{job="a",job="a",foo="bar"})`.
has := make(map[string]bool, len(lm))
for _, ma := range lm {
if ma.Name == labels.MetricName {
continue
}
if ma.Type == labels.MatchEqual && !m.Has(ma.Name) {
m = labels.NewBuilder(m).Set(ma.Name, ma.Value).Labels(nil)
if ma.Type == labels.MatchEqual && !has[ma.Name] {
b.Set(ma.Name, ma.Value)
has[ma.Name] = true
} else {
empty = append(empty, ma.Name)
b.Del(ma.Name)
}
}
for _, v := range empty {
m = labels.NewBuilder(m).Del(v).Labels(nil)
}
return m
return b.Labels(labels.EmptyLabels())
}
func stringFromArg(e parser.Expr) string {

View file

@ -16,13 +16,13 @@ package parser
import (
"math"
"sort"
"strconv"
"time"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/model/value"
)
%}
%union {
@ -32,6 +32,7 @@ import (
matcher *labels.Matcher
label labels.Label
labels labels.Labels
lblList []labels.Label
strings []string
series []SequenceValue
uint uint64
@ -138,10 +139,9 @@ START_METRIC_SELECTOR
// Type definitions for grammar rules.
%type <matchers> label_match_list
%type <matcher> label_matcher
%type <item> aggregate_op grouping_label match_op maybe_label metric_identifier unary_op at_modifier_preprocessors
%type <labels> label_set label_set_list metric
%type <labels> label_set metric
%type <lblList> label_set_list
%type <label> label_set_item
%type <strings> grouping_label_list grouping_labels maybe_grouping_labels
%type <series> series_item series_values
@ -567,7 +567,7 @@ label_matcher : IDENTIFIER match_op STRING
*/
metric : metric_identifier label_set
{ $$ = append($2, labels.Label{Name: labels.MetricName, Value: $1.Val}); sort.Sort($$) }
{ b := labels.NewBuilder($2); b.Set(labels.MetricName, $1.Val); $$ = b.Labels(labels.EmptyLabels()) }
| label_set
{$$ = $1}
;

View file

@ -9,7 +9,6 @@ import __yyfmt__ "fmt"
import (
"math"
"sort"
"strconv"
"time"
@ -26,6 +25,7 @@ type yySymType struct {
matcher *labels.Matcher
label labels.Label
labels labels.Labels
lblList []labels.Label
strings []string
series []SequenceValue
uint uint64
@ -458,7 +458,7 @@ var yyPact = [...]int{
var yyPgo = [...]int{
0, 267, 7, 265, 2, 264, 262, 164, 261, 257,
115, 253, 181, 8, 252, 4, 5, 251, 250, 0,
115, 181, 253, 8, 252, 4, 5, 251, 250, 0,
23, 248, 6, 247, 246, 245, 10, 64, 244, 239,
1, 231, 230, 9, 217, 21, 214, 213, 205, 201,
198, 196, 189, 188, 206, 3, 180, 165, 127,
@ -474,10 +474,10 @@ var yyR1 = [...]int{
31, 33, 33, 32, 32, 32, 40, 38, 38, 38,
24, 24, 24, 9, 9, 36, 42, 42, 42, 42,
42, 43, 44, 44, 44, 35, 35, 35, 1, 1,
1, 2, 2, 2, 2, 12, 12, 7, 7, 7,
1, 2, 2, 2, 2, 11, 11, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 10,
10, 10, 10, 11, 11, 11, 13, 13, 13, 13,
10, 10, 10, 12, 12, 12, 13, 13, 13, 13,
48, 18, 18, 18, 18, 17, 17, 17, 17, 17,
21, 21, 21, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 6, 6, 6, 6, 6,
@ -513,14 +513,14 @@ var yyR2 = [...]int{
}
var yyChk = [...]int{
-1000, -47, 75, 76, 77, 78, 2, 10, -12, -7,
-1000, -47, 75, 76, 77, 78, 2, 10, -11, -7,
-10, 47, 48, 62, 49, 50, 51, 12, 32, 33,
36, 52, 16, 53, 66, 54, 55, 56, 57, 58,
68, 71, 72, 13, -48, -12, 10, -30, -25, -28,
68, 71, 72, 13, -48, -11, 10, -30, -25, -28,
-31, -36, -37, -38, -40, -41, -42, -43, -44, -24,
-3, 12, 17, 15, 23, -8, -7, -35, 47, 48,
49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
26, 42, 13, -44, -10, -11, 18, -13, 12, 2,
26, 42, 13, -44, -10, -12, 18, -13, 12, 2,
-18, 2, 26, 44, 27, 28, 30, 31, 32, 33,
34, 35, 36, 37, 38, 39, 41, 42, 66, 43,
14, -26, -33, 2, 62, 68, 15, -33, -30, -30,
@ -1492,8 +1492,9 @@ yydefault:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/parser/generated_parser.y:570
{
yyVAL.labels = append(yyDollar[2].labels, labels.Label{Name: labels.MetricName, Value: yyDollar[1].item.Val})
sort.Sort(yyVAL.labels)
b := labels.NewBuilder(yyDollar[2].labels)
b.Set(labels.MetricName, yyDollar[1].item.Val)
yyVAL.labels = b.Labels(labels.EmptyLabels())
}
case 96:
yyDollar = yyS[yypt-1 : yypt+1]
@ -1505,13 +1506,13 @@ yydefault:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/parser/generated_parser.y:579
{
yyVAL.labels = labels.New(yyDollar[2].labels...)
yyVAL.labels = labels.New(yyDollar[2].lblList...)
}
case 120:
yyDollar = yyS[yypt-4 : yypt+1]
//line promql/parser/generated_parser.y:581
{
yyVAL.labels = labels.New(yyDollar[2].labels...)
yyVAL.labels = labels.New(yyDollar[2].lblList...)
}
case 121:
yyDollar = yyS[yypt-2 : yypt+1]
@ -1529,20 +1530,20 @@ yydefault:
yyDollar = yyS[yypt-3 : yypt+1]
//line promql/parser/generated_parser.y:589
{
yyVAL.labels = append(yyDollar[1].labels, yyDollar[3].label)
yyVAL.lblList = append(yyDollar[1].lblList, yyDollar[3].label)
}
case 124:
yyDollar = yyS[yypt-1 : yypt+1]
//line promql/parser/generated_parser.y:591
{
yyVAL.labels = []labels.Label{yyDollar[1].label}
yyVAL.lblList = []labels.Label{yyDollar[1].label}
}
case 125:
yyDollar = yyS[yypt-2 : yypt+1]
//line promql/parser/generated_parser.y:593
{
yylex.(*parser).unexpected("label set", "\",\" or \"}\"")
yyVAL.labels = yyDollar[1].labels
yyVAL.lblList = yyDollar[1].lblList
}
case 126:
yyDollar = yyS[yypt-3 : yypt+1]

View file

@ -202,7 +202,7 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
break
}
if f, err := parseNumber(defLine); err == nil {
cmd.expect(0, nil, parser.SequenceValue{Value: f})
cmd.expect(0, parser.SequenceValue{Value: f})
break
}
metric, vals, err := parser.ParseSeriesDesc(defLine)
@ -218,7 +218,7 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
if len(vals) > 1 {
return i, nil, raise(i, "expecting multiple values in instant evaluation not allowed")
}
cmd.expect(j, metric, vals...)
cmd.expectMetric(j, metric, vals...)
}
return i, cmd, nil
}
@ -368,13 +368,15 @@ func (ev *evalCmd) String() string {
return "eval"
}
// expect adds a new metric with a sequence of values to the set of expected
// expect adds a sequence of values to the set of expected
// results for the query.
func (ev *evalCmd) expect(pos int, m labels.Labels, vals ...parser.SequenceValue) {
if m == nil {
ev.expected[0] = entry{pos: pos, vals: vals}
return
}
func (ev *evalCmd) expect(pos int, vals ...parser.SequenceValue) {
ev.expected[0] = entry{pos: pos, vals: vals}
}
// expectMetric adds a new metric with a sequence of values to the set of expected
// results for the query.
func (ev *evalCmd) expectMetric(pos int, m labels.Labels, vals ...parser.SequenceValue) {
h := m.Hash()
ev.metrics[h] = m
ev.expected[h] = entry{pos: pos, vals: vals}

View file

@ -127,11 +127,11 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
require.NoError(t, err)
for _, s := range tc.series {
var matchers []*labels.Matcher
for _, label := range s.Metric {
s.Metric.Range(func(label labels.Label) {
m, err := labels.NewMatcher(labels.MatchEqual, label.Name, label.Value)
require.NoError(t, err)
matchers = append(matchers, m)
}
})
// Get the series for the matcher.
ss := querier.Select(false, nil, matchers...)

View file

@ -146,10 +146,7 @@ func NewAlertingRule(
labels, annotations, externalLabels labels.Labels, externalURL string,
restored bool, logger log.Logger,
) *AlertingRule {
el := make(map[string]string, len(externalLabels))
for _, lbl := range externalLabels {
el[lbl.Name] = lbl.Value
}
el := externalLabels.Map()
return &AlertingRule{
name: name,
@ -217,16 +214,16 @@ func (r *AlertingRule) Annotations() labels.Labels {
func (r *AlertingRule) sample(alert *Alert, ts time.Time) promql.Sample {
lb := labels.NewBuilder(r.labels)
for _, l := range alert.Labels {
alert.Labels.Range(func(l labels.Label) {
lb.Set(l.Name, l.Value)
}
})
lb.Set(labels.MetricName, alertMetricName)
lb.Set(labels.AlertName, r.name)
lb.Set(alertStateLabel, alert.State.String())
s := promql.Sample{
Metric: lb.Labels(nil),
Metric: lb.Labels(labels.EmptyLabels()),
Point: promql.Point{T: timestamp.FromTime(ts), V: 1},
}
return s
@ -236,15 +233,15 @@ func (r *AlertingRule) sample(alert *Alert, ts time.Time) promql.Sample {
func (r *AlertingRule) forStateSample(alert *Alert, ts time.Time, v float64) promql.Sample {
lb := labels.NewBuilder(r.labels)
for _, l := range alert.Labels {
alert.Labels.Range(func(l labels.Label) {
lb.Set(l.Name, l.Value)
}
})
lb.Set(labels.MetricName, alertForStateMetricName)
lb.Set(labels.AlertName, r.name)
s := promql.Sample{
Metric: lb.Labels(nil),
Metric: lb.Labels(labels.EmptyLabels()),
Point: promql.Point{T: timestamp.FromTime(ts), V: v},
}
return s
@ -254,13 +251,13 @@ func (r *AlertingRule) forStateSample(alert *Alert, ts time.Time, v float64) pro
func (r *AlertingRule) QueryforStateSeries(alert *Alert, q storage.Querier) (storage.Series, error) {
smpl := r.forStateSample(alert, time.Now(), 0)
var matchers []*labels.Matcher
for _, l := range smpl.Metric {
smpl.Metric.Range(func(l labels.Label) {
mt, err := labels.NewMatcher(labels.MatchEqual, l.Name, l.Value)
if err != nil {
panic(err)
}
matchers = append(matchers, mt)
}
})
sset := q.Select(false, nil, matchers...)
var s storage.Series
@ -268,7 +265,7 @@ func (r *AlertingRule) QueryforStateSeries(alert *Alert, q storage.Querier) (sto
// Query assures that smpl.Metric is included in sset.At().Labels(),
// hence just checking the length would act like equality.
// (This is faster than calling labels.Compare again as we already have some info).
if len(sset.At().Labels()) == len(matchers) {
if sset.At().Labels().Len() == len(matchers) {
s = sset.At()
break
}
@ -327,10 +324,7 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc,
alerts := make(map[uint64]*Alert, len(res))
for _, smpl := range res {
// Provide the alert information to the template.
l := make(map[string]string, len(smpl.Metric))
for _, lbl := range smpl.Metric {
l[lbl.Name] = lbl.Value
}
l := smpl.Metric.Map()
tmplData := template.AlertTemplateData(l, r.externalLabels, r.externalURL, smpl.V)
// Inject some convenience variables that are easier to remember for users
@ -363,17 +357,18 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc,
lb := labels.NewBuilder(smpl.Metric).Del(labels.MetricName)
for _, l := range r.labels {
r.labels.Range(func(l labels.Label) {
lb.Set(l.Name, expand(l.Value))
}
})
lb.Set(labels.AlertName, r.Name())
annotations := make(labels.Labels, 0, len(r.annotations))
for _, a := range r.annotations {
annotations = append(annotations, labels.Label{Name: a.Name, Value: expand(a.Value)})
}
sb := labels.ScratchBuilder{}
r.annotations.Range(func(a labels.Label) {
sb.Add(a.Name, expand(a.Value))
})
annotations := sb.Labels()
lbs := lb.Labels(nil)
lbs := lb.Labels(labels.EmptyLabels())
h := lbs.Hash()
resultFPs[h] = struct{}{}

View file

@ -85,11 +85,11 @@ func (rule *RecordingRule) Eval(ctx context.Context, ts time.Time, query QueryFu
lb.Set(labels.MetricName, rule.name)
for _, l := range rule.labels {
rule.labels.Range(func(l labels.Label) {
lb.Set(l.Name, l.Value)
}
})
sample.Metric = lb.Labels(nil)
sample.Metric = lb.Labels(labels.EmptyLabels())
}
// Check that the rule does not produce identical metrics after applying

View file

@ -149,8 +149,8 @@ func TestPopulateLabels(t *testing.T) {
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
res: labels.EmptyLabels(),
resOrig: labels.EmptyLabels(),
err: "no address",
},
// Address label missing, but added in relabelling.
@ -242,8 +242,8 @@ func TestPopulateLabels(t *testing.T) {
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
res: labels.EmptyLabels(),
resOrig: labels.EmptyLabels(),
err: "invalid label value for \"custom\": \"\\xbd\"",
},
// Invalid duration in interval label.
@ -259,8 +259,8 @@ func TestPopulateLabels(t *testing.T) {
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
res: labels.EmptyLabels(),
resOrig: labels.EmptyLabels(),
err: "error parsing scrape interval: not a valid duration string: \"2notseconds\"",
},
// Invalid duration in timeout label.
@ -276,8 +276,8 @@ func TestPopulateLabels(t *testing.T) {
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
res: labels.EmptyLabels(),
resOrig: labels.EmptyLabels(),
err: "error parsing scrape timeout: not a valid duration string: \"2notseconds\"",
},
// 0 interval in timeout label.
@ -293,8 +293,8 @@ func TestPopulateLabels(t *testing.T) {
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
res: labels.EmptyLabels(),
resOrig: labels.EmptyLabels(),
err: "scrape interval cannot be 0",
},
// 0 duration in timeout label.
@ -310,8 +310,8 @@ func TestPopulateLabels(t *testing.T) {
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
res: labels.EmptyLabels(),
resOrig: labels.EmptyLabels(),
err: "scrape timeout cannot be 0",
},
// Timeout less than interval.
@ -328,8 +328,8 @@ func TestPopulateLabels(t *testing.T) {
ScrapeInterval: model.Duration(time.Second),
ScrapeTimeout: model.Duration(time.Second),
},
res: nil,
resOrig: nil,
res: labels.EmptyLabels(),
resOrig: labels.EmptyLabels(),
err: "scrape timeout cannot be greater than scrape interval (\"2s\" > \"1s\")",
},
// Don't attach default port.

View file

@ -268,6 +268,7 @@ type scrapeLoopOptions struct {
const maxAheadTime = 10 * time.Minute
// returning an empty label set is interpreted as "drop"
type labelsMutator func(labels.Labels) labels.Labels
func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed uint64, logger log.Logger, options *Options) (*scrapePool, error) {
@ -498,9 +499,9 @@ func (sp *scrapePool) Sync(tgs []*targetgroup.Group) {
}
targetSyncFailed.WithLabelValues(sp.config.JobName).Add(float64(len(failures)))
for _, t := range targets {
if t.Labels().Len() > 0 {
if !t.Labels().IsEmpty() {
all = append(all, t)
} else if t.DiscoveredLabels().Len() > 0 {
} else if !t.DiscoveredLabels().IsEmpty() {
sp.droppedTargets = append(sp.droppedTargets, t)
}
}
@ -634,7 +635,7 @@ func verifyLabelLimits(lset labels.Labels, limits *labelLimits) error {
met := lset.Get(labels.MetricName)
if limits.labelLimit > 0 {
nbLabels := len(lset)
nbLabels := lset.Len()
if nbLabels > int(limits.labelLimit) {
return fmt.Errorf("label_limit exceeded (metric: %.50s, number of labels: %d, limit: %d)", met, nbLabels, limits.labelLimit)
}
@ -644,7 +645,7 @@ func verifyLabelLimits(lset labels.Labels, limits *labelLimits) error {
return nil
}
for _, l := range lset {
return lset.Validate(func(l labels.Label) error {
if limits.labelNameLengthLimit > 0 {
nameLength := len(l.Name)
if nameLength > int(limits.labelNameLengthLimit) {
@ -658,8 +659,8 @@ func verifyLabelLimits(lset labels.Labels, limits *labelLimits) error {
return fmt.Errorf("label_value_length_limit exceeded (metric: %.50s, label name: %.50s, value: %.50q, length: %d, limit: %d)", met, l.Name, l.Value, valueLength, limits.labelValueLengthLimit)
}
}
}
return nil
return nil
})
}
func mutateSampleLabels(lset labels.Labels, target *Target, honor bool, rc []*relabel.Config) labels.Labels {
@ -667,37 +668,37 @@ func mutateSampleLabels(lset labels.Labels, target *Target, honor bool, rc []*re
targetLabels := target.Labels()
if honor {
for _, l := range targetLabels {
targetLabels.Range(func(l labels.Label) {
if !lset.Has(l.Name) {
lb.Set(l.Name, l.Value)
}
}
})
} else {
var conflictingExposedLabels labels.Labels
for _, l := range targetLabels {
var conflictingExposedLabels []labels.Label
targetLabels.Range(func(l labels.Label) {
existingValue := lset.Get(l.Name)
if existingValue != "" {
conflictingExposedLabels = append(conflictingExposedLabels, labels.Label{Name: l.Name, Value: existingValue})
}
// It is now safe to set the target label.
lb.Set(l.Name, l.Value)
}
})
if len(conflictingExposedLabels) > 0 {
resolveConflictingExposedLabels(lb, lset, targetLabels, conflictingExposedLabels)
}
}
res := lb.Labels(nil)
res := lb.Labels(labels.EmptyLabels())
if len(rc) > 0 {
res = relabel.Process(res, rc...)
res, _ = relabel.Process(res, rc...)
}
return res
}
func resolveConflictingExposedLabels(lb *labels.Builder, exposedLabels, targetLabels, conflictingExposedLabels labels.Labels) {
func resolveConflictingExposedLabels(lb *labels.Builder, exposedLabels, targetLabels labels.Labels, conflictingExposedLabels []labels.Label) {
sort.SliceStable(conflictingExposedLabels, func(i, j int) bool {
return len(conflictingExposedLabels[i].Name) < len(conflictingExposedLabels[j].Name)
})
@ -708,7 +709,7 @@ func resolveConflictingExposedLabels(lb *labels.Builder, exposedLabels, targetLa
newName = model.ExportedLabelPrefix + newName
if !exposedLabels.Has(newName) &&
!targetLabels.Has(newName) &&
!conflictingExposedLabels[:i].Has(newName) {
!labelSliceHas(conflictingExposedLabels[:i], newName) {
conflictingExposedLabels[i].Name = newName
break
}
@ -720,15 +721,24 @@ func resolveConflictingExposedLabels(lb *labels.Builder, exposedLabels, targetLa
}
}
func labelSliceHas(lbls []labels.Label, name string) bool {
for _, l := range lbls {
if l.Name == name {
return true
}
}
return false
}
func mutateReportSampleLabels(lset labels.Labels, target *Target) labels.Labels {
lb := labels.NewBuilder(lset)
for _, l := range target.Labels() {
target.Labels().Range(func(l labels.Label) {
lb.Set(model.ExportedLabelPrefix+l.Name, lset.Get(l.Name))
lb.Set(l.Name, l.Value)
}
})
return lb.Labels(nil)
return lb.Labels(labels.EmptyLabels())
}
// appender returns an appender for ingested samples from the target.
@ -1599,8 +1609,8 @@ loop:
// and relabeling and store the final label set.
lset = sl.sampleMutator(lset)
// The label set may be set to nil to indicate dropping.
if lset == nil {
// The label set may be set to empty to indicate dropping.
if lset.IsEmpty() {
sl.cache.addDropped(mets)
continue
}
@ -1857,12 +1867,10 @@ func (sl *scrapeLoop) addReportSample(app storage.Appender, s string, t int64, v
ref = ce.ref
lset = ce.lset
} else {
lset = labels.Labels{
// The constants are suffixed with the invalid \xff unicode rune to avoid collisions
// with scraped metrics in the cache.
// We have to drop it when building the actual metric.
labels.Label{Name: labels.MetricName, Value: s[:len(s)-1]},
}
// The constants are suffixed with the invalid \xff unicode rune to avoid collisions
// with scraped metrics in the cache.
// We have to drop it when building the actual metric.
lset = labels.FromStrings(labels.MetricName, s[:len(s)-1])
lset = sl.reportSampleMutator(lset)
}

View file

@ -1623,7 +1623,7 @@ func TestScrapeLoopAppendSampleLimit(t *testing.T) {
nil, nil, nil,
func(l labels.Labels) labels.Labels {
if l.Has("deleteme") {
return nil
return labels.EmptyLabels()
}
return l
},

View file

@ -172,22 +172,20 @@ func (t *Target) offset(interval time.Duration, jitterSeed uint64) time.Duration
// Labels returns a copy of the set of all public labels of the target.
func (t *Target) Labels() labels.Labels {
lset := make(labels.Labels, 0, len(t.labels))
for _, l := range t.labels {
b := labels.NewScratchBuilder(t.labels.Len())
t.labels.Range(func(l labels.Label) {
if !strings.HasPrefix(l.Name, model.ReservedLabelPrefix) {
lset = append(lset, l)
b.Add(l.Name, l.Value)
}
}
return lset
})
return b.Labels()
}
// DiscoveredLabels returns a copy of the target's labels before any processing.
func (t *Target) DiscoveredLabels() labels.Labels {
t.mtx.Lock()
defer t.mtx.Unlock()
lset := make(labels.Labels, len(t.discoveredLabels))
copy(lset, t.discoveredLabels)
return lset
return t.discoveredLabels.Copy()
}
// SetDiscoveredLabels sets new DiscoveredLabels
@ -205,9 +203,9 @@ func (t *Target) URL() *url.URL {
params[k] = make([]string, len(v))
copy(params[k], v)
}
for _, l := range t.labels {
t.labels.Range(func(l labels.Label) {
if !strings.HasPrefix(l.Name, model.ParamLabelPrefix) {
continue
return
}
ks := l.Name[len(model.ParamLabelPrefix):]
@ -216,7 +214,7 @@ func (t *Target) URL() *url.URL {
} else {
params[ks] = []string{l.Value}
}
}
})
return &url.URL{
Scheme: t.labels.Get(model.SchemeLabel),
@ -374,15 +372,15 @@ func PopulateLabels(lset labels.Labels, cfg *config.ScrapeConfig, noDefaultPort
}
}
preRelabelLabels := lb.Labels(nil)
lset = relabel.Process(preRelabelLabels, cfg.RelabelConfigs...)
preRelabelLabels := lb.Labels(labels.EmptyLabels())
lset, keep := relabel.Process(preRelabelLabels, cfg.RelabelConfigs...)
// Check if the target was dropped.
if lset == nil {
return nil, preRelabelLabels, nil
if !keep {
return labels.EmptyLabels(), preRelabelLabels, nil
}
if v := lset.Get(model.AddressLabel); v == "" {
return nil, nil, errors.New("no address")
return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("no address")
}
lb = labels.NewBuilder(lset)
@ -413,7 +411,7 @@ func PopulateLabels(lset labels.Labels, cfg *config.ScrapeConfig, noDefaultPort
case "https":
addr = addr + ":443"
default:
return nil, nil, errors.Errorf("invalid scheme: %q", cfg.Scheme)
return labels.EmptyLabels(), labels.EmptyLabels(), errors.Errorf("invalid scheme: %q", cfg.Scheme)
}
lb.Set(model.AddressLabel, addr)
}
@ -434,50 +432,54 @@ func PopulateLabels(lset labels.Labels, cfg *config.ScrapeConfig, noDefaultPort
}
if err := config.CheckTargetAddress(model.LabelValue(addr)); err != nil {
return nil, nil, err
return labels.EmptyLabels(), labels.EmptyLabels(), err
}
interval := lset.Get(model.ScrapeIntervalLabel)
intervalDuration, err := model.ParseDuration(interval)
if err != nil {
return nil, nil, errors.Errorf("error parsing scrape interval: %v", err)
return labels.EmptyLabels(), labels.EmptyLabels(), errors.Errorf("error parsing scrape interval: %v", err)
}
if time.Duration(intervalDuration) == 0 {
return nil, nil, errors.New("scrape interval cannot be 0")
return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("scrape interval cannot be 0")
}
timeout := lset.Get(model.ScrapeTimeoutLabel)
timeoutDuration, err := model.ParseDuration(timeout)
if err != nil {
return nil, nil, errors.Errorf("error parsing scrape timeout: %v", err)
return labels.EmptyLabels(), labels.EmptyLabels(), errors.Errorf("error parsing scrape timeout: %v", err)
}
if time.Duration(timeoutDuration) == 0 {
return nil, nil, errors.New("scrape timeout cannot be 0")
return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("scrape timeout cannot be 0")
}
if timeoutDuration > intervalDuration {
return nil, nil, errors.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
return labels.EmptyLabels(), labels.EmptyLabels(), errors.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
}
// Meta labels are deleted after relabelling. Other internal labels propagate to
// the target which decides whether they will be part of their label set.
for _, l := range lset {
lset.Range(func(l labels.Label) {
if strings.HasPrefix(l.Name, model.MetaLabelPrefix) {
lb.Del(l.Name)
}
}
})
// Default the instance label to the target address.
if v := lset.Get(model.InstanceLabel); v == "" {
lb.Set(model.InstanceLabel, addr)
}
res = lb.Labels(nil)
for _, l := range res {
res = lb.Labels(labels.EmptyLabels())
err = res.Validate(func(l labels.Label) error {
// Check label values are valid, drop the target if not.
if !model.LabelValue(l.Value).IsValid() {
return nil, nil, errors.Errorf("invalid label value for %q: %q", l.Name, l.Value)
return errors.Errorf("invalid label value for %q: %q", l.Name, l.Value)
}
return nil
})
if err != nil {
return labels.EmptyLabels(), labels.EmptyLabels(), err
}
return res, preRelabelLabels, nil
}
@ -501,12 +503,12 @@ func TargetsFromGroup(tg *targetgroup.Group, cfg *config.ScrapeConfig, noDefault
lset := labels.New(lbls...)
lbls, origLabels, err := PopulateLabels(lset, cfg, noDefaultPort)
lset, origLabels, err := PopulateLabels(lset, cfg, noDefaultPort)
if err != nil {
failures = append(failures, errors.Wrapf(err, "instance %d in group %s", i, tg))
}
if lbls != nil || origLabels != nil {
targets = append(targets, NewTarget(lbls, origLabels, cfg.Params))
if !lset.IsEmpty() || !origLabels.IsEmpty() {
targets = append(targets, NewTarget(lset, origLabels, cfg.Params))
}
}
return targets, failures

View file

@ -129,7 +129,7 @@ func newTestTarget(targetURL string, deadline time.Duration, lbls labels.Labels)
lb.Set(model.AddressLabel, strings.TrimPrefix(targetURL, "http://"))
lb.Set(model.MetricsPathLabel, "/metrics")
return &Target{labels: lb.Labels(nil)}
return &Target{labels: lb.Labels(labels.EmptyLabels())}
}
func TestNewHTTPBearerToken(t *testing.T) {

View file

@ -699,7 +699,7 @@ func (c *compactChunkIterator) Next() bool {
// 1:1 duplicates, skip it.
} else {
// We operate on same series, so labels does not matter here.
overlapping = append(overlapping, newChunkToSeriesDecoder(nil, next))
overlapping = append(overlapping, newChunkToSeriesDecoder(labels.EmptyLabels(), next))
if next.MaxTime > oMaxTime {
oMaxTime = next.MaxTime
}
@ -716,7 +716,7 @@ func (c *compactChunkIterator) Next() bool {
}
// Add last as it's not yet included in overlap. We operate on same series, so labels does not matter here.
iter = NewSeriesToChunkEncoder(c.mergeFunc(append(overlapping, newChunkToSeriesDecoder(nil, c.curr))...)).Iterator(nil)
iter = NewSeriesToChunkEncoder(c.mergeFunc(append(overlapping, newChunkToSeriesDecoder(labels.EmptyLabels(), c.curr))...)).Iterator(nil)
if !iter.Next() {
if c.err = iter.Err(); c.err != nil {
return false

View file

@ -153,10 +153,10 @@ func ToQueryResult(ss storage.SeriesSet, sampleLimit int) (*prompb.QueryResult,
func FromQueryResult(sortSeries bool, res *prompb.QueryResult) storage.SeriesSet {
series := make([]storage.Series, 0, len(res.Timeseries))
for _, ts := range res.Timeseries {
lbls := labelProtosToLabels(ts.Labels)
if err := validateLabelsAndMetricName(lbls); err != nil {
if err := validateLabelsAndMetricName(ts.Labels); err != nil {
return errSeriesSet{err: err}
}
lbls := labelProtosToLabels(ts.Labels)
series = append(series, &concreteSeries{labels: lbls, samples: ts.Samples})
}
@ -348,7 +348,7 @@ type concreteSeries struct {
}
func (c *concreteSeries) Labels() labels.Labels {
return labels.New(c.labels...)
return c.labels.Copy()
}
func (c *concreteSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator {
@ -443,7 +443,7 @@ func (c *concreteSeriesIterator) Err() error {
// validateLabelsAndMetricName validates the label names/values and metric names returned from remote read,
// also making sure that there are no labels with duplicate names
func validateLabelsAndMetricName(ls labels.Labels) error {
func validateLabelsAndMetricName(ls []prompb.Label) error {
for i, l := range ls {
if l.Name == labels.MetricName && !model.IsValidMetricName(model.LabelValue(l.Value)) {
return fmt.Errorf("invalid metric name: %v", l.Value)
@ -583,30 +583,24 @@ func LabelProtosToMetric(labelPairs []*prompb.Label) model.Metric {
}
func labelProtosToLabels(labelPairs []prompb.Label) labels.Labels {
result := make(labels.Labels, 0, len(labelPairs))
b := labels.ScratchBuilder{}
for _, l := range labelPairs {
result = append(result, labels.Label{
Name: l.Name,
Value: l.Value,
})
b.Add(l.Name, l.Value)
}
sort.Sort(result)
return result
b.Sort()
return b.Labels()
}
// labelsToLabelsProto transforms labels into prompb labels. The buffer slice
// will be used to avoid allocations if it is big enough to store the labels.
func labelsToLabelsProto(labels labels.Labels, buf []prompb.Label) []prompb.Label {
func labelsToLabelsProto(lbls labels.Labels, buf []prompb.Label) []prompb.Label {
result := buf[:0]
if cap(buf) < len(labels) {
result = make([]prompb.Label, 0, len(labels))
}
for _, l := range labels {
lbls.Range(func(l labels.Label) {
result = append(result, prompb.Label{
Name: l.Name,
Value: l.Value,
})
}
})
return result
}

View file

@ -74,86 +74,86 @@ var writeRequestFixture = &prompb.WriteRequest{
func TestValidateLabelsAndMetricName(t *testing.T) {
tests := []struct {
input labels.Labels
input []prompb.Label
expectedErr string
description string
}{
{
input: labels.FromStrings(
"__name__", "name",
"labelName", "labelValue",
),
input: []prompb.Label{
{Name: "__name__", Value: "name"},
{Name: "labelName", Value: "labelValue"},
},
expectedErr: "",
description: "regular labels",
},
{
input: labels.FromStrings(
"__name__", "name",
"_labelName", "labelValue",
),
input: []prompb.Label{
{Name: "__name__", Value: "name"},
{Name: "_labelName", Value: "labelValue"},
},
expectedErr: "",
description: "label name with _",
},
{
input: labels.FromStrings(
"__name__", "name",
"@labelName", "labelValue",
),
input: []prompb.Label{
{Name: "__name__", Value: "name"},
{Name: "@labelName", Value: "labelValue"},
},
expectedErr: "invalid label name: @labelName",
description: "label name with @",
},
{
input: labels.FromStrings(
"__name__", "name",
"123labelName", "labelValue",
),
input: []prompb.Label{
{Name: "__name__", Value: "name"},
{Name: "123labelName", Value: "labelValue"},
},
expectedErr: "invalid label name: 123labelName",
description: "label name starts with numbers",
},
{
input: labels.FromStrings(
"__name__", "name",
"", "labelValue",
),
input: []prompb.Label{
{Name: "__name__", Value: "name"},
{Name: "", Value: "labelValue"},
},
expectedErr: "invalid label name: ",
description: "label name is empty string",
},
{
input: labels.FromStrings(
"__name__", "name",
"labelName", string([]byte{0xff}),
),
input: []prompb.Label{
{Name: "__name__", Value: "name"},
{Name: "labelName", Value: string([]byte{0xff})},
},
expectedErr: "invalid label value: " + string([]byte{0xff}),
description: "label value is an invalid UTF-8 value",
},
{
input: labels.FromStrings(
"__name__", "@invalid_name",
),
input: []prompb.Label{
{Name: "__name__", Value: "@invalid_name"},
},
expectedErr: "invalid metric name: @invalid_name",
description: "metric name starts with @",
},
{
input: labels.FromStrings(
"__name__", "name1",
"__name__", "name2",
),
input: []prompb.Label{
{Name: "__name__", Value: "name1"},
{Name: "__name__", Value: "name2"},
},
expectedErr: "duplicate label with name: __name__",
description: "duplicate label names",
},
{
input: labels.FromStrings(
"label1", "name",
"label2", "name",
),
input: []prompb.Label{
{Name: "label1", Value: "name"},
{Name: "label2", Value: "name"},
},
expectedErr: "",
description: "duplicate label values",
},
{
input: labels.FromStrings(
"", "name",
"label2", "name",
),
input: []prompb.Label{
{Name: "", Value: "name"},
{Name: "label2", Value: "name"},
},
expectedErr: "invalid label name: ",
description: "don't report as duplicate label name",
},
@ -200,8 +200,7 @@ func TestConcreteSeriesClonesLabels(t *testing.T) {
gotLabels := cs.Labels()
require.Equal(t, lbls, gotLabels)
gotLabels[0].Value = "foo"
gotLabels[1].Value = "bar"
gotLabels.CopyFrom(labels.FromStrings("a", "foo", "c", "foo"))
gotLabels = cs.Labels()
require.Equal(t, lbls, gotLabels)

View file

@ -396,7 +396,7 @@ type QueueManager struct {
flushDeadline time.Duration
cfg config.QueueConfig
mcfg config.MetadataConfig
externalLabels labels.Labels
externalLabels []labels.Label
relabelConfigs []*relabel.Config
sendExemplars bool
sendNativeHistograms bool
@ -454,13 +454,19 @@ func NewQueueManager(
logger = log.NewNopLogger()
}
// Copy externalLabels into slice which we need for processExternalLabels.
extLabelsSlice := make([]labels.Label, 0, externalLabels.Len())
externalLabels.Range(func(l labels.Label) {
extLabelsSlice = append(extLabelsSlice, l)
})
logger = log.With(logger, remoteName, client.Name(), endpoint, client.Endpoint())
t := &QueueManager{
logger: logger,
flushDeadline: flushDeadline,
cfg: cfg,
mcfg: mCfg,
externalLabels: externalLabels,
externalLabels: extLabelsSlice,
relabelConfigs: relabelConfigs,
storeClient: client,
sendExemplars: enableExemplarRemoteWrite,
@ -769,8 +775,8 @@ func (t *QueueManager) StoreSeries(series []record.RefSeries, index int) {
t.seriesSegmentIndexes[s.Ref] = index
ls := processExternalLabels(s.Labels, t.externalLabels)
lbls := relabel.Process(ls, t.relabelConfigs...)
if len(lbls) == 0 {
lbls, keep := relabel.Process(ls, t.relabelConfigs...)
if !keep || lbls.IsEmpty() {
t.droppedSeries[s.Ref] = struct{}{}
continue
}
@ -831,44 +837,33 @@ func (t *QueueManager) client() WriteClient {
}
func (t *QueueManager) internLabels(lbls labels.Labels) {
for i, l := range lbls {
lbls[i].Name = t.interner.intern(l.Name)
lbls[i].Value = t.interner.intern(l.Value)
}
lbls.InternStrings(t.interner.intern)
}
func (t *QueueManager) releaseLabels(ls labels.Labels) {
for _, l := range ls {
t.interner.release(l.Name)
t.interner.release(l.Value)
}
ls.ReleaseStrings(t.interner.release)
}
// processExternalLabels merges externalLabels into ls. If ls contains
// a label in externalLabels, the value in ls wins.
func processExternalLabels(ls, externalLabels labels.Labels) labels.Labels {
i, j, result := 0, 0, make(labels.Labels, 0, len(ls)+len(externalLabels))
for i < len(ls) && j < len(externalLabels) {
if ls[i].Name < externalLabels[j].Name {
result = append(result, labels.Label{
Name: ls[i].Name,
Value: ls[i].Value,
})
i++
} else if ls[i].Name > externalLabels[j].Name {
result = append(result, externalLabels[j])
j++
} else {
result = append(result, labels.Label{
Name: ls[i].Name,
Value: ls[i].Value,
})
i++
func processExternalLabels(ls labels.Labels, externalLabels []labels.Label) labels.Labels {
b := labels.NewScratchBuilder(ls.Len() + len(externalLabels))
j := 0
ls.Range(func(l labels.Label) {
for j < len(externalLabels) && l.Name > externalLabels[j].Name {
b.Add(externalLabels[j].Name, externalLabels[j].Value)
j++
}
if j < len(externalLabels) && l.Name == externalLabels[j].Name {
j++
}
b.Add(l.Name, l.Value)
})
for ; j < len(externalLabels); j++ {
b.Add(externalLabels[j].Name, externalLabels[j].Value)
}
return append(append(result, ls[i:]...), externalLabels[j:]...)
return b.Labels()
}
func (t *QueueManager) updateShardsLoop() {

View file

@ -161,7 +161,7 @@ func TestMetadataDelivery(t *testing.T) {
mcfg := config.DefaultMetadataConfig
metrics := newQueueManagerMetrics(nil, "", "")
m := NewQueueManager(metrics, nil, nil, nil, dir, newEWMARate(ewmaWeight, shardUpdateDuration), cfg, mcfg, nil, nil, c, defaultFlushDeadline, newPool(), newHighestTimestampMetric(), nil, false, false)
m := NewQueueManager(metrics, nil, nil, nil, dir, newEWMARate(ewmaWeight, shardUpdateDuration), cfg, mcfg, labels.EmptyLabels(), nil, c, defaultFlushDeadline, newPool(), newHighestTimestampMetric(), nil, false, false)
m.Start()
defer m.Stop()
@ -539,6 +539,7 @@ func TestShouldReshard(t *testing.T) {
func createTimeseries(numSamples, numSeries int, extraLabels ...labels.Label) ([]record.RefSample, []record.RefSeries) {
samples := make([]record.RefSample, 0, numSamples)
series := make([]record.RefSeries, 0, numSeries)
b := labels.ScratchBuilder{}
for i := 0; i < numSeries; i++ {
name := fmt.Sprintf("test_metric_%d", i)
for j := 0; j < numSamples; j++ {
@ -548,9 +549,16 @@ func createTimeseries(numSamples, numSeries int, extraLabels ...labels.Label) ([
V: float64(i),
})
}
// Create Labels that is name of series plus any extra labels supplied.
b.Reset()
b.Add(labels.MetricName, name)
for _, l := range extraLabels {
b.Add(l.Name, l.Value)
}
b.Sort()
series = append(series, record.RefSeries{
Ref: chunks.HeadSeriesRef(i),
Labels: append(labels.Labels{{Name: "__name__", Value: name}}, extraLabels...),
Labels: b.Labels(),
})
}
return samples, series
@ -603,7 +611,7 @@ func createHistograms(numSamples, numSeries int) ([]record.RefHistogramSample, [
}
series = append(series, record.RefSeries{
Ref: chunks.HeadSeriesRef(i),
Labels: labels.Labels{{Name: "__name__", Value: name}},
Labels: labels.FromStrings("__name__", name),
})
}
return histograms, series
@ -815,7 +823,7 @@ func BenchmarkSampleSend(b *testing.B) {
const numSeries = 10000
// Extra labels to make a more realistic workload - taken from Kubernetes' embedded cAdvisor metrics.
extraLabels := labels.Labels{
extraLabels := []labels.Label{
{Name: "kubernetes_io_arch", Value: "amd64"},
{Name: "kubernetes_io_instance_type", Value: "c3.somesize"},
{Name: "kubernetes_io_os", Value: "linux"},
@ -902,56 +910,63 @@ func BenchmarkStartup(b *testing.B) {
func TestProcessExternalLabels(t *testing.T) {
for _, tc := range []struct {
labels labels.Labels
externalLabels labels.Labels
externalLabels []labels.Label
expected labels.Labels
}{
// Test adding labels at the end.
{
labels: labels.Labels{{Name: "a", Value: "b"}},
externalLabels: labels.Labels{{Name: "c", Value: "d"}},
expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}},
labels: labels.FromStrings("a", "b"),
externalLabels: []labels.Label{{Name: "c", Value: "d"}},
expected: labels.FromStrings("a", "b", "c", "d"),
},
// Test adding labels at the beginning.
{
labels: labels.Labels{{Name: "c", Value: "d"}},
externalLabels: labels.Labels{{Name: "a", Value: "b"}},
expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}},
labels: labels.FromStrings("c", "d"),
externalLabels: []labels.Label{{Name: "a", Value: "b"}},
expected: labels.FromStrings("a", "b", "c", "d"),
},
// Test we don't override existing labels.
{
labels: labels.Labels{{Name: "a", Value: "b"}},
externalLabels: labels.Labels{{Name: "a", Value: "c"}},
expected: labels.Labels{{Name: "a", Value: "b"}},
labels: labels.FromStrings("a", "b"),
externalLabels: []labels.Label{{Name: "a", Value: "c"}},
expected: labels.FromStrings("a", "b"),
},
// Test empty externalLabels.
{
labels: labels.Labels{{Name: "a", Value: "b"}},
externalLabels: labels.Labels{},
expected: labels.Labels{{Name: "a", Value: "b"}},
labels: labels.FromStrings("a", "b"),
externalLabels: []labels.Label{},
expected: labels.FromStrings("a", "b"),
},
// Test empty labels.
{
labels: labels.Labels{},
externalLabels: labels.Labels{{Name: "a", Value: "b"}},
expected: labels.Labels{{Name: "a", Value: "b"}},
labels: labels.EmptyLabels(),
externalLabels: []labels.Label{{Name: "a", Value: "b"}},
expected: labels.FromStrings("a", "b"),
},
// Test labels is longer than externalLabels.
{
labels: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}},
externalLabels: labels.Labels{{Name: "e", Value: "f"}},
expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}, {Name: "e", Value: "f"}},
labels: labels.FromStrings("a", "b", "c", "d"),
externalLabels: []labels.Label{{Name: "e", Value: "f"}},
expected: labels.FromStrings("a", "b", "c", "d", "e", "f"),
},
// Test externalLabels is longer than labels.
{
labels: labels.Labels{{Name: "c", Value: "d"}},
externalLabels: labels.Labels{{Name: "a", Value: "b"}, {Name: "e", Value: "f"}},
expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}, {Name: "e", Value: "f"}},
labels: labels.FromStrings("c", "d"),
externalLabels: []labels.Label{{Name: "a", Value: "b"}, {Name: "e", Value: "f"}},
expected: labels.FromStrings("a", "b", "c", "d", "e", "f"),
},
// Adding with and without clashing labels.
{
labels: labels.FromStrings("a", "b", "c", "d"),
externalLabels: []labels.Label{{Name: "a", Value: "xxx"}, {Name: "c", Value: "yyy"}, {Name: "e", Value: "f"}},
expected: labels.FromStrings("a", "b", "c", "d", "e", "f"),
},
} {
require.Equal(t, tc.expected, processExternalLabels(tc.labels, tc.externalLabels))

View file

@ -180,9 +180,11 @@ func (q *querier) Select(sortSeries bool, hints *storage.SelectHints, matchers .
// We return the new set of matchers, along with a map of labels for which
// matchers were added, so that these can later be removed from the result
// time series again.
func (q querier) addExternalLabels(ms []*labels.Matcher) ([]*labels.Matcher, labels.Labels) {
el := make(labels.Labels, len(q.externalLabels))
copy(el, q.externalLabels)
func (q querier) addExternalLabels(ms []*labels.Matcher) ([]*labels.Matcher, []string) {
el := make([]labels.Label, 0, q.externalLabels.Len())
q.externalLabels.Range(func(l labels.Label) {
el = append(el, l)
})
// ms won't be sorted, so have to O(n^2) the search.
for _, m := range ms {
@ -202,7 +204,11 @@ func (q querier) addExternalLabels(ms []*labels.Matcher) ([]*labels.Matcher, lab
}
ms = append(ms, m)
}
return ms, el
names := make([]string, len(el))
for i := range el {
names[i] = el[i].Name
}
return ms, names
}
// LabelValues implements storage.Querier and is a noop.
@ -234,7 +240,8 @@ func (q *chunkQuerier) Select(sortSeries bool, hints *storage.SelectHints, match
return storage.NewSeriesSetToChunkSet(q.querier.Select(sortSeries, hints, matchers...))
}
func newSeriesSetFilter(ss storage.SeriesSet, toFilter labels.Labels) storage.SeriesSet {
// Note strings in toFilter must be sorted.
func newSeriesSetFilter(ss storage.SeriesSet, toFilter []string) storage.SeriesSet {
return &seriesSetFilter{
SeriesSet: ss,
toFilter: toFilter,
@ -243,7 +250,7 @@ func newSeriesSetFilter(ss storage.SeriesSet, toFilter labels.Labels) storage.Se
type seriesSetFilter struct {
storage.SeriesSet
toFilter labels.Labels
toFilter []string // Label names to remove from result
querier storage.Querier
}
@ -264,20 +271,12 @@ func (ssf seriesSetFilter) At() storage.Series {
type seriesFilter struct {
storage.Series
toFilter labels.Labels
toFilter []string // Label names to remove from result
}
func (sf seriesFilter) Labels() labels.Labels {
labels := sf.Series.Labels()
for i, j := 0, 0; i < len(labels) && j < len(sf.toFilter); {
if labels[i].Name < sf.toFilter[j].Name {
i++
} else if labels[i].Name > sf.toFilter[j].Name {
j++
} else {
labels = labels[:i+copy(labels[i:], labels[i+1:])]
j++
}
}
return labels
b := labels.NewBuilder(sf.Series.Labels())
// todo: check if this is too inefficient.
b.Del(sf.toFilter...)
return b.Labels(labels.EmptyLabels())
}

View file

@ -110,7 +110,7 @@ func TestExternalLabelsQuerierAddExternalLabels(t *testing.T) {
el labels.Labels
inMatchers []*labels.Matcher
outMatchers []*labels.Matcher
added labels.Labels
added []string
}{
{
inMatchers: []*labels.Matcher{
@ -119,7 +119,7 @@ func TestExternalLabelsQuerierAddExternalLabels(t *testing.T) {
outMatchers: []*labels.Matcher{
labels.MustNewMatcher(labels.MatchEqual, "job", "api-server"),
},
added: labels.Labels{},
added: []string{},
},
{
el: labels.FromStrings("dc", "berlin-01", "region", "europe"),
@ -131,7 +131,7 @@ func TestExternalLabelsQuerierAddExternalLabels(t *testing.T) {
labels.MustNewMatcher(labels.MatchEqual, "region", "europe"),
labels.MustNewMatcher(labels.MatchEqual, "dc", "berlin-01"),
},
added: labels.FromStrings("dc", "berlin-01", "region", "europe"),
added: []string{"dc", "region"},
},
{
el: labels.FromStrings("dc", "berlin-01", "region", "europe"),
@ -144,7 +144,7 @@ func TestExternalLabelsQuerierAddExternalLabels(t *testing.T) {
labels.MustNewMatcher(labels.MatchEqual, "region", "europe"),
labels.MustNewMatcher(labels.MatchEqual, "dc", "munich-02"),
},
added: labels.FromStrings("region", "europe"),
added: []string{"region"},
},
}
@ -163,12 +163,12 @@ func TestExternalLabelsQuerierAddExternalLabels(t *testing.T) {
func TestSeriesSetFilter(t *testing.T) {
tests := []struct {
in *prompb.QueryResult
toRemove labels.Labels
toRemove []string
expected *prompb.QueryResult
}{
{
toRemove: labels.Labels{{Name: "foo", Value: "bar"}},
toRemove: []string{"foo"},
in: &prompb.QueryResult{
Timeseries: []*prompb.TimeSeries{
{Labels: labelsToLabelsProto(labels.FromStrings("foo", "bar", "a", "b"), nil), Samples: []prompb.Sample{}},

View file

@ -91,7 +91,7 @@ func TestFilterExternalLabels(t *testing.T) {
require.NoError(t, s.ApplyConfig(conf))
require.Equal(t, 1, len(s.queryables))
require.Equal(t, 1, len(s.queryables[0].(*sampleAndChunkQueryableClient).externalLabels))
require.Equal(t, 1, s.queryables[0].(*sampleAndChunkQueryableClient).externalLabels.Len())
err := s.Close()
require.NoError(t, err)
@ -118,7 +118,7 @@ func TestIgnoreExternalLabels(t *testing.T) {
require.NoError(t, s.ApplyConfig(conf))
require.Equal(t, 1, len(s.queryables))
require.Equal(t, 0, len(s.queryables[0].(*sampleAndChunkQueryableClient).externalLabels))
require.Equal(t, 0, s.queryables[0].(*sampleAndChunkQueryableClient).externalLabels.Len())
err := s.Close()
require.NoError(t, err)

View file

@ -228,14 +228,14 @@ func TestUpdateExternalLabels(t *testing.T) {
require.NoError(t, err)
require.NoError(t, s.ApplyConfig(conf))
require.Equal(t, 1, len(s.queues))
require.Equal(t, labels.Labels(nil), s.queues[hash].externalLabels)
require.Equal(t, 0, len(s.queues[hash].externalLabels))
conf.GlobalConfig.ExternalLabels = externalLabels
hash, err = toHash(conf.RemoteWriteConfigs[0])
require.NoError(t, err)
require.NoError(t, s.ApplyConfig(conf))
require.Equal(t, 1, len(s.queues))
require.Equal(t, externalLabels, s.queues[hash].externalLabels)
require.Equal(t, []labels.Label{{Name: "external", Value: "true"}}, s.queues[hash].externalLabels)
err = s.Close()
require.NoError(t, err)

View file

@ -713,7 +713,7 @@ func (a *appender) Append(ref storage.SeriesRef, l labels.Labels, t int64, v flo
// Ensure no empty or duplicate labels have gotten through. This mirrors the
// equivalent validation code in the TSDB's headAppender.
l = l.WithoutEmpty()
if len(l) == 0 {
if l.IsEmpty() {
return 0, errors.Wrap(tsdb.ErrInvalidSample, "empty labelset")
}
@ -786,13 +786,17 @@ func (a *appender) AppendExemplar(ref storage.SeriesRef, l labels.Labels, e exem
// Exemplar label length does not include chars involved in text rendering such as quotes
// equals sign, or commas. See definition of const ExemplarMaxLabelLength.
labelSetLen := 0
for _, l := range e.Labels {
err := e.Labels.Validate(func(l labels.Label) error {
labelSetLen += utf8.RuneCountInString(l.Name)
labelSetLen += utf8.RuneCountInString(l.Value)
if labelSetLen > exemplar.ExemplarMaxLabelSetLength {
return 0, storage.ErrExemplarLabelLength
return storage.ErrExemplarLabelLength
}
return nil
})
if err != nil {
return 0, err
}
// Check for duplicate vs last stored exemplar for this series, and discard those.

View file

@ -49,28 +49,28 @@ func TestDB_InvalidSeries(t *testing.T) {
_, err := app.Append(0, labels.Labels{}, 0, 0)
require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject empty labels")
_, err = app.Append(0, labels.Labels{{Name: "a", Value: "1"}, {Name: "a", Value: "2"}}, 0, 0)
_, err = app.Append(0, labels.FromStrings("a", "1", "a", "2"), 0, 0)
require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject duplicate labels")
})
t.Run("Exemplars", func(t *testing.T) {
sRef, err := app.Append(0, labels.Labels{{Name: "a", Value: "1"}}, 0, 0)
sRef, err := app.Append(0, labels.FromStrings("a", "1"), 0, 0)
require.NoError(t, err, "should not reject valid series")
_, err = app.AppendExemplar(0, nil, exemplar.Exemplar{})
_, err = app.AppendExemplar(0, labels.EmptyLabels(), exemplar.Exemplar{})
require.EqualError(t, err, "unknown series ref when trying to add exemplar: 0")
e := exemplar.Exemplar{Labels: labels.Labels{{Name: "a", Value: "1"}, {Name: "a", Value: "2"}}}
_, err = app.AppendExemplar(sRef, nil, e)
e := exemplar.Exemplar{Labels: labels.FromStrings("a", "1", "a", "2")}
_, err = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
require.ErrorIs(t, err, tsdb.ErrInvalidExemplar, "should reject duplicate labels")
e = exemplar.Exemplar{Labels: labels.Labels{{Name: "a_somewhat_long_trace_id", Value: "nYJSNtFrFTY37VR7mHzEE/LIDt7cdAQcuOzFajgmLDAdBSRHYPDzrxhMA4zz7el8naI/AoXFv9/e/G0vcETcIoNUi3OieeLfaIRQci2oa"}}}
_, err = app.AppendExemplar(sRef, nil, e)
e = exemplar.Exemplar{Labels: labels.FromStrings("a_somewhat_long_trace_id", "nYJSNtFrFTY37VR7mHzEE/LIDt7cdAQcuOzFajgmLDAdBSRHYPDzrxhMA4zz7el8naI/AoXFv9/e/G0vcETcIoNUi3OieeLfaIRQci2oa")}
_, err = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
require.ErrorIs(t, err, storage.ErrExemplarLabelLength, "should reject too long label length")
// Inverse check
e = exemplar.Exemplar{Labels: labels.Labels{{Name: "a", Value: "1"}}, Value: 20, Ts: 10, HasTs: true}
_, err = app.AppendExemplar(sRef, nil, e)
e = exemplar.Exemplar{Labels: labels.FromStrings("a", "1"), Value: 20, Ts: 10, HasTs: true}
_, err = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
require.NoError(t, err, "should not reject valid exemplars")
})
}
@ -426,9 +426,7 @@ func Test_ExistingWAL_NextRef(t *testing.T) {
// Append <seriesCount> series
app := db.Appender(context.Background())
for i := 0; i < seriesCount; i++ {
lset := labels.Labels{
{Name: model.MetricNameLabel, Value: fmt.Sprintf("series_%d", i)},
}
lset := labels.FromStrings(model.MetricNameLabel, fmt.Sprintf("series_%d", i))
_, err := app.Append(0, lset, 0, 100)
require.NoError(t, err)
}
@ -470,11 +468,11 @@ func startTime() (int64, error) {
}
// Create series for tests.
func labelsForTest(lName string, seriesCount int) []labels.Labels {
var series []labels.Labels
func labelsForTest(lName string, seriesCount int) [][]labels.Label {
var series [][]labels.Label
for i := 0; i < seriesCount; i++ {
lset := labels.Labels{
lset := []labels.Label{
{Name: "a", Value: lName},
{Name: "instance", Value: "localhost" + strconv.Itoa(i)},
{Name: "job", Value: "prometheus"},
@ -507,28 +505,28 @@ func TestStorage_DuplicateExemplarsIgnored(t *testing.T) {
app := s.Appender(context.Background())
defer s.Close()
sRef, err := app.Append(0, labels.Labels{{Name: "a", Value: "1"}}, 0, 0)
sRef, err := app.Append(0, labels.FromStrings("a", "1"), 0, 0)
require.NoError(t, err, "should not reject valid series")
// Write a few exemplars to our appender and call Commit().
// If the Labels, Value or Timestamp are different than the last exemplar,
// then a new one should be appended; Otherwise, it should be skipped.
e := exemplar.Exemplar{Labels: labels.Labels{{Name: "a", Value: "1"}}, Value: 20, Ts: 10, HasTs: true}
_, _ = app.AppendExemplar(sRef, nil, e)
_, _ = app.AppendExemplar(sRef, nil, e)
e := exemplar.Exemplar{Labels: labels.FromStrings("a", "1"), Value: 20, Ts: 10, HasTs: true}
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
e.Labels = labels.Labels{{Name: "b", Value: "2"}}
_, _ = app.AppendExemplar(sRef, nil, e)
_, _ = app.AppendExemplar(sRef, nil, e)
_, _ = app.AppendExemplar(sRef, nil, e)
e.Labels = labels.FromStrings("b", "2")
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
e.Value = 42
_, _ = app.AppendExemplar(sRef, nil, e)
_, _ = app.AppendExemplar(sRef, nil, e)
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
e.Ts = 25
_, _ = app.AppendExemplar(sRef, nil, e)
_, _ = app.AppendExemplar(sRef, nil, e)
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
require.NoError(t, app.Commit())

View file

@ -79,10 +79,10 @@ type IndexReader interface {
// by the label set of the underlying series.
SortedPostings(index.Postings) index.Postings
// Series populates the given labels and chunk metas for the series identified
// Series populates the given builder and chunk metas for the series identified
// by the reference.
// Returns storage.ErrNotFound if the ref does not resolve to a known series.
Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error
Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error
// LabelNames returns all the unique label names present in the index in sorted order.
LabelNames(matchers ...*labels.Matcher) ([]string, error)
@ -499,8 +499,8 @@ func (r blockIndexReader) SortedPostings(p index.Postings) index.Postings {
return r.ir.SortedPostings(p)
}
func (r blockIndexReader) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
if err := r.ir.Series(ref, lset, chks); err != nil {
func (r blockIndexReader) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
if err := r.ir.Series(ref, builder, chks); err != nil {
return errors.Wrapf(err, "block: %s", r.b.Meta().ULID)
}
return nil
@ -561,12 +561,12 @@ func (pb *Block) Delete(mint, maxt int64, ms ...*labels.Matcher) error {
// Choose only valid postings which have chunks in the time-range.
stones := tombstones.NewMemTombstones()
var lset labels.Labels
var chks []chunks.Meta
var builder labels.ScratchBuilder
Outer:
for p.Next() {
err := ir.Series(p.At(), &lset, &chks)
err := ir.Series(p.At(), &builder, &chks)
if err != nil {
return err
}

View file

@ -215,10 +215,10 @@ func TestLabelValuesWithMatchers(t *testing.T) {
var seriesEntries []storage.Series
for i := 0; i < 100; i++ {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"tens", fmt.Sprintf("value%d", i/10),
"unique", fmt.Sprintf("value%d", i),
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
}
blockDir := createBlock(t, tmpdir, seriesEntries)
@ -372,11 +372,11 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) {
for i := 0; i < metricCount; i++ {
// Note these series are not created in sort order: 'value2' sorts after 'value10'.
// This makes a big difference to the benchmark timing.
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "a_unique", Value: fmt.Sprintf("value%d", i)},
{Name: "b_tens", Value: fmt.Sprintf("value%d", i/(metricCount/10))},
{Name: "c_ninety", Value: fmt.Sprintf("value%d", i/(metricCount/10)/9)}, // "0" for the first 90%, then "1"
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"a_unique", fmt.Sprintf("value%d", i),
"b_tens", fmt.Sprintf("value%d", i/(metricCount/10)),
"c_ninety", fmt.Sprintf("value%d", i/(metricCount/10)/9), // "0" for the first 90%, then "1"
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
}
blockDir := createBlock(b, tmpdir, seriesEntries)
@ -410,23 +410,23 @@ func TestLabelNamesWithMatchers(t *testing.T) {
var seriesEntries []storage.Series
for i := 0; i < 100; i++ {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"unique", fmt.Sprintf("value%d", i),
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
if i%10 == 0 {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"tens", fmt.Sprintf("value%d", i/10),
"unique", fmt.Sprintf("value%d", i),
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
}
if i%20 == 0 {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
{Name: "twenties", Value: fmt.Sprintf("value%d", i/20)},
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
"tens", fmt.Sprintf("value%d", i/10),
"twenties", fmt.Sprintf("value%d", i/20),
"unique", fmt.Sprintf("value%d", i),
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
}
}

View file

@ -1478,11 +1478,11 @@ func TestSparseHistogramSpaceSavings(t *testing.T) {
for sid, schema := range allSchemas {
for i := 0; i < c.numSeriesPerSchema; i++ {
lbls := labels.Labels{
{Name: "__name__", Value: fmt.Sprintf("rpc_durations_%d_histogram_seconds", i)},
{Name: "instance", Value: "localhost:8080"},
{Name: "job", Value: fmt.Sprintf("sparse_histogram_schema_%s", schemaDescription[sid])},
}
lbls := labels.FromStrings(
"__name__", fmt.Sprintf("rpc_durations_%d_histogram_seconds", i),
"instance", "localhost:8080",
"job", fmt.Sprintf("sparse_histogram_schema_%s", schemaDescription[sid]),
)
allSparseSeries = append(allSparseSeries, struct {
baseLabels labels.Labels
hists []*histogram.Histogram
@ -1546,21 +1546,20 @@ func TestSparseHistogramSpaceSavings(t *testing.T) {
for it.Next() {
numOldSeriesPerHistogram++
b := it.At()
lbls := append(ah.baseLabels, labels.Label{Name: "le", Value: fmt.Sprintf("%.16f", b.Upper)})
lbls := labels.NewBuilder(ah.baseLabels).Set("le", fmt.Sprintf("%.16f", b.Upper)).Labels(labels.EmptyLabels())
refs[itIdx], err = oldApp.Append(refs[itIdx], lbls, ts, float64(b.Count))
require.NoError(t, err)
itIdx++
}
baseName := ah.baseLabels.Get(labels.MetricName)
// _count metric.
countLbls := ah.baseLabels.Copy()
countLbls[0].Value = countLbls[0].Value + "_count"
countLbls := labels.NewBuilder(ah.baseLabels).Set(labels.MetricName, baseName+"_count").Labels(labels.EmptyLabels())
_, err = oldApp.Append(0, countLbls, ts, float64(h.Count))
require.NoError(t, err)
numOldSeriesPerHistogram++
// _sum metric.
sumLbls := ah.baseLabels.Copy()
sumLbls[0].Value = sumLbls[0].Value + "_sum"
sumLbls := labels.NewBuilder(ah.baseLabels).Set(labels.MetricName, baseName+"_sum").Labels(labels.EmptyLabels())
_, err = oldApp.Append(0, sumLbls, ts, h.Sum)
require.NoError(t, err)
numOldSeriesPerHistogram++

View file

@ -1002,7 +1002,7 @@ func (a dbAppender) GetRef(lset labels.Labels, hash uint64) (storage.SeriesRef,
if g, ok := a.Appender.(storage.GetRef); ok {
return g.GetRef(lset, hash)
}
return 0, nil
return 0, labels.EmptyLabels()
}
func (a dbAppender) Commit() error {

View file

@ -478,9 +478,9 @@ func TestAmendDatapointCausesError(t *testing.T) {
require.NoError(t, app.Commit())
app = db.Appender(ctx)
_, err = app.Append(0, labels.Labels{{Name: "a", Value: "b"}}, 0, 0)
_, err = app.Append(0, labels.FromStrings("a", "b"), 0, 0)
require.NoError(t, err)
_, err = app.Append(0, labels.Labels{{Name: "a", Value: "b"}}, 0, 1)
_, err = app.Append(0, labels.FromStrings("a", "b"), 0, 1)
require.Equal(t, storage.ErrDuplicateSampleForTimestamp, err)
require.NoError(t, app.Rollback())
@ -498,15 +498,15 @@ func TestAmendDatapointCausesError(t *testing.T) {
}
app = db.Appender(ctx)
_, err = app.AppendHistogram(0, labels.Labels{{Name: "a", Value: "c"}}, 0, h.Copy())
_, err = app.AppendHistogram(0, labels.FromStrings("a", "c"), 0, h.Copy())
require.NoError(t, err)
require.NoError(t, app.Commit())
app = db.Appender(ctx)
_, err = app.AppendHistogram(0, labels.Labels{{Name: "a", Value: "c"}}, 0, h.Copy())
_, err = app.AppendHistogram(0, labels.FromStrings("a", "c"), 0, h.Copy())
require.NoError(t, err)
h.Schema = 2
_, err = app.AppendHistogram(0, labels.Labels{{Name: "a", Value: "c"}}, 0, h.Copy())
_, err = app.AppendHistogram(0, labels.FromStrings("a", "c"), 0, h.Copy())
require.Equal(t, storage.ErrDuplicateSampleForTimestamp, err)
require.NoError(t, app.Rollback())
}
@ -1830,6 +1830,8 @@ func TestChunkAtBlockBoundary(t *testing.T) {
err = db.Compact()
require.NoError(t, err)
var builder labels.ScratchBuilder
for _, block := range db.Blocks() {
r, err := block.Index()
require.NoError(t, err)
@ -1841,15 +1843,12 @@ func TestChunkAtBlockBoundary(t *testing.T) {
p, err := r.Postings(k, v)
require.NoError(t, err)
var (
lset labels.Labels
chks []chunks.Meta
)
var chks []chunks.Meta
chunkCount := 0
for p.Next() {
err = r.Series(p.At(), &lset, &chks)
err = r.Series(p.At(), &builder, &chks)
require.NoError(t, err)
for _, c := range chks {
require.True(t, meta.MinTime <= c.MinTime && c.MaxTime <= meta.MaxTime,

View file

@ -226,13 +226,16 @@ func (ce *CircularExemplarStorage) validateExemplar(key []byte, e exemplar.Exemp
// Exemplar label length does not include chars involved in text rendering such as quotes
// equals sign, or commas. See definition of const ExemplarMaxLabelLength.
labelSetLen := 0
for _, l := range e.Labels {
if err := e.Labels.Validate(func(l labels.Label) error {
labelSetLen += utf8.RuneCountInString(l.Name)
labelSetLen += utf8.RuneCountInString(l.Value)
if labelSetLen > exemplar.ExemplarMaxLabelSetLength {
return storage.ErrExemplarLabelLength
}
return nil
}); err != nil {
return err
}
idx, ok := ce.index[string(key)]

View file

@ -102,7 +102,7 @@ func (a *initAppender) GetRef(lset labels.Labels, hash uint64) (storage.SeriesRe
if g, ok := a.app.(storage.GetRef); ok {
return g.GetRef(lset, hash)
}
return 0, nil
return 0, labels.EmptyLabels()
}
func (a *initAppender) Commit() error {
@ -312,7 +312,7 @@ func (a *headAppender) Append(ref storage.SeriesRef, lset labels.Labels, t int64
if s == nil {
// Ensure no empty labels have gotten through.
lset = lset.WithoutEmpty()
if len(lset) == 0 {
if lset.IsEmpty() {
return 0, errors.Wrap(ErrInvalidSample, "empty labelset")
}
@ -494,7 +494,7 @@ func (a *headAppender) AppendHistogram(ref storage.SeriesRef, lset labels.Labels
if s == nil {
// Ensure no empty labels have gotten through.
lset = lset.WithoutEmpty()
if len(lset) == 0 {
if lset.IsEmpty() {
return 0, errors.Wrap(ErrInvalidSample, "empty labelset")
}
@ -650,7 +650,7 @@ var _ storage.GetRef = &headAppender{}
func (a *headAppender) GetRef(lset labels.Labels, hash uint64) (storage.SeriesRef, labels.Labels) {
s := a.head.series.getByHash(hash, lset)
if s == nil {
return 0, nil
return 0, labels.EmptyLabels()
}
// returned labels must be suitable to pass to Append()
return storage.SeriesRef(s.ref), s.lset

View file

@ -148,14 +148,14 @@ func (h *headIndexReader) SortedPostings(p index.Postings) index.Postings {
}
// Series returns the series for the given reference.
func (h *headIndexReader) Series(ref storage.SeriesRef, lbls *labels.Labels, chks *[]chunks.Meta) error {
func (h *headIndexReader) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
s := h.head.series.getByID(chunks.HeadSeriesRef(ref))
if s == nil {
h.head.metrics.seriesNotFound.Inc()
return storage.ErrNotFound
}
*lbls = append((*lbls)[:0], s.lset...)
builder.Assign(s.lset)
s.Lock()
defer s.Unlock()
@ -222,9 +222,9 @@ func (h *headIndexReader) LabelNamesFor(ids ...storage.SeriesRef) ([]string, err
if memSeries == nil {
return nil, storage.ErrNotFound
}
for _, lbl := range memSeries.lset {
memSeries.lset.Range(func(lbl labels.Label) {
namesMap[lbl.Name] = struct{}{}
}
})
}
names := make([]string, 0, len(namesMap))
for name := range namesMap {

View file

@ -388,7 +388,12 @@ func TestHead_HighConcurrencyReadAndWrite(t *testing.T) {
querySeriesRef = (querySeriesRef + 1) % seriesCnt
lbls := labelSets[querySeriesRef]
samples, err := queryHead(ts-qryRange, ts, lbls[0])
// lbls has a single entry; extract it so we can run a query.
var lbl labels.Label
lbls.Range(func(l labels.Label) {
lbl = l
})
samples, err := queryHead(ts-qryRange, ts, lbl)
if err != nil {
return false, err
}
@ -1133,8 +1138,9 @@ func TestDelete_e2e(t *testing.T) {
require.NoError(t, hb.Delete(r.Mint, r.Maxt, del.ms...))
}
matched := labels.Slice{}
for _, ls := range lbls {
for _, l := range lbls {
s := labels.Selector(del.ms)
ls := labels.New(l...)
if s.Matches(ls) {
matched = append(matched, ls)
}
@ -1446,12 +1452,12 @@ func TestGCChunkAccess(t *testing.T) {
idx := h.indexRange(0, 1500)
var (
lset labels.Labels
chunks []chunks.Meta
chunks []chunks.Meta
builder labels.ScratchBuilder
)
require.NoError(t, idx.Series(1, &lset, &chunks))
require.NoError(t, idx.Series(1, &builder, &chunks))
require.Equal(t, labels.FromStrings("a", "1"), lset)
require.Equal(t, labels.FromStrings("a", "1"), builder.Labels())
require.Equal(t, 2, len(chunks))
cr, err := h.chunksRange(0, 1500, nil)
@ -1499,12 +1505,12 @@ func TestGCSeriesAccess(t *testing.T) {
idx := h.indexRange(0, 2000)
var (
lset labels.Labels
chunks []chunks.Meta
chunks []chunks.Meta
builder labels.ScratchBuilder
)
require.NoError(t, idx.Series(1, &lset, &chunks))
require.NoError(t, idx.Series(1, &builder, &chunks))
require.Equal(t, labels.FromStrings("a", "1"), lset)
require.Equal(t, labels.FromStrings("a", "1"), builder.Labels())
require.Equal(t, 2, len(chunks))
cr, err := h.chunksRange(0, 2000, nil)
@ -2806,7 +2812,7 @@ func TestWaitForPendingReadersInTimeRange(t *testing.T) {
}
func TestAppendHistogram(t *testing.T) {
l := labels.Labels{{Name: "a", Value: "b"}}
l := labels.FromStrings("a", "b")
for _, numHistograms := range []int{1, 10, 150, 200, 250, 300} {
t.Run(fmt.Sprintf("%d", numHistograms), func(t *testing.T) {
head, _ := newTestHead(t, 1000, false, false)
@ -2861,7 +2867,7 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
require.NoError(t, head.Init(0))
// Series with only histograms.
s1 := labels.Labels{{Name: "a", Value: "b1"}}
s1 := labels.FromStrings("a", "b1")
k1 := s1.String()
numHistograms := 450
exp := map[string][]tsdbutil.Sample{}
@ -2893,7 +2899,7 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
require.Greater(t, expHeadChunkSamples, 0)
// Series with mix of histograms and float.
s2 := labels.Labels{{Name: "a", Value: "b2"}}
s2 := labels.FromStrings("a", "b2")
k2 := s2.String()
app = head.Appender(context.Background())
ts := 0
@ -3254,7 +3260,7 @@ func TestHistogramMetrics(t *testing.T) {
for x := 0; x < 5; x++ {
expHSeries++
l := labels.Labels{{Name: "a", Value: fmt.Sprintf("b%d", x)}}
l := labels.FromStrings("a", fmt.Sprintf("b%d", x))
for i, h := range GenerateTestHistograms(10) {
app := head.Appender(context.Background())
_, err := app.AppendHistogram(0, l, int64(i), h)
@ -3277,7 +3283,7 @@ func TestHistogramMetrics(t *testing.T) {
}
func TestHistogramStaleSample(t *testing.T) {
l := labels.Labels{{Name: "a", Value: "b"}}
l := labels.FromStrings("a", "b")
numHistograms := 20
head, _ := newTestHead(t, 100000, false, false)
t.Cleanup(func() {
@ -3372,7 +3378,7 @@ func TestHistogramStaleSample(t *testing.T) {
}
func TestHistogramCounterResetHeader(t *testing.T) {
l := labels.Labels{{Name: "a", Value: "b"}}
l := labels.FromStrings("a", "b")
head, _ := newTestHead(t, 1000, false, false)
t.Cleanup(func() {
require.NoError(t, head.Close())
@ -3484,7 +3490,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
db.DisableCompactions()
hists := GenerateTestHistograms(10)
lbls := labels.Labels{{Name: "a", Value: "b"}}
lbls := labels.FromStrings("a", "b")
type result struct {
t int64

View file

@ -423,7 +423,7 @@ func (w *Writer) AddSeries(ref storage.SeriesRef, lset labels.Labels, chunks ...
return errors.Errorf("out-of-order series added with label set %q", lset)
}
if ref < w.lastRef && len(w.lastSeries) != 0 {
if ref < w.lastRef && !w.lastSeries.IsEmpty() {
return errors.Errorf("series with reference greater than %d already added", ref)
}
// We add padding to 16 bytes to increase the addressable space we get through 4 byte
@ -437,9 +437,9 @@ func (w *Writer) AddSeries(ref storage.SeriesRef, lset labels.Labels, chunks ...
}
w.buf2.Reset()
w.buf2.PutUvarint(len(lset))
w.buf2.PutUvarint(lset.Len())
for _, l := range lset {
if err := lset.Validate(func(l labels.Label) error {
var err error
cacheEntry, ok := w.symbolCache[l.Name]
nameIndex := cacheEntry.index
@ -465,6 +465,9 @@ func (w *Writer) AddSeries(ref storage.SeriesRef, lset labels.Labels, chunks ...
}
}
w.buf2.PutUvarint32(valueIndex)
return nil
}); err != nil {
return err
}
w.buf2.PutUvarint(len(chunks))
@ -496,7 +499,7 @@ func (w *Writer) AddSeries(ref storage.SeriesRef, lset labels.Labels, chunks ...
return errors.Wrap(err, "write series data")
}
w.lastSeries = append(w.lastSeries[:0], lset...)
w.lastSeries.CopyFrom(lset)
w.lastRef = ref
return nil
@ -1593,8 +1596,8 @@ func (r *Reader) LabelValueFor(id storage.SeriesRef, label string) (string, erro
return value, nil
}
// Series reads the series with the given ID and writes its labels and chunks into lbls and chks.
func (r *Reader) Series(id storage.SeriesRef, lbls *labels.Labels, chks *[]chunks.Meta) error {
// Series reads the series with the given ID and writes its labels and chunks into builder and chks.
func (r *Reader) Series(id storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
offset := id
// In version 2 series IDs are no longer exact references but series are 16-byte padded
// and the ID is the multiple of 16 of the actual position.
@ -1605,7 +1608,7 @@ func (r *Reader) Series(id storage.SeriesRef, lbls *labels.Labels, chks *[]chunk
if d.Err() != nil {
return d.Err()
}
return errors.Wrap(r.dec.Series(d.Get(), lbls, chks), "read series")
return errors.Wrap(r.dec.Series(d.Get(), builder, chks), "read series")
}
func (r *Reader) Postings(name string, values ...string) (Postings, error) {
@ -1832,9 +1835,10 @@ func (dec *Decoder) LabelValueFor(b []byte, label string) (string, error) {
return "", d.Err()
}
// Series decodes a series entry from the given byte slice into lset and chks.
func (dec *Decoder) Series(b []byte, lbls *labels.Labels, chks *[]chunks.Meta) error {
*lbls = (*lbls)[:0]
// Series decodes a series entry from the given byte slice into builder and chks.
// Previous contents of lbls can be overwritten - make sure you copy before retaining.
func (dec *Decoder) Series(b []byte, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
builder.Reset()
*chks = (*chks)[:0]
d := encoding.Decbuf{B: b}
@ -1858,7 +1862,7 @@ func (dec *Decoder) Series(b []byte, lbls *labels.Labels, chks *[]chunks.Meta) e
return errors.Wrap(err, "lookup label value")
}
*lbls = append(*lbls, labels.Label{Name: ln, Value: lv})
builder.Add(ln, lv)
}
// Read the chunks meta data.

View file

@ -68,14 +68,14 @@ func (m mockIndex) AddSeries(ref storage.SeriesRef, l labels.Labels, chunks ...c
if _, ok := m.series[ref]; ok {
return errors.Errorf("series with reference %d already added", ref)
}
for _, lbl := range l {
l.Range(func(lbl labels.Label) {
m.symbols[lbl.Name] = struct{}{}
m.symbols[lbl.Value] = struct{}{}
if _, ok := m.postings[lbl]; !ok {
m.postings[lbl] = []storage.SeriesRef{}
}
m.postings[lbl] = append(m.postings[lbl], ref)
}
})
m.postings[allPostingsKey] = append(m.postings[allPostingsKey], ref)
s := series{l: l}
@ -124,12 +124,12 @@ func (m mockIndex) SortedPostings(p Postings) Postings {
return NewListPostings(ep)
}
func (m mockIndex) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
func (m mockIndex) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
s, ok := m.series[ref]
if !ok {
return errors.New("not found")
}
*lset = append((*lset)[:0], s.l...)
builder.Assign(s.l)
*chks = append((*chks)[:0], s.chunks...)
return nil
@ -197,15 +197,15 @@ func TestIndexRW_Postings(t *testing.T) {
p, err := ir.Postings("a", "1")
require.NoError(t, err)
var l labels.Labels
var c []chunks.Meta
var builder labels.ScratchBuilder
for i := 0; p.Next(); i++ {
err := ir.Series(p.At(), &l, &c)
err := ir.Series(p.At(), &builder, &c)
require.NoError(t, err)
require.Equal(t, 0, len(c))
require.Equal(t, series[i], l)
require.Equal(t, series[i], builder.Labels())
}
require.NoError(t, p.Err())
@ -311,16 +311,16 @@ func TestPostingsMany(t *testing.T) {
{in: []string{"126a", "126b", "127", "127a", "127b", "128", "128a", "128b", "129", "129a", "129b"}},
}
var builder labels.ScratchBuilder
for _, c := range cases {
it, err := ir.Postings("i", c.in...)
require.NoError(t, err)
got := []string{}
var lbls labels.Labels
var metas []chunks.Meta
for it.Next() {
require.NoError(t, ir.Series(it.At(), &lbls, &metas))
got = append(got, lbls.Get("i"))
require.NoError(t, ir.Series(it.At(), &builder, &metas))
got = append(got, builder.Labels().Get("i"))
}
require.NoError(t, it.Err())
exp := []string{}
@ -344,10 +344,10 @@ func TestPersistence_index_e2e(t *testing.T) {
symbols := map[string]struct{}{}
for _, lset := range lbls {
for _, l := range lset {
lset.Range(func(l labels.Label) {
symbols[l.Name] = struct{}{}
symbols[l.Value] = struct{}{}
}
})
}
var input indexWriterSeriesSlice
@ -395,14 +395,14 @@ func TestPersistence_index_e2e(t *testing.T) {
require.NoError(t, err)
require.NoError(t, mi.AddSeries(storage.SeriesRef(i), s.labels, s.chunks...))
for _, l := range s.labels {
s.labels.Range(func(l labels.Label) {
valset, ok := values[l.Name]
if !ok {
valset = map[string]struct{}{}
values[l.Name] = valset
}
valset[l.Value] = struct{}{}
}
})
postings.Add(storage.SeriesRef(i), s.labels)
}
@ -419,20 +419,20 @@ func TestPersistence_index_e2e(t *testing.T) {
expp, err := mi.Postings(p.Name, p.Value)
require.NoError(t, err)
var lset, explset labels.Labels
var chks, expchks []chunks.Meta
var builder, eBuilder labels.ScratchBuilder
for gotp.Next() {
require.True(t, expp.Next())
ref := gotp.At()
err := ir.Series(ref, &lset, &chks)
err := ir.Series(ref, &builder, &chks)
require.NoError(t, err)
err = mi.Series(expp.At(), &explset, &expchks)
err = mi.Series(expp.At(), &eBuilder, &expchks)
require.NoError(t, err)
require.Equal(t, explset, lset)
require.Equal(t, eBuilder.Labels(), builder.Labels())
require.Equal(t, expchks, chks)
}
require.False(t, expp.Next(), "Expected no more postings for %q=%q", p.Name, p.Value)

View file

@ -353,9 +353,9 @@ func (p *MemPostings) Iter(f func(labels.Label, Postings) error) error {
func (p *MemPostings) Add(id storage.SeriesRef, lset labels.Labels) {
p.mtx.Lock()
for _, l := range lset {
lset.Range(func(l labels.Label) {
p.addFor(id, l)
}
})
p.addFor(id, allPostingsKey)
p.mtx.Unlock()

View file

@ -47,21 +47,21 @@ func NewOOOHeadIndexReader(head *Head, mint, maxt int64) *OOOHeadIndexReader {
return &OOOHeadIndexReader{hr}
}
func (oh *OOOHeadIndexReader) Series(ref storage.SeriesRef, lbls *labels.Labels, chks *[]chunks.Meta) error {
return oh.series(ref, lbls, chks, 0)
func (oh *OOOHeadIndexReader) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
return oh.series(ref, builder, chks, 0)
}
// The passed lastMmapRef tells upto what max m-map chunk that we can consider.
// If it is 0, it means all chunks need to be considered.
// If it is non-0, then the oooHeadChunk must not be considered.
func (oh *OOOHeadIndexReader) series(ref storage.SeriesRef, lbls *labels.Labels, chks *[]chunks.Meta, lastMmapRef chunks.ChunkDiskMapperRef) error {
func (oh *OOOHeadIndexReader) series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta, lastMmapRef chunks.ChunkDiskMapperRef) error {
s := oh.head.series.getByID(chunks.HeadSeriesRef(ref))
if s == nil {
oh.head.metrics.seriesNotFound.Inc()
return storage.ErrNotFound
}
*lbls = append((*lbls)[:0], s.lset...)
builder.Assign(s.lset)
if chks == nil {
return nil
@ -400,8 +400,8 @@ func (ir *OOOCompactionHeadIndexReader) SortedPostings(p index.Postings) index.P
return p
}
func (ir *OOOCompactionHeadIndexReader) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
return ir.ch.oooIR.series(ref, lset, chks, ir.ch.lastMmapRef)
func (ir *OOOCompactionHeadIndexReader) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
return ir.ch.oooIR.series(ref, builder, chks, ir.ch.lastMmapRef)
}
func (ir *OOOCompactionHeadIndexReader) SortedLabelValues(name string, matchers ...*labels.Matcher) ([]string, error) {

View file

@ -357,13 +357,13 @@ func TestOOOHeadIndexReader_Series(t *testing.T) {
ir := NewOOOHeadIndexReader(h, tc.queryMinT, tc.queryMaxT)
var chks []chunks.Meta
var respLset labels.Labels
err := ir.Series(storage.SeriesRef(s1ID), &respLset, &chks)
var b labels.ScratchBuilder
err := ir.Series(storage.SeriesRef(s1ID), &b, &chks)
require.NoError(t, err)
require.Equal(t, s1Lset, respLset)
require.Equal(t, s1Lset, b.Labels())
require.Equal(t, expChunks, chks)
err = ir.Series(storage.SeriesRef(s1ID+1), &respLset, &chks)
err = ir.Series(storage.SeriesRef(s1ID+1), &b, &chks)
require.Equal(t, storage.ErrNotFound, err)
})
}
@ -379,23 +379,15 @@ func TestOOOHeadChunkReader_LabelValues(t *testing.T) {
app := head.Appender(context.Background())
// Add in-order samples
_, err := app.Append(0, labels.Labels{
{Name: "foo", Value: "bar1"},
}, 100, 1)
_, err := app.Append(0, labels.FromStrings("foo", "bar1"), 100, 1)
require.NoError(t, err)
_, err = app.Append(0, labels.Labels{
{Name: "foo", Value: "bar2"},
}, 100, 2)
_, err = app.Append(0, labels.FromStrings("foo", "bar2"), 100, 2)
require.NoError(t, err)
// Add ooo samples for those series
_, err = app.Append(0, labels.Labels{
{Name: "foo", Value: "bar1"},
}, 90, 1)
_, err = app.Append(0, labels.FromStrings("foo", "bar1"), 90, 1)
require.NoError(t, err)
_, err = app.Append(0, labels.Labels{
{Name: "foo", Value: "bar2"},
}, 90, 2)
_, err = app.Append(0, labels.FromStrings("foo", "bar2"), 90, 2)
require.NoError(t, err)
require.NoError(t, app.Commit())
@ -848,8 +840,8 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// markers like OOOLastRef. These are then used by the ChunkReader.
ir := NewOOOHeadIndexReader(db.head, tc.queryMinT, tc.queryMaxT)
var chks []chunks.Meta
var respLset labels.Labels
err := ir.Series(s1Ref, &respLset, &chks)
var b labels.ScratchBuilder
err := ir.Series(s1Ref, &b, &chks)
require.NoError(t, err)
require.Equal(t, len(tc.expChunksSamples), len(chks))
@ -1011,8 +1003,8 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
// markers like OOOLastRef. These are then used by the ChunkReader.
ir := NewOOOHeadIndexReader(db.head, tc.queryMinT, tc.queryMaxT)
var chks []chunks.Meta
var respLset labels.Labels
err := ir.Series(s1Ref, &respLset, &chks)
var b labels.ScratchBuilder
err := ir.Series(s1Ref, &b, &chks)
require.NoError(t, err)
require.Equal(t, len(tc.expChunksSamples), len(chks))

View file

@ -451,13 +451,13 @@ type blockBaseSeriesSet struct {
curr seriesData
bufChks []chunks.Meta
bufLbls labels.Labels
builder labels.ScratchBuilder
err error
}
func (b *blockBaseSeriesSet) Next() bool {
for b.p.Next() {
if err := b.index.Series(b.p.At(), &b.bufLbls, &b.bufChks); err != nil {
if err := b.index.Series(b.p.At(), &b.builder, &b.bufChks); err != nil {
// Postings may be stale. Skip if no underlying series exists.
if errors.Cause(err) == storage.ErrNotFound {
continue
@ -528,8 +528,7 @@ func (b *blockBaseSeriesSet) Next() bool {
intervals = intervals.Add(tombstones.Interval{Mint: b.maxt + 1, Maxt: math.MaxInt64})
}
b.curr.labels = make(labels.Labels, len(b.bufLbls))
copy(b.curr.labels, b.bufLbls)
b.curr.labels = b.builder.Labels()
b.curr.chks = chks
b.curr.intervals = intervals
@ -865,7 +864,6 @@ func newBlockSeriesSet(i IndexReader, c ChunkReader, t tombstones.Reader, p inde
mint: mint,
maxt: maxt,
disableTrimming: disableTrimming,
bufLbls: make(labels.Labels, 0, 10),
},
}
}
@ -897,7 +895,6 @@ func newBlockChunkSeriesSet(id ulid.ULID, i IndexReader, c ChunkReader, t tombst
mint: mint,
maxt: maxt,
disableTrimming: disableTrimming,
bufLbls: make(labels.Labels, 0, 10),
},
}
}

View file

@ -142,14 +142,14 @@ func createIdxChkReaders(t *testing.T, tc []seriesSamples) (IndexReader, ChunkRe
postings.Add(storage.SeriesRef(i), ls)
for _, l := range ls {
ls.Range(func(l labels.Label) {
vs, present := lblIdx[l.Name]
if !present {
vs = map[string]struct{}{}
lblIdx[l.Name] = vs
}
vs[l.Value] = struct{}{}
}
})
}
require.NoError(t, postings.Iter(func(l labels.Label, p index.Postings) error {
@ -1168,10 +1168,10 @@ func (m *mockIndex) AddSeries(ref storage.SeriesRef, l labels.Labels, chunks ...
if _, ok := m.series[ref]; ok {
return errors.Errorf("series with reference %d already added", ref)
}
for _, lbl := range l {
l.Range(func(lbl labels.Label) {
m.symbols[lbl.Name] = struct{}{}
m.symbols[lbl.Value] = struct{}{}
}
})
s := series{l: l}
// Actual chunk data is not stored in the index.
@ -1238,9 +1238,9 @@ func (m mockIndex) LabelValueFor(id storage.SeriesRef, label string) (string, er
func (m mockIndex) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) {
namesMap := make(map[string]bool)
for _, id := range ids {
for _, lbl := range m.series[id].l {
m.series[id].l.Range(func(lbl labels.Label) {
namesMap[lbl.Name] = true
}
})
}
names := make([]string, 0, len(namesMap))
for name := range namesMap {
@ -1270,12 +1270,12 @@ func (m mockIndex) SortedPostings(p index.Postings) index.Postings {
return index.NewListPostings(ep)
}
func (m mockIndex) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
func (m mockIndex) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
s, ok := m.series[ref]
if !ok {
return storage.ErrNotFound
}
*lset = append((*lset)[:0], s.l...)
builder.Assign(s.l)
*chks = append((*chks)[:0], s.chunks...)
return nil
@ -1297,9 +1297,9 @@ func (m mockIndex) LabelNames(matchers ...*labels.Matcher) ([]string, error) {
}
}
if matches {
for _, lbl := range series.l {
series.l.Range(func(lbl labels.Label) {
names[lbl.Name] = struct{}{}
}
})
}
}
}
@ -1884,9 +1884,10 @@ func TestPostingsForMatchers(t *testing.T) {
p, err := PostingsForMatchers(ir, c.matchers...)
require.NoError(t, err)
var builder labels.ScratchBuilder
for p.Next() {
lbls := labels.Labels{}
require.NoError(t, ir.Series(p.At(), &lbls, &[]chunks.Meta{}))
require.NoError(t, ir.Series(p.At(), &builder, &[]chunks.Meta{}))
lbls := builder.Labels()
if _, ok := exp[lbls.String()]; !ok {
t.Errorf("Evaluating %v, unexpected result %s", c.matchers, lbls.String())
} else {
@ -1973,7 +1974,7 @@ func BenchmarkQueries(b *testing.B) {
// Add some common labels to make the matchers select these series.
{
var commonLbls labels.Labels
var commonLbls []labels.Label
for _, selector := range selectors {
switch selector.Type {
case labels.MatchEqual:
@ -1984,8 +1985,11 @@ func BenchmarkQueries(b *testing.B) {
}
for i := range commonLbls {
s := series[i].(*storage.SeriesEntry)
allLabels := append(commonLbls, s.Labels()...)
newS := storage.NewListSeries(allLabels, nil)
allLabels := commonLbls
s.Labels().Range(func(l labels.Label) {
allLabels = append(allLabels, l)
})
newS := storage.NewListSeries(labels.New(allLabels...), nil)
newS.SampleIteratorFn = s.SampleIteratorFn
series[i] = newS
@ -2097,7 +2101,7 @@ func (m mockMatcherIndex) SortedPostings(p index.Postings) index.Postings {
return index.EmptyPostings()
}
func (m mockMatcherIndex) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
func (m mockMatcherIndex) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
return nil
}
@ -2160,7 +2164,7 @@ func TestBlockBaseSeriesSet(t *testing.T) {
{
series: []refdSeries{
{
lset: labels.New([]labels.Label{{Name: "a", Value: "a"}}...),
lset: labels.FromStrings("a", "a"),
chunks: []chunks.Meta{
{Ref: 29},
{Ref: 45},
@ -2173,19 +2177,19 @@ func TestBlockBaseSeriesSet(t *testing.T) {
ref: 12,
},
{
lset: labels.New([]labels.Label{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}...),
lset: labels.FromStrings("a", "a", "b", "b"),
chunks: []chunks.Meta{
{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
},
ref: 10,
},
{
lset: labels.New([]labels.Label{{Name: "b", Value: "c"}}...),
lset: labels.FromStrings("b", "c"),
chunks: []chunks.Meta{{Ref: 8282}},
ref: 1,
},
{
lset: labels.New([]labels.Label{{Name: "b", Value: "b"}}...),
lset: labels.FromStrings("b", "b"),
chunks: []chunks.Meta{
{Ref: 829}, {Ref: 239}, {Ref: 2349}, {Ref: 659}, {Ref: 269},
},
@ -2198,14 +2202,14 @@ func TestBlockBaseSeriesSet(t *testing.T) {
{
series: []refdSeries{
{
lset: labels.New([]labels.Label{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}...),
lset: labels.FromStrings("a", "a", "b", "b"),
chunks: []chunks.Meta{
{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
},
ref: 10,
},
{
lset: labels.New([]labels.Label{{Name: "b", Value: "c"}}...),
lset: labels.FromStrings("b", "c"),
chunks: []chunks.Meta{{Ref: 8282}},
ref: 3,
},

View file

@ -17,7 +17,6 @@ package record
import (
"math"
"sort"
"github.com/pkg/errors"
@ -182,7 +181,9 @@ type RefMmapMarker struct {
// Decoder decodes series, sample, metadata and tombstone records.
// The zero value is ready to use.
type Decoder struct{}
type Decoder struct {
builder labels.ScratchBuilder
}
// Type returns the type of the record.
// Returns RecordUnknown if no valid record type is found.
@ -267,14 +268,15 @@ func (d *Decoder) Metadata(rec []byte, metadata []RefMetadata) ([]RefMetadata, e
// DecodeLabels decodes one set of labels from buf.
func (d *Decoder) DecodeLabels(dec *encoding.Decbuf) labels.Labels {
lset := make(labels.Labels, dec.Uvarint())
for i := range lset {
lset[i].Name = dec.UvarintStr()
lset[i].Value = dec.UvarintStr()
// TODO: reconsider if this function could be pushed down into labels.Labels to be more efficient.
d.builder.Reset()
nLabels := dec.Uvarint()
for i := 0; i < nLabels; i++ {
lName := dec.UvarintStr()
lValue := dec.UvarintStr()
d.builder.Add(lName, lValue)
}
sort.Sort(lset)
return lset
return d.builder.Labels()
}
// Samples appends samples in rec to the given slice.
@ -525,12 +527,13 @@ func (e *Encoder) Metadata(metadata []RefMetadata, b []byte) []byte {
// EncodeLabels encodes the contents of labels into buf.
func EncodeLabels(buf *encoding.Encbuf, lbls labels.Labels) {
buf.PutUvarint(len(lbls))
// TODO: reconsider if this function could be pushed down into labels.Labels to be more efficient.
buf.PutUvarint(lbls.Len())
for _, l := range lbls {
lbls.Range(func(l labels.Label) {
buf.PutUvarintStr(l.Name)
buf.PutUvarintStr(l.Value)
}
})
}
// Samples appends the encoded samples to b and returns the resulting slice.

View file

@ -80,11 +80,11 @@ func TestRepairBadIndexVersion(t *testing.T) {
require.NoError(t, err)
p, err := r.Postings("b", "1")
require.NoError(t, err)
var builder labels.ScratchBuilder
for p.Next() {
t.Logf("next ID %d", p.At())
var lset labels.Labels
require.Error(t, r.Series(p.At(), &lset, nil))
require.Error(t, r.Series(p.At(), &builder, nil))
}
require.NoError(t, p.Err())
require.NoError(t, r.Close())
@ -104,10 +104,9 @@ func TestRepairBadIndexVersion(t *testing.T) {
for p.Next() {
t.Logf("next ID %d", p.At())
var lset labels.Labels
var chks []chunks.Meta
require.NoError(t, r.Series(p.At(), &lset, &chks))
res = append(res, lset)
require.NoError(t, r.Series(p.At(), &builder, &chks))
res = append(res, builder.Labels())
}
require.NoError(t, p.Err())

View file

@ -58,9 +58,7 @@ func BenchmarkLabelsClone(b *testing.B) {
l := labels.FromMap(m)
for i := 0; i < b.N; i++ {
res := make(labels.Labels, len(l))
copy(res, l)
l = res
l = l.Copy()
}
}
@ -106,13 +104,13 @@ func BenchmarkLabelSetAccess(b *testing.B) {
var v string
for _, l := range ls {
ls.Range(func(l labels.Label) {
b.Run(l.Name, func(b *testing.B) {
for i := 0; i < b.N; i++ {
v = ls.Get(l.Name)
}
})
}
})
_ = v
}

View file

@ -90,8 +90,8 @@ type testTargetRetriever struct {
type testTargetParams struct {
Identifier string
Labels []labels.Label
DiscoveredLabels []labels.Label
Labels labels.Labels
DiscoveredLabels labels.Labels
Params url.Values
Reports []*testReport
Active bool
@ -508,9 +508,9 @@ func TestGetSeries(t *testing.T) {
name: "non empty label matcher",
matchers: []string{`{foo=~".+"}`},
expected: []labels.Labels{
{labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "abc", Value: "qwerty"}, labels.Label{Name: "foo", Value: "baz"}},
{labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}},
{labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}, labels.Label{Name: "xyz", Value: "qwerty"}},
labels.FromStrings("__name__", "test_metric2", "abc", "qwerty", "foo", "baz"),
labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
labels.FromStrings("__name__", "test_metric2", "foo", "boo", "xyz", "qwerty"),
},
api: api,
},
@ -518,8 +518,8 @@ func TestGetSeries(t *testing.T) {
name: "exact label matcher",
matchers: []string{`{foo="boo"}`},
expected: []labels.Labels{
{labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}},
{labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}, labels.Label{Name: "xyz", Value: "qwerty"}},
labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
labels.FromStrings("__name__", "test_metric2", "foo", "boo", "xyz", "qwerty"),
},
api: api,
},
@ -527,9 +527,9 @@ func TestGetSeries(t *testing.T) {
name: "two matchers",
matchers: []string{`{foo="boo"}`, `{foo="baz"}`},
expected: []labels.Labels{
{labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "abc", Value: "qwerty"}, labels.Label{Name: "foo", Value: "baz"}},
{labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}},
{labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}, labels.Label{Name: "xyz", Value: "qwerty"}},
labels.FromStrings("__name__", "test_metric2", "abc", "qwerty", "foo", "baz"),
labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
labels.FromStrings("__name__", "test_metric2", "foo", "boo", "xyz", "qwerty"),
},
api: api,
},
@ -558,12 +558,6 @@ func TestGetSeries(t *testing.T) {
assertAPIError(t, res.err, tc.expectedErrorType)
if tc.expectedErrorType == errorNone {
r := res.data.([]labels.Labels)
for _, l := range tc.expected {
sort.Sort(l)
}
for _, l := range r {
sort.Sort(l)
}
sort.Sort(byLabels(tc.expected))
sort.Sort(byLabels(r))
require.Equal(t, tc.expected, r)
@ -928,7 +922,7 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
model.ScrapeIntervalLabel: "15s",
model.ScrapeTimeoutLabel: "5s",
}),
DiscoveredLabels: nil,
DiscoveredLabels: labels.EmptyLabels(),
Params: url.Values{},
Reports: []*testReport{{scrapeStart, 70 * time.Millisecond, nil}},
Active: true,
@ -943,14 +937,14 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
model.ScrapeIntervalLabel: "20s",
model.ScrapeTimeoutLabel: "10s",
}),
DiscoveredLabels: nil,
DiscoveredLabels: labels.EmptyLabels(),
Params: url.Values{"target": []string{"example.com"}},
Reports: []*testReport{{scrapeStart, 100 * time.Millisecond, errors.New("failed")}},
Active: true,
},
{
Identifier: "blackbox",
Labels: nil,
Labels: labels.EmptyLabels(),
DiscoveredLabels: labels.FromMap(map[string]string{
model.SchemeLabel: "http",
model.AddressLabel: "http://dropped.example.com:9115",
@ -1111,7 +1105,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
{V: 1, T: timestamp.FromTime(start.Add(1 * time.Second))},
{V: 2, T: timestamp.FromTime(start.Add(2 * time.Second))},
},
Metric: nil,
// No Metric returned - use zero value for comparison.
},
},
},
@ -3296,7 +3290,7 @@ func BenchmarkRespond(b *testing.B) {
Result: promql.Matrix{
promql.Series{
Points: points,
Metric: nil,
Metric: labels.EmptyLabels(),
},
},
}

View file

@ -171,26 +171,24 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
Untyped: &dto.Untyped{},
}
for _, l := range s.Metric {
err := s.Metric.Validate(func(l labels.Label) error {
if l.Value == "" {
// No value means unset. Never consider those labels.
// This is also important to protect against nameless metrics.
continue
return nil
}
if l.Name == labels.MetricName {
nameSeen = true
if l.Value == lastMetricName {
// We already have the name in the current MetricFamily,
// and we ignore nameless metrics.
continue
return nil
}
// Need to start a new MetricFamily. Ship off the old one (if any) before
// creating the new one.
if protMetricFam != nil {
if err := enc.Encode(protMetricFam); err != nil {
federationErrors.Inc()
level.Error(h.logger).Log("msg", "federation failed", "err", err)
return
return err
}
}
protMetricFam = &dto.MetricFamily{
@ -198,7 +196,7 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
Name: proto.String(l.Value),
}
lastMetricName = l.Value
continue
return nil
}
protMetric.Label = append(protMetric.Label, &dto.LabelPair{
Name: proto.String(l.Name),
@ -207,6 +205,12 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
if _, ok := externalLabels[l.Name]; ok {
globalUsed[l.Name] = struct{}{}
}
return nil
})
if err != nil {
federationErrors.Inc()
level.Error(h.logger).Log("msg", "federation failed", "err", err)
return
}
if !nameSeen {
level.Warn(h.logger).Log("msg", "Ignoring nameless metric during federation", "metric", s.Metric)

View file

@ -162,7 +162,7 @@ test_metric_without_labels{instance=""} 1001 6000000
},
"external labels are added if not already present": {
params: "match[]={__name__=~'.%2b'}", // '%2b' is an URL-encoded '+'.
externalLabels: labels.Labels{{Name: "foo", Value: "baz"}, {Name: "zone", Value: "ie"}},
externalLabels: labels.FromStrings("foo", "baz", "zone", "ie"),
code: 200,
body: `# TYPE test_metric1 untyped
test_metric1{foo="bar",instance="i",zone="ie"} 10000 6000000
@ -179,7 +179,7 @@ test_metric_without_labels{foo="baz",instance="",zone="ie"} 1001 6000000
// This makes no sense as a configuration, but we should
// know what it does anyway.
params: "match[]={__name__=~'.%2b'}", // '%2b' is an URL-encoded '+'.
externalLabels: labels.Labels{{Name: "instance", Value: "baz"}},
externalLabels: labels.FromStrings("instance", "baz"),
code: 200,
body: `# TYPE test_metric1 untyped
test_metric1{foo="bar",instance="i"} 10000 6000000

View file

@ -653,13 +653,10 @@ func (h *Handler) consoles(w http.ResponseWriter, r *http.Request) {
params[k] = v[0]
}
externalLabels := map[string]string{}
h.mtx.RLock()
els := h.config.GlobalConfig.ExternalLabels
h.mtx.RUnlock()
for _, el := range els {
externalLabels[el.Name] = el.Value
}
externalLabels := els.Map()
// Inject some convenience variables that are easier to remember for users
// who are not used to Go's templating system.