Mirror of https://github.com/prometheus/prometheus.git (synced 2025-03-05 20:59:13 -08:00)

Merge pull request #11717 from bboreham/labels-abstraction: Add and use abstractions over labels.Labels
Commit: ccea61c7bf
@@ -631,9 +631,9 @@ func checkRules(filename string, lintSettings lintConfig) (int, []error) {
 errMessage := fmt.Sprintf("%d duplicate rule(s) found.\n", len(dRules))
 for _, n := range dRules {
 errMessage += fmt.Sprintf("Metric: %s\nLabel(s):\n", n.metric)
-for _, l := range n.label {
+n.label.Range(func(l labels.Label) {
 errMessage += fmt.Sprintf("\t%s: %s\n", l.Name, l.Value)
-}
+})
 }
 errMessage += "Might cause inconsistency while recording expressions"
 return 0, []error{fmt.Errorf("%w %s", lintError, errMessage)}
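The hunk above is the core pattern of this PR: call sites stop ranging over labels.Labels as a slice and go through the new Range callback instead, so the underlying representation can change later. A minimal sketch of the new call style, assuming the post-merge labels package; printAll is an illustrative helper, not part of the change:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

// printAll shows the callback style used above: iterate with Range
// instead of indexing into Labels directly.
func printAll(ls labels.Labels) {
	ls.Range(func(l labels.Label) {
		fmt.Printf("%s=%s\n", l.Name, l.Value)
	})
}

func main() {
	printAll(labels.FromStrings("job", "node", "instance", "localhost:9100"))
}
```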
@@ -158,14 +158,15 @@ func (importer *ruleImporter) importRule(ctx context.Context, ruleExpr, ruleName

 // Setting the rule labels after the output of the query,
 // so they can override query output.
-for _, l := range ruleLabels {
+ruleLabels.Range(func(l labels.Label) {
 lb.Set(l.Name, l.Value)
-}
+})

 lb.Set(labels.MetricName, ruleName)
+lbls := lb.Labels(labels.EmptyLabels())

 for _, value := range sample.Values {
-if err := app.add(ctx, lb.Labels(nil), timestamp.FromTime(value.Timestamp.Time()), float64(value.Value)); err != nil {
+if err := app.add(ctx, lbls, timestamp.FromTime(value.Timestamp.Time()), float64(value.Value)); err != nil {
 return fmt.Errorf("add: %w", err)
 }
 }
@@ -100,7 +100,7 @@ func TestBackfillRuleIntegration(t *testing.T) {
 require.Equal(t, 1, len(gRules))
 require.Equal(t, "rule1", gRules[0].Name())
 require.Equal(t, "ruleExpr", gRules[0].Query().String())
-require.Equal(t, 1, len(gRules[0].Labels()))
+require.Equal(t, 1, gRules[0].Labels().Len())

 group2 := ruleImporter.groups[path2+";group2"]
 require.NotNil(t, group2)
@@ -109,7 +109,7 @@ func TestBackfillRuleIntegration(t *testing.T) {
 require.Equal(t, 2, len(g2Rules))
 require.Equal(t, "grp2_rule1", g2Rules[0].Name())
 require.Equal(t, "grp2_rule1_expr", g2Rules[0].Query().String())
-require.Equal(t, 0, len(g2Rules[0].Labels()))
+require.Equal(t, 0, g2Rules[0].Labels().Len())

 // Backfill all recording rules then check the blocks to confirm the correct data was created.
 errs = ruleImporter.importAll(ctx)
@@ -132,12 +132,12 @@ func TestBackfillRuleIntegration(t *testing.T) {
 for selectedSeries.Next() {
 seriesCount++
 series := selectedSeries.At()
-if len(series.Labels()) != 3 {
+if series.Labels().Len() != 3 {
-require.Equal(t, 2, len(series.Labels()))
+require.Equal(t, 2, series.Labels().Len())
 x := labels.FromStrings("__name__", "grp2_rule1", "name1", "val1")
 require.Equal(t, x, series.Labels())
 } else {
-require.Equal(t, 3, len(series.Labels()))
+require.Equal(t, 3, series.Labels().Len())
 }
 it := series.Iterator(nil)
 for it.Next() == chunkenc.ValFloat {
@@ -315,7 +315,7 @@ func readPrometheusLabels(r io.Reader, n int) ([]labels.Labels, error) {
 i := 0

 for scanner.Scan() && i < n {
-m := make(labels.Labels, 0, 10)
+m := make([]labels.Label, 0, 10)

 r := strings.NewReplacer("\"", "", "{", "", "}", "")
 s := r.Replace(scanner.Text())
@@ -325,13 +325,12 @@ func readPrometheusLabels(r io.Reader, n int) ([]labels.Labels, error) {
 split := strings.Split(labelChunk, ":")
 m = append(m, labels.Label{Name: split[0], Value: split[1]})
 }
-// Order of the k/v labels matters, don't assume we'll always receive them already sorted.
-sort.Sort(m)
-h := m.Hash()
+ml := labels.New(m...) // This sorts by name - order of the k/v labels matters, don't assume we'll always receive them already sorted.
+h := ml.Hash()
 if _, ok := hashes[h]; ok {
 continue
 }
-mets = append(mets, m)
+mets = append(mets, ml)
 hashes[h] = struct{}{}
 i++
 }
@@ -470,21 +469,21 @@ func analyzeBlock(path, blockID string, limit int, runExtended bool) error {
 if err != nil {
 return err
 }
-lbls := labels.Labels{}
 chks := []chunks.Meta{}
+builder := labels.ScratchBuilder{}
 for p.Next() {
-if err = ir.Series(p.At(), &lbls, &chks); err != nil {
+if err = ir.Series(p.At(), &builder, &chks); err != nil {
 return err
 }
 // Amount of the block time range not covered by this series.
 uncovered := uint64(meta.MaxTime-meta.MinTime) - uint64(chks[len(chks)-1].MaxTime-chks[0].MinTime)
-for _, lbl := range lbls {
+builder.Labels().Range(func(lbl labels.Label) {
 key := lbl.Name + "=" + lbl.Value
 labelsUncovered[lbl.Name] += uncovered
 labelpairsUncovered[key] += uncovered
 labelpairsCount[key]++
 entries++
-}
+})
 }
 if p.Err() != nil {
 return p.Err()
@@ -589,10 +588,10 @@ func analyzeCompaction(block tsdb.BlockReader, indexr tsdb.IndexReader) (err err
 nBuckets := 10
 histogram := make([]int, nBuckets)
 totalChunks := 0
+var builder labels.ScratchBuilder
 for postingsr.Next() {
-lbsl := labels.Labels{}
 var chks []chunks.Meta
-if err := indexr.Series(postingsr.At(), &lbsl, &chks); err != nil {
+if err := indexr.Series(postingsr.At(), &builder, &chks); err != nil {
 return err
 }

@@ -284,8 +284,8 @@ func (tg *testGroup) test(evalInterval time.Duration, groupOrderMap map[string]i
 for _, a := range ar.ActiveAlerts() {
 if a.State == rules.StateFiring {
 alerts = append(alerts, labelAndAnnotation{
-Labels: append(labels.Labels{}, a.Labels...),
+Labels: a.Labels.Copy(),
-Annotations: append(labels.Labels{}, a.Annotations...),
+Annotations: a.Annotations.Copy(),
 })
 }
 }
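The test-harness change above swaps the slice-append idiom for Labels.Copy(), which does not assume Labels is a slice. A small sketch of the difference, assuming the Prometheus labels package:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	orig := labels.FromStrings("alertname", "HighLatency", "severity", "page")

	// Copy returns an independent Labels value. The old idiom,
	// append(labels.Labels{}, orig...), only works while Labels is a slice.
	snapshot := orig.Copy()
	fmt.Println(snapshot)
}
```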
@@ -80,7 +80,8 @@ func Load(s string, expandExternalLabels bool, logger log.Logger) (*Config, erro
 return cfg, nil
 }

-for i, v := range cfg.GlobalConfig.ExternalLabels {
+b := labels.ScratchBuilder{}
+cfg.GlobalConfig.ExternalLabels.Range(func(v labels.Label) {
 newV := os.Expand(v.Value, func(s string) string {
 if s == "$" {
 return "$"
@@ -93,10 +94,10 @@ func Load(s string, expandExternalLabels bool, logger log.Logger) (*Config, erro
 })
 if newV != v.Value {
 level.Debug(logger).Log("msg", "External label replaced", "label", v.Name, "input", v.Value, "output", newV)
-v.Value = newV
-cfg.GlobalConfig.ExternalLabels[i] = v
 }
-}
+b.Add(v.Name, newV)
+})
+cfg.GlobalConfig.ExternalLabels = b.Labels()
 return cfg, nil
 }

@@ -361,13 +362,16 @@ func (c *GlobalConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 return err
 }

-for _, l := range gc.ExternalLabels {
+if err := gc.ExternalLabels.Validate(func(l labels.Label) error {
 if !model.LabelName(l.Name).IsValid() {
 return fmt.Errorf("%q is not a valid label name", l.Name)
 }
 if !model.LabelValue(l.Value).IsValid() {
 return fmt.Errorf("%q is not a valid label value", l.Value)
 }
+return nil
+}); err != nil {
+return err
 }

 // First set the correct scrape interval, then check that the timeout
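The config hunk above replaces a range loop with the new Validate helper, which stops at the first error returned by the callback. A sketch of the same check in isolation; the label values are made up, and the model import is the prometheus/common package the config code already uses:

```go
package main

import (
	"fmt"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	ext := labels.FromStrings("cluster", "eu-1", "region", "europe")

	// Validate calls the function for each label and returns the first error.
	err := ext.Validate(func(l labels.Label) error {
		if !model.LabelName(l.Name).IsValid() {
			return fmt.Errorf("%q is not a valid label name", l.Name)
		}
		if !model.LabelValue(l.Value).IsValid() {
			return fmt.Errorf("%q is not a valid label value", l.Value)
		}
		return nil
	})
	fmt.Println("validation error:", err)
}
```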
@@ -394,7 +398,7 @@ func (c *GlobalConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {

 // isZero returns true iff the global config is the zero value.
 func (c *GlobalConfig) isZero() bool {
-return c.ExternalLabels == nil &&
+return c.ExternalLabels.IsEmpty() &&
 c.ScrapeInterval == 0 &&
 c.ScrapeTimeout == 0 &&
 c.EvaluationInterval == 0 &&
@@ -357,9 +357,7 @@ func EmptyLabels() Labels {
 // The caller has to guarantee that all label names are unique.
 func New(ls ...Label) Labels {
 set := make(Labels, 0, len(ls))
-for _, l := range ls {
-set = append(set, l)
-}
+set = append(set, ls...)
 sort.Sort(set)

 return set
@@ -414,6 +412,49 @@ func Compare(a, b Labels) int {
 return len(a) - len(b)
 }

+// Copy labels from b on top of whatever was in ls previously, reusing memory or expanding if needed.
+func (ls *Labels) CopyFrom(b Labels) {
+(*ls) = append((*ls)[:0], b...)
+}
+
+// IsEmpty returns true if ls represents an empty set of labels.
+func (ls Labels) IsEmpty() bool {
+return len(ls) == 0
+}
+
+// Range calls f on each label.
+func (ls Labels) Range(f func(l Label)) {
+for _, l := range ls {
+f(l)
+}
+}
+
+// Validate calls f on each label. If f returns a non-nil error, then it returns that error cancelling the iteration.
+func (ls Labels) Validate(f func(l Label) error) error {
+for _, l := range ls {
+if err := f(l); err != nil {
+return err
+}
+}
+return nil
+}
+
+// InternStrings calls intern on every string value inside ls, replacing them with what it returns.
+func (ls *Labels) InternStrings(intern func(string) string) {
+for i, l := range *ls {
+(*ls)[i].Name = intern(l.Name)
+(*ls)[i].Value = intern(l.Value)
+}
+}
+
+// ReleaseStrings calls release on every string value inside ls.
+func (ls Labels) ReleaseStrings(release func(string)) {
+for _, l := range ls {
+release(l.Name)
+release(l.Value)
+}
+}
+
 // Builder allows modifying Labels.
 type Builder struct {
 base Labels
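The hunk above introduces the accessor methods (CopyFrom, IsEmpty, Range, Validate, InternStrings, ReleaseStrings) that the rest of the diff switches callers onto. A minimal usage sketch, using only the API added here:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	src := labels.FromStrings("job", "api", "env", "prod")

	fmt.Println(labels.EmptyLabels().IsEmpty()) // true
	fmt.Println(src.IsEmpty())                  // false

	// CopyFrom reuses dst's backing storage where possible.
	var dst labels.Labels
	dst.CopyFrom(src)

	dst.Range(func(l labels.Label) {
		fmt.Printf("%s=%s\n", l.Name, l.Value)
	})
}
```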
@@ -470,7 +511,7 @@ Outer:
 return b
 }

-// Set the name/value pair as a label.
+// Set the name/value pair as a label. A value of "" means delete that label.
 func (b *Builder) Set(n, v string) *Builder {
 if v == "" {
 // Empty labels are the same as missing labels.
@@ -525,3 +566,40 @@ Outer:
 }
 return res
 }
+
+// ScratchBuilder allows efficient construction of a Labels from scratch.
+type ScratchBuilder struct {
+add Labels
+}
+
+// NewScratchBuilder creates a ScratchBuilder initialized for Labels with n entries.
+func NewScratchBuilder(n int) ScratchBuilder {
+return ScratchBuilder{add: make([]Label, 0, n)}
+}
+
+func (b *ScratchBuilder) Reset() {
+b.add = b.add[:0]
+}
+
+// Add a name/value pair.
+// Note if you Add the same name twice you will get a duplicate label, which is invalid.
+func (b *ScratchBuilder) Add(name, value string) {
+b.add = append(b.add, Label{Name: name, Value: value})
+}
+
+// Sort the labels added so far by name.
+func (b *ScratchBuilder) Sort() {
+sort.Sort(b.add)
+}
+
+// Assign is for when you already have a Labels which you want this ScratchBuilder to return.
+func (b *ScratchBuilder) Assign(ls Labels) {
+b.add = append(b.add[:0], ls...) // Copy on top of our slice, so we don't retain the input slice.
+}
+
+// Return the name/value pairs added so far as a Labels object.
+// Note: if you want them sorted, call Sort() first.
+func (b *ScratchBuilder) Labels() Labels {
+// Copy the slice, so the next use of ScratchBuilder doesn't overwrite.
+return append([]Label{}, b.add...)
+}
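ScratchBuilder, added above, is the write-side counterpart of Range: add pairs in any order, Sort once, take the result with Labels, and Reset to reuse the backing array. A short sketch using only the methods defined in this hunk:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	b := labels.NewScratchBuilder(3)

	b.Add("instance", "localhost:9090")
	b.Add("__name__", "up")
	b.Add("job", "prometheus")
	b.Sort() // Labels must be sorted by name before use.
	first := b.Labels()

	// Reset keeps the allocated capacity for the next label set.
	b.Reset()
	b.Add("__name__", "up")
	b.Sort()
	second := b.Labels()

	fmt.Println(first, second)
}
```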
@@ -36,10 +36,6 @@ func TestLabels_String(t *testing.T) {
 lables: Labels{},
 expected: "{}",
 },
-{
-lables: nil,
-expected: "{}",
-},
 }
 for _, c := range cases {
 str := c.lables.String()
@@ -316,18 +312,18 @@ func TestLabels_Equal(t *testing.T) {

 func TestLabels_FromStrings(t *testing.T) {
 labels := FromStrings("aaa", "111", "bbb", "222")
-expected := Labels{
-{
-Name: "aaa",
-Value: "111",
-},
-{
-Name: "bbb",
-Value: "222",
-},
+x := 0
+labels.Range(func(l Label) {
+switch x {
+case 0:
+require.Equal(t, Label{Name: "aaa", Value: "111"}, l, "unexpected value")
+case 1:
+require.Equal(t, Label{Name: "bbb", Value: "222"}, l, "unexpected value")
+default:
+t.Fatalf("unexpected labelset value %d: %v", x, l)
 }
+x++
-require.Equal(t, expected, labels, "unexpected labelset")
+})

 require.Panics(t, func() { FromStrings("aaa", "111", "bbb") }) //nolint:staticcheck // Ignore SA5012, error is intentional test.
 }
@@ -539,7 +535,6 @@ func TestBuilder(t *testing.T) {
 want: FromStrings("aaa", "111", "ccc", "333"),
 },
 {
-base: nil,
 set: []Label{{"aaa", "111"}, {"bbb", "222"}, {"ccc", "333"}},
 del: []string{"bbb"},
 want: FromStrings("aaa", "111", "ccc", "333"),
@@ -601,11 +596,49 @@ func TestBuilder(t *testing.T) {
 }
 }

+func TestScratchBuilder(t *testing.T) {
+for i, tcase := range []struct {
+add []Label
+want Labels
+}{
+{
+add: []Label{},
+want: EmptyLabels(),
+},
+{
+add: []Label{{"aaa", "111"}},
+want: FromStrings("aaa", "111"),
+},
+{
+add: []Label{{"aaa", "111"}, {"bbb", "222"}, {"ccc", "333"}},
+want: FromStrings("aaa", "111", "bbb", "222", "ccc", "333"),
+},
+{
+add: []Label{{"bbb", "222"}, {"aaa", "111"}, {"ccc", "333"}},
+want: FromStrings("aaa", "111", "bbb", "222", "ccc", "333"),
+},
+{
+add: []Label{{"ddd", "444"}},
+want: FromStrings("ddd", "444"),
+},
+} {
+t.Run(fmt.Sprint(i), func(t *testing.T) {
+b := ScratchBuilder{}
+for _, lbl := range tcase.add {
+b.Add(lbl.Name, lbl.Value)
+}
+b.Sort()
+require.Equal(t, tcase.want, b.Labels())
+b.Assign(tcase.want)
+require.Equal(t, tcase.want, b.Labels())
+})
+}
+}
+
 func TestLabels_Hash(t *testing.T) {
 lbls := FromStrings("foo", "bar", "baz", "qux")
 require.Equal(t, lbls.Hash(), lbls.Hash())
-require.NotEqual(t, lbls.Hash(), Labels{lbls[1], lbls[0]}.Hash(), "unordered labels match.")
-require.NotEqual(t, lbls.Hash(), Labels{lbls[0]}.Hash(), "different labels match.")
+require.NotEqual(t, lbls.Hash(), FromStrings("foo", "bar").Hash(), "different labels match.")
 }

 var benchmarkLabelsResult uint64
@@ -623,7 +656,7 @@ func BenchmarkLabels_Hash(b *testing.B) {
 // Label ~20B name, 50B value.
 b.Set(fmt.Sprintf("abcdefghijabcdefghijabcdefghij%d", i), fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i))
 }
-return b.Labels(nil)
+return b.Labels(EmptyLabels())
 }(),
 },
 {
@@ -634,7 +667,7 @@ func BenchmarkLabels_Hash(b *testing.B) {
 // Label ~50B name, 50B value.
 b.Set(fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i), fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i))
 }
-return b.Labels(nil)
+return b.Labels(EmptyLabels())
 }(),
 },
 {
@@ -17,7 +17,6 @@ import (
 "bufio"
 "fmt"
 "os"
-"sort"
 "strings"
 )

@@ -51,13 +50,14 @@ func ReadLabels(fn string, n int) ([]Labels, error) {
 defer f.Close()

 scanner := bufio.NewScanner(f)
+b := ScratchBuilder{}

 var mets []Labels
 hashes := map[uint64]struct{}{}
 i := 0

 for scanner.Scan() && i < n {
-m := make(Labels, 0, 10)
+b.Reset()

 r := strings.NewReplacer("\"", "", "{", "", "}", "")
 s := r.Replace(scanner.Text())
@@ -65,10 +65,11 @@ func ReadLabels(fn string, n int) ([]Labels, error) {
 labelChunks := strings.Split(s, ",")
 for _, labelChunk := range labelChunks {
 split := strings.Split(labelChunk, ":")
-m = append(m, Label{Name: split[0], Value: split[1]})
+b.Add(split[0], split[1])
 }
 // Order of the k/v labels matters, don't assume we'll always receive them already sorted.
-sort.Sort(m)
+b.Sort()
+m := b.Labels()

 h := m.Hash()
 if _, ok := hashes[h]; ok {
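ReadLabels above now reuses one ScratchBuilder across input lines and deduplicates by Hash. The sketch below mirrors that shape outside the file-parsing details; dedupe is an illustrative helper, not part of the package:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

// dedupe builds each set with a reused ScratchBuilder and drops duplicates by Hash.
func dedupe(sets [][]labels.Label) []labels.Labels {
	b := labels.ScratchBuilder{}
	seen := map[uint64]struct{}{}
	var out []labels.Labels
	for _, set := range sets {
		b.Reset()
		for _, l := range set {
			b.Add(l.Name, l.Value)
		}
		b.Sort()
		m := b.Labels()
		if _, ok := seen[m.Hash()]; ok {
			continue
		}
		seen[m.Hash()] = struct{}{}
		out = append(out, m)
	}
	return out
}

func main() {
	fmt.Println(dedupe([][]labels.Label{
		{{Name: "a", Value: "1"}},
		{{Name: "a", Value: "1"}},
		{{Name: "a", Value: "2"}},
	}))
}
```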
@@ -203,20 +203,20 @@ func (re Regexp) String() string {

 // Process returns a relabeled copy of the given label set. The relabel configurations
 // are applied in order of input.
-// If a label set is dropped, nil is returned.
+// If a label set is dropped, EmptyLabels and false is returned.
 // May return the input labelSet modified.
-func Process(lbls labels.Labels, cfgs ...*Config) labels.Labels {
+func Process(lbls labels.Labels, cfgs ...*Config) (ret labels.Labels, keep bool) {
-lb := labels.NewBuilder(nil)
+lb := labels.NewBuilder(labels.EmptyLabels())
 for _, cfg := range cfgs {
-lbls = relabel(lbls, cfg, lb)
+lbls, keep = relabel(lbls, cfg, lb)
-if lbls == nil {
+if !keep {
-return nil
+return labels.EmptyLabels(), false
 }
 }
-return lbls
+return lbls, true
 }

-func relabel(lset labels.Labels, cfg *Config, lb *labels.Builder) labels.Labels {
+func relabel(lset labels.Labels, cfg *Config, lb *labels.Builder) (ret labels.Labels, keep bool) {
 var va [16]string
 values := va[:0]
 if len(cfg.SourceLabels) > cap(values) {
@@ -232,19 +232,19 @@ func relabel(lset labels.Labels, cfg *Config, lb *labels.Builder) labels.Labels
 switch cfg.Action {
 case Drop:
 if cfg.Regex.MatchString(val) {
-return nil
+return labels.EmptyLabels(), false
 }
 case Keep:
 if !cfg.Regex.MatchString(val) {
-return nil
+return labels.EmptyLabels(), false
 }
 case DropEqual:
 if lset.Get(cfg.TargetLabel) == val {
-return nil
+return labels.EmptyLabels(), false
 }
 case KeepEqual:
 if lset.Get(cfg.TargetLabel) != val {
-return nil
+return labels.EmptyLabels(), false
 }
 case Replace:
 indexes := cfg.Regex.FindStringSubmatchIndex(val)
@@ -271,29 +271,29 @@ func relabel(lset labels.Labels, cfg *Config, lb *labels.Builder) labels.Labels
 mod := sum64(md5.Sum([]byte(val))) % cfg.Modulus
 lb.Set(cfg.TargetLabel, fmt.Sprintf("%d", mod))
 case LabelMap:
-for _, l := range lset {
+lset.Range(func(l labels.Label) {
 if cfg.Regex.MatchString(l.Name) {
 res := cfg.Regex.ReplaceAllString(l.Name, cfg.Replacement)
 lb.Set(res, l.Value)
 }
-}
+})
 case LabelDrop:
-for _, l := range lset {
+lset.Range(func(l labels.Label) {
 if cfg.Regex.MatchString(l.Name) {
 lb.Del(l.Name)
 }
-}
+})
 case LabelKeep:
-for _, l := range lset {
+lset.Range(func(l labels.Label) {
 if !cfg.Regex.MatchString(l.Name) {
 lb.Del(l.Name)
 }
-}
+})
 default:
 panic(fmt.Errorf("relabel: unknown relabel action type %q", cfg.Action))
 }

-return lb.Labels(lset)
+return lb.Labels(lset), true
 }

 // sum64 sums the md5 hash to an uint64.
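relabel.Process now reports a dropped series through an explicit keep flag instead of a nil Labels value. A sketch of the new calling convention; the config values are made up, but Drop, MustNewRegexp, and the Config fields are the existing relabel API:

```go
package main

import (
	"fmt"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/relabel"
)

func main() {
	cfg := &relabel.Config{
		SourceLabels: model.LabelNames{"job"},
		Regex:        relabel.MustNewRegexp("secret.*"),
		Action:       relabel.Drop,
	}

	// The second return value replaces the old "nil means dropped" convention.
	lb, keep := relabel.Process(labels.FromStrings("job", "secret-app"), cfg)
	fmt.Println(lb, keep)
}
```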
@@ -28,6 +28,7 @@ func TestRelabel(t *testing.T) {
 input labels.Labels
 relabel []*Config
 output labels.Labels
+drop bool
 }{
 {
 input: labels.FromMap(map[string]string{
@@ -101,7 +102,7 @@ func TestRelabel(t *testing.T) {
 Action: Replace,
 },
 },
-output: nil,
+drop: true,
 },
 {
 input: labels.FromMap(map[string]string{
@@ -115,7 +116,7 @@ func TestRelabel(t *testing.T) {
 Action: Drop,
 },
 },
-output: nil,
+drop: true,
 },
 {
 input: labels.FromMap(map[string]string{
@@ -177,7 +178,7 @@ func TestRelabel(t *testing.T) {
 Action: Keep,
 },
 },
-output: nil,
+drop: true,
 },
 {
 input: labels.FromMap(map[string]string{
@@ -483,7 +484,7 @@ func TestRelabel(t *testing.T) {
 TargetLabel: "__port1",
 },
 },
-output: nil,
+drop: true,
 },
 {
 input: labels.FromMap(map[string]string{
@@ -517,7 +518,7 @@ func TestRelabel(t *testing.T) {
 TargetLabel: "__port2",
 },
 },
-output: nil,
+drop: true,
 },
 }

@@ -538,8 +539,11 @@ func TestRelabel(t *testing.T) {
 }
 }

-res := Process(test.input, test.relabel...)
+res, keep := Process(test.input, test.relabel...)
-require.Equal(t, test.output, res)
+require.Equal(t, !test.drop, keep)
+if keep {
+require.Equal(t, test.output, res)
+}
 }
 }

@@ -721,7 +725,7 @@ func BenchmarkRelabel(b *testing.B) {
 for _, tt := range tests {
 b.Run(tt.name, func(b *testing.B) {
 for i := 0; i < b.N; i++ {
-_ = Process(tt.lbls, tt.cfgs...)
+_, _ = Process(tt.lbls, tt.cfgs...)
 }
 })
 }
@@ -22,7 +22,6 @@ import (
 "fmt"
 "io"
 "math"
-"sort"
 "strings"
 "unicode/utf8"

@@ -82,6 +81,7 @@ func (l *openMetricsLexer) Error(es string) {
 // This is based on the working draft https://docs.google.com/document/u/1/d/1KwV0mAXwwbvvifBvDKH_LU1YjyXE_wxCkHNoCGq1GX0/edit
 type OpenMetricsParser struct {
 l *openMetricsLexer
+builder labels.ScratchBuilder
 series []byte
 text []byte
 mtype MetricType
@@ -158,14 +158,11 @@ func (p *OpenMetricsParser) Comment() []byte {
 // Metric writes the labels of the current sample into the passed labels.
 // It returns the string from which the metric was parsed.
 func (p *OpenMetricsParser) Metric(l *labels.Labels) string {
-// Allocate the full immutable string immediately, so we just
-// have to create references on it below.
+// Copy the buffer to a string: this is only necessary for the return value.
 s := string(p.series)

-*l = append(*l, labels.Label{
-Name: labels.MetricName,
-Value: s[:p.offsets[0]-p.start],
-})
+p.builder.Reset()
+p.builder.Add(labels.MetricName, s[:p.offsets[0]-p.start])

 for i := 1; i < len(p.offsets); i += 4 {
 a := p.offsets[i] - p.start
@@ -173,16 +170,16 @@ func (p *OpenMetricsParser) Metric(l *labels.Labels) string {
 c := p.offsets[i+2] - p.start
 d := p.offsets[i+3] - p.start

+value := s[c:d]
 // Replacer causes allocations. Replace only when necessary.
 if strings.IndexByte(s[c:d], byte('\\')) >= 0 {
-*l = append(*l, labels.Label{Name: s[a:b], Value: lvalReplacer.Replace(s[c:d])})
-continue
+value = lvalReplacer.Replace(value)
 }
-*l = append(*l, labels.Label{Name: s[a:b], Value: s[c:d]})
+p.builder.Add(s[a:b], value)
 }

-// Sort labels.
-sort.Sort(*l)
+p.builder.Sort()
+*l = p.builder.Labels()

 return s
 }
@@ -204,17 +201,18 @@ func (p *OpenMetricsParser) Exemplar(e *exemplar.Exemplar) bool {
 e.Ts = p.exemplarTs
 }

+p.builder.Reset()
 for i := 0; i < len(p.eOffsets); i += 4 {
 a := p.eOffsets[i] - p.start
 b := p.eOffsets[i+1] - p.start
 c := p.eOffsets[i+2] - p.start
 d := p.eOffsets[i+3] - p.start

-e.Labels = append(e.Labels, labels.Label{Name: s[a:b], Value: s[c:d]})
+p.builder.Add(s[a:b], s[c:d])
 }

-// Sort the labels.
-sort.Sort(e.Labels)
+p.builder.Sort()
+e.Labels = p.builder.Labels()

 return true
 }
@@ -246,7 +246,6 @@ foo_total 17.0 1520879607.789 # {xx="yy"} 5`
 require.Equal(t, true, found)
 require.Equal(t, *exp[i].e, e)
 }
-res = res[:0]

 case EntryType:
 m, typ := p.Type()
@@ -21,7 +21,6 @@ import (
 "fmt"
 "io"
 "math"
-"sort"
 "strconv"
 "strings"
 "unicode/utf8"
@@ -144,6 +143,7 @@ func (l *promlexer) Error(es string) {
 // Prometheus text exposition format.
 type PromParser struct {
 l *promlexer
+builder labels.ScratchBuilder
 series []byte
 text []byte
 mtype MetricType
@@ -212,14 +212,11 @@ func (p *PromParser) Comment() []byte {
 // Metric writes the labels of the current sample into the passed labels.
 // It returns the string from which the metric was parsed.
 func (p *PromParser) Metric(l *labels.Labels) string {
-// Allocate the full immutable string immediately, so we just
-// have to create references on it below.
+// Copy the buffer to a string: this is only necessary for the return value.
 s := string(p.series)

-*l = append(*l, labels.Label{
-Name: labels.MetricName,
-Value: s[:p.offsets[0]-p.start],
-})
+p.builder.Reset()
+p.builder.Add(labels.MetricName, s[:p.offsets[0]-p.start])

 for i := 1; i < len(p.offsets); i += 4 {
 a := p.offsets[i] - p.start
@@ -227,16 +224,16 @@ func (p *PromParser) Metric(l *labels.Labels) string {
 c := p.offsets[i+2] - p.start
 d := p.offsets[i+3] - p.start

+value := s[c:d]
 // Replacer causes allocations. Replace only when necessary.
 if strings.IndexByte(s[c:d], byte('\\')) >= 0 {
-*l = append(*l, labels.Label{Name: s[a:b], Value: lvalReplacer.Replace(s[c:d])})
-continue
+value = lvalReplacer.Replace(value)
 }
-*l = append(*l, labels.Label{Name: s[a:b], Value: s[c:d]})
+p.builder.Add(s[a:b], value)
 }

-// Sort labels to maintain the sorted labels invariant.
-sort.Sort(*l)
+p.builder.Sort()
+*l = p.builder.Labels()

 return s
 }
@@ -192,7 +192,6 @@ testmetric{label="\"bar\""} 1`
 require.Equal(t, exp[i].t, ts)
 require.Equal(t, exp[i].v, v)
 require.Equal(t, exp[i].lset, res)
-res = res[:0]

 case EntryType:
 m, typ := p.Type()
@@ -414,7 +413,7 @@ func BenchmarkParse(b *testing.B) {
 case EntrySeries:
 m, _, _ := p.Series()

-res := make(labels.Labels, 0, 5)
+var res labels.Labels
 p.Metric(&res)

 total += len(m)
@@ -426,7 +425,7 @@ func BenchmarkParse(b *testing.B) {
 })
 b.Run(parserName+"/decode-metric-reuse/"+fn, func(b *testing.B) {
 total := 0
-res := make(labels.Labels, 0, 5)
+var res labels.Labels

 b.SetBytes(int64(len(buf) / promtestdataSampleCount))
 b.ReportAllocs()
@@ -451,7 +450,6 @@ func BenchmarkParse(b *testing.B) {

 total += len(m)
 i++
-res = res[:0]
 }
 }
 }
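With the ScratchBuilder-based Metric above, the parser overwrites the Labels passed in rather than appending to it, which is why the callers and benchmarks drop the `res = res[:0]` truncation. A sketch of the resulting caller loop, assuming the textparse PromParser API used in these tests:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/textparse"
)

func main() {
	input := []byte("metric_a{foo=\"bar\"} 1\nmetric_b 2\n")
	p := textparse.NewPromParser(input)

	// res is simply reused; Metric replaces its contents on every call.
	var res labels.Labels
	for {
		et, err := p.Next()
		if err != nil {
			break // io.EOF ends the loop.
		}
		if et == textparse.EntrySeries {
			p.Metric(&res)
			fmt.Println(res)
		}
	}
}
```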
@@ -19,7 +19,6 @@ import (
 "fmt"
 "io"
 "math"
-"sort"
 "strings"
 "unicode/utf8"

@@ -59,6 +58,8 @@ type ProtobufParser struct {
 // that we have to decode the next MetricFamily.
 state Entry

+builder labels.ScratchBuilder // held here to reduce allocations when building Labels
+
 mf *dto.MetricFamily

 // The following are just shenanigans to satisfy the Parser interface.
@@ -245,23 +246,19 @@ func (p *ProtobufParser) Comment() []byte {
 // Metric writes the labels of the current sample into the passed labels.
 // It returns the string from which the metric was parsed.
 func (p *ProtobufParser) Metric(l *labels.Labels) string {
-*l = append(*l, labels.Label{
-Name: labels.MetricName,
-Value: p.getMagicName(),
-})
+p.builder.Reset()
+p.builder.Add(labels.MetricName, p.getMagicName())

 for _, lp := range p.mf.GetMetric()[p.metricPos].GetLabel() {
-*l = append(*l, labels.Label{
-Name: lp.GetName(),
-Value: lp.GetValue(),
-})
+p.builder.Add(lp.GetName(), lp.GetValue())
 }
 if needed, name, value := p.getMagicLabel(); needed {
-*l = append(*l, labels.Label{Name: name, Value: value})
+p.builder.Add(name, value)
 }

 // Sort labels to maintain the sorted labels invariant.
-sort.Sort(*l)
+p.builder.Sort()
+*l = p.builder.Labels()

 return p.metricBytes.String()
 }
@@ -305,12 +302,12 @@ func (p *ProtobufParser) Exemplar(ex *exemplar.Exemplar) bool {
 ex.HasTs = true
 ex.Ts = ts.GetSeconds()*1000 + int64(ts.GetNanos()/1_000_000)
 }
+p.builder.Reset()
 for _, lp := range exProto.GetLabel() {
-ex.Labels = append(ex.Labels, labels.Label{
-Name: lp.GetName(),
-Value: lp.GetValue(),
-})
+p.builder.Add(lp.GetName(), lp.GetValue())
 }
+p.builder.Sort()
+ex.Labels = p.builder.Labels()
 return true
 }

@@ -630,7 +630,6 @@ metric: <
 require.Equal(t, true, found)
 require.Equal(t, exp[i].e[0], e)
 }
-res = res[:0]

 case EntryHistogram:
 m, ts, shs, fhs := p.Histogram()
@@ -642,7 +641,6 @@ metric: <
 require.Equal(t, exp[i].t, int64(0))
 }
 require.Equal(t, exp[i].lset, res)
-res = res[:0]
 require.Equal(t, exp[i].m, string(m))
 if shs != nil {
 require.Equal(t, exp[i].shs, shs)
@@ -353,11 +353,11 @@ func (n *Manager) Send(alerts ...*Alert) {
 for _, a := range alerts {
 lb := labels.NewBuilder(a.Labels)

-for _, l := range n.opts.ExternalLabels {
+n.opts.ExternalLabels.Range(func(l labels.Label) {
 if a.Labels.Get(l.Name) == "" {
 lb.Set(l.Name, l.Value)
 }
-}
+})

 a.Labels = lb.Labels(a.Labels)
 }
@@ -394,8 +394,8 @@ func (n *Manager) relabelAlerts(alerts []*Alert) []*Alert {
 var relabeledAlerts []*Alert

 for _, alert := range alerts {
-labels := relabel.Process(alert.Labels, n.opts.RelabelConfigs...)
+labels, keep := relabel.Process(alert.Labels, n.opts.RelabelConfigs...)
-if labels != nil {
+if keep {
 alert.Labels = labels
 relabeledAlerts = append(relabeledAlerts, alert)
 }
@@ -570,9 +570,9 @@ func alertsToOpenAPIAlerts(alerts []*Alert) models.PostableAlerts {

 func labelsToOpenAPILabelSet(modelLabelSet labels.Labels) models.LabelSet {
 apiLabelSet := models.LabelSet{}
-for _, label := range modelLabelSet {
+modelLabelSet.Range(func(label labels.Label) {
 apiLabelSet[label.Name] = label.Value
-}
+})

 return apiLabelSet
 }
@@ -719,9 +719,9 @@ func AlertmanagerFromGroup(tg *targetgroup.Group, cfg *config.AlertmanagerConfig
 }
 }

-lset := relabel.Process(labels.New(lbls...), cfg.RelabelConfigs...)
+lset, keep := relabel.Process(labels.New(lbls...), cfg.RelabelConfigs...)
-if lset == nil {
+if !keep {
-droppedAlertManagers = append(droppedAlertManagers, alertmanagerLabels{lbls})
+droppedAlertManagers = append(droppedAlertManagers, alertmanagerLabels{labels.New(lbls...)})
 continue
 }

@@ -1567,7 +1567,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) {

 case *parser.NumberLiteral:
 return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
-return append(enh.Out, Sample{Point: Point{V: e.Val}}), nil
+return append(enh.Out, Sample{Point: Point{V: e.Val}, Metric: labels.EmptyLabels()}), nil
 })

 case *parser.StringLiteral:
@@ -2190,7 +2190,7 @@ func resultMetric(lhs, rhs labels.Labels, op parser.ItemType, matching *parser.V
 }
 }

-ret := enh.lb.Labels(nil)
+ret := enh.lb.Labels(labels.EmptyLabels())
 enh.resultMetric[str] = ret
 return ret
 }
@@ -2230,7 +2230,7 @@ func (ev *evaluator) VectorscalarBinop(op parser.ItemType, lhs Vector, rhs Scala
 }

 func dropMetricName(l labels.Labels) labels.Labels {
-return labels.NewBuilder(l).Del(labels.MetricName).Labels(nil)
+return labels.NewBuilder(l).Del(labels.MetricName).Labels(labels.EmptyLabels())
 }

 // scalarBinop evaluates a binary operation between two Scalars.
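Throughout the engine, Builder.Labels(nil) becomes Builder.Labels(labels.EmptyLabels()), keeping nil out of the API. The dropMetricName helper shown above is small enough to reproduce as a runnable sketch:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

// dropMetricName mirrors the engine helper above.
func dropMetricName(l labels.Labels) labels.Labels {
	return labels.NewBuilder(l).Del(labels.MetricName).Labels(labels.EmptyLabels())
}

func main() {
	in := labels.FromStrings("__name__", "http_requests_total", "code", "200")
	fmt.Println(dropMetricName(in)) // {code="200"}
}
```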
@@ -2357,7 +2357,7 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 }
 }

-lb := labels.NewBuilder(nil)
+lb := labels.NewBuilder(labels.EmptyLabels())
 var buf []byte
 for si, s := range vec {
 metric := s.Metric
@@ -2365,7 +2365,7 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 if op == parser.COUNT_VALUES {
 lb.Reset(metric)
 lb.Set(valueLabel, strconv.FormatFloat(s.V, 'f', -1, 64))
-metric = lb.Labels(nil)
+metric = lb.Labels(labels.EmptyLabels())

 // We've changed the metric so we have to recompute the grouping key.
 recomputeGroupingKey = true
@@ -2389,7 +2389,7 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 } else {
 lb.Keep(grouping...)
 }
-m := lb.Labels(nil)
+m := lb.Labels(labels.EmptyLabels())
 newAgg := &groupedAggregation{
 labels: m,
 value: s.V,
@@ -684,6 +684,7 @@ load 10s
 Result: Matrix{
 Series{
 Points: []Point{{V: 1, T: 0}, {V: 1, T: 1000}, {V: 1, T: 2000}},
+Metric: labels.EmptyLabels(),
 },
 },
 Start: time.Unix(0, 0),
@@ -4008,7 +4009,7 @@ func TestSparseHistogram_Sum_Count_AddOperator(t *testing.T) {
 // sum().
 queryString := fmt.Sprintf("sum(%s)", seriesName)
 queryAndCheck(queryString, []Sample{
-{Point{T: ts, H: &c.expected}, labels.Labels{}},
+{Point{T: ts, H: &c.expected}, labels.EmptyLabels()},
 })

 // + operator.
@@ -4017,13 +4018,13 @@ func TestSparseHistogram_Sum_Count_AddOperator(t *testing.T) {
 queryString += fmt.Sprintf(` + ignoring(idx) %s{idx="%d"}`, seriesName, idx)
 }
 queryAndCheck(queryString, []Sample{
-{Point{T: ts, H: &c.expected}, labels.Labels{}},
+{Point{T: ts, H: &c.expected}, labels.EmptyLabels()},
 })

 // count().
 queryString = fmt.Sprintf("count(%s)", seriesName)
 queryAndCheck(queryString, []Sample{
-{Point{T: ts, V: 3}, labels.Labels{}},
+{Point{T: ts, V: 3}, labels.EmptyLabels()},
 })
 })
 }
@@ -957,7 +957,7 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev
 if !ok {
 sample.Metric = labels.NewBuilder(sample.Metric).
 Del(excludedLabels...).
-Labels(nil)
+Labels(labels.EmptyLabels())

 mb = &metricWithBuckets{sample.Metric, nil}
 enh.signatureToMetricWithBuckets[string(enh.lblBuf)] = mb
@@ -1077,7 +1077,7 @@ func funcLabelReplace(vals []parser.Value, args parser.Expressions, enh *EvalNod
 if len(res) > 0 {
 lb.Set(dst, string(res))
 }
-outMetric = lb.Labels(nil)
+outMetric = lb.Labels(labels.EmptyLabels())
 enh.Dmn[h] = outMetric
 }
 }
@@ -1145,7 +1145,7 @@ func funcLabelJoin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHe
 lb.Set(dst, strval)
 }

-outMetric = lb.Labels(nil)
+outMetric = lb.Labels(labels.EmptyLabels())
 enh.Dmn[h] = outMetric
 }

@@ -1383,7 +1383,7 @@ func (s *vectorByReverseValueHeap) Pop() interface{} {
 // createLabelsForAbsentFunction returns the labels that are uniquely and exactly matched
 // in a given expression. It is used in the absent functions.
 func createLabelsForAbsentFunction(expr parser.Expr) labels.Labels {
-m := labels.Labels{}
+b := labels.NewBuilder(labels.EmptyLabels())

 var lm []*labels.Matcher
 switch n := expr.(type) {
@@ -1392,25 +1392,26 @@ func createLabelsForAbsentFunction(expr parser.Expr) labels.Labels {
 case *parser.MatrixSelector:
 lm = n.VectorSelector.(*parser.VectorSelector).LabelMatchers
 default:
-return m
+return labels.EmptyLabels()
 }

-empty := []string{}
+// The 'has' map implements backwards-compatibility for historic behaviour:
+// e.g. in `absent(x{job="a",job="b",foo="bar"})` then `job` is removed from the output.
+// Note this gives arguably wrong behaviour for `absent(x{job="a",job="a",foo="bar"})`.
+has := make(map[string]bool, len(lm))
 for _, ma := range lm {
 if ma.Name == labels.MetricName {
 continue
 }
-if ma.Type == labels.MatchEqual && !m.Has(ma.Name) {
+if ma.Type == labels.MatchEqual && !has[ma.Name] {
-m = labels.NewBuilder(m).Set(ma.Name, ma.Value).Labels(nil)
+b.Set(ma.Name, ma.Value)
+has[ma.Name] = true
 } else {
-empty = append(empty, ma.Name)
+b.Del(ma.Name)
 }
 }

-for _, v := range empty {
-m = labels.NewBuilder(m).Del(v).Labels(nil)
-}
-return m
+return b.Labels(labels.EmptyLabels())
 }

 func stringFromArg(e parser.Expr) string {
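createLabelsForAbsentFunction above now builds its result with one Builder plus a `has` map instead of rebuilding Labels on every matcher. The sketch below isolates that set/delete logic; it substitutes plain name/value pairs for the real *labels.Matcher values, so it is only an illustration of the behaviour:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	matchers := []labels.Label{
		{Name: "job", Value: "a"},
		{Name: "foo", Value: "bar"},
		{Name: "job", Value: "b"}, // repeated name: removed again below
	}

	b := labels.NewBuilder(labels.EmptyLabels())
	has := map[string]bool{}
	for _, m := range matchers {
		if !has[m.Name] {
			b.Set(m.Name, m.Value)
			has[m.Name] = true
		} else {
			b.Del(m.Name)
		}
	}
	fmt.Println(b.Labels(labels.EmptyLabels())) // {foo="bar"}
}
```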
@@ -16,13 +16,13 @@ package parser

 import (
 "math"
-"sort"
 "strconv"
 "time"

 "github.com/prometheus/prometheus/model/labels"
 "github.com/prometheus/prometheus/model/value"
 )

 %}

 %union {
@@ -32,6 +32,7 @@ import (
 matcher *labels.Matcher
 label labels.Label
 labels labels.Labels
+lblList []labels.Label
 strings []string
 series []SequenceValue
 uint uint64
@@ -138,10 +139,9 @@ START_METRIC_SELECTOR
 // Type definitions for grammar rules.
 %type <matchers> label_match_list
 %type <matcher> label_matcher

 %type <item> aggregate_op grouping_label match_op maybe_label metric_identifier unary_op at_modifier_preprocessors
-%type <labels> label_set label_set_list metric
+%type <labels> label_set metric
+%type <lblList> label_set_list
 %type <label> label_set_item
 %type <strings> grouping_label_list grouping_labels maybe_grouping_labels
 %type <series> series_item series_values
@@ -567,7 +567,7 @@ label_matcher : IDENTIFIER match_op STRING
 */

 metric : metric_identifier label_set
-{ $$ = append($2, labels.Label{Name: labels.MetricName, Value: $1.Val}); sort.Sort($$) }
+{ b := labels.NewBuilder($2); b.Set(labels.MetricName, $1.Val); $$ = b.Labels(labels.EmptyLabels()) }
 | label_set
 {$$ = $1}
 ;
@@ -9,7 +9,6 @@ import __yyfmt__ "fmt"

 import (
 "math"
-"sort"
 "strconv"
 "time"

@@ -26,6 +25,7 @@ type yySymType struct {
 matcher *labels.Matcher
 label labels.Label
 labels labels.Labels
+lblList []labels.Label
 strings []string
 series []SequenceValue
 uint uint64
@@ -458,7 +458,7 @@ var yyPact = [...]int{

 var yyPgo = [...]int{
 0, 267, 7, 265, 2, 264, 262, 164, 261, 257,
-115, 253, 181, 8, 252, 4, 5, 251, 250, 0,
+115, 181, 253, 8, 252, 4, 5, 251, 250, 0,
 23, 248, 6, 247, 246, 245, 10, 64, 244, 239,
 1, 231, 230, 9, 217, 21, 214, 213, 205, 201,
 198, 196, 189, 188, 206, 3, 180, 165, 127,
@@ -474,10 +474,10 @@ var yyR1 = [...]int{
 31, 33, 33, 32, 32, 32, 40, 38, 38, 38,
 24, 24, 24, 9, 9, 36, 42, 42, 42, 42,
 42, 43, 44, 44, 44, 35, 35, 35, 1, 1,
-1, 2, 2, 2, 2, 12, 12, 7, 7, 7,
+1, 2, 2, 2, 2, 11, 11, 7, 7, 7,
 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
 7, 7, 7, 7, 7, 7, 7, 7, 7, 10,
-10, 10, 10, 11, 11, 11, 13, 13, 13, 13,
+10, 10, 10, 12, 12, 12, 13, 13, 13, 13,
 48, 18, 18, 18, 18, 17, 17, 17, 17, 17,
 21, 21, 21, 3, 3, 3, 3, 3, 3, 3,
 3, 3, 3, 3, 3, 6, 6, 6, 6, 6,
@@ -513,14 +513,14 @@ var yyR2 = [...]int{
 }

 var yyChk = [...]int{
--1000, -47, 75, 76, 77, 78, 2, 10, -12, -7,
+-1000, -47, 75, 76, 77, 78, 2, 10, -11, -7,
 -10, 47, 48, 62, 49, 50, 51, 12, 32, 33,
 36, 52, 16, 53, 66, 54, 55, 56, 57, 58,
-68, 71, 72, 13, -48, -12, 10, -30, -25, -28,
+68, 71, 72, 13, -48, -11, 10, -30, -25, -28,
 -31, -36, -37, -38, -40, -41, -42, -43, -44, -24,
 -3, 12, 17, 15, 23, -8, -7, -35, 47, 48,
 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
-26, 42, 13, -44, -10, -11, 18, -13, 12, 2,
+26, 42, 13, -44, -10, -12, 18, -13, 12, 2,
 -18, 2, 26, 44, 27, 28, 30, 31, 32, 33,
 34, 35, 36, 37, 38, 39, 41, 42, 66, 43,
 14, -26, -33, 2, 62, 68, 15, -33, -30, -30,
@@ -1492,8 +1492,9 @@ yydefault:
 yyDollar = yyS[yypt-2 : yypt+1]
 //line promql/parser/generated_parser.y:570
 {
-yyVAL.labels = append(yyDollar[2].labels, labels.Label{Name: labels.MetricName, Value: yyDollar[1].item.Val})
-sort.Sort(yyVAL.labels)
+b := labels.NewBuilder(yyDollar[2].labels)
+b.Set(labels.MetricName, yyDollar[1].item.Val)
+yyVAL.labels = b.Labels(labels.EmptyLabels())
 }
 case 96:
 yyDollar = yyS[yypt-1 : yypt+1]
@@ -1505,13 +1506,13 @@ yydefault:
 yyDollar = yyS[yypt-3 : yypt+1]
 //line promql/parser/generated_parser.y:579
 {
-yyVAL.labels = labels.New(yyDollar[2].labels...)
+yyVAL.labels = labels.New(yyDollar[2].lblList...)
 }
 case 120:
 yyDollar = yyS[yypt-4 : yypt+1]
 //line promql/parser/generated_parser.y:581
 {
-yyVAL.labels = labels.New(yyDollar[2].labels...)
+yyVAL.labels = labels.New(yyDollar[2].lblList...)
 }
 case 121:
 yyDollar = yyS[yypt-2 : yypt+1]
|
||||||
|
@ -1529,20 +1530,20 @@ yydefault:
|
||||||
yyDollar = yyS[yypt-3 : yypt+1]
|
yyDollar = yyS[yypt-3 : yypt+1]
|
||||||
//line promql/parser/generated_parser.y:589
|
//line promql/parser/generated_parser.y:589
|
||||||
{
|
{
|
||||||
yyVAL.labels = append(yyDollar[1].labels, yyDollar[3].label)
|
yyVAL.lblList = append(yyDollar[1].lblList, yyDollar[3].label)
|
||||||
}
|
}
|
||||||
case 124:
|
case 124:
|
||||||
yyDollar = yyS[yypt-1 : yypt+1]
|
yyDollar = yyS[yypt-1 : yypt+1]
|
||||||
//line promql/parser/generated_parser.y:591
|
//line promql/parser/generated_parser.y:591
|
||||||
{
|
{
|
||||||
yyVAL.labels = []labels.Label{yyDollar[1].label}
|
yyVAL.lblList = []labels.Label{yyDollar[1].label}
|
||||||
}
|
}
|
||||||
case 125:
|
case 125:
|
||||||
yyDollar = yyS[yypt-2 : yypt+1]
|
yyDollar = yyS[yypt-2 : yypt+1]
|
||||||
//line promql/parser/generated_parser.y:593
|
//line promql/parser/generated_parser.y:593
|
||||||
{
|
{
|
||||||
yylex.(*parser).unexpected("label set", "\",\" or \"}\"")
|
yylex.(*parser).unexpected("label set", "\",\" or \"}\"")
|
||||||
yyVAL.labels = yyDollar[1].labels
|
yyVAL.lblList = yyDollar[1].lblList
|
||||||
}
|
}
|
||||||
case 126:
|
case 126:
|
||||||
yyDollar = yyS[yypt-3 : yypt+1]
|
yyDollar = yyS[yypt-3 : yypt+1]
|
||||||
|
|
|
@ -202,7 +202,7 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
if f, err := parseNumber(defLine); err == nil {
|
if f, err := parseNumber(defLine); err == nil {
|
||||||
cmd.expect(0, nil, parser.SequenceValue{Value: f})
|
cmd.expect(0, parser.SequenceValue{Value: f})
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
metric, vals, err := parser.ParseSeriesDesc(defLine)
|
metric, vals, err := parser.ParseSeriesDesc(defLine)
|
||||||
|
@ -218,7 +218,7 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
|
||||||
if len(vals) > 1 {
|
if len(vals) > 1 {
|
||||||
return i, nil, raise(i, "expecting multiple values in instant evaluation not allowed")
|
return i, nil, raise(i, "expecting multiple values in instant evaluation not allowed")
|
||||||
}
|
}
|
||||||
cmd.expect(j, metric, vals...)
|
cmd.expectMetric(j, metric, vals...)
|
||||||
}
|
}
|
||||||
return i, cmd, nil
|
return i, cmd, nil
|
||||||
}
|
}
|
||||||
|
@ -368,13 +368,15 @@ func (ev *evalCmd) String() string {
|
||||||
return "eval"
|
return "eval"
|
||||||
}
|
}
|
||||||
|
|
||||||
// expect adds a new metric with a sequence of values to the set of expected
|
// expect adds a sequence of values to the set of expected
|
||||||
// results for the query.
|
// results for the query.
|
||||||
func (ev *evalCmd) expect(pos int, m labels.Labels, vals ...parser.SequenceValue) {
|
func (ev *evalCmd) expect(pos int, vals ...parser.SequenceValue) {
|
||||||
if m == nil {
|
ev.expected[0] = entry{pos: pos, vals: vals}
|
||||||
ev.expected[0] = entry{pos: pos, vals: vals}
|
}
|
||||||
return
|
|
||||||
}
|
// expectMetric adds a new metric with a sequence of values to the set of expected
|
||||||
|
// results for the query.
|
||||||
|
func (ev *evalCmd) expectMetric(pos int, m labels.Labels, vals ...parser.SequenceValue) {
|
||||||
h := m.Hash()
|
h := m.Hash()
|
||||||
ev.metrics[h] = m
|
ev.metrics[h] = m
|
||||||
ev.expected[h] = entry{pos: pos, vals: vals}
|
ev.expected[h] = entry{pos: pos, vals: vals}
|
||||||
|
|
|
@ -127,11 +127,11 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
for _, s := range tc.series {
|
for _, s := range tc.series {
|
||||||
var matchers []*labels.Matcher
|
var matchers []*labels.Matcher
|
||||||
for _, label := range s.Metric {
|
s.Metric.Range(func(label labels.Label) {
|
||||||
m, err := labels.NewMatcher(labels.MatchEqual, label.Name, label.Value)
|
m, err := labels.NewMatcher(labels.MatchEqual, label.Name, label.Value)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
matchers = append(matchers, m)
|
matchers = append(matchers, m)
|
||||||
}
|
})
|
||||||
|
|
||||||
// Get the series for the matcher.
|
// Get the series for the matcher.
|
||||||
ss := querier.Select(false, nil, matchers...)
|
ss := querier.Select(false, nil, matchers...)
|
||||||
|
|
|
@ -146,10 +146,7 @@ func NewAlertingRule(
|
||||||
labels, annotations, externalLabels labels.Labels, externalURL string,
|
labels, annotations, externalLabels labels.Labels, externalURL string,
|
||||||
restored bool, logger log.Logger,
|
restored bool, logger log.Logger,
|
||||||
) *AlertingRule {
|
) *AlertingRule {
|
||||||
el := make(map[string]string, len(externalLabels))
|
el := externalLabels.Map()
|
||||||
for _, lbl := range externalLabels {
|
|
||||||
el[lbl.Name] = lbl.Value
|
|
||||||
}
|
|
||||||
|
|
||||||
return &AlertingRule{
|
return &AlertingRule{
|
||||||
name: name,
|
name: name,
|
||||||
|
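Editor's note: the constructor hunk above asks the label set for a map via Map instead of building one by indexing the slice. A small hedged sketch of the same pattern, for example when preparing template data; the variable names are illustrative.

    package main

    import (
        "fmt"

        "github.com/prometheus/prometheus/model/labels"
    )

    func main() {
        externalLabels := labels.FromStrings("cluster", "eu-1", "replica", "a")

        // Map returns a map[string]string keyed by label name,
        // replacing the hand-written copy loop.
        el := externalLabels.Map()
        fmt.Println(el["cluster"], el["replica"]) // eu-1 a
    }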
@ -217,16 +214,16 @@ func (r *AlertingRule) Annotations() labels.Labels {
|
||||||
func (r *AlertingRule) sample(alert *Alert, ts time.Time) promql.Sample {
|
func (r *AlertingRule) sample(alert *Alert, ts time.Time) promql.Sample {
|
||||||
lb := labels.NewBuilder(r.labels)
|
lb := labels.NewBuilder(r.labels)
|
||||||
|
|
||||||
for _, l := range alert.Labels {
|
alert.Labels.Range(func(l labels.Label) {
|
||||||
lb.Set(l.Name, l.Value)
|
lb.Set(l.Name, l.Value)
|
||||||
}
|
})
|
||||||
|
|
||||||
lb.Set(labels.MetricName, alertMetricName)
|
lb.Set(labels.MetricName, alertMetricName)
|
||||||
lb.Set(labels.AlertName, r.name)
|
lb.Set(labels.AlertName, r.name)
|
||||||
lb.Set(alertStateLabel, alert.State.String())
|
lb.Set(alertStateLabel, alert.State.String())
|
||||||
|
|
||||||
s := promql.Sample{
|
s := promql.Sample{
|
||||||
Metric: lb.Labels(nil),
|
Metric: lb.Labels(labels.EmptyLabels()),
|
||||||
Point: promql.Point{T: timestamp.FromTime(ts), V: 1},
|
Point: promql.Point{T: timestamp.FromTime(ts), V: 1},
|
||||||
}
|
}
|
||||||
return s
|
return s
|
||||||
|
@ -236,15 +233,15 @@ func (r *AlertingRule) sample(alert *Alert, ts time.Time) promql.Sample {
|
||||||
func (r *AlertingRule) forStateSample(alert *Alert, ts time.Time, v float64) promql.Sample {
|
func (r *AlertingRule) forStateSample(alert *Alert, ts time.Time, v float64) promql.Sample {
|
||||||
lb := labels.NewBuilder(r.labels)
|
lb := labels.NewBuilder(r.labels)
|
||||||
|
|
||||||
for _, l := range alert.Labels {
|
alert.Labels.Range(func(l labels.Label) {
|
||||||
lb.Set(l.Name, l.Value)
|
lb.Set(l.Name, l.Value)
|
||||||
}
|
})
|
||||||
|
|
||||||
lb.Set(labels.MetricName, alertForStateMetricName)
|
lb.Set(labels.MetricName, alertForStateMetricName)
|
||||||
lb.Set(labels.AlertName, r.name)
|
lb.Set(labels.AlertName, r.name)
|
||||||
|
|
||||||
s := promql.Sample{
|
s := promql.Sample{
|
||||||
Metric: lb.Labels(nil),
|
Metric: lb.Labels(labels.EmptyLabels()),
|
||||||
Point: promql.Point{T: timestamp.FromTime(ts), V: v},
|
Point: promql.Point{T: timestamp.FromTime(ts), V: v},
|
||||||
}
|
}
|
||||||
return s
|
return s
|
||||||
|
@ -254,13 +251,13 @@ func (r *AlertingRule) forStateSample(alert *Alert, ts time.Time, v float64) pro
|
||||||
func (r *AlertingRule) QueryforStateSeries(alert *Alert, q storage.Querier) (storage.Series, error) {
|
func (r *AlertingRule) QueryforStateSeries(alert *Alert, q storage.Querier) (storage.Series, error) {
|
||||||
smpl := r.forStateSample(alert, time.Now(), 0)
|
smpl := r.forStateSample(alert, time.Now(), 0)
|
||||||
var matchers []*labels.Matcher
|
var matchers []*labels.Matcher
|
||||||
for _, l := range smpl.Metric {
|
smpl.Metric.Range(func(l labels.Label) {
|
||||||
mt, err := labels.NewMatcher(labels.MatchEqual, l.Name, l.Value)
|
mt, err := labels.NewMatcher(labels.MatchEqual, l.Name, l.Value)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
panic(err)
|
||||||
}
|
}
|
||||||
matchers = append(matchers, mt)
|
matchers = append(matchers, mt)
|
||||||
}
|
})
|
||||||
sset := q.Select(false, nil, matchers...)
|
sset := q.Select(false, nil, matchers...)
|
||||||
|
|
||||||
var s storage.Series
|
var s storage.Series
|
||||||
|
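Editor's note: QueryforStateSeries above walks the sample's labels with Range to build one equality matcher per label, and the next hunk compares cardinalities with Len rather than len on a slice. A hedged sketch of that matcher-building pattern; the label values are made up.

    package main

    import (
        "fmt"

        "github.com/prometheus/prometheus/model/labels"
    )

    func main() {
        metric := labels.FromStrings("__name__", "ALERTS_FOR_STATE", "alertname", "HighLatency")

        var matchers []*labels.Matcher
        metric.Range(func(l labels.Label) {
            m, err := labels.NewMatcher(labels.MatchEqual, l.Name, l.Value)
            if err != nil {
                panic(err) // names taken from an existing label set are expected to be valid
            }
            matchers = append(matchers, m)
        })

        // Len replaces len(...) now that Labels is no longer guaranteed to be a slice.
        fmt.Println(len(matchers) == metric.Len()) // true
    }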
@ -268,7 +265,7 @@ func (r *AlertingRule) QueryforStateSeries(alert *Alert, q storage.Querier) (sto
|
||||||
// Query assures that smpl.Metric is included in sset.At().Labels(),
|
// Query assures that smpl.Metric is included in sset.At().Labels(),
|
||||||
// hence just checking the length would act like equality.
|
// hence just checking the length would act like equality.
|
||||||
// (This is faster than calling labels.Compare again as we already have some info).
|
// (This is faster than calling labels.Compare again as we already have some info).
|
||||||
if len(sset.At().Labels()) == len(matchers) {
|
if sset.At().Labels().Len() == len(matchers) {
|
||||||
s = sset.At()
|
s = sset.At()
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
@ -327,10 +324,7 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc,
|
||||||
alerts := make(map[uint64]*Alert, len(res))
|
alerts := make(map[uint64]*Alert, len(res))
|
||||||
for _, smpl := range res {
|
for _, smpl := range res {
|
||||||
// Provide the alert information to the template.
|
// Provide the alert information to the template.
|
||||||
l := make(map[string]string, len(smpl.Metric))
|
l := smpl.Metric.Map()
|
||||||
for _, lbl := range smpl.Metric {
|
|
||||||
l[lbl.Name] = lbl.Value
|
|
||||||
}
|
|
||||||
|
|
||||||
tmplData := template.AlertTemplateData(l, r.externalLabels, r.externalURL, smpl.V)
|
tmplData := template.AlertTemplateData(l, r.externalLabels, r.externalURL, smpl.V)
|
||||||
// Inject some convenience variables that are easier to remember for users
|
// Inject some convenience variables that are easier to remember for users
|
||||||
|
@ -363,17 +357,18 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc,
|
||||||
|
|
||||||
lb := labels.NewBuilder(smpl.Metric).Del(labels.MetricName)
|
lb := labels.NewBuilder(smpl.Metric).Del(labels.MetricName)
|
||||||
|
|
||||||
for _, l := range r.labels {
|
r.labels.Range(func(l labels.Label) {
|
||||||
lb.Set(l.Name, expand(l.Value))
|
lb.Set(l.Name, expand(l.Value))
|
||||||
}
|
})
|
||||||
lb.Set(labels.AlertName, r.Name())
|
lb.Set(labels.AlertName, r.Name())
|
||||||
|
|
||||||
annotations := make(labels.Labels, 0, len(r.annotations))
|
sb := labels.ScratchBuilder{}
|
||||||
for _, a := range r.annotations {
|
r.annotations.Range(func(a labels.Label) {
|
||||||
annotations = append(annotations, labels.Label{Name: a.Name, Value: expand(a.Value)})
|
sb.Add(a.Name, expand(a.Value))
|
||||||
}
|
})
|
||||||
|
annotations := sb.Labels()
|
||||||
|
|
||||||
lbs := lb.Labels(nil)
|
lbs := lb.Labels(labels.EmptyLabels())
|
||||||
h := lbs.Hash()
|
h := lbs.Hash()
|
||||||
resultFPs[h] = struct{}{}
|
resultFPs[h] = struct{}{}
|
||||||
|
|
||||||
|
|
|
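Editor's note: the Eval hunk above composes the alert label set with a Builder (Del to strip __name__, Set for rule labels), materialises it once with Labels(labels.EmptyLabels()), and then hashes it. A minimal sketch of that flow with invented inputs.

    package main

    import (
        "fmt"

        "github.com/prometheus/prometheus/model/labels"
    )

    func main() {
        sample := labels.FromStrings("__name__", "http_errors_total", "job", "api", "instance", "host:9090")
        ruleLabels := labels.FromStrings("severity", "page")

        lb := labels.NewBuilder(sample).Del(labels.MetricName)
        ruleLabels.Range(func(l labels.Label) {
            lb.Set(l.Name, l.Value) // rule labels override sample labels
        })
        lb.Set(labels.AlertName, "HighErrorRate")

        lbs := lb.Labels(labels.EmptyLabels())
        fmt.Println(lbs, lbs.Hash())
    }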
@ -85,11 +85,11 @@ func (rule *RecordingRule) Eval(ctx context.Context, ts time.Time, query QueryFu
|
||||||
|
|
||||||
lb.Set(labels.MetricName, rule.name)
|
lb.Set(labels.MetricName, rule.name)
|
||||||
|
|
||||||
for _, l := range rule.labels {
|
rule.labels.Range(func(l labels.Label) {
|
||||||
lb.Set(l.Name, l.Value)
|
lb.Set(l.Name, l.Value)
|
||||||
}
|
})
|
||||||
|
|
||||||
sample.Metric = lb.Labels(nil)
|
sample.Metric = lb.Labels(labels.EmptyLabels())
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check that the rule does not produce identical metrics after applying
|
// Check that the rule does not produce identical metrics after applying
|
||||||
|
|
|
@ -149,8 +149,8 @@ func TestPopulateLabels(t *testing.T) {
|
||||||
ScrapeInterval: model.Duration(time.Second),
|
ScrapeInterval: model.Duration(time.Second),
|
||||||
ScrapeTimeout: model.Duration(time.Second),
|
ScrapeTimeout: model.Duration(time.Second),
|
||||||
},
|
},
|
||||||
res: nil,
|
res: labels.EmptyLabels(),
|
||||||
resOrig: nil,
|
resOrig: labels.EmptyLabels(),
|
||||||
err: "no address",
|
err: "no address",
|
||||||
},
|
},
|
||||||
// Address label missing, but added in relabelling.
|
// Address label missing, but added in relabelling.
|
||||||
|
@ -242,8 +242,8 @@ func TestPopulateLabels(t *testing.T) {
|
||||||
ScrapeInterval: model.Duration(time.Second),
|
ScrapeInterval: model.Duration(time.Second),
|
||||||
ScrapeTimeout: model.Duration(time.Second),
|
ScrapeTimeout: model.Duration(time.Second),
|
||||||
},
|
},
|
||||||
res: nil,
|
res: labels.EmptyLabels(),
|
||||||
resOrig: nil,
|
resOrig: labels.EmptyLabels(),
|
||||||
err: "invalid label value for \"custom\": \"\\xbd\"",
|
err: "invalid label value for \"custom\": \"\\xbd\"",
|
||||||
},
|
},
|
||||||
// Invalid duration in interval label.
|
// Invalid duration in interval label.
|
||||||
|
@ -259,8 +259,8 @@ func TestPopulateLabels(t *testing.T) {
|
||||||
ScrapeInterval: model.Duration(time.Second),
|
ScrapeInterval: model.Duration(time.Second),
|
||||||
ScrapeTimeout: model.Duration(time.Second),
|
ScrapeTimeout: model.Duration(time.Second),
|
||||||
},
|
},
|
||||||
res: nil,
|
res: labels.EmptyLabels(),
|
||||||
resOrig: nil,
|
resOrig: labels.EmptyLabels(),
|
||||||
err: "error parsing scrape interval: not a valid duration string: \"2notseconds\"",
|
err: "error parsing scrape interval: not a valid duration string: \"2notseconds\"",
|
||||||
},
|
},
|
||||||
// Invalid duration in timeout label.
|
// Invalid duration in timeout label.
|
||||||
|
@ -276,8 +276,8 @@ func TestPopulateLabels(t *testing.T) {
|
||||||
ScrapeInterval: model.Duration(time.Second),
|
ScrapeInterval: model.Duration(time.Second),
|
||||||
ScrapeTimeout: model.Duration(time.Second),
|
ScrapeTimeout: model.Duration(time.Second),
|
||||||
},
|
},
|
||||||
res: nil,
|
res: labels.EmptyLabels(),
|
||||||
resOrig: nil,
|
resOrig: labels.EmptyLabels(),
|
||||||
err: "error parsing scrape timeout: not a valid duration string: \"2notseconds\"",
|
err: "error parsing scrape timeout: not a valid duration string: \"2notseconds\"",
|
||||||
},
|
},
|
||||||
// 0 interval in timeout label.
|
// 0 interval in timeout label.
|
||||||
|
@ -293,8 +293,8 @@ func TestPopulateLabels(t *testing.T) {
|
||||||
ScrapeInterval: model.Duration(time.Second),
|
ScrapeInterval: model.Duration(time.Second),
|
||||||
ScrapeTimeout: model.Duration(time.Second),
|
ScrapeTimeout: model.Duration(time.Second),
|
||||||
},
|
},
|
||||||
res: nil,
|
res: labels.EmptyLabels(),
|
||||||
resOrig: nil,
|
resOrig: labels.EmptyLabels(),
|
||||||
err: "scrape interval cannot be 0",
|
err: "scrape interval cannot be 0",
|
||||||
},
|
},
|
||||||
// 0 duration in timeout label.
|
// 0 duration in timeout label.
|
||||||
|
@ -310,8 +310,8 @@ func TestPopulateLabels(t *testing.T) {
|
||||||
ScrapeInterval: model.Duration(time.Second),
|
ScrapeInterval: model.Duration(time.Second),
|
||||||
ScrapeTimeout: model.Duration(time.Second),
|
ScrapeTimeout: model.Duration(time.Second),
|
||||||
},
|
},
|
||||||
res: nil,
|
res: labels.EmptyLabels(),
|
||||||
resOrig: nil,
|
resOrig: labels.EmptyLabels(),
|
||||||
err: "scrape timeout cannot be 0",
|
err: "scrape timeout cannot be 0",
|
||||||
},
|
},
|
||||||
// Timeout less than interval.
|
// Timeout less than interval.
|
||||||
|
@ -328,8 +328,8 @@ func TestPopulateLabels(t *testing.T) {
|
||||||
ScrapeInterval: model.Duration(time.Second),
|
ScrapeInterval: model.Duration(time.Second),
|
||||||
ScrapeTimeout: model.Duration(time.Second),
|
ScrapeTimeout: model.Duration(time.Second),
|
||||||
},
|
},
|
||||||
res: nil,
|
res: labels.EmptyLabels(),
|
||||||
resOrig: nil,
|
resOrig: labels.EmptyLabels(),
|
||||||
err: "scrape timeout cannot be greater than scrape interval (\"2s\" > \"1s\")",
|
err: "scrape timeout cannot be greater than scrape interval (\"2s\" > \"1s\")",
|
||||||
},
|
},
|
||||||
// Don't attach default port.
|
// Don't attach default port.
|
||||||
|
|
|
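Editor's note: these test expectations switch from nil to labels.EmptyLabels(). With the abstraction, "no labels" is an explicit empty value rather than a nil slice, and callers test for it with IsEmpty. A hedged sketch of the convention; the helper name is made up for illustration.

    package main

    import (
        "fmt"

        "github.com/prometheus/prometheus/model/labels"
    )

    // dropTarget mimics returning "no labels" to signal that a target was dropped.
    // It is an illustrative helper, not part of the Prometheus code base.
    func dropTarget() labels.Labels {
        return labels.EmptyLabels() // not nil
    }

    func main() {
        lset := dropTarget()
        fmt.Println(lset.IsEmpty()) // true
        fmt.Println(lset.Len())     // 0
    }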
@ -268,6 +268,7 @@ type scrapeLoopOptions struct {
|
||||||
|
|
||||||
const maxAheadTime = 10 * time.Minute
|
const maxAheadTime = 10 * time.Minute
|
||||||
|
|
||||||
|
// returning an empty label set is interpreted as "drop"
|
||||||
type labelsMutator func(labels.Labels) labels.Labels
|
type labelsMutator func(labels.Labels) labels.Labels
|
||||||
|
|
||||||
func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed uint64, logger log.Logger, options *Options) (*scrapePool, error) {
|
func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed uint64, logger log.Logger, options *Options) (*scrapePool, error) {
|
||||||
|
@ -498,9 +499,9 @@ func (sp *scrapePool) Sync(tgs []*targetgroup.Group) {
|
||||||
}
|
}
|
||||||
targetSyncFailed.WithLabelValues(sp.config.JobName).Add(float64(len(failures)))
|
targetSyncFailed.WithLabelValues(sp.config.JobName).Add(float64(len(failures)))
|
||||||
for _, t := range targets {
|
for _, t := range targets {
|
||||||
if t.Labels().Len() > 0 {
|
if !t.Labels().IsEmpty() {
|
||||||
all = append(all, t)
|
all = append(all, t)
|
||||||
} else if t.DiscoveredLabels().Len() > 0 {
|
} else if !t.DiscoveredLabels().IsEmpty() {
|
||||||
sp.droppedTargets = append(sp.droppedTargets, t)
|
sp.droppedTargets = append(sp.droppedTargets, t)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -634,7 +635,7 @@ func verifyLabelLimits(lset labels.Labels, limits *labelLimits) error {
|
||||||
|
|
||||||
met := lset.Get(labels.MetricName)
|
met := lset.Get(labels.MetricName)
|
||||||
if limits.labelLimit > 0 {
|
if limits.labelLimit > 0 {
|
||||||
nbLabels := len(lset)
|
nbLabels := lset.Len()
|
||||||
if nbLabels > int(limits.labelLimit) {
|
if nbLabels > int(limits.labelLimit) {
|
||||||
return fmt.Errorf("label_limit exceeded (metric: %.50s, number of labels: %d, limit: %d)", met, nbLabels, limits.labelLimit)
|
return fmt.Errorf("label_limit exceeded (metric: %.50s, number of labels: %d, limit: %d)", met, nbLabels, limits.labelLimit)
|
||||||
}
|
}
|
||||||
|
@ -644,7 +645,7 @@ func verifyLabelLimits(lset labels.Labels, limits *labelLimits) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, l := range lset {
|
return lset.Validate(func(l labels.Label) error {
|
||||||
if limits.labelNameLengthLimit > 0 {
|
if limits.labelNameLengthLimit > 0 {
|
||||||
nameLength := len(l.Name)
|
nameLength := len(l.Name)
|
||||||
if nameLength > int(limits.labelNameLengthLimit) {
|
if nameLength > int(limits.labelNameLengthLimit) {
|
||||||
|
@ -658,8 +659,8 @@ func verifyLabelLimits(lset labels.Labels, limits *labelLimits) error {
|
||||||
return fmt.Errorf("label_value_length_limit exceeded (metric: %.50s, label name: %.50s, value: %.50q, length: %d, limit: %d)", met, l.Name, l.Value, valueLength, limits.labelValueLengthLimit)
|
return fmt.Errorf("label_value_length_limit exceeded (metric: %.50s, label name: %.50s, value: %.50q, length: %d, limit: %d)", met, l.Name, l.Value, valueLength, limits.labelValueLengthLimit)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
return nil
|
||||||
return nil
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
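Editor's note: verifyLabelLimits above now reports per-label violations through Validate, which walks the set and stops at the first non-nil error, instead of a for-range loop over a slice. A rough sketch of that pattern with an invented length limit.

    package main

    import (
        "fmt"

        "github.com/prometheus/prometheus/model/labels"
    )

    // checkValueLength is an illustrative stand-in for a label limit check.
    func checkValueLength(lset labels.Labels, max int) error {
        return lset.Validate(func(l labels.Label) error {
            if len(l.Value) > max {
                return fmt.Errorf("label %q value too long: %d > %d", l.Name, len(l.Value), max)
            }
            return nil // keep iterating
        })
    }

    func main() {
        lset := labels.FromStrings("job", "api", "path", "/very/long/endpoint/name")
        fmt.Println(checkValueLength(lset, 10)) // label "path" value too long: 24 > 10
    }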
func mutateSampleLabels(lset labels.Labels, target *Target, honor bool, rc []*relabel.Config) labels.Labels {
|
func mutateSampleLabels(lset labels.Labels, target *Target, honor bool, rc []*relabel.Config) labels.Labels {
|
||||||
|
@ -667,37 +668,37 @@ func mutateSampleLabels(lset labels.Labels, target *Target, honor bool, rc []*re
|
||||||
targetLabels := target.Labels()
|
targetLabels := target.Labels()
|
||||||
|
|
||||||
if honor {
|
if honor {
|
||||||
for _, l := range targetLabels {
|
targetLabels.Range(func(l labels.Label) {
|
||||||
if !lset.Has(l.Name) {
|
if !lset.Has(l.Name) {
|
||||||
lb.Set(l.Name, l.Value)
|
lb.Set(l.Name, l.Value)
|
||||||
}
|
}
|
||||||
}
|
})
|
||||||
} else {
|
} else {
|
||||||
var conflictingExposedLabels labels.Labels
|
var conflictingExposedLabels []labels.Label
|
||||||
for _, l := range targetLabels {
|
targetLabels.Range(func(l labels.Label) {
|
||||||
existingValue := lset.Get(l.Name)
|
existingValue := lset.Get(l.Name)
|
||||||
if existingValue != "" {
|
if existingValue != "" {
|
||||||
conflictingExposedLabels = append(conflictingExposedLabels, labels.Label{Name: l.Name, Value: existingValue})
|
conflictingExposedLabels = append(conflictingExposedLabels, labels.Label{Name: l.Name, Value: existingValue})
|
||||||
}
|
}
|
||||||
// It is now safe to set the target label.
|
// It is now safe to set the target label.
|
||||||
lb.Set(l.Name, l.Value)
|
lb.Set(l.Name, l.Value)
|
||||||
}
|
})
|
||||||
|
|
||||||
if len(conflictingExposedLabels) > 0 {
|
if len(conflictingExposedLabels) > 0 {
|
||||||
resolveConflictingExposedLabels(lb, lset, targetLabels, conflictingExposedLabels)
|
resolveConflictingExposedLabels(lb, lset, targetLabels, conflictingExposedLabels)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
res := lb.Labels(nil)
|
res := lb.Labels(labels.EmptyLabels())
|
||||||
|
|
||||||
if len(rc) > 0 {
|
if len(rc) > 0 {
|
||||||
res = relabel.Process(res, rc...)
|
res, _ = relabel.Process(res, rc...)
|
||||||
}
|
}
|
||||||
|
|
||||||
return res
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
func resolveConflictingExposedLabels(lb *labels.Builder, exposedLabels, targetLabels, conflictingExposedLabels labels.Labels) {
|
func resolveConflictingExposedLabels(lb *labels.Builder, exposedLabels, targetLabels labels.Labels, conflictingExposedLabels []labels.Label) {
|
||||||
sort.SliceStable(conflictingExposedLabels, func(i, j int) bool {
|
sort.SliceStable(conflictingExposedLabels, func(i, j int) bool {
|
||||||
return len(conflictingExposedLabels[i].Name) < len(conflictingExposedLabels[j].Name)
|
return len(conflictingExposedLabels[i].Name) < len(conflictingExposedLabels[j].Name)
|
||||||
})
|
})
|
||||||
|
@ -708,7 +709,7 @@ func resolveConflictingExposedLabels(lb *labels.Builder, exposedLabels, targetLa
|
||||||
newName = model.ExportedLabelPrefix + newName
|
newName = model.ExportedLabelPrefix + newName
|
||||||
if !exposedLabels.Has(newName) &&
|
if !exposedLabels.Has(newName) &&
|
||||||
!targetLabels.Has(newName) &&
|
!targetLabels.Has(newName) &&
|
||||||
!conflictingExposedLabels[:i].Has(newName) {
|
!labelSliceHas(conflictingExposedLabels[:i], newName) {
|
||||||
conflictingExposedLabels[i].Name = newName
|
conflictingExposedLabels[i].Name = newName
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
@ -720,15 +721,24 @@ func resolveConflictingExposedLabels(lb *labels.Builder, exposedLabels, targetLa
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func labelSliceHas(lbls []labels.Label, name string) bool {
|
||||||
|
for _, l := range lbls {
|
||||||
|
if l.Name == name {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
func mutateReportSampleLabels(lset labels.Labels, target *Target) labels.Labels {
|
func mutateReportSampleLabels(lset labels.Labels, target *Target) labels.Labels {
|
||||||
lb := labels.NewBuilder(lset)
|
lb := labels.NewBuilder(lset)
|
||||||
|
|
||||||
for _, l := range target.Labels() {
|
target.Labels().Range(func(l labels.Label) {
|
||||||
lb.Set(model.ExportedLabelPrefix+l.Name, lset.Get(l.Name))
|
lb.Set(model.ExportedLabelPrefix+l.Name, lset.Get(l.Name))
|
||||||
lb.Set(l.Name, l.Value)
|
lb.Set(l.Name, l.Value)
|
||||||
}
|
})
|
||||||
|
|
||||||
return lb.Labels(nil)
|
return lb.Labels(labels.EmptyLabels())
|
||||||
}
|
}
|
||||||
|
|
||||||
// appender returns an appender for ingested samples from the target.
|
// appender returns an appender for ingested samples from the target.
|
||||||
|
@ -1599,8 +1609,8 @@ loop:
|
||||||
// and relabeling and store the final label set.
|
// and relabeling and store the final label set.
|
||||||
lset = sl.sampleMutator(lset)
|
lset = sl.sampleMutator(lset)
|
||||||
|
|
||||||
// The label set may be set to nil to indicate dropping.
|
// The label set may be set to empty to indicate dropping.
|
||||||
if lset == nil {
|
if lset.IsEmpty() {
|
||||||
sl.cache.addDropped(mets)
|
sl.cache.addDropped(mets)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
@ -1857,12 +1867,10 @@ func (sl *scrapeLoop) addReportSample(app storage.Appender, s string, t int64, v
|
||||||
ref = ce.ref
|
ref = ce.ref
|
||||||
lset = ce.lset
|
lset = ce.lset
|
||||||
} else {
|
} else {
|
||||||
lset = labels.Labels{
|
// The constants are suffixed with the invalid \xff unicode rune to avoid collisions
|
||||||
// The constants are suffixed with the invalid \xff unicode rune to avoid collisions
|
// with scraped metrics in the cache.
|
||||||
// with scraped metrics in the cache.
|
// We have to drop it when building the actual metric.
|
||||||
// We have to drop it when building the actual metric.
|
lset = labels.FromStrings(labels.MetricName, s[:len(s)-1])
|
||||||
labels.Label{Name: labels.MetricName, Value: s[:len(s)-1]},
|
|
||||||
}
|
|
||||||
lset = sl.reportSampleMutator(lset)
|
lset = sl.reportSampleMutator(lset)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
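Editor's note: alongside the labels abstraction, relabel.Process in the hunks above returns an explicit keep flag instead of signalling a dropped series with a nil label set; PopulateLabels in the target.go hunks further down checks the same flag. A hedged sketch of the new call shape follows; the relabel rule is invented for illustration and assumes relabel.MustNewRegexp and the Drop action behave as in the model/relabel package.

    package main

    import (
        "fmt"

        "github.com/prometheus/common/model"

        "github.com/prometheus/prometheus/model/labels"
        "github.com/prometheus/prometheus/model/relabel"
    )

    func main() {
        lset := labels.FromStrings("__name__", "up", "deleteme", "yes")

        cfg := &relabel.Config{
            Action:       relabel.Drop,
            SourceLabels: model.LabelNames{"deleteme"},
            Regex:        relabel.MustNewRegexp("yes"),
        }

        // Process now returns (labels, keep); keep == false replaces the old nil result.
        res, keep := relabel.Process(lset, cfg)
        if !keep {
            fmt.Println("series dropped by relabelling")
            return
        }
        fmt.Println(res)
    }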
@ -1623,7 +1623,7 @@ func TestScrapeLoopAppendSampleLimit(t *testing.T) {
|
||||||
nil, nil, nil,
|
nil, nil, nil,
|
||||||
func(l labels.Labels) labels.Labels {
|
func(l labels.Labels) labels.Labels {
|
||||||
if l.Has("deleteme") {
|
if l.Has("deleteme") {
|
||||||
return nil
|
return labels.EmptyLabels()
|
||||||
}
|
}
|
||||||
return l
|
return l
|
||||||
},
|
},
|
||||||
|
|
|
@ -172,22 +172,20 @@ func (t *Target) offset(interval time.Duration, jitterSeed uint64) time.Duration
|
||||||
|
|
||||||
// Labels returns a copy of the set of all public labels of the target.
|
// Labels returns a copy of the set of all public labels of the target.
|
||||||
func (t *Target) Labels() labels.Labels {
|
func (t *Target) Labels() labels.Labels {
|
||||||
lset := make(labels.Labels, 0, len(t.labels))
|
b := labels.NewScratchBuilder(t.labels.Len())
|
||||||
for _, l := range t.labels {
|
t.labels.Range(func(l labels.Label) {
|
||||||
if !strings.HasPrefix(l.Name, model.ReservedLabelPrefix) {
|
if !strings.HasPrefix(l.Name, model.ReservedLabelPrefix) {
|
||||||
lset = append(lset, l)
|
b.Add(l.Name, l.Value)
|
||||||
}
|
}
|
||||||
}
|
})
|
||||||
return lset
|
return b.Labels()
|
||||||
}
|
}
|
||||||
|
|
||||||
// DiscoveredLabels returns a copy of the target's labels before any processing.
|
// DiscoveredLabels returns a copy of the target's labels before any processing.
|
||||||
func (t *Target) DiscoveredLabels() labels.Labels {
|
func (t *Target) DiscoveredLabels() labels.Labels {
|
||||||
t.mtx.Lock()
|
t.mtx.Lock()
|
||||||
defer t.mtx.Unlock()
|
defer t.mtx.Unlock()
|
||||||
lset := make(labels.Labels, len(t.discoveredLabels))
|
return t.discoveredLabels.Copy()
|
||||||
copy(lset, t.discoveredLabels)
|
|
||||||
return lset
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetDiscoveredLabels sets new DiscoveredLabels
|
// SetDiscoveredLabels sets new DiscoveredLabels
|
||||||
|
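Editor's note: Target.Labels above builds its filtered copy with a ScratchBuilder sized via Len, adding only the entries the Range callback keeps, while DiscoveredLabels simply calls Copy. A small sketch of the same filtered-copy pattern; the reserved-prefix check mirrors the hunk and the input labels are made up.

    package main

    import (
        "fmt"
        "strings"

        "github.com/prometheus/common/model"

        "github.com/prometheus/prometheus/model/labels"
    )

    // publicLabels copies lset, dropping names with the reserved "__" prefix.
    func publicLabels(lset labels.Labels) labels.Labels {
        b := labels.NewScratchBuilder(lset.Len())
        lset.Range(func(l labels.Label) {
            if !strings.HasPrefix(l.Name, model.ReservedLabelPrefix) {
                b.Add(l.Name, l.Value)
            }
        })
        return b.Labels() // the input is already sorted, so no Sort call is needed here
    }

    func main() {
        in := labels.FromStrings("__address__", "host:9090", "job", "api")
        fmt.Println(publicLabels(in)) // {job="api"}
    }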
@ -205,9 +203,9 @@ func (t *Target) URL() *url.URL {
|
||||||
params[k] = make([]string, len(v))
|
params[k] = make([]string, len(v))
|
||||||
copy(params[k], v)
|
copy(params[k], v)
|
||||||
}
|
}
|
||||||
for _, l := range t.labels {
|
t.labels.Range(func(l labels.Label) {
|
||||||
if !strings.HasPrefix(l.Name, model.ParamLabelPrefix) {
|
if !strings.HasPrefix(l.Name, model.ParamLabelPrefix) {
|
||||||
continue
|
return
|
||||||
}
|
}
|
||||||
ks := l.Name[len(model.ParamLabelPrefix):]
|
ks := l.Name[len(model.ParamLabelPrefix):]
|
||||||
|
|
||||||
|
@ -216,7 +214,7 @@ func (t *Target) URL() *url.URL {
|
||||||
} else {
|
} else {
|
||||||
params[ks] = []string{l.Value}
|
params[ks] = []string{l.Value}
|
||||||
}
|
}
|
||||||
}
|
})
|
||||||
|
|
||||||
return &url.URL{
|
return &url.URL{
|
||||||
Scheme: t.labels.Get(model.SchemeLabel),
|
Scheme: t.labels.Get(model.SchemeLabel),
|
||||||
|
@ -374,15 +372,15 @@ func PopulateLabels(lset labels.Labels, cfg *config.ScrapeConfig, noDefaultPort
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
preRelabelLabels := lb.Labels(nil)
|
preRelabelLabels := lb.Labels(labels.EmptyLabels())
|
||||||
lset = relabel.Process(preRelabelLabels, cfg.RelabelConfigs...)
|
lset, keep := relabel.Process(preRelabelLabels, cfg.RelabelConfigs...)
|
||||||
|
|
||||||
// Check if the target was dropped.
|
// Check if the target was dropped.
|
||||||
if lset == nil {
|
if !keep {
|
||||||
return nil, preRelabelLabels, nil
|
return labels.EmptyLabels(), preRelabelLabels, nil
|
||||||
}
|
}
|
||||||
if v := lset.Get(model.AddressLabel); v == "" {
|
if v := lset.Get(model.AddressLabel); v == "" {
|
||||||
return nil, nil, errors.New("no address")
|
return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("no address")
|
||||||
}
|
}
|
||||||
|
|
||||||
lb = labels.NewBuilder(lset)
|
lb = labels.NewBuilder(lset)
|
||||||
|
@ -413,7 +411,7 @@ func PopulateLabels(lset labels.Labels, cfg *config.ScrapeConfig, noDefaultPort
|
||||||
case "https":
|
case "https":
|
||||||
addr = addr + ":443"
|
addr = addr + ":443"
|
||||||
default:
|
default:
|
||||||
return nil, nil, errors.Errorf("invalid scheme: %q", cfg.Scheme)
|
return labels.EmptyLabels(), labels.EmptyLabels(), errors.Errorf("invalid scheme: %q", cfg.Scheme)
|
||||||
}
|
}
|
||||||
lb.Set(model.AddressLabel, addr)
|
lb.Set(model.AddressLabel, addr)
|
||||||
}
|
}
|
||||||
|
@ -434,50 +432,54 @@ func PopulateLabels(lset labels.Labels, cfg *config.ScrapeConfig, noDefaultPort
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := config.CheckTargetAddress(model.LabelValue(addr)); err != nil {
|
if err := config.CheckTargetAddress(model.LabelValue(addr)); err != nil {
|
||||||
return nil, nil, err
|
return labels.EmptyLabels(), labels.EmptyLabels(), err
|
||||||
}
|
}
|
||||||
|
|
||||||
interval := lset.Get(model.ScrapeIntervalLabel)
|
interval := lset.Get(model.ScrapeIntervalLabel)
|
||||||
intervalDuration, err := model.ParseDuration(interval)
|
intervalDuration, err := model.ParseDuration(interval)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, errors.Errorf("error parsing scrape interval: %v", err)
|
return labels.EmptyLabels(), labels.EmptyLabels(), errors.Errorf("error parsing scrape interval: %v", err)
|
||||||
}
|
}
|
||||||
if time.Duration(intervalDuration) == 0 {
|
if time.Duration(intervalDuration) == 0 {
|
||||||
return nil, nil, errors.New("scrape interval cannot be 0")
|
return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("scrape interval cannot be 0")
|
||||||
}
|
}
|
||||||
|
|
||||||
timeout := lset.Get(model.ScrapeTimeoutLabel)
|
timeout := lset.Get(model.ScrapeTimeoutLabel)
|
||||||
timeoutDuration, err := model.ParseDuration(timeout)
|
timeoutDuration, err := model.ParseDuration(timeout)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, errors.Errorf("error parsing scrape timeout: %v", err)
|
return labels.EmptyLabels(), labels.EmptyLabels(), errors.Errorf("error parsing scrape timeout: %v", err)
|
||||||
}
|
}
|
||||||
if time.Duration(timeoutDuration) == 0 {
|
if time.Duration(timeoutDuration) == 0 {
|
||||||
return nil, nil, errors.New("scrape timeout cannot be 0")
|
return labels.EmptyLabels(), labels.EmptyLabels(), errors.New("scrape timeout cannot be 0")
|
||||||
}
|
}
|
||||||
|
|
||||||
if timeoutDuration > intervalDuration {
|
if timeoutDuration > intervalDuration {
|
||||||
return nil, nil, errors.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
|
return labels.EmptyLabels(), labels.EmptyLabels(), errors.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Meta labels are deleted after relabelling. Other internal labels propagate to
|
// Meta labels are deleted after relabelling. Other internal labels propagate to
|
||||||
// the target which decides whether they will be part of their label set.
|
// the target which decides whether they will be part of their label set.
|
||||||
for _, l := range lset {
|
lset.Range(func(l labels.Label) {
|
||||||
if strings.HasPrefix(l.Name, model.MetaLabelPrefix) {
|
if strings.HasPrefix(l.Name, model.MetaLabelPrefix) {
|
||||||
lb.Del(l.Name)
|
lb.Del(l.Name)
|
||||||
}
|
}
|
||||||
}
|
})
|
||||||
|
|
||||||
// Default the instance label to the target address.
|
// Default the instance label to the target address.
|
||||||
if v := lset.Get(model.InstanceLabel); v == "" {
|
if v := lset.Get(model.InstanceLabel); v == "" {
|
||||||
lb.Set(model.InstanceLabel, addr)
|
lb.Set(model.InstanceLabel, addr)
|
||||||
}
|
}
|
||||||
|
|
||||||
res = lb.Labels(nil)
|
res = lb.Labels(labels.EmptyLabels())
|
||||||
for _, l := range res {
|
err = res.Validate(func(l labels.Label) error {
|
||||||
// Check label values are valid, drop the target if not.
|
// Check label values are valid, drop the target if not.
|
||||||
if !model.LabelValue(l.Value).IsValid() {
|
if !model.LabelValue(l.Value).IsValid() {
|
||||||
return nil, nil, errors.Errorf("invalid label value for %q: %q", l.Name, l.Value)
|
return errors.Errorf("invalid label value for %q: %q", l.Name, l.Value)
|
||||||
}
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return labels.EmptyLabels(), labels.EmptyLabels(), err
|
||||||
}
|
}
|
||||||
return res, preRelabelLabels, nil
|
return res, preRelabelLabels, nil
|
||||||
}
|
}
|
||||||
|
@ -501,12 +503,12 @@ func TargetsFromGroup(tg *targetgroup.Group, cfg *config.ScrapeConfig, noDefault
|
||||||
|
|
||||||
lset := labels.New(lbls...)
|
lset := labels.New(lbls...)
|
||||||
|
|
||||||
lbls, origLabels, err := PopulateLabels(lset, cfg, noDefaultPort)
|
lset, origLabels, err := PopulateLabels(lset, cfg, noDefaultPort)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
failures = append(failures, errors.Wrapf(err, "instance %d in group %s", i, tg))
|
failures = append(failures, errors.Wrapf(err, "instance %d in group %s", i, tg))
|
||||||
}
|
}
|
||||||
if lbls != nil || origLabels != nil {
|
if !lset.IsEmpty() || !origLabels.IsEmpty() {
|
||||||
targets = append(targets, NewTarget(lbls, origLabels, cfg.Params))
|
targets = append(targets, NewTarget(lset, origLabels, cfg.Params))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return targets, failures
|
return targets, failures
|
||||||
|
|
|
@ -129,7 +129,7 @@ func newTestTarget(targetURL string, deadline time.Duration, lbls labels.Labels)
|
||||||
lb.Set(model.AddressLabel, strings.TrimPrefix(targetURL, "http://"))
|
lb.Set(model.AddressLabel, strings.TrimPrefix(targetURL, "http://"))
|
||||||
lb.Set(model.MetricsPathLabel, "/metrics")
|
lb.Set(model.MetricsPathLabel, "/metrics")
|
||||||
|
|
||||||
return &Target{labels: lb.Labels(nil)}
|
return &Target{labels: lb.Labels(labels.EmptyLabels())}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNewHTTPBearerToken(t *testing.T) {
|
func TestNewHTTPBearerToken(t *testing.T) {
|
||||||
|
|
|
@ -699,7 +699,7 @@ func (c *compactChunkIterator) Next() bool {
|
||||||
// 1:1 duplicates, skip it.
|
// 1:1 duplicates, skip it.
|
||||||
} else {
|
} else {
|
||||||
// We operate on the same series, so the labels do not matter here.
|
// We operate on the same series, so the labels do not matter here.
|
||||||
overlapping = append(overlapping, newChunkToSeriesDecoder(nil, next))
|
overlapping = append(overlapping, newChunkToSeriesDecoder(labels.EmptyLabels(), next))
|
||||||
if next.MaxTime > oMaxTime {
|
if next.MaxTime > oMaxTime {
|
||||||
oMaxTime = next.MaxTime
|
oMaxTime = next.MaxTime
|
||||||
}
|
}
|
||||||
|
@ -716,7 +716,7 @@ func (c *compactChunkIterator) Next() bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add last as it's not yet included in overlap. We operate on the same series, so the labels do not matter here.
|
// Add last as it's not yet included in overlap. We operate on the same series, so the labels do not matter here.
|
||||||
iter = NewSeriesToChunkEncoder(c.mergeFunc(append(overlapping, newChunkToSeriesDecoder(nil, c.curr))...)).Iterator(nil)
|
iter = NewSeriesToChunkEncoder(c.mergeFunc(append(overlapping, newChunkToSeriesDecoder(labels.EmptyLabels(), c.curr))...)).Iterator(nil)
|
||||||
if !iter.Next() {
|
if !iter.Next() {
|
||||||
if c.err = iter.Err(); c.err != nil {
|
if c.err = iter.Err(); c.err != nil {
|
||||||
return false
|
return false
|
||||||
|
|
|
@ -153,10 +153,10 @@ func ToQueryResult(ss storage.SeriesSet, sampleLimit int) (*prompb.QueryResult,
|
||||||
func FromQueryResult(sortSeries bool, res *prompb.QueryResult) storage.SeriesSet {
|
func FromQueryResult(sortSeries bool, res *prompb.QueryResult) storage.SeriesSet {
|
||||||
series := make([]storage.Series, 0, len(res.Timeseries))
|
series := make([]storage.Series, 0, len(res.Timeseries))
|
||||||
for _, ts := range res.Timeseries {
|
for _, ts := range res.Timeseries {
|
||||||
lbls := labelProtosToLabels(ts.Labels)
|
if err := validateLabelsAndMetricName(ts.Labels); err != nil {
|
||||||
if err := validateLabelsAndMetricName(lbls); err != nil {
|
|
||||||
return errSeriesSet{err: err}
|
return errSeriesSet{err: err}
|
||||||
}
|
}
|
||||||
|
lbls := labelProtosToLabels(ts.Labels)
|
||||||
series = append(series, &concreteSeries{labels: lbls, samples: ts.Samples})
|
series = append(series, &concreteSeries{labels: lbls, samples: ts.Samples})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -348,7 +348,7 @@ type concreteSeries struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *concreteSeries) Labels() labels.Labels {
|
func (c *concreteSeries) Labels() labels.Labels {
|
||||||
return labels.New(c.labels...)
|
return c.labels.Copy()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *concreteSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator {
|
func (c *concreteSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator {
|
||||||
|
@ -443,7 +443,7 @@ func (c *concreteSeriesIterator) Err() error {
|
||||||
|
|
||||||
// validateLabelsAndMetricName validates the label names/values and metric names returned from remote read,
|
// validateLabelsAndMetricName validates the label names/values and metric names returned from remote read,
|
||||||
// also making sure that there are no labels with duplicate names
|
// also making sure that there are no labels with duplicate names
|
||||||
func validateLabelsAndMetricName(ls labels.Labels) error {
|
func validateLabelsAndMetricName(ls []prompb.Label) error {
|
||||||
for i, l := range ls {
|
for i, l := range ls {
|
||||||
if l.Name == labels.MetricName && !model.IsValidMetricName(model.LabelValue(l.Value)) {
|
if l.Name == labels.MetricName && !model.IsValidMetricName(model.LabelValue(l.Value)) {
|
||||||
return fmt.Errorf("invalid metric name: %v", l.Value)
|
return fmt.Errorf("invalid metric name: %v", l.Value)
|
||||||
|
@ -583,30 +583,24 @@ func LabelProtosToMetric(labelPairs []*prompb.Label) model.Metric {
|
||||||
}
|
}
|
||||||
|
|
||||||
func labelProtosToLabels(labelPairs []prompb.Label) labels.Labels {
|
func labelProtosToLabels(labelPairs []prompb.Label) labels.Labels {
|
||||||
result := make(labels.Labels, 0, len(labelPairs))
|
b := labels.ScratchBuilder{}
|
||||||
for _, l := range labelPairs {
|
for _, l := range labelPairs {
|
||||||
result = append(result, labels.Label{
|
b.Add(l.Name, l.Value)
|
||||||
Name: l.Name,
|
|
||||||
Value: l.Value,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
sort.Sort(result)
|
b.Sort()
|
||||||
return result
|
return b.Labels()
|
||||||
}
|
}
|
||||||
|
|
||||||
// labelsToLabelsProto transforms labels into prompb labels. The buffer slice
|
// labelsToLabelsProto transforms labels into prompb labels. The buffer slice
|
||||||
// will be used to avoid allocations if it is big enough to store the labels.
|
// will be used to avoid allocations if it is big enough to store the labels.
|
||||||
func labelsToLabelsProto(labels labels.Labels, buf []prompb.Label) []prompb.Label {
|
func labelsToLabelsProto(lbls labels.Labels, buf []prompb.Label) []prompb.Label {
|
||||||
result := buf[:0]
|
result := buf[:0]
|
||||||
if cap(buf) < len(labels) {
|
lbls.Range(func(l labels.Label) {
|
||||||
result = make([]prompb.Label, 0, len(labels))
|
|
||||||
}
|
|
||||||
for _, l := range labels {
|
|
||||||
result = append(result, prompb.Label{
|
result = append(result, prompb.Label{
|
||||||
Name: l.Name,
|
Name: l.Name,
|
||||||
Value: l.Value,
|
Value: l.Value,
|
||||||
})
|
})
|
||||||
}
|
})
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
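Editor's note: the codec hunks above convert protobuf label pairs into labels.Labels with a ScratchBuilder (Add then Sort, since wire order is not guaranteed) and walk the set with Range when converting back. A hedged sketch of the round trip; the helper names are illustrative, and the prompb import path and Label shape are the ones already used in this file.

    package main

    import (
        "fmt"

        "github.com/prometheus/prometheus/model/labels"
        "github.com/prometheus/prometheus/prompb"
    )

    func fromProto(pairs []prompb.Label) labels.Labels {
        b := labels.ScratchBuilder{}
        for _, l := range pairs {
            b.Add(l.Name, l.Value)
        }
        b.Sort() // incoming order is not guaranteed to be sorted
        return b.Labels()
    }

    func toProto(lset labels.Labels) []prompb.Label {
        result := make([]prompb.Label, 0, lset.Len())
        lset.Range(func(l labels.Label) {
            result = append(result, prompb.Label{Name: l.Name, Value: l.Value})
        })
        return result
    }

    func main() {
        in := []prompb.Label{{Name: "job", Value: "api"}, {Name: "__name__", Value: "up"}}
        lset := fromProto(in)
        fmt.Println(lset)          // {__name__="up", job="api"}
        fmt.Println(toProto(lset)) // sorted prompb.Label slice
    }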
@ -74,86 +74,86 @@ var writeRequestFixture = &prompb.WriteRequest{
|
||||||
|
|
||||||
func TestValidateLabelsAndMetricName(t *testing.T) {
|
func TestValidateLabelsAndMetricName(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
input labels.Labels
|
input []prompb.Label
|
||||||
expectedErr string
|
expectedErr string
|
||||||
description string
|
description string
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
input: labels.FromStrings(
|
input: []prompb.Label{
|
||||||
"__name__", "name",
|
{Name: "__name__", Value: "name"},
|
||||||
"labelName", "labelValue",
|
{Name: "labelName", Value: "labelValue"},
|
||||||
),
|
},
|
||||||
expectedErr: "",
|
expectedErr: "",
|
||||||
description: "regular labels",
|
description: "regular labels",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
input: labels.FromStrings(
|
input: []prompb.Label{
|
||||||
"__name__", "name",
|
{Name: "__name__", Value: "name"},
|
||||||
"_labelName", "labelValue",
|
{Name: "_labelName", Value: "labelValue"},
|
||||||
),
|
},
|
||||||
expectedErr: "",
|
expectedErr: "",
|
||||||
description: "label name with _",
|
description: "label name with _",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
input: labels.FromStrings(
|
input: []prompb.Label{
|
||||||
"__name__", "name",
|
{Name: "__name__", Value: "name"},
|
||||||
"@labelName", "labelValue",
|
{Name: "@labelName", Value: "labelValue"},
|
||||||
),
|
},
|
||||||
expectedErr: "invalid label name: @labelName",
|
expectedErr: "invalid label name: @labelName",
|
||||||
description: "label name with @",
|
description: "label name with @",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
input: labels.FromStrings(
|
input: []prompb.Label{
|
||||||
"__name__", "name",
|
{Name: "__name__", Value: "name"},
|
||||||
"123labelName", "labelValue",
|
{Name: "123labelName", Value: "labelValue"},
|
||||||
),
|
},
|
||||||
expectedErr: "invalid label name: 123labelName",
|
expectedErr: "invalid label name: 123labelName",
|
||||||
description: "label name starts with numbers",
|
description: "label name starts with numbers",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
input: labels.FromStrings(
|
input: []prompb.Label{
|
||||||
"__name__", "name",
|
{Name: "__name__", Value: "name"},
|
||||||
"", "labelValue",
|
{Name: "", Value: "labelValue"},
|
||||||
),
|
},
|
||||||
expectedErr: "invalid label name: ",
|
expectedErr: "invalid label name: ",
|
||||||
description: "label name is empty string",
|
description: "label name is empty string",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
input: labels.FromStrings(
|
input: []prompb.Label{
|
||||||
"__name__", "name",
|
{Name: "__name__", Value: "name"},
|
||||||
"labelName", string([]byte{0xff}),
|
{Name: "labelName", Value: string([]byte{0xff})},
|
||||||
),
|
},
|
||||||
expectedErr: "invalid label value: " + string([]byte{0xff}),
|
expectedErr: "invalid label value: " + string([]byte{0xff}),
|
||||||
description: "label value is an invalid UTF-8 value",
|
description: "label value is an invalid UTF-8 value",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
input: labels.FromStrings(
|
input: []prompb.Label{
|
||||||
"__name__", "@invalid_name",
|
{Name: "__name__", Value: "@invalid_name"},
|
||||||
),
|
},
|
||||||
expectedErr: "invalid metric name: @invalid_name",
|
expectedErr: "invalid metric name: @invalid_name",
|
||||||
description: "metric name starts with @",
|
description: "metric name starts with @",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
input: labels.FromStrings(
|
input: []prompb.Label{
|
||||||
"__name__", "name1",
|
{Name: "__name__", Value: "name1"},
|
||||||
"__name__", "name2",
|
{Name: "__name__", Value: "name2"},
|
||||||
),
|
},
|
||||||
expectedErr: "duplicate label with name: __name__",
|
expectedErr: "duplicate label with name: __name__",
|
||||||
description: "duplicate label names",
|
description: "duplicate label names",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
input: labels.FromStrings(
|
input: []prompb.Label{
|
||||||
"label1", "name",
|
{Name: "label1", Value: "name"},
|
||||||
"label2", "name",
|
{Name: "label2", Value: "name"},
|
||||||
),
|
},
|
||||||
expectedErr: "",
|
expectedErr: "",
|
||||||
description: "duplicate label values",
|
description: "duplicate label values",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
input: labels.FromStrings(
|
input: []prompb.Label{
|
||||||
"", "name",
|
{Name: "", Value: "name"},
|
||||||
"label2", "name",
|
{Name: "label2", Value: "name"},
|
||||||
),
|
},
|
||||||
expectedErr: "invalid label name: ",
|
expectedErr: "invalid label name: ",
|
||||||
description: "don't report as duplicate label name",
|
description: "don't report as duplicate label name",
|
||||||
},
|
},
|
||||||
|
@ -200,8 +200,7 @@ func TestConcreteSeriesClonesLabels(t *testing.T) {
|
||||||
gotLabels := cs.Labels()
|
gotLabels := cs.Labels()
|
||||||
require.Equal(t, lbls, gotLabels)
|
require.Equal(t, lbls, gotLabels)
|
||||||
|
|
||||||
gotLabels[0].Value = "foo"
|
gotLabels.CopyFrom(labels.FromStrings("a", "foo", "c", "foo"))
|
||||||
gotLabels[1].Value = "bar"
|
|
||||||
|
|
||||||
gotLabels = cs.Labels()
|
gotLabels = cs.Labels()
|
||||||
require.Equal(t, lbls, gotLabels)
|
require.Equal(t, lbls, gotLabels)
|
||||||
|
|
|
@ -396,7 +396,7 @@ type QueueManager struct {
|
||||||
flushDeadline time.Duration
|
flushDeadline time.Duration
|
||||||
cfg config.QueueConfig
|
cfg config.QueueConfig
|
||||||
mcfg config.MetadataConfig
|
mcfg config.MetadataConfig
|
||||||
externalLabels labels.Labels
|
externalLabels []labels.Label
|
||||||
relabelConfigs []*relabel.Config
|
relabelConfigs []*relabel.Config
|
||||||
sendExemplars bool
|
sendExemplars bool
|
||||||
sendNativeHistograms bool
|
sendNativeHistograms bool
|
||||||
|
@ -454,13 +454,19 @@ func NewQueueManager(
|
||||||
logger = log.NewNopLogger()
|
logger = log.NewNopLogger()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Copy externalLabels into a slice, which we need for processExternalLabels.
|
||||||
|
extLabelsSlice := make([]labels.Label, 0, externalLabels.Len())
|
||||||
|
externalLabels.Range(func(l labels.Label) {
|
||||||
|
extLabelsSlice = append(extLabelsSlice, l)
|
||||||
|
})
|
||||||
|
|
||||||
logger = log.With(logger, remoteName, client.Name(), endpoint, client.Endpoint())
|
logger = log.With(logger, remoteName, client.Name(), endpoint, client.Endpoint())
|
||||||
t := &QueueManager{
|
t := &QueueManager{
|
||||||
logger: logger,
|
logger: logger,
|
||||||
flushDeadline: flushDeadline,
|
flushDeadline: flushDeadline,
|
||||||
cfg: cfg,
|
cfg: cfg,
|
||||||
mcfg: mCfg,
|
mcfg: mCfg,
|
||||||
externalLabels: externalLabels,
|
externalLabels: extLabelsSlice,
|
||||||
relabelConfigs: relabelConfigs,
|
relabelConfigs: relabelConfigs,
|
||||||
storeClient: client,
|
storeClient: client,
|
||||||
sendExemplars: enableExemplarRemoteWrite,
|
sendExemplars: enableExemplarRemoteWrite,
|
||||||
|
@ -769,8 +775,8 @@ func (t *QueueManager) StoreSeries(series []record.RefSeries, index int) {
|
||||||
t.seriesSegmentIndexes[s.Ref] = index
|
t.seriesSegmentIndexes[s.Ref] = index
|
||||||
|
|
||||||
ls := processExternalLabels(s.Labels, t.externalLabels)
|
ls := processExternalLabels(s.Labels, t.externalLabels)
|
||||||
lbls := relabel.Process(ls, t.relabelConfigs...)
|
lbls, keep := relabel.Process(ls, t.relabelConfigs...)
|
||||||
if len(lbls) == 0 {
|
if !keep || lbls.IsEmpty() {
|
||||||
t.droppedSeries[s.Ref] = struct{}{}
|
t.droppedSeries[s.Ref] = struct{}{}
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
@ -831,44 +837,33 @@ func (t *QueueManager) client() WriteClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *QueueManager) internLabels(lbls labels.Labels) {
|
func (t *QueueManager) internLabels(lbls labels.Labels) {
|
||||||
for i, l := range lbls {
|
lbls.InternStrings(t.interner.intern)
|
||||||
lbls[i].Name = t.interner.intern(l.Name)
|
|
||||||
lbls[i].Value = t.interner.intern(l.Value)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *QueueManager) releaseLabels(ls labels.Labels) {
|
func (t *QueueManager) releaseLabels(ls labels.Labels) {
|
||||||
for _, l := range ls {
|
ls.ReleaseStrings(t.interner.release)
|
||||||
t.interner.release(l.Name)
|
|
||||||
t.interner.release(l.Value)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// processExternalLabels merges externalLabels into ls. If ls contains
|
// processExternalLabels merges externalLabels into ls. If ls contains
|
||||||
// a label in externalLabels, the value in ls wins.
|
// a label in externalLabels, the value in ls wins.
|
||||||
func processExternalLabels(ls, externalLabels labels.Labels) labels.Labels {
|
func processExternalLabels(ls labels.Labels, externalLabels []labels.Label) labels.Labels {
|
||||||
i, j, result := 0, 0, make(labels.Labels, 0, len(ls)+len(externalLabels))
|
b := labels.NewScratchBuilder(ls.Len() + len(externalLabels))
|
||||||
for i < len(ls) && j < len(externalLabels) {
|
j := 0
|
||||||
if ls[i].Name < externalLabels[j].Name {
|
ls.Range(func(l labels.Label) {
|
||||||
result = append(result, labels.Label{
|
for j < len(externalLabels) && l.Name > externalLabels[j].Name {
|
||||||
Name: ls[i].Name,
|
b.Add(externalLabels[j].Name, externalLabels[j].Value)
|
||||||
Value: ls[i].Value,
|
|
||||||
})
|
|
||||||
i++
|
|
||||||
} else if ls[i].Name > externalLabels[j].Name {
|
|
||||||
result = append(result, externalLabels[j])
|
|
||||||
j++
|
|
||||||
} else {
|
|
||||||
result = append(result, labels.Label{
|
|
||||||
Name: ls[i].Name,
|
|
||||||
Value: ls[i].Value,
|
|
||||||
})
|
|
||||||
i++
|
|
||||||
j++
|
j++
|
||||||
}
|
}
|
||||||
|
if j < len(externalLabels) && l.Name == externalLabels[j].Name {
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
b.Add(l.Name, l.Value)
|
||||||
|
})
|
||||||
|
for ; j < len(externalLabels); j++ {
|
||||||
|
b.Add(externalLabels[j].Name, externalLabels[j].Value)
|
||||||
}
|
}
|
||||||
|
|
||||||
return append(append(result, ls[i:]...), externalLabels[j:]...)
|
return b.Labels()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *QueueManager) updateShardsLoop() {
|
func (t *QueueManager) updateShardsLoop() {
|
||||||
|
|
|
@ -161,7 +161,7 @@ func TestMetadataDelivery(t *testing.T) {
|
||||||
mcfg := config.DefaultMetadataConfig
|
mcfg := config.DefaultMetadataConfig
|
||||||
|
|
||||||
metrics := newQueueManagerMetrics(nil, "", "")
|
metrics := newQueueManagerMetrics(nil, "", "")
|
||||||
m := NewQueueManager(metrics, nil, nil, nil, dir, newEWMARate(ewmaWeight, shardUpdateDuration), cfg, mcfg, nil, nil, c, defaultFlushDeadline, newPool(), newHighestTimestampMetric(), nil, false, false)
|
m := NewQueueManager(metrics, nil, nil, nil, dir, newEWMARate(ewmaWeight, shardUpdateDuration), cfg, mcfg, labels.EmptyLabels(), nil, c, defaultFlushDeadline, newPool(), newHighestTimestampMetric(), nil, false, false)
|
||||||
m.Start()
|
m.Start()
|
||||||
defer m.Stop()
|
defer m.Stop()
|
||||||
|
|
||||||
|
@ -539,6 +539,7 @@ func TestShouldReshard(t *testing.T) {
|
||||||
func createTimeseries(numSamples, numSeries int, extraLabels ...labels.Label) ([]record.RefSample, []record.RefSeries) {
|
func createTimeseries(numSamples, numSeries int, extraLabels ...labels.Label) ([]record.RefSample, []record.RefSeries) {
|
||||||
samples := make([]record.RefSample, 0, numSamples)
|
samples := make([]record.RefSample, 0, numSamples)
|
||||||
series := make([]record.RefSeries, 0, numSeries)
|
series := make([]record.RefSeries, 0, numSeries)
|
||||||
|
b := labels.ScratchBuilder{}
|
||||||
for i := 0; i < numSeries; i++ {
|
for i := 0; i < numSeries; i++ {
|
||||||
name := fmt.Sprintf("test_metric_%d", i)
|
name := fmt.Sprintf("test_metric_%d", i)
|
||||||
for j := 0; j < numSamples; j++ {
|
for j := 0; j < numSamples; j++ {
|
||||||
|
@ -548,9 +549,16 @@ func createTimeseries(numSamples, numSeries int, extraLabels ...labels.Label) ([
|
||||||
V: float64(i),
|
V: float64(i),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
// Create Labels consisting of the series name plus any extra labels supplied.
|
||||||
|
b.Reset()
|
||||||
|
b.Add(labels.MetricName, name)
|
||||||
|
for _, l := range extraLabels {
|
||||||
|
b.Add(l.Name, l.Value)
|
||||||
|
}
|
||||||
|
b.Sort()
|
||||||
series = append(series, record.RefSeries{
|
series = append(series, record.RefSeries{
|
||||||
Ref: chunks.HeadSeriesRef(i),
|
Ref: chunks.HeadSeriesRef(i),
|
||||||
Labels: append(labels.Labels{{Name: "__name__", Value: name}}, extraLabels...),
|
Labels: b.Labels(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
return samples, series
|
return samples, series
|
||||||
|
@ -603,7 +611,7 @@ func createHistograms(numSamples, numSeries int) ([]record.RefHistogramSample, [
|
||||||
}
|
}
|
||||||
series = append(series, record.RefSeries{
|
series = append(series, record.RefSeries{
|
||||||
Ref: chunks.HeadSeriesRef(i),
|
Ref: chunks.HeadSeriesRef(i),
|
||||||
Labels: labels.Labels{{Name: "__name__", Value: name}},
|
Labels: labels.FromStrings("__name__", name),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
return histograms, series
|
return histograms, series
|
||||||
|
@ -815,7 +823,7 @@ func BenchmarkSampleSend(b *testing.B) {
|
||||||
const numSeries = 10000
|
const numSeries = 10000
|
||||||
|
|
||||||
// Extra labels to make a more realistic workload - taken from Kubernetes' embedded cAdvisor metrics.
|
// Extra labels to make a more realistic workload - taken from Kubernetes' embedded cAdvisor metrics.
|
||||||
extraLabels := labels.Labels{
|
extraLabels := []labels.Label{
|
||||||
{Name: "kubernetes_io_arch", Value: "amd64"},
|
{Name: "kubernetes_io_arch", Value: "amd64"},
|
||||||
{Name: "kubernetes_io_instance_type", Value: "c3.somesize"},
|
{Name: "kubernetes_io_instance_type", Value: "c3.somesize"},
|
||||||
{Name: "kubernetes_io_os", Value: "linux"},
|
{Name: "kubernetes_io_os", Value: "linux"},
|
||||||
|
@ -902,56 +910,63 @@ func BenchmarkStartup(b *testing.B) {
|
||||||
func TestProcessExternalLabels(t *testing.T) {
|
func TestProcessExternalLabels(t *testing.T) {
|
||||||
for _, tc := range []struct {
|
for _, tc := range []struct {
|
||||||
labels labels.Labels
|
labels labels.Labels
|
||||||
externalLabels labels.Labels
|
externalLabels []labels.Label
|
||||||
expected labels.Labels
|
expected labels.Labels
|
||||||
}{
|
}{
|
||||||
// Test adding labels at the end.
|
// Test adding labels at the end.
|
||||||
{
|
{
|
||||||
labels: labels.Labels{{Name: "a", Value: "b"}},
|
labels: labels.FromStrings("a", "b"),
|
||||||
externalLabels: labels.Labels{{Name: "c", Value: "d"}},
|
externalLabels: []labels.Label{{Name: "c", Value: "d"}},
|
||||||
expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}},
|
expected: labels.FromStrings("a", "b", "c", "d"),
|
||||||
},
|
},
|
||||||
|
|
||||||
// Test adding labels at the beginning.
|
// Test adding labels at the beginning.
|
||||||
{
|
{
|
||||||
labels: labels.Labels{{Name: "c", Value: "d"}},
|
labels: labels.FromStrings("c", "d"),
|
||||||
externalLabels: labels.Labels{{Name: "a", Value: "b"}},
|
externalLabels: []labels.Label{{Name: "a", Value: "b"}},
|
||||||
expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}},
|
expected: labels.FromStrings("a", "b", "c", "d"),
|
||||||
},
|
},
|
||||||
|
|
||||||
// Test we don't override existing labels.
|
// Test we don't override existing labels.
|
||||||
{
|
{
|
||||||
labels: labels.Labels{{Name: "a", Value: "b"}},
|
labels: labels.FromStrings("a", "b"),
|
||||||
externalLabels: labels.Labels{{Name: "a", Value: "c"}},
|
externalLabels: []labels.Label{{Name: "a", Value: "c"}},
|
||||||
expected: labels.Labels{{Name: "a", Value: "b"}},
|
expected: labels.FromStrings("a", "b"),
|
||||||
},
|
},
|
||||||
|
|
||||||
// Test empty externalLabels.
|
// Test empty externalLabels.
|
||||||
{
|
{
|
||||||
labels: labels.Labels{{Name: "a", Value: "b"}},
|
labels: labels.FromStrings("a", "b"),
|
||||||
externalLabels: labels.Labels{},
|
externalLabels: []labels.Label{},
|
||||||
expected: labels.Labels{{Name: "a", Value: "b"}},
|
expected: labels.FromStrings("a", "b"),
|
||||||
},
|
},
|
||||||
|
|
||||||
// Test empty labels.
|
// Test empty labels.
|
||||||
{
|
{
|
||||||
labels: labels.Labels{},
|
labels: labels.EmptyLabels(),
|
||||||
externalLabels: labels.Labels{{Name: "a", Value: "b"}},
|
externalLabels: []labels.Label{{Name: "a", Value: "b"}},
|
||||||
expected: labels.Labels{{Name: "a", Value: "b"}},
|
expected: labels.FromStrings("a", "b"),
|
||||||
},
|
},
|
||||||
|
|
||||||
// Test labels is longer than externalLabels.
|
// Test labels is longer than externalLabels.
|
||||||
{
|
{
|
||||||
labels: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}},
|
labels: labels.FromStrings("a", "b", "c", "d"),
|
||||||
externalLabels: labels.Labels{{Name: "e", Value: "f"}},
|
externalLabels: []labels.Label{{Name: "e", Value: "f"}},
|
||||||
expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}, {Name: "e", Value: "f"}},
|
expected: labels.FromStrings("a", "b", "c", "d", "e", "f"),
|
||||||
},
|
},
|
||||||
|
|
||||||
// Test externalLabels is longer than labels.
|
// Test externalLabels is longer than labels.
|
||||||
{
|
{
|
||||||
labels: labels.Labels{{Name: "c", Value: "d"}},
|
labels: labels.FromStrings("c", "d"),
|
||||||
externalLabels: labels.Labels{{Name: "a", Value: "b"}, {Name: "e", Value: "f"}},
|
externalLabels: []labels.Label{{Name: "a", Value: "b"}, {Name: "e", Value: "f"}},
|
||||||
expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}, {Name: "e", Value: "f"}},
|
expected: labels.FromStrings("a", "b", "c", "d", "e", "f"),
|
||||||
|
},
|
||||||
|
|
||||||
|
// Adding with and without clashing labels.
|
||||||
|
{
|
||||||
|
labels: labels.FromStrings("a", "b", "c", "d"),
|
||||||
|
externalLabels: []labels.Label{{Name: "a", Value: "xxx"}, {Name: "c", Value: "yyy"}, {Name: "e", Value: "f"}},
|
||||||
|
expected: labels.FromStrings("a", "b", "c", "d", "e", "f"),
|
||||||
},
|
},
|
||||||
} {
|
} {
|
||||||
require.Equal(t, tc.expected, processExternalLabels(tc.labels, tc.externalLabels))
|
require.Equal(t, tc.expected, processExternalLabels(tc.labels, tc.externalLabels))
|
||||||
|
|
|
@ -180,9 +180,11 @@ func (q *querier) Select(sortSeries bool, hints *storage.SelectHints, matchers .
|
||||||
// We return the new set of matchers, along with a map of labels for which
|
// We return the new set of matchers, along with a map of labels for which
|
||||||
// matchers were added, so that these can later be removed from the result
|
// matchers were added, so that these can later be removed from the result
|
||||||
// time series again.
|
// time series again.
|
||||||
func (q querier) addExternalLabels(ms []*labels.Matcher) ([]*labels.Matcher, labels.Labels) {
|
func (q querier) addExternalLabels(ms []*labels.Matcher) ([]*labels.Matcher, []string) {
|
||||||
el := make(labels.Labels, len(q.externalLabels))
|
el := make([]labels.Label, 0, q.externalLabels.Len())
|
||||||
copy(el, q.externalLabels)
|
q.externalLabels.Range(func(l labels.Label) {
|
||||||
|
el = append(el, l)
|
||||||
|
})
|
||||||
|
|
||||||
// ms won't be sorted, so have to O(n^2) the search.
|
// ms won't be sorted, so have to O(n^2) the search.
|
||||||
for _, m := range ms {
|
for _, m := range ms {
|
||||||
|
@ -202,7 +204,11 @@ func (q querier) addExternalLabels(ms []*labels.Matcher) ([]*labels.Matcher, lab
|
||||||
}
|
}
|
||||||
ms = append(ms, m)
|
ms = append(ms, m)
|
||||||
}
|
}
|
||||||
return ms, el
|
names := make([]string, len(el))
|
||||||
|
for i := range el {
|
||||||
|
names[i] = el[i].Name
|
||||||
|
}
|
||||||
|
return ms, names
|
||||||
}
|
}
|
||||||
|
|
||||||
// LabelValues implements storage.Querier and is a noop.
|
// LabelValues implements storage.Querier and is a noop.
|
||||||
|
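A rough sketch of the idea behind addExternalLabels, assuming only the public labels API: add an equality matcher for every external label the query does not already constrain, and remember the added names so the labels can be stripped from the returned series again. The example matcher and values are invented.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	external := labels.FromStrings("dc", "berlin-01", "region", "europe")
	ms := []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "job", "api-server")}

	// Collect the names of the matchers we add; they play the role of the
	// []string return value above.
	var added []string
	external.Range(func(l labels.Label) {
		for _, m := range ms {
			if m.Name == l.Name {
				return
			}
		}
		ms = append(ms, labels.MustNewMatcher(labels.MatchEqual, l.Name, l.Value))
		added = append(added, l.Name)
	})
	fmt.Println(len(ms), added) // 3 [dc region]
}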
@ -234,7 +240,8 @@ func (q *chunkQuerier) Select(sortSeries bool, hints *storage.SelectHints, match
|
||||||
return storage.NewSeriesSetToChunkSet(q.querier.Select(sortSeries, hints, matchers...))
|
return storage.NewSeriesSetToChunkSet(q.querier.Select(sortSeries, hints, matchers...))
|
||||||
}
|
}
|
||||||
|
|
||||||
func newSeriesSetFilter(ss storage.SeriesSet, toFilter labels.Labels) storage.SeriesSet {
|
// Note strings in toFilter must be sorted.
|
||||||
|
func newSeriesSetFilter(ss storage.SeriesSet, toFilter []string) storage.SeriesSet {
|
||||||
return &seriesSetFilter{
|
return &seriesSetFilter{
|
||||||
SeriesSet: ss,
|
SeriesSet: ss,
|
||||||
toFilter: toFilter,
|
toFilter: toFilter,
|
||||||
|
@ -243,7 +250,7 @@ func newSeriesSetFilter(ss storage.SeriesSet, toFilter labels.Labels) storage.Se
|
||||||
|
|
||||||
type seriesSetFilter struct {
|
type seriesSetFilter struct {
|
||||||
storage.SeriesSet
|
storage.SeriesSet
|
||||||
toFilter labels.Labels
|
toFilter []string // Label names to remove from result
|
||||||
querier storage.Querier
|
querier storage.Querier
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -264,20 +271,12 @@ func (ssf seriesSetFilter) At() storage.Series {
 
 type seriesFilter struct {
 	storage.Series
-	toFilter labels.Labels
+	toFilter []string // Label names to remove from result
 }
 
 func (sf seriesFilter) Labels() labels.Labels {
-	labels := sf.Series.Labels()
-	for i, j := 0, 0; i < len(labels) && j < len(sf.toFilter); {
-		if labels[i].Name < sf.toFilter[j].Name {
-			i++
-		} else if labels[i].Name > sf.toFilter[j].Name {
-			j++
-		} else {
-			labels = labels[:i+copy(labels[i:], labels[i+1:])]
-			j++
-		}
-	}
-	return labels
+	b := labels.NewBuilder(sf.Series.Labels())
+	// todo: check if this is too inefficient.
+	b.Del(sf.toFilter...)
+	return b.Labels(labels.EmptyLabels())
 }
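A minimal sketch of the builder-based filtering used above; the label values are invented for the example.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	series := labels.FromStrings("__name__", "up", "dc", "berlin-01", "region", "europe")

	// Drop the label names that were only added for matching.
	toFilter := []string{"dc", "region"}
	b := labels.NewBuilder(series)
	b.Del(toFilter...)
	fmt.Println(b.Labels(labels.EmptyLabels())) // {__name__="up"}
}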
@ -110,7 +110,7 @@ func TestExternalLabelsQuerierAddExternalLabels(t *testing.T) {
|
||||||
el labels.Labels
|
el labels.Labels
|
||||||
inMatchers []*labels.Matcher
|
inMatchers []*labels.Matcher
|
||||||
outMatchers []*labels.Matcher
|
outMatchers []*labels.Matcher
|
||||||
added labels.Labels
|
added []string
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
inMatchers: []*labels.Matcher{
|
inMatchers: []*labels.Matcher{
|
||||||
|
@ -119,7 +119,7 @@ func TestExternalLabelsQuerierAddExternalLabels(t *testing.T) {
|
||||||
outMatchers: []*labels.Matcher{
|
outMatchers: []*labels.Matcher{
|
||||||
labels.MustNewMatcher(labels.MatchEqual, "job", "api-server"),
|
labels.MustNewMatcher(labels.MatchEqual, "job", "api-server"),
|
||||||
},
|
},
|
||||||
added: labels.Labels{},
|
added: []string{},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
el: labels.FromStrings("dc", "berlin-01", "region", "europe"),
|
el: labels.FromStrings("dc", "berlin-01", "region", "europe"),
|
||||||
|
@ -131,7 +131,7 @@ func TestExternalLabelsQuerierAddExternalLabels(t *testing.T) {
|
||||||
labels.MustNewMatcher(labels.MatchEqual, "region", "europe"),
|
labels.MustNewMatcher(labels.MatchEqual, "region", "europe"),
|
||||||
labels.MustNewMatcher(labels.MatchEqual, "dc", "berlin-01"),
|
labels.MustNewMatcher(labels.MatchEqual, "dc", "berlin-01"),
|
||||||
},
|
},
|
||||||
added: labels.FromStrings("dc", "berlin-01", "region", "europe"),
|
added: []string{"dc", "region"},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
el: labels.FromStrings("dc", "berlin-01", "region", "europe"),
|
el: labels.FromStrings("dc", "berlin-01", "region", "europe"),
|
||||||
|
@ -144,7 +144,7 @@ func TestExternalLabelsQuerierAddExternalLabels(t *testing.T) {
|
||||||
labels.MustNewMatcher(labels.MatchEqual, "region", "europe"),
|
labels.MustNewMatcher(labels.MatchEqual, "region", "europe"),
|
||||||
labels.MustNewMatcher(labels.MatchEqual, "dc", "munich-02"),
|
labels.MustNewMatcher(labels.MatchEqual, "dc", "munich-02"),
|
||||||
},
|
},
|
||||||
added: labels.FromStrings("region", "europe"),
|
added: []string{"region"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -163,12 +163,12 @@ func TestExternalLabelsQuerierAddExternalLabels(t *testing.T) {
|
||||||
func TestSeriesSetFilter(t *testing.T) {
|
func TestSeriesSetFilter(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
in *prompb.QueryResult
|
in *prompb.QueryResult
|
||||||
toRemove labels.Labels
|
toRemove []string
|
||||||
|
|
||||||
expected *prompb.QueryResult
|
expected *prompb.QueryResult
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
toRemove: labels.Labels{{Name: "foo", Value: "bar"}},
|
toRemove: []string{"foo"},
|
||||||
in: &prompb.QueryResult{
|
in: &prompb.QueryResult{
|
||||||
Timeseries: []*prompb.TimeSeries{
|
Timeseries: []*prompb.TimeSeries{
|
||||||
{Labels: labelsToLabelsProto(labels.FromStrings("foo", "bar", "a", "b"), nil), Samples: []prompb.Sample{}},
|
{Labels: labelsToLabelsProto(labels.FromStrings("foo", "bar", "a", "b"), nil), Samples: []prompb.Sample{}},
|
||||||
|
|
|
@ -91,7 +91,7 @@ func TestFilterExternalLabels(t *testing.T) {
|
||||||
|
|
||||||
require.NoError(t, s.ApplyConfig(conf))
|
require.NoError(t, s.ApplyConfig(conf))
|
||||||
require.Equal(t, 1, len(s.queryables))
|
require.Equal(t, 1, len(s.queryables))
|
||||||
require.Equal(t, 1, len(s.queryables[0].(*sampleAndChunkQueryableClient).externalLabels))
|
require.Equal(t, 1, s.queryables[0].(*sampleAndChunkQueryableClient).externalLabels.Len())
|
||||||
|
|
||||||
err := s.Close()
|
err := s.Close()
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
@ -118,7 +118,7 @@ func TestIgnoreExternalLabels(t *testing.T) {
|
||||||
|
|
||||||
require.NoError(t, s.ApplyConfig(conf))
|
require.NoError(t, s.ApplyConfig(conf))
|
||||||
require.Equal(t, 1, len(s.queryables))
|
require.Equal(t, 1, len(s.queryables))
|
||||||
require.Equal(t, 0, len(s.queryables[0].(*sampleAndChunkQueryableClient).externalLabels))
|
require.Equal(t, 0, s.queryables[0].(*sampleAndChunkQueryableClient).externalLabels.Len())
|
||||||
|
|
||||||
err := s.Close()
|
err := s.Close()
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
|
@ -228,14 +228,14 @@ func TestUpdateExternalLabels(t *testing.T) {
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.NoError(t, s.ApplyConfig(conf))
|
require.NoError(t, s.ApplyConfig(conf))
|
||||||
require.Equal(t, 1, len(s.queues))
|
require.Equal(t, 1, len(s.queues))
|
||||||
require.Equal(t, labels.Labels(nil), s.queues[hash].externalLabels)
|
require.Equal(t, 0, len(s.queues[hash].externalLabels))
|
||||||
|
|
||||||
conf.GlobalConfig.ExternalLabels = externalLabels
|
conf.GlobalConfig.ExternalLabels = externalLabels
|
||||||
hash, err = toHash(conf.RemoteWriteConfigs[0])
|
hash, err = toHash(conf.RemoteWriteConfigs[0])
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.NoError(t, s.ApplyConfig(conf))
|
require.NoError(t, s.ApplyConfig(conf))
|
||||||
require.Equal(t, 1, len(s.queues))
|
require.Equal(t, 1, len(s.queues))
|
||||||
require.Equal(t, externalLabels, s.queues[hash].externalLabels)
|
require.Equal(t, []labels.Label{{Name: "external", Value: "true"}}, s.queues[hash].externalLabels)
|
||||||
|
|
||||||
err = s.Close()
|
err = s.Close()
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
|
@ -713,7 +713,7 @@ func (a *appender) Append(ref storage.SeriesRef, l labels.Labels, t int64, v flo
|
||||||
// Ensure no empty or duplicate labels have gotten through. This mirrors the
|
// Ensure no empty or duplicate labels have gotten through. This mirrors the
|
||||||
// equivalent validation code in the TSDB's headAppender.
|
// equivalent validation code in the TSDB's headAppender.
|
||||||
l = l.WithoutEmpty()
|
l = l.WithoutEmpty()
|
||||||
if len(l) == 0 {
|
if l.IsEmpty() {
|
||||||
return 0, errors.Wrap(tsdb.ErrInvalidSample, "empty labelset")
|
return 0, errors.Wrap(tsdb.ErrInvalidSample, "empty labelset")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -786,13 +786,17 @@ func (a *appender) AppendExemplar(ref storage.SeriesRef, l labels.Labels, e exem
|
||||||
// Exemplar label length does not include chars involved in text rendering such as quotes
|
// Exemplar label length does not include chars involved in text rendering such as quotes
|
||||||
// equals sign, or commas. See definition of const ExemplarMaxLabelLength.
|
// equals sign, or commas. See definition of const ExemplarMaxLabelLength.
|
||||||
labelSetLen := 0
|
labelSetLen := 0
|
||||||
for _, l := range e.Labels {
|
err := e.Labels.Validate(func(l labels.Label) error {
|
||||||
labelSetLen += utf8.RuneCountInString(l.Name)
|
labelSetLen += utf8.RuneCountInString(l.Name)
|
||||||
labelSetLen += utf8.RuneCountInString(l.Value)
|
labelSetLen += utf8.RuneCountInString(l.Value)
|
||||||
|
|
||||||
if labelSetLen > exemplar.ExemplarMaxLabelSetLength {
|
if labelSetLen > exemplar.ExemplarMaxLabelSetLength {
|
||||||
return 0, storage.ErrExemplarLabelLength
|
return storage.ErrExemplarLabelLength
|
||||||
}
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check for duplicate vs last stored exemplar for this series, and discard those.
|
// Check for duplicate vs last stored exemplar for this series, and discard those.
|
||||||
|
|
|
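The Validate callback replaces the old `for _, l := range e.Labels` loop because it can stop early with an error. A hedged, standalone sketch of that length check follows; errTooLong and the 128-rune budget stand in for storage.ErrExemplarLabelLength and exemplar.ExemplarMaxLabelSetLength.

package main

import (
	"errors"
	"fmt"
	"unicode/utf8"

	"github.com/prometheus/prometheus/model/labels"
)

// errTooLong is a placeholder for the real sentinel error.
var errTooLong = errors.New("exemplar label set too long")

func checkLen(ls labels.Labels) error {
	n := 0
	return ls.Validate(func(l labels.Label) error {
		n += utf8.RuneCountInString(l.Name) + utf8.RuneCountInString(l.Value)
		if n > 128 {
			return errTooLong
		}
		return nil
	})
}

func main() {
	fmt.Println(checkLen(labels.FromStrings("trace_id", "abc123"))) // <nil>
}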
@ -49,28 +49,28 @@ func TestDB_InvalidSeries(t *testing.T) {
|
||||||
_, err := app.Append(0, labels.Labels{}, 0, 0)
|
_, err := app.Append(0, labels.Labels{}, 0, 0)
|
||||||
require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject empty labels")
|
require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject empty labels")
|
||||||
|
|
||||||
_, err = app.Append(0, labels.Labels{{Name: "a", Value: "1"}, {Name: "a", Value: "2"}}, 0, 0)
|
_, err = app.Append(0, labels.FromStrings("a", "1", "a", "2"), 0, 0)
|
||||||
require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject duplicate labels")
|
require.ErrorIs(t, err, tsdb.ErrInvalidSample, "should reject duplicate labels")
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Exemplars", func(t *testing.T) {
|
t.Run("Exemplars", func(t *testing.T) {
|
||||||
sRef, err := app.Append(0, labels.Labels{{Name: "a", Value: "1"}}, 0, 0)
|
sRef, err := app.Append(0, labels.FromStrings("a", "1"), 0, 0)
|
||||||
require.NoError(t, err, "should not reject valid series")
|
require.NoError(t, err, "should not reject valid series")
|
||||||
|
|
||||||
_, err = app.AppendExemplar(0, nil, exemplar.Exemplar{})
|
_, err = app.AppendExemplar(0, labels.EmptyLabels(), exemplar.Exemplar{})
|
||||||
require.EqualError(t, err, "unknown series ref when trying to add exemplar: 0")
|
require.EqualError(t, err, "unknown series ref when trying to add exemplar: 0")
|
||||||
|
|
||||||
e := exemplar.Exemplar{Labels: labels.Labels{{Name: "a", Value: "1"}, {Name: "a", Value: "2"}}}
|
e := exemplar.Exemplar{Labels: labels.FromStrings("a", "1", "a", "2")}
|
||||||
_, err = app.AppendExemplar(sRef, nil, e)
|
_, err = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
require.ErrorIs(t, err, tsdb.ErrInvalidExemplar, "should reject duplicate labels")
|
require.ErrorIs(t, err, tsdb.ErrInvalidExemplar, "should reject duplicate labels")
|
||||||
|
|
||||||
e = exemplar.Exemplar{Labels: labels.Labels{{Name: "a_somewhat_long_trace_id", Value: "nYJSNtFrFTY37VR7mHzEE/LIDt7cdAQcuOzFajgmLDAdBSRHYPDzrxhMA4zz7el8naI/AoXFv9/e/G0vcETcIoNUi3OieeLfaIRQci2oa"}}}
|
e = exemplar.Exemplar{Labels: labels.FromStrings("a_somewhat_long_trace_id", "nYJSNtFrFTY37VR7mHzEE/LIDt7cdAQcuOzFajgmLDAdBSRHYPDzrxhMA4zz7el8naI/AoXFv9/e/G0vcETcIoNUi3OieeLfaIRQci2oa")}
|
||||||
_, err = app.AppendExemplar(sRef, nil, e)
|
_, err = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
require.ErrorIs(t, err, storage.ErrExemplarLabelLength, "should reject too long label length")
|
require.ErrorIs(t, err, storage.ErrExemplarLabelLength, "should reject too long label length")
|
||||||
|
|
||||||
// Inverse check
|
// Inverse check
|
||||||
e = exemplar.Exemplar{Labels: labels.Labels{{Name: "a", Value: "1"}}, Value: 20, Ts: 10, HasTs: true}
|
e = exemplar.Exemplar{Labels: labels.FromStrings("a", "1"), Value: 20, Ts: 10, HasTs: true}
|
||||||
_, err = app.AppendExemplar(sRef, nil, e)
|
_, err = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
require.NoError(t, err, "should not reject valid exemplars")
|
require.NoError(t, err, "should not reject valid exemplars")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
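A tiny sketch of the empty-labels idioms the tests above switch to; nothing here is specific to this change beyond the labels API itself.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	// "No labels" is spelled labels.EmptyLabels() rather than nil or
	// labels.Labels{}, and emptiness is checked with IsEmpty().
	e := labels.EmptyLabels()
	fmt.Println(e.IsEmpty(), e.Len()) // true 0

	l := labels.FromStrings("a", "b")
	fmt.Println(l.IsEmpty(), l.Len()) // false 1
}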
@ -426,9 +426,7 @@ func Test_ExistingWAL_NextRef(t *testing.T) {
|
||||||
// Append <seriesCount> series
|
// Append <seriesCount> series
|
||||||
app := db.Appender(context.Background())
|
app := db.Appender(context.Background())
|
||||||
for i := 0; i < seriesCount; i++ {
|
for i := 0; i < seriesCount; i++ {
|
||||||
lset := labels.Labels{
|
lset := labels.FromStrings(model.MetricNameLabel, fmt.Sprintf("series_%d", i))
|
||||||
{Name: model.MetricNameLabel, Value: fmt.Sprintf("series_%d", i)},
|
|
||||||
}
|
|
||||||
_, err := app.Append(0, lset, 0, 100)
|
_, err := app.Append(0, lset, 0, 100)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
}
|
}
|
||||||
|
@ -470,11 +468,11 @@ func startTime() (int64, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create series for tests.
|
// Create series for tests.
|
||||||
func labelsForTest(lName string, seriesCount int) []labels.Labels {
|
func labelsForTest(lName string, seriesCount int) [][]labels.Label {
|
||||||
var series []labels.Labels
|
var series [][]labels.Label
|
||||||
|
|
||||||
for i := 0; i < seriesCount; i++ {
|
for i := 0; i < seriesCount; i++ {
|
||||||
lset := labels.Labels{
|
lset := []labels.Label{
|
||||||
{Name: "a", Value: lName},
|
{Name: "a", Value: lName},
|
||||||
{Name: "instance", Value: "localhost" + strconv.Itoa(i)},
|
{Name: "instance", Value: "localhost" + strconv.Itoa(i)},
|
||||||
{Name: "job", Value: "prometheus"},
|
{Name: "job", Value: "prometheus"},
|
||||||
|
@ -507,28 +505,28 @@ func TestStorage_DuplicateExemplarsIgnored(t *testing.T) {
|
||||||
app := s.Appender(context.Background())
|
app := s.Appender(context.Background())
|
||||||
defer s.Close()
|
defer s.Close()
|
||||||
|
|
||||||
sRef, err := app.Append(0, labels.Labels{{Name: "a", Value: "1"}}, 0, 0)
|
sRef, err := app.Append(0, labels.FromStrings("a", "1"), 0, 0)
|
||||||
require.NoError(t, err, "should not reject valid series")
|
require.NoError(t, err, "should not reject valid series")
|
||||||
|
|
||||||
// Write a few exemplars to our appender and call Commit().
|
// Write a few exemplars to our appender and call Commit().
|
||||||
// If the Labels, Value or Timestamp are different than the last exemplar,
|
// If the Labels, Value or Timestamp are different than the last exemplar,
|
||||||
// then a new one should be appended; Otherwise, it should be skipped.
|
// then a new one should be appended; Otherwise, it should be skipped.
|
||||||
e := exemplar.Exemplar{Labels: labels.Labels{{Name: "a", Value: "1"}}, Value: 20, Ts: 10, HasTs: true}
|
e := exemplar.Exemplar{Labels: labels.FromStrings("a", "1"), Value: 20, Ts: 10, HasTs: true}
|
||||||
_, _ = app.AppendExemplar(sRef, nil, e)
|
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
_, _ = app.AppendExemplar(sRef, nil, e)
|
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
|
|
||||||
e.Labels = labels.Labels{{Name: "b", Value: "2"}}
|
e.Labels = labels.FromStrings("b", "2")
|
||||||
_, _ = app.AppendExemplar(sRef, nil, e)
|
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
_, _ = app.AppendExemplar(sRef, nil, e)
|
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
_, _ = app.AppendExemplar(sRef, nil, e)
|
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
|
|
||||||
e.Value = 42
|
e.Value = 42
|
||||||
_, _ = app.AppendExemplar(sRef, nil, e)
|
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
_, _ = app.AppendExemplar(sRef, nil, e)
|
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
|
|
||||||
e.Ts = 25
|
e.Ts = 25
|
||||||
_, _ = app.AppendExemplar(sRef, nil, e)
|
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
_, _ = app.AppendExemplar(sRef, nil, e)
|
_, _ = app.AppendExemplar(sRef, labels.EmptyLabels(), e)
|
||||||
|
|
||||||
require.NoError(t, app.Commit())
|
require.NoError(t, app.Commit())
|
||||||
|
|
||||||
|
|
|
@ -79,10 +79,10 @@ type IndexReader interface {
|
||||||
// by the label set of the underlying series.
|
// by the label set of the underlying series.
|
||||||
SortedPostings(index.Postings) index.Postings
|
SortedPostings(index.Postings) index.Postings
|
||||||
|
|
||||||
// Series populates the given labels and chunk metas for the series identified
|
// Series populates the given builder and chunk metas for the series identified
|
||||||
// by the reference.
|
// by the reference.
|
||||||
// Returns storage.ErrNotFound if the ref does not resolve to a known series.
|
// Returns storage.ErrNotFound if the ref does not resolve to a known series.
|
||||||
Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error
|
Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error
|
||||||
|
|
||||||
// LabelNames returns all the unique label names present in the index in sorted order.
|
// LabelNames returns all the unique label names present in the index in sorted order.
|
||||||
LabelNames(matchers ...*labels.Matcher) ([]string, error)
|
LabelNames(matchers ...*labels.Matcher) ([]string, error)
|
||||||
|
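A toy illustration (not Prometheus code) of how an implementation can satisfy the new Series signature: copy the stored label set into the caller's ScratchBuilder with Assign and let the caller decide when to materialise it. toyReader and its fields are made up for the example.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

// toyReader is a made-up stand-in for an index reader. It copies the stored
// label set into the caller's ScratchBuilder with Assign, the same pattern
// headIndexReader.Series uses further down in this diff.
type toyReader struct {
	series map[uint64]labels.Labels
}

func (r *toyReader) Series(ref uint64, builder *labels.ScratchBuilder) error {
	s, ok := r.series[ref]
	if !ok {
		return fmt.Errorf("series %d not found", ref)
	}
	builder.Assign(s)
	return nil
}

func main() {
	r := &toyReader{series: map[uint64]labels.Labels{
		1: labels.FromStrings("__name__", "up", "job", "prometheus"),
	}}
	var b labels.ScratchBuilder
	if err := r.Series(1, &b); err == nil {
		fmt.Println(b.Labels())
	}
}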
@ -499,8 +499,8 @@ func (r blockIndexReader) SortedPostings(p index.Postings) index.Postings {
|
||||||
return r.ir.SortedPostings(p)
|
return r.ir.SortedPostings(p)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r blockIndexReader) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
|
func (r blockIndexReader) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
|
||||||
if err := r.ir.Series(ref, lset, chks); err != nil {
|
if err := r.ir.Series(ref, builder, chks); err != nil {
|
||||||
return errors.Wrapf(err, "block: %s", r.b.Meta().ULID)
|
return errors.Wrapf(err, "block: %s", r.b.Meta().ULID)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
|
@ -561,12 +561,12 @@ func (pb *Block) Delete(mint, maxt int64, ms ...*labels.Matcher) error {
|
||||||
// Choose only valid postings which have chunks in the time-range.
|
// Choose only valid postings which have chunks in the time-range.
|
||||||
stones := tombstones.NewMemTombstones()
|
stones := tombstones.NewMemTombstones()
|
||||||
|
|
||||||
var lset labels.Labels
|
|
||||||
var chks []chunks.Meta
|
var chks []chunks.Meta
|
||||||
|
var builder labels.ScratchBuilder
|
||||||
|
|
||||||
Outer:
|
Outer:
|
||||||
for p.Next() {
|
for p.Next() {
|
||||||
err := ir.Series(p.At(), &lset, &chks)
|
err := ir.Series(p.At(), &builder, &chks)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
|
@ -215,10 +215,10 @@ func TestLabelValuesWithMatchers(t *testing.T) {
|
||||||
|
|
||||||
var seriesEntries []storage.Series
|
var seriesEntries []storage.Series
|
||||||
for i := 0; i < 100; i++ {
|
for i := 0; i < 100; i++ {
|
||||||
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
|
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
|
||||||
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
|
"tens", fmt.Sprintf("value%d", i/10),
|
||||||
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
|
"unique", fmt.Sprintf("value%d", i),
|
||||||
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
|
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
|
||||||
}
|
}
|
||||||
|
|
||||||
blockDir := createBlock(t, tmpdir, seriesEntries)
|
blockDir := createBlock(t, tmpdir, seriesEntries)
|
||||||
|
@ -372,11 +372,11 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) {
|
||||||
for i := 0; i < metricCount; i++ {
|
for i := 0; i < metricCount; i++ {
|
||||||
// Note these series are not created in sort order: 'value2' sorts after 'value10'.
|
// Note these series are not created in sort order: 'value2' sorts after 'value10'.
|
||||||
// This makes a big difference to the benchmark timing.
|
// This makes a big difference to the benchmark timing.
|
||||||
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
|
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
|
||||||
{Name: "a_unique", Value: fmt.Sprintf("value%d", i)},
|
"a_unique", fmt.Sprintf("value%d", i),
|
||||||
{Name: "b_tens", Value: fmt.Sprintf("value%d", i/(metricCount/10))},
|
"b_tens", fmt.Sprintf("value%d", i/(metricCount/10)),
|
||||||
{Name: "c_ninety", Value: fmt.Sprintf("value%d", i/(metricCount/10)/9)}, // "0" for the first 90%, then "1"
|
"c_ninety", fmt.Sprintf("value%d", i/(metricCount/10)/9), // "0" for the first 90%, then "1"
|
||||||
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
|
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
|
||||||
}
|
}
|
||||||
|
|
||||||
blockDir := createBlock(b, tmpdir, seriesEntries)
|
blockDir := createBlock(b, tmpdir, seriesEntries)
|
||||||
|
@ -410,23 +410,23 @@ func TestLabelNamesWithMatchers(t *testing.T) {
|
||||||
|
|
||||||
var seriesEntries []storage.Series
|
var seriesEntries []storage.Series
|
||||||
for i := 0; i < 100; i++ {
|
for i := 0; i < 100; i++ {
|
||||||
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
|
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
|
||||||
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
|
"unique", fmt.Sprintf("value%d", i),
|
||||||
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
|
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
|
||||||
|
|
||||||
if i%10 == 0 {
|
if i%10 == 0 {
|
||||||
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
|
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
|
||||||
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
|
"tens", fmt.Sprintf("value%d", i/10),
|
||||||
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
|
"unique", fmt.Sprintf("value%d", i),
|
||||||
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
|
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
|
||||||
}
|
}
|
||||||
|
|
||||||
if i%20 == 0 {
|
if i%20 == 0 {
|
||||||
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
|
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.FromStrings(
|
||||||
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
|
"tens", fmt.Sprintf("value%d", i/10),
|
||||||
{Name: "twenties", Value: fmt.Sprintf("value%d", i/20)},
|
"twenties", fmt.Sprintf("value%d", i/20),
|
||||||
{Name: "unique", Value: fmt.Sprintf("value%d", i)},
|
"unique", fmt.Sprintf("value%d", i),
|
||||||
}, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
|
), []tsdbutil.Sample{sample{100, 0, nil, nil}}))
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1478,11 +1478,11 @@ func TestSparseHistogramSpaceSavings(t *testing.T) {
|
||||||
|
|
||||||
for sid, schema := range allSchemas {
|
for sid, schema := range allSchemas {
|
||||||
for i := 0; i < c.numSeriesPerSchema; i++ {
|
for i := 0; i < c.numSeriesPerSchema; i++ {
|
||||||
lbls := labels.Labels{
|
lbls := labels.FromStrings(
|
||||||
{Name: "__name__", Value: fmt.Sprintf("rpc_durations_%d_histogram_seconds", i)},
|
"__name__", fmt.Sprintf("rpc_durations_%d_histogram_seconds", i),
|
||||||
{Name: "instance", Value: "localhost:8080"},
|
"instance", "localhost:8080",
|
||||||
{Name: "job", Value: fmt.Sprintf("sparse_histogram_schema_%s", schemaDescription[sid])},
|
"job", fmt.Sprintf("sparse_histogram_schema_%s", schemaDescription[sid]),
|
||||||
}
|
)
|
||||||
allSparseSeries = append(allSparseSeries, struct {
|
allSparseSeries = append(allSparseSeries, struct {
|
||||||
baseLabels labels.Labels
|
baseLabels labels.Labels
|
||||||
hists []*histogram.Histogram
|
hists []*histogram.Histogram
|
||||||
|
@ -1546,21 +1546,20 @@ func TestSparseHistogramSpaceSavings(t *testing.T) {
 				for it.Next() {
 					numOldSeriesPerHistogram++
 					b := it.At()
-					lbls := append(ah.baseLabels, labels.Label{Name: "le", Value: fmt.Sprintf("%.16f", b.Upper)})
+					lbls := labels.NewBuilder(ah.baseLabels).Set("le", fmt.Sprintf("%.16f", b.Upper)).Labels(labels.EmptyLabels())
 					refs[itIdx], err = oldApp.Append(refs[itIdx], lbls, ts, float64(b.Count))
 					require.NoError(t, err)
 					itIdx++
 				}
+				baseName := ah.baseLabels.Get(labels.MetricName)
 				// _count metric.
-				countLbls := ah.baseLabels.Copy()
-				countLbls[0].Value = countLbls[0].Value + "_count"
+				countLbls := labels.NewBuilder(ah.baseLabels).Set(labels.MetricName, baseName+"_count").Labels(labels.EmptyLabels())
 				_, err = oldApp.Append(0, countLbls, ts, float64(h.Count))
 				require.NoError(t, err)
 				numOldSeriesPerHistogram++
 
 				// _sum metric.
-				sumLbls := ah.baseLabels.Copy()
-				sumLbls[0].Value = sumLbls[0].Value + "_sum"
+				sumLbls := labels.NewBuilder(ah.baseLabels).Set(labels.MetricName, baseName+"_sum").Labels(labels.EmptyLabels())
 				_, err = oldApp.Append(0, sumLbls, ts, h.Sum)
 				require.NoError(t, err)
 				numOldSeriesPerHistogram++
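A short sketch of the derive-a-new-series-name pattern used above, since label sets can no longer be mutated in place; the metric and job names are placeholders.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	base := labels.FromStrings("__name__", "rpc_durations_seconds", "job", "api")

	// Read the metric name with Get, then derive _count and _sum series with
	// the Builder instead of editing the slice element in place.
	name := base.Get(labels.MetricName)
	countLbls := labels.NewBuilder(base).Set(labels.MetricName, name+"_count").Labels(labels.EmptyLabels())
	sumLbls := labels.NewBuilder(base).Set(labels.MetricName, name+"_sum").Labels(labels.EmptyLabels())
	fmt.Println(countLbls)
	fmt.Println(sumLbls)
}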
@ -1002,7 +1002,7 @@ func (a dbAppender) GetRef(lset labels.Labels, hash uint64) (storage.SeriesRef,
|
||||||
if g, ok := a.Appender.(storage.GetRef); ok {
|
if g, ok := a.Appender.(storage.GetRef); ok {
|
||||||
return g.GetRef(lset, hash)
|
return g.GetRef(lset, hash)
|
||||||
}
|
}
|
||||||
return 0, nil
|
return 0, labels.EmptyLabels()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a dbAppender) Commit() error {
|
func (a dbAppender) Commit() error {
|
||||||
|
|
|
@ -478,9 +478,9 @@ func TestAmendDatapointCausesError(t *testing.T) {
|
||||||
require.NoError(t, app.Commit())
|
require.NoError(t, app.Commit())
|
||||||
|
|
||||||
app = db.Appender(ctx)
|
app = db.Appender(ctx)
|
||||||
_, err = app.Append(0, labels.Labels{{Name: "a", Value: "b"}}, 0, 0)
|
_, err = app.Append(0, labels.FromStrings("a", "b"), 0, 0)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
_, err = app.Append(0, labels.Labels{{Name: "a", Value: "b"}}, 0, 1)
|
_, err = app.Append(0, labels.FromStrings("a", "b"), 0, 1)
|
||||||
require.Equal(t, storage.ErrDuplicateSampleForTimestamp, err)
|
require.Equal(t, storage.ErrDuplicateSampleForTimestamp, err)
|
||||||
require.NoError(t, app.Rollback())
|
require.NoError(t, app.Rollback())
|
||||||
|
|
||||||
|
@ -498,15 +498,15 @@ func TestAmendDatapointCausesError(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
app = db.Appender(ctx)
|
app = db.Appender(ctx)
|
||||||
_, err = app.AppendHistogram(0, labels.Labels{{Name: "a", Value: "c"}}, 0, h.Copy())
|
_, err = app.AppendHistogram(0, labels.FromStrings("a", "c"), 0, h.Copy())
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.NoError(t, app.Commit())
|
require.NoError(t, app.Commit())
|
||||||
|
|
||||||
app = db.Appender(ctx)
|
app = db.Appender(ctx)
|
||||||
_, err = app.AppendHistogram(0, labels.Labels{{Name: "a", Value: "c"}}, 0, h.Copy())
|
_, err = app.AppendHistogram(0, labels.FromStrings("a", "c"), 0, h.Copy())
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
h.Schema = 2
|
h.Schema = 2
|
||||||
_, err = app.AppendHistogram(0, labels.Labels{{Name: "a", Value: "c"}}, 0, h.Copy())
|
_, err = app.AppendHistogram(0, labels.FromStrings("a", "c"), 0, h.Copy())
|
||||||
require.Equal(t, storage.ErrDuplicateSampleForTimestamp, err)
|
require.Equal(t, storage.ErrDuplicateSampleForTimestamp, err)
|
||||||
require.NoError(t, app.Rollback())
|
require.NoError(t, app.Rollback())
|
||||||
}
|
}
|
||||||
|
@ -1830,6 +1830,8 @@ func TestChunkAtBlockBoundary(t *testing.T) {
 	err = db.Compact()
 	require.NoError(t, err)
 
+	var builder labels.ScratchBuilder
+
 	for _, block := range db.Blocks() {
 		r, err := block.Index()
 		require.NoError(t, err)
@ -1841,15 +1843,12 @@ func TestChunkAtBlockBoundary(t *testing.T) {
 		p, err := r.Postings(k, v)
 		require.NoError(t, err)
 
-		var (
-			lset labels.Labels
-			chks []chunks.Meta
-		)
+		var chks []chunks.Meta
 
 		chunkCount := 0
 		for p.Next() {
-			err = r.Series(p.At(), &lset, &chks)
+			err = r.Series(p.At(), &builder, &chks)
 			require.NoError(t, err)
 			for _, c := range chks {
 				require.True(t, meta.MinTime <= c.MinTime && c.MaxTime <= meta.MaxTime,
|
|
|
@ -226,13 +226,16 @@ func (ce *CircularExemplarStorage) validateExemplar(key []byte, e exemplar.Exemp
|
||||||
// Exemplar label length does not include chars involved in text rendering such as quotes
|
// Exemplar label length does not include chars involved in text rendering such as quotes
|
||||||
// equals sign, or commas. See definition of const ExemplarMaxLabelLength.
|
// equals sign, or commas. See definition of const ExemplarMaxLabelLength.
|
||||||
labelSetLen := 0
|
labelSetLen := 0
|
||||||
for _, l := range e.Labels {
|
if err := e.Labels.Validate(func(l labels.Label) error {
|
||||||
labelSetLen += utf8.RuneCountInString(l.Name)
|
labelSetLen += utf8.RuneCountInString(l.Name)
|
||||||
labelSetLen += utf8.RuneCountInString(l.Value)
|
labelSetLen += utf8.RuneCountInString(l.Value)
|
||||||
|
|
||||||
if labelSetLen > exemplar.ExemplarMaxLabelSetLength {
|
if labelSetLen > exemplar.ExemplarMaxLabelSetLength {
|
||||||
return storage.ErrExemplarLabelLength
|
return storage.ErrExemplarLabelLength
|
||||||
}
|
}
|
||||||
|
return nil
|
||||||
|
}); err != nil {
|
||||||
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
idx, ok := ce.index[string(key)]
|
idx, ok := ce.index[string(key)]
|
||||||
|
|
|
@ -102,7 +102,7 @@ func (a *initAppender) GetRef(lset labels.Labels, hash uint64) (storage.SeriesRe
|
||||||
if g, ok := a.app.(storage.GetRef); ok {
|
if g, ok := a.app.(storage.GetRef); ok {
|
||||||
return g.GetRef(lset, hash)
|
return g.GetRef(lset, hash)
|
||||||
}
|
}
|
||||||
return 0, nil
|
return 0, labels.EmptyLabels()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *initAppender) Commit() error {
|
func (a *initAppender) Commit() error {
|
||||||
|
@ -312,7 +312,7 @@ func (a *headAppender) Append(ref storage.SeriesRef, lset labels.Labels, t int64
|
||||||
if s == nil {
|
if s == nil {
|
||||||
// Ensure no empty labels have gotten through.
|
// Ensure no empty labels have gotten through.
|
||||||
lset = lset.WithoutEmpty()
|
lset = lset.WithoutEmpty()
|
||||||
if len(lset) == 0 {
|
if lset.IsEmpty() {
|
||||||
return 0, errors.Wrap(ErrInvalidSample, "empty labelset")
|
return 0, errors.Wrap(ErrInvalidSample, "empty labelset")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -494,7 +494,7 @@ func (a *headAppender) AppendHistogram(ref storage.SeriesRef, lset labels.Labels
|
||||||
if s == nil {
|
if s == nil {
|
||||||
// Ensure no empty labels have gotten through.
|
// Ensure no empty labels have gotten through.
|
||||||
lset = lset.WithoutEmpty()
|
lset = lset.WithoutEmpty()
|
||||||
if len(lset) == 0 {
|
if lset.IsEmpty() {
|
||||||
return 0, errors.Wrap(ErrInvalidSample, "empty labelset")
|
return 0, errors.Wrap(ErrInvalidSample, "empty labelset")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -650,7 +650,7 @@ var _ storage.GetRef = &headAppender{}
|
||||||
func (a *headAppender) GetRef(lset labels.Labels, hash uint64) (storage.SeriesRef, labels.Labels) {
|
func (a *headAppender) GetRef(lset labels.Labels, hash uint64) (storage.SeriesRef, labels.Labels) {
|
||||||
s := a.head.series.getByHash(hash, lset)
|
s := a.head.series.getByHash(hash, lset)
|
||||||
if s == nil {
|
if s == nil {
|
||||||
return 0, nil
|
return 0, labels.EmptyLabels()
|
||||||
}
|
}
|
||||||
// returned labels must be suitable to pass to Append()
|
// returned labels must be suitable to pass to Append()
|
||||||
return storage.SeriesRef(s.ref), s.lset
|
return storage.SeriesRef(s.ref), s.lset
|
||||||
|
|
|
@ -148,14 +148,14 @@ func (h *headIndexReader) SortedPostings(p index.Postings) index.Postings {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Series returns the series for the given reference.
|
// Series returns the series for the given reference.
|
||||||
func (h *headIndexReader) Series(ref storage.SeriesRef, lbls *labels.Labels, chks *[]chunks.Meta) error {
|
func (h *headIndexReader) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
|
||||||
s := h.head.series.getByID(chunks.HeadSeriesRef(ref))
|
s := h.head.series.getByID(chunks.HeadSeriesRef(ref))
|
||||||
|
|
||||||
if s == nil {
|
if s == nil {
|
||||||
h.head.metrics.seriesNotFound.Inc()
|
h.head.metrics.seriesNotFound.Inc()
|
||||||
return storage.ErrNotFound
|
return storage.ErrNotFound
|
||||||
}
|
}
|
||||||
*lbls = append((*lbls)[:0], s.lset...)
|
builder.Assign(s.lset)
|
||||||
|
|
||||||
s.Lock()
|
s.Lock()
|
||||||
defer s.Unlock()
|
defer s.Unlock()
|
||||||
|
@ -222,9 +222,9 @@ func (h *headIndexReader) LabelNamesFor(ids ...storage.SeriesRef) ([]string, err
|
||||||
if memSeries == nil {
|
if memSeries == nil {
|
||||||
return nil, storage.ErrNotFound
|
return nil, storage.ErrNotFound
|
||||||
}
|
}
|
||||||
for _, lbl := range memSeries.lset {
|
memSeries.lset.Range(func(lbl labels.Label) {
|
||||||
namesMap[lbl.Name] = struct{}{}
|
namesMap[lbl.Name] = struct{}{}
|
||||||
}
|
})
|
||||||
}
|
}
|
||||||
names := make([]string, 0, len(namesMap))
|
names := make([]string, 0, len(namesMap))
|
||||||
for name := range namesMap {
|
for name := range namesMap {
|
||||||
|
|
|
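A standalone sketch of the Range-based name collection used in LabelNamesFor above; the example label sets are invented.

package main

import (
	"fmt"
	"sort"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	sets := []labels.Labels{
		labels.FromStrings("__name__", "up", "job", "api"),
		labels.FromStrings("__name__", "up", "instance", "a:9090"),
	}

	// Range replaces `for _, lbl := range lset` now that Labels is treated
	// as an opaque type.
	namesMap := map[string]struct{}{}
	for _, ls := range sets {
		ls.Range(func(l labels.Label) {
			namesMap[l.Name] = struct{}{}
		})
	}
	names := make([]string, 0, len(namesMap))
	for n := range namesMap {
		names = append(names, n)
	}
	sort.Strings(names)
	fmt.Println(names) // [__name__ instance job]
}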
@ -388,7 +388,12 @@ func TestHead_HighConcurrencyReadAndWrite(t *testing.T) {
 
 				querySeriesRef = (querySeriesRef + 1) % seriesCnt
 				lbls := labelSets[querySeriesRef]
-				samples, err := queryHead(ts-qryRange, ts, lbls[0])
+				// lbls has a single entry; extract it so we can run a query.
+				var lbl labels.Label
+				lbls.Range(func(l labels.Label) {
+					lbl = l
+				})
+				samples, err := queryHead(ts-qryRange, ts, lbl)
 				if err != nil {
 					return false, err
 				}
@ -1133,8 +1138,9 @@ func TestDelete_e2e(t *testing.T) {
 			require.NoError(t, hb.Delete(r.Mint, r.Maxt, del.ms...))
 		}
 		matched := labels.Slice{}
-		for _, ls := range lbls {
+		for _, l := range lbls {
 			s := labels.Selector(del.ms)
+			ls := labels.New(l...)
 			if s.Matches(ls) {
 				matched = append(matched, ls)
 			}
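A small sketch of turning a plain []labels.Label fixture into a sorted labels.Labels with labels.New and matching it with a Selector, as the test above now does; the matcher and values are illustrative.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	// A fixture kept as []labels.Label becomes a sorted labels.Labels via New.
	raw := []labels.Label{{Name: "job", Value: "prometheus"}, {Name: "a", Value: "b"}}
	ls := labels.New(raw...)

	sel := labels.Selector{labels.MustNewMatcher(labels.MatchEqual, "a", "b")}
	fmt.Println(ls, sel.Matches(ls)) // {a="b", job="prometheus"} true
}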
@ -1446,12 +1452,12 @@ func TestGCChunkAccess(t *testing.T) {
|
||||||
|
|
||||||
idx := h.indexRange(0, 1500)
|
idx := h.indexRange(0, 1500)
|
||||||
var (
|
var (
|
||||||
lset labels.Labels
|
chunks []chunks.Meta
|
||||||
chunks []chunks.Meta
|
builder labels.ScratchBuilder
|
||||||
)
|
)
|
||||||
require.NoError(t, idx.Series(1, &lset, &chunks))
|
require.NoError(t, idx.Series(1, &builder, &chunks))
|
||||||
|
|
||||||
require.Equal(t, labels.FromStrings("a", "1"), lset)
|
require.Equal(t, labels.FromStrings("a", "1"), builder.Labels())
|
||||||
require.Equal(t, 2, len(chunks))
|
require.Equal(t, 2, len(chunks))
|
||||||
|
|
||||||
cr, err := h.chunksRange(0, 1500, nil)
|
cr, err := h.chunksRange(0, 1500, nil)
|
||||||
|
@ -1499,12 +1505,12 @@ func TestGCSeriesAccess(t *testing.T) {
|
||||||
|
|
||||||
idx := h.indexRange(0, 2000)
|
idx := h.indexRange(0, 2000)
|
||||||
var (
|
var (
|
||||||
lset labels.Labels
|
chunks []chunks.Meta
|
||||||
chunks []chunks.Meta
|
builder labels.ScratchBuilder
|
||||||
)
|
)
|
||||||
require.NoError(t, idx.Series(1, &lset, &chunks))
|
require.NoError(t, idx.Series(1, &builder, &chunks))
|
||||||
|
|
||||||
require.Equal(t, labels.FromStrings("a", "1"), lset)
|
require.Equal(t, labels.FromStrings("a", "1"), builder.Labels())
|
||||||
require.Equal(t, 2, len(chunks))
|
require.Equal(t, 2, len(chunks))
|
||||||
|
|
||||||
cr, err := h.chunksRange(0, 2000, nil)
|
cr, err := h.chunksRange(0, 2000, nil)
|
||||||
|
@ -2806,7 +2812,7 @@ func TestWaitForPendingReadersInTimeRange(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAppendHistogram(t *testing.T) {
|
func TestAppendHistogram(t *testing.T) {
|
||||||
l := labels.Labels{{Name: "a", Value: "b"}}
|
l := labels.FromStrings("a", "b")
|
||||||
for _, numHistograms := range []int{1, 10, 150, 200, 250, 300} {
|
for _, numHistograms := range []int{1, 10, 150, 200, 250, 300} {
|
||||||
t.Run(fmt.Sprintf("%d", numHistograms), func(t *testing.T) {
|
t.Run(fmt.Sprintf("%d", numHistograms), func(t *testing.T) {
|
||||||
head, _ := newTestHead(t, 1000, false, false)
|
head, _ := newTestHead(t, 1000, false, false)
|
||||||
|
@ -2861,7 +2867,7 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
|
||||||
require.NoError(t, head.Init(0))
|
require.NoError(t, head.Init(0))
|
||||||
|
|
||||||
// Series with only histograms.
|
// Series with only histograms.
|
||||||
s1 := labels.Labels{{Name: "a", Value: "b1"}}
|
s1 := labels.FromStrings("a", "b1")
|
||||||
k1 := s1.String()
|
k1 := s1.String()
|
||||||
numHistograms := 450
|
numHistograms := 450
|
||||||
exp := map[string][]tsdbutil.Sample{}
|
exp := map[string][]tsdbutil.Sample{}
|
||||||
|
@ -2893,7 +2899,7 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
|
||||||
require.Greater(t, expHeadChunkSamples, 0)
|
require.Greater(t, expHeadChunkSamples, 0)
|
||||||
|
|
||||||
// Series with mix of histograms and float.
|
// Series with mix of histograms and float.
|
||||||
s2 := labels.Labels{{Name: "a", Value: "b2"}}
|
s2 := labels.FromStrings("a", "b2")
|
||||||
k2 := s2.String()
|
k2 := s2.String()
|
||||||
app = head.Appender(context.Background())
|
app = head.Appender(context.Background())
|
||||||
ts := 0
|
ts := 0
|
||||||
|
@ -3254,7 +3260,7 @@ func TestHistogramMetrics(t *testing.T) {
|
||||||
|
|
||||||
for x := 0; x < 5; x++ {
|
for x := 0; x < 5; x++ {
|
||||||
expHSeries++
|
expHSeries++
|
||||||
l := labels.Labels{{Name: "a", Value: fmt.Sprintf("b%d", x)}}
|
l := labels.FromStrings("a", fmt.Sprintf("b%d", x))
|
||||||
for i, h := range GenerateTestHistograms(10) {
|
for i, h := range GenerateTestHistograms(10) {
|
||||||
app := head.Appender(context.Background())
|
app := head.Appender(context.Background())
|
||||||
_, err := app.AppendHistogram(0, l, int64(i), h)
|
_, err := app.AppendHistogram(0, l, int64(i), h)
|
||||||
|
@ -3277,7 +3283,7 @@ func TestHistogramMetrics(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestHistogramStaleSample(t *testing.T) {
|
func TestHistogramStaleSample(t *testing.T) {
|
||||||
l := labels.Labels{{Name: "a", Value: "b"}}
|
l := labels.FromStrings("a", "b")
|
||||||
numHistograms := 20
|
numHistograms := 20
|
||||||
head, _ := newTestHead(t, 100000, false, false)
|
head, _ := newTestHead(t, 100000, false, false)
|
||||||
t.Cleanup(func() {
|
t.Cleanup(func() {
|
||||||
|
@ -3372,7 +3378,7 @@ func TestHistogramStaleSample(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestHistogramCounterResetHeader(t *testing.T) {
|
func TestHistogramCounterResetHeader(t *testing.T) {
|
||||||
l := labels.Labels{{Name: "a", Value: "b"}}
|
l := labels.FromStrings("a", "b")
|
||||||
head, _ := newTestHead(t, 1000, false, false)
|
head, _ := newTestHead(t, 1000, false, false)
|
||||||
t.Cleanup(func() {
|
t.Cleanup(func() {
|
||||||
require.NoError(t, head.Close())
|
require.NoError(t, head.Close())
|
||||||
|
@ -3484,7 +3490,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
|
||||||
db.DisableCompactions()
|
db.DisableCompactions()
|
||||||
|
|
||||||
hists := GenerateTestHistograms(10)
|
hists := GenerateTestHistograms(10)
|
||||||
lbls := labels.Labels{{Name: "a", Value: "b"}}
|
lbls := labels.FromStrings("a", "b")
|
||||||
|
|
||||||
type result struct {
|
type result struct {
|
||||||
t int64
|
t int64
|
||||||
|
|
|
@ -423,7 +423,7 @@ func (w *Writer) AddSeries(ref storage.SeriesRef, lset labels.Labels, chunks ...
|
||||||
return errors.Errorf("out-of-order series added with label set %q", lset)
|
return errors.Errorf("out-of-order series added with label set %q", lset)
|
||||||
}
|
}
|
||||||
|
|
||||||
if ref < w.lastRef && len(w.lastSeries) != 0 {
|
if ref < w.lastRef && !w.lastSeries.IsEmpty() {
|
||||||
return errors.Errorf("series with reference greater than %d already added", ref)
|
return errors.Errorf("series with reference greater than %d already added", ref)
|
||||||
}
|
}
|
||||||
// We add padding to 16 bytes to increase the addressable space we get through 4 byte
|
// We add padding to 16 bytes to increase the addressable space we get through 4 byte
|
||||||
|
@ -437,9 +437,9 @@ func (w *Writer) AddSeries(ref storage.SeriesRef, lset labels.Labels, chunks ...
|
||||||
}
|
}
|
||||||
|
|
||||||
w.buf2.Reset()
|
w.buf2.Reset()
|
||||||
w.buf2.PutUvarint(len(lset))
|
w.buf2.PutUvarint(lset.Len())
|
||||||
|
|
||||||
for _, l := range lset {
|
if err := lset.Validate(func(l labels.Label) error {
|
||||||
var err error
|
var err error
|
||||||
cacheEntry, ok := w.symbolCache[l.Name]
|
cacheEntry, ok := w.symbolCache[l.Name]
|
||||||
nameIndex := cacheEntry.index
|
nameIndex := cacheEntry.index
|
||||||
|
@ -465,6 +465,9 @@ func (w *Writer) AddSeries(ref storage.SeriesRef, lset labels.Labels, chunks ...
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
w.buf2.PutUvarint32(valueIndex)
|
w.buf2.PutUvarint32(valueIndex)
|
||||||
|
return nil
|
||||||
|
}); err != nil {
|
||||||
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
w.buf2.PutUvarint(len(chunks))
|
w.buf2.PutUvarint(len(chunks))
|
||||||
|
@ -496,7 +499,7 @@ func (w *Writer) AddSeries(ref storage.SeriesRef, lset labels.Labels, chunks ...
|
||||||
return errors.Wrap(err, "write series data")
|
return errors.Wrap(err, "write series data")
|
||||||
}
|
}
|
||||||
|
|
||||||
w.lastSeries = append(w.lastSeries[:0], lset...)
|
w.lastSeries.CopyFrom(lset)
|
||||||
w.lastRef = ref
|
w.lastRef = ref
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
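A brief sketch of the CopyFrom / IsEmpty / Len idioms the index writer switches to; the label values are placeholders.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	lset := labels.FromStrings("__name__", "up", "job", "api")

	// lastSeries.CopyFrom(lset) replaces append(lastSeries[:0], lset...),
	// and length/emptiness checks go through Len() and IsEmpty().
	var last labels.Labels
	fmt.Println(last.IsEmpty()) // true
	last.CopyFrom(lset)
	fmt.Println(last.IsEmpty(), last.Len()) // false 2
}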
@ -1593,8 +1596,8 @@ func (r *Reader) LabelValueFor(id storage.SeriesRef, label string) (string, erro
|
||||||
return value, nil
|
return value, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Series reads the series with the given ID and writes its labels and chunks into lbls and chks.
|
// Series reads the series with the given ID and writes its labels and chunks into builder and chks.
|
||||||
func (r *Reader) Series(id storage.SeriesRef, lbls *labels.Labels, chks *[]chunks.Meta) error {
|
func (r *Reader) Series(id storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
|
||||||
offset := id
|
offset := id
|
||||||
// In version 2 series IDs are no longer exact references but series are 16-byte padded
|
// In version 2 series IDs are no longer exact references but series are 16-byte padded
|
||||||
// and the ID is the multiple of 16 of the actual position.
|
// and the ID is the multiple of 16 of the actual position.
|
||||||
|
@ -1605,7 +1608,7 @@ func (r *Reader) Series(id storage.SeriesRef, lbls *labels.Labels, chks *[]chunk
|
||||||
if d.Err() != nil {
|
if d.Err() != nil {
|
||||||
return d.Err()
|
return d.Err()
|
||||||
}
|
}
|
||||||
return errors.Wrap(r.dec.Series(d.Get(), lbls, chks), "read series")
|
return errors.Wrap(r.dec.Series(d.Get(), builder, chks), "read series")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *Reader) Postings(name string, values ...string) (Postings, error) {
|
func (r *Reader) Postings(name string, values ...string) (Postings, error) {
|
||||||
|
@ -1832,9 +1835,10 @@ func (dec *Decoder) LabelValueFor(b []byte, label string) (string, error) {
|
||||||
return "", d.Err()
|
return "", d.Err()
|
||||||
}
|
}
|
||||||
|
|
||||||
// Series decodes a series entry from the given byte slice into lset and chks.
|
// Series decodes a series entry from the given byte slice into builder and chks.
|
||||||
func (dec *Decoder) Series(b []byte, lbls *labels.Labels, chks *[]chunks.Meta) error {
|
// Previous contents of lbls can be overwritten - make sure you copy before retaining.
|
||||||
*lbls = (*lbls)[:0]
|
func (dec *Decoder) Series(b []byte, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
|
||||||
|
builder.Reset()
|
||||||
*chks = (*chks)[:0]
|
*chks = (*chks)[:0]
|
||||||
|
|
||||||
d := encoding.Decbuf{B: b}
|
d := encoding.Decbuf{B: b}
|
||||||
|
@ -1858,7 +1862,7 @@ func (dec *Decoder) Series(b []byte, lbls *labels.Labels, chks *[]chunks.Meta) e
|
||||||
return errors.Wrap(err, "lookup label value")
|
return errors.Wrap(err, "lookup label value")
|
||||||
}
|
}
|
||||||
|
|
||||||
*lbls = append(*lbls, labels.Label{Name: ln, Value: lv})
|
builder.Add(ln, lv)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read the chunks meta data.
|
// Read the chunks meta data.
|
||||||
|
|
|
@ -68,14 +68,14 @@ func (m mockIndex) AddSeries(ref storage.SeriesRef, l labels.Labels, chunks ...c
|
||||||
if _, ok := m.series[ref]; ok {
|
if _, ok := m.series[ref]; ok {
|
||||||
return errors.Errorf("series with reference %d already added", ref)
|
return errors.Errorf("series with reference %d already added", ref)
|
||||||
}
|
}
|
||||||
for _, lbl := range l {
|
l.Range(func(lbl labels.Label) {
|
||||||
m.symbols[lbl.Name] = struct{}{}
|
m.symbols[lbl.Name] = struct{}{}
|
||||||
m.symbols[lbl.Value] = struct{}{}
|
m.symbols[lbl.Value] = struct{}{}
|
||||||
if _, ok := m.postings[lbl]; !ok {
|
if _, ok := m.postings[lbl]; !ok {
|
||||||
m.postings[lbl] = []storage.SeriesRef{}
|
m.postings[lbl] = []storage.SeriesRef{}
|
||||||
}
|
}
|
||||||
m.postings[lbl] = append(m.postings[lbl], ref)
|
m.postings[lbl] = append(m.postings[lbl], ref)
|
||||||
}
|
})
|
||||||
m.postings[allPostingsKey] = append(m.postings[allPostingsKey], ref)
|
m.postings[allPostingsKey] = append(m.postings[allPostingsKey], ref)
|
||||||
|
|
||||||
s := series{l: l}
|
s := series{l: l}
|
||||||
|
@ -124,12 +124,12 @@ func (m mockIndex) SortedPostings(p Postings) Postings {
|
||||||
return NewListPostings(ep)
|
return NewListPostings(ep)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m mockIndex) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
|
func (m mockIndex) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
|
||||||
s, ok := m.series[ref]
|
s, ok := m.series[ref]
|
||||||
if !ok {
|
if !ok {
|
||||||
return errors.New("not found")
|
return errors.New("not found")
|
||||||
}
|
}
|
||||||
*lset = append((*lset)[:0], s.l...)
|
builder.Assign(s.l)
|
||||||
*chks = append((*chks)[:0], s.chunks...)
|
*chks = append((*chks)[:0], s.chunks...)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
@@ -197,15 +197,15 @@ func TestIndexRW_Postings(t *testing.T) {
     p, err := ir.Postings("a", "1")
     require.NoError(t, err)
 
-    var l labels.Labels
     var c []chunks.Meta
+    var builder labels.ScratchBuilder
 
     for i := 0; p.Next(); i++ {
-        err := ir.Series(p.At(), &l, &c)
+        err := ir.Series(p.At(), &builder, &c)
 
         require.NoError(t, err)
         require.Equal(t, 0, len(c))
-        require.Equal(t, series[i], l)
+        require.Equal(t, series[i], builder.Labels())
     }
     require.NoError(t, p.Err())
 

@@ -311,16 +311,16 @@ func TestPostingsMany(t *testing.T) {
         {in: []string{"126a", "126b", "127", "127a", "127b", "128", "128a", "128b", "129", "129a", "129b"}},
     }
 
+    var builder labels.ScratchBuilder
     for _, c := range cases {
         it, err := ir.Postings("i", c.in...)
         require.NoError(t, err)
 
         got := []string{}
-        var lbls labels.Labels
         var metas []chunks.Meta
         for it.Next() {
-            require.NoError(t, ir.Series(it.At(), &lbls, &metas))
-            got = append(got, lbls.Get("i"))
+            require.NoError(t, ir.Series(it.At(), &builder, &metas))
+            got = append(got, builder.Labels().Get("i"))
         }
         require.NoError(t, it.Err())
         exp := []string{}

@@ -344,10 +344,10 @@ func TestPersistence_index_e2e(t *testing.T) {
 
     symbols := map[string]struct{}{}
     for _, lset := range lbls {
-        for _, l := range lset {
+        lset.Range(func(l labels.Label) {
             symbols[l.Name] = struct{}{}
             symbols[l.Value] = struct{}{}
-        }
+        })
     }
 
     var input indexWriterSeriesSlice

@@ -395,14 +395,14 @@ func TestPersistence_index_e2e(t *testing.T) {
         require.NoError(t, err)
         require.NoError(t, mi.AddSeries(storage.SeriesRef(i), s.labels, s.chunks...))
 
-        for _, l := range s.labels {
+        s.labels.Range(func(l labels.Label) {
             valset, ok := values[l.Name]
             if !ok {
                 valset = map[string]struct{}{}
                 values[l.Name] = valset
             }
             valset[l.Value] = struct{}{}
-        }
+        })
         postings.Add(storage.SeriesRef(i), s.labels)
     }
 

@@ -419,20 +419,20 @@ func TestPersistence_index_e2e(t *testing.T) {
         expp, err := mi.Postings(p.Name, p.Value)
         require.NoError(t, err)
 
-        var lset, explset labels.Labels
         var chks, expchks []chunks.Meta
+        var builder, eBuilder labels.ScratchBuilder
 
         for gotp.Next() {
             require.True(t, expp.Next())
 
             ref := gotp.At()
 
-            err := ir.Series(ref, &lset, &chks)
+            err := ir.Series(ref, &builder, &chks)
             require.NoError(t, err)
 
-            err = mi.Series(expp.At(), &explset, &expchks)
+            err = mi.Series(expp.At(), &eBuilder, &expchks)
             require.NoError(t, err)
-            require.Equal(t, explset, lset)
+            require.Equal(t, eBuilder.Labels(), builder.Labels())
             require.Equal(t, expchks, chks)
         }
         require.False(t, expp.Next(), "Expected no more postings for %q=%q", p.Name, p.Value)
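Also worth noting in the hunk above: builder and eBuilder are declared once, before the postings loop, and reused for every Series call rather than allocating a fresh labels.Labels per iteration. A small sketch of that reuse, with placeholder data standing in for series resolved from postings:

package main

import (
    "fmt"

    "github.com/prometheus/prometheus/model/labels"
)

func main() {
    // Placeholder data standing in for series resolved from postings.
    stored := []labels.Labels{
        labels.FromStrings("__name__", "up", "job", "api"),
        labels.FromStrings("__name__", "up", "job", "db"),
    }

    // One builder, declared outside the loop and reused for every series,
    // mirroring `var builder, eBuilder labels.ScratchBuilder` in the test above.
    var builder labels.ScratchBuilder
    for _, lset := range stored {
        builder.Assign(lset)
        fmt.Println(builder.Labels().Get("job")) // api, then db
    }
}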
@@ -353,9 +353,9 @@ func (p *MemPostings) Iter(f func(labels.Label, Postings) error) error {
 func (p *MemPostings) Add(id storage.SeriesRef, lset labels.Labels) {
     p.mtx.Lock()
 
-    for _, l := range lset {
+    lset.Range(func(l labels.Label) {
         p.addFor(id, l)
-    }
+    })
     p.addFor(id, allPostingsKey)
 
     p.mtx.Unlock()

@@ -47,21 +47,21 @@ func NewOOOHeadIndexReader(head *Head, mint, maxt int64) *OOOHeadIndexReader {
     return &OOOHeadIndexReader{hr}
 }
 
-func (oh *OOOHeadIndexReader) Series(ref storage.SeriesRef, lbls *labels.Labels, chks *[]chunks.Meta) error {
-    return oh.series(ref, lbls, chks, 0)
+func (oh *OOOHeadIndexReader) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
+    return oh.series(ref, builder, chks, 0)
 }
 
 // The passed lastMmapRef tells upto what max m-map chunk that we can consider.
 // If it is 0, it means all chunks need to be considered.
 // If it is non-0, then the oooHeadChunk must not be considered.
-func (oh *OOOHeadIndexReader) series(ref storage.SeriesRef, lbls *labels.Labels, chks *[]chunks.Meta, lastMmapRef chunks.ChunkDiskMapperRef) error {
+func (oh *OOOHeadIndexReader) series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta, lastMmapRef chunks.ChunkDiskMapperRef) error {
     s := oh.head.series.getByID(chunks.HeadSeriesRef(ref))
 
     if s == nil {
         oh.head.metrics.seriesNotFound.Inc()
         return storage.ErrNotFound
     }
-    *lbls = append((*lbls)[:0], s.lset...)
+    builder.Assign(s.lset)
 
     if chks == nil {
         return nil

@@ -400,8 +400,8 @@ func (ir *OOOCompactionHeadIndexReader) SortedPostings(p index.Postings) index.P
     return p
 }
 
-func (ir *OOOCompactionHeadIndexReader) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
-    return ir.ch.oooIR.series(ref, lset, chks, ir.ch.lastMmapRef)
+func (ir *OOOCompactionHeadIndexReader) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
+    return ir.ch.oooIR.series(ref, builder, chks, ir.ch.lastMmapRef)
 }
 
 func (ir *OOOCompactionHeadIndexReader) SortedLabelValues(name string, matchers ...*labels.Matcher) ([]string, error) {

@@ -357,13 +357,13 @@ func TestOOOHeadIndexReader_Series(t *testing.T) {
             ir := NewOOOHeadIndexReader(h, tc.queryMinT, tc.queryMaxT)
 
             var chks []chunks.Meta
-            var respLset labels.Labels
-            err := ir.Series(storage.SeriesRef(s1ID), &respLset, &chks)
+            var b labels.ScratchBuilder
+            err := ir.Series(storage.SeriesRef(s1ID), &b, &chks)
             require.NoError(t, err)
-            require.Equal(t, s1Lset, respLset)
+            require.Equal(t, s1Lset, b.Labels())
             require.Equal(t, expChunks, chks)
 
-            err = ir.Series(storage.SeriesRef(s1ID+1), &respLset, &chks)
+            err = ir.Series(storage.SeriesRef(s1ID+1), &b, &chks)
             require.Equal(t, storage.ErrNotFound, err)
         })
     }

@@ -379,23 +379,15 @@ func TestOOOHeadChunkReader_LabelValues(t *testing.T) {
     app := head.Appender(context.Background())
 
     // Add in-order samples
-    _, err := app.Append(0, labels.Labels{
-        {Name: "foo", Value: "bar1"},
-    }, 100, 1)
+    _, err := app.Append(0, labels.FromStrings("foo", "bar1"), 100, 1)
     require.NoError(t, err)
-    _, err = app.Append(0, labels.Labels{
-        {Name: "foo", Value: "bar2"},
-    }, 100, 2)
+    _, err = app.Append(0, labels.FromStrings("foo", "bar2"), 100, 2)
     require.NoError(t, err)
 
     // Add ooo samples for those series
-    _, err = app.Append(0, labels.Labels{
-        {Name: "foo", Value: "bar1"},
-    }, 90, 1)
+    _, err = app.Append(0, labels.FromStrings("foo", "bar1"), 90, 1)
     require.NoError(t, err)
-    _, err = app.Append(0, labels.Labels{
-        {Name: "foo", Value: "bar2"},
-    }, 90, 2)
+    _, err = app.Append(0, labels.FromStrings("foo", "bar2"), 90, 2)
     require.NoError(t, err)
 
     require.NoError(t, app.Commit())
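The appender calls above switch from labels.Labels{{Name: ..., Value: ...}} composite literals to labels.FromStrings, which takes alternating name/value arguments and returns a properly constructed set without exposing the underlying representation. A standalone sketch comparing the two constructors (the label values are arbitrary):

package main

import (
    "fmt"

    "github.com/prometheus/prometheus/model/labels"
)

func main() {
    // Name/value pairs in one call; this is the form the tests now use.
    a := labels.FromStrings("foo", "bar1", "job", "test")

    // Equivalent set built from explicit Label values via labels.New.
    b := labels.New(
        labels.Label{Name: "foo", Value: "bar1"},
        labels.Label{Name: "job", Value: "test"},
    )

    fmt.Println(a.String() == b.String()) // true
}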
@@ -848,8 +840,8 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
             // markers like OOOLastRef. These are then used by the ChunkReader.
             ir := NewOOOHeadIndexReader(db.head, tc.queryMinT, tc.queryMaxT)
             var chks []chunks.Meta
-            var respLset labels.Labels
-            err := ir.Series(s1Ref, &respLset, &chks)
+            var b labels.ScratchBuilder
+            err := ir.Series(s1Ref, &b, &chks)
             require.NoError(t, err)
             require.Equal(t, len(tc.expChunksSamples), len(chks))
 

@@ -1011,8 +1003,8 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
             // markers like OOOLastRef. These are then used by the ChunkReader.
             ir := NewOOOHeadIndexReader(db.head, tc.queryMinT, tc.queryMaxT)
             var chks []chunks.Meta
-            var respLset labels.Labels
-            err := ir.Series(s1Ref, &respLset, &chks)
+            var b labels.ScratchBuilder
+            err := ir.Series(s1Ref, &b, &chks)
             require.NoError(t, err)
             require.Equal(t, len(tc.expChunksSamples), len(chks))
 

@@ -451,13 +451,13 @@ type blockBaseSeriesSet struct {
     curr seriesData
 
     bufChks []chunks.Meta
-    bufLbls labels.Labels
+    builder labels.ScratchBuilder
     err error
 }
 
 func (b *blockBaseSeriesSet) Next() bool {
     for b.p.Next() {
-        if err := b.index.Series(b.p.At(), &b.bufLbls, &b.bufChks); err != nil {
+        if err := b.index.Series(b.p.At(), &b.builder, &b.bufChks); err != nil {
             // Postings may be stale. Skip if no underlying series exists.
             if errors.Cause(err) == storage.ErrNotFound {
                 continue

@@ -528,8 +528,7 @@ func (b *blockBaseSeriesSet) Next() bool {
            intervals = intervals.Add(tombstones.Interval{Mint: b.maxt + 1, Maxt: math.MaxInt64})
        }
 
-       b.curr.labels = make(labels.Labels, len(b.bufLbls))
-       copy(b.curr.labels, b.bufLbls)
+       b.curr.labels = b.builder.Labels()
        b.curr.chks = chks
        b.curr.intervals = intervals
 
@@ -865,7 +864,6 @@ func newBlockSeriesSet(i IndexReader, c ChunkReader, t tombstones.Reader, p inde
             mint: mint,
             maxt: maxt,
             disableTrimming: disableTrimming,
-            bufLbls: make(labels.Labels, 0, 10),
         },
     }
 }

@@ -897,7 +895,6 @@ func newBlockChunkSeriesSet(id ulid.ULID, i IndexReader, c ChunkReader, t tombst
             mint: mint,
             maxt: maxt,
             disableTrimming: disableTrimming,
-            bufLbls: make(labels.Labels, 0, 10),
         },
     }
 }

@@ -142,14 +142,14 @@ func createIdxChkReaders(t *testing.T, tc []seriesSamples) (IndexReader, ChunkRe
 
         postings.Add(storage.SeriesRef(i), ls)
 
-        for _, l := range ls {
+        ls.Range(func(l labels.Label) {
             vs, present := lblIdx[l.Name]
             if !present {
                 vs = map[string]struct{}{}
                 lblIdx[l.Name] = vs
             }
             vs[l.Value] = struct{}{}
-        }
+        })
     }
 
     require.NoError(t, postings.Iter(func(l labels.Label, p index.Postings) error {

@@ -1168,10 +1168,10 @@ func (m *mockIndex) AddSeries(ref storage.SeriesRef, l labels.Labels, chunks ...
     if _, ok := m.series[ref]; ok {
         return errors.Errorf("series with reference %d already added", ref)
     }
-    for _, lbl := range l {
+    l.Range(func(lbl labels.Label) {
         m.symbols[lbl.Name] = struct{}{}
         m.symbols[lbl.Value] = struct{}{}
-    }
+    })
 
     s := series{l: l}
     // Actual chunk data is not stored in the index.

@@ -1238,9 +1238,9 @@ func (m mockIndex) LabelValueFor(id storage.SeriesRef, label string) (string, er
 func (m mockIndex) LabelNamesFor(ids ...storage.SeriesRef) ([]string, error) {
     namesMap := make(map[string]bool)
     for _, id := range ids {
-        for _, lbl := range m.series[id].l {
+        m.series[id].l.Range(func(lbl labels.Label) {
             namesMap[lbl.Name] = true
-        }
+        })
     }
     names := make([]string, 0, len(namesMap))
     for name := range namesMap {

@@ -1270,12 +1270,12 @@ func (m mockIndex) SortedPostings(p index.Postings) index.Postings {
     return index.NewListPostings(ep)
 }
 
-func (m mockIndex) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
+func (m mockIndex) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
     s, ok := m.series[ref]
     if !ok {
         return storage.ErrNotFound
     }
-    *lset = append((*lset)[:0], s.l...)
+    builder.Assign(s.l)
     *chks = append((*chks)[:0], s.chunks...)
 
     return nil

@@ -1297,9 +1297,9 @@ func (m mockIndex) LabelNames(matchers ...*labels.Matcher) ([]string, error) {
             }
         }
         if matches {
-            for _, lbl := range series.l {
+            series.l.Range(func(lbl labels.Label) {
                 names[lbl.Name] = struct{}{}
-            }
+            })
         }
     }
 }
@@ -1884,9 +1884,10 @@ func TestPostingsForMatchers(t *testing.T) {
         p, err := PostingsForMatchers(ir, c.matchers...)
         require.NoError(t, err)
 
+        var builder labels.ScratchBuilder
         for p.Next() {
-            lbls := labels.Labels{}
-            require.NoError(t, ir.Series(p.At(), &lbls, &[]chunks.Meta{}))
+            require.NoError(t, ir.Series(p.At(), &builder, &[]chunks.Meta{}))
+            lbls := builder.Labels()
             if _, ok := exp[lbls.String()]; !ok {
                 t.Errorf("Evaluating %v, unexpected result %s", c.matchers, lbls.String())
             } else {

@@ -1973,7 +1974,7 @@ func BenchmarkQueries(b *testing.B) {
 
     // Add some common labels to make the matchers select these series.
     {
-        var commonLbls labels.Labels
+        var commonLbls []labels.Label
         for _, selector := range selectors {
             switch selector.Type {
             case labels.MatchEqual:

@@ -1984,8 +1985,11 @@ func BenchmarkQueries(b *testing.B) {
         }
         for i := range commonLbls {
             s := series[i].(*storage.SeriesEntry)
-            allLabels := append(commonLbls, s.Labels()...)
-            newS := storage.NewListSeries(allLabels, nil)
+            allLabels := commonLbls
+            s.Labels().Range(func(l labels.Label) {
+                allLabels = append(allLabels, l)
+            })
+            newS := storage.NewListSeries(labels.New(allLabels...), nil)
             newS.SampleIteratorFn = s.SampleIteratorFn
 
             series[i] = newS

@@ -2097,7 +2101,7 @@ func (m mockMatcherIndex) SortedPostings(p index.Postings) index.Postings {
     return index.EmptyPostings()
 }
 
-func (m mockMatcherIndex) Series(ref storage.SeriesRef, lset *labels.Labels, chks *[]chunks.Meta) error {
+func (m mockMatcherIndex) Series(ref storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
     return nil
 }
 

@@ -2160,7 +2164,7 @@ func TestBlockBaseSeriesSet(t *testing.T) {
         {
             series: []refdSeries{
                 {
-                    lset: labels.New([]labels.Label{{Name: "a", Value: "a"}}...),
+                    lset: labels.FromStrings("a", "a"),
                     chunks: []chunks.Meta{
                         {Ref: 29},
                         {Ref: 45},

@@ -2173,19 +2177,19 @@ func TestBlockBaseSeriesSet(t *testing.T) {
                    ref: 12,
                },
                {
-                   lset: labels.New([]labels.Label{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}...),
+                   lset: labels.FromStrings("a", "a", "b", "b"),
                    chunks: []chunks.Meta{
                        {Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
                    },
                    ref: 10,
                },
                {
-                   lset: labels.New([]labels.Label{{Name: "b", Value: "c"}}...),
+                   lset: labels.FromStrings("b", "c"),
                    chunks: []chunks.Meta{{Ref: 8282}},
                    ref: 1,
                },
                {
-                   lset: labels.New([]labels.Label{{Name: "b", Value: "b"}}...),
+                   lset: labels.FromStrings("b", "b"),
                    chunks: []chunks.Meta{
                        {Ref: 829}, {Ref: 239}, {Ref: 2349}, {Ref: 659}, {Ref: 269},
                    },

@@ -2198,14 +2202,14 @@ func TestBlockBaseSeriesSet(t *testing.T) {
        {
            series: []refdSeries{
                {
-                   lset: labels.New([]labels.Label{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}...),
+                   lset: labels.FromStrings("a", "a", "b", "b"),
                    chunks: []chunks.Meta{
                        {Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
                    },
                    ref: 10,
                },
                {
-                   lset: labels.New([]labels.Label{{Name: "b", Value: "c"}}...),
+                   lset: labels.FromStrings("b", "c"),
                    chunks: []chunks.Meta{{Ref: 8282}},
                    ref: 3,
                },
@@ -17,7 +17,6 @@ package record
 
 import (
     "math"
-    "sort"
 
     "github.com/pkg/errors"
 

@@ -182,7 +181,9 @@ type RefMmapMarker struct {
 
 // Decoder decodes series, sample, metadata and tombstone records.
 // The zero value is ready to use.
-type Decoder struct{}
+type Decoder struct {
+    builder labels.ScratchBuilder
+}
 
 // Type returns the type of the record.
 // Returns RecordUnknown if no valid record type is found.

@@ -267,14 +268,15 @@ func (d *Decoder) Metadata(rec []byte, metadata []RefMetadata) ([]RefMetadata, e
 
 // DecodeLabels decodes one set of labels from buf.
 func (d *Decoder) DecodeLabels(dec *encoding.Decbuf) labels.Labels {
-    lset := make(labels.Labels, dec.Uvarint())
-
-    for i := range lset {
-        lset[i].Name = dec.UvarintStr()
-        lset[i].Value = dec.UvarintStr()
+    // TODO: reconsider if this function could be pushed down into labels.Labels to be more efficient.
+    d.builder.Reset()
+    nLabels := dec.Uvarint()
+    for i := 0; i < nLabels; i++ {
+        lName := dec.UvarintStr()
+        lValue := dec.UvarintStr()
+        d.builder.Add(lName, lValue)
     }
-    sort.Sort(lset)
-    return lset
+    return d.builder.Labels()
 }
 
 // Samples appends samples in rec to the given slice.

@@ -525,12 +527,13 @@ func (e *Encoder) Metadata(metadata []RefMetadata, b []byte) []byte {
 
 // EncodeLabels encodes the contents of labels into buf.
 func EncodeLabels(buf *encoding.Encbuf, lbls labels.Labels) {
-    buf.PutUvarint(len(lbls))
+    // TODO: reconsider if this function could be pushed down into labels.Labels to be more efficient.
+    buf.PutUvarint(lbls.Len())
 
-    for _, l := range lbls {
+    lbls.Range(func(l labels.Label) {
         buf.PutUvarintStr(l.Name)
         buf.PutUvarintStr(l.Value)
-    }
+    })
 }
 
 // Samples appends the encoded samples to b and returns the resulting slice.
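EncodeLabels and DecodeLabels now go through Len, Range and the decoder's embedded ScratchBuilder instead of indexing into a slice. A simplified round trip of the same idea, flattening to a []string rather than the real encoding.Encbuf/Decbuf so the sketch stays self-contained (the varint framing is deliberately omitted):

package main

import (
    "fmt"

    "github.com/prometheus/prometheus/model/labels"
)

// encodeLabels flattens a label set into name/value strings using only
// Len and Range, the same accessors the record encoder relies on.
func encodeLabels(lset labels.Labels) []string {
    out := make([]string, 0, 2*lset.Len())
    lset.Range(func(l labels.Label) {
        out = append(out, l.Name, l.Value)
    })
    return out
}

// decodeLabels rebuilds the set through a ScratchBuilder, as the record
// decoder now does; it assumes the encoded labels were already sorted,
// which holds because Range walks them in order.
func decodeLabels(flat []string) labels.Labels {
    var b labels.ScratchBuilder
    b.Reset()
    for i := 0; i+1 < len(flat); i += 2 {
        b.Add(flat[i], flat[i+1])
    }
    return b.Labels()
}

func main() {
    in := labels.FromStrings("__name__", "up", "instance", "localhost:9090")
    out := decodeLabels(encodeLabels(in))
    fmt.Println(in.String() == out.String()) // true
}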
@@ -80,11 +80,11 @@ func TestRepairBadIndexVersion(t *testing.T) {
     require.NoError(t, err)
     p, err := r.Postings("b", "1")
     require.NoError(t, err)
+    var builder labels.ScratchBuilder
     for p.Next() {
         t.Logf("next ID %d", p.At())
 
-        var lset labels.Labels
-        require.Error(t, r.Series(p.At(), &lset, nil))
+        require.Error(t, r.Series(p.At(), &builder, nil))
     }
     require.NoError(t, p.Err())
     require.NoError(t, r.Close())

@@ -104,10 +104,9 @@ func TestRepairBadIndexVersion(t *testing.T) {
     for p.Next() {
         t.Logf("next ID %d", p.At())
 
-        var lset labels.Labels
         var chks []chunks.Meta
-        require.NoError(t, r.Series(p.At(), &lset, &chks))
-        res = append(res, lset)
+        require.NoError(t, r.Series(p.At(), &builder, &chks))
+        res = append(res, builder.Labels())
     }
 
     require.NoError(t, p.Err())

@@ -58,9 +58,7 @@ func BenchmarkLabelsClone(b *testing.B) {
     l := labels.FromMap(m)
 
     for i := 0; i < b.N; i++ {
-        res := make(labels.Labels, len(l))
-        copy(res, l)
-        l = res
+        l = l.Copy()
     }
 }
 
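The clone benchmark above replaces the manual make-and-copy of a label slice with l.Copy(), the abstraction-friendly way to get an independent copy whatever labels.Labels looks like internally. A minimal sketch (the label values are arbitrary):

package main

import (
    "fmt"

    "github.com/prometheus/prometheus/model/labels"
)

func main() {
    orig := labels.FromStrings("__name__", "up", "job", "api")

    // Copy returns an independent labels.Labels value; no assumption that
    // the type is a slice that can be make()d and copy()d by hand.
    clone := orig.Copy()

    fmt.Println(orig.String() == clone.String()) // true
}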
@@ -106,13 +104,13 @@ func BenchmarkLabelSetAccess(b *testing.B) {
 
     var v string
 
-    for _, l := range ls {
+    ls.Range(func(l labels.Label) {
         b.Run(l.Name, func(b *testing.B) {
             for i := 0; i < b.N; i++ {
                 v = ls.Get(l.Name)
             }
         })
-    }
+    })
 
     _ = v
 }

@@ -90,8 +90,8 @@ type testTargetRetriever struct {
 
 type testTargetParams struct {
     Identifier string
-    Labels []labels.Label
-    DiscoveredLabels []labels.Label
+    Labels labels.Labels
+    DiscoveredLabels labels.Labels
     Params url.Values
     Reports []*testReport
     Active bool

@@ -508,9 +508,9 @@ func TestGetSeries(t *testing.T) {
            name: "non empty label matcher",
            matchers: []string{`{foo=~".+"}`},
            expected: []labels.Labels{
-               {labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "abc", Value: "qwerty"}, labels.Label{Name: "foo", Value: "baz"}},
-               {labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}},
-               {labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}, labels.Label{Name: "xyz", Value: "qwerty"}},
+               labels.FromStrings("__name__", "test_metric2", "abc", "qwerty", "foo", "baz"),
+               labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
+               labels.FromStrings("__name__", "test_metric2", "foo", "boo", "xyz", "qwerty"),
            },
            api: api,
        },

@@ -518,8 +518,8 @@ func TestGetSeries(t *testing.T) {
            name: "exact label matcher",
            matchers: []string{`{foo="boo"}`},
            expected: []labels.Labels{
-               {labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}},
-               {labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}, labels.Label{Name: "xyz", Value: "qwerty"}},
+               labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
+               labels.FromStrings("__name__", "test_metric2", "foo", "boo", "xyz", "qwerty"),
            },
            api: api,
        },

@@ -527,9 +527,9 @@ func TestGetSeries(t *testing.T) {
            name: "two matchers",
            matchers: []string{`{foo="boo"}`, `{foo="baz"}`},
            expected: []labels.Labels{
-               {labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "abc", Value: "qwerty"}, labels.Label{Name: "foo", Value: "baz"}},
-               {labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}},
-               {labels.Label{Name: "__name__", Value: "test_metric2"}, labels.Label{Name: "foo", Value: "boo"}, labels.Label{Name: "xyz", Value: "qwerty"}},
+               labels.FromStrings("__name__", "test_metric2", "abc", "qwerty", "foo", "baz"),
+               labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
+               labels.FromStrings("__name__", "test_metric2", "foo", "boo", "xyz", "qwerty"),
            },
            api: api,
        },

@@ -558,12 +558,6 @@ func TestGetSeries(t *testing.T) {
            assertAPIError(t, res.err, tc.expectedErrorType)
            if tc.expectedErrorType == errorNone {
                r := res.data.([]labels.Labels)
-               for _, l := range tc.expected {
-                   sort.Sort(l)
-               }
-               for _, l := range r {
-                   sort.Sort(l)
-               }
                sort.Sort(byLabels(tc.expected))
                sort.Sort(byLabels(r))
                require.Equal(t, tc.expected, r)

@@ -928,7 +922,7 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
                model.ScrapeIntervalLabel: "15s",
                model.ScrapeTimeoutLabel: "5s",
            }),
-           DiscoveredLabels: nil,
+           DiscoveredLabels: labels.EmptyLabels(),
            Params: url.Values{},
            Reports: []*testReport{{scrapeStart, 70 * time.Millisecond, nil}},
            Active: true,

@@ -943,14 +937,14 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
                model.ScrapeIntervalLabel: "20s",
                model.ScrapeTimeoutLabel: "10s",
            }),
-           DiscoveredLabels: nil,
+           DiscoveredLabels: labels.EmptyLabels(),
            Params: url.Values{"target": []string{"example.com"}},
            Reports: []*testReport{{scrapeStart, 100 * time.Millisecond, errors.New("failed")}},
            Active: true,
        },
        {
            Identifier: "blackbox",
-           Labels: nil,
+           Labels: labels.EmptyLabels(),
            DiscoveredLabels: labels.FromMap(map[string]string{
                model.SchemeLabel: "http",
                model.AddressLabel: "http://dropped.example.com:9115",

@@ -1111,7 +1105,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
                    {V: 1, T: timestamp.FromTime(start.Add(1 * time.Second))},
                    {V: 2, T: timestamp.FromTime(start.Add(2 * time.Second))},
                },
-               Metric: nil,
+               // No Metric returned - use zero value for comparison.
            },
        },
    },

@@ -3296,7 +3290,7 @@ func BenchmarkRespond(b *testing.B) {
        Result: promql.Matrix{
            promql.Series{
                Points: points,
-               Metric: nil,
+               Metric: labels.EmptyLabels(),
            },
        },
    }
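Several fixtures above replace a nil labels.Labels with labels.EmptyLabels(). Once the slice representation is hidden, nil is no longer a meaningful empty value, and EmptyLabels() is the canonical zero set. A short sketch (the sample struct is invented for illustration; it merely mimics a field like promql.Series.Metric):

package main

import (
    "fmt"

    "github.com/prometheus/prometheus/model/labels"
)

// sample mimics a struct with a label-set field, like the ones in the
// fixtures above; the struct itself is made up for this sketch.
type sample struct {
    Metric labels.Labels
}

func main() {
    s := sample{Metric: labels.EmptyLabels()}

    // EmptyLabels gives a well-defined empty set to inspect or iterate,
    // rather than relying on nil having slice semantics.
    fmt.Println(s.Metric.Len()) // 0
    s.Metric.Range(func(l labels.Label) {
        fmt.Println("never reached for an empty set:", l.Name)
    })
}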
@@ -171,26 +171,24 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
            Untyped: &dto.Untyped{},
        }
 
-       for _, l := range s.Metric {
+       err := s.Metric.Validate(func(l labels.Label) error {
            if l.Value == "" {
                // No value means unset. Never consider those labels.
                // This is also important to protect against nameless metrics.
-               continue
+               return nil
            }
            if l.Name == labels.MetricName {
                nameSeen = true
                if l.Value == lastMetricName {
                    // We already have the name in the current MetricFamily,
                    // and we ignore nameless metrics.
-                   continue
+                   return nil
                }
                // Need to start a new MetricFamily. Ship off the old one (if any) before
                // creating the new one.
                if protMetricFam != nil {
                    if err := enc.Encode(protMetricFam); err != nil {
-                       federationErrors.Inc()
-                       level.Error(h.logger).Log("msg", "federation failed", "err", err)
-                       return
+                       return err
                    }
                }
                protMetricFam = &dto.MetricFamily{

@@ -198,7 +196,7 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
                    Name: proto.String(l.Value),
                }
                lastMetricName = l.Value
-               continue
+               return nil
            }
            protMetric.Label = append(protMetric.Label, &dto.LabelPair{
                Name: proto.String(l.Name),

@@ -207,6 +205,12 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
            if _, ok := externalLabels[l.Name]; ok {
                globalUsed[l.Name] = struct{}{}
            }
+           return nil
+       })
+       if err != nil {
+           federationErrors.Inc()
+           level.Error(h.logger).Log("msg", "federation failed", "err", err)
+           return
        }
        if !nameSeen {
            level.Warn(h.logger).Log("msg", "Ignoring nameless metric during federation", "metric", s.Metric)
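Federation switches from a range loop with continue to labels.Validate, whose callback returns an error: returning nil moves on to the next label, and a non-nil error aborts the walk, which is how the encode failure now propagates out of the loop. A condensed sketch of that control flow (the skip-empty-values rule is taken from the handler above; the simulated failure condition is illustrative):

package main

import (
    "errors"
    "fmt"

    "github.com/prometheus/prometheus/model/labels"
)

func main() {
    lset := labels.FromStrings("__name__", "up", "empty", "", "job", "api")

    kept := []string{}
    err := lset.Validate(func(l labels.Label) error {
        if l.Value == "" {
            return nil // acts like `continue` in the old loop
        }
        if l.Name == "boom" {
            return errors.New("simulated encode failure") // aborts the walk
        }
        kept = append(kept, l.Name)
        return nil
    })

    fmt.Println(kept, err) // [__name__ job] <nil>
}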
@@ -162,7 +162,7 @@ test_metric_without_labels{instance=""} 1001 6000000
        },
        "external labels are added if not already present": {
            params: "match[]={__name__=~'.%2b'}", // '%2b' is an URL-encoded '+'.
-           externalLabels: labels.Labels{{Name: "foo", Value: "baz"}, {Name: "zone", Value: "ie"}},
+           externalLabels: labels.FromStrings("foo", "baz", "zone", "ie"),
            code: 200,
            body: `# TYPE test_metric1 untyped
 test_metric1{foo="bar",instance="i",zone="ie"} 10000 6000000

@@ -179,7 +179,7 @@ test_metric_without_labels{foo="baz",instance="",zone="ie"} 1001 6000000
            // This makes no sense as a configuration, but we should
            // know what it does anyway.
            params: "match[]={__name__=~'.%2b'}", // '%2b' is an URL-encoded '+'.
-           externalLabels: labels.Labels{{Name: "instance", Value: "baz"}},
+           externalLabels: labels.FromStrings("instance", "baz"),
            code: 200,
            body: `# TYPE test_metric1 untyped
 test_metric1{foo="bar",instance="i"} 10000 6000000

@@ -653,13 +653,10 @@ func (h *Handler) consoles(w http.ResponseWriter, r *http.Request) {
        params[k] = v[0]
    }
 
-   externalLabels := map[string]string{}
    h.mtx.RLock()
    els := h.config.GlobalConfig.ExternalLabels
    h.mtx.RUnlock()
-   for _, el := range els {
-       externalLabels[el.Name] = el.Value
-   }
+   externalLabels := els.Map()
 
    // Inject some convenience variables that are easier to remember for users
    // who are not used to Go's templating system.
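Finally, the console handler above now calls els.Map() instead of hand-rolling a map[string]string from the external labels. A short sketch of what Map returns (the label values are arbitrary):

package main

import (
    "fmt"

    "github.com/prometheus/prometheus/model/labels"
)

func main() {
    els := labels.FromStrings("region", "eu", "replica", "a")

    // Map converts the label set into a plain map[string]string, which is
    // what the console template context expects.
    externalLabels := els.Map()

    fmt.Println(externalLabels["region"], externalLabels["replica"]) // eu a
}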