// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package tsdb

import (
	"fmt"
	"io/ioutil"
	"math"
	"math/rand"
	"os"
	"path/filepath"
	"sort"
	"strconv"
	"testing"

	"github.com/pkg/errors"
	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/prometheus/prometheus/tsdb/chunkenc"
	"github.com/prometheus/prometheus/tsdb/chunks"
	"github.com/prometheus/prometheus/tsdb/index"
	"github.com/prometheus/prometheus/tsdb/tombstones"
	"github.com/prometheus/prometheus/tsdb/tsdbutil"
	"github.com/prometheus/prometheus/util/testutil"
)
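
// mockSeriesSet implements the SeriesSet interface with caller-supplied
// closures so tests can stub Next, At, and Err behavior directly.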
type mockSeriesSet struct {
	next   func() bool
	series func() Series
	err    func() error
}

func (m *mockSeriesSet) Next() bool { return m.next() }
func (m *mockSeriesSet) At() Series { return m.series() }
func (m *mockSeriesSet) Err() error { return m.err() }
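
// newMockSeriesSet returns a mockSeriesSet that iterates over the given list
// of series exactly once.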
func newMockSeriesSet(list []Series) *mockSeriesSet {
	i := -1
	return &mockSeriesSet{
		next: func() bool {
			i++
			return i < len(list)
		},
		series: func() Series {
			return list[i]
		},
		err: func() error { return nil },
	}
}

func TestMergedSeriesSet(t *testing.T) {
	cases := []struct {
		// The input sets in order (samples in series in b are strictly
		// after those in a).
		a, b SeriesSet
		// The composition of a and b in the partition series set must yield
		// results equivalent to the result series set.
		exp SeriesSet
	}{
		{
			a: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"a": "a",
				}, []tsdbutil.Sample{
					sample{t: 1, v: 1},
				}),
			}),
			b: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"a": "a",
				}, []tsdbutil.Sample{
					sample{t: 2, v: 2},
				}),
				newSeries(map[string]string{
					"b": "b",
				}, []tsdbutil.Sample{
					sample{t: 1, v: 1},
				}),
			}),
			exp: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"a": "a",
				}, []tsdbutil.Sample{
					sample{t: 1, v: 1},
					sample{t: 2, v: 2},
				}),
				newSeries(map[string]string{
					"b": "b",
				}, []tsdbutil.Sample{
					sample{t: 1, v: 1},
				}),
			}),
		},
		{
			a: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "127.0.0.1:9090",
				}, []tsdbutil.Sample{
					sample{t: 1, v: 1},
				}),
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "localhost:9090",
				}, []tsdbutil.Sample{
					sample{t: 1, v: 2},
				}),
			}),
			b: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "127.0.0.1:9090",
				}, []tsdbutil.Sample{
					sample{t: 2, v: 1},
				}),
				newSeries(map[string]string{
					"handler":  "query",
					"instance": "localhost:9090",
				}, []tsdbutil.Sample{
					sample{t: 2, v: 2},
				}),
			}),
			exp: newMockSeriesSet([]Series{
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "127.0.0.1:9090",
				}, []tsdbutil.Sample{
					sample{t: 1, v: 1},
					sample{t: 2, v: 1},
				}),
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "localhost:9090",
				}, []tsdbutil.Sample{
					sample{t: 1, v: 2},
				}),
				newSeries(map[string]string{
					"handler":  "query",
					"instance": "localhost:9090",
				}, []tsdbutil.Sample{
					sample{t: 2, v: 2},
				}),
			}),
		},
	}

Outer:
	for _, c := range cases {
		res := NewMergedSeriesSet([]SeriesSet{c.a, c.b})

		for {
			eok, rok := c.exp.Next(), res.Next()
			testutil.Equals(t, eok, rok)

			if !eok {
				continue Outer
			}
			sexp := c.exp.At()
			sres := res.At()

			testutil.Equals(t, sexp.Labels(), sres.Labels())

			smplExp, errExp := expandSeriesIterator(sexp.Iterator())
			smplRes, errRes := expandSeriesIterator(sres.Iterator())

			testutil.Equals(t, errExp, errRes)
			testutil.Equals(t, smplExp, smplRes)
		}
	}
}
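
// expandSeriesIterator drains the given iterator into a slice of samples and
// returns it together with any error the iterator reported.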
func expandSeriesIterator(it SeriesIterator) (r []tsdbutil.Sample, err error) {
	for it.Next() {
		t, v := it.At()
		r = append(r, sample{t: t, v: v})
	}

	return r, it.Err()
}
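
// seriesSamples describes one test series: its label set and the samples of
// each of its chunks.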
type seriesSamples struct {
	lset   map[string]string
	chunks [][]sample
}

// Index: labels -> postings -> chunkMetas -> chunkRef
// ChunkReader: ref -> vals
func createIdxChkReaders(t *testing.T, tc []seriesSamples) (IndexReader, ChunkReader, int64, int64) {
	sort.Slice(tc, func(i, j int) bool {
		return labels.Compare(labels.FromMap(tc[i].lset), labels.FromMap(tc[j].lset)) < 0
	})

	postings := index.NewMemPostings()
	chkReader := mockChunkReader(make(map[uint64]chunkenc.Chunk))
	lblIdx := make(map[string]stringset)
	mi := newMockIndex()
	blockMint := int64(math.MaxInt64)
	blockMaxt := int64(math.MinInt64)

	var chunkRef uint64
	for i, s := range tc {
		i = i + 1 // 0 is not a valid posting.
		metas := make([]chunks.Meta, 0, len(s.chunks))
		for _, chk := range s.chunks {
			if chk[0].t < blockMint {
				blockMint = chk[0].t
			}
			if chk[len(chk)-1].t > blockMaxt {
				blockMaxt = chk[len(chk)-1].t
			}

			metas = append(metas, chunks.Meta{
				MinTime: chk[0].t,
				MaxTime: chk[len(chk)-1].t,
				Ref:     chunkRef,
			})

			chunk := chunkenc.NewXORChunk()
			app, _ := chunk.Appender()
			for _, smpl := range chk {
				app.Append(smpl.t, smpl.v)
			}
			chkReader[chunkRef] = chunk
			chunkRef++
		}

		ls := labels.FromMap(s.lset)
		testutil.Ok(t, mi.AddSeries(uint64(i), ls, metas...))

		postings.Add(uint64(i), ls)

		for _, l := range ls {
			vs, present := lblIdx[l.Name]
			if !present {
				vs = stringset{}
				lblIdx[l.Name] = vs
			}
			vs.set(l.Value)
		}
	}

	for l, vs := range lblIdx {
		testutil.Ok(t, mi.WriteLabelIndex([]string{l}, vs.slice()))
	}

	testutil.Ok(t, postings.Iter(func(l labels.Label, p index.Postings) error {
		return mi.WritePostings(l.Name, l.Value, p)
	}))

	return mi, chkReader, blockMint, blockMaxt
}

func TestBlockQuerier(t *testing.T) {
	newSeries := func(l map[string]string, s []tsdbutil.Sample) Series {
		return &mockSeries{
			labels:   func() labels.Labels { return labels.FromMap(l) },
			iterator: func() SeriesIterator { return newListSeriesIterator(s) },
		}
	}

	type query struct {
		mint, maxt int64
		ms         []*labels.Matcher
		exp        SeriesSet
	}

	cases := struct {
		data []seriesSamples

		queries []query
	}{
		data: []seriesSamples{
			{
				lset: map[string]string{
					"a": "a",
				},
				chunks: [][]sample{
					{
						{1, 2}, {2, 3}, {3, 4},
					},
					{
						{5, 2}, {6, 3}, {7, 4},
					},
				},
			},
			{
				lset: map[string]string{
					"a": "a",
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 1}, {2, 2}, {3, 3},
					},
					{
						{5, 3}, {6, 6},
					},
				},
			},
			{
				lset: map[string]string{
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 3}, {2, 2}, {3, 6},
					},
					{
						{5, 1}, {6, 7}, {7, 2},
					},
				},
			},
		},

		queries: []query{
			{
				mint: 0,
				maxt: 0,
				ms:   []*labels.Matcher{},
				exp:  newMockSeriesSet([]Series{}),
			},
			{
				mint: 0,
				maxt: 0,
				ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
				exp:  newMockSeriesSet([]Series{}),
			},
			{
				mint: 1,
				maxt: 0,
				ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
				exp:  newMockSeriesSet([]Series{}),
			},
			{
				mint: 2,
				maxt: 6,
				ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
				exp: newMockSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
					},
						[]tsdbutil.Sample{sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}},
					),
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]tsdbutil.Sample{sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}},
					),
				}),
			},
		},
	}

Outer:
	for _, c := range cases.queries {
		ir, cr, _, _ := createIdxChkReaders(t, cases.data)
		querier := &blockQuerier{
			index:      ir,
			chunks:     cr,
			tombstones: tombstones.NewMemTombstones(),

			mint: c.mint,
			maxt: c.maxt,
		}

		res, err := querier.Select(c.ms...)
		testutil.Ok(t, err)

		for {
			eok, rok := c.exp.Next(), res.Next()
			testutil.Equals(t, eok, rok)

			if !eok {
				continue Outer
			}
			sexp := c.exp.At()
			sres := res.At()

			testutil.Equals(t, sexp.Labels(), sres.Labels())

			smplExp, errExp := expandSeriesIterator(sexp.Iterator())
			smplRes, errRes := expandSeriesIterator(sres.Iterator())

			testutil.Equals(t, errExp, errRes)
			testutil.Equals(t, smplExp, smplRes)
		}
	}
}

func TestBlockQuerierDelete(t *testing.T) {
	newSeries := func(l map[string]string, s []tsdbutil.Sample) Series {
		return &mockSeries{
			labels:   func() labels.Labels { return labels.FromMap(l) },
			iterator: func() SeriesIterator { return newListSeriesIterator(s) },
		}
	}

	type query struct {
		mint, maxt int64
		ms         []*labels.Matcher
		exp        SeriesSet
	}

	cases := struct {
		data []seriesSamples

		tombstones tombstones.Reader
		queries    []query
	}{
		data: []seriesSamples{
			{
				lset: map[string]string{
					"a": "a",
				},
				chunks: [][]sample{
					{
						{1, 2}, {2, 3}, {3, 4},
					},
					{
						{5, 2}, {6, 3}, {7, 4},
					},
				},
			},
			{
				lset: map[string]string{
					"a": "a",
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 1}, {2, 2}, {3, 3},
					},
					{
						{4, 15}, {5, 3}, {6, 6},
					},
				},
			},
			{
				lset: map[string]string{
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 3}, {2, 2}, {3, 6},
					},
					{
						{5, 1}, {6, 7}, {7, 2},
					},
				},
			},
		},
		tombstones: tombstones.NewTestMemTombstones([]tombstones.Intervals{
			tombstones.Intervals{{Mint: 1, Maxt: 3}},
			tombstones.Intervals{{Mint: 1, Maxt: 3}, {Mint: 6, Maxt: 10}},
			tombstones.Intervals{{Mint: 6, Maxt: 10}},
		}),
		queries: []query{
			{
				mint: 2,
				maxt: 7,
				ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
				exp: newMockSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
					},
						[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}, sample{7, 4}},
					),
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]tsdbutil.Sample{sample{4, 15}, sample{5, 3}},
					),
				}),
			},
			{
				mint: 2,
				maxt: 7,
				ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "b", "b")},
				exp: newMockSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]tsdbutil.Sample{sample{4, 15}, sample{5, 3}},
					),
					newSeries(map[string]string{
						"b": "b",
					},
						[]tsdbutil.Sample{sample{2, 2}, sample{3, 6}, sample{5, 1}},
					),
				}),
			},
			{
				mint: 1,
				maxt: 4,
				ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
				exp: newMockSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]tsdbutil.Sample{sample{4, 15}},
					),
				}),
			},
			{
				mint: 1,
				maxt: 3,
				ms:   []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
				exp:  newMockSeriesSet([]Series{}),
			},
		},
	}

	fmt.Println("tombstones", cases.tombstones)

Outer:
	for _, c := range cases.queries {
		ir, cr, _, _ := createIdxChkReaders(t, cases.data)
		querier := &blockQuerier{
			index:      ir,
			chunks:     cr,
			tombstones: cases.tombstones,

			mint: c.mint,
			maxt: c.maxt,
		}

		res, err := querier.Select(c.ms...)
		testutil.Ok(t, err)

		for {
			eok, rok := c.exp.Next(), res.Next()
			testutil.Equals(t, eok, rok)

			if !eok {
				continue Outer
			}
			sexp := c.exp.At()
			sres := res.At()

			testutil.Equals(t, sexp.Labels(), sres.Labels())

			smplExp, errExp := expandSeriesIterator(sexp.Iterator())
			smplRes, errRes := expandSeriesIterator(sres.Iterator())

			testutil.Equals(t, errExp, errRes)
			testutil.Equals(t, smplExp, smplRes)
		}
	}
}

func TestBaseChunkSeries(t *testing.T) {
	type refdSeries struct {
		lset   labels.Labels
		chunks []chunks.Meta

		ref uint64
	}

	cases := []struct {
		series []refdSeries
		// Postings should be in the sorted order of the series
		postings []uint64

		expIdxs []int
	}{
		{
			series: []refdSeries{
				{
					lset: labels.New([]labels.Label{{Name: "a", Value: "a"}}...),
					chunks: []chunks.Meta{
						{Ref: 29}, {Ref: 45}, {Ref: 245}, {Ref: 123}, {Ref: 4232}, {Ref: 5344},
						{Ref: 121},
					},
					ref: 12,
				},
				{
					lset: labels.New([]labels.Label{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
					},
					ref: 10,
				},
				{
					lset:   labels.New([]labels.Label{{Name: "b", Value: "c"}}...),
					chunks: []chunks.Meta{{Ref: 8282}},
					ref:    1,
				},
				{
					lset: labels.New([]labels.Label{{Name: "b", Value: "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 829}, {Ref: 239}, {Ref: 2349}, {Ref: 659}, {Ref: 269},
					},
					ref: 108,
				},
			},
			postings: []uint64{12, 13, 10, 108}, // 13 doesn't exist and should just be skipped over.
			expIdxs:  []int{0, 1, 3},
		},
		{
			series: []refdSeries{
				{
					lset: labels.New([]labels.Label{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
					},
					ref: 10,
				},
				{
					lset:   labels.New([]labels.Label{{Name: "b", Value: "c"}}...),
					chunks: []chunks.Meta{{Ref: 8282}},
					ref:    3,
				},
			},
			postings: []uint64{},
			expIdxs:  []int{},
		},
	}

	for _, tc := range cases {
		mi := newMockIndex()
		for _, s := range tc.series {
			testutil.Ok(t, mi.AddSeries(s.ref, s.lset, s.chunks...))
		}

		bcs := &baseChunkSeries{
			p:          index.NewListPostings(tc.postings),
			index:      mi,
			tombstones: tombstones.NewMemTombstones(),
		}

		i := 0
		for bcs.Next() {
			lset, chks, _ := bcs.At()

			idx := tc.expIdxs[i]

			testutil.Equals(t, tc.series[idx].lset, lset)
			testutil.Equals(t, tc.series[idx].chunks, chks)

			i++
		}
		testutil.Equals(t, len(tc.expIdxs), i)
		testutil.Ok(t, bcs.Err())
	}
}

// TODO: Remove after simpleSeries is merged
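// itSeries wraps a single SeriesIterator as a Series with an empty label set,
// which is all the iterator tests below need.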
type itSeries struct {
	si SeriesIterator
}

func (s itSeries) Iterator() SeriesIterator { return s.si }
func (s itSeries) Labels() labels.Labels    { return labels.Labels{} }

func TestSeriesIterator(t *testing.T) {
	itcases := []struct {
		a, b, c []tsdbutil.Sample
		exp     []tsdbutil.Sample

		mint, maxt int64
	}{
		{
			a: []tsdbutil.Sample{},
			b: []tsdbutil.Sample{},
			c: []tsdbutil.Sample{},

			exp: []tsdbutil.Sample{},

			mint: math.MinInt64,
			maxt: math.MaxInt64,
		},
		{
			a: []tsdbutil.Sample{
				sample{1, 2},
				sample{2, 3},
				sample{3, 5},
				sample{6, 1},
			},
			b: []tsdbutil.Sample{},
			c: []tsdbutil.Sample{
				sample{7, 89}, sample{9, 8},
			},

			exp: []tsdbutil.Sample{
				sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8},
			},
			mint: math.MinInt64,
			maxt: math.MaxInt64,
		},
		{
			a: []tsdbutil.Sample{},
			b: []tsdbutil.Sample{
				sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1},
			},
			c: []tsdbutil.Sample{
				sample{7, 89}, sample{9, 8},
			},

			exp: []tsdbutil.Sample{
				sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8},
			},
			mint: 2,
			maxt: 8,
		},
		{
			a: []tsdbutil.Sample{
				sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1},
			},
			b: []tsdbutil.Sample{
				sample{7, 89}, sample{9, 8},
			},
			c: []tsdbutil.Sample{
				sample{10, 22}, sample{203, 3493},
			},

			exp: []tsdbutil.Sample{
				sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8}, sample{10, 22}, sample{203, 3493},
			},
			mint: 6,
			maxt: 10,
		},
	}

	seekcases := []struct {
		a, b, c []tsdbutil.Sample

		seek    int64
		success bool
		exp     []tsdbutil.Sample

		mint, maxt int64
	}{
		{
			a: []tsdbutil.Sample{},
			b: []tsdbutil.Sample{},
			c: []tsdbutil.Sample{},

			seek:    0,
			success: false,
			exp:     nil,
		},
		{
			a: []tsdbutil.Sample{
				sample{2, 3},
			},
			b: []tsdbutil.Sample{},
			c: []tsdbutil.Sample{
				sample{7, 89}, sample{9, 8},
			},

			seek:    10,
			success: false,
			exp:     nil,
			mint:    math.MinInt64,
			maxt:    math.MaxInt64,
		},
		{
			a: []tsdbutil.Sample{},
			b: []tsdbutil.Sample{
				sample{1, 2}, sample{3, 5}, sample{6, 1},
			},
			c: []tsdbutil.Sample{
				sample{7, 89}, sample{9, 8},
			},

			seek:    2,
			success: true,
			exp: []tsdbutil.Sample{
				sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8},
			},
			mint: 5,
			maxt: 8,
		},
		{
			a: []tsdbutil.Sample{
				sample{6, 1},
			},
			b: []tsdbutil.Sample{
				sample{9, 8},
			},
			c: []tsdbutil.Sample{
				sample{10, 22}, sample{203, 3493},
			},

			seek:    10,
			success: true,
			exp: []tsdbutil.Sample{
				sample{10, 22}, sample{203, 3493},
			},
			mint: 10,
			maxt: 203,
		},
		{
			a: []tsdbutil.Sample{
				sample{6, 1},
			},
			b: []tsdbutil.Sample{
				sample{9, 8},
			},
			c: []tsdbutil.Sample{
				sample{10, 22}, sample{203, 3493},
			},

			seek:    203,
			success: true,
			exp: []tsdbutil.Sample{
				sample{203, 3493},
			},
			mint: 7,
			maxt: 203,
		},
	}

	t.Run("Chunk", func(t *testing.T) {
		for _, tc := range itcases {
			chkMetas := []chunks.Meta{
				tsdbutil.ChunkFromSamples(tc.a),
				tsdbutil.ChunkFromSamples(tc.b),
				tsdbutil.ChunkFromSamples(tc.c),
			}
			res := newChunkSeriesIterator(chkMetas, nil, tc.mint, tc.maxt)

			smplValid := make([]tsdbutil.Sample, 0)
			for _, s := range tc.exp {
				if s.T() >= tc.mint && s.T() <= tc.maxt {
					smplValid = append(smplValid, tsdbutil.Sample(s))
				}
			}
			exp := newListSeriesIterator(smplValid)

			smplExp, errExp := expandSeriesIterator(exp)
			smplRes, errRes := expandSeriesIterator(res)

			testutil.Equals(t, errExp, errRes)
			testutil.Equals(t, smplExp, smplRes)
		}

		t.Run("Seek", func(t *testing.T) {
			extra := []struct {
				a, b, c []tsdbutil.Sample

				seek    int64
				success bool
				exp     []tsdbutil.Sample

				mint, maxt int64
			}{
				{
					a: []tsdbutil.Sample{
						sample{6, 1},
					},
					b: []tsdbutil.Sample{
						sample{9, 8},
					},
					c: []tsdbutil.Sample{
						sample{10, 22}, sample{203, 3493},
					},

					seek:    203,
					success: false,
					exp:     nil,
					mint:    2,
					maxt:    202,
				},
				{
					a: []tsdbutil.Sample{
						sample{6, 1},
					},
					b: []tsdbutil.Sample{
						sample{9, 8},
					},
					c: []tsdbutil.Sample{
						sample{10, 22}, sample{203, 3493},
					},

					seek:    5,
					success: true,
					exp:     []tsdbutil.Sample{sample{10, 22}},
					mint:    10,
					maxt:    202,
				},
			}

			seekcases2 := append(seekcases, extra...)

			for _, tc := range seekcases2 {
				chkMetas := []chunks.Meta{
					tsdbutil.ChunkFromSamples(tc.a),
					tsdbutil.ChunkFromSamples(tc.b),
					tsdbutil.ChunkFromSamples(tc.c),
				}
				res := newChunkSeriesIterator(chkMetas, nil, tc.mint, tc.maxt)

				smplValid := make([]tsdbutil.Sample, 0)
				for _, s := range tc.exp {
					if s.T() >= tc.mint && s.T() <= tc.maxt {
						smplValid = append(smplValid, tsdbutil.Sample(s))
					}
				}
				exp := newListSeriesIterator(smplValid)

				testutil.Equals(t, tc.success, res.Seek(tc.seek))

				if tc.success {
					// Init the list and then proceed to check.
					remaining := exp.Next()
					testutil.Assert(t, remaining == true, "")

					for remaining {
						sExp, eExp := exp.At()
						sRes, eRes := res.At()
						testutil.Equals(t, eExp, eRes)
						testutil.Equals(t, sExp, sRes)

						remaining = exp.Next()
						testutil.Equals(t, remaining, res.Next())
					}
				}
			}
		})
	})

	t.Run("Chain", func(t *testing.T) {
		// Extra cases for overlapping series.
		itcasesExtra := []struct {
			a, b, c []tsdbutil.Sample
			exp     []tsdbutil.Sample

			mint, maxt int64
		}{
			{
				a: []tsdbutil.Sample{
					sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1},
				},
				b: []tsdbutil.Sample{
					sample{5, 49}, sample{7, 89}, sample{9, 8},
				},
				c: []tsdbutil.Sample{
					sample{2, 33}, sample{4, 44}, sample{10, 3},
				},

				exp: []tsdbutil.Sample{
					sample{1, 2}, sample{2, 33}, sample{3, 5}, sample{4, 44}, sample{5, 49}, sample{6, 1}, sample{7, 89}, sample{9, 8}, sample{10, 3},
				},
				mint: math.MinInt64,
				maxt: math.MaxInt64,
			},
			{
				a: []tsdbutil.Sample{
					sample{1, 2}, sample{2, 3}, sample{9, 5}, sample{13, 1},
				},
				b: []tsdbutil.Sample{},
				c: []tsdbutil.Sample{
					sample{1, 23}, sample{2, 342}, sample{3, 25}, sample{6, 11},
				},

				exp: []tsdbutil.Sample{
					sample{1, 23}, sample{2, 342}, sample{3, 25}, sample{6, 11}, sample{9, 5}, sample{13, 1},
				},
				mint: math.MinInt64,
				maxt: math.MaxInt64,
			},
		}

		for _, tc := range itcases {
			a, b, c := itSeries{newListSeriesIterator(tc.a)},
				itSeries{newListSeriesIterator(tc.b)},
				itSeries{newListSeriesIterator(tc.c)}

			res := newChainedSeriesIterator(a, b, c)
			exp := newListSeriesIterator([]tsdbutil.Sample(tc.exp))

			smplExp, errExp := expandSeriesIterator(exp)
			smplRes, errRes := expandSeriesIterator(res)

			testutil.Equals(t, errExp, errRes)
			testutil.Equals(t, smplExp, smplRes)
		}

		for _, tc := range append(itcases, itcasesExtra...) {
			a, b, c := itSeries{newListSeriesIterator(tc.a)},
				itSeries{newListSeriesIterator(tc.b)},
				itSeries{newListSeriesIterator(tc.c)}

			res := newVerticalMergeSeriesIterator(a, b, c)
			exp := newListSeriesIterator([]tsdbutil.Sample(tc.exp))

			smplExp, errExp := expandSeriesIterator(exp)
			smplRes, errRes := expandSeriesIterator(res)

			testutil.Equals(t, errExp, errRes)
			testutil.Equals(t, smplExp, smplRes)
		}

		t.Run("Seek", func(t *testing.T) {
			for _, tc := range seekcases {
				ress := []SeriesIterator{
					newChainedSeriesIterator(
						itSeries{newListSeriesIterator(tc.a)},
						itSeries{newListSeriesIterator(tc.b)},
						itSeries{newListSeriesIterator(tc.c)},
					),
					newVerticalMergeSeriesIterator(
						itSeries{newListSeriesIterator(tc.a)},
						itSeries{newListSeriesIterator(tc.b)},
						itSeries{newListSeriesIterator(tc.c)},
					),
				}

				for _, res := range ress {
					exp := newListSeriesIterator(tc.exp)

					testutil.Equals(t, tc.success, res.Seek(tc.seek))

					if tc.success {
						// Init the list and then proceed to check.
						remaining := exp.Next()
						testutil.Assert(t, remaining == true, "")

						for remaining {
							sExp, eExp := exp.At()
							sRes, eRes := res.At()
							testutil.Equals(t, eExp, eRes)
							testutil.Equals(t, sExp, sRes)

							remaining = exp.Next()
							testutil.Equals(t, remaining, res.Next())
						}
					}
				}
			}
		})
	})
}

// Regression for: https://github.com/prometheus/prometheus/tsdb/pull/97
func TestChunkSeriesIterator_DoubleSeek(t *testing.T) {
	chkMetas := []chunks.Meta{
		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{}),
		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}}),
		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{4, 4}, sample{5, 5}}),
	}

	res := newChunkSeriesIterator(chkMetas, nil, 2, 8)
	testutil.Assert(t, res.Seek(1) == true, "")
	testutil.Assert(t, res.Seek(2) == true, "")
	ts, v := res.At()
	testutil.Equals(t, int64(2), ts)
	testutil.Equals(t, float64(2), v)
}

// Regression when seeked chunks were still found via binary search and we always
// skipped to the end when seeking a value in the current chunk.
func TestChunkSeriesIterator_SeekInCurrentChunk(t *testing.T) {
	metas := []chunks.Meta{
		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{}),
		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2}, sample{3, 4}, sample{5, 6}, sample{7, 8}}),
		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{}),
	}

	it := newChunkSeriesIterator(metas, nil, 1, 7)

	testutil.Assert(t, it.Next() == true, "")
	ts, v := it.At()
	testutil.Equals(t, int64(1), ts)
	testutil.Equals(t, float64(2), v)

	testutil.Assert(t, it.Seek(4) == true, "")
	ts, v = it.At()
	testutil.Equals(t, int64(5), ts)
	testutil.Equals(t, float64(6), v)
}

// Regression when calling Next() with a time bounded to fit within two samples.
// Seek gets called and advances beyond the max time, which was just accepted as a valid sample.
func TestChunkSeriesIterator_NextWithMinTime(t *testing.T) {
	metas := []chunks.Meta{
		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 6}, sample{5, 6}, sample{7, 8}}),
	}

	it := newChunkSeriesIterator(metas, nil, 2, 4)
	testutil.Assert(t, it.Next() == false, "")
}

func TestPopulatedCSReturnsValidChunkSlice(t *testing.T) {
	lbls := []labels.Labels{labels.New(labels.Label{Name: "a", Value: "b"})}
	chunkMetas := [][]chunks.Meta{
		{
			{MinTime: 1, MaxTime: 2, Ref: 1},
			{MinTime: 3, MaxTime: 4, Ref: 2},
			{MinTime: 10, MaxTime: 12, Ref: 3},
		},
	}

	cr := mockChunkReader(
		map[uint64]chunkenc.Chunk{
			1: chunkenc.NewXORChunk(),
			2: chunkenc.NewXORChunk(),
			3: chunkenc.NewXORChunk(),
		},
	)

	m := &mockChunkSeriesSet{l: lbls, cm: chunkMetas, i: -1}
	p := &populatedChunkSeries{
		set:    m,
		chunks: cr,

		mint: 0,
		maxt: 0,
	}

	testutil.Assert(t, p.Next() == false, "")

	p.mint = 6
	p.maxt = 9
	testutil.Assert(t, p.Next() == false, "")

	// Test the case where 1 chunk could cause an unpopulated chunk to be returned.
	chunkMetas = [][]chunks.Meta{
		{
			{MinTime: 1, MaxTime: 2, Ref: 1},
		},
	}

	m = &mockChunkSeriesSet{l: lbls, cm: chunkMetas, i: -1}
	p = &populatedChunkSeries{
		set:    m,
		chunks: cr,

		mint: 10,
		maxt: 15,
	}
	testutil.Assert(t, p.Next() == false, "")
}
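
// mockChunkSeriesSet iterates fixed, parallel slices of label sets and chunk
// metadata and reports no tombstone intervals.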
type mockChunkSeriesSet struct {
	l  []labels.Labels
	cm [][]chunks.Meta

	i int
}

func (m *mockChunkSeriesSet) Next() bool {
	if len(m.l) != len(m.cm) {
		return false
	}
	m.i++
	return m.i < len(m.l)
}

func (m *mockChunkSeriesSet) At() (labels.Labels, []chunks.Meta, tombstones.Intervals) {
	return m.l[m.i], m.cm[m.i], nil
}

func (m *mockChunkSeriesSet) Err() error {
	return nil
}

// Test the cost of merging series sets for different numbers of merged sets and their size.
// The subsets are all equivalent, so this does not capture merging of partial or non-overlapping sets well.
func BenchmarkMergedSeriesSet(b *testing.B) {
	var sel = func(sets []SeriesSet) SeriesSet {
		return NewMergedSeriesSet(sets)
	}

	for _, k := range []int{
		100,
		1000,
		10000,
		20000,
	} {
		for _, j := range []int{1, 2, 4, 8, 16, 32} {
			b.Run(fmt.Sprintf("series=%d,blocks=%d", k, j), func(b *testing.B) {
				lbls, err := labels.ReadLabels(filepath.Join("testdata", "20kseries.json"), k)
				testutil.Ok(b, err)

				sort.Sort(labels.Slice(lbls))

				in := make([][]Series, j)

				for _, l := range lbls {
					l2 := l
					for j := range in {
						in[j] = append(in[j], &mockSeries{labels: func() labels.Labels { return l2 }})
					}
				}

				b.ResetTimer()

				for i := 0; i < b.N; i++ {
					var sets []SeriesSet
					for _, s := range in {
						sets = append(sets, newMockSeriesSet(s))
					}
					ms := sel(sets)

					i := 0
					for ms.Next() {
						i++
					}
					testutil.Ok(b, ms.Err())
					testutil.Equals(b, len(lbls), i)
				}
			})
		}
	}
}
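
// mockChunkReader serves chunks from an in-memory map keyed by chunk reference.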
type mockChunkReader map[uint64]chunkenc.Chunk

func (cr mockChunkReader) Chunk(id uint64) (chunkenc.Chunk, error) {
	chk, ok := cr[id]
	if ok {
		return chk, nil
	}

	return nil, errors.New("Chunk with ref not found")
}

func (cr mockChunkReader) Close() error {
	return nil
}

func TestDeletedIterator(t *testing.T) {
	chk := chunkenc.NewXORChunk()
	app, err := chk.Appender()
	testutil.Ok(t, err)
	// Insert random stuff from (0, 1000).
	act := make([]sample, 1000)
	for i := 0; i < 1000; i++ {
		act[i].t = int64(i)
		act[i].v = rand.Float64()
		app.Append(act[i].t, act[i].v)
	}

	cases := []struct {
		r tombstones.Intervals
	}{
		{r: tombstones.Intervals{{Mint: 1, Maxt: 20}}},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 10}, {Mint: 12, Maxt: 20}, {Mint: 21, Maxt: 23}, {Mint: 25, Maxt: 30}}},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 10}, {Mint: 12, Maxt: 20}, {Mint: 20, Maxt: 30}}},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 10}, {Mint: 12, Maxt: 23}, {Mint: 25, Maxt: 30}}},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 23}, {Mint: 12, Maxt: 20}, {Mint: 25, Maxt: 30}}},
		{r: tombstones.Intervals{{Mint: 1, Maxt: 23}, {Mint: 12, Maxt: 20}, {Mint: 25, Maxt: 3000}}},
		{r: tombstones.Intervals{{Mint: 0, Maxt: 2000}}},
		{r: tombstones.Intervals{{Mint: 500, Maxt: 2000}}},
		{r: tombstones.Intervals{{Mint: 0, Maxt: 200}}},
		{r: tombstones.Intervals{{Mint: 1000, Maxt: 20000}}},
	}

	for _, c := range cases {
		i := int64(-1)
		it := &deletedIterator{it: chk.Iterator(nil), intervals: c.r[:]}
		ranges := c.r[:]
		for it.Next() {
			i++
			for _, tr := range ranges {
				if tr.InBounds(i) {
					i = tr.Maxt + 1
					ranges = ranges[1:]
				}
			}

			testutil.Assert(t, i < 1000, "")

			ts, v := it.At()
			testutil.Equals(t, act[i].t, ts)
			testutil.Equals(t, act[i].v, v)
		}
		// There has been an extra call to Next().
		i++
		for _, tr := range ranges {
			if tr.InBounds(i) {
				i = tr.Maxt + 1
				ranges = ranges[1:]
			}
		}

		testutil.Assert(t, i >= 1000, "")
		testutil.Ok(t, it.Err())
	}
}
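
// series holds the label set and chunk metadata that mockIndex stores for a
// single series reference.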
type series struct {
	l      labels.Labels
	chunks []chunks.Meta
}
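
// mockIndex is a simple in-memory index used by the tests in this file to
// build and query blocks without touching disk.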
type mockIndex struct {
	series     map[uint64]series
	labelIndex map[string][]string
	postings   map[labels.Label][]uint64
	symbols    map[string]struct{}
}

func newMockIndex() mockIndex {
	ix := mockIndex{
		series:     make(map[uint64]series),
		labelIndex: make(map[string][]string),
		postings:   make(map[labels.Label][]uint64),
		symbols:    make(map[string]struct{}),
	}
	return ix
}

func (m mockIndex) Symbols() index.StringIter {
	l := []string{}
	for s := range m.symbols {
		l = append(l, s)
	}
	sort.Strings(l)
	return index.NewStringListIter(l)
}
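
// AddSeries records the labels and chunk metadata for ref and registers all
// label names and values as symbols. Chunk payloads are dropped, since an
// index never stores actual chunk data.
//
// A minimal usage sketch; the field names are taken from how mockIndex is used
// in this file, and a constructor initialising these maps is assumed:
//
//	mi := mockIndex{
//		series:   map[uint64]series{},
//		symbols:  map[string]struct{}{},
//		postings: map[labels.Label][]uint64{},
//	}
//	_ = mi.AddSeries(1, labels.FromStrings("a", "b"))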
func (m *mockIndex) AddSeries(ref uint64, l labels.Labels, chunks ...chunks.Meta) error {
	if _, ok := m.series[ref]; ok {
		return errors.Errorf("series with reference %d already added", ref)
	}
	for _, lbl := range l {
		m.symbols[lbl.Name] = struct{}{}
		m.symbols[lbl.Value] = struct{}{}
	}

	s := series{l: l}
	// Actual chunk data is not stored in the index.
	for _, c := range chunks {
		c.Chunk = nil
		s.chunks = append(s.chunks, c)
	}
	m.series[ref] = s

	return nil
}

func (m mockIndex) WriteLabelIndex(names []string, values []string) error {
	// TODO support composite indexes
	if len(names) != 1 {
		return errors.New("composite indexes not supported yet")
	}
	sort.Strings(values)
	m.labelIndex[names[0]] = values
	return nil
}
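
// WritePostings stores the expanded postings list for the given label pair and
// errors if postings for that pair were already written.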
func (m mockIndex) WritePostings(name, value string, it index.Postings) error {
	l := labels.Label{Name: name, Value: value}
	if _, ok := m.postings[l]; ok {
		return errors.Errorf("postings for %s already added", l)
	}
	ep, err := index.ExpandPostings(it)
	if err != nil {
		return err
	}
	m.postings[l] = ep
	return nil
}

func (m mockIndex) Close() error {
	return nil
}

func (m mockIndex) LabelValues(names ...string) (index.StringTuples, error) {
	// TODO support composite indexes
	if len(names) != 1 {
		return nil, errors.New("composite indexes not supported yet")
	}

	return index.NewStringTuples(m.labelIndex[names[0]], 1)
}
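
// Postings merges the stored postings lists for all requested values of a
// label name, with unknown values simply contributing empty lists.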
func (m mockIndex) Postings(name string, values ...string) (index.Postings, error) {
	res := make([]index.Postings, 0, len(values))
	for _, value := range values {
		l := labels.Label{Name: name, Value: value}
		res = append(res, index.NewListPostings(m.postings[l]))
	}
	return index.Merge(res...), nil
}
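
// SortedPostings expands a postings list and re-sorts it by the label sets of
// the referenced series, so series come back in label order as the queriers
// expect.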
func (m mockIndex) SortedPostings(p index.Postings) index.Postings {
	ep, err := index.ExpandPostings(p)
	if err != nil {
		return index.ErrPostings(errors.Wrap(err, "expand postings"))
	}

	sort.Slice(ep, func(i, j int) bool {
		return labels.Compare(m.series[ep[i]].l, m.series[ep[j]].l) < 0
	})
	return index.NewListPostings(ep)
}
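
// Series copies the stored label set and chunk metas for ref into lset and
// chks, returning ErrNotFound for unknown references.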
func (m mockIndex) Series(ref uint64, lset *labels.Labels, chks *[]chunks.Meta) error {
	s, ok := m.series[ref]
	if !ok {
		return ErrNotFound
	}
	*lset = append((*lset)[:0], s.l...)
	*chks = append((*chks)[:0], s.chunks...)

	return nil
}

func (m mockIndex) LabelNames() ([]string, error) {
	labelNames := make([]string, 0, len(m.labelIndex))
	for name := range m.labelIndex {
		labelNames = append(labelNames, name)
	}
	sort.Strings(labelNames)
	return labelNames, nil
}
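
// mockSeries is a Series backed by plain closures. newSeries builds one from a
// label map and a slice of samples, served through a listSeriesIterator.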
type mockSeries struct {
	labels   func() labels.Labels
	iterator func() SeriesIterator
}

func newSeries(l map[string]string, s []tsdbutil.Sample) Series {
	return &mockSeries{
		labels:   func() labels.Labels { return labels.FromMap(l) },
		iterator: func() SeriesIterator { return newListSeriesIterator(s) },
	}
}

func (m *mockSeries) Labels() labels.Labels { return m.labels() }
func (m *mockSeries) Iterator() SeriesIterator { return m.iterator() }
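
// listSeriesIterator is an in-memory SeriesIterator over a fixed slice of
// samples.
//
// A small usage sketch, assuming the sample helper type used elsewhere in this
// file:
//
//	it := newListSeriesIterator([]tsdbutil.Sample{sample{t: 1, v: 1}, sample{t: 2, v: 4}})
//	for it.Next() {
//		ts, v := it.At() // samples are returned in slice order
//		_, _ = ts, v
//	}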
type listSeriesIterator struct {
	list []tsdbutil.Sample
	idx  int
}

func newListSeriesIterator(list []tsdbutil.Sample) *listSeriesIterator {
	return &listSeriesIterator{list: list, idx: -1}
}

func (it *listSeriesIterator) At() (int64, float64) {
	s := it.list[it.idx]
	return s.T(), s.V()
}

func (it *listSeriesIterator) Next() bool {
	it.idx++
	return it.idx < len(it.list)
}
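
// Seek advances the iterator to the first sample with timestamp >= t. It only
// ever searches forward from the current position.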
func (it *listSeriesIterator) Seek(t int64) bool {
	if it.idx == -1 {
		it.idx = 0
	}
	// Do binary search between current position and end. Search returns an
	// offset relative to the current position, so add it back onto idx.
	it.idx += sort.Search(len(it.list)-it.idx, func(i int) bool {
		s := it.list[i+it.idx]
		return s.T() >= t
	})

	return it.idx < len(it.list)
}

func (it *listSeriesIterator) Err() error {
	return nil
}
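
// BenchmarkQueryIterator measures iterating through all samples of every
// series across many blocks, where each block overlaps the previous one by a
// configurable percentage; overlapping setups are queried through the
// verticalQuerier.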
func BenchmarkQueryIterator(b *testing.B) {
	cases := []struct {
		numBlocks                   int
		numSeries                   int
		numSamplesPerSeriesPerBlock int
		overlapPercentages          []int // >=0, <=100, this is w.r.t. the previous block.
	}{
		{
			numBlocks:                   20,
			numSeries:                   1000,
			numSamplesPerSeriesPerBlock: 20000,
			overlapPercentages:          []int{0, 10, 30},
		},
	}

	for _, c := range cases {
		for _, overlapPercentage := range c.overlapPercentages {
			benchMsg := fmt.Sprintf("nBlocks=%d,nSeries=%d,numSamplesPerSeriesPerBlock=%d,overlap=%d%%",
				c.numBlocks, c.numSeries, c.numSamplesPerSeriesPerBlock, overlapPercentage)

			b.Run(benchMsg, func(b *testing.B) {
				dir, err := ioutil.TempDir("", "bench_query_iterator")
				testutil.Ok(b, err)
				defer func() {
					testutil.Ok(b, os.RemoveAll(dir))
				}()

				var (
					blocks          []*Block
					overlapDelta    = int64(overlapPercentage * c.numSamplesPerSeriesPerBlock / 100)
					prefilledLabels []map[string]string
					generatedSeries []Series
				)
				for i := int64(0); i < int64(c.numBlocks); i++ {
					offset := i * overlapDelta
					mint := i*int64(c.numSamplesPerSeriesPerBlock) - offset
					maxt := mint + int64(c.numSamplesPerSeriesPerBlock) - 1
					if len(prefilledLabels) == 0 {
						generatedSeries = genSeries(c.numSeries, 10, mint, maxt)
						for _, s := range generatedSeries {
							prefilledLabels = append(prefilledLabels, s.Labels().Map())
						}
					} else {
						generatedSeries = populateSeries(prefilledLabels, mint, maxt)
					}
					block, err := OpenBlock(nil, createBlock(b, dir, generatedSeries), nil)
					testutil.Ok(b, err)
					blocks = append(blocks, block)
					defer block.Close()
				}

				que := &querier{
					blocks: make([]Querier, 0, len(blocks)),
				}
				for _, blk := range blocks {
					q, err := NewBlockQuerier(blk, math.MinInt64, math.MaxInt64)
					testutil.Ok(b, err)
					que.blocks = append(que.blocks, q)
				}

				var sq Querier = que
				if overlapPercentage > 0 {
					sq = &verticalQuerier{
						querier: *que,
					}
				}
				defer sq.Close()

				benchQuery(b, c.numSeries, sq, labels.Selector{labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*")})
			})
		}
	}
}
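
// BenchmarkQuerySeek is the Seek counterpart of BenchmarkQueryIterator: it
// seeks to every timestamp in the covered range on each series iterator
// instead of scanning the samples sequentially.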
func BenchmarkQuerySeek(b *testing.B) {
	cases := []struct {
		numBlocks                   int
		numSeries                   int
		numSamplesPerSeriesPerBlock int
		overlapPercentages          []int // >=0, <=100, this is w.r.t. the previous block.
	}{
		{
			numBlocks:                   20,
			numSeries:                   100,
			numSamplesPerSeriesPerBlock: 2000,
			overlapPercentages:          []int{0, 10, 30, 50},
		},
	}

	for _, c := range cases {
		for _, overlapPercentage := range c.overlapPercentages {
			benchMsg := fmt.Sprintf("nBlocks=%d,nSeries=%d,numSamplesPerSeriesPerBlock=%d,overlap=%d%%",
				c.numBlocks, c.numSeries, c.numSamplesPerSeriesPerBlock, overlapPercentage)

			b.Run(benchMsg, func(b *testing.B) {
				dir, err := ioutil.TempDir("", "bench_query_iterator")
				testutil.Ok(b, err)
				defer func() {
					testutil.Ok(b, os.RemoveAll(dir))
				}()

				var (
					blocks          []*Block
					overlapDelta    = int64(overlapPercentage * c.numSamplesPerSeriesPerBlock / 100)
					prefilledLabels []map[string]string
					generatedSeries []Series
				)
				for i := int64(0); i < int64(c.numBlocks); i++ {
					offset := i * overlapDelta
					mint := i*int64(c.numSamplesPerSeriesPerBlock) - offset
					maxt := mint + int64(c.numSamplesPerSeriesPerBlock) - 1
					if len(prefilledLabels) == 0 {
						generatedSeries = genSeries(c.numSeries, 10, mint, maxt)
						for _, s := range generatedSeries {
							prefilledLabels = append(prefilledLabels, s.Labels().Map())
						}
					} else {
						generatedSeries = populateSeries(prefilledLabels, mint, maxt)
					}
					block, err := OpenBlock(nil, createBlock(b, dir, generatedSeries), nil)
					testutil.Ok(b, err)
					blocks = append(blocks, block)
					defer block.Close()
				}

				que := &querier{
					blocks: make([]Querier, 0, len(blocks)),
				}
				for _, blk := range blocks {
					q, err := NewBlockQuerier(blk, math.MinInt64, math.MaxInt64)
					testutil.Ok(b, err)
					que.blocks = append(que.blocks, q)
				}

				var sq Querier = que
				if overlapPercentage > 0 {
					sq = &verticalQuerier{
						querier: *que,
					}
				}
				defer sq.Close()

				mint := blocks[0].meta.MinTime
				maxt := blocks[len(blocks)-1].meta.MaxTime

				b.ResetTimer()
				b.ReportAllocs()

				ss, err := sq.Select(labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*"))
				for ss.Next() {
					it := ss.At().Iterator()
					for t := mint; t <= maxt; t++ {
						it.Seek(t)
					}
					testutil.Ok(b, it.Err())
				}
				testutil.Ok(b, ss.Err())
				testutil.Ok(b, err)
			})
		}
	}
}
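
// BenchmarkSetMatcher measures Select with anchored alternation ("set")
// regexps across different block counts, series counts, and cardinalities, to
// judge when treating such a pattern as a set of exact values pays off over a
// plain regex matcher.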
// Refer to https://github.com/prometheus/prometheus/issues/2651.
func BenchmarkSetMatcher(b *testing.B) {
	cases := []struct {
		numBlocks                   int
		numSeries                   int
		numSamplesPerSeriesPerBlock int
		cardinality                 int
		pattern                     string
	}{
		// The first three cases are to find out whether the set
		// matcher is always faster than regex matcher.
		{
			numBlocks:                   1,
			numSeries:                   1,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		{
			numBlocks:                   1,
			numSeries:                   15,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		{
			numBlocks:                   1,
			numSeries:                   15,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100,
			pattern:                     "^(?:1|2|3)$",
		},
		// Big data sizes benchmarks.
		{
			numBlocks:                   20,
			numSeries:                   1000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100,
			pattern:                     "^(?:1|2|3)$",
		},
		{
			numBlocks:                   20,
			numSeries:                   1000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		// Increase cardinality.
		{
			numBlocks:                   1,
			numSeries:                   100000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 100000,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		{
			numBlocks:                   1,
			numSeries:                   500000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 500000,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		{
			numBlocks:                   10,
			numSeries:                   500000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 500000,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
		{
			numBlocks:                   1,
			numSeries:                   1000000,
			numSamplesPerSeriesPerBlock: 10,
			cardinality:                 1000000,
			pattern:                     "^(?:1|2|3|4|5|6|7|8|9|10)$",
		},
	}

	for _, c := range cases {
		dir, err := ioutil.TempDir("", "bench_postings_for_matchers")
		testutil.Ok(b, err)
		defer func() {
			testutil.Ok(b, os.RemoveAll(dir))
		}()

		var (
			blocks          []*Block
			prefilledLabels []map[string]string
			generatedSeries []Series
		)
		for i := int64(0); i < int64(c.numBlocks); i++ {
			mint := i * int64(c.numSamplesPerSeriesPerBlock)
			maxt := mint + int64(c.numSamplesPerSeriesPerBlock) - 1
			if len(prefilledLabels) == 0 {
				generatedSeries = genSeries(c.numSeries, 10, mint, maxt)
				for _, s := range generatedSeries {
					prefilledLabels = append(prefilledLabels, s.Labels().Map())
				}
			} else {
				generatedSeries = populateSeries(prefilledLabels, mint, maxt)
			}
			block, err := OpenBlock(nil, createBlock(b, dir, generatedSeries), nil)
			testutil.Ok(b, err)
			blocks = append(blocks, block)
			defer block.Close()
		}

		que := &querier{
			blocks: make([]Querier, 0, len(blocks)),
		}
		for _, blk := range blocks {
			q, err := NewBlockQuerier(blk, math.MinInt64, math.MaxInt64)
			testutil.Ok(b, err)
			que.blocks = append(que.blocks, q)
		}
		defer que.Close()

		benchMsg := fmt.Sprintf("nSeries=%d,nBlocks=%d,cardinality=%d,pattern=\"%s\"", c.numSeries, c.numBlocks, c.cardinality, c.pattern)
		b.Run(benchMsg, func(b *testing.B) {
			b.ResetTimer()
			b.ReportAllocs()
			for n := 0; n < b.N; n++ {
				_, err := que.Select(labels.MustNewMatcher(labels.MatchRegexp, "test", c.pattern))
				testutil.Ok(b, err)
			}
		})
	}
}
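
// TestFindSetMatches checks that findSetMatches extracts the literal
// alternatives from an anchored "^(?:a|b|c)$" pattern and returns nothing when
// the pattern is unanchored or contains unescaped regex metacharacters.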
// Refer to https://github.com/prometheus/prometheus/issues/2651.
func TestFindSetMatches(t *testing.T) {
	cases := []struct {
		pattern string
		exp     []string
	}{
		// Simple sets.
		{
			pattern: "^(?:foo|bar|baz)$",
			exp: []string{
				"foo",
				"bar",
				"baz",
			},
		},
		// Simple sets containing escaped characters.
		{
			pattern: "^(?:fo\\.o|bar\\?|\\^baz)$",
			exp: []string{
				"fo.o",
				"bar?",
				"^baz",
			},
		},
		// Simple sets containing special characters without escaping.
		{
			pattern: "^(?:fo.o|bar?|^baz)$",
			exp:     nil,
		},
		// Missing wrapper.
		{
			pattern: "foo|bar|baz",
			exp:     nil,
		},
	}

	for _, c := range cases {
		matches := findSetMatches(c.pattern)
		if len(c.exp) == 0 {
			if len(matches) != 0 {
				t.Errorf("Evaluating %s, unexpected result %v", c.pattern, matches)
			}
		} else {
			if len(matches) != len(c.exp) {
				t.Errorf("Evaluating %s, length of result not equal to exp", c.pattern)
			} else {
				for i := 0; i < len(c.exp); i++ {
					if c.exp[i] != matches[i] {
						t.Errorf("Evaluating %s, unexpected result %s", c.pattern, matches[i])
					}
				}
			}
		}
	}
}
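
// TestPostingsForMatchers exercises PostingsForMatchers against an in-memory
// head: each case lists a set of matchers and the exact label sets expected
// back, covering equality, inequality, empty-value, and regex matchers.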
func TestPostingsForMatchers(t *testing.T) {
	h, err := NewHead(nil, nil, nil, 1000)
	testutil.Ok(t, err)
	defer func() {
		testutil.Ok(t, h.Close())
	}()

	app := h.Appender()
	app.Add(labels.FromStrings("n", "1"), 0, 0)
	app.Add(labels.FromStrings("n", "1", "i", "a"), 0, 0)
	app.Add(labels.FromStrings("n", "1", "i", "b"), 0, 0)
	app.Add(labels.FromStrings("n", "2"), 0, 0)
	app.Add(labels.FromStrings("n", "2.5"), 0, 0)
	testutil.Ok(t, app.Commit())

	cases := []struct {
		matchers []*labels.Matcher
		exp      []labels.Labels
	}{
		// Simple equals.
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchEqual, "i", "a")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchEqual, "i", "missing")},
			exp:      []labels.Labels{},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "missing", "")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
				labels.FromStrings("n", "2"),
				labels.FromStrings("n", "2.5"),
			},
		},
		// Not equals.
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchNotEqual, "n", "1")},
			exp: []labels.Labels{
				labels.FromStrings("n", "2"),
				labels.FromStrings("n", "2.5"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchNotEqual, "i", "")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchNotEqual, "missing", "")},
			exp:      []labels.Labels{},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotEqual, "i", "a")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotEqual, "i", "")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		// Regex.
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "n", "^1$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^a$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^a?$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "a"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "i", "^$")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{
|
|
|
|
labels.FromStrings("n", "1"),
|
|
|
|
labels.FromStrings("n", "2"),
|
2019-05-27 04:24:46 -07:00
|
|
|
labels.FromStrings("n", "2.5"),
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
2019-11-18 11:53:33 -08:00
|
|
|
matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^$")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{
|
|
|
|
labels.FromStrings("n", "1"),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
2019-11-18 11:53:33 -08:00
|
|
|
matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^.*$")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{
|
|
|
|
labels.FromStrings("n", "1"),
|
|
|
|
labels.FromStrings("n", "1", "i", "a"),
|
|
|
|
labels.FromStrings("n", "1", "i", "b"),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
2019-11-18 11:53:33 -08:00
|
|
|
matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^.+$")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{
|
|
|
|
labels.FromStrings("n", "1", "i", "a"),
|
|
|
|
labels.FromStrings("n", "1", "i", "b"),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
// Not regex.
|
|
|
|
{
|
2019-11-18 11:53:33 -08:00
|
|
|
matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchNotRegexp, "n", "^1$")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{
|
|
|
|
labels.FromStrings("n", "2"),
|
2019-05-27 04:24:46 -07:00
|
|
|
labels.FromStrings("n", "2.5"),
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
2019-11-18 11:53:33 -08:00
|
|
|
matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotRegexp, "i", "^a$")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{
|
|
|
|
labels.FromStrings("n", "1"),
|
|
|
|
labels.FromStrings("n", "1", "i", "b"),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
2019-11-18 11:53:33 -08:00
|
|
|
matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotRegexp, "i", "^a?$")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{
|
|
|
|
labels.FromStrings("n", "1", "i", "b"),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
2019-11-18 11:53:33 -08:00
|
|
|
matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotRegexp, "i", "^$")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{
|
|
|
|
labels.FromStrings("n", "1", "i", "a"),
|
|
|
|
labels.FromStrings("n", "1", "i", "b"),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
2019-11-18 11:53:33 -08:00
|
|
|
matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotRegexp, "i", "^.*$")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{},
|
|
|
|
},
|
|
|
|
{
|
2019-11-18 11:53:33 -08:00
|
|
|
matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotRegexp, "i", "^.+$")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{
|
|
|
|
labels.FromStrings("n", "1"),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
// Combinations.
|
|
|
|
{
|
2019-11-18 11:53:33 -08:00
|
|
|
matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotEqual, "i", ""), labels.MustNewMatcher(labels.MatchEqual, "i", "a")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{
|
|
|
|
labels.FromStrings("n", "1", "i", "a"),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
2019-11-18 11:53:33 -08:00
|
|
|
matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "n", "1"), labels.MustNewMatcher(labels.MatchNotEqual, "i", "b"), labels.MustNewMatcher(labels.MatchRegexp, "i", "^(b|a).*$")},
|
Be smarter in how we look at matchers. (#572)
* Add unittests for PostingsForMatcher.
* Selector methods are all stateless, don't need a reference.
* Be smarter in how we look at matchers.
Look at all matchers to see if a label can be empty.
Optimise Not handling, so i!="2" is a simple lookup
rather than an inverse postings list.
All all the Withouts together, rather than
having to subtract each from all postings.
Change the pre-expand the postings logic to always do it before doing a
Without only. Don't do that if it's already a list.
The initial goal here was that the oft-seen pattern
i=~"something.+",i!="foo",i!="bar" becomes more efficient.
benchmark old ns/op new ns/op delta
BenchmarkHeadPostingForMatchers/n="1"-4 5888 6160 +4.62%
BenchmarkHeadPostingForMatchers/n="1",j="foo"-4 7190 6640 -7.65%
BenchmarkHeadPostingForMatchers/j="foo",n="1"-4 6038 5923 -1.90%
BenchmarkHeadPostingForMatchers/n="1",j!="foo"-4 6030884 4850525 -19.57%
BenchmarkHeadPostingForMatchers/i=~".*"-4 887377940 230329137 -74.04%
BenchmarkHeadPostingForMatchers/i=~".+"-4 490316101 319931758 -34.75%
BenchmarkHeadPostingForMatchers/i=~""-4 594961991 130279313 -78.10%
BenchmarkHeadPostingForMatchers/i!=""-4 537542388 318751015 -40.70%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",j="foo"-4 10460243 8565195 -18.12%
BenchmarkHeadPostingForMatchers/n="1",i=~".*",i!="2",j="foo"-4 44964267 8561546 -80.96%
BenchmarkHeadPostingForMatchers/n="1",i!="",j="foo"-4 42244885 29137737 -31.03%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",j="foo"-4 35285834 32774584 -7.12%
BenchmarkHeadPostingForMatchers/n="1",i=~"1.+",j="foo"-4 8951047 8379024 -6.39%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!="2",j="foo"-4 63813335 30672688 -51.93%
BenchmarkHeadPostingForMatchers/n="1",i=~".+",i!~"2.*",j="foo"-4 45381112 44924397 -1.01%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2019-04-09 03:59:45 -07:00
|
|
|
exp: []labels.Labels{
|
|
|
|
labels.FromStrings("n", "1", "i", "a"),
|
|
|
|
},
|
|
|
|
},
|
2019-05-27 04:24:46 -07:00
|
|
|
		// Set optimization for Regex.
		// Refer to https://github.com/prometheus/prometheus/issues/2651.
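		// These alternation-only patterns are expected to be resolved via direct set
		// lookups on the label values rather than by scanning every value with the regexp.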
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "n", "^(?:1|2)$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
				labels.FromStrings("n", "2"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "i", "^(?:a|b)$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1", "i", "a"),
				labels.FromStrings("n", "1", "i", "b"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "n", "^(?:x1|2)$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "2"),
			},
		},
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "n", "^(?:2|2\\.5)$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "2"),
				labels.FromStrings("n", "2.5"),
			},
		},
		// Empty value.
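		// The alternation below contains an empty branch, so series without the "i" label match too.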
		{
			matchers: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "i", "^(?:c||d)$")},
			exp: []labels.Labels{
				labels.FromStrings("n", "1"),
				labels.FromStrings("n", "2"),
				labels.FromStrings("n", "2.5"),
			},
		},
	}

	ir, err := h.Index()
	testutil.Ok(t, err)

	for _, c := range cases {
		exp := map[string]struct{}{}
		for _, l := range c.exp {
			exp[l.String()] = struct{}{}
		}
		p, err := PostingsForMatchers(ir, c.matchers...)
		testutil.Ok(t, err)

		for p.Next() {
			lbls := labels.Labels{}
			testutil.Ok(t, ir.Series(p.At(), &lbls, &[]chunks.Meta{}))
			if _, ok := exp[lbls.String()]; !ok {
				t.Errorf("Evaluating %v, unexpected result %s", c.matchers, lbls.String())
			} else {
				delete(exp, lbls.String())
			}
		}
		testutil.Ok(t, p.Err())
		if len(exp) != 0 {
			t.Errorf("Evaluating %v, missing results %+v", c.matchers, exp)
		}
	}
}

// TestClose ensures that calling Close more than once doesn't block and doesn't panic.
func TestClose(t *testing.T) {
	dir, err := ioutil.TempDir("", "test_storage")
	if err != nil {
		t.Fatalf("Opening test dir failed: %s", err)
	}
	defer func() {
		testutil.Ok(t, os.RemoveAll(dir))
	}()
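
	// Populate the directory with two persisted blocks before opening the DB.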
	createBlock(t, dir, genSeries(1, 1, 0, 10))
	createBlock(t, dir, genSeries(1, 1, 10, 20))

	db, err := Open(dir, nil, nil, DefaultOptions)
	if err != nil {
		t.Fatalf("Opening test storage failed: %s", err)
	}
	defer func() {
		testutil.Ok(t, db.Close())
	}()

	q, err := db.Querier(0, 20)
	testutil.Ok(t, err)
	testutil.Ok(t, q.Close())
	testutil.NotOk(t, q.Close())
}

func BenchmarkQueries(b *testing.B) {
	cases := map[string]labels.Selector{
		"Eq Matcher: Expansion - 1": {
			labels.MustNewMatcher(labels.MatchEqual, "la", "va"),
		},
		"Eq Matcher: Expansion - 2": {
			labels.MustNewMatcher(labels.MatchEqual, "la", "va"),
			labels.MustNewMatcher(labels.MatchEqual, "lb", "vb"),
		},
		"Eq Matcher: Expansion - 3": {
			labels.MustNewMatcher(labels.MatchEqual, "la", "va"),
			labels.MustNewMatcher(labels.MatchEqual, "lb", "vb"),
			labels.MustNewMatcher(labels.MatchEqual, "lc", "vc"),
		},
		"Regex Matcher: Expansion - 1": {
			labels.MustNewMatcher(labels.MatchRegexp, "la", ".*va"),
		},
		"Regex Matcher: Expansion - 2": {
			labels.MustNewMatcher(labels.MatchRegexp, "la", ".*va"),
			labels.MustNewMatcher(labels.MatchRegexp, "lb", ".*vb"),
		},
		"Regex Matcher: Expansion - 3": {
			labels.MustNewMatcher(labels.MatchRegexp, "la", ".*va"),
			labels.MustNewMatcher(labels.MatchRegexp, "lb", ".*vb"),
			labels.MustNewMatcher(labels.MatchRegexp, "lc", ".*vc"),
		},
	}

	queryTypes := make(map[string]Querier)
	defer func() {
		for _, q := range queryTypes {
			// We can't check the error here: the entries in queryTypes share the same
			// underlying block queriers, so some of them will already have been closed
			// by a previous iteration.
			q.Close()
		}
	}()

	for title, selectors := range cases {
		for _, nSeries := range []int{10} {
			for _, nSamples := range []int64{1000, 10000, 100000} {
				dir, err := ioutil.TempDir("", "test_persisted_query")
				testutil.Ok(b, err)
				defer func() {
					testutil.Ok(b, os.RemoveAll(dir))
				}()

				series := genSeries(nSeries, 5, 1, int64(nSamples))

				// Add some common labels to make the matchers select these series.
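				// Only the first len(commonLbls) series get every selected label, so each case
				// should match exactly the number of series encoded at the end of its title.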
				{
					var commonLbls labels.Labels
					for _, selector := range selectors {
						switch selector.Type {
						case labels.MatchEqual:
							commonLbls = append(commonLbls, labels.Label{Name: selector.Name, Value: selector.Value})
						case labels.MatchRegexp:
							commonLbls = append(commonLbls, labels.Label{Name: selector.Name, Value: selector.Value})
						}
					}
					for i := range commonLbls {
						s := series[i].(*mockSeries)
						allLabels := append(commonLbls, s.Labels()...)
						s = &mockSeries{
							labels:   func() labels.Labels { return allLabels },
							iterator: s.iterator,
						}
						series[i] = s
					}
				}

				qs := []Querier{}
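				// Persist the same series into several blocks so the single-, 3- and 10-block
				// queriers below all operate on identical data.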
				for x := 0; x <= 10; x++ {
					block, err := OpenBlock(nil, createBlock(b, dir, series), nil)
					testutil.Ok(b, err)
					q, err := NewBlockQuerier(block, 1, int64(nSamples))
					testutil.Ok(b, err)
					qs = append(qs, q)
				}
				queryTypes["_1-Block"] = &querier{blocks: qs[:1]}
				queryTypes["_3-Blocks"] = &querier{blocks: qs[0:3]}
				queryTypes["_10-Blocks"] = &querier{blocks: qs}

				head := createHead(b, series)
				qHead, err := NewBlockQuerier(head, 1, int64(nSamples))
				testutil.Ok(b, err)
				queryTypes["_Head"] = qHead

				for qtype, querier := range queryTypes {
					b.Run(title+qtype+"_nSeries:"+strconv.Itoa(nSeries)+"_nSamples:"+strconv.Itoa(int(nSamples)), func(b *testing.B) {
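						// The expected number of matching series is encoded as the last character
						// of the case title, e.g. "Expansion - 3".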
						expExpansions, err := strconv.Atoi(string(title[len(title)-1]))
						testutil.Ok(b, err)
						benchQuery(b, expExpansions, querier, selectors)
					})
				}
			}
		}
	}
}

func benchQuery(b *testing.B, expExpansions int, q Querier, selectors labels.Selector) {
	b.ResetTimer()
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		ss, err := q.Select(selectors...)
		testutil.Ok(b, err)
		var actualExpansions int
		for ss.Next() {
			s := ss.At()
			s.Labels()
			it := s.Iterator()
			for it.Next() {
			}
			actualExpansions++
		}
		testutil.Equals(b, expExpansions, actualExpansions)
		testutil.Ok(b, ss.Err())
	}
}