// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
2016-12-14 06:47:05 -08:00
|
|
|
package tsdb
|
|
|
|
|
|
|
|
import (
|
2017-07-05 07:19:28 -07:00
|
|
|
"fmt"
|
2017-04-13 12:06:14 -07:00
|
|
|
"math"
|
2017-04-13 07:27:31 -07:00
|
|
|
"math/rand"
|
2016-12-19 02:44:11 -08:00
|
|
|
"sort"
|
2016-12-14 06:47:05 -08:00
|
|
|
"testing"
|
|
|
|
|
2017-04-09 07:00:25 -07:00
|
|
|
"github.com/prometheus/tsdb/chunks"
|
2017-04-04 02:27:26 -07:00
|
|
|
"github.com/prometheus/tsdb/labels"
|
2016-12-14 06:47:05 -08:00
|
|
|
"github.com/stretchr/testify/require"
|
|
|
|
)
|
|
|
|
|
2016-12-19 02:44:11 -08:00
|
|
|
// mockSeriesIterator is a SeriesIterator stub whose behavior is injected
// through function fields, letting each test supply arbitrary responses.
type mockSeriesIterator struct {
	seek func(int64) bool          // stub for Seek
	at   func() (int64, float64)   // stub for At
	next func() bool               // stub for Next
	err  func() error              // stub for Err
}
|
|
|
|
|
2017-01-02 04:27:52 -08:00
|
|
|
// Seek forwards to the injected seek stub.
func (m *mockSeriesIterator) Seek(t int64) bool { return m.seek(t) }
|
|
|
|
// At forwards to the injected at stub.
func (m *mockSeriesIterator) At() (int64, float64) { return m.at() }
|
|
|
|
// Next forwards to the injected next stub.
func (m *mockSeriesIterator) Next() bool { return m.next() }
|
|
|
|
// Err forwards to the injected err stub.
func (m *mockSeriesIterator) Err() error { return m.err() }
|
2016-12-19 02:44:11 -08:00
|
|
|
|
|
|
|
// mockSeries is a Series stub whose labels and iterator are supplied as
// functions, so tests can build series inline without a real block.
type mockSeries struct {
	labels   func() labels.Labels    // stub for Labels
	iterator func() SeriesIterator   // stub for Iterator
}
|
|
|
|
|
2017-05-03 10:15:28 -07:00
|
|
|
func newSeries(l map[string]string, s []sample) Series {
|
|
|
|
return &mockSeries{
|
|
|
|
labels: func() labels.Labels { return labels.FromMap(l) },
|
|
|
|
iterator: func() SeriesIterator { return newListSeriesIterator(s) },
|
|
|
|
}
|
|
|
|
}
|
2016-12-21 06:12:26 -08:00
|
|
|
// Labels forwards to the injected labels stub.
func (m *mockSeries) Labels() labels.Labels { return m.labels() }
|
2016-12-19 02:44:11 -08:00
|
|
|
// Iterator forwards to the injected iterator stub.
func (m *mockSeries) Iterator() SeriesIterator { return m.iterator() }
|
|
|
|
|
|
|
|
// listSeriesIterator iterates over an in-memory, time-ordered list of samples.
type listSeriesIterator struct {
	list []sample
	idx  int // cursor into list; -1 means iteration has not started yet
}
|
|
|
|
|
|
|
|
func newListSeriesIterator(list []sample) *listSeriesIterator {
|
|
|
|
return &listSeriesIterator{list: list, idx: -1}
|
|
|
|
}
|
|
|
|
|
2017-01-02 04:27:52 -08:00
|
|
|
func (it *listSeriesIterator) At() (int64, float64) {
|
2016-12-19 02:44:11 -08:00
|
|
|
s := it.list[it.idx]
|
|
|
|
return s.t, s.v
|
|
|
|
}
|
|
|
|
|
|
|
|
func (it *listSeriesIterator) Next() bool {
|
|
|
|
it.idx++
|
|
|
|
return it.idx < len(it.list)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (it *listSeriesIterator) Seek(t int64) bool {
|
2016-12-21 07:06:33 -08:00
|
|
|
if it.idx == -1 {
|
|
|
|
it.idx = 0
|
|
|
|
}
|
2016-12-19 02:44:11 -08:00
|
|
|
// Do binary search between current position and end.
|
|
|
|
it.idx = sort.Search(len(it.list)-it.idx, func(i int) bool {
|
|
|
|
s := it.list[i+it.idx]
|
|
|
|
return s.t >= t
|
|
|
|
})
|
2016-12-21 07:06:33 -08:00
|
|
|
|
2016-12-19 02:44:11 -08:00
|
|
|
return it.idx < len(it.list)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (it *listSeriesIterator) Err() error {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-03-14 07:24:08 -07:00
|
|
|
// TestMergedSeriesSet verifies that merging two series sets interleaves
// samples of series with identical labels and keeps distinct series side by
// side in label-sorted order.
func TestMergedSeriesSet(t *testing.T) {

	cases := []struct {
		// The input sets in order (samples in series in b are strictly
		// after those in a).
		a, b SeriesSet
		// The composition of a and b in the partition series set must yield
		// results equivalent to the result series set.
		exp SeriesSet
	}{
		{
			a: newListSeriesSet([]Series{
				newSeries(map[string]string{
					"a": "a",
				}, []sample{
					{t: 1, v: 1},
				}),
			}),
			b: newListSeriesSet([]Series{
				newSeries(map[string]string{
					"a": "a",
				}, []sample{
					{t: 2, v: 2},
				}),
				newSeries(map[string]string{
					"b": "b",
				}, []sample{
					{t: 1, v: 1},
				}),
			}),
			exp: newListSeriesSet([]Series{
				newSeries(map[string]string{
					"a": "a",
				}, []sample{
					{t: 1, v: 1},
					{t: 2, v: 2},
				}),
				newSeries(map[string]string{
					"b": "b",
				}, []sample{
					{t: 1, v: 1},
				}),
			}),
		},
		{
			a: newListSeriesSet([]Series{
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "127.0.0.1:9090",
				}, []sample{
					{t: 1, v: 1},
				}),
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "localhost:9090",
				}, []sample{
					{t: 1, v: 2},
				}),
			}),
			b: newListSeriesSet([]Series{
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "127.0.0.1:9090",
				}, []sample{
					{t: 2, v: 1},
				}),
				newSeries(map[string]string{
					"handler":  "query",
					"instance": "localhost:9090",
				}, []sample{
					{t: 2, v: 2},
				}),
			}),
			exp: newListSeriesSet([]Series{
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "127.0.0.1:9090",
				}, []sample{
					{t: 1, v: 1},
					{t: 2, v: 1},
				}),
				newSeries(map[string]string{
					"handler":  "prometheus",
					"instance": "localhost:9090",
				}, []sample{
					{t: 1, v: 2},
				}),
				newSeries(map[string]string{
					"handler":  "query",
					"instance": "localhost:9090",
				}, []sample{
					{t: 2, v: 2},
				}),
			}),
		},
	}

Outer:
	for _, c := range cases {
		res := newMergedSeriesSet(c.a, c.b)

		// Walk expected and merged sets in lockstep; they must be exhausted
		// at the same point with identical series.
		for {
			eok, rok := c.exp.Next(), res.Next()
			require.Equal(t, eok, rok, "next")

			if !eok {
				continue Outer
			}
			sexp := c.exp.At()
			sres := res.At()

			require.Equal(t, sexp.Labels(), sres.Labels(), "labels")

			smplExp, errExp := expandSeriesIterator(sexp.Iterator())
			smplRes, errRes := expandSeriesIterator(sres.Iterator())

			require.Equal(t, errExp, errRes, "samples error")
			require.Equal(t, smplExp, smplRes, "samples")
		}
	}
}
|
|
|
|
|
|
|
|
func expandSeriesIterator(it SeriesIterator) (r []sample, err error) {
|
|
|
|
for it.Next() {
|
2017-01-02 04:27:52 -08:00
|
|
|
t, v := it.At()
|
2016-12-19 02:44:11 -08:00
|
|
|
r = append(r, sample{t: t, v: v})
|
|
|
|
}
|
|
|
|
|
|
|
|
return r, it.Err()
|
|
|
|
}
|
2017-04-09 07:00:25 -07:00
|
|
|
|
2017-04-13 07:27:31 -07:00
|
|
|
// Index: labels -> postings -> chunkMetas -> chunkRef
|
|
|
|
// ChunkReader: ref -> vals
|
|
|
|
func createIdxChkReaders(tc []struct {
|
|
|
|
lset map[string]string
|
|
|
|
chunks [][]sample
|
|
|
|
}) (IndexReader, ChunkReader) {
|
|
|
|
sort.Slice(tc, func(i, j int) bool {
|
|
|
|
return labels.Compare(labels.FromMap(tc[i].lset), labels.FromMap(tc[i].lset)) < 0
|
|
|
|
})
|
|
|
|
|
2017-09-05 02:45:18 -07:00
|
|
|
postings := newMemPostings()
|
2017-04-13 07:27:31 -07:00
|
|
|
chkReader := mockChunkReader(make(map[uint64]chunks.Chunk))
|
2017-07-21 01:37:52 -07:00
|
|
|
lblIdx := make(map[string]stringset)
|
2017-04-13 07:27:31 -07:00
|
|
|
mi := newMockIndex()
|
|
|
|
|
|
|
|
for i, s := range tc {
|
2017-05-22 01:01:57 -07:00
|
|
|
i = i + 1 // 0 is not a valid posting.
|
2017-08-06 11:41:24 -07:00
|
|
|
metas := make([]ChunkMeta, 0, len(s.chunks))
|
2017-04-13 07:27:31 -07:00
|
|
|
for _, chk := range s.chunks {
|
|
|
|
// Collisions can be there, but for tests, its fine.
|
|
|
|
ref := rand.Uint64()
|
|
|
|
|
2017-08-06 11:41:24 -07:00
|
|
|
metas = append(metas, ChunkMeta{
|
2017-04-13 07:27:31 -07:00
|
|
|
MinTime: chk[0].t,
|
|
|
|
MaxTime: chk[len(chk)-1].t,
|
|
|
|
Ref: ref,
|
|
|
|
})
|
|
|
|
|
|
|
|
chunk := chunks.NewXORChunk()
|
|
|
|
app, _ := chunk.Appender()
|
|
|
|
for _, smpl := range chk {
|
|
|
|
app.Append(smpl.t, smpl.v)
|
|
|
|
}
|
|
|
|
chkReader[ref] = chunk
|
|
|
|
}
|
|
|
|
|
2017-07-21 01:37:52 -07:00
|
|
|
ls := labels.FromMap(s.lset)
|
2017-09-04 07:08:38 -07:00
|
|
|
mi.AddSeries(uint64(i), ls, metas...)
|
2017-04-13 07:27:31 -07:00
|
|
|
|
2017-09-05 02:45:18 -07:00
|
|
|
postings.add(uint64(i), ls)
|
2017-07-21 01:37:52 -07:00
|
|
|
|
2017-09-05 02:45:18 -07:00
|
|
|
for _, l := range ls {
|
2017-07-21 01:37:52 -07:00
|
|
|
vs, present := lblIdx[l.Name]
|
|
|
|
if !present {
|
|
|
|
vs = stringset{}
|
|
|
|
lblIdx[l.Name] = vs
|
|
|
|
}
|
|
|
|
vs.set(l.Value)
|
2017-04-13 07:27:31 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-07-21 01:37:52 -07:00
|
|
|
for l, vs := range lblIdx {
|
|
|
|
mi.WriteLabelIndex([]string{l}, vs.slice())
|
|
|
|
}
|
|
|
|
|
2017-09-05 02:45:18 -07:00
|
|
|
for l := range postings.m {
|
|
|
|
mi.WritePostings(l.Name, l.Value, postings.get(l.Name, l.Value))
|
2017-04-13 07:27:31 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
return mi, chkReader
|
|
|
|
}
|
|
|
|
|
|
|
|
// TestBlockQuerier verifies blockQuerier.Select against a fixed set of series:
// matcher filtering (equality and prefix), time-range trimming, and
// label-sorted result ordering.
func TestBlockQuerier(t *testing.T) {
	// Local shadow of the package-level helper; kept for test independence.
	newSeries := func(l map[string]string, s []sample) Series {
		return &mockSeries{
			labels:   func() labels.Labels { return labels.FromMap(l) },
			iterator: func() SeriesIterator { return newListSeriesIterator(s) },
		}
	}

	// query describes one Select call and its expected result set.
	type query struct {
		mint, maxt int64
		ms         []labels.Matcher
		exp        SeriesSet
	}

	cases := struct {
		data []struct {
			lset   map[string]string
			chunks [][]sample
		}

		queries []query
	}{
		data: []struct {
			lset   map[string]string
			chunks [][]sample
		}{
			{
				lset: map[string]string{
					"a": "a",
				},
				chunks: [][]sample{
					{
						{1, 2}, {2, 3}, {3, 4},
					},
					{
						{5, 2}, {6, 3}, {7, 4},
					},
				},
			},
			{
				lset: map[string]string{
					"a": "a",
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 1}, {2, 2}, {3, 3},
					},
					{
						{5, 3}, {6, 6},
					},
				},
			},
			{
				lset: map[string]string{
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 3}, {2, 2}, {3, 6},
					},
					{
						{5, 1}, {6, 7}, {7, 2},
					},
				},
			},
			{
				lset: map[string]string{
					"p": "abcd",
					"x": "xyz",
				},
				chunks: [][]sample{
					{
						{1, 2}, {2, 3}, {3, 4},
					},
					{
						{5, 2}, {6, 3}, {7, 4},
					},
				},
			},
			{
				lset: map[string]string{
					"a": "ab",
					"p": "abce",
				},
				chunks: [][]sample{
					{
						{1, 1}, {2, 2}, {3, 3},
					},
					{
						{5, 3}, {6, 6},
					},
				},
			},
			{
				lset: map[string]string{
					"p": "xyz",
				},
				chunks: [][]sample{
					{
						{1, 1}, {2, 2}, {3, 3},
					},
					{
						{4, 4}, {5, 5}, {6, 6},
					},
				},
			},
		},

		queries: []query{
			// Empty time range and no matchers yields nothing.
			{
				mint: 0,
				maxt: 0,
				ms:   []labels.Matcher{},
				exp:  newListSeriesSet([]Series{}),
			},
			// Empty time range with a matcher still yields nothing.
			{
				mint: 0,
				maxt: 0,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp:  newListSeriesSet([]Series{}),
			},
			// Inverted range (mint > maxt) yields nothing.
			{
				mint: 1,
				maxt: 0,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp:  newListSeriesSet([]Series{}),
			},
			// Equality match; samples trimmed to [2, 6].
			{
				mint: 2,
				maxt: 6,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp: newListSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
					},
						[]sample{{2, 3}, {3, 4}, {5, 2}, {6, 3}},
					),
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]sample{{2, 2}, {3, 3}, {5, 3}, {6, 6}},
					),
				}),
			},
			// Prefix match on label "p"; results come back label-sorted.
			{
				mint: 2,
				maxt: 6,
				ms:   []labels.Matcher{labels.NewPrefixMatcher("p", "abc")},
				exp: newListSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "ab",
						"p": "abce",
					},
						[]sample{{2, 2}, {3, 3}, {5, 3}, {6, 6}},
					),
					newSeries(map[string]string{
						"p": "abcd",
						"x": "xyz",
					},
						[]sample{{2, 3}, {3, 4}, {5, 2}, {6, 3}},
					),
				}),
			},
		},
	}

Outer:
	for i, c := range cases.queries {
		// Fresh readers per query: createIdxChkReaders may reorder the data.
		ir, cr := createIdxChkReaders(cases.data)
		querier := &blockQuerier{
			index:      ir,
			chunks:     cr,
			tombstones: newEmptyTombstoneReader(),

			mint: c.mint,
			maxt: c.maxt,
		}

		res := querier.Select(c.ms...)

		for {
			eok, rok := c.exp.Next(), res.Next()
			require.Equal(t, eok, rok, "%d: next", i)

			if !eok {
				continue Outer
			}
			sexp := c.exp.At()
			sres := res.At()

			require.Equal(t, sexp.Labels(), sres.Labels(), "%d: labels", i)

			smplExp, errExp := expandSeriesIterator(sexp.Iterator())
			smplRes, errRes := expandSeriesIterator(sres.Iterator())

			require.Equal(t, errExp, errRes, "%d: samples error", i)
			require.Equal(t, smplExp, smplRes, "%d: samples", i)
		}
	}

	return
}
|
2017-04-09 07:00:25 -07:00
|
|
|
|
2017-05-22 01:01:57 -07:00
|
|
|
// TestBlockQuerierDelete verifies blockQuerier.Select on a block with
// tombstoned intervals: deleted ranges must be excised from returned samples
// and series whose in-range samples are all deleted must be dropped entirely.
func TestBlockQuerierDelete(t *testing.T) {
	// Local shadow of the package-level helper; kept for test independence.
	newSeries := func(l map[string]string, s []sample) Series {
		return &mockSeries{
			labels:   func() labels.Labels { return labels.FromMap(l) },
			iterator: func() SeriesIterator { return newListSeriesIterator(s) },
		}
	}

	// query describes one Select call and its expected result set.
	type query struct {
		mint, maxt int64
		ms         []labels.Matcher
		exp        SeriesSet
	}

	cases := struct {
		data []struct {
			lset   map[string]string
			chunks [][]sample
		}

		tombstones tombstoneReader
		queries    []query
	}{
		data: []struct {
			lset   map[string]string
			chunks [][]sample
		}{
			{
				lset: map[string]string{
					"a": "a",
				},
				chunks: [][]sample{
					{
						{1, 2}, {2, 3}, {3, 4},
					},
					{
						{5, 2}, {6, 3}, {7, 4},
					},
				},
			},
			{
				lset: map[string]string{
					"a": "a",
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 1}, {2, 2}, {3, 3},
					},
					{
						{4, 15}, {5, 3}, {6, 6},
					},
				},
			},
			{
				lset: map[string]string{
					"b": "b",
				},
				chunks: [][]sample{
					{
						{1, 3}, {2, 2}, {3, 6},
					},
					{
						{5, 1}, {6, 7}, {7, 2},
					},
				},
			},
		},
		// Keyed by series reference: 1-based, matching the postings that
		// createIdxChkReaders assigns to the (label-sorted) data above.
		tombstones: newTombstoneReader(
			map[uint64]Intervals{
				1: Intervals{{1, 3}},
				2: Intervals{{1, 3}, {6, 10}},
				3: Intervals{{6, 10}},
			},
		),
		queries: []query{
			{
				mint: 2,
				maxt: 7,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp: newListSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
					},
						[]sample{{5, 2}, {6, 3}, {7, 4}},
					),
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]sample{{4, 15}, {5, 3}},
					),
				}),
			},
			{
				mint: 2,
				maxt: 7,
				ms:   []labels.Matcher{labels.NewEqualMatcher("b", "b")},
				exp: newListSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]sample{{4, 15}, {5, 3}},
					),
					newSeries(map[string]string{
						"b": "b",
					},
						[]sample{{2, 2}, {3, 6}, {5, 1}},
					),
				}),
			},
			{
				mint: 1,
				maxt: 4,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp: newListSeriesSet([]Series{
					newSeries(map[string]string{
						"a": "a",
						"b": "b",
					},
						[]sample{{4, 15}},
					),
				}),
			},
			// The whole query window is tombstoned: nothing comes back.
			{
				mint: 1,
				maxt: 3,
				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
				exp:  newListSeriesSet([]Series{}),
			},
		},
	}

Outer:
	for _, c := range cases.queries {
		ir, cr := createIdxChkReaders(cases.data)
		querier := &blockQuerier{
			index:      ir,
			chunks:     cr,
			tombstones: cases.tombstones,

			mint: c.mint,
			maxt: c.maxt,
		}

		res := querier.Select(c.ms...)

		for {
			eok, rok := c.exp.Next(), res.Next()
			require.Equal(t, eok, rok, "next")

			if !eok {
				continue Outer
			}
			sexp := c.exp.At()
			sres := res.At()

			require.Equal(t, sexp.Labels(), sres.Labels(), "labels")

			smplExp, errExp := expandSeriesIterator(sexp.Iterator())
			smplRes, errRes := expandSeriesIterator(sres.Iterator())

			require.Equal(t, errExp, errRes, "samples error")
			require.Equal(t, smplExp, smplRes, "samples")
		}
	}

	return
}
|
|
|
|
|
2017-04-09 07:00:25 -07:00
|
|
|
// TestBaseChunkSeries verifies that baseChunkSeries resolves postings against
// a mock index, returning label sets and chunk metas in postings order while
// silently skipping references that do not exist in the index.
func TestBaseChunkSeries(t *testing.T) {
	// refdSeries is a series fixture together with the reference it is
	// registered under in the mock index.
	type refdSeries struct {
		lset   labels.Labels
		chunks []ChunkMeta

		ref uint64
	}

	cases := []struct {
		series []refdSeries
		// Postings should be in the sorted order of the series
		postings []uint64

		// Indexes (into series) expected back from bcs, in iteration order.
		expIdxs []int
	}{
		{
			series: []refdSeries{
				{
					lset: labels.New([]labels.Label{{"a", "a"}}...),
					chunks: []ChunkMeta{
						{Ref: 29}, {Ref: 45}, {Ref: 245}, {Ref: 123}, {Ref: 4232}, {Ref: 5344},
						{Ref: 121},
					},
					ref: 12,
				},
				{
					lset: labels.New([]labels.Label{{"a", "a"}, {"b", "b"}}...),
					chunks: []ChunkMeta{
						{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
					},
					ref: 10,
				},
				{
					lset:   labels.New([]labels.Label{{"b", "c"}}...),
					chunks: []ChunkMeta{{Ref: 8282}},
					ref:    1,
				},
				{
					lset: labels.New([]labels.Label{{"b", "b"}}...),
					chunks: []ChunkMeta{
						{Ref: 829}, {Ref: 239}, {Ref: 2349}, {Ref: 659}, {Ref: 269},
					},
					ref: 108,
				},
			},
			postings: []uint64{12, 13, 10, 108}, // 13 doesn't exist and should just be skipped over.
			expIdxs:  []int{0, 1, 3},
		},
		{
			series: []refdSeries{
				{
					lset: labels.New([]labels.Label{{"a", "a"}, {"b", "b"}}...),
					chunks: []ChunkMeta{
						{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
					},
					ref: 10,
				},
				{
					lset:   labels.New([]labels.Label{{"b", "c"}}...),
					chunks: []ChunkMeta{{Ref: 8282}},
					ref:    3,
				},
			},
			// Empty postings: iteration must terminate without yielding.
			postings: []uint64{},
			expIdxs:  []int{},
		},
	}

	for _, tc := range cases {
		mi := newMockIndex()
		for _, s := range tc.series {
			mi.AddSeries(s.ref, s.lset, s.chunks...)
		}

		bcs := &baseChunkSeries{
			p:          newListPostings(tc.postings),
			index:      mi,
			tombstones: newEmptyTombstoneReader(),
		}

		i := 0
		for bcs.Next() {
			lset, chks, _ := bcs.At()

			idx := tc.expIdxs[i]

			require.Equal(t, tc.series[idx].lset, lset)
			require.Equal(t, tc.series[idx].chunks, chks)

			i++
		}
		// All expected series were seen and the iterator ended cleanly.
		require.Equal(t, len(tc.expIdxs), i)
		require.NoError(t, bcs.Err())
	}

	return
}
|
|
|
|
|
|
|
|
// TODO: Remove after simpleSeries is merged
// itSeries adapts a bare SeriesIterator into a Series with empty labels.
type itSeries struct {
	si SeriesIterator
}
|
|
|
|
|
|
|
|
// Iterator returns the wrapped iterator.
func (s itSeries) Iterator() SeriesIterator { return s.si }
|
|
|
|
// Labels always returns an empty label set.
func (s itSeries) Labels() labels.Labels { return labels.Labels{} }
|
|
|
|
|
2017-08-06 11:41:24 -07:00
|
|
|
func chunkFromSamples(s []sample) ChunkMeta {
|
2017-04-09 07:00:25 -07:00
|
|
|
mint, maxt := int64(0), int64(0)
|
|
|
|
|
|
|
|
if len(s) > 0 {
|
|
|
|
mint, maxt = s[0].t, s[len(s)-1].t
|
|
|
|
}
|
|
|
|
|
|
|
|
c := chunks.NewXORChunk()
|
|
|
|
ca, _ := c.Appender()
|
|
|
|
|
|
|
|
for _, s := range s {
|
|
|
|
ca.Append(s.t, s.v)
|
|
|
|
}
|
2017-08-06 11:41:24 -07:00
|
|
|
return ChunkMeta{
|
2017-04-09 07:00:25 -07:00
|
|
|
MinTime: mint,
|
|
|
|
MaxTime: maxt,
|
2017-08-06 11:41:24 -07:00
|
|
|
Chunk: c,
|
2017-04-09 07:00:25 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// TestSeriesIterator exercises the chunk-backed and chained series iterators
// with shared table fixtures: plain iteration across chunk/iterator
// boundaries, time-range trimming, and Seek semantics.
func TestSeriesIterator(t *testing.T) {
	// Iteration cases: three sample lists a, b, c concatenated in order,
	// the expected flattened output, and the query time window.
	itcases := []struct {
		a, b, c []sample
		exp     []sample

		mint, maxt int64
	}{
		{
			a: []sample{},
			b: []sample{},
			c: []sample{},

			exp: []sample{},

			mint: math.MinInt64,
			maxt: math.MaxInt64,
		},
		{
			a: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1},
			},
			b: []sample{},
			c: []sample{
				{7, 89}, {9, 8},
			},

			exp: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1}, {7, 89}, {9, 8},
			},
			mint: math.MinInt64,
			maxt: math.MaxInt64,
		},
		{
			a: []sample{},
			b: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1},
			},
			c: []sample{
				{7, 89}, {9, 8},
			},

			exp: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1}, {7, 89}, {9, 8},
			},
			mint: 2,
			maxt: 8,
		},
		{
			a: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1},
			},
			b: []sample{
				{7, 89}, {9, 8},
			},
			c: []sample{
				{10, 22}, {203, 3493},
			},

			exp: []sample{
				{1, 2}, {2, 3}, {3, 5}, {6, 1}, {7, 89}, {9, 8}, {10, 22}, {203, 3493},
			},
			mint: 6,
			maxt: 10,
		},
	}

	// Seek cases: seek target, whether the seek must succeed, and the samples
	// expected from the seek position onwards.
	seekcases := []struct {
		a, b, c []sample

		seek    int64
		success bool
		exp     []sample

		mint, maxt int64
	}{
		{
			a: []sample{},
			b: []sample{},
			c: []sample{},

			seek:    0,
			success: false,
			exp:     nil,
		},
		{
			a: []sample{
				{2, 3},
			},
			b: []sample{},
			c: []sample{
				{7, 89}, {9, 8},
			},

			seek:    10,
			success: false,
			exp:     nil,
			mint:    math.MinInt64,
			maxt:    math.MaxInt64,
		},
		{
			a: []sample{},
			b: []sample{
				{1, 2}, {3, 5}, {6, 1},
			},
			c: []sample{
				{7, 89}, {9, 8},
			},

			seek:    2,
			success: true,
			exp: []sample{
				{3, 5}, {6, 1}, {7, 89}, {9, 8},
			},
			mint: 5,
			maxt: 8,
		},
		{
			a: []sample{
				{6, 1},
			},
			b: []sample{
				{9, 8},
			},
			c: []sample{
				{10, 22}, {203, 3493},
			},

			seek:    10,
			success: true,
			exp: []sample{
				{10, 22}, {203, 3493},
			},
			mint: 10,
			maxt: 203,
		},
		{
			a: []sample{
				{6, 1},
			},
			b: []sample{
				{9, 8},
			},
			c: []sample{
				{10, 22}, {203, 3493},
			},

			seek:    203,
			success: true,
			exp: []sample{
				{203, 3493},
			},
			mint: 7,
			maxt: 203,
		},
	}

	t.Run("Chunk", func(t *testing.T) {
		for _, tc := range itcases {
			chkMetas := []ChunkMeta{
				chunkFromSamples(tc.a),
				chunkFromSamples(tc.b),
				chunkFromSamples(tc.c),
			}
			res := newChunkSeriesIterator(chkMetas, nil, tc.mint, tc.maxt)

			// Build the expected output restricted to [mint, maxt].
			smplValid := make([]sample, 0)
			for _, s := range tc.exp {
				if s.t >= tc.mint && s.t <= tc.maxt {
					smplValid = append(smplValid, s)
				}
			}
			exp := newListSeriesIterator(smplValid)

			smplExp, errExp := expandSeriesIterator(exp)
			smplRes, errRes := expandSeriesIterator(res)

			require.Equal(t, errExp, errRes, "samples error")
			require.Equal(t, smplExp, smplRes, "samples")
		}

		t.Run("Seek", func(t *testing.T) {
			// Extra chunk-iterator-specific seek cases on top of the shared
			// seekcases table.
			extra := []struct {
				a, b, c []sample

				seek    int64
				success bool
				exp     []sample

				mint, maxt int64
			}{
				{
					a: []sample{
						{6, 1},
					},
					b: []sample{
						{9, 8},
					},
					c: []sample{
						{10, 22}, {203, 3493},
					},

					seek:    203,
					success: false,
					exp:     nil,
					mint:    2,
					maxt:    202,
				},
				{
					a: []sample{
						{6, 1},
					},
					b: []sample{
						{9, 8},
					},
					c: []sample{
						{10, 22}, {203, 3493},
					},

					seek:    5,
					success: true,
					exp:     []sample{{10, 22}},
					mint:    10,
					maxt:    202,
				},
			}

			seekcases2 := append(seekcases, extra...)

			for _, tc := range seekcases2 {
				chkMetas := []ChunkMeta{
					chunkFromSamples(tc.a),
					chunkFromSamples(tc.b),
					chunkFromSamples(tc.c),
				}
				res := newChunkSeriesIterator(chkMetas, nil, tc.mint, tc.maxt)

				// Expected samples restricted to the query window.
				smplValid := make([]sample, 0)
				for _, s := range tc.exp {
					if s.t >= tc.mint && s.t <= tc.maxt {
						smplValid = append(smplValid, s)
					}
				}
				exp := newListSeriesIterator(smplValid)

				require.Equal(t, tc.success, res.Seek(tc.seek))

				if tc.success {
					// Init the list and then proceed to check.
					remaining := exp.Next()
					require.True(t, remaining)

					for remaining {
						sExp, eExp := exp.At()
						sRes, eRes := res.At()
						require.Equal(t, eExp, eRes, "samples error")
						require.Equal(t, sExp, sRes, "samples")

						remaining = exp.Next()
						require.Equal(t, remaining, res.Next())
					}
				}
			}
		})
	})

	t.Run("Chain", func(t *testing.T) {
		for _, tc := range itcases {
			a, b, c := itSeries{newListSeriesIterator(tc.a)},
				itSeries{newListSeriesIterator(tc.b)},
				itSeries{newListSeriesIterator(tc.c)}

			res := newChainedSeriesIterator(a, b, c)
			exp := newListSeriesIterator(tc.exp)

			smplExp, errExp := expandSeriesIterator(exp)
			smplRes, errRes := expandSeriesIterator(res)

			require.Equal(t, errExp, errRes, "samples error")
			require.Equal(t, smplExp, smplRes, "samples")
		}

		t.Run("Seek", func(t *testing.T) {
			for _, tc := range seekcases {
				a, b, c := itSeries{newListSeriesIterator(tc.a)},
					itSeries{newListSeriesIterator(tc.b)},
					itSeries{newListSeriesIterator(tc.c)}

				res := newChainedSeriesIterator(a, b, c)
				exp := newListSeriesIterator(tc.exp)

				require.Equal(t, tc.success, res.Seek(tc.seek))

				if tc.success {
					// Init the list and then proceed to check.
					remaining := exp.Next()
					require.True(t, remaining)

					for remaining {
						sExp, eExp := exp.At()
						sRes, eRes := res.At()
						require.Equal(t, eExp, eRes, "samples error")
						require.Equal(t, sExp, sRes, "samples")

						remaining = exp.Next()
						require.Equal(t, remaining, res.Next())
					}
				}
			}
		})
	})

	return
}
|
2017-05-01 02:03:56 -07:00
|
|
|
|
2017-06-13 00:51:22 -07:00
|
|
|
// Regression for: https://github.com/prometheus/tsdb/pull/97
|
2017-06-30 06:06:27 -07:00
|
|
|
func TestChunkSeriesIterator_DoubleSeek(t *testing.T) {
|
2017-08-06 11:41:24 -07:00
|
|
|
chkMetas := []ChunkMeta{
|
2017-06-13 00:51:22 -07:00
|
|
|
chunkFromSamples([]sample{}),
|
|
|
|
chunkFromSamples([]sample{{1, 1}, {2, 2}, {3, 3}}),
|
|
|
|
chunkFromSamples([]sample{{4, 4}, {5, 5}}),
|
|
|
|
}
|
|
|
|
|
|
|
|
res := newChunkSeriesIterator(chkMetas, nil, 2, 8)
|
|
|
|
require.True(t, res.Seek(1))
|
|
|
|
require.True(t, res.Seek(2))
|
|
|
|
ts, v := res.At()
|
|
|
|
require.Equal(t, int64(2), ts)
|
|
|
|
require.Equal(t, float64(2), v)
|
|
|
|
}
|
|
|
|
|
2017-06-30 06:06:27 -07:00
|
|
|
// Regression when seeked chunks were still found via binary search and we always
|
|
|
|
// skipped to the end when seeking a value in the current chunk.
|
|
|
|
func TestChunkSeriesIterator_SeekInCurrentChunk(t *testing.T) {
|
2017-08-06 11:41:24 -07:00
|
|
|
metas := []ChunkMeta{
|
2017-06-30 06:06:27 -07:00
|
|
|
chunkFromSamples([]sample{}),
|
|
|
|
chunkFromSamples([]sample{{1, 2}, {3, 4}, {5, 6}, {7, 8}}),
|
|
|
|
chunkFromSamples([]sample{}),
|
|
|
|
}
|
|
|
|
|
|
|
|
it := newChunkSeriesIterator(metas, nil, 1, 7)
|
|
|
|
|
|
|
|
require.True(t, it.Next())
|
|
|
|
ts, v := it.At()
|
|
|
|
require.Equal(t, int64(1), ts)
|
|
|
|
require.Equal(t, float64(2), v)
|
|
|
|
|
|
|
|
require.True(t, it.Seek(4))
|
|
|
|
ts, v = it.At()
|
|
|
|
require.Equal(t, int64(5), ts)
|
|
|
|
require.Equal(t, float64(6), v)
|
|
|
|
}
|
|
|
|
|
2017-08-28 15:39:17 -07:00
|
|
|
// Regression when calling Next() with a time bounded to fit within two samples.
|
|
|
|
// Seek gets called and advances beyond the max time, which was just accepted as a valid sample.
|
|
|
|
func TestChunkSeriesIterator_NextWithMinTime(t *testing.T) {
|
|
|
|
metas := []ChunkMeta{
|
|
|
|
chunkFromSamples([]sample{{1, 6}, {5, 6}, {7, 8}}),
|
|
|
|
}
|
|
|
|
|
|
|
|
it := newChunkSeriesIterator(metas, nil, 2, 4)
|
|
|
|
require.False(t, it.Next())
|
|
|
|
}
|
|
|
|
|
2017-05-01 02:03:56 -07:00
|
|
|
func TestPopulatedCSReturnsValidChunkSlice(t *testing.T) {
|
|
|
|
lbls := []labels.Labels{labels.New(labels.Label{"a", "b"})}
|
2017-08-06 11:41:24 -07:00
|
|
|
chunkMetas := [][]ChunkMeta{
|
2017-05-01 02:03:56 -07:00
|
|
|
{
|
|
|
|
{MinTime: 1, MaxTime: 2, Ref: 1},
|
|
|
|
{MinTime: 3, MaxTime: 4, Ref: 2},
|
|
|
|
{MinTime: 10, MaxTime: 12, Ref: 3},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
cr := mockChunkReader(
|
|
|
|
map[uint64]chunks.Chunk{
|
|
|
|
1: chunks.NewXORChunk(),
|
|
|
|
2: chunks.NewXORChunk(),
|
|
|
|
3: chunks.NewXORChunk(),
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
m := &mockChunkSeriesSet{l: lbls, cm: chunkMetas, i: -1}
|
|
|
|
p := &populatedChunkSeries{
|
|
|
|
set: m,
|
|
|
|
chunks: cr,
|
|
|
|
|
2017-05-01 02:31:17 -07:00
|
|
|
mint: 0,
|
|
|
|
maxt: 0,
|
2017-05-01 02:03:56 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
require.False(t, p.Next())
|
2017-05-01 02:31:17 -07:00
|
|
|
|
|
|
|
p.mint = 6
|
|
|
|
p.maxt = 9
|
|
|
|
require.False(t, p.Next())
|
|
|
|
|
2017-05-03 10:15:28 -07:00
|
|
|
// Test the case where 1 chunk could cause an unpopulated chunk to be returned.
|
2017-08-06 11:41:24 -07:00
|
|
|
chunkMetas = [][]ChunkMeta{
|
2017-05-03 10:15:28 -07:00
|
|
|
{
|
|
|
|
{MinTime: 1, MaxTime: 2, Ref: 1},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
m = &mockChunkSeriesSet{l: lbls, cm: chunkMetas, i: -1}
|
|
|
|
p = &populatedChunkSeries{
|
|
|
|
set: m,
|
|
|
|
chunks: cr,
|
|
|
|
|
|
|
|
mint: 10,
|
|
|
|
maxt: 15,
|
|
|
|
}
|
|
|
|
require.False(t, p.Next())
|
2017-05-01 02:03:56 -07:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// mockChunkSeriesSet is an in-memory ChunkSeriesSet fixture: parallel slices
// of series labels and their chunk metas, walked by a running index.
type mockChunkSeriesSet struct {
	l  []labels.Labels // labels for each series
	cm [][]ChunkMeta   // chunk metas for the series at the same index in l

	i int // current position; callers initialize it to -1 so the first Next() lands on 0
}
|
|
|
|
|
|
|
|
func (m *mockChunkSeriesSet) Next() bool {
|
|
|
|
if len(m.l) != len(m.cm) {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
m.i++
|
|
|
|
return m.i < len(m.l)
|
|
|
|
}
|
|
|
|
|
2017-08-25 01:11:46 -07:00
|
|
|
func (m *mockChunkSeriesSet) At() (labels.Labels, []ChunkMeta, Intervals) {
|
2017-05-22 04:12:36 -07:00
|
|
|
return m.l[m.i], m.cm[m.i], nil
|
2017-05-01 02:03:56 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
func (m *mockChunkSeriesSet) Err() error {
|
|
|
|
return nil
|
|
|
|
}
|
2017-07-05 07:19:28 -07:00
|
|
|
|
|
|
|
// Test the cost of merging series sets for different number of merged sets and their size.
|
|
|
|
// The subset are all equivalent so this does not capture merging of partial or non-overlapping sets well.
|
|
|
|
func BenchmarkMergedSeriesSet(b *testing.B) {
|
|
|
|
var sel func(sets []SeriesSet) SeriesSet
|
|
|
|
|
|
|
|
sel = func(sets []SeriesSet) SeriesSet {
|
|
|
|
if len(sets) == 0 {
|
|
|
|
return nopSeriesSet{}
|
|
|
|
}
|
|
|
|
if len(sets) == 1 {
|
|
|
|
return sets[0]
|
|
|
|
}
|
|
|
|
l := len(sets) / 2
|
|
|
|
return newMergedSeriesSet(sel(sets[:l]), sel(sets[l:]))
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, k := range []int{
|
|
|
|
100,
|
|
|
|
1000,
|
|
|
|
10000,
|
|
|
|
100000,
|
|
|
|
} {
|
|
|
|
for _, j := range []int{1, 2, 4, 8, 16, 32} {
|
|
|
|
b.Run(fmt.Sprintf("series=%d,blocks=%d", k, j), func(b *testing.B) {
|
|
|
|
lbls, err := readPrometheusLabels("testdata/1m.series", k)
|
|
|
|
require.NoError(b, err)
|
|
|
|
|
|
|
|
sort.Sort(labels.Slice(lbls))
|
|
|
|
|
|
|
|
in := make([][]Series, j)
|
|
|
|
|
|
|
|
for _, l := range lbls {
|
|
|
|
l2 := l
|
|
|
|
for j := range in {
|
|
|
|
in[j] = append(in[j], &mockSeries{labels: func() labels.Labels { return l2 }})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
b.ResetTimer()
|
|
|
|
|
|
|
|
for i := 0; i < b.N; i++ {
|
|
|
|
var sets []SeriesSet
|
|
|
|
for _, s := range in {
|
|
|
|
sets = append(sets, newListSeriesSet(s))
|
|
|
|
}
|
|
|
|
ms := sel(sets)
|
|
|
|
|
|
|
|
i := 0
|
|
|
|
for ms.Next() {
|
|
|
|
i++
|
|
|
|
}
|
|
|
|
require.NoError(b, ms.Err())
|
|
|
|
require.Equal(b, len(lbls), i)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|