Mirror of https://github.com/prometheus/prometheus.git
Update tests to not assume the chunk implementation
Signed-off-by: Ganesh Vernekar <ganeshvern@gmail.com>
parent 45b025898f
commit 0a3f203c63
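In short, tests that used to compare raw chunks.Meta values now decode every chunk back into its samples and compare those, so the assertions no longer depend on the chunk encoding or on how samples are cut into chunks. The decoding step used throughout the diff is storage.ExpandSamples. A minimal sketch of that one step (chk stands for any chunks.Meta obtained from a ChunkQuerier; this snippet is illustrative and not part of the commit):

    // Decode the chunk into plain samples; nil is passed for the sample
    // constructor, exactly as the tests below do.
    samples, err := storage.ExpandSamples(chk.Chunk.Iterator(nil), nil)
    require.NoError(t, err)
    // samples is a []tsdbutil.Sample and can be compared with require.Equal.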
@@ -130,7 +130,25 @@ func query(t testing.TB, q storage.Querier, matchers ...*labels.Matcher) map[str
 	return result
 }
 
-// queryChunks runs a matcher query against the querier and fully expands its data.
+// queryAndExpandChunks runs a matcher query against the querier and fully expands its data into samples.
+func queryAndExpandChunks(t testing.TB, q storage.ChunkQuerier, matchers ...*labels.Matcher) map[string][][]tsdbutil.Sample {
+	s := queryChunks(t, q, matchers...)
+
+	res := make(map[string][][]tsdbutil.Sample)
+	for k, v := range s {
+		var samples [][]tsdbutil.Sample
+		for _, chk := range v {
+			sam, err := storage.ExpandSamples(chk.Chunk.Iterator(nil), nil)
+			require.NoError(t, err)
+			samples = append(samples, sam)
+		}
+		res[k] = samples
+	}
+
+	return res
+}
+
+// queryChunks runs a matcher query against the querier and expands its data.
 func queryChunks(t testing.TB, q storage.ChunkQuerier, matchers ...*labels.Matcher) map[string][]chunks.Meta {
 	ss := q.Select(false, nil, matchers...)
 	defer func() {
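With the helper above, a test can capture what a ChunkQuerier returns as a map of decoded samples and compare two queriers at the sample level. A hedged usage sketch (q1, q2, and matchAll are placeholders for whatever the surrounding test already has; the TestDBReadOnly hunks below use the helper in this way):

    // Both maps are keyed by the series' label string, as in query and
    // queryChunks, and hold [][]tsdbutil.Sample per series, so equality
    // does not depend on the chunk implementation.
    expected := queryAndExpandChunks(t, q1, matchAll)
    actual := queryAndExpandChunks(t, q2, matchAll)
    require.Equal(t, expected, actual)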
@@ -2367,7 +2385,7 @@ func TestDBReadOnly(t *testing.T) {
 		logger    = log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr))
 		expBlocks []*Block
 		expSeries map[string][]tsdbutil.Sample
-		expChunks map[string][]chunks.Meta
+		expChunks map[string][][]tsdbutil.Sample
 		expDBHash []byte
 		matchAll  = labels.MustNewMatcher(labels.MatchEqual, "", "")
 		err       error
@@ -2418,7 +2436,7 @@ func TestDBReadOnly(t *testing.T) {
 		expSeries = query(t, q, matchAll)
 		cq, err := dbWritable.ChunkQuerier(context.TODO(), math.MinInt64, math.MaxInt64)
 		require.NoError(t, err)
-		expChunks = queryChunks(t, cq, matchAll)
+		expChunks = queryAndExpandChunks(t, cq, matchAll)
 
 		require.NoError(t, dbWritable.Close()) // Close here to allow getting the dir hash for windows.
 		expDBHash = testutil.DirHash(t, dbWritable.Dir())
@@ -2452,7 +2470,7 @@ func TestDBReadOnly(t *testing.T) {
 	t.Run("chunk querier", func(t *testing.T) {
 		cq, err := dbReadOnly.ChunkQuerier(context.TODO(), math.MinInt64, math.MaxInt64)
 		require.NoError(t, err)
-		readOnlySeries := queryChunks(t, cq, matchAll)
+		readOnlySeries := queryAndExpandChunks(t, cq, matchAll)
 		readOnlyDBHash := testutil.DirHash(t, dbDir)
 
 		require.Equal(t, len(expChunks), len(readOnlySeries), "total series mismatch")
@@ -235,7 +235,19 @@ func testBlockQuerier(t *testing.T, c blockQuerierTestCase, ir IndexReader, cr C
 			chksRes, errRes := storage.ExpandChunks(sres.Iterator(nil))
 			rmChunkRefs(chksRes)
 			require.Equal(t, errExp, errRes)
-			require.Equal(t, chksExp, chksRes)
+
+			require.Equal(t, len(chksExp), len(chksRes))
+			var exp, act [][]tsdbutil.Sample
+			for i := range chksExp {
+				samples, err := storage.ExpandSamples(chksExp[i].Chunk.Iterator(nil), nil)
+				require.NoError(t, err)
+				exp = append(exp, samples)
+				samples, err = storage.ExpandSamples(chksRes[i].Chunk.Iterator(nil), nil)
+				require.NoError(t, err)
+				act = append(act, samples)
+			}
+
+			require.Equal(t, exp, act)
 		}
 		require.NoError(t, res.Err())
 	})
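The same pattern appears here inline: instead of asserting equality on the chunk metas themselves, the test expands each pair of chunks and compares the decoded samples. A hypothetical shared helper expressing that pattern (not part of this commit; the name requireSameSamples is made up) could look like:

    // requireSameSamples asserts that two chunk slices carry the same data,
    // ignoring chunk encoding, boundaries, and references.
    func requireSameSamples(t testing.TB, expected, actual []chunks.Meta) {
    	require.Equal(t, len(expected), len(actual))
    	for i := range expected {
    		exp, err := storage.ExpandSamples(expected[i].Chunk.Iterator(nil), nil)
    		require.NoError(t, err)
    		act, err := storage.ExpandSamples(actual[i].Chunk.Iterator(nil), nil)
    		require.NoError(t, err)
    		require.Equal(t, exp, act)
    	}
    }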