Mirror of https://github.com/prometheus/prometheus.git, synced 2024-12-25 05:34:05 -08:00

move test utils to its own package so we can use it in tests for other packages in the tsdb repo

This commit is contained in:
parent fa26e2b6c3
commit ca4e817372
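The point of the commit is easier to see with a short usage sketch. The import path github.com/prometheus/tsdb/testutil and the helper names Ok and Equals are taken from the diff below; the consuming package and the test itself are hypothetical:

package chunks_test // hypothetical consumer in another package of the tsdb repo

import (
    "strconv"
    "testing"

    "github.com/prometheus/tsdb/testutil"
)

// TestAtoi is a stand-in test; it only shows the shared helpers being
// called through the new package instead of package-local Ok/Equals.
func TestAtoi(t *testing.T) {
    n, err := strconv.Atoi("42")
    testutil.Ok(t, err)       // fails the test if err is non-nil
    testutil.Equals(t, 42, n) // fails the test if the two values differ
}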
@@ -17,38 +17,40 @@ import (
 "io/ioutil"
 "os"
 "testing"
+
+"github.com/prometheus/tsdb/testutil"
 )

 func TestSetCompactionFailed(t *testing.T) {
 tmpdir, err := ioutil.TempDir("", "test-tsdb")
-Ok(t, err)
+testutil.Ok(t, err)

 b := createEmptyBlock(t, tmpdir)

-Equals(t, false, b.meta.Compaction.Failed)
-Ok(t, b.setCompactionFailed())
-Equals(t, true, b.meta.Compaction.Failed)
-Ok(t, b.Close())
+testutil.Equals(t, false, b.meta.Compaction.Failed)
+testutil.Ok(t, b.setCompactionFailed())
+testutil.Equals(t, true, b.meta.Compaction.Failed)
+testutil.Ok(t, b.Close())

 b, err = OpenBlock(tmpdir, nil)
-Ok(t, err)
-Equals(t, true, b.meta.Compaction.Failed)
+testutil.Ok(t, err)
+testutil.Equals(t, true, b.meta.Compaction.Failed)
 }

 func createEmptyBlock(t *testing.T, dir string) *Block {
-Ok(t, os.MkdirAll(dir, 0777))
+testutil.Ok(t, os.MkdirAll(dir, 0777))

-Ok(t, writeMetaFile(dir, &BlockMeta{}))
+testutil.Ok(t, writeMetaFile(dir, &BlockMeta{}))

 ir, err := newIndexWriter(dir)
-Ok(t, err)
-Ok(t, ir.Close())
+testutil.Ok(t, err)
+testutil.Ok(t, ir.Close())

-Ok(t, os.MkdirAll(chunkDir(dir), 0777))
+testutil.Ok(t, os.MkdirAll(chunkDir(dir), 0777))

-Ok(t, writeTombstoneFile(dir, EmptyTombstoneReader()))
+testutil.Ok(t, writeTombstoneFile(dir, EmptyTombstoneReader()))

 b, err := OpenBlock(dir, nil)
-Ok(t, err)
+testutil.Ok(t, err)
 return b
 }
@@ -19,6 +19,7 @@ import (

 "github.com/pkg/errors"
 "github.com/prometheus/tsdb/chunks"
+"github.com/prometheus/tsdb/testutil"
 )

 type mockChunkReader map[uint64]chunks.Chunk
@@ -39,7 +40,7 @@ func (cr mockChunkReader) Close() error {
 func TestDeletedIterator(t *testing.T) {
 chk := chunks.NewXORChunk()
 app, err := chk.Appender()
-Ok(t, err)
+testutil.Ok(t, err)
 // Insert random stuff from (0, 1000).
 act := make([]sample, 1000)
 for i := 0; i < 1000; i++ {
@@ -76,11 +77,11 @@ func TestDeletedIterator(t *testing.T) {
 }
 }

-Assert(t, i < 1000 == true, "")
+testutil.Assert(t, i < 1000 == true, "")

 ts, v := it.At()
-Equals(t, act[i].t, ts)
-Equals(t, act[i].v, v)
+testutil.Equals(t, act[i].t, ts)
+testutil.Equals(t, act[i].v, v)
 }
 // There has been an extra call to Next().
 i++
@@ -91,7 +92,7 @@ func TestDeletedIterator(t *testing.T) {
 }
 }

-Assert(t, i < 1000 == false, "")
-Ok(t, it.Err())
+testutil.Assert(t, i < 1000 == false, "")
+testutil.Ok(t, it.Err())
 }
 }
@@ -21,6 +21,7 @@ import (

 "github.com/go-kit/kit/log"
 "github.com/pkg/errors"
+"github.com/prometheus/tsdb/testutil"
 )

 func TestSplitByRange(t *testing.T) {
@@ -122,7 +123,7 @@ func TestSplitByRange(t *testing.T) {
 }
 }

-Equals(t, exp, splitByRange(blocks, c.trange))
+testutil.Equals(t, exp, splitByRange(blocks, c.trange))
 }
 }

@@ -146,7 +147,7 @@ func TestNoPanicFor0Tombstones(t *testing.T) {
 }

 c, err := NewLeveledCompactor(nil, nil, []int64{50}, nil)
-Ok(t, err)
+testutil.Ok(t, err)

 c.plan(metas)
 }
@@ -159,7 +160,7 @@ func TestLeveledCompactor_plan(t *testing.T) {
 720,
 2160,
 }, nil)
-Ok(t, err)
+testutil.Ok(t, err)

 cases := []struct {
 metas []dirMeta
@@ -262,9 +263,9 @@ func TestLeveledCompactor_plan(t *testing.T) {

 for _, c := range cases {
 res, err := compactor.plan(c.metas)
-Ok(t, err)
+testutil.Ok(t, err)

-Equals(t, c.expected, res)
+testutil.Equals(t, c.expected, res)
 }
 }

@@ -276,7 +277,7 @@ func TestRangeWithFailedCompactionWontGetSelected(t *testing.T) {
 720,
 2160,
 }, nil)
-Ok(t, err)
+testutil.Ok(t, err)

 cases := []struct {
 metas []dirMeta
@@ -309,9 +310,9 @@ func TestRangeWithFailedCompactionWontGetSelected(t *testing.T) {
 for _, c := range cases {
 c.metas[1].meta.Compaction.Failed = true
 res, err := compactor.plan(c.metas)
-Ok(t, err)
+testutil.Ok(t, err)

-Equals(t, []string(nil), res)
+testutil.Equals(t, []string(nil), res)
 }
 }

@@ -323,14 +324,14 @@ func TestCompactionFailWillCleanUpTempDir(t *testing.T) {
 720,
 2160,
 }, nil)
-Ok(t, err)
+testutil.Ok(t, err)

 tmpdir, err := ioutil.TempDir("", "test")
-Ok(t, err)
+testutil.Ok(t, err)

-NotOk(t, compactor.write(tmpdir, &BlockMeta{}, erringBReader{}))
+testutil.NotOk(t, compactor.write(tmpdir, &BlockMeta{}, erringBReader{}))
 _, err = os.Stat(filepath.Join(tmpdir, BlockMeta{}.ULID.String()) + ".tmp")
-Assert(t, os.IsNotExist(err), "directory is not cleaned up")
+testutil.Assert(t, os.IsNotExist(err), "directory is not cleaned up")
 }

 func metaRange(name string, mint, maxt int64, stats *BlockStats) dirMeta {
db_test.go (267 lines changed)
@ -23,14 +23,15 @@ import (
|
|||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/prometheus/tsdb/labels"
|
||||
"github.com/prometheus/tsdb/testutil"
|
||||
)
|
||||
|
||||
func openTestDB(t testing.TB, opts *Options) (db *DB, close func()) {
|
||||
tmpdir, err := ioutil.TempDir("", "test")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
db, err = Open(tmpdir, nil, nil, opts)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
// Do not close the test database by default as it will deadlock on test failures.
|
||||
return db, func() { os.RemoveAll(tmpdir) }
|
||||
|
@ -39,7 +40,7 @@ func openTestDB(t testing.TB, opts *Options) (db *DB, close func()) {
|
|||
// query runs a matcher query against the querier and fully expands its data.
|
||||
func query(t testing.TB, q Querier, matchers ...labels.Matcher) map[string][]sample {
|
||||
ss, err := q.Select(matchers...)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
result := map[string][]sample{}
|
||||
|
||||
|
@ -52,12 +53,12 @@ func query(t testing.TB, q Querier, matchers ...labels.Matcher) map[string][]sam
|
|||
t, v := it.At()
|
||||
samples = append(samples, sample{t: t, v: v})
|
||||
}
|
||||
Ok(t, it.Err())
|
||||
testutil.Ok(t, it.Err())
|
||||
|
||||
name := series.Labels().String()
|
||||
result[name] = samples
|
||||
}
|
||||
Ok(t, ss.Err())
|
||||
testutil.Ok(t, ss.Err())
|
||||
|
||||
return result
|
||||
}
|
||||
|
@ -69,25 +70,25 @@ func TestDataAvailableOnlyAfterCommit(t *testing.T) {
|
|||
app := db.Appender()
|
||||
|
||||
_, err := app.Add(labels.FromStrings("foo", "bar"), 0, 0)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
querier, err := db.Querier(0, 1)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
seriesSet := query(t, querier, labels.NewEqualMatcher("foo", "bar"))
|
||||
|
||||
Equals(t, seriesSet, map[string][]sample{})
|
||||
Ok(t, querier.Close())
|
||||
testutil.Equals(t, seriesSet, map[string][]sample{})
|
||||
testutil.Ok(t, querier.Close())
|
||||
|
||||
err = app.Commit()
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
querier, err = db.Querier(0, 1)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer querier.Close()
|
||||
|
||||
seriesSet = query(t, querier, labels.NewEqualMatcher("foo", "bar"))
|
||||
|
||||
Equals(t, seriesSet, map[string][]sample{`{foo="bar"}`: []sample{{t: 0, v: 0}}})
|
||||
testutil.Equals(t, seriesSet, map[string][]sample{`{foo="bar"}`: []sample{{t: 0, v: 0}}})
|
||||
}
|
||||
|
||||
func TestDataNotAvailableAfterRollback(t *testing.T) {
|
||||
|
@ -96,18 +97,18 @@ func TestDataNotAvailableAfterRollback(t *testing.T) {
|
|||
|
||||
app := db.Appender()
|
||||
_, err := app.Add(labels.FromStrings("foo", "bar"), 0, 0)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
err = app.Rollback()
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
querier, err := db.Querier(0, 1)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer querier.Close()
|
||||
|
||||
seriesSet := query(t, querier, labels.NewEqualMatcher("foo", "bar"))
|
||||
|
||||
Equals(t, seriesSet, map[string][]sample{})
|
||||
testutil.Equals(t, seriesSet, map[string][]sample{})
|
||||
}
|
||||
|
||||
func TestDBAppenderAddRef(t *testing.T) {
|
||||
|
@ -117,41 +118,41 @@ func TestDBAppenderAddRef(t *testing.T) {
|
|||
app1 := db.Appender()
|
||||
|
||||
ref1, err := app1.Add(labels.FromStrings("a", "b"), 123, 0)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
// Reference should already work before commit.
|
||||
err = app1.AddFast(ref1, 124, 1)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
err = app1.Commit()
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
app2 := db.Appender()
|
||||
|
||||
// first ref should already work in next transaction.
|
||||
err = app2.AddFast(ref1, 125, 0)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
ref2, err := app2.Add(labels.FromStrings("a", "b"), 133, 1)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
Assert(t, ref1 == ref2, "")
|
||||
testutil.Assert(t, ref1 == ref2, "")
|
||||
|
||||
// Reference must be valid to add another sample.
|
||||
err = app2.AddFast(ref2, 143, 2)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
err = app2.AddFast(9999999, 1, 1)
|
||||
Equals(t, errors.Cause(err), ErrNotFound)
|
||||
testutil.Equals(t, errors.Cause(err), ErrNotFound)
|
||||
|
||||
Ok(t, app2.Commit())
|
||||
testutil.Ok(t, app2.Commit())
|
||||
|
||||
q, err := db.Querier(0, 200)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
res := query(t, q, labels.NewEqualMatcher("a", "b"))
|
||||
|
||||
Equals(t, map[string][]sample{
|
||||
testutil.Equals(t, map[string][]sample{
|
||||
labels.FromStrings("a", "b").String(): []sample{
|
||||
{t: 123, v: 0},
|
||||
{t: 124, v: 1},
|
||||
|
@ -161,7 +162,7 @@ func TestDBAppenderAddRef(t *testing.T) {
|
|||
},
|
||||
}, res)
|
||||
|
||||
Ok(t, q.Close())
|
||||
testutil.Ok(t, q.Close())
|
||||
}
|
||||
|
||||
func TestDeleteSimple(t *testing.T) {
|
||||
|
@ -178,7 +179,7 @@ func TestDeleteSimple(t *testing.T) {
|
|||
app.Add(labels.Labels{{"a", "b"}}, i, smpls[i])
|
||||
}
|
||||
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, app.Commit())
|
||||
cases := []struct {
|
||||
intervals Intervals
|
||||
remaint []int64
|
||||
|
@ -194,15 +195,15 @@ Outer:
|
|||
// TODO(gouthamve): Reset the tombstones somehow.
|
||||
// Delete the ranges.
|
||||
for _, r := range c.intervals {
|
||||
Ok(t, db.Delete(r.Mint, r.Maxt, labels.NewEqualMatcher("a", "b")))
|
||||
testutil.Ok(t, db.Delete(r.Mint, r.Maxt, labels.NewEqualMatcher("a", "b")))
|
||||
}
|
||||
|
||||
// Compare the result.
|
||||
q, err := db.Querier(0, numSamples)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
res, err := q.Select(labels.NewEqualMatcher("a", "b"))
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
expSamples := make([]sample, 0, len(c.remaint))
|
||||
for _, ts := range c.remaint {
|
||||
|
@ -214,13 +215,13 @@ Outer:
|
|||
})
|
||||
|
||||
if len(expSamples) == 0 {
|
||||
Assert(t, res.Next() == false, "")
|
||||
testutil.Assert(t, res.Next() == false, "")
|
||||
continue
|
||||
}
|
||||
|
||||
for {
|
||||
eok, rok := expss.Next(), res.Next()
|
||||
Equals(t, eok, rok)
|
||||
testutil.Equals(t, eok, rok)
|
||||
|
||||
if !eok {
|
||||
continue Outer
|
||||
|
@ -228,13 +229,13 @@ Outer:
|
|||
sexp := expss.At()
|
||||
sres := res.At()
|
||||
|
||||
Equals(t, sexp.Labels(), sres.Labels())
|
||||
testutil.Equals(t, sexp.Labels(), sres.Labels())
|
||||
|
||||
smplExp, errExp := expandSeriesIterator(sexp.Iterator())
|
||||
smplRes, errRes := expandSeriesIterator(sres.Iterator())
|
||||
|
||||
Equals(t, errExp, errRes)
|
||||
Equals(t, smplExp, smplRes)
|
||||
testutil.Equals(t, errExp, errRes)
|
||||
testutil.Equals(t, smplExp, smplRes)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -245,13 +246,13 @@ func TestAmendDatapointCausesError(t *testing.T) {
|
|||
|
||||
app := db.Appender()
|
||||
_, err := app.Add(labels.Labels{}, 0, 0)
|
||||
Ok(t, err)
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, app.Commit())
|
||||
|
||||
app = db.Appender()
|
||||
_, err = app.Add(labels.Labels{}, 0, 1)
|
||||
Equals(t, ErrAmendSample, err)
|
||||
Ok(t, app.Rollback())
|
||||
testutil.Equals(t, ErrAmendSample, err)
|
||||
testutil.Ok(t, app.Rollback())
|
||||
}
|
||||
|
||||
func TestDuplicateNaNDatapointNoAmendError(t *testing.T) {
|
||||
|
@ -260,12 +261,12 @@ func TestDuplicateNaNDatapointNoAmendError(t *testing.T) {
|
|||
|
||||
app := db.Appender()
|
||||
_, err := app.Add(labels.Labels{}, 0, math.NaN())
|
||||
Ok(t, err)
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, app.Commit())
|
||||
|
||||
app = db.Appender()
|
||||
_, err = app.Add(labels.Labels{}, 0, math.NaN())
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
}
|
||||
|
||||
func TestNonDuplicateNaNDatapointsCausesAmendError(t *testing.T) {
|
||||
|
@ -274,12 +275,12 @@ func TestNonDuplicateNaNDatapointsCausesAmendError(t *testing.T) {
|
|||
|
||||
app := db.Appender()
|
||||
_, err := app.Add(labels.Labels{}, 0, math.Float64frombits(0x7ff0000000000001))
|
||||
Ok(t, err)
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, app.Commit())
|
||||
|
||||
app = db.Appender()
|
||||
_, err = app.Add(labels.Labels{}, 0, math.Float64frombits(0x7ff0000000000002))
|
||||
Equals(t, ErrAmendSample, err)
|
||||
testutil.Equals(t, ErrAmendSample, err)
|
||||
}
|
||||
|
||||
func TestSkippingInvalidValuesInSameTxn(t *testing.T) {
|
||||
|
@ -289,40 +290,40 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) {
|
|||
// Append AmendedValue.
|
||||
app := db.Appender()
|
||||
_, err := app.Add(labels.Labels{{"a", "b"}}, 0, 1)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
_, err = app.Add(labels.Labels{{"a", "b"}}, 0, 2)
|
||||
Ok(t, err)
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, app.Commit())
|
||||
|
||||
// Make sure the right value is stored.
|
||||
q, err := db.Querier(0, 10)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
ssMap := query(t, q, labels.NewEqualMatcher("a", "b"))
|
||||
|
||||
Equals(t, map[string][]sample{
|
||||
testutil.Equals(t, map[string][]sample{
|
||||
labels.New(labels.Label{"a", "b"}).String(): []sample{{0, 1}},
|
||||
}, ssMap)
|
||||
|
||||
Ok(t, q.Close())
|
||||
testutil.Ok(t, q.Close())
|
||||
|
||||
// Append Out of Order Value.
|
||||
app = db.Appender()
|
||||
_, err = app.Add(labels.Labels{{"a", "b"}}, 10, 3)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
_, err = app.Add(labels.Labels{{"a", "b"}}, 7, 5)
|
||||
Ok(t, err)
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, app.Commit())
|
||||
|
||||
q, err = db.Querier(0, 10)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
ssMap = query(t, q, labels.NewEqualMatcher("a", "b"))
|
||||
|
||||
Equals(t, map[string][]sample{
|
||||
testutil.Equals(t, map[string][]sample{
|
||||
labels.New(labels.Label{"a", "b"}).String(): []sample{{0, 1}, {10, 3}},
|
||||
}, ssMap)
|
||||
Ok(t, q.Close())
|
||||
testutil.Ok(t, q.Close())
|
||||
}
|
||||
|
||||
func TestDB_Snapshot(t *testing.T) {
|
||||
|
@ -334,28 +335,28 @@ func TestDB_Snapshot(t *testing.T) {
|
|||
mint := int64(1414141414000)
|
||||
for i := 0; i < 1000; i++ {
|
||||
_, err := app.Add(labels.FromStrings("foo", "bar"), mint+int64(i), 1.0)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
}
|
||||
Ok(t, app.Commit())
|
||||
Ok(t, app.Rollback())
|
||||
testutil.Ok(t, app.Commit())
|
||||
testutil.Ok(t, app.Rollback())
|
||||
|
||||
// create snapshot
|
||||
snap, err := ioutil.TempDir("", "snap")
|
||||
Ok(t, err)
|
||||
Ok(t, db.Snapshot(snap))
|
||||
Ok(t, db.Close())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, db.Snapshot(snap))
|
||||
testutil.Ok(t, db.Close())
|
||||
|
||||
// reopen DB from snapshot
|
||||
db, err = Open(snap, nil, nil, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
querier, err := db.Querier(mint, mint+1000)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer querier.Close()
|
||||
|
||||
// sum values
|
||||
seriesSet, err := querier.Select(labels.NewEqualMatcher("foo", "bar"))
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
sum := 0.0
|
||||
for seriesSet.Next() {
|
||||
|
@ -364,10 +365,10 @@ func TestDB_Snapshot(t *testing.T) {
|
|||
_, v := series.At()
|
||||
sum += v
|
||||
}
|
||||
Ok(t, series.Err())
|
||||
testutil.Ok(t, series.Err())
|
||||
}
|
||||
Ok(t, seriesSet.Err())
|
||||
Equals(t, sum, 1000.0)
|
||||
testutil.Ok(t, seriesSet.Err())
|
||||
testutil.Equals(t, sum, 1000.0)
|
||||
}
|
||||
|
||||
func TestDB_SnapshotWithDelete(t *testing.T) {
|
||||
|
@ -384,7 +385,7 @@ func TestDB_SnapshotWithDelete(t *testing.T) {
|
|||
app.Add(labels.Labels{{"a", "b"}}, i, smpls[i])
|
||||
}
|
||||
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, app.Commit())
|
||||
cases := []struct {
|
||||
intervals Intervals
|
||||
remaint []int64
|
||||
|
@ -400,25 +401,25 @@ Outer:
|
|||
// TODO(gouthamve): Reset the tombstones somehow.
|
||||
// Delete the ranges.
|
||||
for _, r := range c.intervals {
|
||||
Ok(t, db.Delete(r.Mint, r.Maxt, labels.NewEqualMatcher("a", "b")))
|
||||
testutil.Ok(t, db.Delete(r.Mint, r.Maxt, labels.NewEqualMatcher("a", "b")))
|
||||
}
|
||||
|
||||
// create snapshot
|
||||
snap, err := ioutil.TempDir("", "snap")
|
||||
Ok(t, err)
|
||||
Ok(t, db.Snapshot(snap))
|
||||
Ok(t, db.Close())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, db.Snapshot(snap))
|
||||
testutil.Ok(t, db.Close())
|
||||
|
||||
// reopen DB from snapshot
|
||||
db, err = Open(snap, nil, nil, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
// Compare the result.
|
||||
q, err := db.Querier(0, numSamples)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
res, err := q.Select(labels.NewEqualMatcher("a", "b"))
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
expSamples := make([]sample, 0, len(c.remaint))
|
||||
for _, ts := range c.remaint {
|
||||
|
@ -430,13 +431,13 @@ Outer:
|
|||
})
|
||||
|
||||
if len(expSamples) == 0 {
|
||||
Assert(t, res.Next() == false, "")
|
||||
testutil.Assert(t, res.Next() == false, "")
|
||||
continue
|
||||
}
|
||||
|
||||
for {
|
||||
eok, rok := expss.Next(), res.Next()
|
||||
Equals(t, eok, rok)
|
||||
testutil.Equals(t, eok, rok)
|
||||
|
||||
if !eok {
|
||||
continue Outer
|
||||
|
@ -444,13 +445,13 @@ Outer:
|
|||
sexp := expss.At()
|
||||
sres := res.At()
|
||||
|
||||
Equals(t, sexp.Labels(), sres.Labels())
|
||||
testutil.Equals(t, sexp.Labels(), sres.Labels())
|
||||
|
||||
smplExp, errExp := expandSeriesIterator(sexp.Iterator())
|
||||
smplRes, errRes := expandSeriesIterator(sres.Iterator())
|
||||
|
||||
Equals(t, errExp, errRes)
|
||||
Equals(t, smplExp, smplRes)
|
||||
testutil.Equals(t, errExp, errRes)
|
||||
testutil.Equals(t, smplExp, smplRes)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -528,7 +529,7 @@ func TestDB_e2e(t *testing.T) {
|
|||
series = append(series, sample{ts, v})
|
||||
|
||||
_, err := app.Add(lset, ts, v)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
ts += rand.Int63n(timeInterval) + 1
|
||||
}
|
||||
|
@ -536,7 +537,7 @@ func TestDB_e2e(t *testing.T) {
|
|||
seriesMap[lset.String()] = series
|
||||
}
|
||||
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, app.Commit())
|
||||
|
||||
// Query each selector on 1000 random time-ranges.
|
||||
queries := []struct {
|
||||
|
@ -587,10 +588,10 @@ func TestDB_e2e(t *testing.T) {
|
|||
}
|
||||
|
||||
q, err := db.Querier(mint, maxt)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
ss, err := q.Select(qry.ms...)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
result := map[string][]sample{}
|
||||
|
||||
|
@ -598,15 +599,15 @@ func TestDB_e2e(t *testing.T) {
|
|||
x := ss.At()
|
||||
|
||||
smpls, err := expandSeriesIterator(x.Iterator())
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
if len(smpls) > 0 {
|
||||
result[x.Labels().String()] = smpls
|
||||
}
|
||||
}
|
||||
|
||||
Ok(t, ss.Err())
|
||||
Equals(t, expected, result)
|
||||
testutil.Ok(t, ss.Err())
|
||||
testutil.Equals(t, expected, result)
|
||||
|
||||
q.Close()
|
||||
}
|
||||
|
@ -617,30 +618,30 @@ func TestDB_e2e(t *testing.T) {
|
|||
|
||||
func TestWALFlushedOnDBClose(t *testing.T) {
|
||||
tmpdir, err := ioutil.TempDir("", "test")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer os.RemoveAll(tmpdir)
|
||||
|
||||
db, err := Open(tmpdir, nil, nil, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
lbls := labels.Labels{labels.Label{Name: "labelname", Value: "labelvalue"}}
|
||||
|
||||
app := db.Appender()
|
||||
_, err = app.Add(lbls, 0, 1)
|
||||
Ok(t, err)
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, app.Commit())
|
||||
|
||||
db.Close()
|
||||
|
||||
db, err = Open(tmpdir, nil, nil, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
q, err := db.Querier(0, 1)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
values, err := q.LabelValues("labelname")
|
||||
Ok(t, err)
|
||||
Equals(t, values, []string{"labelvalue"})
|
||||
testutil.Ok(t, err)
|
||||
testutil.Equals(t, values, []string{"labelvalue"})
|
||||
}
|
||||
|
||||
func TestTombstoneClean(t *testing.T) {
|
||||
|
@ -657,7 +658,7 @@ func TestTombstoneClean(t *testing.T) {
|
|||
app.Add(labels.Labels{{"a", "b"}}, i, smpls[i])
|
||||
}
|
||||
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, app.Commit())
|
||||
cases := []struct {
|
||||
intervals Intervals
|
||||
remaint []int64
|
||||
|
@ -673,27 +674,27 @@ func TestTombstoneClean(t *testing.T) {
|
|||
|
||||
// create snapshot
|
||||
snap, err := ioutil.TempDir("", "snap")
|
||||
Ok(t, err)
|
||||
Ok(t, db.Snapshot(snap))
|
||||
Ok(t, db.Close())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, db.Snapshot(snap))
|
||||
testutil.Ok(t, db.Close())
|
||||
|
||||
// reopen DB from snapshot
|
||||
db, err = Open(snap, nil, nil, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
for _, r := range c.intervals {
|
||||
Ok(t, db.Delete(r.Mint, r.Maxt, labels.NewEqualMatcher("a", "b")))
|
||||
testutil.Ok(t, db.Delete(r.Mint, r.Maxt, labels.NewEqualMatcher("a", "b")))
|
||||
}
|
||||
|
||||
// All of the setup for THIS line.
|
||||
Ok(t, db.CleanTombstones())
|
||||
testutil.Ok(t, db.CleanTombstones())
|
||||
|
||||
// Compare the result.
|
||||
q, err := db.Querier(0, numSamples)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
res, err := q.Select(labels.NewEqualMatcher("a", "b"))
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
expSamples := make([]sample, 0, len(c.remaint))
|
||||
for _, ts := range c.remaint {
|
||||
|
@ -705,13 +706,13 @@ func TestTombstoneClean(t *testing.T) {
|
|||
})
|
||||
|
||||
if len(expSamples) == 0 {
|
||||
Assert(t, res.Next() == false, "")
|
||||
testutil.Assert(t, res.Next() == false, "")
|
||||
continue
|
||||
}
|
||||
|
||||
for {
|
||||
eok, rok := expss.Next(), res.Next()
|
||||
Equals(t, eok, rok)
|
||||
testutil.Equals(t, eok, rok)
|
||||
|
||||
if !eok {
|
||||
break
|
||||
|
@ -719,17 +720,17 @@ func TestTombstoneClean(t *testing.T) {
|
|||
sexp := expss.At()
|
||||
sres := res.At()
|
||||
|
||||
Equals(t, sexp.Labels(), sres.Labels())
|
||||
testutil.Equals(t, sexp.Labels(), sres.Labels())
|
||||
|
||||
smplExp, errExp := expandSeriesIterator(sexp.Iterator())
|
||||
smplRes, errRes := expandSeriesIterator(sres.Iterator())
|
||||
|
||||
Equals(t, errExp, errRes)
|
||||
Equals(t, smplExp, smplRes)
|
||||
testutil.Equals(t, errExp, errRes)
|
||||
testutil.Equals(t, smplExp, smplRes)
|
||||
}
|
||||
|
||||
for _, b := range db.blocks {
|
||||
Equals(t, emptyTombstoneReader, b.tombstones)
|
||||
testutil.Equals(t, emptyTombstoneReader, b.tombstones)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -739,39 +740,39 @@ func TestDB_Retention(t *testing.T) {
|
|||
defer os.RemoveAll(tmpdir)
|
||||
|
||||
db, err := Open(tmpdir, nil, nil, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
lbls := labels.Labels{labels.Label{Name: "labelname", Value: "labelvalue"}}
|
||||
|
||||
app := db.Appender()
|
||||
_, err = app.Add(lbls, 0, 1)
|
||||
Ok(t, err)
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, app.Commit())
|
||||
|
||||
// create snapshot to make it create a block.
|
||||
// TODO(gouthamve): Add a method to compact headblock.
|
||||
snap, err := ioutil.TempDir("", "snap")
|
||||
Ok(t, err)
|
||||
Ok(t, db.Snapshot(snap))
|
||||
Ok(t, db.Close())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, db.Snapshot(snap))
|
||||
testutil.Ok(t, db.Close())
|
||||
defer os.RemoveAll(snap)
|
||||
|
||||
// reopen DB from snapshot
|
||||
db, err = Open(snap, nil, nil, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
Equals(t, 1, len(db.blocks))
|
||||
testutil.Equals(t, 1, len(db.blocks))
|
||||
|
||||
app = db.Appender()
|
||||
_, err = app.Add(lbls, 100, 1)
|
||||
Ok(t, err)
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, app.Commit())
|
||||
|
||||
// Snapshot again to create another block.
|
||||
snap, err = ioutil.TempDir("", "snap")
|
||||
Ok(t, err)
|
||||
Ok(t, db.Snapshot(snap))
|
||||
Ok(t, db.Close())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, db.Snapshot(snap))
|
||||
testutil.Ok(t, db.Close())
|
||||
defer os.RemoveAll(snap)
|
||||
|
||||
// reopen DB from snapshot
|
||||
|
@ -779,14 +780,14 @@ func TestDB_Retention(t *testing.T) {
|
|||
RetentionDuration: 10,
|
||||
BlockRanges: []int64{50},
|
||||
})
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
Equals(t, 2, len(db.blocks))
|
||||
testutil.Equals(t, 2, len(db.blocks))
|
||||
|
||||
// Now call rentention.
|
||||
changes, err := db.retentionCutoff()
|
||||
Ok(t, err)
|
||||
Assert(t, changes, "there should be changes")
|
||||
Equals(t, 1, len(db.blocks))
|
||||
Equals(t, int64(100), db.blocks[0].meta.MaxTime) // To verify its the right block.
|
||||
testutil.Ok(t, err)
|
||||
testutil.Assert(t, changes, "there should be changes")
|
||||
testutil.Equals(t, 1, len(db.blocks))
|
||||
testutil.Equals(t, int64(100), db.blocks[0].meta.MaxTime) // To verify its the right block.
|
||||
}
|
||||
|
|
head_test.go (149 lines changed)
@ -26,15 +26,16 @@ import (
|
|||
|
||||
promlabels "github.com/prometheus/prometheus/pkg/labels"
|
||||
"github.com/prometheus/prometheus/pkg/textparse"
|
||||
"github.com/prometheus/tsdb/testutil"
|
||||
)
|
||||
|
||||
func BenchmarkCreateSeries(b *testing.B) {
|
||||
lbls, err := readPrometheusLabels("testdata/all.series", b.N)
|
||||
Ok(b, err)
|
||||
testutil.Ok(b, err)
|
||||
|
||||
h, err := NewHead(nil, nil, nil, 10000)
|
||||
if err != nil {
|
||||
Ok(b, err)
|
||||
testutil.Ok(b, err)
|
||||
}
|
||||
defer h.Close()
|
||||
|
||||
|
@ -130,40 +131,40 @@ func TestHead_ReadWAL(t *testing.T) {
|
|||
wal := &memoryWAL{entries: entries}
|
||||
|
||||
head, err := NewHead(nil, nil, wal, 1000)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer head.Close()
|
||||
|
||||
Ok(t, head.ReadWAL())
|
||||
Equals(t, uint64(100), head.lastSeriesID)
|
||||
testutil.Ok(t, head.ReadWAL())
|
||||
testutil.Equals(t, uint64(100), head.lastSeriesID)
|
||||
|
||||
s10 := head.series.getByID(10)
|
||||
s11 := head.series.getByID(11)
|
||||
s50 := head.series.getByID(50)
|
||||
s100 := head.series.getByID(100)
|
||||
|
||||
Equals(t, labels.FromStrings("a", "1"), s10.lset)
|
||||
Equals(t, labels.FromStrings("a", "2"), s11.lset)
|
||||
Equals(t, labels.FromStrings("a", "4"), s50.lset)
|
||||
Equals(t, labels.FromStrings("a", "3"), s100.lset)
|
||||
testutil.Equals(t, labels.FromStrings("a", "1"), s10.lset)
|
||||
testutil.Equals(t, labels.FromStrings("a", "2"), s11.lset)
|
||||
testutil.Equals(t, labels.FromStrings("a", "4"), s50.lset)
|
||||
testutil.Equals(t, labels.FromStrings("a", "3"), s100.lset)
|
||||
|
||||
expandChunk := func(c chunks.Iterator) (x []sample) {
|
||||
for c.Next() {
|
||||
t, v := c.At()
|
||||
x = append(x, sample{t: t, v: v})
|
||||
}
|
||||
Ok(t, c.Err())
|
||||
testutil.Ok(t, c.Err())
|
||||
return x
|
||||
}
|
||||
|
||||
Equals(t, []sample{{100, 2}, {101, 5}}, expandChunk(s10.iterator(0)))
|
||||
Equals(t, 0, len(s11.chunks))
|
||||
Equals(t, []sample{{101, 6}}, expandChunk(s50.iterator(0)))
|
||||
Equals(t, []sample{{100, 3}}, expandChunk(s100.iterator(0)))
|
||||
testutil.Equals(t, []sample{{100, 2}, {101, 5}}, expandChunk(s10.iterator(0)))
|
||||
testutil.Equals(t, 0, len(s11.chunks))
|
||||
testutil.Equals(t, []sample{{101, 6}}, expandChunk(s50.iterator(0)))
|
||||
testutil.Equals(t, []sample{{100, 3}}, expandChunk(s100.iterator(0)))
|
||||
}
|
||||
|
||||
func TestHead_Truncate(t *testing.T) {
|
||||
h, err := NewHead(nil, nil, nil, 1000)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer h.Close()
|
||||
|
||||
h.initTime(0)
|
||||
|
@ -190,21 +191,21 @@ func TestHead_Truncate(t *testing.T) {
|
|||
s4.chunks = []*memChunk{}
|
||||
|
||||
// Truncation need not be aligned.
|
||||
Ok(t, h.Truncate(1))
|
||||
testutil.Ok(t, h.Truncate(1))
|
||||
|
||||
h.Truncate(2000)
|
||||
|
||||
Equals(t, []*memChunk{
|
||||
testutil.Equals(t, []*memChunk{
|
||||
{minTime: 2000, maxTime: 2999},
|
||||
}, h.series.getByID(s1.ref).chunks)
|
||||
|
||||
Equals(t, []*memChunk{
|
||||
testutil.Equals(t, []*memChunk{
|
||||
{minTime: 2000, maxTime: 2999},
|
||||
{minTime: 3000, maxTime: 3999},
|
||||
}, h.series.getByID(s2.ref).chunks)
|
||||
|
||||
Nil(t, h.series.getByID(s3.ref), "")
|
||||
Nil(t, h.series.getByID(s4.ref), "")
|
||||
testutil.Nil(t, h.series.getByID(s3.ref), "")
|
||||
testutil.Nil(t, h.series.getByID(s4.ref), "")
|
||||
|
||||
postingsA1, _ := expandPostings(h.postings.get("a", "1"))
|
||||
postingsA2, _ := expandPostings(h.postings.get("a", "2"))
|
||||
|
@ -213,14 +214,14 @@ func TestHead_Truncate(t *testing.T) {
|
|||
postingsC1, _ := expandPostings(h.postings.get("c", "1"))
|
||||
postingsAll, _ := expandPostings(h.postings.get("", ""))
|
||||
|
||||
Equals(t, []uint64{s1.ref}, postingsA1)
|
||||
Equals(t, []uint64{s2.ref}, postingsA2)
|
||||
Equals(t, []uint64{s1.ref, s2.ref}, postingsB1)
|
||||
Equals(t, []uint64{s1.ref, s2.ref}, postingsAll)
|
||||
Nil(t, postingsB2, "")
|
||||
Nil(t, postingsC1, "")
|
||||
testutil.Equals(t, []uint64{s1.ref}, postingsA1)
|
||||
testutil.Equals(t, []uint64{s2.ref}, postingsA2)
|
||||
testutil.Equals(t, []uint64{s1.ref, s2.ref}, postingsB1)
|
||||
testutil.Equals(t, []uint64{s1.ref, s2.ref}, postingsAll)
|
||||
testutil.Nil(t, postingsB2, "")
|
||||
testutil.Nil(t, postingsC1, "")
|
||||
|
||||
Equals(t, map[string]struct{}{
|
||||
testutil.Equals(t, map[string]struct{}{
|
||||
"": struct{}{}, // from 'all' postings list
|
||||
"a": struct{}{},
|
||||
"b": struct{}{},
|
||||
|
@ -228,7 +229,7 @@ func TestHead_Truncate(t *testing.T) {
|
|||
"2": struct{}{},
|
||||
}, h.symbols)
|
||||
|
||||
Equals(t, map[string]stringset{
|
||||
testutil.Equals(t, map[string]stringset{
|
||||
"a": stringset{"1": struct{}{}, "2": struct{}{}},
|
||||
"b": stringset{"1": struct{}{}},
|
||||
"": stringset{"": struct{}{}},
|
||||
|
@ -242,7 +243,7 @@ func TestMemSeries_truncateChunks(t *testing.T) {
|
|||
|
||||
for i := 0; i < 4000; i += 5 {
|
||||
ok, _ := s.append(int64(i), float64(i))
|
||||
Assert(t, ok == true, "sample append failed")
|
||||
testutil.Assert(t, ok == true, "sample append failed")
|
||||
}
|
||||
|
||||
// Check that truncate removes half of the chunks and afterwards
|
||||
|
@ -251,32 +252,32 @@ func TestMemSeries_truncateChunks(t *testing.T) {
|
|||
lastID := s.chunkID(countBefore - 1)
|
||||
lastChunk := s.chunk(lastID)
|
||||
|
||||
NotNil(t, s.chunk(0), "")
|
||||
NotNil(t, lastChunk, "")
|
||||
testutil.NotNil(t, s.chunk(0), "")
|
||||
testutil.NotNil(t, lastChunk, "")
|
||||
|
||||
s.truncateChunksBefore(2000)
|
||||
|
||||
Equals(t, int64(2000), s.chunks[0].minTime)
|
||||
Nil(t, s.chunk(0), "first chunks not gone")
|
||||
Equals(t, countBefore/2, len(s.chunks))
|
||||
Equals(t, lastChunk, s.chunk(lastID))
|
||||
testutil.Equals(t, int64(2000), s.chunks[0].minTime)
|
||||
testutil.Nil(t, s.chunk(0), "first chunks not gone")
|
||||
testutil.Equals(t, countBefore/2, len(s.chunks))
|
||||
testutil.Equals(t, lastChunk, s.chunk(lastID))
|
||||
|
||||
// Validate that the series' sample buffer is applied correctly to the last chunk
|
||||
// after truncation.
|
||||
it1 := s.iterator(s.chunkID(len(s.chunks) - 1))
|
||||
_, ok := it1.(*memSafeIterator)
|
||||
Assert(t, ok == true, "")
|
||||
testutil.Assert(t, ok == true, "")
|
||||
|
||||
it2 := s.iterator(s.chunkID(len(s.chunks) - 2))
|
||||
_, ok = it2.(*memSafeIterator)
|
||||
Assert(t, ok == false, "non-last chunk incorrectly wrapped with sample buffer")
|
||||
testutil.Assert(t, ok == false, "non-last chunk incorrectly wrapped with sample buffer")
|
||||
}
|
||||
|
||||
func TestHeadDeleteSimple(t *testing.T) {
|
||||
numSamples := int64(10)
|
||||
|
||||
head, err := NewHead(nil, nil, nil, 1000)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer head.Close()
|
||||
|
||||
app := head.Appender()
|
||||
|
@ -287,7 +288,7 @@ func TestHeadDeleteSimple(t *testing.T) {
|
|||
app.Add(labels.Labels{{"a", "b"}}, i, smpls[i])
|
||||
}
|
||||
|
||||
Ok(t, app.Commit())
|
||||
testutil.Ok(t, app.Commit())
|
||||
cases := []struct {
|
||||
intervals Intervals
|
||||
remaint []int64
|
||||
|
@ -321,14 +322,14 @@ Outer:
|
|||
|
||||
// Delete the ranges.
|
||||
for _, r := range c.intervals {
|
||||
Ok(t, head.Delete(r.Mint, r.Maxt, labels.NewEqualMatcher("a", "b")))
|
||||
testutil.Ok(t, head.Delete(r.Mint, r.Maxt, labels.NewEqualMatcher("a", "b")))
|
||||
}
|
||||
|
||||
// Compare the result.
|
||||
q, err := NewBlockQuerier(head, head.MinTime(), head.MaxTime())
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
res, err := q.Select(labels.NewEqualMatcher("a", "b"))
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
expSamples := make([]sample, 0, len(c.remaint))
|
||||
for _, ts := range c.remaint {
|
||||
|
@ -340,13 +341,13 @@ Outer:
|
|||
})
|
||||
|
||||
if len(expSamples) == 0 {
|
||||
Assert(t, res.Next() == false, "")
|
||||
testutil.Assert(t, res.Next() == false, "")
|
||||
continue
|
||||
}
|
||||
|
||||
for {
|
||||
eok, rok := expss.Next(), res.Next()
|
||||
Equals(t, eok, rok)
|
||||
testutil.Equals(t, eok, rok)
|
||||
|
||||
if !eok {
|
||||
continue Outer
|
||||
|
@ -354,13 +355,13 @@ Outer:
|
|||
sexp := expss.At()
|
||||
sres := res.At()
|
||||
|
||||
Equals(t, sexp.Labels(), sres.Labels())
|
||||
testutil.Equals(t, sexp.Labels(), sres.Labels())
|
||||
|
||||
smplExp, errExp := expandSeriesIterator(sexp.Iterator())
|
||||
smplRes, errRes := expandSeriesIterator(sres.Iterator())
|
||||
|
||||
Equals(t, errExp, errRes)
|
||||
Equals(t, smplExp, smplRes)
|
||||
testutil.Equals(t, errExp, errRes)
|
||||
testutil.Equals(t, smplExp, smplRes)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -380,12 +381,12 @@ Outer:
|
|||
// app.Add(labels.Labels{{"a", "b"}}, i, smpls[i])
|
||||
// }
|
||||
|
||||
// Ok(t, app.Commit())
|
||||
// Ok(t, hb.Delete(0, 10000, labels.NewEqualMatcher("a", "b")))
|
||||
// testutil.Ok(t, app.Commit())
|
||||
// testutil.Ok(t, hb.Delete(0, 10000, labels.NewEqualMatcher("a", "b")))
|
||||
// app = hb.Appender()
|
||||
// _, err := app.Add(labels.Labels{{"a", "b"}}, 11, 1)
|
||||
// Ok(t, err)
|
||||
// Ok(t, app.Commit())
|
||||
// testutil.Ok(t, err)
|
||||
// testutil.Ok(t, app.Commit())
|
||||
|
||||
// q := hb.Querier(0, 100000)
|
||||
// res := q.Select(labels.NewEqualMatcher("a", "b"))
|
||||
|
@ -394,8 +395,8 @@ Outer:
|
|||
// exps := res.At()
|
||||
// it := exps.Iterator()
|
||||
// ressmpls, err := expandSeriesIterator(it)
|
||||
// Ok(t, err)
|
||||
// Equals(t, []sample{{11, 1}}, ressmpls)
|
||||
// testutil.Ok(t, err)
|
||||
// testutil.Equals(t, []sample{{11, 1}}, ressmpls)
|
||||
// }
|
||||
|
||||
// func TestDelete_e2e(t *testing.T) {
|
||||
|
@ -472,9 +473,9 @@ Outer:
|
|||
|
||||
// _, err := app.Add(ls, ts, v)
|
||||
// if ts >= minTime && ts <= maxTime {
|
||||
// Ok(t, err)
|
||||
// testutil.Ok(t, err)
|
||||
// } else {
|
||||
// EqualsError(t, err, ErrOutOfBounds.Error())
|
||||
// testutil.EqualsError(t, err, ErrOutOfBounds.Error())
|
||||
// }
|
||||
|
||||
// ts += rand.Int63n(timeInterval) + 1
|
||||
|
@ -483,7 +484,7 @@ Outer:
|
|||
// seriesMap[labels.New(l...).String()] = series
|
||||
// }
|
||||
|
||||
// Ok(t, app.Commit())
|
||||
// testutil.Ok(t, app.Commit())
|
||||
|
||||
// // Delete a time-range from each-selector.
|
||||
// dels := []struct {
|
||||
|
@ -518,7 +519,7 @@ Outer:
|
|||
// hb.tombstones = newEmptyTombstoneReader()
|
||||
|
||||
// for _, r := range del.drange {
|
||||
// Ok(t, hb.Delete(r.Mint, r.Maxt, del.ms...))
|
||||
// testutil.Ok(t, hb.Delete(r.Mint, r.Maxt, del.ms...))
|
||||
// }
|
||||
|
||||
// matched := labels.Slice{}
|
||||
|
@ -569,7 +570,7 @@ Outer:
|
|||
// }
|
||||
// }
|
||||
// }
|
||||
// Equals(t, eok, rok, "next")
|
||||
// testutil.Equals(t, eok, rok, "next")
|
||||
|
||||
// if !eok {
|
||||
// break
|
||||
|
@ -577,13 +578,13 @@ Outer:
|
|||
// sexp := expSs.At()
|
||||
// sres := ss.At()
|
||||
|
||||
// Equals(t, sexp.Labels(), sres.Labels(), "labels")
|
||||
// testutil.Equals(t, sexp.Labels(), sres.Labels(), "labels")
|
||||
|
||||
// smplExp, errExp := expandSeriesIterator(sexp.Iterator())
|
||||
// smplRes, errRes := expandSeriesIterator(sres.Iterator())
|
||||
|
||||
// Equals(t, errExp, errRes, "samples error")
|
||||
// Equals(t, smplExp, smplRes, "samples")
|
||||
// testutil.Equals(t, errExp, errRes, "samples error")
|
||||
// testutil.Equals(t, smplExp, smplRes, "samples")
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
@ -672,35 +673,35 @@ func TestMemSeries_append(t *testing.T) {
|
|||
// on and after it.
|
||||
// New chunk must correctly be cut at 1000.
|
||||
ok, chunkCreated := s.append(998, 1)
|
||||
Assert(t, ok, "append failed")
|
||||
Assert(t, chunkCreated, "first sample created chunk")
|
||||
testutil.Assert(t, ok, "append failed")
|
||||
testutil.Assert(t, chunkCreated, "first sample created chunk")
|
||||
|
||||
ok, chunkCreated = s.append(999, 2)
|
||||
Assert(t, ok, "append failed")
|
||||
Assert(t, !chunkCreated, "second sample should use same chunk")
|
||||
testutil.Assert(t, ok, "append failed")
|
||||
testutil.Assert(t, !chunkCreated, "second sample should use same chunk")
|
||||
|
||||
ok, chunkCreated = s.append(1000, 3)
|
||||
Assert(t, ok, "append failed")
|
||||
Assert(t, ok, "expected new chunk on boundary")
|
||||
testutil.Assert(t, ok, "append failed")
|
||||
testutil.Assert(t, ok, "expected new chunk on boundary")
|
||||
|
||||
ok, chunkCreated = s.append(1001, 4)
|
||||
Assert(t, ok, "append failed")
|
||||
Assert(t, !chunkCreated, "second sample should use same chunk")
|
||||
testutil.Assert(t, ok, "append failed")
|
||||
testutil.Assert(t, !chunkCreated, "second sample should use same chunk")
|
||||
|
||||
Assert(t, s.chunks[0].minTime == 998 && s.chunks[0].maxTime == 999, "wrong chunk range")
|
||||
Assert(t, s.chunks[1].minTime == 1000 && s.chunks[1].maxTime == 1001, "wrong chunk range")
|
||||
testutil.Assert(t, s.chunks[0].minTime == 998 && s.chunks[0].maxTime == 999, "wrong chunk range")
|
||||
testutil.Assert(t, s.chunks[1].minTime == 1000 && s.chunks[1].maxTime == 1001, "wrong chunk range")
|
||||
|
||||
// Fill the range [1000,2000) with many samples. Intermediate chunks should be cut
|
||||
// at approximately 120 samples per chunk.
|
||||
for i := 1; i < 1000; i++ {
|
||||
ok, _ := s.append(1001+int64(i), float64(i))
|
||||
Assert(t, ok, "append failed")
|
||||
testutil.Assert(t, ok, "append failed")
|
||||
}
|
||||
|
||||
Assert(t, len(s.chunks) > 7, "expected intermediate chunks")
|
||||
testutil.Assert(t, len(s.chunks) > 7, "expected intermediate chunks")
|
||||
|
||||
// All chunks but the first and last should now be moderately full.
|
||||
for i, c := range s.chunks[1 : len(s.chunks)-1] {
|
||||
Assert(t, c.chunk.NumSamples() > 100, "unexpected small chunk %d of length %d", i, c.chunk.NumSamples())
|
||||
testutil.Assert(t, c.chunk.NumSamples() > 100, "unexpected small chunk %d of length %d", i, c.chunk.NumSamples())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,6 +24,7 @@ import (
|
|||
"github.com/pkg/errors"
|
||||
"github.com/prometheus/tsdb/chunks"
|
||||
"github.com/prometheus/tsdb/labels"
|
||||
"github.com/prometheus/tsdb/testutil"
|
||||
)
|
||||
|
||||
type series struct {
|
||||
|
@ -150,35 +151,35 @@ func (m mockIndex) LabelIndices() ([][]string, error) {
|
|||
|
||||
func TestIndexRW_Create_Open(t *testing.T) {
|
||||
dir, err := ioutil.TempDir("", "test_index_create")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
// An empty index must still result in a readable file.
|
||||
iw, err := newIndexWriter(dir)
|
||||
Ok(t, err)
|
||||
Ok(t, iw.Close())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, iw.Close())
|
||||
|
||||
ir, err := NewFileIndexReader(filepath.Join(dir, "index"))
|
||||
Ok(t, err)
|
||||
Ok(t, ir.Close())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, ir.Close())
|
||||
|
||||
// Modify magic header must cause open to fail.
|
||||
f, err := os.OpenFile(filepath.Join(dir, "index"), os.O_WRONLY, 0666)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
_, err = f.WriteAt([]byte{0, 0}, 0)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
_, err = NewFileIndexReader(dir)
|
||||
NotOk(t, err)
|
||||
testutil.NotOk(t, err)
|
||||
}
|
||||
|
||||
func TestIndexRW_Postings(t *testing.T) {
|
||||
dir, err := ioutil.TempDir("", "test_index_postings")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
iw, err := newIndexWriter(dir)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
series := []labels.Labels{
|
||||
labels.FromStrings("a", "1", "b", "1"),
|
||||
|
@ -195,25 +196,25 @@ func TestIndexRW_Postings(t *testing.T) {
|
|||
"3": struct{}{},
|
||||
"4": struct{}{},
|
||||
})
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
// Postings lists are only written if a series with the respective
|
||||
// reference was added before.
|
||||
Ok(t, iw.AddSeries(1, series[0]))
|
||||
Ok(t, iw.AddSeries(2, series[1]))
|
||||
Ok(t, iw.AddSeries(3, series[2]))
|
||||
Ok(t, iw.AddSeries(4, series[3]))
|
||||
testutil.Ok(t, iw.AddSeries(1, series[0]))
|
||||
testutil.Ok(t, iw.AddSeries(2, series[1]))
|
||||
testutil.Ok(t, iw.AddSeries(3, series[2]))
|
||||
testutil.Ok(t, iw.AddSeries(4, series[3]))
|
||||
|
||||
err = iw.WritePostings("a", "1", newListPostings([]uint64{1, 2, 3, 4}))
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
Ok(t, iw.Close())
|
||||
testutil.Ok(t, iw.Close())
|
||||
|
||||
ir, err := NewFileIndexReader(filepath.Join(dir, "index"))
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
p, err := ir.Postings("a", "1")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
var l labels.Labels
|
||||
var c []ChunkMeta
|
||||
|
@ -221,22 +222,22 @@ func TestIndexRW_Postings(t *testing.T) {
|
|||
for i := 0; p.Next(); i++ {
|
||||
err := ir.Series(p.At(), &l, &c)
|
||||
|
||||
Ok(t, err)
|
||||
Equals(t, 0, len(c))
|
||||
Equals(t, series[i], l)
|
||||
testutil.Ok(t, err)
|
||||
testutil.Equals(t, 0, len(c))
|
||||
testutil.Equals(t, series[i], l)
|
||||
}
|
||||
Ok(t, p.Err())
|
||||
testutil.Ok(t, p.Err())
|
||||
|
||||
Ok(t, ir.Close())
|
||||
testutil.Ok(t, ir.Close())
|
||||
}
|
||||
|
||||
func TestPersistence_index_e2e(t *testing.T) {
|
||||
dir, err := ioutil.TempDir("", "test_persistence_e2e")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
lbls, err := readPrometheusLabels("testdata/20k.series", 20000)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
// Sort labels as the index writer expects series in sorted order.
|
||||
sort.Sort(labels.Slice(lbls))
|
||||
|
@ -270,9 +271,9 @@ func TestPersistence_index_e2e(t *testing.T) {
|
|||
}
|
||||
|
||||
iw, err := newIndexWriter(dir)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
Ok(t, iw.AddSymbols(symbols))
|
||||
testutil.Ok(t, iw.AddSymbols(symbols))
|
||||
|
||||
// Population procedure as done by compaction.
|
||||
var (
|
||||
|
@ -285,7 +286,7 @@ func TestPersistence_index_e2e(t *testing.T) {
|
|||
|
||||
for i, s := range input {
|
||||
err = iw.AddSeries(uint64(i), s.labels, s.chunks...)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
mi.AddSeries(uint64(i), s.labels, s.chunks...)
|
||||
|
||||
for _, l := range s.labels {
|
||||
|
@ -303,8 +304,8 @@ func TestPersistence_index_e2e(t *testing.T) {
|
|||
for k, v := range values {
|
||||
vals := v.slice()
|
||||
|
||||
Ok(t, iw.WriteLabelIndex([]string{k}, vals))
|
||||
Ok(t, mi.WriteLabelIndex([]string{k}, vals))
|
||||
testutil.Ok(t, iw.WriteLabelIndex([]string{k}, vals))
|
||||
testutil.Ok(t, mi.WriteLabelIndex([]string{k}, vals))
|
||||
}
|
||||
|
||||
all := make([]uint64, len(lbls))
|
||||
|
@ -312,24 +313,24 @@ func TestPersistence_index_e2e(t *testing.T) {
|
|||
all[i] = uint64(i)
|
||||
}
|
||||
err = iw.WritePostings("", "", newListPostings(all))
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
mi.WritePostings("", "", newListPostings(all))
|
||||
|
||||
for l := range postings.m {
|
||||
err = iw.WritePostings(l.Name, l.Value, postings.get(l.Name, l.Value))
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
mi.WritePostings(l.Name, l.Value, postings.get(l.Name, l.Value))
|
||||
}
|
||||
|
||||
err = iw.Close()
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
ir, err := NewFileIndexReader(filepath.Join(dir, "index"))
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
for p := range mi.postings.m {
|
||||
gotp, err := ir.Postings(p.Name, p.Value)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
expp, err := mi.Postings(p.Name, p.Value)
|
||||
|
||||
|
@ -337,39 +338,39 @@ func TestPersistence_index_e2e(t *testing.T) {
|
|||
var chks, expchks []ChunkMeta
|
||||
|
||||
for gotp.Next() {
|
||||
Assert(t, expp.Next() == true, "")
|
||||
testutil.Assert(t, expp.Next() == true, "")
|
||||
|
||||
ref := gotp.At()
|
||||
|
||||
err := ir.Series(ref, &lset, &chks)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
err = mi.Series(expp.At(), &explset, &expchks)
|
||||
Equals(t, explset, lset)
|
||||
Equals(t, expchks, chks)
|
||||
testutil.Equals(t, explset, lset)
|
||||
testutil.Equals(t, expchks, chks)
|
||||
}
|
||||
Assert(t, expp.Next() == false, "")
|
||||
Ok(t, gotp.Err())
|
||||
testutil.Assert(t, expp.Next() == false, "")
|
||||
testutil.Ok(t, gotp.Err())
|
||||
}
|
||||
|
||||
for k, v := range mi.labelIndex {
|
||||
tplsExp, err := newStringTuples(v, 1)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
tplsRes, err := ir.LabelValues(k)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
Equals(t, tplsExp.Len(), tplsRes.Len())
|
||||
testutil.Equals(t, tplsExp.Len(), tplsRes.Len())
|
||||
for i := 0; i < tplsExp.Len(); i++ {
|
||||
strsExp, err := tplsExp.At(i)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
strsRes, err := tplsRes.At(i)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
Equals(t, strsExp, strsRes)
|
||||
testutil.Equals(t, strsExp, strsRes)
|
||||
}
|
||||
}
|
||||
|
||||
Ok(t, ir.Close())
|
||||
testutil.Ok(t, ir.Close())
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ import (
|
|||
|
||||
"github.com/prometheus/tsdb/chunks"
|
||||
"github.com/prometheus/tsdb/labels"
|
||||
"github.com/prometheus/tsdb/testutil"
|
||||
)
|
||||
|
||||
type mockSeriesIterator struct {
|
||||
|
@ -189,7 +190,7 @@ Outer:
|
|||
|
||||
for {
|
||||
eok, rok := c.exp.Next(), res.Next()
|
||||
Equals(t, eok, rok)
|
||||
testutil.Equals(t, eok, rok)
|
||||
|
||||
if !eok {
|
||||
continue Outer
|
||||
|
@ -197,13 +198,13 @@ Outer:
|
|||
sexp := c.exp.At()
|
||||
sres := res.At()
|
||||
|
||||
Equals(t, sexp.Labels(), sres.Labels())
|
||||
testutil.Equals(t, sexp.Labels(), sres.Labels())
|
||||
|
||||
smplExp, errExp := expandSeriesIterator(sexp.Iterator())
|
||||
smplRes, errRes := expandSeriesIterator(sres.Iterator())
|
||||
|
||||
Equals(t, errExp, errRes)
|
||||
Equals(t, smplExp, smplRes)
|
||||
testutil.Equals(t, errExp, errRes)
|
||||
testutil.Equals(t, smplExp, smplRes)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -460,11 +461,11 @@ Outer:
|
|||
}
|
||||
|
||||
res, err := querier.Select(c.ms...)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
for {
|
||||
eok, rok := c.exp.Next(), res.Next()
|
||||
Equals(t, eok, rok)
|
||||
testutil.Equals(t, eok, rok)
|
||||
|
||||
if !eok {
|
||||
continue Outer
|
||||
|
@ -472,13 +473,13 @@ Outer:
|
|||
sexp := c.exp.At()
|
||||
sres := res.At()
|
||||
|
||||
Equals(t, sexp.Labels(), sres.Labels())
|
||||
testutil.Equals(t, sexp.Labels(), sres.Labels())
|
||||
|
||||
smplExp, errExp := expandSeriesIterator(sexp.Iterator())
|
||||
smplRes, errRes := expandSeriesIterator(sres.Iterator())
|
||||
|
||||
Equals(t, errExp, errRes)
|
||||
Equals(t, smplExp, smplRes)
|
||||
testutil.Equals(t, errExp, errRes)
|
||||
testutil.Equals(t, smplExp, smplRes)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -631,11 +632,11 @@ Outer:
|
|||
}
|
||||
|
||||
res, err := querier.Select(c.ms...)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
for {
|
||||
eok, rok := c.exp.Next(), res.Next()
|
||||
Equals(t, eok, rok)
|
||||
testutil.Equals(t, eok, rok)
|
||||
|
||||
if !eok {
|
||||
continue Outer
|
||||
|
@ -643,13 +644,13 @@ Outer:
|
|||
sexp := c.exp.At()
|
||||
sres := res.At()
|
||||
|
||||
Equals(t, sexp.Labels(), sres.Labels())
|
||||
testutil.Equals(t, sexp.Labels(), sres.Labels())
|
||||
|
||||
smplExp, errExp := expandSeriesIterator(sexp.Iterator())
|
||||
smplRes, errRes := expandSeriesIterator(sres.Iterator())
|
||||
|
||||
Equals(t, errExp, errRes)
|
||||
Equals(t, smplExp, smplRes)
|
||||
testutil.Equals(t, errExp, errRes)
|
||||
testutil.Equals(t, smplExp, smplRes)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -742,13 +743,13 @@ func TestBaseChunkSeries(t *testing.T) {
|
|||
|
||||
idx := tc.expIdxs[i]
|
||||
|
||||
Equals(t, tc.series[idx].lset, lset)
|
||||
Equals(t, tc.series[idx].chunks, chks)
|
||||
testutil.Equals(t, tc.series[idx].lset, lset)
|
||||
testutil.Equals(t, tc.series[idx].chunks, chks)
|
||||
|
||||
i++
|
||||
}
|
||||
Equals(t, len(tc.expIdxs), i)
|
||||
Ok(t, bcs.Err())
|
||||
testutil.Equals(t, len(tc.expIdxs), i)
|
||||
testutil.Ok(t, bcs.Err())
|
||||
}
|
||||
|
||||
return
|
||||
|
@ -958,8 +959,8 @@ func TestSeriesIterator(t *testing.T) {
|
|||
smplExp, errExp := expandSeriesIterator(exp)
|
||||
smplRes, errRes := expandSeriesIterator(res)
|
||||
|
||||
Equals(t, errExp, errRes)
|
||||
Equals(t, smplExp, smplRes)
|
||||
testutil.Equals(t, errExp, errRes)
|
||||
testutil.Equals(t, smplExp, smplRes)
|
||||
}
|
||||
|
||||
t.Run("Seek", func(t *testing.T) {
|
||||
|
@ -1026,21 +1027,21 @@ func TestSeriesIterator(t *testing.T) {
|
|||
}
|
||||
exp := newListSeriesIterator(smplValid)
|
||||
|
||||
Equals(t, tc.success, res.Seek(tc.seek))
|
||||
testutil.Equals(t, tc.success, res.Seek(tc.seek))
|
||||
|
||||
if tc.success {
|
||||
// Init the list and then proceed to check.
|
||||
remaining := exp.Next()
|
||||
Assert(t, remaining == true, "")
|
||||
testutil.Assert(t, remaining == true, "")
|
||||
|
||||
for remaining {
|
||||
sExp, eExp := exp.At()
|
||||
sRes, eRes := res.At()
|
||||
Equals(t, eExp, eRes)
|
||||
Equals(t, sExp, sRes)
|
||||
testutil.Equals(t, eExp, eRes)
|
||||
testutil.Equals(t, sExp, sRes)
|
||||
|
||||
remaining = exp.Next()
|
||||
Equals(t, remaining, res.Next())
|
||||
testutil.Equals(t, remaining, res.Next())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1059,8 +1060,8 @@ func TestSeriesIterator(t *testing.T) {
|
|||
smplExp, errExp := expandSeriesIterator(exp)
|
||||
smplRes, errRes := expandSeriesIterator(res)
|
||||
|
||||
Equals(t, errExp, errRes)
|
||||
Equals(t, smplExp, smplRes)
|
||||
testutil.Equals(t, errExp, errRes)
|
||||
testutil.Equals(t, smplExp, smplRes)
|
||||
}
|
||||
|
||||
t.Run("Seek", func(t *testing.T) {
|
||||
|
@ -1072,21 +1073,21 @@ func TestSeriesIterator(t *testing.T) {
|
|||
res := newChainedSeriesIterator(a, b, c)
|
||||
exp := newListSeriesIterator(tc.exp)
|
||||
|
||||
Equals(t, tc.success, res.Seek(tc.seek))
|
||||
testutil.Equals(t, tc.success, res.Seek(tc.seek))
|
||||
|
||||
if tc.success {
|
||||
// Init the list and then proceed to check.
|
||||
remaining := exp.Next()
|
||||
Assert(t, remaining == true, "")
|
||||
testutil.Assert(t, remaining == true, "")
|
||||
|
||||
for remaining {
|
||||
sExp, eExp := exp.At()
|
||||
sRes, eRes := res.At()
|
||||
Equals(t, eExp, eRes)
|
||||
Equals(t, sExp, sRes)
|
||||
testutil.Equals(t, eExp, eRes)
|
||||
testutil.Equals(t, sExp, sRes)
|
||||
|
||||
remaining = exp.Next()
|
||||
Equals(t, remaining, res.Next())
|
||||
testutil.Equals(t, remaining, res.Next())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1105,11 +1106,11 @@ func TestChunkSeriesIterator_DoubleSeek(t *testing.T) {
|
|||
}
|
||||
|
||||
res := newChunkSeriesIterator(chkMetas, nil, 2, 8)
|
||||
Assert(t, res.Seek(1) == true, "")
|
||||
Assert(t, res.Seek(2) == true, "")
|
||||
testutil.Assert(t, res.Seek(1) == true, "")
|
||||
testutil.Assert(t, res.Seek(2) == true, "")
|
||||
ts, v := res.At()
|
||||
Equals(t, int64(2), ts)
|
||||
Equals(t, float64(2), v)
|
||||
testutil.Equals(t, int64(2), ts)
|
||||
testutil.Equals(t, float64(2), v)
|
||||
}
|
||||
|
||||
// Regression when seeked chunks were still found via binary search and we always
|
||||
|
@ -1123,15 +1124,15 @@ func TestChunkSeriesIterator_SeekInCurrentChunk(t *testing.T) {
|
|||
|
||||
it := newChunkSeriesIterator(metas, nil, 1, 7)
|
||||
|
||||
Assert(t, it.Next() == true, "")
|
||||
testutil.Assert(t, it.Next() == true, "")
|
||||
ts, v := it.At()
|
||||
Equals(t, int64(1), ts)
|
||||
Equals(t, float64(2), v)
|
||||
testutil.Equals(t, int64(1), ts)
|
||||
testutil.Equals(t, float64(2), v)
|
||||
|
||||
Assert(t, it.Seek(4) == true, "")
|
||||
testutil.Assert(t, it.Seek(4) == true, "")
|
||||
ts, v = it.At()
|
||||
Equals(t, int64(5), ts)
|
||||
Equals(t, float64(6), v)
|
||||
testutil.Equals(t, int64(5), ts)
|
||||
testutil.Equals(t, float64(6), v)
|
||||
}
|
||||
|
||||
// Regression when calling Next() with a time bounded to fit within two samples.
|
||||
|
@ -1142,7 +1143,7 @@ func TestChunkSeriesIterator_NextWithMinTime(t *testing.T) {
|
|||
}
|
||||
|
||||
it := newChunkSeriesIterator(metas, nil, 2, 4)
|
||||
Assert(t, it.Next() == false, "")
|
||||
testutil.Assert(t, it.Next() == false, "")
|
||||
}
|
||||
|
||||
func TestPopulatedCSReturnsValidChunkSlice(t *testing.T) {
|
||||
|
@ -1172,11 +1173,11 @@ func TestPopulatedCSReturnsValidChunkSlice(t *testing.T) {
|
|||
maxt: 0,
|
||||
}
|
||||
|
||||
Assert(t, p.Next() == false, "")
|
||||
testutil.Assert(t, p.Next() == false, "")
|
||||
|
||||
p.mint = 6
|
||||
p.maxt = 9
|
||||
Assert(t, p.Next() == false, "")
|
||||
testutil.Assert(t, p.Next() == false, "")
|
||||
|
||||
// Test the case where 1 chunk could cause an unpopulated chunk to be returned.
|
||||
chunkMetas = [][]ChunkMeta{
|
||||
|
@ -1193,7 +1194,7 @@ func TestPopulatedCSReturnsValidChunkSlice(t *testing.T) {
|
|||
mint: 10,
|
||||
maxt: 15,
|
||||
}
|
||||
Assert(t, p.Next() == false, "")
|
||||
testutil.Assert(t, p.Next() == false, "")
|
||||
return
|
||||
}
|
||||
|
||||
|
@ -1245,7 +1246,7 @@ func BenchmarkMergedSeriesSet(b *testing.B) {
|
|||
for _, j := range []int{1, 2, 4, 8, 16, 32} {
|
||||
b.Run(fmt.Sprintf("series=%d,blocks=%d", k, j), func(b *testing.B) {
|
||||
lbls, err := readPrometheusLabels("testdata/1m.series", k)
|
||||
Ok(b, err)
|
||||
testutil.Ok(b, err)
|
||||
|
||||
sort.Sort(labels.Slice(lbls))
|
||||
|
||||
|
@ -1271,8 +1272,8 @@ func BenchmarkMergedSeriesSet(b *testing.B) {
|
|||
for ms.Next() {
|
||||
i++
|
||||
}
|
||||
Ok(b, ms.Err())
|
||||
Equals(b, len(lbls), i)
|
||||
testutil.Ok(b, ms.Err())
|
||||
testutil.Equals(b, len(lbls), i)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package tsdb
|
||||
package testutil
|
||||
|
||||
import (
|
||||
"fmt"
|
|
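The hunk above only shows the package clause changing from tsdb to testutil; the bodies of the shared helpers themselves are not part of this excerpt. Below is a minimal sketch of what Ok, Equals, and Assert could look like, inferred purely from the call sites in the hunks (they are called with both *testing.T and *testing.B, so a testing.TB receiver is assumed); the actual implementation in the repository may differ.

// Package testutil holds the assertion helpers shared by the tsdb tests.
// The bodies below are a sketch inferred from the call sites in this diff,
// not the committed implementation.
package testutil

import (
	"reflect"
	"testing"
)

// Ok fails the test immediately if err is non-nil.
func Ok(tb testing.TB, err error) {
	if err != nil {
		tb.Fatalf("unexpected error: %s", err)
	}
}

// Equals fails the test if exp and act are not deeply equal.
func Equals(tb testing.TB, exp, act interface{}) {
	if !reflect.DeepEqual(exp, act) {
		tb.Fatalf("expected %#v, got %#v", exp, act)
	}
}

// Assert fails the test with the given formatted message if condition is false.
func Assert(tb testing.TB, condition bool, format string, a ...interface{}) {
	if !condition {
		tb.Fatalf(format, a...)
	}
}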
@ -19,6 +19,8 @@ import (
|
|||
"os"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/prometheus/tsdb/testutil"
|
||||
)
|
||||
|
||||
func TestWriteAndReadbackTombStones(t *testing.T) {
|
||||
|
@ -41,13 +43,13 @@ func TestWriteAndReadbackTombStones(t *testing.T) {
|
|||
stones[ref] = dranges
|
||||
}
|
||||
|
||||
Ok(t, writeTombstoneFile(tmpdir, stones))
|
||||
testutil.Ok(t, writeTombstoneFile(tmpdir, stones))
|
||||
|
||||
restr, err := readTombstones(tmpdir)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
// Compare the two readers.
|
||||
Equals(t, stones, restr)
|
||||
testutil.Equals(t, stones, restr)
|
||||
}
|
||||
|
||||
func TestAddingNewIntervals(t *testing.T) {
|
||||
|
@ -115,7 +117,7 @@ func TestAddingNewIntervals(t *testing.T) {
|
|||
|
||||
for _, c := range cases {
|
||||
|
||||
Equals(t, c.exp, c.exist.add(c.new))
|
||||
testutil.Equals(t, c.exp, c.exist.add(c.new))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
|
153
wal_test.go
|
@ -22,46 +22,47 @@ import (
|
|||
|
||||
"github.com/go-kit/kit/log"
|
||||
"github.com/prometheus/tsdb/fileutil"
|
||||
"github.com/prometheus/tsdb/testutil"
|
||||
)
|
||||
|
||||
func TestSegmentWAL_cut(t *testing.T) {
|
||||
tmpdir, err := ioutil.TempDir("", "test_wal_cut")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer os.RemoveAll(tmpdir)
|
||||
|
||||
// This calls cut() implicitly the first time without a previous tail.
|
||||
w, err := OpenSegmentWAL(tmpdir, nil, 0, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
Ok(t, w.write(WALEntrySeries, 1, []byte("Hello World!!")))
|
||||
testutil.Ok(t, w.write(WALEntrySeries, 1, []byte("Hello World!!")))
|
||||
|
||||
Ok(t, w.cut())
|
||||
testutil.Ok(t, w.cut())
|
||||
|
||||
// Cutting creates a new file.
|
||||
Equals(t, 2, len(w.files))
|
||||
testutil.Equals(t, 2, len(w.files))
|
||||
|
||||
Ok(t, w.write(WALEntrySeries, 1, []byte("Hello World!!")))
|
||||
testutil.Ok(t, w.write(WALEntrySeries, 1, []byte("Hello World!!")))
|
||||
|
||||
Ok(t, w.Close())
|
||||
testutil.Ok(t, w.Close())
|
||||
|
||||
for _, of := range w.files {
|
||||
f, err := os.Open(of.Name())
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
// Verify header data.
|
||||
metab := make([]byte, 8)
|
||||
_, err = f.Read(metab)
|
||||
Ok(t, err)
|
||||
Equals(t, WALMagic, binary.BigEndian.Uint32(metab[:4]))
|
||||
Equals(t, WALFormatDefault, metab[4])
|
||||
testutil.Ok(t, err)
|
||||
testutil.Equals(t, WALMagic, binary.BigEndian.Uint32(metab[:4]))
|
||||
testutil.Equals(t, WALFormatDefault, metab[4])
|
||||
|
||||
// We cannot actually check for correct pre-allocation as it is
|
||||
// optional per filesystem and handled transparently.
|
||||
et, flag, b, err := newWALReader(nil, nil).entry(f)
|
||||
Ok(t, err)
|
||||
Equals(t, WALEntrySeries, et)
|
||||
Equals(t, flag, byte(walSeriesSimple))
|
||||
Equals(t, []byte("Hello World!!"), b)
|
||||
testutil.Ok(t, err)
|
||||
testutil.Equals(t, WALEntrySeries, et)
|
||||
testutil.Equals(t, flag, byte(walSeriesSimple))
|
||||
testutil.Equals(t, []byte("Hello World!!"), b)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -71,14 +72,14 @@ func TestSegmentWAL_Truncate(t *testing.T) {
|
|||
batch = 100
|
||||
)
|
||||
series, err := readPrometheusLabels("testdata/20k.series", numMetrics)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
dir, err := ioutil.TempDir("", "test_wal_log_truncate")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
// defer os.RemoveAll(dir)
|
||||
|
||||
w, err := OpenSegmentWAL(dir, nil, 0, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
w.segmentSize = 10000
|
||||
|
||||
for i := 0; i < numMetrics; i += batch {
|
||||
|
@ -88,7 +89,7 @@ func TestSegmentWAL_Truncate(t *testing.T) {
|
|||
rs = append(rs, RefSeries{Labels: s, Ref: uint64(i+j) + 1})
|
||||
}
|
||||
err := w.LogSeries(rs)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
}
|
||||
|
||||
// We mark the 2nd half of the files with a min timestamp that should discard
|
||||
|
@ -110,7 +111,7 @@ func TestSegmentWAL_Truncate(t *testing.T) {
|
|||
}
|
||||
|
||||
err = w.Truncate(1000, keepf)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
var expected []RefSeries
|
||||
|
||||
|
@ -123,12 +124,12 @@ func TestSegmentWAL_Truncate(t *testing.T) {
|
|||
// Call Truncate once again to see whether we can read the written file without
|
||||
// creating a new WAL.
|
||||
err = w.Truncate(1000, keepf)
|
||||
Ok(t, err)
|
||||
Ok(t, w.Close())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, w.Close())
|
||||
|
||||
// The same again with a new WAL.
|
||||
w, err = OpenSegmentWAL(dir, nil, 0, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
var readSeries []RefSeries
|
||||
r := w.Reader()
|
||||
|
@ -137,7 +138,7 @@ func TestSegmentWAL_Truncate(t *testing.T) {
|
|||
readSeries = append(readSeries, s...)
|
||||
}, nil, nil)
|
||||
|
||||
Equals(t, expected, readSeries)
|
||||
testutil.Equals(t, expected, readSeries)
|
||||
}
|
||||
|
||||
// Symmetrical test of reading and writing to the WAL via its main interface.
|
||||
|
@ -150,10 +151,10 @@ func TestSegmentWAL_Log_Restore(t *testing.T) {
|
|||
// Generate testing data. It does not make semantical sense but
|
||||
// for the purpose of this test.
|
||||
series, err := readPrometheusLabels("testdata/20k.series", numMetrics)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
dir, err := ioutil.TempDir("", "test_wal_log_restore")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
var (
|
||||
|
@ -167,7 +168,7 @@ func TestSegmentWAL_Log_Restore(t *testing.T) {
|
|||
// write more data to it, close it.
|
||||
for k := 0; k < numMetrics; k += numMetrics / iterations {
|
||||
w, err := OpenSegmentWAL(dir, nil, 0, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
// Set smaller segment size so we can actually write several files.
|
||||
w.segmentSize = 1000 * 1000
|
||||
|
@ -203,11 +204,11 @@ func TestSegmentWAL_Log_Restore(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
Ok(t, r.Read(serf, smplf, delf))
|
||||
testutil.Ok(t, r.Read(serf, smplf, delf))
|
||||
|
||||
Equals(t, recordedSamples, resultSamples)
|
||||
Equals(t, recordedSeries, resultSeries)
|
||||
Equals(t, recordedDeletes, resultDeletes)
|
||||
testutil.Equals(t, recordedSamples, resultSamples)
|
||||
testutil.Equals(t, recordedSeries, resultSeries)
|
||||
testutil.Equals(t, recordedDeletes, resultDeletes)
|
||||
|
||||
series := series[k : k+(numMetrics/iterations)]
|
||||
|
||||
|
@ -238,9 +239,9 @@ func TestSegmentWAL_Log_Restore(t *testing.T) {
|
|||
})
|
||||
}
|
||||
|
||||
Ok(t, w.LogSeries(series))
|
||||
Ok(t, w.LogSamples(samples))
|
||||
Ok(t, w.LogDeletes(stones))
|
||||
testutil.Ok(t, w.LogSeries(series))
|
||||
testutil.Ok(t, w.LogSamples(samples))
|
||||
testutil.Ok(t, w.LogDeletes(stones))
|
||||
|
||||
if len(lbls) > 0 {
|
||||
recordedSeries = append(recordedSeries, series)
|
||||
|
@ -254,39 +255,39 @@ func TestSegmentWAL_Log_Restore(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
Ok(t, w.Close())
|
||||
testutil.Ok(t, w.Close())
|
||||
}
|
||||
}
|
||||
|
||||
func TestWALRestoreCorrupted_invalidSegment(t *testing.T) {
|
||||
dir, err := ioutil.TempDir("", "test_wal_log_restore")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
wal, err := OpenSegmentWAL(dir, nil, 0, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
_, err = wal.createSegmentFile(dir + "/000000")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
f, err := wal.createSegmentFile(dir + "/000001")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
f2, err := wal.createSegmentFile(dir + "/000002")
|
||||
Ok(t, err)
|
||||
Ok(t, f2.Close())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, f2.Close())
|
||||
|
||||
// Make header of second segment invalid.
|
||||
_, err = f.WriteAt([]byte{1, 2, 3, 4}, 0)
|
||||
Ok(t, err)
|
||||
Ok(t, f.Close())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Ok(t, f.Close())
|
||||
|
||||
Ok(t, wal.Close())
|
||||
testutil.Ok(t, wal.Close())
|
||||
|
||||
wal, err = OpenSegmentWAL(dir, log.NewLogfmtLogger(os.Stderr), 0, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
fns, err := fileutil.ReadDir(dir)
|
||||
Ok(t, err)
|
||||
Equals(t, []string{"000000"}, fns)
|
||||
testutil.Ok(t, err)
|
||||
testutil.Equals(t, []string{"000000"}, fns)
|
||||
}
|
||||
|
||||
// Test reading from a WAL that has been corrupted through various means.
|
||||
|
@ -299,56 +300,56 @@ func TestWALRestoreCorrupted(t *testing.T) {
|
|||
name: "truncate_checksum",
|
||||
f: func(t *testing.T, w *SegmentWAL) {
|
||||
f, err := os.OpenFile(w.files[0].Name(), os.O_WRONLY, 0666)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer f.Close()
|
||||
|
||||
off, err := f.Seek(0, os.SEEK_END)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
Ok(t, f.Truncate(off-1))
|
||||
testutil.Ok(t, f.Truncate(off-1))
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "truncate_body",
|
||||
f: func(t *testing.T, w *SegmentWAL) {
|
||||
f, err := os.OpenFile(w.files[0].Name(), os.O_WRONLY, 0666)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer f.Close()
|
||||
|
||||
off, err := f.Seek(0, os.SEEK_END)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
Ok(t, f.Truncate(off-8))
|
||||
testutil.Ok(t, f.Truncate(off-8))
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "body_content",
|
||||
f: func(t *testing.T, w *SegmentWAL) {
|
||||
f, err := os.OpenFile(w.files[0].Name(), os.O_WRONLY, 0666)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer f.Close()
|
||||
|
||||
off, err := f.Seek(0, os.SEEK_END)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
// Write junk before checksum starts.
|
||||
_, err = f.WriteAt([]byte{1, 2, 3, 4}, off-8)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "checksum",
|
||||
f: func(t *testing.T, w *SegmentWAL) {
|
||||
f, err := os.OpenFile(w.files[0].Name(), os.O_WRONLY, 0666)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer f.Close()
|
||||
|
||||
off, err := f.Seek(0, os.SEEK_END)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
// Write junk into checksum
|
||||
_, err = f.WriteAt([]byte{1, 2, 3, 4}, off-4)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -357,21 +358,21 @@ func TestWALRestoreCorrupted(t *testing.T) {
|
|||
// Generate testing data. It does not make semantical sense but
|
||||
// for the purpose of this test.
|
||||
dir, err := ioutil.TempDir("", "test_corrupted")
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
w, err := OpenSegmentWAL(dir, nil, 0, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
Ok(t, w.LogSamples([]RefSample{{T: 1, V: 2}}))
|
||||
Ok(t, w.LogSamples([]RefSample{{T: 2, V: 3}}))
|
||||
testutil.Ok(t, w.LogSamples([]RefSample{{T: 1, V: 2}}))
|
||||
testutil.Ok(t, w.LogSamples([]RefSample{{T: 2, V: 3}}))
|
||||
|
||||
Ok(t, w.cut())
|
||||
testutil.Ok(t, w.cut())
|
||||
|
||||
Ok(t, w.LogSamples([]RefSample{{T: 3, V: 4}}))
|
||||
Ok(t, w.LogSamples([]RefSample{{T: 5, V: 6}}))
|
||||
testutil.Ok(t, w.LogSamples([]RefSample{{T: 3, V: 4}}))
|
||||
testutil.Ok(t, w.LogSamples([]RefSample{{T: 5, V: 6}}))
|
||||
|
||||
Ok(t, w.Close())
|
||||
testutil.Ok(t, w.Close())
|
||||
|
||||
// cut() truncates and fsyncs the first segment async. If it happens after
|
||||
// the corruption we apply below, the corruption will be overwritten again.
|
||||
|
@ -386,39 +387,39 @@ func TestWALRestoreCorrupted(t *testing.T) {
|
|||
logger := log.NewLogfmtLogger(os.Stderr)
|
||||
|
||||
w2, err := OpenSegmentWAL(dir, logger, 0, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
r := w2.Reader()
|
||||
|
||||
serf := func(l []RefSeries) {
|
||||
Equals(t, 0, len(l))
|
||||
testutil.Equals(t, 0, len(l))
|
||||
}
|
||||
|
||||
// Weird hack to check order of reads.
|
||||
i := 0
|
||||
samplf := func(s []RefSample) {
|
||||
if i == 0 {
|
||||
Equals(t, []RefSample{{T: 1, V: 2}}, s)
|
||||
testutil.Equals(t, []RefSample{{T: 1, V: 2}}, s)
|
||||
i++
|
||||
} else {
|
||||
Equals(t, []RefSample{{T: 99, V: 100}}, s)
|
||||
testutil.Equals(t, []RefSample{{T: 99, V: 100}}, s)
|
||||
}
|
||||
}
|
||||
|
||||
Ok(t, r.Read(serf, samplf, nil))
|
||||
testutil.Ok(t, r.Read(serf, samplf, nil))
|
||||
|
||||
Ok(t, w2.LogSamples([]RefSample{{T: 99, V: 100}}))
|
||||
Ok(t, w2.Close())
|
||||
testutil.Ok(t, w2.LogSamples([]RefSample{{T: 99, V: 100}}))
|
||||
testutil.Ok(t, w2.Close())
|
||||
|
||||
// We should see the first valid entry and the new one, everything after
|
||||
// is truncated.
|
||||
w3, err := OpenSegmentWAL(dir, logger, 0, nil)
|
||||
Ok(t, err)
|
||||
testutil.Ok(t, err)
|
||||
|
||||
r = w3.Reader()
|
||||
|
||||
i = 0
|
||||
Ok(t, r.Read(serf, samplf, nil))
|
||||
testutil.Ok(t, r.Read(serf, samplf, nil))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
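Every hunk above follows the same pattern: the call sites switch to the testutil-qualified helpers and the file gains a github.com/prometheus/tsdb/testutil import, so the helpers can now be reused from tests outside the root tsdb package. A minimal sketch of such a reuse is shown below; the package, test name, and file layout are hypothetical and only the fileutil.ReadDir and testutil signatures visible in this diff are relied on.

// Hypothetical example: a test in another package of the repository reusing
// the shared helpers. The test and its expectations are illustrative only.
package fileutil_test

import (
	"io/ioutil"
	"os"
	"path/filepath"
	"testing"

	"github.com/prometheus/tsdb/fileutil"
	"github.com/prometheus/tsdb/testutil"
)

func TestReadDirListsCreatedFiles(t *testing.T) {
	dir, err := ioutil.TempDir("", "test_readdir")
	testutil.Ok(t, err)
	defer os.RemoveAll(dir)

	// Create two empty segment-like files.
	for _, name := range []string{"000000", "000001"} {
		testutil.Ok(t, ioutil.WriteFile(filepath.Join(dir, name), nil, 0666))
	}

	// ReadDir should report both file names.
	fns, err := fileutil.ReadDir(dir)
	testutil.Ok(t, err)
	testutil.Equals(t, 2, len(fns))
}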