From b41d6d54f2262f089ff1b7151f67a00fcdfb8e0f Mon Sep 17 00:00:00 2001
From: Simon Pasquier
Date: Fri, 15 Feb 2019 16:47:41 +0100
Subject: [PATCH] storage/remote: increase timeouts for Travis CI (#5224)

* storage/remote: adapt tests for Travis CI

Signed-off-by: Simon Pasquier

* Check filesystems on Travis environment

Signed-off-by: Simon Pasquier

* Run remote/storage tests on CircleCI for troubleshooting

Signed-off-by: Simon Pasquier

* Try using tmpfs partition

Signed-off-by: Simon Pasquier

* Revert "Try using tmpfs partition"

This reverts commit 85a30deb727b8907eb82ec7fc6979deb94bb8c04.

Signed-off-by: Simon Pasquier

* Don't store labels in writeToMock

Signed-off-by: Simon Pasquier

* Fix data race

Signed-off-by: Simon Pasquier

* Bump retries to 100 meaning that the total timeout is 10s

Signed-off-by: Simon Pasquier

* clean up .travis.yml

Signed-off-by: Simon Pasquier

* code fixup

Signed-off-by: Simon Pasquier

* Remove unneeded empty line

Signed-off-by: Simon Pasquier
---
 .travis.yml                        |   2 -
 storage/remote/wal_watcher.go      |   2 +-
 storage/remote/wal_watcher_test.go | 114 +++++++++++------------------
 3 files changed, 43 insertions(+), 75 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index bb704e9cd..018bdabc2 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,5 +1,3 @@
-sudo: false
-
 language: go
 
 # Whenever the Go version is updated here, .circleci/config.yml and .promu.yml
diff --git a/storage/remote/wal_watcher.go b/storage/remote/wal_watcher.go
index 0568fc14b..b3e93dcb4 100644
--- a/storage/remote/wal_watcher.go
+++ b/storage/remote/wal_watcher.go
@@ -276,7 +276,7 @@ func (w *WALWatcher) runWatcher() {
 	}
 }
 
-// Use tail true to indicate thatreader is currently on a segment that is
+// Use tail true to indicate that the reader is currently on a segment that is
 // actively being written to. If false, assume it's a full segment and we're
 // replaying it on start to cache the series records.
 func (w *WALWatcher) watch(wl *wal.WAL, reader *wal.LiveReader, tail bool) error {
diff --git a/storage/remote/wal_watcher_test.go b/storage/remote/wal_watcher_test.go
index f16c8f0ce..9a68a5e3e 100644
--- a/storage/remote/wal_watcher_test.go
+++ b/storage/remote/wal_watcher_test.go
@@ -22,18 +22,33 @@ import (
 	"testing"
 	"time"
 
-	"github.com/prometheus/common/model"
 	"github.com/prometheus/prometheus/pkg/timestamp"
-	"github.com/prometheus/prometheus/prompb"
 	"github.com/prometheus/prometheus/util/testutil"
 	"github.com/prometheus/tsdb"
 	"github.com/prometheus/tsdb/labels"
 	"github.com/prometheus/tsdb/wal"
 )
 
+var defaultRetryInterval = 100 * time.Millisecond
+var defaultRetries = 100
+
+// retry executes f() n times at each interval until it returns true.
+func retry(t *testing.T, interval time.Duration, n int, f func() bool) {
+	t.Helper()
+	ticker := time.NewTicker(interval)
+	for i := 0; i <= n; i++ {
+		if f() {
+			return
+		}
+		t.Logf("retry %d/%d", i, n)
+		<-ticker.C
+	}
+	ticker.Stop()
+	t.Logf("function returned false")
+}
+
 type writeToMock struct {
 	samplesAppended      int
-	seriesLabels         map[uint64][]prompb.Label
 	seriesLock           sync.Mutex
 	seriesSegmentIndexes map[uint64]int
 }
@@ -44,20 +59,10 @@ func (wtm *writeToMock) Append(s []tsdb.RefSample) bool {
 }
 
 func (wtm *writeToMock) StoreSeries(series []tsdb.RefSeries, index int) {
-	temp := make(map[uint64][]prompb.Label, len(series))
-	for _, s := range series {
-		ls := make(model.LabelSet, len(s.Labels))
-		for _, label := range s.Labels {
-			ls[model.LabelName(label.Name)] = model.LabelValue(label.Value)
-		}
-
-		temp[s.Ref] = labelsetToLabelsProto(ls)
-	}
 	wtm.seriesLock.Lock()
 	defer wtm.seriesLock.Unlock()
-	for ref, labels := range temp {
-		wtm.seriesLabels[ref] = labels
-		wtm.seriesSegmentIndexes[ref] = index
+	for _, s := range series {
+		wtm.seriesSegmentIndexes[s.Ref] = index
 	}
 }
 
@@ -68,7 +73,6 @@ func (wtm *writeToMock) SeriesReset(index int) {
 	defer wtm.seriesLock.Unlock()
 	for k, v := range wtm.seriesSegmentIndexes {
 		if v < index {
-			delete(wtm.seriesLabels, k)
 			delete(wtm.seriesSegmentIndexes, k)
 		}
 	}
@@ -77,12 +81,11 @@ func (wtm *writeToMock) SeriesReset(index int) {
 func (wtm *writeToMock) checkNumLabels() int {
 	wtm.seriesLock.Lock()
 	defer wtm.seriesLock.Unlock()
-	return len(wtm.seriesLabels)
+	return len(wtm.seriesSegmentIndexes)
 }
 
 func newWriteToMock() *writeToMock {
 	return &writeToMock{
-		seriesLabels:         make(map[uint64][]prompb.Label),
 		seriesSegmentIndexes: make(map[uint64]int),
 	}
 }
@@ -141,21 +144,13 @@ func Test_readToEnd_noCheckpoint(t *testing.T) {
 	st := timestamp.FromTime(time.Now())
 	watcher := NewWALWatcher(nil, "", wt, dir, st)
 	go watcher.Start()
 
-	i := 0
-	ticker := time.NewTicker(100 * time.Millisecond)
-	for range ticker.C {
-		if wt.checkNumLabels() >= seriesCount*10*2 {
-			break
-		}
-		i++
-		if i >= 10 {
-			break
-		}
-	}
+	expected := seriesCount
+	retry(t, defaultRetryInterval, defaultRetries, func() bool {
+		return wt.checkNumLabels() >= expected
+	})
 	watcher.Stop()
-	ticker.Stop()
-	testutil.Equals(t, seriesCount, wt.checkNumLabels())
+	testutil.Equals(t, expected, wt.checkNumLabels())
 }
 
 func Test_readToEnd_withCheckpoint(t *testing.T) {
@@ -231,21 +226,13 @@
 	st := timestamp.FromTime(time.Now())
 	watcher := NewWALWatcher(nil, "", wt, dir, st)
 	go watcher.Start()
 
-	i := 0
-	ticker := time.NewTicker(100 * time.Millisecond)
-	for range ticker.C {
-		if wt.checkNumLabels() >= seriesCount*10*2 {
-			break
-		}
-		i++
-		if i >= 20 {
-			break
-		}
-	}
+	expected := seriesCount * 10 * 2
+	retry(t, defaultRetryInterval, defaultRetries, func() bool {
+		return wt.checkNumLabels() >= expected
+	})
 	watcher.Stop()
-	ticker.Stop()
-	testutil.Equals(t, seriesCount*10*2, wt.checkNumLabels())
+	testutil.Equals(t, expected, wt.checkNumLabels())
 }
 
 func Test_readCheckpoint(t *testing.T) {
@@ -301,21 +288,13 @@
 	st := timestamp.FromTime(time.Now())
 	watcher := NewWALWatcher(nil, "", wt, dir, st)
 	go watcher.Start()
 
-	i := 0
-	ticker := time.NewTicker(100 * time.Millisecond)
-	for range ticker.C {
-		if wt.checkNumLabels() >= seriesCount*10*2 {
-			break
-		}
-		i++
-		if i >= 8 {
-			break
-		}
-	}
+	expected := seriesCount * 10
+	retry(t, defaultRetryInterval, defaultRetries, func() bool {
+		return wt.checkNumLabels() >= expected
+	})
 	watcher.Stop()
-	ticker.Stop()
-	testutil.Equals(t, seriesCount*10, wt.checkNumLabels())
+	testutil.Equals(t, expected, wt.checkNumLabels())
 }
 
 func Test_checkpoint_seriesReset(t *testing.T) {
@@ -366,21 +345,12 @@
 	st := timestamp.FromTime(time.Now())
 	watcher := NewWALWatcher(nil, "", wt, dir, st)
 	go watcher.Start()
 
-	i := 0
-	ticker := time.NewTicker(100 * time.Millisecond)
-	for range ticker.C {
-		if wt.checkNumLabels() >= seriesCount*10*2 {
-			break
-		}
-		i++
-		if i >= 50 {
-			break
-		}
-
-	}
+	expected := seriesCount * 10
+	retry(t, defaultRetryInterval, defaultRetries, func() bool {
+		return wt.checkNumLabels() >= expected
+	})
 	watcher.Stop()
-	ticker.Stop()
 	testutil.Equals(t, seriesCount*10, wt.checkNumLabels())
 
 	// If you modify the checkpoint and truncate segment #'s run the test to see how
@@ -411,7 +381,7 @@ func Test_decodeRecord(t *testing.T) {
 	watcher.decodeRecord(buf)
 	testutil.Ok(t, err)
 
-	testutil.Equals(t, 1, len(wt.seriesLabels))
+	testutil.Equals(t, 1, wt.checkNumLabels())
 
 	// decode a samples record
 	buf = enc.Samples([]tsdb.RefSample{tsdb.RefSample{Ref: 100, T: 1, V: 1.0}, tsdb.RefSample{Ref: 100, T: 2, V: 2.0}}, nil)
@@ -436,7 +406,7 @@ func Test_decodeRecord_afterStart(t *testing.T) {
 	watcher.decodeRecord(buf)
 	testutil.Ok(t, err)
 
-	testutil.Equals(t, 1, len(wt.seriesLabels))
+	testutil.Equals(t, 1, wt.checkNumLabels())
 
 	// decode a samples record
 	buf = enc.Samples([]tsdb.RefSample{tsdb.RefSample{Ref: 100, T: 1, V: 1.0}, tsdb.RefSample{Ref: 100, T: 2, V: 2.0}}, nil)
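
The patch above replaces each test's hand-rolled ticker loop with the shared retry helper and raises the budget to defaultRetries = 100 polls at defaultRetryInterval = 100ms, roughly a 10 second ceiling, which is what the "Bump retries to 100" commit message refers to. The sketch below is a minimal, self-contained illustration of that polling pattern and is not part of the patch: the names retryUntil and TestEventuallyDone are made up for the example, and unlike the patched helper it also stops its ticker on the success path.

package example

import (
	"sync/atomic"
	"testing"
	"time"
)

// retryUntil polls f every interval, at most n+1 times, and returns as soon
// as f reports success. It mirrors the retry helper added by the patch,
// except that the ticker is stopped on every exit path (a choice made for
// this sketch, not something the patch does).
func retryUntil(t *testing.T, interval time.Duration, n int, f func() bool) {
	t.Helper()
	ticker := time.NewTicker(interval)
	defer ticker.Stop()
	for i := 0; i <= n; i++ {
		if f() {
			return
		}
		t.Logf("retry %d/%d", i, n)
		<-ticker.C
	}
	t.Logf("condition still false after %d retries", n)
}

// TestEventuallyDone is a hypothetical test showing the usage pattern from
// the patched tests: start background work, poll a race-free counter until
// it reaches the expected value, then make the final assertion.
func TestEventuallyDone(t *testing.T) {
	var processed int64
	go func() {
		for i := 0; i < 50; i++ {
			atomic.AddInt64(&processed, 1)
			time.Sleep(time.Millisecond)
		}
	}()

	expected := int64(50)
	retryUntil(t, 10*time.Millisecond, 100, func() bool {
		return atomic.LoadInt64(&processed) >= expected
	})
	if got := atomic.LoadInt64(&processed); got != expected {
		t.Fatalf("processed %d samples, want %d", got, expected)
	}
}

The shape is the same as in the patched tests: the watcher runs in a goroutine while the test polls wt.checkNumLabels(), which now just reports the size of the mutex-guarded seriesSegmentIndexes map, until it reaches expected, and only then runs testutil.Equals. Dropping the seriesLabels map from writeToMock and doing the bookkeeping under seriesLock is what the "Don't store labels in writeToMock" and "Fix data race" commits provide, so this polling is safe to run concurrently with the watcher.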