storage/remote: increase timeouts for Travis CI (#5224)

* storage/remote: adapt tests for Travis CI

Signed-off-by: Simon Pasquier <spasquie@redhat.com>

* Check filesystems on Travis environment

Signed-off-by: Simon Pasquier <spasquie@redhat.com>

* Run remote/storage tests on CircleCI for troubleshooting

Signed-off-by: Simon Pasquier <spasquie@redhat.com>

* Try using tmpfs partition

Signed-off-by: Simon Pasquier <spasquie@redhat.com>

* Revert "Try using tmpfs partition"

This reverts commit 85a30deb72.

Signed-off-by: Simon Pasquier <spasquie@redhat.com>

* Don't store labels in writeToMock

Signed-off-by: Simon Pasquier <spasquie@redhat.com>

* Fix data race

Signed-off-by: Simon Pasquier <spasquie@redhat.com>

* Bump retries to 100, meaning that the total timeout is 10s (see the sketch after this list)

Signed-off-by: Simon Pasquier <spasquie@redhat.com>

* clean up .travis.yml

Signed-off-by: Simon Pasquier <spasquie@redhat.com>

* code fixup

Signed-off-by: Simon Pasquier <spasquie@redhat.com>

* Remove unneeded empty line

Signed-off-by: Simon Pasquier <spasquie@redhat.com>
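The "Bump retries to 100" item above is simple arithmetic: the test helper polls every 100 ms for up to 100 retries, so the worst-case wait is roughly 100 × 100 ms ≈ 10 s. Below is a minimal, self-contained sketch of that polling pattern; retryUntil and the simulated-work goroutine are illustrative stand-ins, not the committed code (the real retry helper added in the test file below takes a *testing.T and logs each attempt, as the diff shows).

package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

// retryUntil polls f roughly every interval until it returns true, giving up
// after n retries plus one final check. With interval = 100 ms and n = 100 the
// worst-case wait is about 10 seconds, the budget mentioned in the commit message.
func retryUntil(interval time.Duration, n int, f func() bool) bool {
	ticker := time.NewTicker(interval)
	defer ticker.Stop()
	for i := 0; i < n; i++ {
		if f() {
			return true
		}
		<-ticker.C
	}
	return f()
}

func main() {
	var done int32
	go func() {
		time.Sleep(300 * time.Millisecond) // stand-in for slow CI filesystem I/O
		atomic.StoreInt32(&done, 1)
	}()
	ok := retryUntil(100*time.Millisecond, 100, func() bool {
		return atomic.LoadInt32(&done) == 1
	})
	fmt.Println("condition met:", ok)
}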
Simon Pasquier 2019-02-15 16:47:41 +01:00 committed by GitHub
parent b7594f650f
commit b41d6d54f2
3 changed files with 43 additions and 75 deletions

.travis.yml

@@ -1,5 +1,3 @@
- sudo: false
language: go
# Whenever the Go version is updated here, .circleci/config.yml and .promu.yml

storage/remote/wal_watcher.go

@@ -276,7 +276,7 @@ func (w *WALWatcher) runWatcher() {
}
}
- // Use tail true to indicate thatreader is currently on a segment that is
+ // Use tail true to indicate that the reader is currently on a segment that is
// actively being written to. If false, assume it's a full segment and we're
// replaying it on start to cache the series records.
func (w *WALWatcher) watch(wl *wal.WAL, reader *wal.LiveReader, tail bool) error {
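The corrected comment above spells out the watch() contract: with tail=true the reader is sitting on the segment that is still being appended to, so reaching the end just means waiting for more data; with tail=false the segment is complete and is only being replayed at startup to rebuild the series cache. The following is a generic, self-contained sketch of that pattern, using hypothetical readSegment/next/handle names rather than the actual WALWatcher internals.

package main

import (
	"errors"
	"fmt"
	"io"
	"time"
)

// readSegment drains records via next and passes them to handle. When tail is
// true the segment is still being written, so io.EOF only means "no new data
// yet" and the loop backs off and retries; when tail is false the segment is
// complete and io.EOF ends the replay.
func readSegment(next func() ([]byte, error), handle func([]byte), tail bool) error {
	for {
		rec, err := next()
		switch {
		case err == nil:
			handle(rec)
		case errors.Is(err, io.EOF) && !tail:
			return nil // closed segment fully replayed
		case errors.Is(err, io.EOF) && tail:
			time.Sleep(10 * time.Millisecond) // live segment: wait for more writes
		default:
			return err
		}
	}
}

func main() {
	records := [][]byte{[]byte("series"), []byte("samples")}
	i := 0
	next := func() ([]byte, error) {
		if i == len(records) {
			return nil, io.EOF
		}
		rec := records[i]
		i++
		return rec, nil
	}
	// Replaying a closed segment (tail=false) stops cleanly at EOF.
	err := readSegment(next, func(r []byte) { fmt.Printf("record: %s\n", r) }, false)
	fmt.Println("err:", err)
}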

storage/remote/wal_watcher_test.go

@@ -22,18 +22,33 @@ import (
"testing"
"time"
- "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/timestamp"
- "github.com/prometheus/prometheus/prompb"
"github.com/prometheus/prometheus/util/testutil"
"github.com/prometheus/tsdb"
"github.com/prometheus/tsdb/labels"
"github.com/prometheus/tsdb/wal"
)
+ var defaultRetryInterval = 100 * time.Millisecond
+ var defaultRetries = 100
+ // retry executes f() n times at each interval until it returns true.
+ func retry(t *testing.T, interval time.Duration, n int, f func() bool) {
+ t.Helper()
+ ticker := time.NewTicker(interval)
+ for i := 0; i <= n; i++ {
+ if f() {
+ return
+ }
+ t.Logf("retry %d/%d", i, n)
+ <-ticker.C
+ }
+ ticker.Stop()
+ t.Logf("function returned false")
+ }
type writeToMock struct {
samplesAppended int
- seriesLabels map[uint64][]prompb.Label
seriesLock sync.Mutex
seriesSegmentIndexes map[uint64]int
}
@@ -44,20 +59,10 @@ func (wtm *writeToMock) Append(s []tsdb.RefSample) bool {
}
func (wtm *writeToMock) StoreSeries(series []tsdb.RefSeries, index int) {
- temp := make(map[uint64][]prompb.Label, len(series))
- for _, s := range series {
- ls := make(model.LabelSet, len(s.Labels))
- for _, label := range s.Labels {
- ls[model.LabelName(label.Name)] = model.LabelValue(label.Value)
- }
- temp[s.Ref] = labelsetToLabelsProto(ls)
- }
wtm.seriesLock.Lock()
defer wtm.seriesLock.Unlock()
- for ref, labels := range temp {
- wtm.seriesLabels[ref] = labels
- wtm.seriesSegmentIndexes[ref] = index
+ for _, s := range series {
+ wtm.seriesSegmentIndexes[s.Ref] = index
}
}
@@ -68,7 +73,6 @@ func (wtm *writeToMock) SeriesReset(index int) {
defer wtm.seriesLock.Unlock()
for k, v := range wtm.seriesSegmentIndexes {
if v < index {
- delete(wtm.seriesLabels, k)
delete(wtm.seriesSegmentIndexes, k)
}
}
@@ -77,12 +81,11 @@ func (wtm *writeToMock) SeriesReset(index int) {
func (wtm *writeToMock) checkNumLabels() int {
wtm.seriesLock.Lock()
defer wtm.seriesLock.Unlock()
- return len(wtm.seriesLabels)
+ return len(wtm.seriesSegmentIndexes)
}
func newWriteToMock() *writeToMock {
return &writeToMock{
- seriesLabels: make(map[uint64][]prompb.Label),
seriesSegmentIndexes: make(map[uint64]int),
}
}
@@ -141,21 +144,13 @@ func Test_readToEnd_noCheckpoint(t *testing.T) {
st := timestamp.FromTime(time.Now())
watcher := NewWALWatcher(nil, "", wt, dir, st)
go watcher.Start()
- i := 0
- ticker := time.NewTicker(100 * time.Millisecond)
- for range ticker.C {
- if wt.checkNumLabels() >= seriesCount*10*2 {
- break
- }
- i++
- if i >= 10 {
- break
- }
- }
+ expected := seriesCount
+ retry(t, defaultRetryInterval, defaultRetries, func() bool {
+ return wt.checkNumLabels() >= expected
+ })
watcher.Stop()
- ticker.Stop()
- testutil.Equals(t, seriesCount, wt.checkNumLabels())
+ testutil.Equals(t, expected, wt.checkNumLabels())
}
func Test_readToEnd_withCheckpoint(t *testing.T) {
@@ -231,21 +226,13 @@ func Test_readToEnd_withCheckpoint(t *testing.T) {
st := timestamp.FromTime(time.Now())
watcher := NewWALWatcher(nil, "", wt, dir, st)
go watcher.Start()
- i := 0
- ticker := time.NewTicker(100 * time.Millisecond)
- for range ticker.C {
- if wt.checkNumLabels() >= seriesCount*10*2 {
- break
- }
- i++
- if i >= 20 {
- break
- }
- }
+ expected := seriesCount * 10 * 2
+ retry(t, defaultRetryInterval, defaultRetries, func() bool {
+ return wt.checkNumLabels() >= expected
+ })
watcher.Stop()
- ticker.Stop()
- testutil.Equals(t, seriesCount*10*2, wt.checkNumLabels())
+ testutil.Equals(t, expected, wt.checkNumLabels())
}
func Test_readCheckpoint(t *testing.T) {
@@ -301,21 +288,13 @@ func Test_readCheckpoint(t *testing.T) {
st := timestamp.FromTime(time.Now())
watcher := NewWALWatcher(nil, "", wt, dir, st)
go watcher.Start()
- i := 0
- ticker := time.NewTicker(100 * time.Millisecond)
- for range ticker.C {
- if wt.checkNumLabels() >= seriesCount*10*2 {
- break
- }
- i++
- if i >= 8 {
- break
- }
- }
+ expected := seriesCount * 10
+ retry(t, defaultRetryInterval, defaultRetries, func() bool {
+ return wt.checkNumLabels() >= expected
+ })
watcher.Stop()
- ticker.Stop()
- testutil.Equals(t, seriesCount*10, wt.checkNumLabels())
+ testutil.Equals(t, expected, wt.checkNumLabels())
}
func Test_checkpoint_seriesReset(t *testing.T) {
@@ -366,21 +345,12 @@ func Test_checkpoint_seriesReset(t *testing.T) {
st := timestamp.FromTime(time.Now())
watcher := NewWALWatcher(nil, "", wt, dir, st)
go watcher.Start()
- i := 0
- ticker := time.NewTicker(100 * time.Millisecond)
- for range ticker.C {
- if wt.checkNumLabels() >= seriesCount*10*2 {
- break
- }
- i++
- if i >= 50 {
- break
- }
- }
+ expected := seriesCount * 10
+ retry(t, defaultRetryInterval, defaultRetries, func() bool {
+ return wt.checkNumLabels() >= expected
+ })
watcher.Stop()
- ticker.Stop()
testutil.Equals(t, seriesCount*10, wt.checkNumLabels())
// If you modify the checkpoint and truncate segment #'s run the test to see how
@@ -411,7 +381,7 @@ func Test_decodeRecord(t *testing.T) {
watcher.decodeRecord(buf)
testutil.Ok(t, err)
- testutil.Equals(t, 1, len(wt.seriesLabels))
+ testutil.Equals(t, 1, wt.checkNumLabels())
// decode a samples record
buf = enc.Samples([]tsdb.RefSample{tsdb.RefSample{Ref: 100, T: 1, V: 1.0}, tsdb.RefSample{Ref: 100, T: 2, V: 2.0}}, nil)
@@ -436,7 +406,7 @@ func Test_decodeRecord_afterStart(t *testing.T) {
watcher.decodeRecord(buf)
testutil.Ok(t, err)
- testutil.Equals(t, 1, len(wt.seriesLabels))
+ testutil.Equals(t, 1, wt.checkNumLabels())
// decode a samples record
buf = enc.Samples([]tsdb.RefSample{tsdb.RefSample{Ref: 100, T: 1, V: 1.0}, tsdb.RefSample{Ref: 100, T: 2, V: 2.0}}, nil)