2018-09-07 14:26:04 -07:00
|
|
|
// Copyright 2018 The Prometheus Authors
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
2019-09-19 02:15:41 -07:00
|
|
|
package wal
|
2018-09-07 14:26:04 -07:00
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
|
|
|
"math/rand"
|
|
|
|
"os"
|
|
|
|
"path"
|
2019-01-18 12:31:36 -08:00
|
|
|
"sync"
|
2018-09-07 14:26:04 -07:00
|
|
|
"testing"
|
|
|
|
"time"
|
|
|
|
|
2021-06-11 09:17:59 -07:00
|
|
|
"github.com/go-kit/log"
|
2019-09-19 02:15:41 -07:00
|
|
|
"github.com/prometheus/client_golang/prometheus"
|
2020-10-29 02:43:23 -07:00
|
|
|
"github.com/stretchr/testify/require"
|
2020-10-22 02:00:08 -07:00
|
|
|
|
2021-11-08 06:23:17 -08:00
|
|
|
"github.com/prometheus/prometheus/model/labels"
|
2021-11-06 03:10:04 -07:00
|
|
|
"github.com/prometheus/prometheus/tsdb/chunks"
|
2019-09-19 02:15:41 -07:00
|
|
|
"github.com/prometheus/prometheus/tsdb/record"
|
2018-09-07 14:26:04 -07:00
|
|
|
)
|
|
|
|
|
2021-10-22 01:06:44 -07:00
|
|
|
var (
	// defaultRetryInterval is how long retry waits between attempts of f().
	defaultRetryInterval = 100 * time.Millisecond
	// defaultRetries bounds how many times retry re-invokes f().
	defaultRetries = 100
	// wMetrics is the shared Watcher metrics instance used by all tests in
	// this file, registered once against the default registerer.
	wMetrics = NewWatcherMetrics(prometheus.DefaultRegisterer)
)
|
2019-02-15 07:47:41 -08:00
|
|
|
|
|
|
|
// retry executes f() n times at each interval until it returns true.
|
|
|
|
func retry(t *testing.T, interval time.Duration, n int, f func() bool) {
|
|
|
|
t.Helper()
|
|
|
|
ticker := time.NewTicker(interval)
|
|
|
|
for i := 0; i <= n; i++ {
|
|
|
|
if f() {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
<-ticker.C
|
|
|
|
}
|
|
|
|
ticker.Stop()
|
|
|
|
t.Logf("function returned false")
|
|
|
|
}
|
|
|
|
|
2018-09-07 14:26:04 -07:00
|
|
|
// writeToMock is a WriteTo implementation used by the watcher tests. It
// counts how many samples and exemplars were appended and remembers, per
// series ref, the index of the segment/checkpoint it was last seen in.
type writeToMock struct {
	samplesAppended   int // total samples passed to Append
	exemplarsAppended int // total exemplars passed to AppendExemplars
	seriesLock        sync.Mutex
	// seriesSegmentIndexes maps series ref -> last segment index it was
	// stored/updated from; guarded by seriesLock.
	seriesSegmentIndexes map[chunks.HeadSeriesRef]int
}
|
|
|
|
|
2019-09-19 02:15:41 -07:00
|
|
|
func (wtm *writeToMock) Append(s []record.RefSample) bool {
|
2018-09-07 14:26:04 -07:00
|
|
|
wtm.samplesAppended += len(s)
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
2021-05-06 13:53:52 -07:00
|
|
|
func (wtm *writeToMock) AppendExemplars(e []record.RefExemplar) bool {
|
|
|
|
wtm.exemplarsAppended += len(e)
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
2019-09-19 02:15:41 -07:00
|
|
|
func (wtm *writeToMock) StoreSeries(series []record.RefSeries, index int) {
|
2021-07-27 13:21:48 -07:00
|
|
|
wtm.UpdateSeriesSegment(series, index)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (wtm *writeToMock) UpdateSeriesSegment(series []record.RefSeries, index int) {
|
2019-01-18 12:31:36 -08:00
|
|
|
wtm.seriesLock.Lock()
|
|
|
|
defer wtm.seriesLock.Unlock()
|
2019-02-15 07:47:41 -08:00
|
|
|
for _, s := range series {
|
|
|
|
wtm.seriesSegmentIndexes[s.Ref] = index
|
2018-09-07 14:26:04 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (wtm *writeToMock) SeriesReset(index int) {
|
|
|
|
// Check for series that are in segments older than the checkpoint
|
|
|
|
// that were not also present in the checkpoint.
|
2019-01-18 12:31:36 -08:00
|
|
|
wtm.seriesLock.Lock()
|
|
|
|
defer wtm.seriesLock.Unlock()
|
2018-09-07 14:26:04 -07:00
|
|
|
for k, v := range wtm.seriesSegmentIndexes {
|
|
|
|
if v < index {
|
|
|
|
delete(wtm.seriesSegmentIndexes, k)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-01-18 12:31:36 -08:00
|
|
|
func (wtm *writeToMock) checkNumLabels() int {
|
|
|
|
wtm.seriesLock.Lock()
|
|
|
|
defer wtm.seriesLock.Unlock()
|
2019-02-15 07:47:41 -08:00
|
|
|
return len(wtm.seriesSegmentIndexes)
|
2019-01-18 12:31:36 -08:00
|
|
|
}
|
|
|
|
|
2018-09-07 14:26:04 -07:00
|
|
|
func newWriteToMock() *writeToMock {
|
|
|
|
return &writeToMock{
|
2021-11-06 03:10:04 -07:00
|
|
|
seriesSegmentIndexes: make(map[chunks.HeadSeriesRef]int),
|
2018-09-07 14:26:04 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-04-09 02:52:44 -07:00
|
|
|
// TestTailSamples writes series, samples, and exemplars to a WAL, replays
// every segment through the Watcher's readSegment with tail=true, and checks
// that the mock WriteTo received the expected counts of each record type.
// Runs once with compression off and once with it on.
func TestTailSamples(t *testing.T) {
	pageSize := 32 * 1024
	const seriesCount = 10
	const samplesCount = 250
	const exemplarsCount = 25
	for _, compress := range []bool{false, true} {
		t.Run(fmt.Sprintf("compress=%t", compress), func(t *testing.T) {
			// Samples/exemplars are timestamped after "now" so that the
			// watcher's start time (set below) does not filter them out.
			now := time.Now()

			dir := t.TempDir()

			wdir := path.Join(dir, "wal")
			err := os.Mkdir(wdir, 0o777)
			require.NoError(t, err)

			enc := record.Encoder{}
			w, err := NewSize(nil, nil, wdir, 128*pageSize, compress)
			require.NoError(t, err)
			defer func() {
				require.NoError(t, w.Close())
			}()

			// Write to the initial segment then checkpoint.
			for i := 0; i < seriesCount; i++ {
				ref := i + 100
				series := enc.Series([]record.RefSeries{
					{
						Ref:    chunks.HeadSeriesRef(ref),
						Labels: labels.FromStrings("__name__", fmt.Sprintf("metric_%d", i)),
					},
				}, nil)
				require.NoError(t, w.Log(series))

				for j := 0; j < samplesCount; j++ {
					// Sample refs are random but bounded by the refs written so far.
					inner := rand.Intn(ref + 1)
					sample := enc.Samples([]record.RefSample{
						{
							Ref: chunks.HeadSeriesRef(inner),
							T:   now.UnixNano() + 1,
							V:   float64(i),
						},
					}, nil)
					require.NoError(t, w.Log(sample))
				}

				for j := 0; j < exemplarsCount; j++ {
					inner := rand.Intn(ref + 1)
					exemplar := enc.Exemplars([]record.RefExemplar{
						{
							Ref:    chunks.HeadSeriesRef(inner),
							T:      now.UnixNano() + 1,
							V:      float64(i),
							Labels: labels.FromStrings("traceID", fmt.Sprintf("trace-%d", inner)),
						},
					}, nil)
					require.NoError(t, w.Log(exemplar))
				}
			}

			// Start read after checkpoint, no more data written.
			first, last, err := Segments(w.Dir())
			require.NoError(t, err)

			wt := newWriteToMock()
			watcher := NewWatcher(wMetrics, nil, nil, "", wt, dir, true)
			watcher.SetStartTime(now)

			// Set the Watcher's metrics so they're not nil pointers.
			watcher.setMetrics()
			// Replay every segment manually rather than starting the watcher
			// goroutine, so the test controls exactly what gets read.
			for i := first; i <= last; i++ {
				segment, err := OpenReadSegment(SegmentName(watcher.walDir, i))
				require.NoError(t, err)
				defer segment.Close()

				reader := NewLiveReader(nil, NewLiveReaderMetrics(nil), segment)
				// Use tail true so we can ensure we got the right number of samples.
				watcher.readSegment(reader, i, true)
			}

			expectedSeries := seriesCount
			expectedSamples := seriesCount * samplesCount
			expectedExemplars := seriesCount * exemplarsCount
			retry(t, defaultRetryInterval, defaultRetries, func() bool {
				return wt.checkNumLabels() >= expectedSeries
			})
			require.Equal(t, expectedSeries, wt.checkNumLabels(), "did not receive the expected number of series")
			require.Equal(t, expectedSamples, wt.samplesAppended, "did not receive the expected number of samples")
			require.Equal(t, expectedExemplars, wt.exemplarsAppended, "did not receive the expected number of exemplars")
		})
	}
}
|
|
|
|
|
2019-04-09 02:52:44 -07:00
|
|
|
// TestReadToEndNoCheckpoint writes randomly batched series/sample records to
// a single large WAL segment (no checkpoint), starts the watcher, and checks
// that it reads every series to the end of the WAL.
func TestReadToEndNoCheckpoint(t *testing.T) {
	pageSize := 32 * 1024
	const seriesCount = 10
	const samplesCount = 250

	for _, compress := range []bool{false, true} {
		t.Run(fmt.Sprintf("compress=%t", compress), func(t *testing.T) {
			dir := t.TempDir()
			wdir := path.Join(dir, "wal")
			err := os.Mkdir(wdir, 0o777)
			require.NoError(t, err)

			// 128 pages per segment keeps everything in one segment.
			w, err := NewSize(nil, nil, wdir, 128*pageSize, compress)
			require.NoError(t, err)
			defer func() {
				require.NoError(t, w.Close())
			}()

			var recs [][]byte

			enc := record.Encoder{}

			for i := 0; i < seriesCount; i++ {
				series := enc.Series([]record.RefSeries{
					{
						Ref:    chunks.HeadSeriesRef(i),
						Labels: labels.FromStrings("__name__", fmt.Sprintf("metric_%d", i)),
					},
				}, nil)
				recs = append(recs, series)
				for j := 0; j < samplesCount; j++ {
					sample := enc.Samples([]record.RefSample{
						{
							Ref: chunks.HeadSeriesRef(j),
							T:   int64(i),
							V:   float64(i),
						},
					}, nil)

					recs = append(recs, sample)

					// Randomly batch up records.
					if rand.Intn(4) < 3 {
						require.NoError(t, w.Log(recs...))
						recs = recs[:0]
					}
				}
			}
			// Flush whatever is left in the final batch.
			require.NoError(t, w.Log(recs...))

			_, _, err = Segments(w.Dir())
			require.NoError(t, err)

			wt := newWriteToMock()
			watcher := NewWatcher(wMetrics, nil, nil, "", wt, dir, false)
			go watcher.Start()

			expected := seriesCount
			retry(t, defaultRetryInterval, defaultRetries, func() bool {
				return wt.checkNumLabels() >= expected
			})
			watcher.Stop()
			require.Equal(t, expected, wt.checkNumLabels())
		})
	}
}
|
|
|
|
|
2019-04-09 02:52:44 -07:00
|
|
|
func TestReadToEndWithCheckpoint(t *testing.T) {
|
2019-02-19 20:03:41 -08:00
|
|
|
segmentSize := 32 * 1024
|
|
|
|
// We need something similar to this # of series and samples
|
|
|
|
// in order to get enough segments for us to checkpoint.
|
2018-09-07 14:26:04 -07:00
|
|
|
const seriesCount = 10
|
|
|
|
const samplesCount = 250
|
|
|
|
|
2019-07-03 06:23:13 -07:00
|
|
|
for _, compress := range []bool{false, true} {
|
|
|
|
t.Run(fmt.Sprintf("compress=%t", compress), func(t *testing.T) {
|
2021-10-31 23:58:18 -07:00
|
|
|
dir := t.TempDir()
|
2019-07-03 06:23:13 -07:00
|
|
|
|
|
|
|
wdir := path.Join(dir, "wal")
|
2021-10-22 01:06:44 -07:00
|
|
|
err := os.Mkdir(wdir, 0o777)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2019-07-03 06:23:13 -07:00
|
|
|
|
2019-09-19 02:15:41 -07:00
|
|
|
enc := record.Encoder{}
|
|
|
|
w, err := NewSize(nil, nil, wdir, segmentSize, compress)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2020-07-16 23:17:32 -07:00
|
|
|
defer func() {
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Close())
|
2020-07-16 23:17:32 -07:00
|
|
|
}()
|
2019-07-03 06:23:13 -07:00
|
|
|
|
|
|
|
// Write to the initial segment then checkpoint.
|
|
|
|
for i := 0; i < seriesCount; i++ {
|
|
|
|
ref := i + 100
|
2019-09-19 02:15:41 -07:00
|
|
|
series := enc.Series([]record.RefSeries{
|
2019-08-13 01:34:14 -07:00
|
|
|
{
|
2021-11-06 03:10:04 -07:00
|
|
|
Ref: chunks.HeadSeriesRef(ref),
|
2022-03-09 14:20:09 -08:00
|
|
|
Labels: labels.FromStrings("__name__", fmt.Sprintf("metric_%d", i)),
|
2019-07-03 06:23:13 -07:00
|
|
|
},
|
|
|
|
}, nil)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Log(series))
|
2020-10-05 02:09:59 -07:00
|
|
|
// Add in an unknown record type, which should be ignored.
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Log([]byte{255}))
|
2019-07-03 06:23:13 -07:00
|
|
|
|
|
|
|
for j := 0; j < samplesCount; j++ {
|
|
|
|
inner := rand.Intn(ref + 1)
|
2019-09-19 02:15:41 -07:00
|
|
|
sample := enc.Samples([]record.RefSample{
|
2019-08-13 01:34:14 -07:00
|
|
|
{
|
2021-11-06 03:10:04 -07:00
|
|
|
Ref: chunks.HeadSeriesRef(inner),
|
2019-07-03 06:23:13 -07:00
|
|
|
T: int64(i),
|
|
|
|
V: float64(i),
|
|
|
|
},
|
|
|
|
}, nil)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Log(sample))
|
2019-07-03 06:23:13 -07:00
|
|
|
}
|
|
|
|
}
|
2019-02-19 20:03:41 -08:00
|
|
|
|
2021-11-06 03:10:04 -07:00
|
|
|
Checkpoint(log.NewNopLogger(), w, 0, 1, func(x chunks.HeadSeriesRef) bool { return true }, 0)
|
2019-07-03 06:23:13 -07:00
|
|
|
w.Truncate(1)
|
|
|
|
|
|
|
|
// Write more records after checkpointing.
|
|
|
|
for i := 0; i < seriesCount; i++ {
|
2019-09-19 02:15:41 -07:00
|
|
|
series := enc.Series([]record.RefSeries{
|
2019-08-13 01:34:14 -07:00
|
|
|
{
|
2021-11-06 03:10:04 -07:00
|
|
|
Ref: chunks.HeadSeriesRef(i),
|
2022-03-09 14:20:09 -08:00
|
|
|
Labels: labels.FromStrings("__name__", fmt.Sprintf("metric_%d", i)),
|
2019-07-03 06:23:13 -07:00
|
|
|
},
|
|
|
|
}, nil)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Log(series))
|
2019-07-03 06:23:13 -07:00
|
|
|
|
|
|
|
for j := 0; j < samplesCount; j++ {
|
2019-09-19 02:15:41 -07:00
|
|
|
sample := enc.Samples([]record.RefSample{
|
2019-08-13 01:34:14 -07:00
|
|
|
{
|
2021-11-06 03:10:04 -07:00
|
|
|
Ref: chunks.HeadSeriesRef(j),
|
2019-07-03 06:23:13 -07:00
|
|
|
T: int64(i),
|
|
|
|
V: float64(i),
|
|
|
|
},
|
|
|
|
}, nil)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Log(sample))
|
2019-07-03 06:23:13 -07:00
|
|
|
}
|
|
|
|
}
|
2018-09-07 14:26:04 -07:00
|
|
|
|
2020-09-01 02:16:57 -07:00
|
|
|
_, _, err = Segments(w.Dir())
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2019-07-03 06:23:13 -07:00
|
|
|
wt := newWriteToMock()
|
2021-05-06 13:53:52 -07:00
|
|
|
watcher := NewWatcher(wMetrics, nil, nil, "", wt, dir, false)
|
2019-07-03 06:23:13 -07:00
|
|
|
go watcher.Start()
|
|
|
|
|
|
|
|
expected := seriesCount * 2
|
|
|
|
retry(t, defaultRetryInterval, defaultRetries, func() bool {
|
|
|
|
return wt.checkNumLabels() >= expected
|
|
|
|
})
|
|
|
|
watcher.Stop()
|
2020-10-29 02:43:23 -07:00
|
|
|
require.Equal(t, expected, wt.checkNumLabels())
|
2019-07-03 06:23:13 -07:00
|
|
|
})
|
|
|
|
}
|
2018-09-07 14:26:04 -07:00
|
|
|
}
|
|
|
|
|
2019-04-09 02:52:44 -07:00
|
|
|
func TestReadCheckpoint(t *testing.T) {
|
2018-09-07 14:26:04 -07:00
|
|
|
pageSize := 32 * 1024
|
|
|
|
const seriesCount = 10
|
|
|
|
const samplesCount = 250
|
|
|
|
|
2019-07-03 06:23:13 -07:00
|
|
|
for _, compress := range []bool{false, true} {
|
|
|
|
t.Run(fmt.Sprintf("compress=%t", compress), func(t *testing.T) {
|
2021-10-31 23:58:18 -07:00
|
|
|
dir := t.TempDir()
|
2019-07-03 06:23:13 -07:00
|
|
|
|
|
|
|
wdir := path.Join(dir, "wal")
|
2021-10-22 01:06:44 -07:00
|
|
|
err := os.Mkdir(wdir, 0o777)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2019-07-03 06:23:13 -07:00
|
|
|
|
2019-09-19 02:15:41 -07:00
|
|
|
os.Create(SegmentName(wdir, 30))
|
2019-07-03 06:23:13 -07:00
|
|
|
|
2019-09-19 02:15:41 -07:00
|
|
|
enc := record.Encoder{}
|
|
|
|
w, err := NewSize(nil, nil, wdir, 128*pageSize, compress)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2020-07-16 23:17:32 -07:00
|
|
|
defer func() {
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Close())
|
2020-07-16 23:17:32 -07:00
|
|
|
}()
|
2019-07-03 06:23:13 -07:00
|
|
|
|
|
|
|
// Write to the initial segment then checkpoint.
|
|
|
|
for i := 0; i < seriesCount; i++ {
|
|
|
|
ref := i + 100
|
2019-09-19 02:15:41 -07:00
|
|
|
series := enc.Series([]record.RefSeries{
|
2019-08-13 01:34:14 -07:00
|
|
|
{
|
2021-11-06 03:10:04 -07:00
|
|
|
Ref: chunks.HeadSeriesRef(ref),
|
2022-03-09 14:20:09 -08:00
|
|
|
Labels: labels.FromStrings("__name__", fmt.Sprintf("metric_%d", i)),
|
2019-07-03 06:23:13 -07:00
|
|
|
},
|
|
|
|
}, nil)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Log(series))
|
2019-07-03 06:23:13 -07:00
|
|
|
|
|
|
|
for j := 0; j < samplesCount; j++ {
|
|
|
|
inner := rand.Intn(ref + 1)
|
2019-09-19 02:15:41 -07:00
|
|
|
sample := enc.Samples([]record.RefSample{
|
2019-08-13 01:34:14 -07:00
|
|
|
{
|
2021-11-06 03:10:04 -07:00
|
|
|
Ref: chunks.HeadSeriesRef(inner),
|
2019-07-03 06:23:13 -07:00
|
|
|
T: int64(i),
|
|
|
|
V: float64(i),
|
|
|
|
},
|
|
|
|
}, nil)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Log(sample))
|
2019-07-03 06:23:13 -07:00
|
|
|
}
|
|
|
|
}
|
2021-11-06 03:10:04 -07:00
|
|
|
Checkpoint(log.NewNopLogger(), w, 30, 31, func(x chunks.HeadSeriesRef) bool { return true }, 0)
|
2019-07-03 06:23:13 -07:00
|
|
|
w.Truncate(32)
|
|
|
|
|
|
|
|
// Start read after checkpoint, no more data written.
|
2020-09-01 02:16:57 -07:00
|
|
|
_, _, err = Segments(w.Dir())
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2019-07-03 06:23:13 -07:00
|
|
|
|
|
|
|
wt := newWriteToMock()
|
2021-05-06 13:53:52 -07:00
|
|
|
watcher := NewWatcher(wMetrics, nil, nil, "", wt, dir, false)
|
2019-07-03 06:23:13 -07:00
|
|
|
go watcher.Start()
|
|
|
|
|
|
|
|
expectedSeries := seriesCount
|
|
|
|
retry(t, defaultRetryInterval, defaultRetries, func() bool {
|
|
|
|
return wt.checkNumLabels() >= expectedSeries
|
|
|
|
})
|
|
|
|
watcher.Stop()
|
2020-10-29 02:43:23 -07:00
|
|
|
require.Equal(t, expectedSeries, wt.checkNumLabels())
|
2019-07-03 06:23:13 -07:00
|
|
|
})
|
2018-09-07 14:26:04 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-04-09 02:52:44 -07:00
|
|
|
func TestReadCheckpointMultipleSegments(t *testing.T) {
|
|
|
|
pageSize := 32 * 1024
|
|
|
|
|
|
|
|
const segments = 1
|
|
|
|
const seriesCount = 20
|
|
|
|
const samplesCount = 300
|
|
|
|
|
2019-07-03 06:23:13 -07:00
|
|
|
for _, compress := range []bool{false, true} {
|
|
|
|
t.Run(fmt.Sprintf("compress=%t", compress), func(t *testing.T) {
|
2021-10-31 23:58:18 -07:00
|
|
|
dir := t.TempDir()
|
2019-07-03 06:23:13 -07:00
|
|
|
|
|
|
|
wdir := path.Join(dir, "wal")
|
2021-10-22 01:06:44 -07:00
|
|
|
err := os.Mkdir(wdir, 0o777)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2019-07-03 06:23:13 -07:00
|
|
|
|
2019-09-19 02:15:41 -07:00
|
|
|
enc := record.Encoder{}
|
|
|
|
w, err := NewSize(nil, nil, wdir, pageSize, compress)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2019-07-03 06:23:13 -07:00
|
|
|
|
|
|
|
// Write a bunch of data.
|
|
|
|
for i := 0; i < segments; i++ {
|
|
|
|
for j := 0; j < seriesCount; j++ {
|
|
|
|
ref := j + (i * 100)
|
2019-09-19 02:15:41 -07:00
|
|
|
series := enc.Series([]record.RefSeries{
|
2019-08-13 01:34:14 -07:00
|
|
|
{
|
2021-11-06 03:10:04 -07:00
|
|
|
Ref: chunks.HeadSeriesRef(ref),
|
2022-03-09 14:20:09 -08:00
|
|
|
Labels: labels.FromStrings("__name__", fmt.Sprintf("metric_%d", i)),
|
2019-07-03 06:23:13 -07:00
|
|
|
},
|
|
|
|
}, nil)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Log(series))
|
2019-07-03 06:23:13 -07:00
|
|
|
|
|
|
|
for k := 0; k < samplesCount; k++ {
|
|
|
|
inner := rand.Intn(ref + 1)
|
2019-09-19 02:15:41 -07:00
|
|
|
sample := enc.Samples([]record.RefSample{
|
2019-08-13 01:34:14 -07:00
|
|
|
{
|
2021-11-06 03:10:04 -07:00
|
|
|
Ref: chunks.HeadSeriesRef(inner),
|
2019-07-03 06:23:13 -07:00
|
|
|
T: int64(i),
|
|
|
|
V: float64(i),
|
|
|
|
},
|
|
|
|
}, nil)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Log(sample))
|
2019-07-03 06:23:13 -07:00
|
|
|
}
|
|
|
|
}
|
2019-04-09 02:52:44 -07:00
|
|
|
}
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, w.Close())
|
2019-04-09 02:52:44 -07:00
|
|
|
|
2019-07-03 06:23:13 -07:00
|
|
|
// At this point we should have at least 6 segments, lets create a checkpoint dir of the first 5.
|
|
|
|
checkpointDir := dir + "/wal/checkpoint.000004"
|
2021-10-22 01:06:44 -07:00
|
|
|
err = os.Mkdir(checkpointDir, 0o777)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2019-07-03 06:23:13 -07:00
|
|
|
for i := 0; i <= 4; i++ {
|
2019-09-19 02:15:41 -07:00
|
|
|
err := os.Rename(SegmentName(dir+"/wal", i), SegmentName(checkpointDir, i))
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2019-07-03 06:23:13 -07:00
|
|
|
}
|
2019-04-09 02:52:44 -07:00
|
|
|
|
2019-07-03 06:23:13 -07:00
|
|
|
wt := newWriteToMock()
|
2021-05-06 13:53:52 -07:00
|
|
|
watcher := NewWatcher(wMetrics, nil, nil, "", wt, dir, false)
|
2019-09-19 02:15:41 -07:00
|
|
|
watcher.MaxSegment = -1
|
2019-04-09 02:52:44 -07:00
|
|
|
|
2019-07-03 06:23:13 -07:00
|
|
|
// Set the Watcher's metrics so they're not nil pointers.
|
|
|
|
watcher.setMetrics()
|
2019-04-23 01:49:17 -07:00
|
|
|
|
2019-09-19 02:15:41 -07:00
|
|
|
lastCheckpoint, _, err := LastCheckpoint(watcher.walDir)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2019-04-09 02:52:44 -07:00
|
|
|
|
2021-07-27 13:21:48 -07:00
|
|
|
err = watcher.readCheckpoint(lastCheckpoint, (*Watcher).readSegment)
|
2020-10-29 02:43:23 -07:00
|
|
|
require.NoError(t, err)
|
2019-07-03 06:23:13 -07:00
|
|
|
})
|
|
|
|
}
|
2019-04-09 02:52:44 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// TestCheckpointSeriesReset verifies that after a checkpoint + truncation the
// watcher garbage-collects series records that lived only in the dropped
// segments. The expected surviving series count differs with compression
// because compressed records pack into segments differently.
func TestCheckpointSeriesReset(t *testing.T) {
	segmentSize := 32 * 1024
	// We need something similar to this # of series and samples
	// in order to get enough segments for us to checkpoint.
	const seriesCount = 20
	const samplesCount = 350
	testCases := []struct {
		compress bool
		segments int // expected series left after garbage collection
	}{
		{compress: false, segments: 14},
		{compress: true, segments: 13},
	}

	for _, tc := range testCases {
		t.Run(fmt.Sprintf("compress=%t", tc.compress), func(t *testing.T) {
			dir := t.TempDir()

			wdir := path.Join(dir, "wal")
			err := os.Mkdir(wdir, 0o777)
			require.NoError(t, err)

			enc := record.Encoder{}
			w, err := NewSize(nil, nil, wdir, segmentSize, tc.compress)
			require.NoError(t, err)
			defer func() {
				require.NoError(t, w.Close())
			}()

			// Write to the initial segment, then checkpoint later.
			for i := 0; i < seriesCount; i++ {
				ref := i + 100
				series := enc.Series([]record.RefSeries{
					{
						Ref:    chunks.HeadSeriesRef(ref),
						Labels: labels.FromStrings("__name__", fmt.Sprintf("metric_%d", i)),
					},
				}, nil)
				require.NoError(t, w.Log(series))

				for j := 0; j < samplesCount; j++ {
					inner := rand.Intn(ref + 1)
					sample := enc.Samples([]record.RefSample{
						{
							Ref: chunks.HeadSeriesRef(inner),
							T:   int64(i),
							V:   float64(i),
						},
					}, nil)
					require.NoError(t, w.Log(sample))
				}
			}

			_, _, err = Segments(w.Dir())
			require.NoError(t, err)

			wt := newWriteToMock()
			watcher := NewWatcher(wMetrics, nil, nil, "", wt, dir, false)
			watcher.MaxSegment = -1
			go watcher.Start()

			// Wait until the watcher has seen every series before checkpointing.
			expected := seriesCount
			retry(t, defaultRetryInterval, defaultRetries, func() bool {
				return wt.checkNumLabels() >= expected
			})
			require.Equal(t, seriesCount, wt.checkNumLabels())

			_, err = Checkpoint(log.NewNopLogger(), w, 2, 4, func(x chunks.HeadSeriesRef) bool { return true }, 0)
			require.NoError(t, err)

			err = w.Truncate(5)
			require.NoError(t, err)

			_, cpi, err := LastCheckpoint(path.Join(dir, "wal"))
			require.NoError(t, err)
			// GC everything strictly older than the first post-checkpoint segment.
			err = watcher.garbageCollectSeries(cpi + 1)
			require.NoError(t, err)

			watcher.Stop()
			// If you modify the checkpoint and truncate segment #'s run the test to see how
			// many series records you end up with and change the last Equals check accordingly
			// or modify the Equals to Assert(len(wt.seriesLabels) < seriesCount*10)
			require.Equal(t, tc.segments, wt.checkNumLabels())
		})
	}
}
|