Name float values as "floats", not as "values"

In the past, every sample value was a float, so it was fine to call a
variable holding such a float "value" or "sample". With native
histograms, a sample might have a histogram value. And a histogram
value is still a value. Calling a float value just "value" or "sample"
or "V" is therefore misleading. Over the last few commits, I already
renamed many variables, but this cleans up a few more places where the
changes are more invasive.

Note that we do not attempt any renaming in the JSON APIs or in the
protobufs. That would be quite a disruption. However, internally, we
can name variables as we want, and we should go with the option that
avoids misunderstandings.

Signed-off-by: beorn7 <beorn@grafana.com>
This commit is contained in:
beorn7 2023-03-30 19:50:13 +02:00
parent 462240bc78
commit 817a2396cb
18 changed files with 313 additions and 313 deletions

View file

@ -70,7 +70,7 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod
samples = vals[0].(Matrix)[0] samples = vals[0].(Matrix)[0]
rangeStart = enh.Ts - durationMilliseconds(ms.Range+vs.Offset) rangeStart = enh.Ts - durationMilliseconds(ms.Range+vs.Offset)
rangeEnd = enh.Ts - durationMilliseconds(vs.Offset) rangeEnd = enh.Ts - durationMilliseconds(vs.Offset)
resultValue float64 resultFloat float64
resultHistogram *histogram.FloatHistogram resultHistogram *histogram.FloatHistogram
firstT, lastT int64 firstT, lastT int64
numSamplesMinusOne int numSamplesMinusOne int
@ -99,7 +99,7 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod
numSamplesMinusOne = len(samples.Floats) - 1 numSamplesMinusOne = len(samples.Floats) - 1
firstT = samples.Floats[0].T firstT = samples.Floats[0].T
lastT = samples.Floats[numSamplesMinusOne].T lastT = samples.Floats[numSamplesMinusOne].T
resultValue = samples.Floats[numSamplesMinusOne].F - samples.Floats[0].F resultFloat = samples.Floats[numSamplesMinusOne].F - samples.Floats[0].F
if !isCounter { if !isCounter {
break break
} }
@ -107,7 +107,7 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod
prevValue := samples.Floats[0].F prevValue := samples.Floats[0].F
for _, currPoint := range samples.Floats[1:] { for _, currPoint := range samples.Floats[1:] {
if currPoint.F < prevValue { if currPoint.F < prevValue {
resultValue += prevValue resultFloat += prevValue
} }
prevValue = currPoint.F prevValue = currPoint.F
} }
@ -124,14 +124,14 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod
averageDurationBetweenSamples := sampledInterval / float64(numSamplesMinusOne) averageDurationBetweenSamples := sampledInterval / float64(numSamplesMinusOne)
// TODO(beorn7): Do this for histograms, too. // TODO(beorn7): Do this for histograms, too.
if isCounter && resultValue > 0 && len(samples.Floats) > 0 && samples.Floats[0].F >= 0 { if isCounter && resultFloat > 0 && len(samples.Floats) > 0 && samples.Floats[0].F >= 0 {
// Counters cannot be negative. If we have any slope at all // Counters cannot be negative. If we have any slope at all
// (i.e. resultValue went up), we can extrapolate the zero point // (i.e. resultFloat went up), we can extrapolate the zero point
// of the counter. If the duration to the zero point is shorter // of the counter. If the duration to the zero point is shorter
// than the durationToStart, we take the zero point as the start // than the durationToStart, we take the zero point as the start
// of the series, thereby avoiding extrapolation to negative // of the series, thereby avoiding extrapolation to negative
// counter values. // counter values.
durationToZero := sampledInterval * (samples.Floats[0].F / resultValue) durationToZero := sampledInterval * (samples.Floats[0].F / resultFloat)
if durationToZero < durationToStart { if durationToZero < durationToStart {
durationToStart = durationToZero durationToStart = durationToZero
} }
@ -159,12 +159,12 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod
factor /= ms.Range.Seconds() factor /= ms.Range.Seconds()
} }
if resultHistogram == nil { if resultHistogram == nil {
resultValue *= factor resultFloat *= factor
} else { } else {
resultHistogram.Scale(factor) resultHistogram.Scale(factor)
} }
return append(enh.Out, Sample{F: resultValue, H: resultHistogram}) return append(enh.Out, Sample{F: resultFloat, H: resultHistogram})
} }
// histogramRate is a helper function for extrapolatedRate. It requires // histogramRate is a helper function for extrapolatedRate. It requires
@ -418,10 +418,10 @@ func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper
toNearestInverse := 1.0 / toNearest toNearestInverse := 1.0 / toNearest
for _, el := range vec { for _, el := range vec {
v := math.Floor(el.F*toNearestInverse+0.5) / toNearestInverse f := math.Floor(el.F*toNearestInverse+0.5) / toNearestInverse
enh.Out = append(enh.Out, Sample{ enh.Out = append(enh.Out, Sample{
Metric: enh.DropMetricName(el.Metric), Metric: enh.DropMetricName(el.Metric),
F: v, F: f,
}) })
} }
return enh.Out return enh.Out

View file

@ -173,7 +173,7 @@ func (s fSample) T() int64 {
return s.t return s.t
} }
func (s fSample) V() float64 { func (s fSample) F() float64 {
return s.f return s.f
} }
@ -198,7 +198,7 @@ func (s hSample) T() int64 {
return s.t return s.t
} }
func (s hSample) V() float64 { func (s hSample) F() float64 {
panic("F() called for hSample") panic("F() called for hSample")
} }
@ -223,7 +223,7 @@ func (s fhSample) T() int64 {
return s.t return s.t
} }
func (s fhSample) V() float64 { func (s fhSample) F() float64 {
panic("F() called for fhSample") panic("F() called for fhSample")
} }
@ -337,7 +337,7 @@ func (it *sampleRingIterator) Next() chunkenc.ValueType {
it.fh = s.FH() it.fh = s.FH()
return chunkenc.ValFloatHistogram return chunkenc.ValFloatHistogram
default: default:
it.f = s.V() it.f = s.F()
return chunkenc.ValFloat return chunkenc.ValFloat
} }
} }

View file

@ -73,7 +73,7 @@ func TestSampleRing(t *testing.T) {
for _, sold := range input[:i] { for _, sold := range input[:i] {
found := false found := false
for _, bs := range buffered { for _, bs := range buffered {
if bs.T() == sold.t && bs.V() == sold.f { if bs.T() == sold.t && bs.F() == sold.f {
found = true found = true
break break
} }
@ -110,7 +110,7 @@ func TestBufferedSeriesIterator(t *testing.T) {
s, ok := it.PeekBack(1) s, ok := it.PeekBack(1)
require.Equal(t, eok, ok, "exist mismatch") require.Equal(t, eok, ok, "exist mismatch")
require.Equal(t, ets, s.T(), "timestamp mismatch") require.Equal(t, ets, s.T(), "timestamp mismatch")
require.Equal(t, ev, s.V(), "value mismatch") require.Equal(t, ev, s.F(), "value mismatch")
} }
it = NewBufferIterator(NewListSeriesIterator(samples{ it = NewBufferIterator(NewListSeriesIterator(samples{

View file

@ -109,7 +109,7 @@ func (it *listSeriesIterator) Reset(samples Samples) {
func (it *listSeriesIterator) At() (int64, float64) { func (it *listSeriesIterator) At() (int64, float64) {
s := it.samples.Get(it.idx) s := it.samples.Get(it.idx)
return s.T(), s.V() return s.T(), s.F()
} }
func (it *listSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { func (it *listSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {

View file

@ -133,13 +133,13 @@ func TestCommit(t *testing.T) {
for i := 0; i < numDatapoints; i++ { for i := 0; i < numDatapoints; i++ {
sample := tsdbutil.GenerateSamples(0, 1) sample := tsdbutil.GenerateSamples(0, 1)
ref, err := app.Append(0, lset, sample[0].T(), sample[0].V()) ref, err := app.Append(0, lset, sample[0].T(), sample[0].F())
require.NoError(t, err) require.NoError(t, err)
e := exemplar.Exemplar{ e := exemplar.Exemplar{
Labels: lset, Labels: lset,
Ts: sample[0].T() + int64(i), Ts: sample[0].T() + int64(i),
Value: sample[0].V(), Value: sample[0].F(),
HasTs: true, HasTs: true,
} }
_, err = app.AppendExemplar(ref, lset, e) _, err = app.AppendExemplar(ref, lset, e)
@ -248,7 +248,7 @@ func TestRollback(t *testing.T) {
for i := 0; i < numDatapoints; i++ { for i := 0; i < numDatapoints; i++ {
sample := tsdbutil.GenerateSamples(0, 1) sample := tsdbutil.GenerateSamples(0, 1)
_, err := app.Append(0, lset, sample[0].T(), sample[0].V()) _, err := app.Append(0, lset, sample[0].T(), sample[0].F())
require.NoError(t, err) require.NoError(t, err)
} }
} }

View file

@ -353,14 +353,14 @@ func TestReadIndexFormatV1(t *testing.T) {
q, err := NewBlockQuerier(block, 0, 1000) q, err := NewBlockQuerier(block, 0, 1000)
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, query(t, q, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")), require.Equal(t, query(t, q, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")),
map[string][]tsdbutil.Sample{`{foo="bar"}`: {sample{t: 1, v: 2}}}) map[string][]tsdbutil.Sample{`{foo="bar"}`: {sample{t: 1, f: 2}}})
q, err = NewBlockQuerier(block, 0, 1000) q, err = NewBlockQuerier(block, 0, 1000)
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, query(t, q, labels.MustNewMatcher(labels.MatchNotRegexp, "foo", "^.?$")), require.Equal(t, query(t, q, labels.MustNewMatcher(labels.MatchNotRegexp, "foo", "^.?$")),
map[string][]tsdbutil.Sample{ map[string][]tsdbutil.Sample{
`{foo="bar"}`: {sample{t: 1, v: 2}}, `{foo="bar"}`: {sample{t: 1, f: 2}},
`{foo="baz"}`: {sample{t: 3, v: 4}}, `{foo="baz"}`: {sample{t: 3, f: 4}},
}) })
} }
@ -568,7 +568,7 @@ func createHeadWithOOOSamples(tb testing.TB, w *wlog.WL, series []storage.Series
count++ count++
t, v := it.At() t, v := it.At()
if count%oooSampleFrequency == 0 { if count%oooSampleFrequency == 0 {
os = append(os, sample{t: t, v: v}) os = append(os, sample{t: t, f: v})
continue continue
} }
ref, err = app.Append(ref, lset, t, v) ref, err = app.Append(ref, lset, t, v)
@ -589,7 +589,7 @@ func createHeadWithOOOSamples(tb testing.TB, w *wlog.WL, series []storage.Series
for i, lset := range oooSampleLabels { for i, lset := range oooSampleLabels {
ref := storage.SeriesRef(0) ref := storage.SeriesRef(0)
for _, sample := range oooSamples[i] { for _, sample := range oooSamples[i] {
ref, err = app.Append(ref, lset, sample.T(), sample.V()) ref, err = app.Append(ref, lset, sample.T(), sample.F())
require.NoError(tb, err) require.NoError(tb, err)
oooSamplesAppended++ oooSamplesAppended++
} }
@ -613,7 +613,7 @@ const (
// genSeries generates series of float64 samples with a given number of labels and values. // genSeries generates series of float64 samples with a given number of labels and values.
func genSeries(totalSeries, labelCount int, mint, maxt int64) []storage.Series { func genSeries(totalSeries, labelCount int, mint, maxt int64) []storage.Series {
return genSeriesFromSampleGenerator(totalSeries, labelCount, mint, maxt, 1, func(ts int64) tsdbutil.Sample { return genSeriesFromSampleGenerator(totalSeries, labelCount, mint, maxt, 1, func(ts int64) tsdbutil.Sample {
return sample{t: ts, v: rand.Float64()} return sample{t: ts, f: rand.Float64()}
}) })
} }
@ -657,7 +657,7 @@ func genHistogramAndFloatSeries(totalSeries, labelCount int, mint, maxt, step in
count++ count++
var s sample var s sample
if floatSample { if floatSample {
s = sample{t: ts, v: rand.Float64()} s = sample{t: ts, f: rand.Float64()}
} else { } else {
h := &histogram.Histogram{ h := &histogram.Histogram{
Count: 5 + uint64(ts*4), Count: 5 + uint64(ts*4),
@ -729,7 +729,7 @@ func populateSeries(lbls []map[string]string, mint, maxt int64) []storage.Series
} }
samples := make([]tsdbutil.Sample, 0, maxt-mint+1) samples := make([]tsdbutil.Sample, 0, maxt-mint+1)
for t := mint; t <= maxt; t++ { for t := mint; t <= maxt; t++ {
samples = append(samples, sample{t: t, v: rand.Float64()}) samples = append(samples, sample{t: t, f: rand.Float64()})
} }
series = append(series, storage.NewListSeries(labels.FromMap(lbl), samples)) series = append(series, storage.NewListSeries(labels.FromMap(lbl), samples))
} }

View file

@ -52,8 +52,8 @@ func TestBlockWriter(t *testing.T) {
q, err := NewBlockQuerier(b, math.MinInt64, math.MaxInt64) q, err := NewBlockQuerier(b, math.MinInt64, math.MaxInt64)
require.NoError(t, err) require.NoError(t, err)
series := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "", ".*")) series := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))
sample1 := []tsdbutil.Sample{sample{t: ts1, v: v1}} sample1 := []tsdbutil.Sample{sample{t: ts1, f: v1}}
sample2 := []tsdbutil.Sample{sample{t: ts2, v: v2}} sample2 := []tsdbutil.Sample{sample{t: ts2, f: v2}}
expectedSeries := map[string][]tsdbutil.Sample{"{a=\"b\"}": sample1, "{c=\"d\"}": sample2} expectedSeries := map[string][]tsdbutil.Sample{"{a=\"b\"}": sample1, "{c=\"d\"}": sample2}
require.Equal(t, expectedSeries, series) require.Equal(t, expectedSeries, series)

View file

@ -975,7 +975,7 @@ func TestCompaction_populateBlock(t *testing.T) {
s sample s sample
) )
for iter.Next() == chunkenc.ValFloat { for iter.Next() == chunkenc.ValFloat {
s.t, s.v = iter.At() s.t, s.f = iter.At()
if firstTs == math.MaxInt64 { if firstTs == math.MaxInt64 {
firstTs = s.t firstTs = s.t
} }
@ -1350,7 +1350,7 @@ func TestHeadCompactionWithHistograms(t *testing.T) {
for tsMinute := from; tsMinute <= to; tsMinute++ { for tsMinute := from; tsMinute <= to; tsMinute++ {
_, err := app.Append(0, lbls, minute(tsMinute), float64(tsMinute)) _, err := app.Append(0, lbls, minute(tsMinute), float64(tsMinute))
require.NoError(t, err) require.NoError(t, err)
*exp = append(*exp, sample{t: minute(tsMinute), v: float64(tsMinute)}) *exp = append(*exp, sample{t: minute(tsMinute), f: float64(tsMinute)})
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
} }

View file

@ -104,7 +104,7 @@ func query(t testing.TB, q storage.Querier, matchers ...*labels.Matcher) map[str
switch typ { switch typ {
case chunkenc.ValFloat: case chunkenc.ValFloat:
ts, v := it.At() ts, v := it.At()
samples = append(samples, sample{t: ts, v: v}) samples = append(samples, sample{t: ts, f: v})
case chunkenc.ValHistogram: case chunkenc.ValHistogram:
ts, h := it.AtHistogram() ts, h := it.AtHistogram()
samples = append(samples, sample{t: ts, h: h}) samples = append(samples, sample{t: ts, h: h})
@ -233,7 +233,7 @@ func TestDataAvailableOnlyAfterCommit(t *testing.T) {
seriesSet = query(t, querier, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")) seriesSet = query(t, querier, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar"))
require.Equal(t, map[string][]tsdbutil.Sample{`{foo="bar"}`: {sample{t: 0, v: 0}}}, seriesSet) require.Equal(t, map[string][]tsdbutil.Sample{`{foo="bar"}`: {sample{t: 0, f: 0}}}, seriesSet)
} }
// TestNoPanicAfterWALCorruption ensures that querying the db after a WAL corruption doesn't cause a panic. // TestNoPanicAfterWALCorruption ensures that querying the db after a WAL corruption doesn't cause a panic.
@ -251,7 +251,7 @@ func TestNoPanicAfterWALCorruption(t *testing.T) {
for i := 0; i < 121; i++ { for i := 0; i < 121; i++ {
app := db.Appender(ctx) app := db.Appender(ctx)
_, err := app.Append(0, labels.FromStrings("foo", "bar"), maxt, 0) _, err := app.Append(0, labels.FromStrings("foo", "bar"), maxt, 0)
expSamples = append(expSamples, sample{t: maxt, v: 0}) expSamples = append(expSamples, sample{t: maxt, f: 0})
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
maxt++ maxt++
@ -364,11 +364,11 @@ func TestDBAppenderAddRef(t *testing.T) {
require.Equal(t, map[string][]tsdbutil.Sample{ require.Equal(t, map[string][]tsdbutil.Sample{
labels.FromStrings("a", "b").String(): { labels.FromStrings("a", "b").String(): {
sample{t: 123, v: 0}, sample{t: 123, f: 0},
sample{t: 124, v: 1}, sample{t: 124, f: 1},
sample{t: 125, v: 0}, sample{t: 125, f: 0},
sample{t: 133, v: 1}, sample{t: 133, f: 1},
sample{t: 143, v: 2}, sample{t: 143, f: 2},
}, },
}, res) }, res)
} }
@ -1740,7 +1740,7 @@ func expandSeriesSet(ss storage.SeriesSet) ([]labels.Labels, map[string][]sample
it = series.Iterator(it) it = series.Iterator(it)
for it.Next() == chunkenc.ValFloat { for it.Next() == chunkenc.ValFloat {
t, v := it.At() t, v := it.At()
samples = append(samples, sample{t: t, v: v}) samples = append(samples, sample{t: t, f: v})
} }
resultLabels = append(resultLabels, series.Labels()) resultLabels = append(resultLabels, series.Labels())
resultSamples[series.Labels().String()] = samples resultSamples[series.Labels().String()] = samples
@ -2617,7 +2617,7 @@ func TestDBCannotSeePartialCommits(t *testing.T) {
values := map[float64]struct{}{} values := map[float64]struct{}{}
for _, series := range seriesSet { for _, series := range seriesSet {
values[series[len(series)-1].v] = struct{}{} values[series[len(series)-1].f] = struct{}{}
} }
if len(values) != 1 { if len(values) != 1 {
inconsistencies++ inconsistencies++
@ -2693,7 +2693,7 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) {
_, seriesSet, ws, err = expandSeriesSet(ss) _, seriesSet, ws, err = expandSeriesSet(ss)
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, 0, len(ws)) require.Equal(t, 0, len(ws))
require.Equal(t, map[string][]sample{`{foo="bar"}`: {{t: 0, v: 0}}}, seriesSet) require.Equal(t, map[string][]sample{`{foo="bar"}`: {{t: 0, f: 0}}}, seriesSet)
} }
// TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and // TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and
@ -4575,7 +4575,7 @@ func Test_Querier_OOOQuery(t *testing.T) {
for min := fromMins; min <= toMins; min += time.Minute.Milliseconds() { for min := fromMins; min <= toMins; min += time.Minute.Milliseconds() {
_, err := app.Append(0, series1, min, float64(min)) _, err := app.Append(0, series1, min, float64(min))
if min >= queryMinT && min <= queryMaxT { if min >= queryMinT && min <= queryMaxT {
expSamples = append(expSamples, sample{t: min, v: float64(min)}) expSamples = append(expSamples, sample{t: min, f: float64(min)})
} }
require.NoError(t, err) require.NoError(t, err)
totalAppended++ totalAppended++
@ -4660,7 +4660,7 @@ func Test_ChunkQuerier_OOOQuery(t *testing.T) {
for min := fromMins; min <= toMins; min += time.Minute.Milliseconds() { for min := fromMins; min <= toMins; min += time.Minute.Milliseconds() {
_, err := app.Append(0, series1, min, float64(min)) _, err := app.Append(0, series1, min, float64(min))
if min >= queryMinT && min <= queryMaxT { if min >= queryMinT && min <= queryMaxT {
expSamples = append(expSamples, sample{t: min, v: float64(min)}) expSamples = append(expSamples, sample{t: min, f: float64(min)})
} }
require.NoError(t, err) require.NoError(t, err)
totalAppended++ totalAppended++
@ -4730,7 +4730,7 @@ func Test_ChunkQuerier_OOOQuery(t *testing.T) {
it := chunk.Chunk.Iterator(nil) it := chunk.Chunk.Iterator(nil)
for it.Next() == chunkenc.ValFloat { for it.Next() == chunkenc.ValFloat {
ts, v := it.At() ts, v := it.At()
gotSamples = append(gotSamples, sample{t: ts, v: v}) gotSamples = append(gotSamples, sample{t: ts, f: v})
} }
} }
require.Equal(t, expSamples, gotSamples) require.Equal(t, expSamples, gotSamples)
@ -4766,7 +4766,7 @@ func TestOOOAppendAndQuery(t *testing.T) {
require.Error(t, err) require.Error(t, err)
} else { } else {
require.NoError(t, err) require.NoError(t, err)
appendedSamples[key] = append(appendedSamples[key], sample{t: min, v: val}) appendedSamples[key] = append(appendedSamples[key], sample{t: min, f: val})
totalSamples++ totalSamples++
} }
} }
@ -4889,7 +4889,7 @@ func TestOOODisabled(t *testing.T) {
failedSamples++ failedSamples++
} else { } else {
require.NoError(t, err) require.NoError(t, err)
expSamples[key] = append(expSamples[key], sample{t: min, v: val}) expSamples[key] = append(expSamples[key], sample{t: min, f: val})
totalSamples++ totalSamples++
} }
} }
@ -4952,7 +4952,7 @@ func TestWBLAndMmapReplay(t *testing.T) {
val := rand.Float64() val := rand.Float64()
_, err := app.Append(0, lbls, min, val) _, err := app.Append(0, lbls, min, val)
require.NoError(t, err) require.NoError(t, err)
expSamples[key] = append(expSamples[key], sample{t: min, v: val}) expSamples[key] = append(expSamples[key], sample{t: min, f: val})
totalSamples++ totalSamples++
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
@ -4995,7 +4995,7 @@ func TestWBLAndMmapReplay(t *testing.T) {
it := chk.Iterator(nil) it := chk.Iterator(nil)
for it.Next() == chunkenc.ValFloat { for it.Next() == chunkenc.ValFloat {
ts, val := it.At() ts, val := it.At()
s1MmapSamples = append(s1MmapSamples, sample{t: ts, v: val}) s1MmapSamples = append(s1MmapSamples, sample{t: ts, f: val})
} }
} }
require.Greater(t, len(s1MmapSamples), 0) require.Greater(t, len(s1MmapSamples), 0)
@ -5273,9 +5273,9 @@ func TestWBLCorruption(t *testing.T) {
ts := min * time.Minute.Milliseconds() ts := min * time.Minute.Milliseconds()
_, err := app.Append(0, series1, ts, float64(ts)) _, err := app.Append(0, series1, ts, float64(ts))
require.NoError(t, err) require.NoError(t, err)
allSamples = append(allSamples, sample{t: ts, v: float64(ts)}) allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
if afterRestart { if afterRestart {
expAfterRestart = append(expAfterRestart, sample{t: ts, v: float64(ts)}) expAfterRestart = append(expAfterRestart, sample{t: ts, f: float64(ts)})
} }
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
@ -5419,9 +5419,9 @@ func TestOOOMmapCorruption(t *testing.T) {
ts := min * time.Minute.Milliseconds() ts := min * time.Minute.Milliseconds()
_, err := app.Append(0, series1, ts, float64(ts)) _, err := app.Append(0, series1, ts, float64(ts))
require.NoError(t, err) require.NoError(t, err)
allSamples = append(allSamples, sample{t: ts, v: float64(ts)}) allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
if inMmapAfterCorruption { if inMmapAfterCorruption {
expInMmapChunks = append(expInMmapChunks, sample{t: ts, v: float64(ts)}) expInMmapChunks = append(expInMmapChunks, sample{t: ts, f: float64(ts)})
} }
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
@ -5555,7 +5555,7 @@ func TestOutOfOrderRuntimeConfig(t *testing.T) {
_, err := app.Append(0, series1, ts, float64(ts)) _, err := app.Append(0, series1, ts, float64(ts))
if success { if success {
require.NoError(t, err) require.NoError(t, err)
allSamples = append(allSamples, sample{t: ts, v: float64(ts)}) allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
} else { } else {
require.Error(t, err) require.Error(t, err)
} }
@ -5769,7 +5769,7 @@ func TestNoGapAfterRestartWithOOO(t *testing.T) {
var expSamples []tsdbutil.Sample var expSamples []tsdbutil.Sample
for min := fromMins; min <= toMins; min++ { for min := fromMins; min <= toMins; min++ {
ts := min * time.Minute.Milliseconds() ts := min * time.Minute.Milliseconds()
expSamples = append(expSamples, sample{t: ts, v: float64(ts)}) expSamples = append(expSamples, sample{t: ts, f: float64(ts)})
} }
expRes := map[string][]tsdbutil.Sample{ expRes := map[string][]tsdbutil.Sample{
@ -5876,7 +5876,7 @@ func TestWblReplayAfterOOODisableAndRestart(t *testing.T) {
ts := min * time.Minute.Milliseconds() ts := min * time.Minute.Milliseconds()
_, err := app.Append(0, series1, ts, float64(ts)) _, err := app.Append(0, series1, ts, float64(ts))
require.NoError(t, err) require.NoError(t, err)
allSamples = append(allSamples, sample{t: ts, v: float64(ts)}) allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
} }
@ -5935,7 +5935,7 @@ func TestPanicOnApplyConfig(t *testing.T) {
ts := min * time.Minute.Milliseconds() ts := min * time.Minute.Milliseconds()
_, err := app.Append(0, series1, ts, float64(ts)) _, err := app.Append(0, series1, ts, float64(ts))
require.NoError(t, err) require.NoError(t, err)
allSamples = append(allSamples, sample{t: ts, v: float64(ts)}) allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
} }
@ -5983,7 +5983,7 @@ func TestDiskFillingUpAfterDisablingOOO(t *testing.T) {
ts := min * time.Minute.Milliseconds() ts := min * time.Minute.Milliseconds()
_, err := app.Append(0, series1, ts, float64(ts)) _, err := app.Append(0, series1, ts, float64(ts))
require.NoError(t, err) require.NoError(t, err)
allSamples = append(allSamples, sample{t: ts, v: float64(ts)}) allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
} }
@ -6090,7 +6090,7 @@ func testHistogramAppendAndQueryHelper(t *testing.T, floatHistogram bool) {
_, err := app.Append(0, lbls, minute(tsMinute), val) _, err := app.Append(0, lbls, minute(tsMinute), val)
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
*exp = append(*exp, sample{t: minute(tsMinute), v: val}) *exp = append(*exp, sample{t: minute(tsMinute), f: val})
} }
testQuery := func(name, value string, exp map[string][]tsdbutil.Sample) { testQuery := func(name, value string, exp map[string][]tsdbutil.Sample) {
@ -6346,7 +6346,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) {
switch typ { switch typ {
case chunkenc.ValFloat: case chunkenc.ValFloat:
ts, v := it.At() ts, v := it.At()
slice = append(slice, sample{t: ts, v: v}) slice = append(slice, sample{t: ts, f: v})
case chunkenc.ValHistogram: case chunkenc.ValHistogram:
ts, h := it.AtHistogram() ts, h := it.AtHistogram()
slice = append(slice, sample{t: ts, h: h}) slice = append(slice, sample{t: ts, h: h})
@ -6418,7 +6418,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) {
testBlockQuerying(t, testBlockQuerying(t,
genHistogramSeries(10, 5, minute(0), minute(119), minute(1), floatHistogram), genHistogramSeries(10, 5, minute(0), minute(119), minute(1), floatHistogram),
genSeriesFromSampleGenerator(10, 5, minute(120), minute(239), minute(1), func(ts int64) tsdbutil.Sample { genSeriesFromSampleGenerator(10, 5, minute(120), minute(239), minute(1), func(ts int64) tsdbutil.Sample {
return sample{t: ts, v: rand.Float64()} return sample{t: ts, f: rand.Float64()}
}), }),
genHistogramSeries(10, 5, minute(240), minute(359), minute(1), floatHistogram), genHistogramSeries(10, 5, minute(240), minute(359), minute(1), floatHistogram),
) )
@ -6430,7 +6430,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) {
genHistogramSeries(10, 5, minute(61), minute(120), minute(1), floatHistogram), genHistogramSeries(10, 5, minute(61), minute(120), minute(1), floatHistogram),
genHistogramAndFloatSeries(10, 5, minute(121), minute(180), minute(1), floatHistogram), genHistogramAndFloatSeries(10, 5, minute(121), minute(180), minute(1), floatHistogram),
genSeriesFromSampleGenerator(10, 5, minute(181), minute(240), minute(1), func(ts int64) tsdbutil.Sample { genSeriesFromSampleGenerator(10, 5, minute(181), minute(240), minute(1), func(ts int64) tsdbutil.Sample {
return sample{t: ts, v: rand.Float64()} return sample{t: ts, f: rand.Float64()}
}), }),
) )
}) })
@ -6447,7 +6447,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) {
testBlockQuerying(t, testBlockQuerying(t,
genHistogramSeries(10, 5, minute(0), minute(120), minute(3), floatHistogram), genHistogramSeries(10, 5, minute(0), minute(120), minute(3), floatHistogram),
genSeriesFromSampleGenerator(10, 5, minute(1), minute(120), minute(3), func(ts int64) tsdbutil.Sample { genSeriesFromSampleGenerator(10, 5, minute(1), minute(120), minute(3), func(ts int64) tsdbutil.Sample {
return sample{t: ts, v: rand.Float64()} return sample{t: ts, f: rand.Float64()}
}), }),
genHistogramSeries(10, 5, minute(2), minute(120), minute(3), floatHistogram), genHistogramSeries(10, 5, minute(2), minute(120), minute(3), floatHistogram),
) )
@ -6459,7 +6459,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) {
genHistogramSeries(10, 5, minute(46), minute(100), minute(3), floatHistogram), genHistogramSeries(10, 5, minute(46), minute(100), minute(3), floatHistogram),
genHistogramAndFloatSeries(10, 5, minute(89), minute(140), minute(3), floatHistogram), genHistogramAndFloatSeries(10, 5, minute(89), minute(140), minute(3), floatHistogram),
genSeriesFromSampleGenerator(10, 5, minute(126), minute(200), minute(3), func(ts int64) tsdbutil.Sample { genSeriesFromSampleGenerator(10, 5, minute(126), minute(200), minute(3), func(ts int64) tsdbutil.Sample {
return sample{t: ts, v: rand.Float64()} return sample{t: ts, f: rand.Float64()}
}), }),
) )
}) })

View file

@ -1864,7 +1864,7 @@ func (s *stripeSeries) getOrSet(hash uint64, lset labels.Labels, createSeries fu
type sample struct { type sample struct {
t int64 t int64
v float64 f float64
h *histogram.Histogram h *histogram.Histogram
fh *histogram.FloatHistogram fh *histogram.FloatHistogram
} }
@ -1874,7 +1874,7 @@ func newSample(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHi
} }
func (s sample) T() int64 { return s.t } func (s sample) T() int64 { return s.t }
func (s sample) V() float64 { return s.v } func (s sample) F() float64 { return s.f }
func (s sample) H() *histogram.Histogram { return s.h } func (s sample) H() *histogram.Histogram { return s.h }
func (s sample) FH() *histogram.FloatHistogram { return s.fh } func (s sample) FH() *histogram.FloatHistogram { return s.fh }

View file

@ -465,8 +465,8 @@ func TestHead_HighConcurrencyReadAndWrite(t *testing.T) {
if sample.T() != int64(expectedValue) { if sample.T() != int64(expectedValue) {
return false, fmt.Errorf("expected sample %d to have ts %d, got %d", sampleIdx, expectedValue, sample.T()) return false, fmt.Errorf("expected sample %d to have ts %d, got %d", sampleIdx, expectedValue, sample.T())
} }
if sample.V() != float64(expectedValue) { if sample.F() != float64(expectedValue) {
return false, fmt.Errorf("expected sample %d to have value %d, got %f", sampleIdx, expectedValue, sample.V()) return false, fmt.Errorf("expected sample %d to have value %d, got %f", sampleIdx, expectedValue, sample.F())
} }
} }
@ -575,7 +575,7 @@ func TestHead_ReadWAL(t *testing.T) {
expandChunk := func(c chunkenc.Iterator) (x []sample) { expandChunk := func(c chunkenc.Iterator) (x []sample) {
for c.Next() == chunkenc.ValFloat { for c.Next() == chunkenc.ValFloat {
t, v := c.At() t, v := c.At()
x = append(x, sample{t: t, v: v}) x = append(x, sample{t: t, f: v})
} }
require.NoError(t, c.Err()) require.NoError(t, c.Err())
return x return x
@ -870,7 +870,7 @@ func TestHeadDeleteSimple(t *testing.T) {
buildSmpls := func(s []int64) []sample { buildSmpls := func(s []int64) []sample {
ss := make([]sample, 0, len(s)) ss := make([]sample, 0, len(s))
for _, t := range s { for _, t := range s {
ss = append(ss, sample{t: t, v: float64(t)}) ss = append(ss, sample{t: t, f: float64(t)})
} }
return ss return ss
} }
@ -925,7 +925,7 @@ func TestHeadDeleteSimple(t *testing.T) {
app := head.Appender(context.Background()) app := head.Appender(context.Background())
for _, smpl := range smplsAll { for _, smpl := range smplsAll {
_, err := app.Append(0, lblsDefault, smpl.t, smpl.v) _, err := app.Append(0, lblsDefault, smpl.t, smpl.f)
require.NoError(t, err) require.NoError(t, err)
} }
@ -939,7 +939,7 @@ func TestHeadDeleteSimple(t *testing.T) {
// Add more samples. // Add more samples.
app = head.Appender(context.Background()) app = head.Appender(context.Background())
for _, smpl := range c.addSamples { for _, smpl := range c.addSamples {
_, err := app.Append(0, lblsDefault, smpl.t, smpl.v) _, err := app.Append(0, lblsDefault, smpl.t, smpl.f)
require.NoError(t, err) require.NoError(t, err)
} }
@ -1924,7 +1924,7 @@ func TestMemSeriesIsolation(t *testing.T) {
require.Equal(t, 0, len(ws)) require.Equal(t, 0, len(ws))
for _, series := range seriesSet { for _, series := range seriesSet {
return int(series[len(series)-1].v) return int(series[len(series)-1].f)
} }
return -1 return -1
} }
@ -3088,7 +3088,7 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
ts++ ts++
_, err := app.Append(0, s2, int64(ts), float64(ts)) _, err := app.Append(0, s2, int64(ts), float64(ts))
require.NoError(t, err) require.NoError(t, err)
exp[k2] = append(exp[k2], sample{t: int64(ts), v: float64(ts)}) exp[k2] = append(exp[k2], sample{t: int64(ts), f: float64(ts)})
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
app = head.Appender(context.Background()) app = head.Appender(context.Background())
@ -3125,7 +3125,7 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
ts++ ts++
_, err := app.Append(0, s2, int64(ts), float64(ts)) _, err := app.Append(0, s2, int64(ts), float64(ts))
require.NoError(t, err) require.NoError(t, err)
exp[k2] = append(exp[k2], sample{t: int64(ts), v: float64(ts)}) exp[k2] = append(exp[k2], sample{t: int64(ts), f: float64(ts)})
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
app = head.Appender(context.Background()) app = head.Appender(context.Background())
@ -3812,7 +3812,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
expChunks: 1, expChunks: 1,
}, },
{ {
samples: []tsdbutil.Sample{sample{t: 200, v: 2}}, samples: []tsdbutil.Sample{sample{t: 200, f: 2}},
expChunks: 2, expChunks: 2,
}, },
{ {
@ -3836,7 +3836,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
expChunks: 6, expChunks: 6,
}, },
{ {
samples: []tsdbutil.Sample{sample{t: 100, v: 2}}, samples: []tsdbutil.Sample{sample{t: 100, f: 2}},
err: storage.ErrOutOfOrderSample, err: storage.ErrOutOfOrderSample,
}, },
{ {
@ -3847,13 +3847,13 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
// Combination of histograms and float64 in the same commit. The behaviour is undefined, but we want to also // Combination of histograms and float64 in the same commit. The behaviour is undefined, but we want to also
// verify how TSDB would behave. Here the histogram is appended at the end, hence will be considered as out of order. // verify how TSDB would behave. Here the histogram is appended at the end, hence will be considered as out of order.
samples: []tsdbutil.Sample{ samples: []tsdbutil.Sample{
sample{t: 400, v: 4}, sample{t: 400, f: 4},
sample{t: 500, h: hists[5]}, // This won't be committed. sample{t: 500, h: hists[5]}, // This won't be committed.
sample{t: 600, v: 6}, sample{t: 600, f: 6},
}, },
addToExp: []tsdbutil.Sample{ addToExp: []tsdbutil.Sample{
sample{t: 400, v: 4}, sample{t: 400, f: 4},
sample{t: 600, v: 6}, sample{t: 600, f: 6},
}, },
expChunks: 7, // Only 1 new chunk for float64. expChunks: 7, // Only 1 new chunk for float64.
}, },
@ -3861,11 +3861,11 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
// Here the histogram is appended at the end, hence the first histogram is out of order. // Here the histogram is appended at the end, hence the first histogram is out of order.
samples: []tsdbutil.Sample{ samples: []tsdbutil.Sample{
sample{t: 700, h: hists[7]}, // Out of order w.r.t. the next float64 sample that is appended first. sample{t: 700, h: hists[7]}, // Out of order w.r.t. the next float64 sample that is appended first.
sample{t: 800, v: 8}, sample{t: 800, f: 8},
sample{t: 900, h: hists[9]}, sample{t: 900, h: hists[9]},
}, },
addToExp: []tsdbutil.Sample{ addToExp: []tsdbutil.Sample{
sample{t: 800, v: 8}, sample{t: 800, f: 8},
sample{t: 900, h: hists[9].Copy()}, sample{t: 900, h: hists[9].Copy()},
}, },
expChunks: 8, // float64 added to old chunk, only 1 new for histograms. expChunks: 8, // float64 added to old chunk, only 1 new for histograms.
@ -3890,7 +3890,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
if s.H() != nil || s.FH() != nil { if s.H() != nil || s.FH() != nil {
_, err = app.AppendHistogram(0, lbls, s.T(), s.H(), s.FH()) _, err = app.AppendHistogram(0, lbls, s.T(), s.H(), s.FH())
} else { } else {
_, err = app.Append(0, lbls, s.T(), s.V()) _, err = app.Append(0, lbls, s.T(), s.F())
} }
require.Equal(t, a.err, err) require.Equal(t, a.err, err)
} }
@ -4056,7 +4056,7 @@ func TestOOOWalReplay(t *testing.T) {
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
if isOOO { if isOOO {
expOOOSamples = append(expOOOSamples, sample{t: ts, v: v}) expOOOSamples = append(expOOOSamples, sample{t: ts, f: v})
} }
} }
@ -4100,7 +4100,7 @@ func TestOOOWalReplay(t *testing.T) {
actOOOSamples := make([]sample, 0, len(expOOOSamples)) actOOOSamples := make([]sample, 0, len(expOOOSamples))
for it.Next() == chunkenc.ValFloat { for it.Next() == chunkenc.ValFloat {
ts, v := it.At() ts, v := it.At()
actOOOSamples = append(actOOOSamples, sample{t: ts, v: v}) actOOOSamples = append(actOOOSamples, sample{t: ts, f: v})
} }
// OOO chunk will be sorted. Hence sort the expected samples. // OOO chunk will be sorted. Hence sort the expected samples.
@ -4360,7 +4360,7 @@ func TestReplayAfterMmapReplayError(t *testing.T) {
var ref storage.SeriesRef var ref storage.SeriesRef
for i := 0; i < numSamples; i++ { for i := 0; i < numSamples; i++ {
ref, err = app.Append(ref, lbls, lastTs, float64(lastTs)) ref, err = app.Append(ref, lbls, lastTs, float64(lastTs))
expSamples = append(expSamples, sample{t: lastTs, v: float64(lastTs)}) expSamples = append(expSamples, sample{t: lastTs, f: float64(lastTs)})
require.NoError(t, err) require.NoError(t, err)
lastTs += itvl lastTs += itvl
if i%10 == 0 { if i%10 == 0 {

View file

@ -78,7 +78,7 @@ func (o *OOOChunk) ToXOR() (*chunkenc.XORChunk, error) {
return nil, err return nil, err
} }
for _, s := range o.samples { for _, s := range o.samples {
app.Append(s.t, s.v) app.Append(s.t, s.f)
} }
return x, nil return x, nil
} }
@ -96,7 +96,7 @@ func (o *OOOChunk) ToXORBetweenTimestamps(mint, maxt int64) (*chunkenc.XORChunk,
if s.t > maxt { if s.t > maxt {
break break
} }
app.Append(s.t, s.v) app.Append(s.t, s.f)
} }
return x, nil return x, nil
} }

View file

@ -504,8 +504,8 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
queryMaxT: minutes(100), queryMaxT: minutes(100),
firstInOrderSampleAt: minutes(120), firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{ inputSamples: tsdbutil.SampleSlice{
sample{t: minutes(30), v: float64(0)}, sample{t: minutes(30), f: float64(0)},
sample{t: minutes(40), v: float64(0)}, sample{t: minutes(40), f: float64(0)},
}, },
expChunkError: false, expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100 // ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@ -514,8 +514,8 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [--------] (With 2 samples) // Output Graphically [--------] (With 2 samples)
expChunksSamples: []tsdbutil.SampleSlice{ expChunksSamples: []tsdbutil.SampleSlice{
{ {
sample{t: minutes(30), v: float64(0)}, sample{t: minutes(30), f: float64(0)},
sample{t: minutes(40), v: float64(0)}, sample{t: minutes(40), f: float64(0)},
}, },
}, },
}, },
@ -526,15 +526,15 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120), firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{ inputSamples: tsdbutil.SampleSlice{
// opts.OOOCapMax is 5 so these will be mmapped to the first mmapped chunk // opts.OOOCapMax is 5 so these will be mmapped to the first mmapped chunk
sample{t: minutes(41), v: float64(0)}, sample{t: minutes(41), f: float64(0)},
sample{t: minutes(42), v: float64(0)}, sample{t: minutes(42), f: float64(0)},
sample{t: minutes(43), v: float64(0)}, sample{t: minutes(43), f: float64(0)},
sample{t: minutes(44), v: float64(0)}, sample{t: minutes(44), f: float64(0)},
sample{t: minutes(45), v: float64(0)}, sample{t: minutes(45), f: float64(0)},
// The following samples will go to the head chunk, and we want it // The following samples will go to the head chunk, and we want it
// to overlap with the previous chunk // to overlap with the previous chunk
sample{t: minutes(30), v: float64(1)}, sample{t: minutes(30), f: float64(1)},
sample{t: minutes(50), v: float64(1)}, sample{t: minutes(50), f: float64(1)},
}, },
expChunkError: false, expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100 // ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@ -544,13 +544,13 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [-----------------] (With 7 samples) // Output Graphically [-----------------] (With 7 samples)
expChunksSamples: []tsdbutil.SampleSlice{ expChunksSamples: []tsdbutil.SampleSlice{
{ {
sample{t: minutes(30), v: float64(1)}, sample{t: minutes(30), f: float64(1)},
sample{t: minutes(41), v: float64(0)}, sample{t: minutes(41), f: float64(0)},
sample{t: minutes(42), v: float64(0)}, sample{t: minutes(42), f: float64(0)},
sample{t: minutes(43), v: float64(0)}, sample{t: minutes(43), f: float64(0)},
sample{t: minutes(44), v: float64(0)}, sample{t: minutes(44), f: float64(0)},
sample{t: minutes(45), v: float64(0)}, sample{t: minutes(45), f: float64(0)},
sample{t: minutes(50), v: float64(1)}, sample{t: minutes(50), f: float64(1)},
}, },
}, },
}, },
@ -561,26 +561,26 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120), firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{ inputSamples: tsdbutil.SampleSlice{
// Chunk 0 // Chunk 0
sample{t: minutes(10), v: float64(0)}, sample{t: minutes(10), f: float64(0)},
sample{t: minutes(12), v: float64(0)}, sample{t: minutes(12), f: float64(0)},
sample{t: minutes(14), v: float64(0)}, sample{t: minutes(14), f: float64(0)},
sample{t: minutes(16), v: float64(0)}, sample{t: minutes(16), f: float64(0)},
sample{t: minutes(20), v: float64(0)}, sample{t: minutes(20), f: float64(0)},
// Chunk 1 // Chunk 1
sample{t: minutes(20), v: float64(1)}, sample{t: minutes(20), f: float64(1)},
sample{t: minutes(22), v: float64(1)}, sample{t: minutes(22), f: float64(1)},
sample{t: minutes(24), v: float64(1)}, sample{t: minutes(24), f: float64(1)},
sample{t: minutes(26), v: float64(1)}, sample{t: minutes(26), f: float64(1)},
sample{t: minutes(29), v: float64(1)}, sample{t: minutes(29), f: float64(1)},
// Chunk 2 // Chunk 2
sample{t: minutes(30), v: float64(2)}, sample{t: minutes(30), f: float64(2)},
sample{t: minutes(32), v: float64(2)}, sample{t: minutes(32), f: float64(2)},
sample{t: minutes(34), v: float64(2)}, sample{t: minutes(34), f: float64(2)},
sample{t: minutes(36), v: float64(2)}, sample{t: minutes(36), f: float64(2)},
sample{t: minutes(40), v: float64(2)}, sample{t: minutes(40), f: float64(2)},
// Head // Head
sample{t: minutes(40), v: float64(3)}, sample{t: minutes(40), f: float64(3)},
sample{t: minutes(50), v: float64(3)}, sample{t: minutes(50), f: float64(3)},
}, },
expChunkError: false, expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100 // ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@ -592,23 +592,23 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [----------------][-----------------] // Output Graphically [----------------][-----------------]
expChunksSamples: []tsdbutil.SampleSlice{ expChunksSamples: []tsdbutil.SampleSlice{
{ {
sample{t: minutes(10), v: float64(0)}, sample{t: minutes(10), f: float64(0)},
sample{t: minutes(12), v: float64(0)}, sample{t: minutes(12), f: float64(0)},
sample{t: minutes(14), v: float64(0)}, sample{t: minutes(14), f: float64(0)},
sample{t: minutes(16), v: float64(0)}, sample{t: minutes(16), f: float64(0)},
sample{t: minutes(20), v: float64(1)}, sample{t: minutes(20), f: float64(1)},
sample{t: minutes(22), v: float64(1)}, sample{t: minutes(22), f: float64(1)},
sample{t: minutes(24), v: float64(1)}, sample{t: minutes(24), f: float64(1)},
sample{t: minutes(26), v: float64(1)}, sample{t: minutes(26), f: float64(1)},
sample{t: minutes(29), v: float64(1)}, sample{t: minutes(29), f: float64(1)},
}, },
{ {
sample{t: minutes(30), v: float64(2)}, sample{t: minutes(30), f: float64(2)},
sample{t: minutes(32), v: float64(2)}, sample{t: minutes(32), f: float64(2)},
sample{t: minutes(34), v: float64(2)}, sample{t: minutes(34), f: float64(2)},
sample{t: minutes(36), v: float64(2)}, sample{t: minutes(36), f: float64(2)},
sample{t: minutes(40), v: float64(3)}, sample{t: minutes(40), f: float64(3)},
sample{t: minutes(50), v: float64(3)}, sample{t: minutes(50), f: float64(3)},
}, },
}, },
}, },
@ -619,26 +619,26 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120), firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{ inputSamples: tsdbutil.SampleSlice{
// Chunk 0 // Chunk 0
sample{t: minutes(40), v: float64(0)}, sample{t: minutes(40), f: float64(0)},
sample{t: minutes(42), v: float64(0)}, sample{t: minutes(42), f: float64(0)},
sample{t: minutes(44), v: float64(0)}, sample{t: minutes(44), f: float64(0)},
sample{t: minutes(46), v: float64(0)}, sample{t: minutes(46), f: float64(0)},
sample{t: minutes(50), v: float64(0)}, sample{t: minutes(50), f: float64(0)},
// Chunk 1 // Chunk 1
sample{t: minutes(30), v: float64(1)}, sample{t: minutes(30), f: float64(1)},
sample{t: minutes(32), v: float64(1)}, sample{t: minutes(32), f: float64(1)},
sample{t: minutes(34), v: float64(1)}, sample{t: minutes(34), f: float64(1)},
sample{t: minutes(36), v: float64(1)}, sample{t: minutes(36), f: float64(1)},
sample{t: minutes(40), v: float64(1)}, sample{t: minutes(40), f: float64(1)},
// Chunk 2 // Chunk 2
sample{t: minutes(20), v: float64(2)}, sample{t: minutes(20), f: float64(2)},
sample{t: minutes(22), v: float64(2)}, sample{t: minutes(22), f: float64(2)},
sample{t: minutes(24), v: float64(2)}, sample{t: minutes(24), f: float64(2)},
sample{t: minutes(26), v: float64(2)}, sample{t: minutes(26), f: float64(2)},
sample{t: minutes(29), v: float64(2)}, sample{t: minutes(29), f: float64(2)},
// Head // Head
sample{t: minutes(10), v: float64(3)}, sample{t: minutes(10), f: float64(3)},
sample{t: minutes(20), v: float64(3)}, sample{t: minutes(20), f: float64(3)},
}, },
expChunkError: false, expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100 // ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@ -650,23 +650,23 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [----------------][-----------------] // Output Graphically [----------------][-----------------]
expChunksSamples: []tsdbutil.SampleSlice{ expChunksSamples: []tsdbutil.SampleSlice{
{ {
sample{t: minutes(10), v: float64(3)}, sample{t: minutes(10), f: float64(3)},
sample{t: minutes(20), v: float64(2)}, sample{t: minutes(20), f: float64(2)},
sample{t: minutes(22), v: float64(2)}, sample{t: minutes(22), f: float64(2)},
sample{t: minutes(24), v: float64(2)}, sample{t: minutes(24), f: float64(2)},
sample{t: minutes(26), v: float64(2)}, sample{t: minutes(26), f: float64(2)},
sample{t: minutes(29), v: float64(2)}, sample{t: minutes(29), f: float64(2)},
}, },
{ {
sample{t: minutes(30), v: float64(1)}, sample{t: minutes(30), f: float64(1)},
sample{t: minutes(32), v: float64(1)}, sample{t: minutes(32), f: float64(1)},
sample{t: minutes(34), v: float64(1)}, sample{t: minutes(34), f: float64(1)},
sample{t: minutes(36), v: float64(1)}, sample{t: minutes(36), f: float64(1)},
sample{t: minutes(40), v: float64(0)}, sample{t: minutes(40), f: float64(0)},
sample{t: minutes(42), v: float64(0)}, sample{t: minutes(42), f: float64(0)},
sample{t: minutes(44), v: float64(0)}, sample{t: minutes(44), f: float64(0)},
sample{t: minutes(46), v: float64(0)}, sample{t: minutes(46), f: float64(0)},
sample{t: minutes(50), v: float64(0)}, sample{t: minutes(50), f: float64(0)},
}, },
}, },
}, },
@ -677,26 +677,26 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120), firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{ inputSamples: tsdbutil.SampleSlice{
// Chunk 0 // Chunk 0
sample{t: minutes(10), v: float64(0)}, sample{t: minutes(10), f: float64(0)},
sample{t: minutes(12), v: float64(0)}, sample{t: minutes(12), f: float64(0)},
sample{t: minutes(14), v: float64(0)}, sample{t: minutes(14), f: float64(0)},
sample{t: minutes(16), v: float64(0)}, sample{t: minutes(16), f: float64(0)},
sample{t: minutes(18), v: float64(0)}, sample{t: minutes(18), f: float64(0)},
// Chunk 1 // Chunk 1
sample{t: minutes(20), v: float64(1)}, sample{t: minutes(20), f: float64(1)},
sample{t: minutes(22), v: float64(1)}, sample{t: minutes(22), f: float64(1)},
sample{t: minutes(24), v: float64(1)}, sample{t: minutes(24), f: float64(1)},
sample{t: minutes(26), v: float64(1)}, sample{t: minutes(26), f: float64(1)},
sample{t: minutes(28), v: float64(1)}, sample{t: minutes(28), f: float64(1)},
// Chunk 2 // Chunk 2
sample{t: minutes(30), v: float64(2)}, sample{t: minutes(30), f: float64(2)},
sample{t: minutes(32), v: float64(2)}, sample{t: minutes(32), f: float64(2)},
sample{t: minutes(34), v: float64(2)}, sample{t: minutes(34), f: float64(2)},
sample{t: minutes(36), v: float64(2)}, sample{t: minutes(36), f: float64(2)},
sample{t: minutes(38), v: float64(2)}, sample{t: minutes(38), f: float64(2)},
// Head // Head
sample{t: minutes(40), v: float64(3)}, sample{t: minutes(40), f: float64(3)},
sample{t: minutes(42), v: float64(3)}, sample{t: minutes(42), f: float64(3)},
}, },
expChunkError: false, expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100 // ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@ -708,29 +708,29 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [-------][-------][-------][--------] // Output Graphically [-------][-------][-------][--------]
expChunksSamples: []tsdbutil.SampleSlice{ expChunksSamples: []tsdbutil.SampleSlice{
{ {
sample{t: minutes(10), v: float64(0)}, sample{t: minutes(10), f: float64(0)},
sample{t: minutes(12), v: float64(0)}, sample{t: minutes(12), f: float64(0)},
sample{t: minutes(14), v: float64(0)}, sample{t: minutes(14), f: float64(0)},
sample{t: minutes(16), v: float64(0)}, sample{t: minutes(16), f: float64(0)},
sample{t: minutes(18), v: float64(0)}, sample{t: minutes(18), f: float64(0)},
}, },
{ {
sample{t: minutes(20), v: float64(1)}, sample{t: minutes(20), f: float64(1)},
sample{t: minutes(22), v: float64(1)}, sample{t: minutes(22), f: float64(1)},
sample{t: minutes(24), v: float64(1)}, sample{t: minutes(24), f: float64(1)},
sample{t: minutes(26), v: float64(1)}, sample{t: minutes(26), f: float64(1)},
sample{t: minutes(28), v: float64(1)}, sample{t: minutes(28), f: float64(1)},
}, },
{ {
sample{t: minutes(30), v: float64(2)}, sample{t: minutes(30), f: float64(2)},
sample{t: minutes(32), v: float64(2)}, sample{t: minutes(32), f: float64(2)},
sample{t: minutes(34), v: float64(2)}, sample{t: minutes(34), f: float64(2)},
sample{t: minutes(36), v: float64(2)}, sample{t: minutes(36), f: float64(2)},
sample{t: minutes(38), v: float64(2)}, sample{t: minutes(38), f: float64(2)},
}, },
{ {
sample{t: minutes(40), v: float64(3)}, sample{t: minutes(40), f: float64(3)},
sample{t: minutes(42), v: float64(3)}, sample{t: minutes(42), f: float64(3)},
}, },
}, },
}, },
@ -741,20 +741,20 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120), firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{ inputSamples: tsdbutil.SampleSlice{
// Chunk 0 // Chunk 0
sample{t: minutes(10), v: float64(0)}, sample{t: minutes(10), f: float64(0)},
sample{t: minutes(15), v: float64(0)}, sample{t: minutes(15), f: float64(0)},
sample{t: minutes(20), v: float64(0)}, sample{t: minutes(20), f: float64(0)},
sample{t: minutes(25), v: float64(0)}, sample{t: minutes(25), f: float64(0)},
sample{t: minutes(30), v: float64(0)}, sample{t: minutes(30), f: float64(0)},
// Chunk 1 // Chunk 1
sample{t: minutes(20), v: float64(1)}, sample{t: minutes(20), f: float64(1)},
sample{t: minutes(25), v: float64(1)}, sample{t: minutes(25), f: float64(1)},
sample{t: minutes(30), v: float64(1)}, sample{t: minutes(30), f: float64(1)},
sample{t: minutes(35), v: float64(1)}, sample{t: minutes(35), f: float64(1)},
sample{t: minutes(42), v: float64(1)}, sample{t: minutes(42), f: float64(1)},
// Chunk 2 Head // Chunk 2 Head
sample{t: minutes(32), v: float64(2)}, sample{t: minutes(32), f: float64(2)},
sample{t: minutes(50), v: float64(2)}, sample{t: minutes(50), f: float64(2)},
}, },
expChunkError: false, expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100 // ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@ -765,15 +765,15 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [-----------------------------------] // Output Graphically [-----------------------------------]
expChunksSamples: []tsdbutil.SampleSlice{ expChunksSamples: []tsdbutil.SampleSlice{
{ {
sample{t: minutes(10), v: float64(0)}, sample{t: minutes(10), f: float64(0)},
sample{t: minutes(15), v: float64(0)}, sample{t: minutes(15), f: float64(0)},
sample{t: minutes(20), v: float64(1)}, sample{t: minutes(20), f: float64(1)},
sample{t: minutes(25), v: float64(1)}, sample{t: minutes(25), f: float64(1)},
sample{t: minutes(30), v: float64(1)}, sample{t: minutes(30), f: float64(1)},
sample{t: minutes(32), v: float64(2)}, sample{t: minutes(32), f: float64(2)},
sample{t: minutes(35), v: float64(1)}, sample{t: minutes(35), f: float64(1)},
sample{t: minutes(42), v: float64(1)}, sample{t: minutes(42), f: float64(1)},
sample{t: minutes(50), v: float64(2)}, sample{t: minutes(50), f: float64(2)},
}, },
}, },
}, },
@ -784,20 +784,20 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120), firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{ inputSamples: tsdbutil.SampleSlice{
// Chunk 0 // Chunk 0
sample{t: minutes(10), v: float64(0)}, sample{t: minutes(10), f: float64(0)},
sample{t: minutes(15), v: float64(0)}, sample{t: minutes(15), f: float64(0)},
sample{t: minutes(20), v: float64(0)}, sample{t: minutes(20), f: float64(0)},
sample{t: minutes(25), v: float64(0)}, sample{t: minutes(25), f: float64(0)},
sample{t: minutes(30), v: float64(0)}, sample{t: minutes(30), f: float64(0)},
// Chunk 1 // Chunk 1
sample{t: minutes(20), v: float64(1)}, sample{t: minutes(20), f: float64(1)},
sample{t: minutes(25), v: float64(1)}, sample{t: minutes(25), f: float64(1)},
sample{t: minutes(30), v: float64(1)}, sample{t: minutes(30), f: float64(1)},
sample{t: minutes(35), v: float64(1)}, sample{t: minutes(35), f: float64(1)},
sample{t: minutes(42), v: float64(1)}, sample{t: minutes(42), f: float64(1)},
// Chunk 2 Head // Chunk 2 Head
sample{t: minutes(32), v: float64(2)}, sample{t: minutes(32), f: float64(2)},
sample{t: minutes(50), v: float64(2)}, sample{t: minutes(50), f: float64(2)},
}, },
expChunkError: false, expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100 // ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@ -808,15 +808,15 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [-----------------------------------] // Output Graphically [-----------------------------------]
expChunksSamples: []tsdbutil.SampleSlice{ expChunksSamples: []tsdbutil.SampleSlice{
{ {
sample{t: minutes(10), v: float64(0)}, sample{t: minutes(10), f: float64(0)},
sample{t: minutes(15), v: float64(0)}, sample{t: minutes(15), f: float64(0)},
sample{t: minutes(20), v: float64(1)}, sample{t: minutes(20), f: float64(1)},
sample{t: minutes(25), v: float64(1)}, sample{t: minutes(25), f: float64(1)},
sample{t: minutes(30), v: float64(1)}, sample{t: minutes(30), f: float64(1)},
sample{t: minutes(32), v: float64(2)}, sample{t: minutes(32), f: float64(2)},
sample{t: minutes(35), v: float64(1)}, sample{t: minutes(35), f: float64(1)},
sample{t: minutes(42), v: float64(1)}, sample{t: minutes(42), f: float64(1)},
sample{t: minutes(50), v: float64(2)}, sample{t: minutes(50), f: float64(2)},
}, },
}, },
}, },
@ -833,7 +833,7 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// OOO few samples for s1. // OOO few samples for s1.
app = db.Appender(context.Background()) app = db.Appender(context.Background())
for _, s := range tc.inputSamples { for _, s := range tc.inputSamples {
appendSample(app, s1, s.T(), s.V()) appendSample(app, s1, s.T(), s.F())
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
@ -855,7 +855,7 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
it := c.Iterator(nil) it := c.Iterator(nil)
for it.Next() == chunkenc.ValFloat { for it.Next() == chunkenc.ValFloat {
t, v := it.At() t, v := it.At()
resultSamples = append(resultSamples, sample{t: t, v: v}) resultSamples = append(resultSamples, sample{t: t, f: v})
} }
require.Equal(t, tc.expChunksSamples[i], resultSamples) require.Equal(t, tc.expChunksSamples[i], resultSamples)
} }
@ -902,19 +902,19 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
firstInOrderSampleAt: minutes(120), firstInOrderSampleAt: minutes(120),
initialSamples: tsdbutil.SampleSlice{ initialSamples: tsdbutil.SampleSlice{
// Chunk 0 // Chunk 0
sample{t: minutes(20), v: float64(0)}, sample{t: minutes(20), f: float64(0)},
sample{t: minutes(22), v: float64(0)}, sample{t: minutes(22), f: float64(0)},
sample{t: minutes(24), v: float64(0)}, sample{t: minutes(24), f: float64(0)},
sample{t: minutes(26), v: float64(0)}, sample{t: minutes(26), f: float64(0)},
sample{t: minutes(30), v: float64(0)}, sample{t: minutes(30), f: float64(0)},
// Chunk 1 Head // Chunk 1 Head
sample{t: minutes(25), v: float64(1)}, sample{t: minutes(25), f: float64(1)},
sample{t: minutes(35), v: float64(1)}, sample{t: minutes(35), f: float64(1)},
}, },
samplesAfterSeriesCall: tsdbutil.SampleSlice{ samplesAfterSeriesCall: tsdbutil.SampleSlice{
sample{t: minutes(10), v: float64(1)}, sample{t: minutes(10), f: float64(1)},
sample{t: minutes(32), v: float64(1)}, sample{t: minutes(32), f: float64(1)},
sample{t: minutes(50), v: float64(1)}, sample{t: minutes(50), f: float64(1)},
}, },
expChunkError: false, expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100 // ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@ -926,14 +926,14 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
// Output Graphically [------------] (With 8 samples, samples newer than lastmint or older than lastmaxt are omitted but the ones in between are kept) // Output Graphically [------------] (With 8 samples, samples newer than lastmint or older than lastmaxt are omitted but the ones in between are kept)
expChunksSamples: []tsdbutil.SampleSlice{ expChunksSamples: []tsdbutil.SampleSlice{
{ {
sample{t: minutes(20), v: float64(0)}, sample{t: minutes(20), f: float64(0)},
sample{t: minutes(22), v: float64(0)}, sample{t: minutes(22), f: float64(0)},
sample{t: minutes(24), v: float64(0)}, sample{t: minutes(24), f: float64(0)},
sample{t: minutes(25), v: float64(1)}, sample{t: minutes(25), f: float64(1)},
sample{t: minutes(26), v: float64(0)}, sample{t: minutes(26), f: float64(0)},
sample{t: minutes(30), v: float64(0)}, sample{t: minutes(30), f: float64(0)},
sample{t: minutes(32), v: float64(1)}, // This sample was added after Series() but before Chunk() and its in between the lastmint and maxt so it should be kept sample{t: minutes(32), f: float64(1)}, // This sample was added after Series() but before Chunk() and its in between the lastmint and maxt so it should be kept
sample{t: minutes(35), v: float64(1)}, sample{t: minutes(35), f: float64(1)},
}, },
}, },
}, },
@ -944,22 +944,22 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
firstInOrderSampleAt: minutes(120), firstInOrderSampleAt: minutes(120),
initialSamples: tsdbutil.SampleSlice{ initialSamples: tsdbutil.SampleSlice{
// Chunk 0 // Chunk 0
sample{t: minutes(20), v: float64(0)}, sample{t: minutes(20), f: float64(0)},
sample{t: minutes(22), v: float64(0)}, sample{t: minutes(22), f: float64(0)},
sample{t: minutes(24), v: float64(0)}, sample{t: minutes(24), f: float64(0)},
sample{t: minutes(26), v: float64(0)}, sample{t: minutes(26), f: float64(0)},
sample{t: minutes(30), v: float64(0)}, sample{t: minutes(30), f: float64(0)},
// Chunk 1 Head // Chunk 1 Head
sample{t: minutes(25), v: float64(1)}, sample{t: minutes(25), f: float64(1)},
sample{t: minutes(35), v: float64(1)}, sample{t: minutes(35), f: float64(1)},
}, },
samplesAfterSeriesCall: tsdbutil.SampleSlice{ samplesAfterSeriesCall: tsdbutil.SampleSlice{
sample{t: minutes(10), v: float64(1)}, sample{t: minutes(10), f: float64(1)},
sample{t: minutes(32), v: float64(1)}, sample{t: minutes(32), f: float64(1)},
sample{t: minutes(50), v: float64(1)}, sample{t: minutes(50), f: float64(1)},
// Chunk 1 gets mmapped and Chunk 2, the new head is born // Chunk 1 gets mmapped and Chunk 2, the new head is born
sample{t: minutes(25), v: float64(2)}, sample{t: minutes(25), f: float64(2)},
sample{t: minutes(31), v: float64(2)}, sample{t: minutes(31), f: float64(2)},
}, },
expChunkError: false, expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100 // ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@ -972,14 +972,14 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
// Output Graphically [------------] (8 samples) It has 5 from Chunk 0 and 3 from Chunk 1 // Output Graphically [------------] (8 samples) It has 5 from Chunk 0 and 3 from Chunk 1
expChunksSamples: []tsdbutil.SampleSlice{ expChunksSamples: []tsdbutil.SampleSlice{
{ {
sample{t: minutes(20), v: float64(0)}, sample{t: minutes(20), f: float64(0)},
sample{t: minutes(22), v: float64(0)}, sample{t: minutes(22), f: float64(0)},
sample{t: minutes(24), v: float64(0)}, sample{t: minutes(24), f: float64(0)},
sample{t: minutes(25), v: float64(1)}, sample{t: minutes(25), f: float64(1)},
sample{t: minutes(26), v: float64(0)}, sample{t: minutes(26), f: float64(0)},
sample{t: minutes(30), v: float64(0)}, sample{t: minutes(30), f: float64(0)},
sample{t: minutes(32), v: float64(1)}, // This sample was added after Series() but before Chunk() and its in between the lastmint and maxt so it should be kept sample{t: minutes(32), f: float64(1)}, // This sample was added after Series() but before Chunk() and its in between the lastmint and maxt so it should be kept
sample{t: minutes(35), v: float64(1)}, sample{t: minutes(35), f: float64(1)},
}, },
}, },
}, },
@ -996,7 +996,7 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
// OOO few samples for s1. // OOO few samples for s1.
app = db.Appender(context.Background()) app = db.Appender(context.Background())
for _, s := range tc.initialSamples { for _, s := range tc.initialSamples {
appendSample(app, s1, s.T(), s.V()) appendSample(app, s1, s.T(), s.F())
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
@ -1013,7 +1013,7 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
// OOO few samples for s1. // OOO few samples for s1.
app = db.Appender(context.Background()) app = db.Appender(context.Background())
for _, s := range tc.samplesAfterSeriesCall { for _, s := range tc.samplesAfterSeriesCall {
appendSample(app, s1, s.T(), s.V()) appendSample(app, s1, s.T(), s.F())
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
@ -1026,7 +1026,7 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
it := c.Iterator(nil) it := c.Iterator(nil)
for it.Next() == chunkenc.ValFloat { for it.Next() == chunkenc.ValFloat {
ts, v := it.At() ts, v := it.At()
resultSamples = append(resultSamples, sample{t: ts, v: v}) resultSamples = append(resultSamples, sample{t: ts, f: v})
} }
require.Equal(t, tc.expChunksSamples[i], resultSamples) require.Equal(t, tc.expChunksSamples[i], resultSamples)
} }

View file

@ -52,7 +52,7 @@ func TestOOOInsert(t *testing.T) {
chunk := NewOOOChunk() chunk := NewOOOChunk()
chunk.samples = makeEvenSampleSlice(numPreExisting) chunk.samples = makeEvenSampleSlice(numPreExisting)
newSample := samplify(valOdd(insertPos)) newSample := samplify(valOdd(insertPos))
chunk.Insert(newSample.t, newSample.v) chunk.Insert(newSample.t, newSample.f)
var expSamples []sample var expSamples []sample
// Our expected new samples slice, will be first the original samples. // Our expected new samples slice, will be first the original samples.
@ -81,9 +81,9 @@ func TestOOOInsertDuplicate(t *testing.T) {
chunk.samples = makeEvenSampleSlice(num) chunk.samples = makeEvenSampleSlice(num)
dupSample := chunk.samples[dupPos] dupSample := chunk.samples[dupPos]
dupSample.v = 0.123 dupSample.f = 0.123
ok := chunk.Insert(dupSample.t, dupSample.v) ok := chunk.Insert(dupSample.t, dupSample.f)
expSamples := makeEvenSampleSlice(num) // We expect no change. expSamples := makeEvenSampleSlice(num) // We expect no change.
require.False(t, ok) require.False(t, ok)

View file

@ -132,7 +132,7 @@ func createIdxChkReaders(t *testing.T, tc []seriesSamples) (IndexReader, ChunkRe
chunk := chunkenc.NewXORChunk() chunk := chunkenc.NewXORChunk()
app, _ := chunk.Appender() app, _ := chunk.Appender()
for _, smpl := range chk { for _, smpl := range chk {
app.Append(smpl.t, smpl.v) app.Append(smpl.t, smpl.f)
} }
chkReader[chunkRef] = chunk chkReader[chunkRef] = chunk
chunkRef++ chunkRef++
@ -479,7 +479,7 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) {
for _, s := range testData { for _, s := range testData {
for _, chk := range s.chunks { for _, chk := range s.chunks {
for _, sample := range chk { for _, sample := range chk {
_, err = app.Append(0, labels.FromMap(s.lset), sample.t, sample.v) _, err = app.Append(0, labels.FromMap(s.lset), sample.t, sample.f)
require.NoError(t, err) require.NoError(t, err)
} }
} }
@ -882,7 +882,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
if tc.seekSuccess { if tc.seekSuccess {
// After successful seek iterator is ready. Grab the value. // After successful seek iterator is ready. Grab the value.
t, v := it.At() t, v := it.At()
r = append(r, sample{t: t, v: v}) r = append(r, sample{t: t, f: v})
} }
} }
expandedResult, err := storage.ExpandSamples(it, newSample) expandedResult, err := storage.ExpandSamples(it, newSample)
@ -1054,8 +1054,8 @@ func TestDeletedIterator(t *testing.T) {
act := make([]sample, 1000) act := make([]sample, 1000)
for i := 0; i < 1000; i++ { for i := 0; i < 1000; i++ {
act[i].t = int64(i) act[i].t = int64(i)
act[i].v = rand.Float64() act[i].f = rand.Float64()
app.Append(act[i].t, act[i].v) app.Append(act[i].t, act[i].f)
} }
cases := []struct { cases := []struct {
@ -1090,7 +1090,7 @@ func TestDeletedIterator(t *testing.T) {
ts, v := it.At() ts, v := it.At()
require.Equal(t, act[i].t, ts) require.Equal(t, act[i].t, ts)
require.Equal(t, act[i].v, v) require.Equal(t, act[i].f, v)
} }
// There has been an extra call to Next(). // There has been an extra call to Next().
i++ i++
@ -1114,8 +1114,8 @@ func TestDeletedIterator_WithSeek(t *testing.T) {
act := make([]sample, 1000) act := make([]sample, 1000)
for i := 0; i < 1000; i++ { for i := 0; i < 1000; i++ {
act[i].t = int64(i) act[i].t = int64(i)
act[i].v = float64(i) act[i].f = float64(i)
app.Append(act[i].t, act[i].v) app.Append(act[i].t, act[i].f)
} }
cases := []struct { cases := []struct {

View file

@ -28,7 +28,7 @@ type Samples interface {
type Sample interface { type Sample interface {
T() int64 T() int64
V() float64 // TODO(beorn7): Rename to F(). F() float64
H() *histogram.Histogram H() *histogram.Histogram
FH() *histogram.FloatHistogram FH() *histogram.FloatHistogram
Type() chunkenc.ValueType Type() chunkenc.ValueType
@ -69,7 +69,7 @@ func ChunkFromSamplesGeneric(s Samples) chunks.Meta {
for i := 0; i < s.Len(); i++ { for i := 0; i < s.Len(); i++ {
switch sampleType { switch sampleType {
case chunkenc.ValFloat: case chunkenc.ValFloat:
ca.Append(s.Get(i).T(), s.Get(i).V()) ca.Append(s.Get(i).T(), s.Get(i).F())
case chunkenc.ValHistogram: case chunkenc.ValHistogram:
ca.AppendHistogram(s.Get(i).T(), s.Get(i).H()) ca.AppendHistogram(s.Get(i).T(), s.Get(i).H())
case chunkenc.ValFloatHistogram: case chunkenc.ValFloatHistogram:
@ -87,7 +87,7 @@ func ChunkFromSamplesGeneric(s Samples) chunks.Meta {
type sample struct { type sample struct {
t int64 t int64
v float64 f float64
h *histogram.Histogram h *histogram.Histogram
fh *histogram.FloatHistogram fh *histogram.FloatHistogram
} }
@ -96,8 +96,8 @@ func (s sample) T() int64 {
return s.t return s.t
} }
func (s sample) V() float64 { func (s sample) F() float64 {
return s.v return s.f
} }
func (s sample) H() *histogram.Histogram { func (s sample) H() *histogram.Histogram {
@ -123,7 +123,7 @@ func (s sample) Type() chunkenc.ValueType {
func PopulatedChunk(numSamples int, minTime int64) chunks.Meta { func PopulatedChunk(numSamples int, minTime int64) chunks.Meta {
samples := make([]Sample, numSamples) samples := make([]Sample, numSamples)
for i := 0; i < numSamples; i++ { for i := 0; i < numSamples; i++ {
samples[i] = sample{t: minTime + int64(i*1000), v: 1.0} samples[i] = sample{t: minTime + int64(i*1000), f: 1.0}
} }
return ChunkFromSamples(samples) return ChunkFromSamples(samples)
} }
@ -133,7 +133,7 @@ func GenerateSamples(start, numSamples int) []Sample {
return generateSamples(start, numSamples, func(i int) Sample { return generateSamples(start, numSamples, func(i int) Sample {
return sample{ return sample{
t: int64(i), t: int64(i),
v: float64(i), f: float64(i),
} }
}) })
} }

View file

@ -45,12 +45,12 @@ func MarshalTimestamp(t int64, stream *jsoniter.Stream) {
} }
// MarshalFloat marshals a float value using the passed jsoniter stream. // MarshalFloat marshals a float value using the passed jsoniter stream.
func MarshalFloat(v float64, stream *jsoniter.Stream) { func MarshalFloat(f float64, stream *jsoniter.Stream) {
stream.WriteRaw(`"`) stream.WriteRaw(`"`)
// Taken from https://github.com/json-iterator/go/blob/master/stream_float.go#L71 as a workaround // Taken from https://github.com/json-iterator/go/blob/master/stream_float.go#L71 as a workaround
// to https://github.com/json-iterator/go/issues/365 (jsoniter, to follow json standard, doesn't allow inf/nan). // to https://github.com/json-iterator/go/issues/365 (jsoniter, to follow json standard, doesn't allow inf/nan).
buf := stream.Buffer() buf := stream.Buffer()
abs := math.Abs(v) abs := math.Abs(f)
fmt := byte('f') fmt := byte('f')
// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
if abs != 0 { if abs != 0 {
@ -58,7 +58,7 @@ func MarshalFloat(v float64, stream *jsoniter.Stream) {
fmt = 'e' fmt = 'e'
} }
} }
buf = strconv.AppendFloat(buf, v, fmt, -1, 64) buf = strconv.AppendFloat(buf, f, fmt, -1, 64)
stream.SetBuffer(buf) stream.SetBuffer(buf)
stream.WriteRaw(`"`) stream.WriteRaw(`"`)
} }

View file

@ -132,7 +132,7 @@ Loop:
t = sample.T() t = sample.T()
switch sample.Type() { switch sample.Type() {
case chunkenc.ValFloat: case chunkenc.ValFloat:
f = sample.V() f = sample.F()
case chunkenc.ValHistogram: case chunkenc.ValHistogram:
fh = sample.H().ToFloat() fh = sample.H().ToFloat()
case chunkenc.ValFloatHistogram: case chunkenc.ValFloatHistogram: