Merge pull request #9759 from prometheus/sparse-rate

Prepare evaluation of rate and other functions for sparse histograms
This commit is contained in:
Björn Rabenstein 2021-11-16 14:24:56 +01:00 committed by GitHub
commit 742b504be8
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
46 changed files with 1163 additions and 586 deletions

View file

@ -1190,7 +1190,7 @@ func (n notReadyAppender) AppendExemplar(ref uint64, l labels.Labels, e exemplar
return 0, tsdb.ErrNotReady return 0, tsdb.ErrNotReady
} }
func (n notReadyAppender) AppendHistogram(ref uint64, l labels.Labels, t int64, sh histogram.Histogram) (uint64, error) { func (n notReadyAppender) AppendHistogram(ref uint64, l labels.Labels, t int64, h *histogram.Histogram) (uint64, error) {
return 0, tsdb.ErrNotReady return 0, tsdb.ErrNotReady
} }

View file

@ -460,7 +460,7 @@ func query(ctx context.Context, qs string, t time.Time, engine *promql.Engine, q
return v, nil return v, nil
case promql.Scalar: case promql.Scalar:
return promql.Vector{promql.Sample{ return promql.Vector{promql.Sample{
Point: promql.Point(v), Point: promql.Point{T: v.T, V: v.V},
Metric: labels.Labels{}, Metric: labels.Labels{},
}}, nil }}, nil
default: default:

View file

@ -14,7 +14,9 @@
package histogram package histogram
import ( import (
"fmt"
"math" "math"
"strings"
) )
// Histogram encodes a sparse, high-resolution histogram. See the design // Histogram encodes a sparse, high-resolution histogram. See the design
@ -45,7 +47,7 @@ type Histogram struct {
ZeroCount uint64 ZeroCount uint64
// Total number of observations. // Total number of observations.
Count uint64 Count uint64
// Sum of observations. // Sum of observations. This is also used as the stale marker.
Sum float64 Sum float64
// Spans for positive and negative buckets (see Span below). // Spans for positive and negative buckets (see Span below).
PositiveSpans, NegativeSpans []Span PositiveSpans, NegativeSpans []Span
@ -65,7 +67,7 @@ type Span struct {
} }
// Copy returns a deep copy of the Histogram. // Copy returns a deep copy of the Histogram.
func (h Histogram) Copy() Histogram { func (h Histogram) Copy() *Histogram {
c := h c := h
if h.PositiveSpans != nil { if h.PositiveSpans != nil {
@ -85,7 +87,61 @@ func (h Histogram) Copy() Histogram {
copy(c.NegativeBuckets, h.NegativeBuckets) copy(c.NegativeBuckets, h.NegativeBuckets)
} }
return c return &c
}
// String returns a string representation of the Histogram. Buckets are
// printed in ascending order of their boundaries: negative buckets first
// (reversed, since NegativeBucketIterator yields them in descending order),
// then the zero bucket (only if it has observations), then positive buckets.
// Empty buckets are omitted.
func (h Histogram) String() string {
	var sb strings.Builder
	fmt.Fprintf(&sb, "{count:%d, sum:%g", h.Count, h.Sum)

	// Collect non-empty negative buckets so they can be emitted in
	// reverse (ascending-value) order.
	var nBuckets []Bucket
	for it := h.NegativeBucketIterator(); it.Next(); {
		bucket := it.At()
		if bucket.Count != 0 {
			// Reuse the already-decoded bucket rather than calling
			// it.At() a second time.
			nBuckets = append(nBuckets, bucket)
		}
	}
	for i := len(nBuckets) - 1; i >= 0; i-- {
		fmt.Fprintf(&sb, ", %s", nBuckets[i].String())
	}

	if h.ZeroCount != 0 {
		fmt.Fprintf(&sb, ", %s", h.ZeroBucket().String())
	}

	for it := h.PositiveBucketIterator(); it.Next(); {
		bucket := it.At()
		if bucket.Count != 0 {
			fmt.Fprintf(&sb, ", %s", bucket.String())
		}
	}

	sb.WriteRune('}')
	return sb.String()
}
// ZeroBucket returns the zero bucket: the bucket centered on zero, spanning
// [-ZeroThreshold, +ZeroThreshold] with both limits inclusive, holding
// ZeroCount observations.
func (h Histogram) ZeroBucket() Bucket {
	b := Bucket{
		Lower: -h.ZeroThreshold,
		Upper: h.ZeroThreshold,
		Count: h.ZeroCount,
	}
	// Both ends of the zero bucket are closed.
	b.LowerInclusive = true
	b.UpperInclusive = true
	return b
}
// PositiveBucketIterator returns a BucketIterator to iterate over all positive
// buckets in ascending order (starting next to the zero bucket and going up).
func (h Histogram) PositiveBucketIterator() BucketIterator {
	it := newRegularBucketIterator(&h, true)
	return it
}
// NegativeBucketIterator returns a BucketIterator to iterate over all negative
// buckets in descending order (starting next to the zero bucket and going down).
func (h Histogram) NegativeBucketIterator() BucketIterator {
return newRegularBucketIterator(&h, false)
} }
// CumulativeBucketIterator returns a BucketIterator to iterate over a // CumulativeBucketIterator returns a BucketIterator to iterate over a
@ -96,7 +152,7 @@ func (h Histogram) CumulativeBucketIterator() BucketIterator {
if len(h.NegativeBuckets) > 0 { if len(h.NegativeBuckets) > 0 {
panic("CumulativeIterator called on Histogram with negative buckets") panic("CumulativeIterator called on Histogram with negative buckets")
} }
return &cumulativeBucketIterator{h: h, posSpansIdx: -1} return &cumulativeBucketIterator{h: &h, posSpansIdx: -1}
} }
// BucketIterator iterates over the buckets of a Histogram, returning decoded // BucketIterator iterates over the buckets of a Histogram, returning decoded
@ -106,26 +162,126 @@ type BucketIterator interface {
Next() bool Next() bool
// At returns the current bucket. // At returns the current bucket.
At() Bucket At() Bucket
// Err returns the current error. It should be used only after iterator is
// exhausted, that is `Next` or `Seek` returns false.
Err() error
} }
// Bucket represents a bucket (currently only a cumulative one with an upper // Bucket represents a bucket with lower and upper limit and the count of
// inclusive bound and a cumulative count). // samples in the bucket. It also specifies if each limit is inclusive or
// not. (Mathematically, inclusive limits create a closed interval, and
// non-inclusive limits an open interval.)
//
// To represent cumulative buckets, Lower is set to -Inf, and the Count is then
// cumulative (including the counts of all buckets for smaller values).
type Bucket struct { type Bucket struct {
Upper float64 Lower, Upper float64
Count uint64 LowerInclusive, UpperInclusive bool
Count uint64
Index int32 // Index within schema. To easily compare buckets that share the same schema.
}
// String returns a string representation, using the usual mathematical notation
// of '['/']' for inclusive bounds and '('/')' for non-inclusive bounds.
func (b Bucket) String() string {
	// Pick the bracket characters up front, then render the whole
	// bucket in a single formatting call.
	lb, rb := "(", ")"
	if b.LowerInclusive {
		lb = "["
	}
	if b.UpperInclusive {
		rb = "]"
	}
	return fmt.Sprintf("%s%g,%g%s:%d", lb, b.Lower, b.Upper, rb, b.Count)
}
// regularBucketIterator implements BucketIterator over either the positive or
// the negative buckets of a Histogram (selected via the positive flag). It
// walks the spans/buckets slices in order, decoding the delta-encoded counts
// and the span offsets into absolute bucket indices and boundaries as it goes.
type regularBucketIterator struct {
	schema   int32
	spans    []Span
	buckets  []int64 // Delta-encoded counts, accumulated into currCount.
	positive bool    // Whether this is for positive buckets.

	spansIdx   int    // Current span within spans slice.
	idxInSpan  uint32 // Index in the current span. 0 <= idxInSpan < span.Length.
	bucketsIdx int    // Current bucket within buckets slice.

	currCount            int64   // Count in the current bucket.
	currIdx              int32   // The actual bucket index.
	currLower, currUpper float64 // Limits of the current bucket.
}
// newRegularBucketIterator creates an iterator over h's positive buckets when
// positive is true, or over its negative buckets otherwise.
func newRegularBucketIterator(h *Histogram, positive bool) *regularBucketIterator {
	it := &regularBucketIterator{schema: h.Schema, positive: positive}
	if positive {
		it.spans, it.buckets = h.PositiveSpans, h.PositiveBuckets
	} else {
		it.spans, it.buckets = h.NegativeSpans, h.NegativeBuckets
	}
	return it
}
// Next advances the iterator to the next populated bucket, returning false
// when all spans are exhausted. It decodes the current bucket's absolute
// index, count, and boundaries into r's curr* fields.
func (r *regularBucketIterator) Next() bool {
	if r.spansIdx >= len(r.spans) {
		return false
	}
	span := r.spans[r.spansIdx]
	// Seed currIdx for the first bucket; afterwards, consecutive buckets
	// within a span differ by one index.
	if r.bucketsIdx == 0 {
		r.currIdx = span.Offset
	} else {
		r.currIdx++
	}
	for r.idxInSpan >= span.Length {
		// We have exhausted the current span and have to find a new
		// one. We'll even handle pathologic spans of length 0.
		r.idxInSpan = 0
		r.spansIdx++
		if r.spansIdx >= len(r.spans) {
			return false
		}
		span = r.spans[r.spansIdx]
		// Span offsets are relative to the previous bucket index.
		r.currIdx += span.Offset
	}

	// Bucket counts are delta-encoded; accumulate into the running count.
	r.currCount += r.buckets[r.bucketsIdx]
	if r.positive {
		r.currUpper = getBound(r.currIdx, r.schema)
		r.currLower = getBound(r.currIdx-1, r.schema)
	} else {
		// Negative buckets mirror the positive boundaries around zero.
		r.currLower = -getBound(r.currIdx, r.schema)
		r.currUpper = -getBound(r.currIdx-1, r.schema)
	}

	r.idxInSpan++
	r.bucketsIdx++
	return true
}
func (r *regularBucketIterator) At() Bucket {
return Bucket{
Count: uint64(r.currCount),
Lower: r.currLower,
Upper: r.currUpper,
LowerInclusive: r.currLower < 0,
UpperInclusive: r.currUpper > 0,
Index: r.currIdx,
}
} }
type cumulativeBucketIterator struct { type cumulativeBucketIterator struct {
h Histogram h *Histogram
posSpansIdx int // Index in h.PositiveSpans we are in. -1 means 0 bucket. posSpansIdx int // Index in h.PositiveSpans we are in. -1 means 0 bucket.
posBucketsIdx int // Index in h.PositiveBuckets. posBucketsIdx int // Index in h.PositiveBuckets.
idxInSpan uint32 // Index in the current span. 0 <= idxInSpan < span.Length. idxInSpan uint32 // Index in the current span. 0 <= idxInSpan < span.Length.
initialised bool initialized bool
currIdx int32 // The actual bucket index after decoding from spans. currIdx int32 // The actual bucket index after decoding from spans.
currUpper float64 // The upper boundary of the current bucket. currUpper float64 // The upper boundary of the current bucket.
currCount int64 // Current non-cumulative count for the current bucket. Does not apply for empty bucket. currCount int64 // Current non-cumulative count for the current bucket. Does not apply for empty bucket.
@ -158,24 +314,24 @@ func (c *cumulativeBucketIterator) Next() bool {
if c.emptyBucketCount > 0 { if c.emptyBucketCount > 0 {
// We are traversing through empty buckets at the moment. // We are traversing through empty buckets at the moment.
c.currUpper = getUpper(c.currIdx, c.h.Schema) c.currUpper = getBound(c.currIdx, c.h.Schema)
c.currIdx++ c.currIdx++
c.emptyBucketCount-- c.emptyBucketCount--
return true return true
} }
span := c.h.PositiveSpans[c.posSpansIdx] span := c.h.PositiveSpans[c.posSpansIdx]
if c.posSpansIdx == 0 && !c.initialised { if c.posSpansIdx == 0 && !c.initialized {
// Initialising. // Initialising.
c.currIdx = span.Offset c.currIdx = span.Offset
// The first bucket is absolute value and not a delta with Zero bucket. // The first bucket is an absolute value and not a delta with Zero bucket.
c.currCount = 0 c.currCount = 0
c.initialised = true c.initialized = true
} }
c.currCount += c.h.PositiveBuckets[c.posBucketsIdx] c.currCount += c.h.PositiveBuckets[c.posBucketsIdx]
c.currCumulativeCount += uint64(c.currCount) c.currCumulativeCount += uint64(c.currCount)
c.currUpper = getUpper(c.currIdx, c.h.Schema) c.currUpper = getBound(c.currIdx, c.h.Schema)
c.posBucketsIdx++ c.posBucketsIdx++
c.idxInSpan++ c.idxInSpan++
@ -191,15 +347,19 @@ func (c *cumulativeBucketIterator) Next() bool {
return true return true
} }
func (c *cumulativeBucketIterator) At() Bucket { func (c *cumulativeBucketIterator) At() Bucket {
return Bucket{ return Bucket{
Upper: c.currUpper, Upper: c.currUpper,
Count: c.currCumulativeCount, Lower: math.Inf(-1),
UpperInclusive: true,
LowerInclusive: true,
Count: c.currCumulativeCount,
Index: c.currIdx - 1,
} }
} }
func (c *cumulativeBucketIterator) Err() error { return nil }
func getUpper(idx, schema int32) float64 { func getBound(idx, schema int32) float64 {
if schema < 0 { if schema < 0 {
return math.Ldexp(1, int(idx)<<(-schema)) return math.Ldexp(1, int(idx)<<(-schema))
} }

View file

@ -15,15 +15,72 @@ package histogram
import ( import (
"fmt" "fmt"
"math"
"testing" "testing"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
// TestHistogramString verifies Histogram.String over a table of cases:
// an empty histogram, a negative-buckets-only histogram with a zero bucket,
// and a histogram with both positive and negative buckets (including
// zero-length spans).
func TestHistogramString(t *testing.T) {
	cases := []struct {
		histogram      Histogram
		expectedString string
	}{
		{
			// Empty histogram: only count and sum are printed.
			histogram: Histogram{
				Schema: 0,
			},
			expectedString: "{count:0, sum:0}",
		},
		{
			// Negative buckets plus a populated zero bucket.
			histogram: Histogram{
				Schema:        0,
				Count:         9,
				Sum:           -3.1415,
				ZeroCount:     12,
				ZeroThreshold: 0.001,
				NegativeSpans: []Span{
					{Offset: 0, Length: 5},
					{Offset: 1, Length: 1},
				},
				// Delta-encoded bucket counts.
				NegativeBuckets: []int64{1, 2, -2, 1, -1, 0},
			},
			expectedString: "{count:9, sum:-3.1415, [-64,-32):1, [-16,-8):1, [-8,-4):2, [-4,-2):1, [-2,-1):3, [-1,-0.5):1, [-0.001,0.001]:12}",
		},
		{
			// Both positive and negative buckets, including
			// pathological zero-length spans.
			histogram: Histogram{
				Schema: 0,
				Count:  19,
				Sum:    2.7,
				PositiveSpans: []Span{
					{Offset: 0, Length: 4},
					{Offset: 0, Length: 0},
					{Offset: 0, Length: 3},
				},
				PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
				NegativeSpans: []Span{
					{Offset: 0, Length: 5},
					{Offset: 1, Length: 0},
					{Offset: 0, Length: 1},
				},
				NegativeBuckets: []int64{1, 2, -2, 1, -1, 0},
			},
			expectedString: "{count:19, sum:2.7, [-64,-32):1, [-16,-8):1, [-8,-4):2, [-4,-2):1, [-2,-1):3, [-1,-0.5):1, (0.5,1]:1, (1,2]:3, (2,4]:1, (4,8]:2, (8,16]:1, (16,32]:1, (32,64]:1}",
		},
	}

	for i, c := range cases {
		t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
			actualString := c.histogram.String()
			require.Equal(t, c.expectedString, actualString)
		})
	}
}
func TestCumulativeBucketIterator(t *testing.T) { func TestCumulativeBucketIterator(t *testing.T) {
cases := []struct { cases := []struct {
histogram Histogram histogram Histogram
expectedCumulativeBuckets []Bucket expectedBuckets []Bucket
}{ }{
{ {
histogram: Histogram{ histogram: Histogram{
@ -34,14 +91,14 @@ func TestCumulativeBucketIterator(t *testing.T) {
}, },
PositiveBuckets: []int64{1, 1, -1, 0}, PositiveBuckets: []int64{1, 1, -1, 0},
}, },
expectedCumulativeBuckets: []Bucket{ expectedBuckets: []Bucket{
{Upper: 1, Count: 1}, {Lower: math.Inf(-1), Upper: 1, Count: 1, LowerInclusive: true, UpperInclusive: true, Index: 0},
{Upper: 2, Count: 3}, {Lower: math.Inf(-1), Upper: 2, Count: 3, LowerInclusive: true, UpperInclusive: true, Index: 1},
{Upper: 4, Count: 3}, {Lower: math.Inf(-1), Upper: 4, Count: 3, LowerInclusive: true, UpperInclusive: true, Index: 2},
{Upper: 8, Count: 4}, {Lower: math.Inf(-1), Upper: 8, Count: 4, LowerInclusive: true, UpperInclusive: true, Index: 3},
{Upper: 16, Count: 5}, {Lower: math.Inf(-1), Upper: 16, Count: 5, LowerInclusive: true, UpperInclusive: true, Index: 4},
}, },
}, },
{ {
@ -53,16 +110,16 @@ func TestCumulativeBucketIterator(t *testing.T) {
}, },
PositiveBuckets: []int64{1, 2, -2, 1, -1, 0}, PositiveBuckets: []int64{1, 2, -2, 1, -1, 0},
}, },
expectedCumulativeBuckets: []Bucket{ expectedBuckets: []Bucket{
{Upper: 1, Count: 1}, {Lower: math.Inf(-1), Upper: 1, Count: 1, LowerInclusive: true, UpperInclusive: true, Index: 0},
{Upper: 2, Count: 4}, {Lower: math.Inf(-1), Upper: 2, Count: 4, LowerInclusive: true, UpperInclusive: true, Index: 1},
{Upper: 4, Count: 5}, {Lower: math.Inf(-1), Upper: 4, Count: 5, LowerInclusive: true, UpperInclusive: true, Index: 2},
{Upper: 8, Count: 7}, {Lower: math.Inf(-1), Upper: 8, Count: 7, LowerInclusive: true, UpperInclusive: true, Index: 3},
{Upper: 16, Count: 8}, {Lower: math.Inf(-1), Upper: 16, Count: 8, LowerInclusive: true, UpperInclusive: true, Index: 4},
{Upper: 32, Count: 8}, {Lower: math.Inf(-1), Upper: 32, Count: 8, LowerInclusive: true, UpperInclusive: true, Index: 5},
{Upper: 64, Count: 9}, {Lower: math.Inf(-1), Upper: 64, Count: 9, LowerInclusive: true, UpperInclusive: true, Index: 6},
}, },
}, },
{ {
@ -73,14 +130,14 @@ func TestCumulativeBucketIterator(t *testing.T) {
}, },
PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0}, PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
}, },
expectedCumulativeBuckets: []Bucket{ expectedBuckets: []Bucket{
{Upper: 1, Count: 1}, {Lower: math.Inf(-1), Upper: 1, Count: 1, LowerInclusive: true, UpperInclusive: true, Index: 0},
{Upper: 2, Count: 4}, {Lower: math.Inf(-1), Upper: 2, Count: 4, LowerInclusive: true, UpperInclusive: true, Index: 1},
{Upper: 4, Count: 5}, {Lower: math.Inf(-1), Upper: 4, Count: 5, LowerInclusive: true, UpperInclusive: true, Index: 2},
{Upper: 8, Count: 7}, {Lower: math.Inf(-1), Upper: 8, Count: 7, LowerInclusive: true, UpperInclusive: true, Index: 3},
{Upper: 16, Count: 8}, {Lower: math.Inf(-1), Upper: 16, Count: 8, LowerInclusive: true, UpperInclusive: true, Index: 4},
{Upper: 32, Count: 9}, {Lower: math.Inf(-1), Upper: 32, Count: 9, LowerInclusive: true, UpperInclusive: true, Index: 5},
{Upper: 64, Count: 10}, {Lower: math.Inf(-1), Upper: 64, Count: 10, LowerInclusive: true, UpperInclusive: true, Index: 6},
}, },
}, },
{ {
@ -93,22 +150,22 @@ func TestCumulativeBucketIterator(t *testing.T) {
}, },
PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 3}, PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 3},
}, },
expectedCumulativeBuckets: []Bucket{ expectedBuckets: []Bucket{
{Upper: 0.6484197773255048, Count: 1}, // -5 {Lower: math.Inf(-1), Upper: 0.6484197773255048, Count: 1, LowerInclusive: true, UpperInclusive: true, Index: -5},
{Upper: 0.7071067811865475, Count: 4}, // -4 {Lower: math.Inf(-1), Upper: 0.7071067811865475, Count: 4, LowerInclusive: true, UpperInclusive: true, Index: -4},
{Upper: 0.7711054127039704, Count: 4}, // -3 {Lower: math.Inf(-1), Upper: 0.7711054127039704, Count: 4, LowerInclusive: true, UpperInclusive: true, Index: -3},
{Upper: 0.8408964152537144, Count: 4}, // -2 {Lower: math.Inf(-1), Upper: 0.8408964152537144, Count: 4, LowerInclusive: true, UpperInclusive: true, Index: -2},
{Upper: 0.9170040432046711, Count: 5}, // -1 {Lower: math.Inf(-1), Upper: 0.9170040432046711, Count: 5, LowerInclusive: true, UpperInclusive: true, Index: -1},
{Upper: 1, Count: 7}, // 1 {Lower: math.Inf(-1), Upper: 1, Count: 7, LowerInclusive: true, UpperInclusive: true, Index: 0},
{Upper: 1.0905077326652577, Count: 8}, // 0 {Lower: math.Inf(-1), Upper: 1.0905077326652577, Count: 8, LowerInclusive: true, UpperInclusive: true, Index: 1},
{Upper: 1.189207115002721, Count: 8}, // 1 {Lower: math.Inf(-1), Upper: 1.189207115002721, Count: 8, LowerInclusive: true, UpperInclusive: true, Index: 2},
{Upper: 1.2968395546510096, Count: 8}, // 2 {Lower: math.Inf(-1), Upper: 1.2968395546510096, Count: 8, LowerInclusive: true, UpperInclusive: true, Index: 3},
{Upper: 1.414213562373095, Count: 9}, // 3 {Lower: math.Inf(-1), Upper: 1.414213562373095, Count: 9, LowerInclusive: true, UpperInclusive: true, Index: 4},
{Upper: 1.5422108254079407, Count: 13}, // 4 {Lower: math.Inf(-1), Upper: 1.5422108254079407, Count: 13, LowerInclusive: true, UpperInclusive: true, Index: 5},
}, },
}, },
{ {
@ -120,17 +177,17 @@ func TestCumulativeBucketIterator(t *testing.T) {
}, },
PositiveBuckets: []int64{1, 2, -2, 1, -1, 0}, PositiveBuckets: []int64{1, 2, -2, 1, -1, 0},
}, },
expectedCumulativeBuckets: []Bucket{ expectedBuckets: []Bucket{
{Upper: 0.00390625, Count: 1}, // -2 {Lower: math.Inf(-1), Upper: 0.00390625, Count: 1, LowerInclusive: true, UpperInclusive: true, Index: -2},
{Upper: 0.0625, Count: 4}, // -1 {Lower: math.Inf(-1), Upper: 0.0625, Count: 4, LowerInclusive: true, UpperInclusive: true, Index: -1},
{Upper: 1, Count: 5}, // 0 {Lower: math.Inf(-1), Upper: 1, Count: 5, LowerInclusive: true, UpperInclusive: true, Index: 0},
{Upper: 16, Count: 7}, // 1 {Lower: math.Inf(-1), Upper: 16, Count: 7, LowerInclusive: true, UpperInclusive: true, Index: 1},
{Upper: 256, Count: 7}, // 2 {Lower: math.Inf(-1), Upper: 256, Count: 7, LowerInclusive: true, UpperInclusive: true, Index: 2},
{Upper: 4096, Count: 7}, // 3 {Lower: math.Inf(-1), Upper: 4096, Count: 7, LowerInclusive: true, UpperInclusive: true, Index: 3},
{Upper: 65536, Count: 8}, // 4 {Lower: math.Inf(-1), Upper: 65536, Count: 8, LowerInclusive: true, UpperInclusive: true, Index: 4},
{Upper: 1048576, Count: 9}, // 5 {Lower: math.Inf(-1), Upper: 1048576, Count: 9, LowerInclusive: true, UpperInclusive: true, Index: 5},
}, },
}, },
{ {
@ -141,12 +198,12 @@ func TestCumulativeBucketIterator(t *testing.T) {
}, },
PositiveBuckets: []int64{1, 2, -2, 1, -1}, PositiveBuckets: []int64{1, 2, -2, 1, -1},
}, },
expectedCumulativeBuckets: []Bucket{ expectedBuckets: []Bucket{
{Upper: 0.0625, Count: 1}, // -2 {Lower: math.Inf(-1), Upper: 0.0625, Count: 1, LowerInclusive: true, UpperInclusive: true, Index: -2},
{Upper: 0.25, Count: 4}, // -1 {Lower: math.Inf(-1), Upper: 0.25, Count: 4, LowerInclusive: true, UpperInclusive: true, Index: -1},
{Upper: 1, Count: 5}, // 0 {Lower: math.Inf(-1), Upper: 1, Count: 5, LowerInclusive: true, UpperInclusive: true, Index: 0},
{Upper: 4, Count: 7}, // 1 {Lower: math.Inf(-1), Upper: 4, Count: 7, LowerInclusive: true, UpperInclusive: true, Index: 1},
{Upper: 16, Count: 8}, // 2 {Lower: math.Inf(-1), Upper: 16, Count: 8, LowerInclusive: true, UpperInclusive: true, Index: 2},
}, },
}, },
} }
@ -154,12 +211,177 @@ func TestCumulativeBucketIterator(t *testing.T) {
for i, c := range cases { for i, c := range cases {
t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
it := c.histogram.CumulativeBucketIterator() it := c.histogram.CumulativeBucketIterator()
actualBuckets := make([]Bucket, 0, len(c.expectedCumulativeBuckets)) actualBuckets := make([]Bucket, 0, len(c.expectedBuckets))
for it.Next() { for it.Next() {
actualBuckets = append(actualBuckets, it.At()) actualBuckets = append(actualBuckets, it.At())
} }
require.NoError(t, it.Err()) require.Equal(t, c.expectedBuckets, actualBuckets)
require.Equal(t, c.expectedCumulativeBuckets, actualBuckets) })
}
}
func TestRegularBucketIterator(t *testing.T) {
cases := []struct {
histogram Histogram
expectedPositiveBuckets []Bucket
expectedNegativeBuckets []Bucket
}{
{
histogram: Histogram{
Schema: 0,
},
expectedPositiveBuckets: []Bucket{},
expectedNegativeBuckets: []Bucket{},
},
{
histogram: Histogram{
Schema: 0,
PositiveSpans: []Span{
{Offset: 0, Length: 2},
{Offset: 1, Length: 2},
},
PositiveBuckets: []int64{1, 1, -1, 0},
},
expectedPositiveBuckets: []Bucket{
{Lower: 0.5, Upper: 1, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 0},
{Lower: 1, Upper: 2, Count: 2, LowerInclusive: false, UpperInclusive: true, Index: 1},
{Lower: 4, Upper: 8, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 3},
{Lower: 8, Upper: 16, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 4},
},
expectedNegativeBuckets: []Bucket{},
},
{
histogram: Histogram{
Schema: 0,
NegativeSpans: []Span{
{Offset: 0, Length: 5},
{Offset: 1, Length: 1},
},
NegativeBuckets: []int64{1, 2, -2, 1, -1, 0},
},
expectedPositiveBuckets: []Bucket{},
expectedNegativeBuckets: []Bucket{
{Lower: -1, Upper: -0.5, Count: 1, LowerInclusive: true, UpperInclusive: false, Index: 0},
{Lower: -2, Upper: -1, Count: 3, LowerInclusive: true, UpperInclusive: false, Index: 1},
{Lower: -4, Upper: -2, Count: 1, LowerInclusive: true, UpperInclusive: false, Index: 2},
{Lower: -8, Upper: -4, Count: 2, LowerInclusive: true, UpperInclusive: false, Index: 3},
{Lower: -16, Upper: -8, Count: 1, LowerInclusive: true, UpperInclusive: false, Index: 4},
{Lower: -64, Upper: -32, Count: 1, LowerInclusive: true, UpperInclusive: false, Index: 6},
},
},
{
histogram: Histogram{
Schema: 0,
PositiveSpans: []Span{
{Offset: 0, Length: 4},
{Offset: 0, Length: 0},
{Offset: 0, Length: 3},
},
PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},
NegativeSpans: []Span{
{Offset: 0, Length: 5},
{Offset: 1, Length: 0},
{Offset: 0, Length: 1},
},
NegativeBuckets: []int64{1, 2, -2, 1, -1, 0},
},
expectedPositiveBuckets: []Bucket{
{Lower: 0.5, Upper: 1, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 0},
{Lower: 1, Upper: 2, Count: 3, LowerInclusive: false, UpperInclusive: true, Index: 1},
{Lower: 2, Upper: 4, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 2},
{Lower: 4, Upper: 8, Count: 2, LowerInclusive: false, UpperInclusive: true, Index: 3},
{Lower: 8, Upper: 16, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 4},
{Lower: 16, Upper: 32, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 5},
{Lower: 32, Upper: 64, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 6},
},
expectedNegativeBuckets: []Bucket{
{Lower: -1, Upper: -0.5, Count: 1, LowerInclusive: true, UpperInclusive: false, Index: 0},
{Lower: -2, Upper: -1, Count: 3, LowerInclusive: true, UpperInclusive: false, Index: 1},
{Lower: -4, Upper: -2, Count: 1, LowerInclusive: true, UpperInclusive: false, Index: 2},
{Lower: -8, Upper: -4, Count: 2, LowerInclusive: true, UpperInclusive: false, Index: 3},
{Lower: -16, Upper: -8, Count: 1, LowerInclusive: true, UpperInclusive: false, Index: 4},
{Lower: -64, Upper: -32, Count: 1, LowerInclusive: true, UpperInclusive: false, Index: 6},
},
},
{
histogram: Histogram{
Schema: 3,
PositiveSpans: []Span{
{Offset: -5, Length: 2}, // -5 -4
{Offset: 2, Length: 3}, // -1 0 1
{Offset: 2, Length: 2}, // 4 5
},
PositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 3},
},
expectedPositiveBuckets: []Bucket{
{Lower: 0.5946035575013605, Upper: 0.6484197773255048, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: -5},
{Lower: 0.6484197773255048, Upper: 0.7071067811865475, Count: 3, LowerInclusive: false, UpperInclusive: true, Index: -4},
{Lower: 0.8408964152537144, Upper: 0.9170040432046711, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: -1},
{Lower: 0.9170040432046711, Upper: 1, Count: 2, LowerInclusive: false, UpperInclusive: true, Index: 0},
{Lower: 1, Upper: 1.0905077326652577, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 1},
{Lower: 1.2968395546510096, Upper: 1.414213562373095, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 4},
{Lower: 1.414213562373095, Upper: 1.5422108254079407, Count: 4, LowerInclusive: false, UpperInclusive: true, Index: 5},
},
expectedNegativeBuckets: []Bucket{},
},
{
histogram: Histogram{
Schema: -2,
PositiveSpans: []Span{
{Offset: -2, Length: 4}, // -2 -1 0 1
{Offset: 2, Length: 2}, // 4 5
},
PositiveBuckets: []int64{1, 2, -2, 1, -1, 0},
},
expectedPositiveBuckets: []Bucket{
{Lower: 0.000244140625, Upper: 0.00390625, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: -2},
{Lower: 0.00390625, Upper: 0.0625, Count: 3, LowerInclusive: false, UpperInclusive: true, Index: -1},
{Lower: 0.0625, Upper: 1, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 0},
{Lower: 1, Upper: 16, Count: 2, LowerInclusive: false, UpperInclusive: true, Index: 1},
{Lower: 4096, Upper: 65536, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 4},
{Lower: 65536, Upper: 1048576, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 5},
},
expectedNegativeBuckets: []Bucket{},
},
{
histogram: Histogram{
Schema: -1,
PositiveSpans: []Span{
{Offset: -2, Length: 5}, // -2 -1 0 1 2
},
PositiveBuckets: []int64{1, 2, -2, 1, -1},
},
expectedPositiveBuckets: []Bucket{
{Lower: 0.015625, Upper: 0.0625, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: -2},
{Lower: 0.0625, Upper: 0.25, Count: 3, LowerInclusive: false, UpperInclusive: true, Index: -1},
{Lower: 0.25, Upper: 1, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 0},
{Lower: 1, Upper: 4, Count: 2, LowerInclusive: false, UpperInclusive: true, Index: 1},
{Lower: 4, Upper: 16, Count: 1, LowerInclusive: false, UpperInclusive: true, Index: 2},
},
expectedNegativeBuckets: []Bucket{},
},
}
for i, c := range cases {
t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
it := c.histogram.PositiveBucketIterator()
actualPositiveBuckets := make([]Bucket, 0, len(c.expectedPositiveBuckets))
for it.Next() {
actualPositiveBuckets = append(actualPositiveBuckets, it.At())
}
require.Equal(t, c.expectedPositiveBuckets, actualPositiveBuckets)
it = c.histogram.NegativeBucketIterator()
actualNegativeBuckets := make([]Bucket, 0, len(c.expectedNegativeBuckets))
for it.Next() {
actualNegativeBuckets = append(actualNegativeBuckets, it.At())
}
require.Equal(t, c.expectedNegativeBuckets, actualNegativeBuckets)
}) })
} }
} }

View file

@ -30,7 +30,7 @@ type Parser interface {
// Histogram returns the bytes of a series with a sparse histogram as a // Histogram returns the bytes of a series with a sparse histogram as a
// value, the timestamp if set, and the histogram in the current sample. // value, the timestamp if set, and the histogram in the current sample.
Histogram() ([]byte, *int64, histogram.Histogram) Histogram() ([]byte, *int64, *histogram.Histogram)
// Help returns the metric name and help text in the current entry. // Help returns the metric name and help text in the current entry.
// Must only be called after Next returned a help entry. // Must only be called after Next returned a help entry.

View file

@ -114,10 +114,10 @@ func (p *OpenMetricsParser) Series() ([]byte, *int64, float64) {
return p.series, nil, p.val return p.series, nil, p.val
} }
// Histogram always returns (nil, nil, histogram.Histogram{}) because // Histogram always returns (nil, nil, nil) because OpenMetrics does not support
// OpenMetrics does not support sparse histograms. // sparse histograms.
func (p *OpenMetricsParser) Histogram() ([]byte, *int64, histogram.Histogram) { func (p *OpenMetricsParser) Histogram() ([]byte, *int64, *histogram.Histogram) {
return nil, nil, histogram.Histogram{} return nil, nil, nil
} }
// Help returns the metric name and help text in the current entry. // Help returns the metric name and help text in the current entry.

View file

@ -169,10 +169,10 @@ func (p *PromParser) Series() ([]byte, *int64, float64) {
return p.series, nil, p.val return p.series, nil, p.val
} }
// Histogram always returns (nil, nil, histogram.Histogram{}) because the // Histogram always returns (nil, nil, nil) because the Prometheus text format
// Prometheus text format does not support sparse histograms. // does not support sparse histograms.
func (p *PromParser) Histogram() ([]byte, *int64, histogram.Histogram) { func (p *PromParser) Histogram() ([]byte, *int64, *histogram.Histogram) {
return nil, nil, histogram.Histogram{} return nil, nil, nil
} }
// Help returns the metric name and help text in the current entry. // Help returns the metric name and help text in the current entry.

View file

@ -135,7 +135,7 @@ func (p *ProtobufParser) Series() ([]byte, *int64, float64) {
// Histogram returns the bytes of a series with a sparse histogram as a // Histogram returns the bytes of a series with a sparse histogram as a
// value, the timestamp if set, and the sparse histogram in the current // value, the timestamp if set, and the sparse histogram in the current
// sample. // sample.
func (p *ProtobufParser) Histogram() ([]byte, *int64, histogram.Histogram) { func (p *ProtobufParser) Histogram() ([]byte, *int64, *histogram.Histogram) {
var ( var (
m = p.mf.GetMetric()[p.metricPos] m = p.mf.GetMetric()[p.metricPos]
ts = m.GetTimestampMs() ts = m.GetTimestampMs()
@ -161,12 +161,12 @@ func (p *ProtobufParser) Histogram() ([]byte, *int64, histogram.Histogram) {
sh.NegativeSpans[i].Length = span.GetLength() sh.NegativeSpans[i].Length = span.GetLength()
} }
if ts != 0 { if ts != 0 {
return p.metricBytes.Bytes(), &ts, sh return p.metricBytes.Bytes(), &ts, &sh
} }
// Nasty hack: Assume that ts==0 means no timestamp. That's not true in // Nasty hack: Assume that ts==0 means no timestamp. That's not true in
// general, but proto3 has no distinction between unset and // general, but proto3 has no distinction between unset and
// default. Need to avoid in the final format. // default. Need to avoid in the final format.
return p.metricBytes.Bytes(), nil, sh return p.metricBytes.Bytes(), nil, &sh
} }
// Help returns the metric name and help text in the current entry. // Help returns the metric name and help text in the current entry.

View file

@ -266,7 +266,7 @@ metric: <
help string help string
unit string unit string
comment string comment string
shs histogram.Histogram shs *histogram.Histogram
e []exemplar.Exemplar e []exemplar.Exemplar
}{ }{
{ {
@ -332,7 +332,7 @@ metric: <
{ {
m: "test_histogram", m: "test_histogram",
t: 1234568, t: 1234568,
shs: histogram.Histogram{ shs: &histogram.Histogram{
Count: 175, Count: 175,
ZeroCount: 2, ZeroCount: 2,
Sum: 0.0008280461746287094, Sum: 0.0008280461746287094,

View file

@ -35,11 +35,13 @@ import (
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/uber/jaeger-client-go" "github.com/uber/jaeger-client-go"
"github.com/prometheus/prometheus/model/histogram"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/timestamp" "github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/pkg/value" "github.com/prometheus/prometheus/pkg/value"
"github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/prometheus/prometheus/util/stats" "github.com/prometheus/prometheus/util/stats"
) )
@ -181,7 +183,6 @@ func (q *query) Exec(ctx context.Context) *Result {
// Exec query. // Exec query.
res, warnings, err := q.ng.exec(ctx, q) res, warnings, err := q.ng.exec(ctx, q)
return &Result{Err: err, Value: res, Warnings: warnings} return &Result{Err: err, Value: res, Warnings: warnings}
} }
@ -614,7 +615,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *parser.Eval
for i, s := range mat { for i, s := range mat {
// Point might have a different timestamp, force it to the evaluation // Point might have a different timestamp, force it to the evaluation
// timestamp as that is when we ran the evaluation. // timestamp as that is when we ran the evaluation.
vector[i] = Sample{Metric: s.Metric, Point: Point{V: s.Points[0].V, T: start}} vector[i] = Sample{Metric: s.Metric, Point: Point{V: s.Points[0].V, H: s.Points[0].H, T: start}}
} }
return vector, warnings, nil return vector, warnings, nil
case parser.ValueTypeScalar: case parser.ValueTypeScalar:
@ -1324,7 +1325,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) {
outVec := call(inArgs, e.Args, enh) outVec := call(inArgs, e.Args, enh)
enh.Out = outVec[:0] enh.Out = outVec[:0]
if len(outVec) > 0 { if len(outVec) > 0 {
ss.Points = append(ss.Points, Point{V: outVec[0].Point.V, T: ts}) ss.Points = append(ss.Points, Point{V: outVec[0].Point.V, H: outVec[0].Point.H, T: ts})
} }
// Only buffer stepRange milliseconds from the second step on. // Only buffer stepRange milliseconds from the second step on.
it.ReduceDelta(stepRange) it.ReduceDelta(stepRange)
@ -1474,10 +1475,10 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) {
} }
for ts := ev.startTimestamp; ts <= ev.endTimestamp; ts += ev.interval { for ts := ev.startTimestamp; ts <= ev.endTimestamp; ts += ev.interval {
_, v, ok := ev.vectorSelectorSingle(it, e, ts) _, v, h, ok := ev.vectorSelectorSingle(it, e, ts)
if ok { if ok {
if ev.currentSamples < ev.maxSamples { if ev.currentSamples < ev.maxSamples {
ss.Points = append(ss.Points, Point{V: v, T: ts}) ss.Points = append(ss.Points, Point{V: v, H: h, T: ts})
ev.currentSamples++ ev.currentSamples++
} else { } else {
ev.error(ErrTooManySamples(env)) ev.error(ErrTooManySamples(env))
@ -1576,6 +1577,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) {
mat[i].Points = append(mat[i].Points, Point{ mat[i].Points = append(mat[i].Points, Point{
T: ts, T: ts,
V: mat[i].Points[0].V, V: mat[i].Points[0].V,
H: mat[i].Points[0].H,
}) })
ev.currentSamples++ ev.currentSamples++
if ev.currentSamples > ev.maxSamples { if ev.currentSamples > ev.maxSamples {
@ -1600,11 +1602,11 @@ func (ev *evaluator) vectorSelector(node *parser.VectorSelector, ts int64) (Vect
for i, s := range node.Series { for i, s := range node.Series {
it.Reset(s.Iterator()) it.Reset(s.Iterator())
t, v, ok := ev.vectorSelectorSingle(it, node, ts) t, v, h, ok := ev.vectorSelectorSingle(it, node, ts)
if ok { if ok {
vec = append(vec, Sample{ vec = append(vec, Sample{
Metric: node.Series[i].Labels(), Metric: node.Series[i].Labels(),
Point: Point{V: v, T: t}, Point: Point{V: v, H: h, T: t},
}) })
ev.currentSamples++ ev.currentSamples++
@ -1617,33 +1619,37 @@ func (ev *evaluator) vectorSelector(node *parser.VectorSelector, ts int64) (Vect
return vec, ws return vec, ws
} }
// vectorSelectorSingle evaluates a instant vector for the iterator of one time series. // vectorSelectorSingle evaluates an instant vector for the iterator of one time series.
func (ev *evaluator) vectorSelectorSingle(it *storage.MemoizedSeriesIterator, node *parser.VectorSelector, ts int64) (int64, float64, bool) { func (ev *evaluator) vectorSelectorSingle(it *storage.MemoizedSeriesIterator, node *parser.VectorSelector, ts int64) (int64, float64, *histogram.Histogram, bool) {
refTime := ts - durationMilliseconds(node.Offset) refTime := ts - durationMilliseconds(node.Offset)
var t int64 var t int64
var v float64 var v float64
var h *histogram.Histogram
ok := it.Seek(refTime) valueType := it.Seek(refTime)
if !ok { switch valueType {
case storage.ValNone:
if it.Err() != nil { if it.Err() != nil {
ev.error(it.Err()) ev.error(it.Err())
} }
} case storage.ValFloat:
if ok {
t, v = it.Values() t, v = it.Values()
case storage.ValHistogram:
t, h = it.HistogramValues()
default:
panic(fmt.Errorf("unknown value type %v", valueType))
} }
if valueType == storage.ValNone || t > refTime {
if !ok || t > refTime { var ok bool
t, v, ok = it.PeekPrev() t, v, h, ok = it.PeekPrev()
if !ok || t < refTime-durationMilliseconds(ev.lookbackDelta) { if !ok || t < refTime-durationMilliseconds(ev.lookbackDelta) {
return 0, 0, false return 0, 0, nil, false
} }
} }
if value.IsStaleNaN(v) { if value.IsStaleNaN(v) || (h != nil && value.IsStaleNaN(h.Sum)) {
return 0, 0, false return 0, 0, nil, false
} }
return t, v, true return t, v, h, true
} }
var pointPool = sync.Pool{} var pointPool = sync.Pool{}
@ -1735,29 +1741,57 @@ func (ev *evaluator) matrixIterSlice(it *storage.BufferedSeriesIterator, mint, m
} }
buf := it.Buffer() buf := it.Buffer()
for buf.Next() { if it.ChunkEncoding() == chunkenc.EncHistogram {
t, v := buf.At() for buf.Next() {
if value.IsStaleNaN(v) { t, h := buf.AtHistogram()
continue if value.IsStaleNaN(h.Sum) {
} continue
// Values in the buffer are guaranteed to be smaller than maxt. }
if t >= mint { // Values in the buffer are guaranteed to be smaller than maxt.
if ev.currentSamples >= ev.maxSamples { if t >= mint {
ev.error(ErrTooManySamples(env)) if ev.currentSamples >= ev.maxSamples {
ev.error(ErrTooManySamples(env))
}
ev.currentSamples++
out = append(out, Point{T: t, H: h})
}
}
} else {
for buf.Next() {
t, v := buf.At()
if value.IsStaleNaN(v) {
continue
}
// Values in the buffer are guaranteed to be smaller than maxt.
if t >= mint {
if ev.currentSamples >= ev.maxSamples {
ev.error(ErrTooManySamples(env))
}
ev.currentSamples++
out = append(out, Point{T: t, V: v})
} }
ev.currentSamples++
out = append(out, Point{T: t, V: v})
} }
} }
// The seeked sample might also be in the range. // The sought sample might also be in the range.
if ok { if ok {
t, v := it.Values() if it.ChunkEncoding() == chunkenc.EncHistogram {
if t == maxt && !value.IsStaleNaN(v) { t, h := it.HistogramValues()
if ev.currentSamples >= ev.maxSamples { if t == maxt && !value.IsStaleNaN(h.Sum) {
ev.error(ErrTooManySamples(env)) if ev.currentSamples >= ev.maxSamples {
ev.error(ErrTooManySamples(env))
}
out = append(out, Point{T: t, H: h})
ev.currentSamples++
}
} else {
t, v := it.Values()
if t == maxt && !value.IsStaleNaN(v) {
if ev.currentSamples >= ev.maxSamples {
ev.error(ErrTooManySamples(env))
}
out = append(out, Point{T: t, V: v})
ev.currentSamples++
} }
out = append(out, Point{T: t, V: v})
ev.currentSamples++
} }
} }
return out return out

View file

@ -16,6 +16,7 @@ package promql
import ( import (
"context" "context"
"errors" "errors"
"fmt"
"io/ioutil" "io/ioutil"
"os" "os"
"sort" "sort"
@ -30,6 +31,7 @@ import (
"github.com/prometheus/prometheus/pkg/timestamp" "github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb"
) )
func TestMain(m *testing.M) { func TestMain(m *testing.M) {
@ -2429,3 +2431,33 @@ func TestRangeQuery(t *testing.T) {
}) })
} }
} }
func TestSparseHistogramRate(t *testing.T) {
// Currently, this test it to only find panics or errors in the engine execution path.
// The panic stack trace will mostly tell you what code path is breaking and needs fixing for
// fetching the raw histograms and passing it rightly upto the rate() function implementation.
// TODO: Check the result for correctness once implementation is ready.
test, err := NewTest(t, "")
require.NoError(t, err)
defer test.Close()
seriesName := "sparse_histogram_series"
lbls := labels.FromStrings("__name__", seriesName)
app := test.Storage().Appender(context.TODO())
for i, h := range tsdb.GenerateTestHistograms(100) {
_, err := app.AppendHistogram(0, lbls, int64(i)*int64(15*time.Second/time.Millisecond), h)
require.NoError(t, err)
}
require.NoError(t, app.Commit())
require.NoError(t, test.Run())
engine := test.QueryEngine()
queryString := fmt.Sprintf("rate(%s[1m])", seriesName)
qry, err := engine.NewInstantQuery(test.Queryable(), queryString, timestamp.Time(int64(5*time.Minute/time.Millisecond)))
require.NoError(t, err)
res := qry.Exec(test.Context())
require.NoError(t, res.Err)
}

View file

@ -47,7 +47,7 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
{ {
Metric: labels.FromStrings("__name__", "metric1"), Metric: labels.FromStrings("__name__", "metric1"),
Points: []Point{ Points: []Point{
{0, 1}, {10000, 2}, {20000, 3}, {30000, 4}, {40000, 5}, {0, 1, nil}, {10000, 2, nil}, {20000, 3, nil}, {30000, 4, nil}, {40000, 5, nil},
}, },
}, },
}, },
@ -58,7 +58,7 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
{ {
Metric: labels.FromStrings("__name__", "metric1"), Metric: labels.FromStrings("__name__", "metric1"),
Points: []Point{ Points: []Point{
{0, 1}, {10000, 2}, {20000, 3}, {30000, 4}, {40000, 5}, {0, 1, nil}, {10000, 2, nil}, {20000, 3, nil}, {30000, 4, nil}, {40000, 5, nil},
}, },
}, },
}, },
@ -69,7 +69,7 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
{ {
Metric: labels.FromStrings("__name__", "metric1"), Metric: labels.FromStrings("__name__", "metric1"),
Points: []Point{ Points: []Point{
{0, 1}, {10000, 2}, {20000, 3}, {30000, 4}, {40000, 5}, {50000, 6}, {60000, 7}, {0, 1, nil}, {10000, 2, nil}, {20000, 3, nil}, {30000, 4, nil}, {40000, 5, nil}, {50000, 6, nil}, {60000, 7, nil},
}, },
}, },
}, },
@ -89,13 +89,13 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
{ {
Metric: labels.FromStrings("__name__", "metric1"), Metric: labels.FromStrings("__name__", "metric1"),
Points: []Point{ Points: []Point{
{0, 1}, {10000, 1}, {20000, 1}, {30000, 1}, {40000, 1}, {50000, 1}, {0, 1, nil}, {10000, 1, nil}, {20000, 1, nil}, {30000, 1, nil}, {40000, 1, nil}, {50000, 1, nil},
}, },
}, },
{ {
Metric: labels.FromStrings("__name__", "metric2"), Metric: labels.FromStrings("__name__", "metric2"),
Points: []Point{ Points: []Point{
{0, 1}, {10000, 2}, {20000, 3}, {30000, 4}, {40000, 5}, {50000, 6}, {60000, 7}, {70000, 8}, {0, 1, nil}, {10000, 2, nil}, {20000, 3, nil}, {30000, 4, nil}, {40000, 5, nil}, {50000, 6, nil}, {60000, 7, nil}, {70000, 8, nil},
}, },
}, },
}, },

View file

@ -78,14 +78,22 @@ func (s Series) String() string {
} }
// Point represents a single data point for a given timestamp. // Point represents a single data point for a given timestamp.
// If H is not nil, then this is a histogram point and only (T, H) is valid.
// If H is nil, then only (T, V) is valid.
type Point struct { type Point struct {
T int64 T int64
V float64 V float64
H *histogram.Histogram
} }
func (p Point) String() string { func (p Point) String() string {
v := strconv.FormatFloat(p.V, 'f', -1, 64) var s string
return fmt.Sprintf("%v @[%v]", v, p.T) if p.H != nil {
s = p.H.String()
} else {
s = strconv.FormatFloat(p.V, 'f', -1, 64)
}
return fmt.Sprintf("%s @[%v]", s, p.T)
} }
// MarshalJSON implements json.Marshaler. // MarshalJSON implements json.Marshaler.
@ -296,11 +304,9 @@ func (ssi *storageSeriesIterator) At() (t int64, v float64) {
return p.T, p.V return p.T, p.V
} }
// AtHistogram always returns (0, histogram.Histogram{}) because there is no func (ssi *storageSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
// support for histogram values yet. p := ssi.points[ssi.curr]
// TODO(beorn7): Fix that for histogram support in PromQL. return p.T, p.H
func (ssi *storageSeriesIterator) AtHistogram() (int64, histogram.Histogram) {
return 0, histogram.Histogram{}
} }
func (ssi *storageSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (ssi *storageSeriesIterator) ChunkEncoding() chunkenc.Encoding {

View file

@ -197,7 +197,7 @@ func EngineQueryFunc(engine *promql.Engine, q storage.Queryable) QueryFunc {
return v, nil return v, nil
case promql.Scalar: case promql.Scalar:
return promql.Vector{promql.Sample{ return promql.Vector{promql.Sample{
Point: promql.Point(v), Point: promql.Point{T: v.T, V: v.V},
Metric: labels.Labels{}, Metric: labels.Labels{},
}}, nil }}, nil
default: default:

View file

@ -35,7 +35,7 @@ func (a nopAppender) Append(uint64, labels.Labels, int64, float64) (uint64, erro
func (a nopAppender) AppendExemplar(uint64, labels.Labels, exemplar.Exemplar) (uint64, error) { func (a nopAppender) AppendExemplar(uint64, labels.Labels, exemplar.Exemplar) (uint64, error) {
return 0, nil return 0, nil
} }
func (a nopAppender) AppendHistogram(uint64, labels.Labels, int64, histogram.Histogram) (uint64, error) { func (a nopAppender) AppendHistogram(uint64, labels.Labels, int64, *histogram.Histogram) (uint64, error) {
return 0, nil return 0, nil
} }
func (a nopAppender) Commit() error { return nil } func (a nopAppender) Commit() error { return nil }
@ -49,7 +49,7 @@ type sample struct {
type histogramSample struct { type histogramSample struct {
t int64 t int64
h histogram.Histogram h *histogram.Histogram
} }
// collectResultAppender records all samples that were added through the appender. // collectResultAppender records all samples that were added through the appender.
@ -96,7 +96,7 @@ func (a *collectResultAppender) AppendExemplar(ref uint64, l labels.Labels, e ex
return a.next.AppendExemplar(ref, l, e) return a.next.AppendExemplar(ref, l, e)
} }
func (a *collectResultAppender) AppendHistogram(ref uint64, l labels.Labels, t int64, h histogram.Histogram) (uint64, error) { func (a *collectResultAppender) AppendHistogram(ref uint64, l labels.Labels, t int64, h *histogram.Histogram) (uint64, error) {
a.pendingHistograms = append(a.pendingHistograms, histogramSample{h: h, t: t}) a.pendingHistograms = append(a.pendingHistograms, histogramSample{h: h, t: t})
if a.next == nil { if a.next == nil {
return 0, nil return 0, nil

View file

@ -1440,7 +1440,7 @@ loop:
met []byte met []byte
parsedTimestamp *int64 parsedTimestamp *int64
val float64 val float64
h histogram.Histogram h *histogram.Histogram
) )
if et, err = p.Next(); err != nil { if et, err = p.Next(); err != nil {
if err == io.EOF { if err == io.EOF {

View file

@ -40,8 +40,9 @@ func NewBuffer(delta int64) *BufferedSeriesIterator {
// NewBufferIterator returns a new iterator that buffers the values within the // NewBufferIterator returns a new iterator that buffers the values within the
// time range of the current element and the duration of delta before. // time range of the current element and the duration of delta before.
func NewBufferIterator(it chunkenc.Iterator, delta int64) *BufferedSeriesIterator { func NewBufferIterator(it chunkenc.Iterator, delta int64) *BufferedSeriesIterator {
// TODO(codesome): based on encoding, allocate different buffer.
bit := &BufferedSeriesIterator{ bit := &BufferedSeriesIterator{
buf: newSampleRing(delta, 16), buf: newSampleRing(delta, 16, it.ChunkEncoding()),
delta: delta, delta: delta,
} }
bit.Reset(it) bit.Reset(it)
@ -67,8 +68,9 @@ func (b *BufferedSeriesIterator) ReduceDelta(delta int64) bool {
// PeekBack returns the nth previous element of the iterator. If there is none buffered, // PeekBack returns the nth previous element of the iterator. If there is none buffered,
// ok is false. // ok is false.
func (b *BufferedSeriesIterator) PeekBack(n int) (t int64, v float64, ok bool) { func (b *BufferedSeriesIterator) PeekBack(n int) (t int64, v float64, h *histogram.Histogram, ok bool) {
return b.buf.nthLast(n) s, ok := b.buf.nthLast(n)
return s.t, s.v, s.h, ok
} }
// Buffer returns an iterator over the buffered data. Invalidates previously // Buffer returns an iterator over the buffered data. Invalidates previously
@ -90,7 +92,11 @@ func (b *BufferedSeriesIterator) Seek(t int64) bool {
if !b.ok { if !b.ok {
return false return false
} }
b.lastTime, _ = b.Values() if b.it.ChunkEncoding() == chunkenc.EncHistogram {
b.lastTime, _ = b.HistogramValues()
} else {
b.lastTime, _ = b.Values()
}
} }
if b.lastTime >= t { if b.lastTime >= t {
@ -112,11 +118,21 @@ func (b *BufferedSeriesIterator) Next() bool {
} }
// Add current element to buffer before advancing. // Add current element to buffer before advancing.
b.buf.add(b.it.At()) if b.it.ChunkEncoding() == chunkenc.EncHistogram {
t, h := b.it.AtHistogram()
b.buf.add(sample{t: t, h: h})
} else {
t, v := b.it.At()
b.buf.add(sample{t: t, v: v})
}
b.ok = b.it.Next() b.ok = b.it.Next()
if b.ok { if b.ok {
b.lastTime, _ = b.Values() if b.it.ChunkEncoding() == chunkenc.EncHistogram {
b.lastTime, _ = b.HistogramValues()
} else {
b.lastTime, _ = b.Values()
}
} }
return b.ok return b.ok
@ -127,6 +143,16 @@ func (b *BufferedSeriesIterator) Values() (int64, float64) {
return b.it.At() return b.it.At()
} }
// HistogramValues returns the current histogram element of the iterator.
func (b *BufferedSeriesIterator) HistogramValues() (int64, *histogram.Histogram) {
return b.it.AtHistogram()
}
// ChunkEncoding return the chunk encoding of the underlying iterator.
func (b *BufferedSeriesIterator) ChunkEncoding() chunkenc.Encoding {
return b.it.ChunkEncoding()
}
// Err returns the last encountered error. // Err returns the last encountered error.
func (b *BufferedSeriesIterator) Err() error { func (b *BufferedSeriesIterator) Err() error {
return b.it.Err() return b.it.Err()
@ -135,6 +161,7 @@ func (b *BufferedSeriesIterator) Err() error {
type sample struct { type sample struct {
t int64 t int64
v float64 v float64
h *histogram.Histogram
} }
func (s sample) T() int64 { func (s sample) T() int64 {
@ -145,9 +172,14 @@ func (s sample) V() float64 {
return s.v return s.v
} }
func (s sample) H() *histogram.Histogram {
return s.h
}
type sampleRing struct { type sampleRing struct {
delta int64 delta int64
enc chunkenc.Encoding
buf []sample // lookback buffer buf []sample // lookback buffer
i int // position of most recent element in ring buffer i int // position of most recent element in ring buffer
f int // position of first element in ring buffer f int // position of first element in ring buffer
@ -156,8 +188,8 @@ type sampleRing struct {
it sampleRingIterator it sampleRingIterator
} }
func newSampleRing(delta int64, sz int) *sampleRing { func newSampleRing(delta int64, sz int, enc chunkenc.Encoding) *sampleRing {
r := &sampleRing{delta: delta, buf: make([]sample, sz)} r := &sampleRing{delta: delta, buf: make([]sample, sz), enc: enc}
r.reset() r.reset()
return r return r
@ -198,15 +230,12 @@ func (it *sampleRingIterator) At() (int64, float64) {
return it.r.at(it.i) return it.r.at(it.i)
} }
// AtHistogram always returns (0, histogram.Histogram{}) because there is no func (it *sampleRingIterator) AtHistogram() (int64, *histogram.Histogram) {
// support for histogram values yet. return it.r.atHistogram(it.i)
// TODO(beorn7): Fix that for histogram support in PromQL.
func (it *sampleRingIterator) AtHistogram() (int64, histogram.Histogram) {
return 0, histogram.Histogram{}
} }
func (it *sampleRingIterator) ChunkEncoding() chunkenc.Encoding { func (it *sampleRingIterator) ChunkEncoding() chunkenc.Encoding {
return chunkenc.EncXOR return it.r.enc
} }
func (r *sampleRing) at(i int) (int64, float64) { func (r *sampleRing) at(i int) (int64, float64) {
@ -215,9 +244,20 @@ func (r *sampleRing) at(i int) (int64, float64) {
return s.t, s.v return s.t, s.v
} }
func (r *sampleRing) atHistogram(i int) (int64, *histogram.Histogram) {
j := (r.f + i) % len(r.buf)
s := r.buf[j]
return s.t, s.h
}
func (r *sampleRing) atSample(i int) sample {
j := (r.f + i) % len(r.buf)
return r.buf[j]
}
// add adds a sample to the ring buffer and frees all samples that fall // add adds a sample to the ring buffer and frees all samples that fall
// out of the delta range. // out of the delta range.
func (r *sampleRing) add(t int64, v float64) { func (r *sampleRing) add(s sample) {
l := len(r.buf) l := len(r.buf)
// Grow the ring buffer if it fits no more elements. // Grow the ring buffer if it fits no more elements.
if l == r.l { if l == r.l {
@ -236,11 +276,11 @@ func (r *sampleRing) add(t int64, v float64) {
} }
} }
r.buf[r.i] = sample{t: t, v: v} r.buf[r.i] = s
r.l++ r.l++
// Free head of the buffer of samples that just fell out of the range. // Free head of the buffer of samples that just fell out of the range.
tmin := t - r.delta tmin := s.t - r.delta
for r.buf[r.f].t < tmin { for r.buf[r.f].t < tmin {
r.f++ r.f++
if r.f >= l { if r.f >= l {
@ -276,12 +316,11 @@ func (r *sampleRing) reduceDelta(delta int64) bool {
} }
// nthLast returns the nth most recent element added to the ring. // nthLast returns the nth most recent element added to the ring.
func (r *sampleRing) nthLast(n int) (int64, float64, bool) { func (r *sampleRing) nthLast(n int) (sample, bool) {
if n > r.l { if n > r.l {
return 0, 0, false return sample{}, false
} }
t, v := r.at(r.l - n) return r.atSample(r.l - n), true
return t, v, true
} }
func (r *sampleRing) samples() []sample { func (r *sampleRing) samples() []sample {

View file

@ -55,7 +55,7 @@ func TestSampleRing(t *testing.T) {
}, },
} }
for _, c := range cases { for _, c := range cases {
r := newSampleRing(c.delta, c.size) r := newSampleRing(c.delta, c.size, chunkenc.EncNone)
input := []sample{} input := []sample{}
for _, t := range c.input { for _, t := range c.input {
@ -66,7 +66,7 @@ func TestSampleRing(t *testing.T) {
} }
for i, s := range input { for i, s := range input {
r.add(s.t, s.v) r.add(s)
buffered := r.samples() buffered := r.samples()
for _, sold := range input[:i] { for _, sold := range input[:i] {
@ -106,7 +106,7 @@ func TestBufferedSeriesIterator(t *testing.T) {
require.Equal(t, ev, v, "value mismatch") require.Equal(t, ev, v, "value mismatch")
} }
prevSampleEq := func(ets int64, ev float64, eok bool) { prevSampleEq := func(ets int64, ev float64, eok bool) {
ts, v, ok := it.PeekBack(1) ts, v, _, ok := it.PeekBack(1)
require.Equal(t, eok, ok, "exist mismatch") require.Equal(t, eok, ok, "exist mismatch")
require.Equal(t, ets, ts, "timestamp mismatch") require.Equal(t, ets, ts, "timestamp mismatch")
require.Equal(t, ev, v, "value mismatch") require.Equal(t, ev, v, "value mismatch")
@ -196,8 +196,8 @@ type mockSeriesIterator struct {
func (m *mockSeriesIterator) Seek(t int64) bool { return m.seek(t) } func (m *mockSeriesIterator) Seek(t int64) bool { return m.seek(t) }
func (m *mockSeriesIterator) At() (int64, float64) { return m.at() } func (m *mockSeriesIterator) At() (int64, float64) { return m.at() }
func (m *mockSeriesIterator) AtHistogram() (int64, histogram.Histogram) { func (m *mockSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return 0, histogram.Histogram{} return 0, nil
} }
func (m *mockSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (m *mockSeriesIterator) ChunkEncoding() chunkenc.Encoding {
return chunkenc.EncXOR return chunkenc.EncXOR
@ -216,11 +216,11 @@ func newFakeSeriesIterator(nsamples, step int64) *fakeSeriesIterator {
} }
func (it *fakeSeriesIterator) At() (int64, float64) { func (it *fakeSeriesIterator) At() (int64, float64) {
return it.idx * it.step, 123 // value doesn't matter return it.idx * it.step, 123 // Value doesn't matter.
} }
func (it *fakeSeriesIterator) AtHistogram() (int64, histogram.Histogram) { func (it *fakeSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return it.idx * it.step, histogram.Histogram{} // value doesn't matter return it.idx * it.step, &histogram.Histogram{} // Value doesn't matter.
} }
func (it *fakeSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (it *fakeSeriesIterator) ChunkEncoding() chunkenc.Encoding {

View file

@ -173,7 +173,7 @@ func (f *fanoutAppender) AppendExemplar(ref uint64, l labels.Labels, e exemplar.
return ref, nil return ref, nil
} }
func (f *fanoutAppender) AppendHistogram(ref uint64, l labels.Labels, t int64, h histogram.Histogram) (uint64, error) { func (f *fanoutAppender) AppendHistogram(ref uint64, l labels.Labels, t int64, h *histogram.Histogram) (uint64, error) {
ref, err := f.primary.AppendHistogram(ref, l, t, h) ref, err := f.primary.AppendHistogram(ref, l, t, h)
if err != nil { if err != nil {
return ref, err return ref, err

View file

@ -222,7 +222,7 @@ type HistogramAppender interface {
// numbers are ephemeral and may be rejected in calls to Append() at any // numbers are ephemeral and may be rejected in calls to Append() at any
// point. Adding the sample via Append() returns a new reference number. // point. Adding the sample via Append() returns a new reference number.
// If the reference is 0 it must not be used for caching. // If the reference is 0 it must not be used for caching.
AppendHistogram(ref uint64, l labels.Labels, t int64, h histogram.Histogram) (uint64, error) AppendHistogram(ref uint64, l labels.Labels, t int64, h *histogram.Histogram) (uint64, error)
} }
// SeriesSet contains a set of series. // SeriesSet contains a set of series.

View file

@ -16,20 +16,31 @@ package storage
import ( import (
"math" "math"
"github.com/prometheus/prometheus/model/histogram"
"github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunkenc"
) )
// ValueType defines the type of a value in the storage.
type ValueType int
const (
ValNone ValueType = iota
ValFloat
ValHistogram
)
// MemoizedSeriesIterator wraps an iterator with a buffer to look back the previous element. // MemoizedSeriesIterator wraps an iterator with a buffer to look back the previous element.
type MemoizedSeriesIterator struct { type MemoizedSeriesIterator struct {
it chunkenc.Iterator it chunkenc.Iterator
delta int64 delta int64
lastTime int64 lastTime int64
ok bool valueType ValueType
// Keep track of the previously returned value. // Keep track of the previously returned value.
prevTime int64 prevTime int64
prevValue float64 prevValue float64
prevHistogram *histogram.Histogram
} }
// NewMemoizedEmptyIterator is like NewMemoizedIterator but it's initialised with an empty iterator. // NewMemoizedEmptyIterator is like NewMemoizedIterator but it's initialised with an empty iterator.
@ -53,22 +64,26 @@ func NewMemoizedIterator(it chunkenc.Iterator, delta int64) *MemoizedSeriesItera
func (b *MemoizedSeriesIterator) Reset(it chunkenc.Iterator) { func (b *MemoizedSeriesIterator) Reset(it chunkenc.Iterator) {
b.it = it b.it = it
b.lastTime = math.MinInt64 b.lastTime = math.MinInt64
b.ok = true
b.prevTime = math.MinInt64 b.prevTime = math.MinInt64
it.Next() it.Next()
if it.ChunkEncoding() == chunkenc.EncHistogram {
b.valueType = ValHistogram
} else {
b.valueType = ValFloat
}
} }
// PeekPrev returns the previous element of the iterator. If there is none buffered, // PeekPrev returns the previous element of the iterator. If there is none buffered,
// ok is false. // ok is false.
func (b *MemoizedSeriesIterator) PeekPrev() (t int64, v float64, ok bool) { func (b *MemoizedSeriesIterator) PeekPrev() (t int64, v float64, h *histogram.Histogram, ok bool) {
if b.prevTime == math.MinInt64 { if b.prevTime == math.MinInt64 {
return 0, 0, false return 0, 0, nil, false
} }
return b.prevTime, b.prevValue, true return b.prevTime, b.prevValue, b.prevHistogram, true
} }
// Seek advances the iterator to the element at time t or greater. // Seek advances the iterator to the element at time t or greater.
func (b *MemoizedSeriesIterator) Seek(t int64) bool { func (b *MemoizedSeriesIterator) Seek(t int64) ValueType {
t0 := t - b.delta t0 := t - b.delta
if t0 > b.lastTime { if t0 > b.lastTime {
@ -76,40 +91,61 @@ func (b *MemoizedSeriesIterator) Seek(t int64) bool {
// more than the delta. // more than the delta.
b.prevTime = math.MinInt64 b.prevTime = math.MinInt64
b.ok = b.it.Seek(t0) ok := b.it.Seek(t0)
if !b.ok { if !ok {
return false b.valueType = ValNone
return ValNone
}
if b.it.ChunkEncoding() == chunkenc.EncHistogram {
b.valueType = ValHistogram
b.lastTime, _ = b.it.AtHistogram()
} else {
b.valueType = ValFloat
b.lastTime, _ = b.it.At()
} }
b.lastTime, _ = b.it.At()
} }
if b.lastTime >= t { if b.lastTime >= t {
return true return b.valueType
} }
for b.Next() { for b.Next() != ValNone {
if b.lastTime >= t { if b.lastTime >= t {
return true return b.valueType
} }
} }
return false return ValNone
} }
// Next advances the iterator to the next element. // Next advances the iterator to the next element.
func (b *MemoizedSeriesIterator) Next() bool { func (b *MemoizedSeriesIterator) Next() ValueType {
if !b.ok { if b.valueType == ValNone {
return false return ValNone
} }
// Keep track of the previous element. // Keep track of the previous element.
b.prevTime, b.prevValue = b.it.At() if b.it.ChunkEncoding() == chunkenc.EncHistogram {
b.prevTime, b.prevHistogram = b.it.AtHistogram()
b.ok = b.it.Next() b.prevValue = 0
if b.ok { } else {
b.lastTime, _ = b.it.At() b.prevTime, b.prevValue = b.it.At()
b.prevHistogram = nil
} }
return b.ok ok := b.it.Next()
if ok {
if b.it.ChunkEncoding() == chunkenc.EncHistogram {
b.lastTime, _ = b.it.AtHistogram()
b.valueType = ValHistogram
} else {
b.lastTime, _ = b.it.At()
b.valueType = ValFloat
}
} else {
b.valueType = ValNone
}
return b.valueType
} }
// Values returns the current element of the iterator. // Values returns the current element of the iterator.
@ -117,6 +153,11 @@ func (b *MemoizedSeriesIterator) Values() (int64, float64) {
return b.it.At() return b.it.At()
} }
// Values returns the current element of the iterator.
func (b *MemoizedSeriesIterator) HistogramValues() (int64, *histogram.Histogram) {
return b.it.AtHistogram()
}
// Err returns the last encountered error. // Err returns the last encountered error.
func (b *MemoizedSeriesIterator) Err() error { func (b *MemoizedSeriesIterator) Err() error {
return b.it.Err() return b.it.Err()

View file

@ -20,6 +20,7 @@ import (
) )
func TestMemoizedSeriesIterator(t *testing.T) { func TestMemoizedSeriesIterator(t *testing.T) {
// TODO(beorn7): Include histograms in testing.
var it *MemoizedSeriesIterator var it *MemoizedSeriesIterator
sampleEq := func(ets int64, ev float64) { sampleEq := func(ets int64, ev float64) {
@ -28,7 +29,7 @@ func TestMemoizedSeriesIterator(t *testing.T) {
require.Equal(t, ev, v, "value mismatch") require.Equal(t, ev, v, "value mismatch")
} }
prevSampleEq := func(ets int64, ev float64, eok bool) { prevSampleEq := func(ets int64, ev float64, eok bool) {
ts, v, ok := it.PeekPrev() ts, v, _, ok := it.PeekPrev()
require.Equal(t, eok, ok, "exist mismatch") require.Equal(t, eok, ok, "exist mismatch")
require.Equal(t, ets, ts, "timestamp mismatch") require.Equal(t, ets, ts, "timestamp mismatch")
require.Equal(t, ev, v, "value mismatch") require.Equal(t, ev, v, "value mismatch")
@ -45,29 +46,29 @@ func TestMemoizedSeriesIterator(t *testing.T) {
sample{t: 101, v: 10}, sample{t: 101, v: 10},
}), 2) }), 2)
require.True(t, it.Seek(-123), "seek failed") require.Equal(t, it.Seek(-123), ValFloat, "seek failed")
sampleEq(1, 2) sampleEq(1, 2)
prevSampleEq(0, 0, false) prevSampleEq(0, 0, false)
require.True(t, it.Next(), "next failed") require.Equal(t, it.Next(), ValFloat, "next failed")
sampleEq(2, 3) sampleEq(2, 3)
prevSampleEq(1, 2, true) prevSampleEq(1, 2, true)
require.True(t, it.Next(), "next failed") require.Equal(t, it.Next(), ValFloat, "next failed")
require.True(t, it.Next(), "next failed") require.Equal(t, it.Next(), ValFloat, "next failed")
require.True(t, it.Next(), "next failed") require.Equal(t, it.Next(), ValFloat, "next failed")
sampleEq(5, 6) sampleEq(5, 6)
prevSampleEq(4, 5, true) prevSampleEq(4, 5, true)
require.True(t, it.Seek(5), "seek failed") require.Equal(t, it.Seek(5), ValFloat, "seek failed")
sampleEq(5, 6) sampleEq(5, 6)
prevSampleEq(4, 5, true) prevSampleEq(4, 5, true)
require.True(t, it.Seek(101), "seek failed") require.Equal(t, it.Seek(101), ValFloat, "seek failed")
sampleEq(101, 10) sampleEq(101, 10)
prevSampleEq(100, 9, true) prevSampleEq(100, 9, true)
require.False(t, it.Next(), "next succeeded unexpectedly") require.Equal(t, it.Next(), ValNone, "next succeeded unexpectedly")
} }
func BenchmarkMemoizedSeriesIterator(b *testing.B) { func BenchmarkMemoizedSeriesIterator(b *testing.B) {
@ -78,7 +79,7 @@ func BenchmarkMemoizedSeriesIterator(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
b.ResetTimer() b.ResetTimer()
for it.Next() { for it.Next() != ValNone {
// scan everything // scan everything
} }
require.NoError(b, it.Err()) require.NoError(b, it.Err())

View file

@ -486,7 +486,7 @@ func (c *chainSampleIterator) At() (t int64, v float64) {
return c.curr.At() return c.curr.At()
} }
func (c *chainSampleIterator) AtHistogram() (int64, histogram.Histogram) { func (c *chainSampleIterator) AtHistogram() (int64, *histogram.Histogram) {
if c.curr == nil { if c.curr == nil {
panic("chainSampleIterator.AtHistogram() called before first .Next() or after .Next() returned false.") panic("chainSampleIterator.AtHistogram() called before first .Next() or after .Next() returned false.")
} }

View file

@ -62,116 +62,116 @@ func TestMergeQuerierWithChainMerger(t *testing.T) {
{ {
name: "one querier, two series", name: "one querier, two series",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}),
), ),
}, },
{ {
name: "two queriers, one different series each", name: "two queriers, one different series each",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}),
}, { }, {
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}),
), ),
}, },
{ {
name: "two time unsorted queriers, two series each", name: "two time unsorted queriers, two series each",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5}, sample{6, 6}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}, sample{6, 6, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3}, sample{4, 4}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 5}, sample{6, 6}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}, sample{6, 6, nil}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{4, 4}}, []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}},
), ),
), ),
}, },
{ {
name: "five queriers, only two queriers have two time unsorted series each", name: "five queriers, only two queriers have two time unsorted series each",
querierSeries: [][]Series{{}, {}, { querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5}, sample{6, 6}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}, sample{6, 6, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3}, sample{4, 4}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}),
}, {}}, }, {}},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 5}, sample{6, 6}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}, sample{6, 6, nil}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{4, 4}}, []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}},
), ),
), ),
}, },
{ {
name: "two queriers, only two queriers have two time unsorted series each, with 3 noop and one nil querier together", name: "two queriers, only two queriers have two time unsorted series each, with 3 noop and one nil querier together",
querierSeries: [][]Series{{}, {}, { querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5}, sample{6, 6}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}, sample{6, 6, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3}, sample{4, 4}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}),
}, {}}, }, {}},
extraQueriers: []Querier{NoopQuerier(), NoopQuerier(), nil, NoopQuerier()}, extraQueriers: []Querier{NoopQuerier(), NoopQuerier(), nil, NoopQuerier()},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 5}, sample{6, 6}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}, sample{6, 6, nil}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{4, 4}}, []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}},
), ),
), ),
}, },
{ {
name: "two queriers, with two series, one is overlapping", name: "two queriers, with two series, one is overlapping",
querierSeries: [][]Series{{}, {}, { querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 21}, sample{3, 31}, sample{5, 5}, sample{6, 6}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 21, nil}, sample{3, 31, nil}, sample{5, 5, nil}, sample{6, 6, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 22}, sample{3, 32}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 22, nil}, sample{3, 32, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3}, sample{4, 4}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}),
}, {}}, }, {}},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1}, sample{2, 21}, sample{3, 31}, sample{5, 5}, sample{6, 6}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 21, nil}, sample{3, 31, nil}, sample{5, 5, nil}, sample{6, 6, nil}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{4, 4}}, []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}},
), ),
), ),
}, },
{ {
name: "two queries, one with NaN samples series", name: "two queries, one with NaN samples series",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN()}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}}),
}, { }, {
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN()}, sample{1, 1}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}, sample{1, 1, nil}}),
), ),
}, },
} { } {
@ -245,108 +245,108 @@ func TestMergeChunkQuerierWithNoVerticalChunkSeriesMerger(t *testing.T) {
{ {
name: "one querier, two series", name: "one querier, two series",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}}, []tsdbutil.Sample{sample{2, 2}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}}, []tsdbutil.Sample{sample{2, 2}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}),
), ),
}, },
{ {
name: "two secondaries, one different series each", name: "two secondaries, one different series each",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}}, []tsdbutil.Sample{sample{2, 2}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}}, []tsdbutil.Sample{sample{2, 2}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}),
), ),
}, },
{ {
name: "two secondaries, two not in time order series each", name: "two secondaries, two not in time order series each",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5}}, []tsdbutil.Sample{sample{6, 6}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{6, 6, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}}, []tsdbutil.Sample{sample{2, 2}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3}}, []tsdbutil.Sample{sample{4, 4}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{4, 4, nil}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}},
[]tsdbutil.Sample{sample{3, 3}}, []tsdbutil.Sample{sample{3, 3, nil}},
[]tsdbutil.Sample{sample{5, 5}}, []tsdbutil.Sample{sample{5, 5, nil}},
[]tsdbutil.Sample{sample{6, 6}}, []tsdbutil.Sample{sample{6, 6, nil}},
), ),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0}, sample{1, 1}}, []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}},
[]tsdbutil.Sample{sample{2, 2}}, []tsdbutil.Sample{sample{2, 2, nil}},
[]tsdbutil.Sample{sample{3, 3}}, []tsdbutil.Sample{sample{3, 3, nil}},
[]tsdbutil.Sample{sample{4, 4}}, []tsdbutil.Sample{sample{4, 4, nil}},
), ),
), ),
}, },
{ {
name: "five secondaries, only two have two not in time order series each", name: "five secondaries, only two have two not in time order series each",
chkQuerierSeries: [][]ChunkSeries{{}, {}, { chkQuerierSeries: [][]ChunkSeries{{}, {}, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5}}, []tsdbutil.Sample{sample{6, 6}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{6, 6, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}}, []tsdbutil.Sample{sample{2, 2}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3}}, []tsdbutil.Sample{sample{4, 4}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{4, 4, nil}}),
}, {}}, }, {}},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}},
[]tsdbutil.Sample{sample{3, 3}}, []tsdbutil.Sample{sample{3, 3, nil}},
[]tsdbutil.Sample{sample{5, 5}}, []tsdbutil.Sample{sample{5, 5, nil}},
[]tsdbutil.Sample{sample{6, 6}}, []tsdbutil.Sample{sample{6, 6, nil}},
), ),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0}, sample{1, 1}}, []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}},
[]tsdbutil.Sample{sample{2, 2}}, []tsdbutil.Sample{sample{2, 2, nil}},
[]tsdbutil.Sample{sample{3, 3}}, []tsdbutil.Sample{sample{3, 3, nil}},
[]tsdbutil.Sample{sample{4, 4}}, []tsdbutil.Sample{sample{4, 4, nil}},
), ),
), ),
}, },
{ {
name: "two secondaries, with two not in time order series each, with 3 noop queries and one nil together", name: "two secondaries, with two not in time order series each, with 3 noop queries and one nil together",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5}}, []tsdbutil.Sample{sample{6, 6}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{6, 6, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}}, []tsdbutil.Sample{sample{2, 2}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3}}, []tsdbutil.Sample{sample{4, 4}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{4, 4, nil}}),
}}, }},
extraQueriers: []ChunkQuerier{NoopChunkedQuerier(), NoopChunkedQuerier(), nil, NoopChunkedQuerier()}, extraQueriers: []ChunkQuerier{NoopChunkedQuerier(), NoopChunkedQuerier(), nil, NoopChunkedQuerier()},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}},
[]tsdbutil.Sample{sample{3, 3}}, []tsdbutil.Sample{sample{3, 3, nil}},
[]tsdbutil.Sample{sample{5, 5}}, []tsdbutil.Sample{sample{5, 5, nil}},
[]tsdbutil.Sample{sample{6, 6}}, []tsdbutil.Sample{sample{6, 6, nil}},
), ),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0}, sample{1, 1}}, []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}},
[]tsdbutil.Sample{sample{2, 2}}, []tsdbutil.Sample{sample{2, 2, nil}},
[]tsdbutil.Sample{sample{3, 3}}, []tsdbutil.Sample{sample{3, 3, nil}},
[]tsdbutil.Sample{sample{4, 4}}, []tsdbutil.Sample{sample{4, 4, nil}},
), ),
), ),
}, },
{ {
name: "two queries, one with NaN samples series", name: "two queries, one with NaN samples series",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN()}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN()}}, []tsdbutil.Sample{sample{1, 1}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}}, []tsdbutil.Sample{sample{1, 1, nil}}),
), ),
}, },
} { } {
@ -399,9 +399,9 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
{ {
name: "single series", name: "single series",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}),
}, },
{ {
name: "two empty series", name: "two empty series",
@ -414,55 +414,55 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
{ {
name: "two non overlapping", name: "two non overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}, sample{5, 5}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, sample{5, 5, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7}, sample{9, 9}}, []tsdbutil.Sample{sample{10, 10}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}, sample{5, 5}}, []tsdbutil.Sample{sample{7, 7}, sample{9, 9}}, []tsdbutil.Sample{sample{10, 10}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, sample{5, 5, nil}}, []tsdbutil.Sample{sample{7, 7, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}),
}, },
{ {
name: "two overlapping", name: "two overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}, sample{8, 8}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, sample{8, 8, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7}, sample{9, 9}}, []tsdbutil.Sample{sample{10, 10}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, []tsdbutil.Sample{sample{3, 3}, sample{7, 7}, sample{8, 8}, sample{9, 9}}, []tsdbutil.Sample{sample{10, 10}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, sample{7, 7, nil}, sample{8, 8, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}),
}, },
{ {
name: "two duplicated", name: "two duplicated",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 5}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2}, sample{3, 3}, sample{5, 5}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 5}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}),
}, },
{ {
name: "three overlapping", name: "three overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 5}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2}, sample{3, 3}, sample{6, 6}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{6, 6, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0}, sample{4, 4}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil}, sample{4, 4, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{4, 4}, sample{5, 5}, sample{6, 6}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}, sample{6, 6, nil}}),
}, },
{ {
name: "three in chained overlap", name: "three in chained overlap",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 5}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{4, 4}, sample{6, 66}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{4, 4, nil}, sample{6, 66, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{6, 6}, sample{10, 10}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{6, 6, nil}, sample{10, 10, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{4, 4}, sample{5, 5}, sample{6, 66}, sample{10, 10}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}, sample{6, 66, nil}, sample{10, 10, nil}}),
}, },
{ {
name: "three in chained overlap complex", name: "three in chained overlap complex",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0}, sample{5, 5}}, []tsdbutil.Sample{sample{10, 10}, sample{15, 15}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil}, sample{5, 5, nil}}, []tsdbutil.Sample{sample{10, 10, nil}, sample{15, 15, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2}, sample{20, 20}}, []tsdbutil.Sample{sample{25, 25}, sample{30, 30}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil}, sample{20, 20, nil}}, []tsdbutil.Sample{sample{25, 25, nil}, sample{30, 30, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{18, 18}, sample{26, 26}}, []tsdbutil.Sample{sample{31, 31}, sample{35, 35}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{18, 18, nil}, sample{26, 26, nil}}, []tsdbutil.Sample{sample{31, 31, nil}, sample{35, 35, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{0, 0}, sample{2, 2}, sample{5, 5}, sample{10, 10}, sample{15, 15}, sample{18, 18}, sample{20, 20}, sample{25, 25}, sample{26, 26}, sample{30, 30}}, []tsdbutil.Sample{sample{0, 0, nil}, sample{2, 2, nil}, sample{5, 5, nil}, sample{10, 10, nil}, sample{15, 15, nil}, sample{18, 18, nil}, sample{20, 20, nil}, sample{25, 25, nil}, sample{26, 26, nil}, sample{30, 30, nil}},
[]tsdbutil.Sample{sample{31, 31}, sample{35, 35}}, []tsdbutil.Sample{sample{31, 31, nil}, sample{35, 35, nil}},
), ),
}, },
{ {
@ -598,37 +598,37 @@ func TestChainSampleIterator(t *testing.T) {
}{ }{
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0}, sample{1, 1}}), NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}),
}, },
expected: []tsdbutil.Sample{sample{0, 0}, sample{1, 1}}, expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0}, sample{1, 1}}), NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}),
NewListSeriesIterator(samples{sample{2, 2}, sample{3, 3}}), NewListSeriesIterator(samples{sample{2, 2, nil}, sample{3, 3, nil}}),
}, },
expected: []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}, sample{3, 3}}, expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0}, sample{3, 3}}), NewListSeriesIterator(samples{sample{0, 0, nil}, sample{3, 3, nil}}),
NewListSeriesIterator(samples{sample{1, 1}, sample{4, 4}}), NewListSeriesIterator(samples{sample{1, 1, nil}, sample{4, 4, nil}}),
NewListSeriesIterator(samples{sample{2, 2}, sample{5, 5}}), NewListSeriesIterator(samples{sample{2, 2, nil}, sample{5, 5, nil}}),
}, },
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{0, 0}, sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{4, 4}, sample{5, 5}}, sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}},
}, },
// Overlap. // Overlap.
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0}, sample{1, 1}}), NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}),
NewListSeriesIterator(samples{sample{0, 0}, sample{2, 2}}), NewListSeriesIterator(samples{sample{0, 0, nil}, sample{2, 2, nil}}),
NewListSeriesIterator(samples{sample{2, 2}, sample{3, 3}}), NewListSeriesIterator(samples{sample{2, 2, nil}, sample{3, 3, nil}}),
NewListSeriesIterator(samples{}), NewListSeriesIterator(samples{}),
NewListSeriesIterator(samples{}), NewListSeriesIterator(samples{}),
NewListSeriesIterator(samples{}), NewListSeriesIterator(samples{}),
}, },
expected: []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}, sample{3, 3}}, expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}},
}, },
} { } {
merged := NewChainSampleIterator(tc.input) merged := NewChainSampleIterator(tc.input)
@ -646,42 +646,42 @@ func TestChainSampleIteratorSeek(t *testing.T) {
}{ }{
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0}, sample{1, 1}, sample{2, 2}}), NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}),
}, },
seek: 1, seek: 1,
expected: []tsdbutil.Sample{sample{1, 1}, sample{2, 2}}, expected: []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0}, sample{1, 1}}), NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}),
NewListSeriesIterator(samples{sample{2, 2}, sample{3, 3}}), NewListSeriesIterator(samples{sample{2, 2, nil}, sample{3, 3, nil}}),
}, },
seek: 2, seek: 2,
expected: []tsdbutil.Sample{sample{2, 2}, sample{3, 3}}, expected: []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0}, sample{3, 3}}), NewListSeriesIterator(samples{sample{0, 0, nil}, sample{3, 3, nil}}),
NewListSeriesIterator(samples{sample{1, 1}, sample{4, 4}}), NewListSeriesIterator(samples{sample{1, 1, nil}, sample{4, 4, nil}}),
NewListSeriesIterator(samples{sample{2, 2}, sample{5, 5}}), NewListSeriesIterator(samples{sample{2, 2, nil}, sample{5, 5, nil}}),
}, },
seek: 2, seek: 2,
expected: []tsdbutil.Sample{sample{2, 2}, sample{3, 3}, sample{4, 4}, sample{5, 5}}, expected: []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0}, sample{2, 2}, sample{3, 3}}), NewListSeriesIterator(samples{sample{0, 0, nil}, sample{2, 2, nil}, sample{3, 3, nil}}),
NewListSeriesIterator(samples{sample{0, 0}, sample{1, 1}, sample{2, 2}}), NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}),
}, },
seek: 0, seek: 0,
expected: []tsdbutil.Sample{sample{0, 0}, sample{1, 1}, sample{2, 2}, sample{3, 3}}, expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}},
}, },
} { } {
merged := NewChainSampleIterator(tc.input) merged := NewChainSampleIterator(tc.input)
actual := []tsdbutil.Sample{} actual := []tsdbutil.Sample{}
if merged.Seek(tc.seek) { if merged.Seek(tc.seek) {
t, v := merged.At() t, v := merged.At()
actual = append(actual, sample{t, v}) actual = append(actual, sample{t, v, nil})
} }
s, err := ExpandSamples(merged, nil) s, err := ExpandSamples(merged, nil)
require.NoError(t, err) require.NoError(t, err)

View file

@ -370,11 +370,11 @@ func (c *concreteSeriesIterator) At() (t int64, v float64) {
return s.Timestamp, s.Value return s.Timestamp, s.Value
} }
// AtHistogram always returns (0, histogram.Histogram{}) because there is no // AtHistogram always returns (0, nil) because there is no support for histogram
// support for histogram values yet. // values yet.
// TODO(beorn7): Fix that for histogram support in remote storage. // TODO(beorn7): Fix that for histogram support in remote storage.
func (c *concreteSeriesIterator) AtHistogram() (int64, histogram.Histogram) { func (c *concreteSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return 0, histogram.Histogram{} return 0, nil
} }
func (c *concreteSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (c *concreteSeriesIterator) ChunkEncoding() chunkenc.Encoding {

View file

@ -241,7 +241,7 @@ func (t *timestampTracker) AppendExemplar(_ uint64, _ labels.Labels, _ exemplar.
return 0, nil return 0, nil
} }
func (t *timestampTracker) AppendHistogram(_ uint64, _ labels.Labels, ts int64, _ histogram.Histogram) (uint64, error) { func (t *timestampTracker) AppendHistogram(_ uint64, _ labels.Labels, ts int64, _ *histogram.Histogram) (uint64, error) {
t.histograms++ t.histograms++
if ts > t.highestTimestamp { if ts > t.highestTimestamp {
t.highestTimestamp = ts t.highestTimestamp = ts

View file

@ -188,7 +188,7 @@ func (m *mockAppendable) AppendExemplar(_ uint64, l labels.Labels, e exemplar.Ex
return 0, nil return 0, nil
} }
func (*mockAppendable) AppendHistogram(ref uint64, l labels.Labels, t int64, h histogram.Histogram) (uint64, error) { func (*mockAppendable) AppendHistogram(ref uint64, l labels.Labels, t int64, h *histogram.Histogram) (uint64, error) {
// TODO(beorn7): Noop until we implement sparse histograms over remote write. // TODO(beorn7): Noop until we implement sparse histograms over remote write.
return 0, nil return 0, nil
} }

View file

@ -91,10 +91,9 @@ func (it *listSeriesIterator) At() (int64, float64) {
return s.T(), s.V() return s.T(), s.V()
} }
// AtHistogram always returns (0, histogram.Histogram{}) because there is no func (it *listSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
// support for histogram values yet. s := it.samples.Get(it.idx)
func (it *listSeriesIterator) AtHistogram() (int64, histogram.Histogram) { return s.T(), s.H()
return 0, histogram.Histogram{}
} }
func (it *listSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (it *listSeriesIterator) ChunkEncoding() chunkenc.Encoding {
@ -297,19 +296,25 @@ func (e errChunksIterator) Err() error { return e.err }
// ExpandSamples iterates over all samples in the iterator, buffering all in slice. // ExpandSamples iterates over all samples in the iterator, buffering all in slice.
// Optionally it takes samples constructor, useful when you want to compare sample slices with different // Optionally it takes samples constructor, useful when you want to compare sample slices with different
// sample implementations. if nil, sample type from this package will be used. // sample implementations. if nil, sample type from this package will be used.
func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, v float64) tsdbutil.Sample) ([]tsdbutil.Sample, error) { func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, v float64, h *histogram.Histogram) tsdbutil.Sample) ([]tsdbutil.Sample, error) {
if newSampleFn == nil { if newSampleFn == nil {
newSampleFn = func(t int64, v float64) tsdbutil.Sample { return sample{t, v} } newSampleFn = func(t int64, v float64, h *histogram.Histogram) tsdbutil.Sample { return sample{t, v, h} }
} }
var result []tsdbutil.Sample var result []tsdbutil.Sample
for iter.Next() { for iter.Next() {
t, v := iter.At() // Only after Next() returned true, it is safe to ask for the ChunkEncoding.
// NaNs can't be compared normally, so substitute for another value. if iter.ChunkEncoding() == chunkenc.EncHistogram {
if math.IsNaN(v) { t, h := iter.AtHistogram()
v = -42 result = append(result, newSampleFn(t, 0, h))
} else {
t, v := iter.At()
// NaNs can't be compared normally, so substitute for another value.
if math.IsNaN(v) {
v = -42
}
result = append(result, newSampleFn(t, v, nil))
} }
result = append(result, newSampleFn(t, v))
} }
return result, iter.Err() return result, iter.Err()
} }

View file

@ -179,7 +179,7 @@ func TestCorruptedChunk(t *testing.T) {
require.NoError(t, os.RemoveAll(tmpdir)) require.NoError(t, os.RemoveAll(tmpdir))
}() }()
series := storage.NewListSeries(labels.FromStrings("a", "b"), []tsdbutil.Sample{sample{1, 1}}) series := storage.NewListSeries(labels.FromStrings("a", "b"), []tsdbutil.Sample{sample{1, 1, nil}})
blockDir := createBlock(t, tmpdir, []storage.Series{series}) blockDir := createBlock(t, tmpdir, []storage.Series{series})
files, err := sequenceFiles(chunkDir(blockDir)) files, err := sequenceFiles(chunkDir(blockDir))
require.NoError(t, err) require.NoError(t, err)
@ -226,7 +226,7 @@ func TestLabelValuesWithMatchers(t *testing.T) {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{ seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "unique", Value: fmt.Sprintf("value%d", i)},
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)}, {Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
}, []tsdbutil.Sample{sample{100, 0}})) }, []tsdbutil.Sample{sample{100, 0, nil}}))
} }
blockDir := createBlock(t, tmpdir, seriesEntries) blockDir := createBlock(t, tmpdir, seriesEntries)
@ -389,7 +389,7 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) {
{Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "unique", Value: fmt.Sprintf("value%d", i)},
{Name: "tens", Value: fmt.Sprintf("value%d", i/(metricCount/10))}, {Name: "tens", Value: fmt.Sprintf("value%d", i/(metricCount/10))},
{Name: "ninety", Value: fmt.Sprintf("value%d", i/(metricCount/10)/9)}, // "0" for the first 90%, then "1" {Name: "ninety", Value: fmt.Sprintf("value%d", i/(metricCount/10)/9)}, // "0" for the first 90%, then "1"
}, []tsdbutil.Sample{sample{100, 0}})) }, []tsdbutil.Sample{sample{100, 0, nil}}))
} }
blockDir := createBlock(b, tmpdir, seriesEntries) blockDir := createBlock(b, tmpdir, seriesEntries)
@ -427,13 +427,13 @@ func TestLabelNamesWithMatchers(t *testing.T) {
for i := 0; i < 100; i++ { for i := 0; i < 100; i++ {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{ seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "unique", Value: fmt.Sprintf("value%d", i)},
}, []tsdbutil.Sample{sample{100, 0}})) }, []tsdbutil.Sample{sample{100, 0, nil}}))
if i%10 == 0 { if i%10 == 0 {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{ seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "unique", Value: fmt.Sprintf("value%d", i)},
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)}, {Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
}, []tsdbutil.Sample{sample{100, 0}})) }, []tsdbutil.Sample{sample{100, 0, nil}}))
} }
if i%20 == 0 { if i%20 == 0 {
@ -441,7 +441,7 @@ func TestLabelNamesWithMatchers(t *testing.T) {
{Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "unique", Value: fmt.Sprintf("value%d", i)},
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)}, {Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
{Name: "twenties", Value: fmt.Sprintf("value%d", i/20)}, {Name: "twenties", Value: fmt.Sprintf("value%d", i/20)},
}, []tsdbutil.Sample{sample{100, 0}})) }, []tsdbutil.Sample{sample{100, 0, nil}}))
} }
} }

View file

@ -82,13 +82,15 @@ type Chunk interface {
// Appender adds sample pairs to a chunk. // Appender adds sample pairs to a chunk.
type Appender interface { type Appender interface {
Append(int64, float64) Append(int64, float64)
AppendHistogram(t int64, h histogram.Histogram) AppendHistogram(t int64, h *histogram.Histogram)
} }
// Iterator is a simple iterator that can only get the next value. // Iterator is a simple iterator that can only get the next value.
// Iterator iterates over the samples of a time series, in timestamp-increasing order. // Iterator iterates over the samples of a time series, in timestamp-increasing order.
type Iterator interface { type Iterator interface {
// Next advances the iterator by one. // Next advances the iterator by one.
// TODO(beorn7): Perhaps this should return if the next value is a float or a histogram
// to make it easier calling the right method (At vs AtHistogram)?
Next() bool Next() bool
// Seek advances the iterator forward to the first sample with the timestamp equal or greater than t. // Seek advances the iterator forward to the first sample with the timestamp equal or greater than t.
// If current sample found by previous `Next` or `Seek` operation already has this property, Seek has no effect. // If current sample found by previous `Next` or `Seek` operation already has this property, Seek has no effect.
@ -100,7 +102,7 @@ type Iterator interface {
At() (int64, float64) At() (int64, float64)
// AtHistogram returns the current timestamp/histogram pair. // AtHistogram returns the current timestamp/histogram pair.
// Before the iterator has advanced AtHistogram behaviour is unspecified. // Before the iterator has advanced AtHistogram behaviour is unspecified.
AtHistogram() (int64, histogram.Histogram) AtHistogram() (int64, *histogram.Histogram)
// Err returns the current error. It should be used only after iterator is // Err returns the current error. It should be used only after iterator is
// exhausted, that is `Next` or `Seek` returns false. // exhausted, that is `Next` or `Seek` returns false.
Err() error Err() error
@ -117,8 +119,8 @@ type nopIterator struct{}
func (nopIterator) Seek(int64) bool { return false } func (nopIterator) Seek(int64) bool { return false }
func (nopIterator) At() (int64, float64) { return math.MinInt64, 0 } func (nopIterator) At() (int64, float64) { return math.MinInt64, 0 }
func (nopIterator) AtHistogram() (int64, histogram.Histogram) { func (nopIterator) AtHistogram() (int64, *histogram.Histogram) {
return math.MinInt64, histogram.Histogram{} return math.MinInt64, nil
} }
func (nopIterator) Next() bool { return false } func (nopIterator) Next() bool { return false }
func (nopIterator) Err() error { return nil } func (nopIterator) Err() error { return nil }

View file

@ -245,7 +245,7 @@ func (a *HistogramAppender) Append(int64, float64) {
// The method returns an additional boolean set to true if it is not appendable // The method returns an additional boolean set to true if it is not appendable
// because of a counter reset. If the given sample is stale, it is always ok to // because of a counter reset. If the given sample is stale, it is always ok to
// append. If counterReset is true, okToAppend is always false. // append. If counterReset is true, okToAppend is always false.
func (a *HistogramAppender) Appendable(h histogram.Histogram) ( func (a *HistogramAppender) Appendable(h *histogram.Histogram) (
positiveInterjections, negativeInterjections []Interjection, positiveInterjections, negativeInterjections []Interjection,
okToAppend bool, counterReset bool, okToAppend bool, counterReset bool,
) { ) {
@ -369,14 +369,14 @@ func counterResetInAnyBucket(oldBuckets, newBuckets []int64, oldSpans, newSpans
// the histogram is properly structured, e.g. the number of buckets used // the histogram is properly structured, e.g. the number of buckets used
// corresponds to the number conveyed by the span structures. First call // corresponds to the number conveyed by the span structures. First call
// Appendable() and act accordingly! // Appendable() and act accordingly!
func (a *HistogramAppender) AppendHistogram(t int64, h histogram.Histogram) { func (a *HistogramAppender) AppendHistogram(t int64, h *histogram.Histogram) {
var tDelta, cntDelta, zCntDelta int64 var tDelta, cntDelta, zCntDelta int64
num := binary.BigEndian.Uint16(a.b.bytes()) num := binary.BigEndian.Uint16(a.b.bytes())
if value.IsStaleNaN(h.Sum) { if value.IsStaleNaN(h.Sum) {
// Emptying out other fields to write no buckets, and an empty // Emptying out other fields to write no buckets, and an empty
// layout in case of first histogram in the chunk. // layout in case of first histogram in the chunk.
h = histogram.Histogram{Sum: h.Sum} h = &histogram.Histogram{Sum: h.Sum}
} }
switch num { switch num {
@ -401,7 +401,7 @@ func (a *HistogramAppender) AppendHistogram(t int64, h histogram.Histogram) {
// Now store the actual data. // Now store the actual data.
putVarbitInt(a.b, t) putVarbitInt(a.b, t)
putVarbitUint(a.b, h.Count) putVarbitUint(a.b, h.Count)
putVarbitUint(a.b, h.ZeroCount) // putVarbitUint(a.b, h.ZeroCount)
a.b.writeBits(math.Float64bits(h.Sum), 64) a.b.writeBits(math.Float64bits(h.Sum), 64)
for _, b := range h.PositiveBuckets { for _, b := range h.PositiveBuckets {
putVarbitInt(a.b, b) putVarbitInt(a.b, b)
@ -582,11 +582,11 @@ func (it *histogramIterator) ChunkEncoding() Encoding {
return EncHistogram return EncHistogram
} }
func (it *histogramIterator) AtHistogram() (int64, histogram.Histogram) { func (it *histogramIterator) AtHistogram() (int64, *histogram.Histogram) {
if value.IsStaleNaN(it.sum) { if value.IsStaleNaN(it.sum) {
return it.t, histogram.Histogram{Sum: it.sum} return it.t, &histogram.Histogram{Sum: it.sum}
} }
return it.t, histogram.Histogram{ return it.t, &histogram.Histogram{
Count: it.cnt, Count: it.cnt,
ZeroCount: it.zCnt, ZeroCount: it.zCnt,
Sum: it.sum, Sum: it.sum,

View file

@ -30,7 +30,7 @@ func TestHistogramChunkSameBuckets(t *testing.T) {
require.Equal(t, 0, c.NumSamples()) require.Equal(t, 0, c.NumSamples())
ts := int64(1234567890) ts := int64(1234567890)
h := histogram.Histogram{ h := &histogram.Histogram{
Count: 5, Count: 5,
ZeroCount: 2, ZeroCount: 2,
Sum: 18.4, Sum: 18.4,
@ -48,6 +48,7 @@ func TestHistogramChunkSameBuckets(t *testing.T) {
// Add an updated histogram. // Add an updated histogram.
ts += 16 ts += 16
h = h.Copy()
h.Count += 9 h.Count += 9
h.ZeroCount++ h.ZeroCount++
h.Sum = 24.4 h.Sum = 24.4
@ -61,6 +62,7 @@ func TestHistogramChunkSameBuckets(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
ts += 14 ts += 14
h = h.Copy()
h.Count += 13 h.Count += 13
h.ZeroCount += 2 h.ZeroCount += 2
h.Sum = 24.4 h.Sum = 24.4
@ -113,7 +115,7 @@ func TestHistogramChunkSameBuckets(t *testing.T) {
type res struct { type res struct {
t int64 t int64
h histogram.Histogram h *histogram.Histogram
} }
// Mimics the scenario described for compareSpans(). // Mimics the scenario described for compareSpans().
@ -126,7 +128,7 @@ func TestHistogramChunkBucketChanges(t *testing.T) {
require.Equal(t, 0, c.NumSamples()) require.Equal(t, 0, c.NumSamples())
ts1 := int64(1234567890) ts1 := int64(1234567890)
h1 := histogram.Histogram{ h1 := &histogram.Histogram{
Count: 5, Count: 5,
ZeroCount: 2, ZeroCount: 2,
Sum: 18.4, Sum: 18.4,
@ -147,7 +149,7 @@ func TestHistogramChunkBucketChanges(t *testing.T) {
// Add a new histogram that has expanded buckets. // Add a new histogram that has expanded buckets.
ts2 := ts1 + 16 ts2 := ts1 + 16
h2 := h1 h2 := h1.Copy()
h2.PositiveSpans = []histogram.Span{ h2.PositiveSpans = []histogram.Span{
{Offset: 0, Length: 3}, {Offset: 0, Length: 3},
{Offset: 1, Length: 1}, {Offset: 1, Length: 1},
@ -202,7 +204,7 @@ func TestHistoChunkAppendable(t *testing.T) {
require.Equal(t, 0, c.NumSamples()) require.Equal(t, 0, c.NumSamples())
ts := int64(1234567890) ts := int64(1234567890)
h1 := histogram.Histogram{ h1 := &histogram.Histogram{
Count: 5, Count: 5,
ZeroCount: 2, ZeroCount: 2,
Sum: 18.4, Sum: 18.4,

View file

@ -150,7 +150,7 @@ type xorAppender struct {
trailing uint8 trailing uint8
} }
func (a *xorAppender) AppendHistogram(t int64, h histogram.Histogram) { func (a *xorAppender) AppendHistogram(t int64, h *histogram.Histogram) {
panic("appended a histogram to an xor chunk") panic("appended a histogram to an xor chunk")
} }
@ -253,8 +253,8 @@ func (it *xorIterator) At() (int64, float64) {
return it.t, it.val return it.t, it.val
} }
func (it *xorIterator) AtHistogram() (int64, histogram.Histogram) { func (it *xorIterator) AtHistogram() (int64, *histogram.Histogram) {
panic("cannot call xorIterator.AtHistogram().") panic("cannot call xorIterator.AtHistogram")
} }
func (it *xorIterator) ChunkEncoding() Encoding { func (it *xorIterator) ChunkEncoding() Encoding {

View file

@ -1326,20 +1326,20 @@ func TestHeadCompactionWithHistograms(t *testing.T) {
require.NoError(t, head.Init(0)) require.NoError(t, head.Init(0))
app := head.Appender(context.Background()) app := head.Appender(context.Background())
type timedHist struct { type timedHistogram struct {
t int64 t int64
h histogram.Histogram h *histogram.Histogram
} }
// Ingest samples. // Ingest samples.
numHistograms := 120 * 4 numHistograms := 120 * 4
timeStep := DefaultBlockDuration / int64(numHistograms) timeStep := DefaultBlockDuration / int64(numHistograms)
expHists := make([]timedHist, 0, numHistograms) expHists := make([]timedHistogram, 0, numHistograms)
l := labels.Labels{{Name: "a", Value: "b"}} l := labels.Labels{{Name: "a", Value: "b"}}
for i, h := range generateHistograms(numHistograms) { for i, h := range GenerateTestHistograms(numHistograms) {
_, err := app.AppendHistogram(0, l, int64(i)*timeStep, h) _, err := app.AppendHistogram(0, l, int64(i)*timeStep, h)
require.NoError(t, err) require.NoError(t, err)
expHists = append(expHists, timedHist{int64(i) * timeStep, h}) expHists = append(expHists, timedHistogram{int64(i) * timeStep, h})
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
@ -1372,10 +1372,10 @@ func TestHeadCompactionWithHistograms(t *testing.T) {
require.False(t, ss.Next()) require.False(t, ss.Next())
it := s.Iterator() it := s.Iterator()
actHists := make([]timedHist, 0, len(expHists)) actHists := make([]timedHistogram, 0, len(expHists))
for it.Next() { for it.Next() {
t, h := it.AtHistogram() t, h := it.AtHistogram()
actHists = append(actHists, timedHist{t, h.Copy()}) actHists = append(actHists, timedHistogram{t, h.Copy()})
} }
require.Equal(t, expHists, actHists) require.Equal(t, expHists, actHists)
@ -1455,7 +1455,7 @@ func TestSparseHistogramSpaceSavings(t *testing.T) {
var allSparseSeries []struct { var allSparseSeries []struct {
baseLabels labels.Labels baseLabels labels.Labels
hists []histogram.Histogram hists []*histogram.Histogram
} }
for sid, schema := range allSchemas { for sid, schema := range allSchemas {
@ -1467,7 +1467,7 @@ func TestSparseHistogramSpaceSavings(t *testing.T) {
} }
allSparseSeries = append(allSparseSeries, struct { allSparseSeries = append(allSparseSeries, struct {
baseLabels labels.Labels baseLabels labels.Labels
hists []histogram.Histogram hists []*histogram.Histogram
}{baseLabels: lbls, hists: generateCustomHistograms(numHistograms, c.numBuckets, c.numSpans, c.gapBetweenSpans, schema)}) }{baseLabels: lbls, hists: generateCustomHistograms(numHistograms, c.numBuckets, c.numSpans, c.gapBetweenSpans, schema)})
} }
} }
@ -1533,7 +1533,6 @@ func TestSparseHistogramSpaceSavings(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
itIdx++ itIdx++
} }
require.NoError(t, it.Err())
// _count metric. // _count metric.
countLbls := ah.baseLabels.Copy() countLbls := ah.baseLabels.Copy()
countLbls[0].Value = countLbls[0].Value + "_count" countLbls[0].Value = countLbls[0].Value + "_count"
@ -1614,9 +1613,9 @@ Savings: Index=%.2f%%, Chunks=%.2f%%, Total=%.2f%%
} }
} }
func generateCustomHistograms(numHists, numBuckets, numSpans, gapBetweenSpans, schema int) (r []histogram.Histogram) { func generateCustomHistograms(numHists, numBuckets, numSpans, gapBetweenSpans, schema int) (r []*histogram.Histogram) {
// First histogram with all the settings. // First histogram with all the settings.
h := histogram.Histogram{ h := &histogram.Histogram{
Sum: 1000 * rand.Float64(), Sum: 1000 * rand.Float64(),
Schema: int32(schema), Schema: int32(schema),
} }
@ -1709,14 +1708,14 @@ func TestSparseHistogramCompactionAndQuery(t *testing.T) {
}) })
db.DisableCompactions() db.DisableCompactions()
type timedHist struct { type timedHistogram struct {
t int64 t int64
h histogram.Histogram h *histogram.Histogram
} }
expHists := make(map[string][]timedHist) expHists := make(map[string][]timedHistogram)
series1Histograms := generateHistograms(20) series1Histograms := GenerateTestHistograms(20)
series2Histograms := generateHistograms(20) series2Histograms := GenerateTestHistograms(20)
idx1, idx2 := -1, -1 idx1, idx2 := -1, -1
addNextHists := func(ts int64, app storage.Appender) { addNextHists := func(ts int64, app storage.Appender) {
lbls1 := labels.Labels{{Name: "a", Value: "b"}} lbls1 := labels.Labels{{Name: "a", Value: "b"}}
@ -1729,8 +1728,8 @@ func TestSparseHistogramCompactionAndQuery(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
l1, l2 := lbls1.String(), lbls2.String() l1, l2 := lbls1.String(), lbls2.String()
expHists[l1] = append(expHists[l1], timedHist{t: ts, h: series1Histograms[idx1]}) expHists[l1] = append(expHists[l1], timedHistogram{t: ts, h: series1Histograms[idx1]})
expHists[l2] = append(expHists[l2], timedHist{t: ts, h: series2Histograms[idx2]}) expHists[l2] = append(expHists[l2], timedHistogram{t: ts, h: series2Histograms[idx2]})
} }
testQuery := func() { testQuery := func() {
@ -1741,13 +1740,13 @@ func TestSparseHistogramCompactionAndQuery(t *testing.T) {
}() }()
ss := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")) ss := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "a", ".*"))
actHists := make(map[string][]timedHist) actHists := make(map[string][]timedHistogram)
for ss.Next() { for ss.Next() {
s := ss.At() s := ss.At()
it := s.Iterator() it := s.Iterator()
for it.Next() { for it.Next() {
ts, h := it.AtHistogram() ts, h := it.AtHistogram()
actHists[s.Labels().String()] = append(actHists[s.Labels().String()], timedHist{ts, h.Copy()}) actHists[s.Labels().String()] = append(actHists[s.Labels().String()], timedHistogram{ts, h.Copy()})
} }
require.NoError(t, it.Err()) require.NoError(t, it.Err())
} }

View file

@ -420,7 +420,7 @@ Outer:
expSamples := make([]tsdbutil.Sample, 0, len(c.remaint)) expSamples := make([]tsdbutil.Sample, 0, len(c.remaint))
for _, ts := range c.remaint { for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts]}) expSamples = append(expSamples, sample{ts, smpls[ts], nil})
} }
expss := newMockSeriesSet([]storage.Series{ expss := newMockSeriesSet([]storage.Series{
@ -536,7 +536,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) {
ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]tsdbutil.Sample{ require.Equal(t, map[string][]tsdbutil.Sample{
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1}}, labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil}},
}, ssMap) }, ssMap)
// Append Out of Order Value. // Append Out of Order Value.
@ -553,7 +553,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) {
ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]tsdbutil.Sample{ require.Equal(t, map[string][]tsdbutil.Sample{
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1}, sample{10, 3}}, labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil}, sample{10, 3, nil}},
}, ssMap) }, ssMap)
} }
@ -716,7 +716,7 @@ Outer:
expSamples := make([]tsdbutil.Sample, 0, len(c.remaint)) expSamples := make([]tsdbutil.Sample, 0, len(c.remaint))
for _, ts := range c.remaint { for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts]}) expSamples = append(expSamples, sample{ts, smpls[ts], nil})
} }
expss := newMockSeriesSet([]storage.Series{ expss := newMockSeriesSet([]storage.Series{
@ -821,7 +821,7 @@ func TestDB_e2e(t *testing.T) {
for i := 0; i < numDatapoints; i++ { for i := 0; i < numDatapoints; i++ {
v := rand.Float64() v := rand.Float64()
series = append(series, sample{ts, v}) series = append(series, sample{ts, v, nil})
_, err := app.Append(0, lset, ts, v) _, err := app.Append(0, lset, ts, v)
require.NoError(t, err) require.NoError(t, err)
@ -1066,7 +1066,7 @@ func TestTombstoneClean(t *testing.T) {
expSamples := make([]tsdbutil.Sample, 0, len(c.remaint)) expSamples := make([]tsdbutil.Sample, 0, len(c.remaint))
for _, ts := range c.remaint { for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts]}) expSamples = append(expSamples, sample{ts, smpls[ts], nil})
} }
expss := newMockSeriesSet([]storage.Series{ expss := newMockSeriesSet([]storage.Series{
@ -2541,11 +2541,11 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) {
// TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and // TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and
// that the resulted segments includes the expected chunks data. // that the resulted segments includes the expected chunks data.
func TestChunkWriter_ReadAfterWrite(t *testing.T) { func TestChunkWriter_ReadAfterWrite(t *testing.T) {
chk1 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1}}) chk1 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil}})
chk2 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2}}) chk2 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil}})
chk3 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3}}) chk3 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil}})
chk4 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4}}) chk4 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil}})
chk5 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5}}) chk5 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil}})
chunkSize := len(chk1.Chunk.Bytes()) + chunks.MaxChunkLengthFieldSize + chunks.ChunkEncodingSize + crc32.Size chunkSize := len(chk1.Chunk.Bytes()) + chunks.MaxChunkLengthFieldSize + chunks.ChunkEncodingSize + crc32.Size
tests := []struct { tests := []struct {
@ -2746,11 +2746,11 @@ func TestRangeForTimestamp(t *testing.T) {
// Regression test for https://github.com/prometheus/prometheus/pull/6514. // Regression test for https://github.com/prometheus/prometheus/pull/6514.
func TestChunkReader_ConcurrentReads(t *testing.T) { func TestChunkReader_ConcurrentReads(t *testing.T) {
chks := []chunks.Meta{ chks := []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1}}), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil}}),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2}}), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil}}),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3}}), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil}}),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4}}), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil}}),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5}}), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil}}),
} }
tempDir, err := ioutil.TempDir("", "test_chunk_writer") tempDir, err := ioutil.TempDir("", "test_chunk_writer")
@ -2815,7 +2815,7 @@ func TestCompactHead(t *testing.T) {
val := rand.Float64() val := rand.Float64()
_, err := app.Append(0, labels.FromStrings("a", "b"), int64(i), val) _, err := app.Append(0, labels.FromStrings("a", "b"), int64(i), val)
require.NoError(t, err) require.NoError(t, err)
expSamples = append(expSamples, sample{int64(i), val}) expSamples = append(expSamples, sample{int64(i), val, nil})
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
@ -2842,7 +2842,7 @@ func TestCompactHead(t *testing.T) {
series := seriesSet.At().Iterator() series := seriesSet.At().Iterator()
for series.Next() { for series.Next() {
time, val := series.At() time, val := series.At()
actSamples = append(actSamples, sample{int64(time), val}) actSamples = append(actSamples, sample{int64(time), val, nil})
} }
require.NoError(t, series.Err()) require.NoError(t, series.Err())
} }

View file

@ -1490,17 +1490,20 @@ func (s *stripeSeries) getOrSet(hash uint64, lset labels.Labels, createSeries fu
type histogramSample struct { type histogramSample struct {
t int64 t int64
h histogram.Histogram h *histogram.Histogram
} }
type sample struct { type sample struct {
t int64 t int64
v float64 v float64
h *histogram.Histogram
} }
func newSample(t int64, v float64) tsdbutil.Sample { return sample{t, v} } func newSample(t int64, v float64, h *histogram.Histogram) tsdbutil.Sample { return sample{t, v, h} }
func (s sample) T() int64 { return s.t }
func (s sample) V() float64 { return s.v } func (s sample) T() int64 { return s.t }
func (s sample) V() float64 { return s.v }
func (s sample) H() *histogram.Histogram { return s.h }
// memSeries is the in-memory representation of a series. None of its methods // memSeries is the in-memory representation of a series. None of its methods
// are goroutine safe and it is the caller's responsibility to lock it. // are goroutine safe and it is the caller's responsibility to lock it.
@ -1658,3 +1661,22 @@ func (h *Head) updateWALReplayStatusRead(current int) {
h.stats.WALReplayStatus.Current = current h.stats.WALReplayStatus.Current = current
} }
func GenerateTestHistograms(n int) (r []*histogram.Histogram) {
for i := 0; i < n; i++ {
r = append(r, &histogram.Histogram{
Count: 5 + uint64(i*4),
ZeroCount: 2 + uint64(i),
ZeroThreshold: 0.001,
Sum: 18.4 * float64(i+1),
Schema: 1,
PositiveSpans: []histogram.Span{
{Offset: 0, Length: 2},
{Offset: 1, Length: 2},
},
PositiveBuckets: []int64{int64(i + 1), 1, -1, 0},
})
}
return r
}

View file

@ -67,7 +67,7 @@ func (a *initAppender) AppendExemplar(ref uint64, l labels.Labels, e exemplar.Ex
return a.app.AppendExemplar(ref, l, e) return a.app.AppendExemplar(ref, l, e)
} }
func (a *initAppender) AppendHistogram(ref uint64, l labels.Labels, t int64, h histogram.Histogram) (uint64, error) { func (a *initAppender) AppendHistogram(ref uint64, l labels.Labels, t int64, h *histogram.Histogram) (uint64, error) {
if a.app != nil { if a.app != nil {
return a.app.AppendHistogram(ref, l, t, h) return a.app.AppendHistogram(ref, l, t, h)
} }
@ -270,7 +270,7 @@ func (a *headAppender) Append(ref uint64, lset labels.Labels, t int64, v float64
} }
if value.IsStaleNaN(v) && s.histogramSeries { if value.IsStaleNaN(v) && s.histogramSeries {
return a.AppendHistogram(ref, lset, t, histogram.Histogram{Sum: v}) return a.AppendHistogram(ref, lset, t, &histogram.Histogram{Sum: v})
} }
s.Lock() s.Lock()
@ -322,7 +322,7 @@ func (s *memSeries) appendable(t int64, v float64) error {
} }
// appendableHistogram checks whether the given sample is valid for appending to the series. // appendableHistogram checks whether the given sample is valid for appending to the series.
func (s *memSeries) appendableHistogram(t int64, sh histogram.Histogram) error { func (s *memSeries) appendableHistogram(t int64, h *histogram.Histogram) error {
c := s.head() c := s.head()
if c == nil { if c == nil {
return nil return nil
@ -334,7 +334,7 @@ func (s *memSeries) appendableHistogram(t int64, sh histogram.Histogram) error {
if t < c.maxTime { if t < c.maxTime {
return storage.ErrOutOfOrderSample return storage.ErrOutOfOrderSample
} }
// TODO: do it for histogram. // TODO(beorn7): do it for histogram.
// We are allowing exact duplicates as we can encounter them in valid cases // We are allowing exact duplicates as we can encounter them in valid cases
// like federation and erroring out at that time would be extremely noisy. // like federation and erroring out at that time would be extremely noisy.
//if math.Float64bits(s.sampleBuf[3].v) != math.Float64bits(v) { //if math.Float64bits(s.sampleBuf[3].v) != math.Float64bits(v) {
@ -372,7 +372,7 @@ func (a *headAppender) AppendExemplar(ref uint64, _ labels.Labels, e exemplar.Ex
return s.ref, nil return s.ref, nil
} }
func (a *headAppender) AppendHistogram(ref uint64, lset labels.Labels, t int64, h histogram.Histogram) (uint64, error) { func (a *headAppender) AppendHistogram(ref uint64, lset labels.Labels, t int64, h *histogram.Histogram) (uint64, error) {
if t < a.minValidTime { if t < a.minValidTime {
a.head.metrics.outOfBoundSamples.Inc() a.head.metrics.outOfBoundSamples.Inc()
return 0, storage.ErrOutOfBounds return 0, storage.ErrOutOfBounds
@ -606,7 +606,7 @@ func (s *memSeries) append(t int64, v float64, appendID uint64, chunkDiskMapper
// appendHistogram adds the histogram. // appendHistogram adds the histogram.
// It is unsafe to call this concurrently with s.iterator(...) without holding the series lock. // It is unsafe to call this concurrently with s.iterator(...) without holding the series lock.
func (s *memSeries) appendHistogram(t int64, h histogram.Histogram, appendID uint64, chunkDiskMapper *chunks.ChunkDiskMapper) (sampleInOrder, chunkCreated bool) { func (s *memSeries) appendHistogram(t int64, h *histogram.Histogram, appendID uint64, chunkDiskMapper *chunks.ChunkDiskMapper) (sampleInOrder, chunkCreated bool) {
// Head controls the execution of recoding, so that we own the proper chunk reference afterwards. // Head controls the execution of recoding, so that we own the proper chunk reference afterwards.
// We check for Appendable before appendPreprocessor because in case it ends up creating a new chunk, // We check for Appendable before appendPreprocessor because in case it ends up creating a new chunk,
// we need to know if there was also a counter reset or not to set the meta properly. // we need to know if there was also a counter reset or not to set the meta properly.

View file

@ -444,6 +444,7 @@ func (s *memSeries) iterator(id int, isoState *isolationState, chunkDiskMapper *
msIter.stopAfter = stopAfter msIter.stopAfter = stopAfter
msIter.buf = s.sampleBuf msIter.buf = s.sampleBuf
msIter.histogramBuf = s.histogramBuf msIter.histogramBuf = s.histogramBuf
msIter.histogramSeries = s.histogramSeries
return msIter return msIter
} }
return &memSafeIterator{ return &memSafeIterator{
@ -452,18 +453,20 @@ func (s *memSeries) iterator(id int, isoState *isolationState, chunkDiskMapper *
i: -1, i: -1,
stopAfter: stopAfter, stopAfter: stopAfter,
}, },
total: numSamples, total: numSamples,
buf: s.sampleBuf, buf: s.sampleBuf,
histogramBuf: s.histogramBuf, histogramBuf: s.histogramBuf,
histogramSeries: s.histogramSeries,
} }
} }
type memSafeIterator struct { type memSafeIterator struct {
stopIterator stopIterator
total int histogramSeries bool
buf [4]sample total int
histogramBuf [4]histogramSample buf [4]sample
histogramBuf [4]histogramSample
} }
func (it *memSafeIterator) Seek(t int64) bool { func (it *memSafeIterator) Seek(t int64) bool {
@ -471,15 +474,29 @@ func (it *memSafeIterator) Seek(t int64) bool {
return false return false
} }
ts, _ := it.At() var ts int64
if it.histogramSeries {
for t > ts || it.i == -1 { ts, _ = it.AtHistogram()
if !it.Next() { } else {
return false
}
ts, _ = it.At() ts, _ = it.At()
} }
if it.histogramSeries {
for t > ts || it.i == -1 {
if !it.Next() {
return false
}
ts, _ = it.AtHistogram()
}
} else {
for t > ts || it.i == -1 {
if !it.Next() {
return false
}
ts, _ = it.At()
}
}
return true return true
} }
@ -502,7 +519,7 @@ func (it *memSafeIterator) At() (int64, float64) {
return s.t, s.v return s.t, s.v
} }
func (it *memSafeIterator) AtHistogram() (int64, histogram.Histogram) { func (it *memSafeIterator) AtHistogram() (int64, *histogram.Histogram) {
if it.total-it.i > 4 { if it.total-it.i > 4 {
return it.Iterator.AtHistogram() return it.Iterator.AtHistogram()
} }

View file

@ -331,11 +331,11 @@ func TestHead_ReadWAL(t *testing.T) {
require.NoError(t, c.Err()) require.NoError(t, c.Err())
return x return x
} }
require.Equal(t, []sample{{100, 2}, {101, 5}}, expandChunk(s10.iterator(0, nil, head.chunkDiskMapper, nil))) require.Equal(t, []sample{{100, 2, nil}, {101, 5, nil}}, expandChunk(s10.iterator(0, nil, head.chunkDiskMapper, nil)))
require.Equal(t, []sample{{101, 6}}, expandChunk(s50.iterator(0, nil, head.chunkDiskMapper, nil))) require.Equal(t, []sample{{101, 6, nil}}, expandChunk(s50.iterator(0, nil, head.chunkDiskMapper, nil)))
// The samples before the new series record should be discarded since a duplicate record // The samples before the new series record should be discarded since a duplicate record
// is only possible when old samples were compacted. // is only possible when old samples were compacted.
require.Equal(t, []sample{{101, 7}}, expandChunk(s100.iterator(0, nil, head.chunkDiskMapper, nil))) require.Equal(t, []sample{{101, 7, nil}}, expandChunk(s100.iterator(0, nil, head.chunkDiskMapper, nil)))
q, err := head.ExemplarQuerier(context.Background()) q, err := head.ExemplarQuerier(context.Background())
require.NoError(t, err) require.NoError(t, err)
@ -401,8 +401,8 @@ func TestHead_WALMultiRef(t *testing.T) {
// The samples before the new ref should be discarded since Head truncation // The samples before the new ref should be discarded since Head truncation
// happens only after compacting the Head. // happens only after compacting the Head.
require.Equal(t, map[string][]tsdbutil.Sample{`{foo="bar"}`: { require.Equal(t, map[string][]tsdbutil.Sample{`{foo="bar"}`: {
sample{1700, 3}, sample{1700, 3, nil},
sample{2000, 4}, sample{2000, 4, nil},
}}, series) }}, series)
} }
@ -798,7 +798,7 @@ func TestDeleteUntilCurMax(t *testing.T) {
it = exps.Iterator() it = exps.Iterator()
resSamples, err := storage.ExpandSamples(it, newSample) resSamples, err := storage.ExpandSamples(it, newSample)
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, []tsdbutil.Sample{sample{11, 1}}, resSamples) require.Equal(t, []tsdbutil.Sample{sample{11, 1, nil}}, resSamples)
for res.Next() { for res.Next() {
} }
require.NoError(t, res.Err()) require.NoError(t, res.Err())
@ -913,7 +913,7 @@ func TestDelete_e2e(t *testing.T) {
v := rand.Float64() v := rand.Float64()
_, err := app.Append(0, ls, ts, v) _, err := app.Append(0, ls, ts, v)
require.NoError(t, err) require.NoError(t, err)
series = append(series, sample{ts, v}) series = append(series, sample{ts, v, nil})
ts += rand.Int63n(timeInterval) + 1 ts += rand.Int63n(timeInterval) + 1
} }
seriesMap[labels.New(l...).String()] = series seriesMap[labels.New(l...).String()] = series
@ -2397,7 +2397,7 @@ func TestDataMissingOnQueryDuringCompaction(t *testing.T) {
ref, err = app.Append(ref, labels.FromStrings("a", "b"), ts, float64(i)) ref, err = app.Append(ref, labels.FromStrings("a", "b"), ts, float64(i))
require.NoError(t, err) require.NoError(t, err)
maxt = ts maxt = ts
expSamples = append(expSamples, sample{ts, float64(i)}) expSamples = append(expSamples, sample{ts, float64(i), nil})
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
@ -2541,10 +2541,10 @@ func TestAppendHistogram(t *testing.T) {
type timedHistogram struct { type timedHistogram struct {
t int64 t int64
h histogram.Histogram h *histogram.Histogram
} }
expHistograms := make([]timedHistogram, 0, numHistograms) expHistograms := make([]timedHistogram, 0, numHistograms)
for i, h := range generateHistograms(numHistograms) { for i, h := range GenerateTestHistograms(numHistograms) {
_, err := app.AppendHistogram(0, l, int64(i), h) _, err := app.AppendHistogram(0, l, int64(i), h)
require.NoError(t, err) require.NoError(t, err)
expHistograms = append(expHistograms, timedHistogram{int64(i), h}) expHistograms = append(expHistograms, timedHistogram{int64(i), h})
@ -2588,10 +2588,10 @@ func TestHistogramInWAL(t *testing.T) {
type timedHistogram struct { type timedHistogram struct {
t int64 t int64
h histogram.Histogram h *histogram.Histogram
} }
expHistograms := make([]timedHistogram, 0, numHistograms) expHistograms := make([]timedHistogram, 0, numHistograms)
for i, h := range generateHistograms(numHistograms) { for i, h := range GenerateTestHistograms(numHistograms) {
h.NegativeSpans = h.PositiveSpans h.NegativeSpans = h.PositiveSpans
h.NegativeBuckets = h.PositiveBuckets h.NegativeBuckets = h.PositiveBuckets
_, err := app.AppendHistogram(0, l, int64(i), h) _, err := app.AppendHistogram(0, l, int64(i), h)
@ -2630,25 +2630,6 @@ func TestHistogramInWAL(t *testing.T) {
require.Equal(t, expHistograms, actHistograms) require.Equal(t, expHistograms, actHistograms)
} }
func generateHistograms(n int) (r []histogram.Histogram) {
for i := 0; i < n; i++ {
r = append(r, histogram.Histogram{
Count: 5 + uint64(i*4),
ZeroCount: 2 + uint64(i),
ZeroThreshold: 0.001,
Sum: 18.4 * float64(i+1),
Schema: 1,
PositiveSpans: []histogram.Span{
{Offset: 0, Length: 2},
{Offset: 1, Length: 2},
},
PositiveBuckets: []int64{int64(i + 1), 1, -1, 0},
})
}
return r
}
func TestChunkSnapshot(t *testing.T) { func TestChunkSnapshot(t *testing.T) {
head, _ := newTestHead(t, 120*4, false) head, _ := newTestHead(t, 120*4, false)
defer func() { defer func() {
@ -2747,7 +2728,7 @@ func TestChunkSnapshot(t *testing.T) {
// 240 samples should m-map at least 1 chunk. // 240 samples should m-map at least 1 chunk.
for ts := int64(1); ts <= 240; ts++ { for ts := int64(1); ts <= 240; ts++ {
val := rand.Float64() val := rand.Float64()
expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val}) expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil})
ref, err := app.Append(0, lbls, ts, val) ref, err := app.Append(0, lbls, ts, val)
require.NoError(t, err) require.NoError(t, err)
@ -2807,7 +2788,7 @@ func TestChunkSnapshot(t *testing.T) {
// 240 samples should m-map at least 1 chunk. // 240 samples should m-map at least 1 chunk.
for ts := int64(241); ts <= 480; ts++ { for ts := int64(241); ts <= 480; ts++ {
val := rand.Float64() val := rand.Float64()
expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val}) expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil})
ref, err := app.Append(0, lbls, ts, val) ref, err := app.Append(0, lbls, ts, val)
require.NoError(t, err) require.NoError(t, err)
@ -2962,7 +2943,7 @@ func TestHistogramMetrics(t *testing.T) {
for x := 0; x < 5; x++ { for x := 0; x < 5; x++ {
expHSeries++ expHSeries++
l := labels.Labels{{Name: "a", Value: fmt.Sprintf("b%d", x)}} l := labels.Labels{{Name: "a", Value: fmt.Sprintf("b%d", x)}}
for i, h := range generateHistograms(10) { for i, h := range GenerateTestHistograms(10) {
app := head.Appender(context.Background()) app := head.Appender(context.Background())
_, err := app.AppendHistogram(0, l, int64(i), h) _, err := app.AppendHistogram(0, l, int64(i), h)
require.NoError(t, err) require.NoError(t, err)
@ -2996,7 +2977,7 @@ func TestHistogramStaleSample(t *testing.T) {
type timedHistogram struct { type timedHistogram struct {
t int64 t int64
h histogram.Histogram h *histogram.Histogram
} }
expHistograms := make([]timedHistogram, 0, numHistograms) expHistograms := make([]timedHistogram, 0, numHistograms)
@ -3030,6 +3011,7 @@ func TestHistogramStaleSample(t *testing.T) {
require.True(t, value.IsStaleNaN(ah.h.Sum)) require.True(t, value.IsStaleNaN(ah.h.Sum))
// To make require.Equal work. // To make require.Equal work.
ah.h.Sum = 0 ah.h.Sum = 0
eh.h = eh.h.Copy()
eh.h.Sum = 0 eh.h.Sum = 0
} }
require.Equal(t, eh, ah) require.Equal(t, eh, ah)
@ -3039,7 +3021,7 @@ func TestHistogramStaleSample(t *testing.T) {
// Adding stale in the same appender. // Adding stale in the same appender.
app := head.Appender(context.Background()) app := head.Appender(context.Background())
for _, h := range generateHistograms(numHistograms) { for _, h := range GenerateTestHistograms(numHistograms) {
_, err := app.AppendHistogram(0, l, 100*int64(len(expHistograms)), h) _, err := app.AppendHistogram(0, l, 100*int64(len(expHistograms)), h)
require.NoError(t, err) require.NoError(t, err)
expHistograms = append(expHistograms, timedHistogram{100 * int64(len(expHistograms)), h}) expHistograms = append(expHistograms, timedHistogram{100 * int64(len(expHistograms)), h})
@ -3047,7 +3029,7 @@ func TestHistogramStaleSample(t *testing.T) {
// +1 so that delta-of-delta is not 0. // +1 so that delta-of-delta is not 0.
_, err := app.Append(0, l, 100*int64(len(expHistograms))+1, math.Float64frombits(value.StaleNaN)) _, err := app.Append(0, l, 100*int64(len(expHistograms))+1, math.Float64frombits(value.StaleNaN))
require.NoError(t, err) require.NoError(t, err)
expHistograms = append(expHistograms, timedHistogram{100*int64(len(expHistograms)) + 1, histogram.Histogram{Sum: math.Float64frombits(value.StaleNaN)}}) expHistograms = append(expHistograms, timedHistogram{100*int64(len(expHistograms)) + 1, &histogram.Histogram{Sum: math.Float64frombits(value.StaleNaN)}})
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
// Only 1 chunk in the memory, no m-mapped chunk. // Only 1 chunk in the memory, no m-mapped chunk.
@ -3058,7 +3040,7 @@ func TestHistogramStaleSample(t *testing.T) {
// Adding stale in different appender and continuing series after a stale sample. // Adding stale in different appender and continuing series after a stale sample.
app = head.Appender(context.Background()) app = head.Appender(context.Background())
for _, h := range generateHistograms(2 * numHistograms)[numHistograms:] { for _, h := range GenerateTestHistograms(2 * numHistograms)[numHistograms:] {
_, err := app.AppendHistogram(0, l, 100*int64(len(expHistograms)), h) _, err := app.AppendHistogram(0, l, 100*int64(len(expHistograms)), h)
require.NoError(t, err) require.NoError(t, err)
expHistograms = append(expHistograms, timedHistogram{100 * int64(len(expHistograms)), h}) expHistograms = append(expHistograms, timedHistogram{100 * int64(len(expHistograms)), h})
@ -3069,7 +3051,7 @@ func TestHistogramStaleSample(t *testing.T) {
// +1 so that delta-of-delta is not 0. // +1 so that delta-of-delta is not 0.
_, err = app.Append(0, l, 100*int64(len(expHistograms))+1, math.Float64frombits(value.StaleNaN)) _, err = app.Append(0, l, 100*int64(len(expHistograms))+1, math.Float64frombits(value.StaleNaN))
require.NoError(t, err) require.NoError(t, err)
expHistograms = append(expHistograms, timedHistogram{100*int64(len(expHistograms)) + 1, histogram.Histogram{Sum: math.Float64frombits(value.StaleNaN)}}) expHistograms = append(expHistograms, timedHistogram{100*int64(len(expHistograms)) + 1, &histogram.Histogram{Sum: math.Float64frombits(value.StaleNaN)}})
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
// Total 2 chunks, 1 m-mapped. // Total 2 chunks, 1 m-mapped.
@ -3088,7 +3070,7 @@ func TestHistogramCounterResetHeader(t *testing.T) {
require.NoError(t, head.Init(0)) require.NoError(t, head.Init(0))
ts := int64(0) ts := int64(0)
appendHistogram := func(h histogram.Histogram) { appendHistogram := func(h *histogram.Histogram) {
ts++ ts++
app := head.Appender(context.Background()) app := head.Appender(context.Background())
_, err := app.AppendHistogram(0, l, ts, h) _, err := app.AppendHistogram(0, l, ts, h)
@ -3112,7 +3094,7 @@ func TestHistogramCounterResetHeader(t *testing.T) {
require.Equal(t, expHeaders[len(expHeaders)-1], ms.headChunk.chunk.(*chunkenc.HistogramChunk).GetCounterResetHeader()) require.Equal(t, expHeaders[len(expHeaders)-1], ms.headChunk.chunk.(*chunkenc.HistogramChunk).GetCounterResetHeader())
} }
h := generateHistograms(1)[0] h := GenerateTestHistograms(1)[0]
if len(h.NegativeBuckets) == 0 { if len(h.NegativeBuckets) == 0 {
h.NegativeSpans = append([]histogram.Span{}, h.PositiveSpans...) h.NegativeSpans = append([]histogram.Span{}, h.PositiveSpans...)
h.NegativeBuckets = append([]int64{}, h.PositiveBuckets...) h.NegativeBuckets = append([]int64{}, h.PositiveBuckets...)

View file

@ -649,9 +649,11 @@ func (p *populateWithDelSeriesIterator) Seek(t int64) bool {
} }
func (p *populateWithDelSeriesIterator) At() (int64, float64) { return p.curr.At() } func (p *populateWithDelSeriesIterator) At() (int64, float64) { return p.curr.At() }
func (p *populateWithDelSeriesIterator) AtHistogram() (int64, histogram.Histogram) {
func (p *populateWithDelSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return p.curr.AtHistogram() return p.curr.AtHistogram()
} }
func (p *populateWithDelSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (p *populateWithDelSeriesIterator) ChunkEncoding() chunkenc.Encoding {
return p.curr.ChunkEncoding() return p.curr.ChunkEncoding()
} }
@ -714,7 +716,7 @@ func (p *populateWithDelChunkSeriesIterator) Next() bool {
var ( var (
t int64 t int64
v float64 v float64
h histogram.Histogram h *histogram.Histogram
) )
if p.currDelIter.ChunkEncoding() == chunkenc.EncHistogram { if p.currDelIter.ChunkEncoding() == chunkenc.EncHistogram {
if hc, ok := p.currChkMeta.Chunk.(*chunkenc.HistogramChunk); ok { if hc, ok := p.currChkMeta.Chunk.(*chunkenc.HistogramChunk); ok {
@ -870,7 +872,7 @@ func (it *DeletedIterator) At() (int64, float64) {
return it.Iter.At() return it.Iter.At()
} }
func (it *DeletedIterator) AtHistogram() (int64, histogram.Histogram) { func (it *DeletedIterator) AtHistogram() (int64, *histogram.Histogram) {
t, h := it.Iter.AtHistogram() t, h := it.Iter.AtHistogram()
return t, h return t, h
} }

View file

@ -277,24 +277,24 @@ func TestBlockQuerier(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{1, 2}, sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}, sample{7, 4}}, []tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}, sample{5, 1}, sample{6, 7}, sample{7, 2}}, []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{1, 2}, sample{2, 3}, sample{3, 4}}, []tsdbutil.Sample{sample{5, 2}, sample{6, 3}, sample{7, 4}}, []tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}}, []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}}, []tsdbutil.Sample{sample{5, 3}, sample{6, 6}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, []tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}}, []tsdbutil.Sample{sample{5, 1}, sample{6, 7}, sample{7, 2}}, []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}}, []tsdbutil.Sample{sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}},
), ),
}), }),
}, },
@ -304,18 +304,18 @@ func TestBlockQuerier(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}}, []tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}}, []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{2, 3}, sample{3, 4}}, []tsdbutil.Sample{sample{5, 2}, sample{6, 3}}, []tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}}, []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{2, 2}, sample{3, 3}}, []tsdbutil.Sample{sample{5, 3}, sample{6, 6}}, []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}}, []tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}},
), ),
}), }),
}, },
@ -363,24 +363,24 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{1, 2}, sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}, sample{7, 4}}, []tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}, sample{5, 1}, sample{6, 7}, sample{7, 2}}, []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{1, 2}, sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}, sample{7, 4}}, []tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}, sample{5, 1}, sample{6, 7}, sample{7, 2}}, []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}},
), ),
}), }),
}, },
@ -390,18 +390,18 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}}, []tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}}, []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{2, 3}, sample{3, 4}, sample{5, 2}, sample{6, 3}}, []tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{2, 2}, sample{3, 3}, sample{5, 3}, sample{6, 6}}, []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}},
), ),
}), }),
}, },
@ -442,22 +442,22 @@ var testData = []seriesSamples{
{ {
lset: map[string]string{"a": "a"}, lset: map[string]string{"a": "a"},
chunks: [][]sample{ chunks: [][]sample{
{{1, 2}, {2, 3}, {3, 4}}, {{1, 2, nil}, {2, 3, nil}, {3, 4, nil}},
{{5, 2}, {6, 3}, {7, 4}}, {{5, 2, nil}, {6, 3, nil}, {7, 4, nil}},
}, },
}, },
{ {
lset: map[string]string{"a": "a", "b": "b"}, lset: map[string]string{"a": "a", "b": "b"},
chunks: [][]sample{ chunks: [][]sample{
{{1, 1}, {2, 2}, {3, 3}}, {{1, 1, nil}, {2, 2, nil}, {3, 3, nil}},
{{5, 3}, {6, 6}}, {{5, 3, nil}, {6, 6, nil}},
}, },
}, },
{ {
lset: map[string]string{"b": "b"}, lset: map[string]string{"b": "b"},
chunks: [][]sample{ chunks: [][]sample{
{{1, 3}, {2, 2}, {3, 6}}, {{1, 3, nil}, {2, 2, nil}, {3, 6, nil}},
{{5, 1}, {6, 7}, {7, 2}}, {{5, 1, nil}, {6, 7, nil}, {7, 2, nil}},
}, },
}, },
} }
@ -504,24 +504,24 @@ func TestBlockQuerierDelete(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}, sample{7, 4}}, []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{5, 3}}, []tsdbutil.Sample{sample{5, 3, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}, sample{5, 1}}, []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}, sample{7, 4}}, []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{5, 3}}, []tsdbutil.Sample{sample{5, 3, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3}, sample{2, 2}, sample{3, 6}}, []tsdbutil.Sample{sample{5, 1}}, []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}}, []tsdbutil.Sample{sample{5, 1, nil}},
), ),
}), }),
}, },
@ -531,18 +531,18 @@ func TestBlockQuerierDelete(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}}, []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{5, 3}}, []tsdbutil.Sample{sample{5, 3, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{5, 2}, sample{6, 3}}, []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{5, 3}}, []tsdbutil.Sample{sample{5, 3, nil}},
), ),
}), }),
}, },
@ -625,57 +625,57 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
{ {
name: "one chunk", name: "one chunk",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}}, {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}},
}, },
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}, sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}, sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil},
}), }),
}, },
}, },
{ {
name: "two full chunks", name: "two full chunks",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}}, {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}},
{sample{7, 89}, sample{9, 8}}, {sample{7, 89, nil}, sample{9, 8, nil}},
}, },
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8}, sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}, sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{7, 89}, sample{9, 8}, sample{7, 89, nil}, sample{9, 8, nil},
}), }),
}, },
}, },
{ {
name: "three full chunks", name: "three full chunks",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}}, {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}},
{sample{7, 89}, sample{9, 8}}, {sample{7, 89, nil}, sample{9, 8, nil}},
{sample{10, 22}, sample{203, 3493}}, {sample{10, 22, nil}, sample{203, 3493, nil}},
}, },
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8}, sample{10, 22}, sample{203, 3493}, sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil}, sample{10, 22, nil}, sample{203, 3493, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}, sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{7, 89}, sample{9, 8}, sample{7, 89, nil}, sample{9, 8, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{10, 22}, sample{203, 3493}, sample{10, 22, nil}, sample{203, 3493, nil},
}), }),
}, },
}, },
@ -690,8 +690,8 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
{ {
name: "two chunks and seek beyond chunks", name: "two chunks and seek beyond chunks",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2}, sample{3, 5}, sample{6, 1}}, {sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}},
{sample{7, 89}, sample{9, 8}}, {sample{7, 89, nil}, sample{9, 8, nil}},
}, },
seek: 10, seek: 10,
@ -700,27 +700,27 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
{ {
name: "two chunks and seek on middle of first chunk", name: "two chunks and seek on middle of first chunk",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2}, sample{3, 5}, sample{6, 1}}, {sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}},
{sample{7, 89}, sample{9, 8}}, {sample{7, 89, nil}, sample{9, 8, nil}},
}, },
seek: 2, seek: 2,
seekSuccess: true, seekSuccess: true,
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil},
}, },
}, },
{ {
name: "two chunks and seek before first chunk", name: "two chunks and seek before first chunk",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2}, sample{3, 5}, sample{6, 1}}, {sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}},
{sample{7, 89}, sample{9, 8}}, {sample{7, 89, nil}, sample{9, 8, nil}},
}, },
seek: -32, seek: -32,
seekSuccess: true, seekSuccess: true,
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2}, sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{9, 8}, sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil},
}, },
}, },
// Deletion / Trim cases. // Deletion / Trim cases.
@ -732,60 +732,60 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
{ {
name: "two chunks with trimmed first and last samples from edge chunks", name: "two chunks with trimmed first and last samples from edge chunks",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}}, {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}},
{sample{7, 89}, sample{9, 8}}, {sample{7, 89, nil}, sample{9, 8, nil}},
}, },
intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}), intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}),
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{3, 5}, sample{6, 1}, sample{3, 5, nil}, sample{6, 1, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{7, 89}, sample{7, 89, nil},
}), }),
}, },
}, },
{ {
name: "two chunks with trimmed middle sample of first chunk", name: "two chunks with trimmed middle sample of first chunk",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}}, {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}},
{sample{7, 89}, sample{9, 8}}, {sample{7, 89, nil}, sample{9, 8, nil}},
}, },
intervals: tombstones.Intervals{{Mint: 2, Maxt: 3}}, intervals: tombstones.Intervals{{Mint: 2, Maxt: 3}},
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2}, sample{6, 1}, sample{7, 89}, sample{9, 8}, sample{1, 2, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{1, 2}, sample{6, 1}, sample{1, 2, nil}, sample{6, 1, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{7, 89}, sample{9, 8}, sample{7, 89, nil}, sample{9, 8, nil},
}), }),
}, },
}, },
{ {
name: "two chunks with deletion across two chunks", name: "two chunks with deletion across two chunks",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}}, {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}},
{sample{7, 89}, sample{9, 8}}, {sample{7, 89, nil}, sample{9, 8, nil}},
}, },
intervals: tombstones.Intervals{{Mint: 6, Maxt: 7}}, intervals: tombstones.Intervals{{Mint: 6, Maxt: 7}},
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{9, 8}, sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{9, 8, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{9, 8}, sample{9, 8, nil},
}), }),
}, },
}, },
@ -793,15 +793,15 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
{ {
name: "two chunks with trimmed first and last samples from edge chunks, seek from middle of first chunk", name: "two chunks with trimmed first and last samples from edge chunks, seek from middle of first chunk",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2}, sample{2, 3}, sample{3, 5}, sample{6, 1}}, {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}},
{sample{7, 89}, sample{9, 8}}, {sample{7, 89, nil}, sample{9, 8, nil}},
}, },
intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}), intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}),
seek: 3, seek: 3,
seekSuccess: true, seekSuccess: true,
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{3, 5}, sample{6, 1}, sample{7, 89}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil},
}, },
}, },
} }
@ -857,8 +857,8 @@ func rmChunkRefs(chks []chunks.Meta) {
func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) { func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) {
f, chkMetas := createFakeReaderAndNotPopulatedChunks( f, chkMetas := createFakeReaderAndNotPopulatedChunks(
[]tsdbutil.Sample{}, []tsdbutil.Sample{},
[]tsdbutil.Sample{sample{1, 1}, sample{2, 2}, sample{3, 3}}, []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}},
[]tsdbutil.Sample{sample{4, 4}, sample{5, 5}}, []tsdbutil.Sample{sample{4, 4, nil}, sample{5, 5, nil}},
) )
it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator() it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator()
@ -875,7 +875,7 @@ func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) {
func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) { func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) {
f, chkMetas := createFakeReaderAndNotPopulatedChunks( f, chkMetas := createFakeReaderAndNotPopulatedChunks(
[]tsdbutil.Sample{}, []tsdbutil.Sample{},
[]tsdbutil.Sample{sample{1, 2}, sample{3, 4}, sample{5, 6}, sample{7, 8}}, []tsdbutil.Sample{sample{1, 2, nil}, sample{3, 4, nil}, sample{5, 6, nil}, sample{7, 8, nil}},
[]tsdbutil.Sample{}, []tsdbutil.Sample{},
) )
@ -893,7 +893,7 @@ func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) {
func TestPopulateWithDelSeriesIterator_SeekWithMinTime(t *testing.T) { func TestPopulateWithDelSeriesIterator_SeekWithMinTime(t *testing.T) {
f, chkMetas := createFakeReaderAndNotPopulatedChunks( f, chkMetas := createFakeReaderAndNotPopulatedChunks(
[]tsdbutil.Sample{sample{1, 6}, sample{5, 6}, sample{6, 8}}, []tsdbutil.Sample{sample{1, 6, nil}, sample{5, 6, nil}, sample{6, 8, nil}},
) )
it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator() it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator()
@ -905,7 +905,7 @@ func TestPopulateWithDelSeriesIterator_SeekWithMinTime(t *testing.T) {
// Seek gets called and advances beyond the max time, which was just accepted as a valid sample. // Seek gets called and advances beyond the max time, which was just accepted as a valid sample.
func TestPopulateWithDelSeriesIterator_NextWithMinTime(t *testing.T) { func TestPopulateWithDelSeriesIterator_NextWithMinTime(t *testing.T) {
f, chkMetas := createFakeReaderAndNotPopulatedChunks( f, chkMetas := createFakeReaderAndNotPopulatedChunks(
[]tsdbutil.Sample{sample{1, 6}, sample{5, 6}, sample{7, 8}}, []tsdbutil.Sample{sample{1, 6, nil}, sample{5, 6, nil}, sample{7, 8, nil}},
) )
it := newPopulateWithDelGenericSeriesIterator( it := newPopulateWithDelGenericSeriesIterator(

View file

@ -74,7 +74,7 @@ type RefExemplar struct {
type RefHistogram struct { type RefHistogram struct {
Ref uint64 Ref uint64
T int64 T int64
H histogram.Histogram H *histogram.Histogram
} }
// Decoder decodes series, sample, and tombstone records. // Decoder decodes series, sample, and tombstone records.
@ -253,7 +253,7 @@ func (d *Decoder) Histograms(rec []byte, histograms []RefHistogram) ([]RefHistog
rh := RefHistogram{ rh := RefHistogram{
Ref: baseRef + uint64(dref), Ref: baseRef + uint64(dref),
T: baseTime + dtime, T: baseTime + dtime,
H: histogram.Histogram{ H: &histogram.Histogram{
Schema: 0, Schema: 0,
ZeroThreshold: 0, ZeroThreshold: 0,
ZeroCount: 0, ZeroCount: 0,

View file

@ -102,6 +102,7 @@ func (b *BufferedSeriesIterator) Err() error {
type sample struct { type sample struct {
t int64 t int64
v float64 v float64
h *histogram.Histogram
} }
func (s sample) T() int64 { func (s sample) T() int64 {
@ -112,6 +113,10 @@ func (s sample) V() float64 {
return s.v return s.v
} }
func (s sample) H() *histogram.Histogram {
return s.h
}
type sampleRing struct { type sampleRing struct {
delta int64 delta int64
@ -160,8 +165,9 @@ func (it *sampleRingIterator) At() (int64, float64) {
return it.r.at(it.i) return it.r.at(it.i)
} }
func (it *sampleRingIterator) AtHistogram() (int64, histogram.Histogram) { func (it *sampleRingIterator) AtHistogram() (int64, *histogram.Histogram) {
return 0, histogram.Histogram{} // TODO(beorn7): Add proper histogram support.
return 0, nil
} }
func (it *sampleRingIterator) ChunkEncoding() chunkenc.Encoding { func (it *sampleRingIterator) ChunkEncoding() chunkenc.Encoding {

View file

@ -152,8 +152,9 @@ func (it *listSeriesIterator) At() (int64, float64) {
return s.t, s.v return s.t, s.v
} }
func (it *listSeriesIterator) AtHistogram() (int64, histogram.Histogram) { func (it *listSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return 0, histogram.Histogram{} s := it.list[it.idx]
return s.t, s.h
} }
func (it *listSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (it *listSeriesIterator) ChunkEncoding() chunkenc.Encoding {

View file

@ -14,6 +14,7 @@
package tsdbutil package tsdbutil
import ( import (
"github.com/prometheus/prometheus/model/histogram"
"github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/chunks"
) )
@ -26,6 +27,7 @@ type Samples interface {
type Sample interface { type Sample interface {
T() int64 T() int64
V() float64 V() float64
H() *histogram.Histogram
} }
type SampleSlice []Sample type SampleSlice []Sample
@ -61,7 +63,7 @@ func ChunkFromSamplesGeneric(s Samples) chunks.Meta {
func PopulatedChunk(numSamples int, minTime int64) chunks.Meta { func PopulatedChunk(numSamples int, minTime int64) chunks.Meta {
samples := make([]Sample, numSamples) samples := make([]Sample, numSamples)
for i := 0; i < numSamples; i++ { for i := 0; i < numSamples; i++ {
samples[i] = sample{minTime + int64(i*1000), 1.0} samples[i] = sample{t: minTime + int64(i*1000), v: 1.0}
} }
return ChunkFromSamples(samples) return ChunkFromSamples(samples)
} }

View file

@ -115,7 +115,8 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
if ok { if ok {
t, v = it.Values() t, v = it.Values()
} else { } else {
t, v, ok = it.PeekBack(1) // TODO(beorn7): Handle histograms.
t, v, _, ok = it.PeekBack(1)
if !ok { if !ok {
continue continue
} }
@ -220,6 +221,7 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
protMetric.TimestampMs = proto.Int64(s.T) protMetric.TimestampMs = proto.Int64(s.T)
protMetric.Untyped.Value = proto.Float64(s.V) protMetric.Untyped.Value = proto.Float64(s.V)
// TODO(beorn7): Handle histograms.
protMetricFam.Metric = append(protMetricFam.Metric, protMetric) protMetricFam.Metric = append(protMetricFam.Metric, protMetric)
} }