// Copyright 2020 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package storage

import (
	"fmt"
	"math"
	"sort"
"github.com/prometheus/prometheus/model/histogram"
|
2021-11-08 06:23:17 -08:00
|
|
|
"github.com/prometheus/prometheus/model/labels"
|
2020-03-24 13:15:47 -07:00
|
|
|
"github.com/prometheus/prometheus/tsdb/chunkenc"
|
|
|
|
"github.com/prometheus/prometheus/tsdb/chunks"
|
|
|
|
"github.com/prometheus/prometheus/tsdb/tsdbutil"
|
|
|
|
)

type SeriesEntry struct {
	Lset             labels.Labels
	SampleIteratorFn func(chunkenc.Iterator) chunkenc.Iterator
}

func (s *SeriesEntry) Labels() labels.Labels                           { return s.Lset }
func (s *SeriesEntry) Iterator(it chunkenc.Iterator) chunkenc.Iterator { return s.SampleIteratorFn(it) }

type ChunkSeriesEntry struct {
	Lset            labels.Labels
	ChunkIteratorFn func(chunks.Iterator) chunks.Iterator
}

func (s *ChunkSeriesEntry) Labels() labels.Labels                       { return s.Lset }
func (s *ChunkSeriesEntry) Iterator(it chunks.Iterator) chunks.Iterator { return s.ChunkIteratorFn(it) }

// NewListSeries returns a series entry with an iterator that allows iterating over the provided samples.
func NewListSeries(lset labels.Labels, s []tsdbutil.Sample) *SeriesEntry {
	samplesS := Samples(samples(s))
	return &SeriesEntry{
		Lset: lset,
		SampleIteratorFn: func(it chunkenc.Iterator) chunkenc.Iterator {
			if lsi, ok := it.(*listSeriesIterator); ok {
				lsi.Reset(samplesS)
				return lsi
			}
			return NewListSeriesIterator(samplesS)
		},
	}
}
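
// A minimal usage sketch, assuming sampleSlice is a []tsdbutil.Sample built
// elsewhere (tsdbutil.Sample is an interface, so concrete values typically
// come from a test helper):
//
//	series := NewListSeries(labels.FromStrings("__name__", "up"), sampleSlice)
//	it := series.Iterator(nil)
//	for typ := it.Next(); typ != chunkenc.ValNone; typ = it.Next() {
//		if typ == chunkenc.ValFloat {
//			t, v := it.At()
//			_, _ = t, v
//		}
//	}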

// NewListChunkSeriesFromSamples returns a chunk series entry that allows iterating over the provided samples.
// NOTE: It uses an inefficient chunk encoding implementation, not caring about chunk size.
// Use only for testing.
func NewListChunkSeriesFromSamples(lset labels.Labels, samples ...[]tsdbutil.Sample) *ChunkSeriesEntry {
	chksFromSamples := make([]chunks.Meta, 0, len(samples))
	for _, s := range samples {
		cfs, err := tsdbutil.ChunkFromSamples(s)
		if err != nil {
			return &ChunkSeriesEntry{
				Lset: lset,
				ChunkIteratorFn: func(it chunks.Iterator) chunks.Iterator {
					return errChunksIterator{err: err}
				},
			}
		}
		chksFromSamples = append(chksFromSamples, cfs)
	}
	return &ChunkSeriesEntry{
		Lset: lset,
		ChunkIteratorFn: func(it chunks.Iterator) chunks.Iterator {
			lcsi, existing := it.(*listChunkSeriesIterator)
			var chks []chunks.Meta
			if existing {
				chks = lcsi.chks[:0]
			} else {
				chks = make([]chunks.Meta, 0, len(samples))
			}
			chks = append(chks, chksFromSamples...)
			if existing {
				lcsi.Reset(chks...)
				return lcsi
			}
			return NewListChunkSeriesIterator(chks...)
		},
	}
}
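
// A usage sketch, again assuming the per-chunk sample slices s1 and s2 come
// from a test helper; ExpandChunks (defined below) collects the encoded chunks:
//
//	cs := NewListChunkSeriesFromSamples(labels.FromStrings("job", "demo"), s1, s2)
//	chks, err := ExpandChunks(cs.Iterator(nil))
//	_, _ = chks, err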

type listSeriesIterator struct {
	samples Samples
	idx     int
}

type samples []tsdbutil.Sample

func (s samples) Get(i int) tsdbutil.Sample { return s[i] }
func (s samples) Len() int                  { return len(s) }

// Samples interface allows working with slices of types that are compatible with tsdbutil.Sample.
type Samples interface {
	Get(i int) tsdbutil.Sample
	Len() int
}
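
// The package's own samples slice type above is the canonical adapter for this
// interface; a hypothetical custom backing store only needs the same two methods:
//
//	type mySamples struct{ buf []tsdbutil.Sample }
//
//	func (m mySamples) Get(i int) tsdbutil.Sample { return m.buf[i] }
//	func (m mySamples) Len() int                  { return len(m.buf) }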

// NewListSeriesIterator returns a listSeriesIterator that allows iterating over the provided samples.
func NewListSeriesIterator(samples Samples) chunkenc.Iterator {
	return &listSeriesIterator{samples: samples, idx: -1}
}

func (it *listSeriesIterator) Reset(samples Samples) {
	it.samples = samples
	it.idx = -1
}

func (it *listSeriesIterator) At() (int64, float64) {
	s := it.samples.Get(it.idx)
	return s.T(), s.F()
}

func (it *listSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
	s := it.samples.Get(it.idx)
	return s.T(), s.H()
}

func (it *listSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
	s := it.samples.Get(it.idx)
	return s.T(), s.FH()
}

func (it *listSeriesIterator) AtT() int64 {
	s := it.samples.Get(it.idx)
	return s.T()
}

func (it *listSeriesIterator) Next() chunkenc.ValueType {
	it.idx++
	if it.idx >= it.samples.Len() {
		return chunkenc.ValNone
	}
	return it.samples.Get(it.idx).Type()
}

func (it *listSeriesIterator) Seek(t int64) chunkenc.ValueType {
	if it.idx == -1 {
		it.idx = 0
	}
	if it.idx >= it.samples.Len() {
		return chunkenc.ValNone
	}
	// No-op check.
	if s := it.samples.Get(it.idx); s.T() >= t {
		return s.Type()
	}
	// Do binary search between current position and end.
	it.idx += sort.Search(it.samples.Len()-it.idx, func(i int) bool {
		s := it.samples.Get(i + it.idx)
		return s.T() >= t
	})

	if it.idx >= it.samples.Len() {
		return chunkenc.ValNone
	}
	return it.samples.Get(it.idx).Type()
}

func (it *listSeriesIterator) Err() error { return nil }

type listChunkSeriesIterator struct {
	chks []chunks.Meta
	idx  int
}

// NewListChunkSeriesIterator returns a listChunkSeriesIterator that allows iterating over the provided chunks.
func NewListChunkSeriesIterator(chks ...chunks.Meta) chunks.Iterator {
	return &listChunkSeriesIterator{chks: chks, idx: -1}
}

func (it *listChunkSeriesIterator) Reset(chks ...chunks.Meta) {
	it.chks = chks
	it.idx = -1
}

func (it *listChunkSeriesIterator) At() chunks.Meta {
	return it.chks[it.idx]
}

func (it *listChunkSeriesIterator) Next() bool {
	it.idx++
	return it.idx < len(it.chks)
}

func (it *listChunkSeriesIterator) Err() error { return nil }

type chunkSetToSeriesSet struct {
	ChunkSeriesSet

	iter             chunks.Iterator
	chkIterErr       error
	sameSeriesChunks []Series
}

// NewSeriesSetFromChunkSeriesSet converts a ChunkSeriesSet to a SeriesSet by decoding chunks one by one.
func NewSeriesSetFromChunkSeriesSet(chk ChunkSeriesSet) SeriesSet {
	return &chunkSetToSeriesSet{ChunkSeriesSet: chk}
}
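
// A typical consumption pattern, assuming css is a ChunkSeriesSet obtained
// from a ChunkQuerier:
//
//	set := NewSeriesSetFromChunkSeriesSet(css)
//	for set.Next() {
//		fmt.Println(set.At().Labels())
//	}
//	if err := set.Err(); err != nil {
//		// handle the error
//	}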

func (c *chunkSetToSeriesSet) Next() bool {
	if c.Err() != nil || !c.ChunkSeriesSet.Next() {
		return false
	}

	c.iter = c.ChunkSeriesSet.At().Iterator(c.iter)
	c.sameSeriesChunks = nil

	for c.iter.Next() {
		c.sameSeriesChunks = append(
			c.sameSeriesChunks,
			newChunkToSeriesDecoder(c.ChunkSeriesSet.At().Labels(), c.iter.At()),
		)
	}

	if c.iter.Err() != nil {
		c.chkIterErr = c.iter.Err()
		return false
	}
	return true
}

func (c *chunkSetToSeriesSet) At() Series {
	// Return a single series composed of all chunks belonging to the same series.
	return ChainedSeriesMerge(c.sameSeriesChunks...)
}

func (c *chunkSetToSeriesSet) Err() error {
	if c.chkIterErr != nil {
		return c.chkIterErr
	}
	return c.ChunkSeriesSet.Err()
}

func newChunkToSeriesDecoder(labels labels.Labels, chk chunks.Meta) Series {
	return &SeriesEntry{
		Lset: labels,
		SampleIteratorFn: func(it chunkenc.Iterator) chunkenc.Iterator {
			// TODO(bwplotka): Can we provide any chunkenc buffer?
			return chk.Chunk.Iterator(it)
		},
	}
}

type seriesSetToChunkSet struct {
	SeriesSet
}

// NewSeriesSetToChunkSet converts a SeriesSet to a ChunkSeriesSet by encoding chunks from samples.
func NewSeriesSetToChunkSet(chk SeriesSet) ChunkSeriesSet {
	return &seriesSetToChunkSet{SeriesSet: chk}
}

func (c *seriesSetToChunkSet) Next() bool {
	if c.Err() != nil || !c.SeriesSet.Next() {
		return false
	}
	return true
}

func (c *seriesSetToChunkSet) At() ChunkSeries {
	return NewSeriesToChunkEncoder(c.SeriesSet.At())
}

func (c *seriesSetToChunkSet) Err() error {
	return c.SeriesSet.Err()
}

type seriesToChunkEncoder struct {
	Series
}

const seriesToChunkEncoderSplit = 120

// NewSeriesToChunkEncoder encodes the samples of a series into chunks, starting
// a new chunk after seriesToChunkEncoderSplit (120) samples.
func NewSeriesToChunkEncoder(series Series) ChunkSeries {
	return &seriesToChunkEncoder{series}
}
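
// A sketch of how the encoder is typically driven, with sampleSlice again
// assumed to come from a test helper:
//
//	cs := NewSeriesToChunkEncoder(NewListSeries(labels.FromStrings("job", "demo"), sampleSlice))
//	it := cs.Iterator(nil)
//	for it.Next() {
//		meta := it.At() // one chunks.Meta per 120-sample (or type-change) window
//		_ = meta
//	}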

func (s *seriesToChunkEncoder) Iterator(it chunks.Iterator) chunks.Iterator {
	var (
		chk chunkenc.Chunk
		app *RecodingAppender
		err error
	)
	mint := int64(math.MaxInt64)
	maxt := int64(math.MinInt64)

	var chks []chunks.Meta
	lcsi, existing := it.(*listChunkSeriesIterator)
	if existing {
		chks = lcsi.chks[:0]
	}

	i := 0
	seriesIter := s.Series.Iterator(nil)
	lastType := chunkenc.ValNone
	for typ := seriesIter.Next(); typ != chunkenc.ValNone; typ = seriesIter.Next() {
		chunkCreated := false
		if typ != lastType || i >= seriesToChunkEncoderSplit {
			// Create a new chunk if the sample type changed or there are too many samples in the current one.
			chks = appendChunk(chks, mint, maxt, chk)
			chunkCreated = true
			chk, err = chunkenc.NewEmptyChunk(typ.ChunkEncoding())
			if err != nil {
				return errChunksIterator{err: err}
			}
			chkAppender, err := chk.Appender()
			if err != nil {
				return errChunksIterator{err: err}
			}
			app = NewRecodingAppender(&chk, chkAppender)
			mint = int64(math.MaxInt64)
			// maxt is immediately overwritten below, which is why setting it here won't make a difference.
			i = 0
		}
		lastType = typ

		var (
			t  int64
			v  float64
			h  *histogram.Histogram
			fh *histogram.FloatHistogram
		)
		switch typ {
		case chunkenc.ValFloat:
			t, v = seriesIter.At()
			app.Append(t, v)
		case chunkenc.ValHistogram:
			t, h = seriesIter.AtHistogram()
			if ok, counterReset := app.AppendHistogram(t, h); !ok {
				chks = appendChunk(chks, mint, maxt, chk)
				histChunk := chunkenc.NewHistogramChunk()
				chunkCreated = true
				if counterReset {
					histChunk.SetCounterResetHeader(chunkenc.CounterReset)
				}
				chk = histChunk

				chkAppender, err := chk.Appender()
				if err != nil {
					return errChunksIterator{err: err}
				}
				mint = int64(math.MaxInt64)
				i = 0
				app = NewRecodingAppender(&chk, chkAppender)
				if ok, _ := app.AppendHistogram(t, h); !ok {
					panic("unexpected error while appending histogram")
				}
			}
			if chunkCreated && h.CounterResetHint == histogram.GaugeType {
				chk.(*chunkenc.HistogramChunk).SetCounterResetHeader(chunkenc.GaugeType)
			}
		case chunkenc.ValFloatHistogram:
			t, fh = seriesIter.AtFloatHistogram()
			if ok, counterReset := app.AppendFloatHistogram(t, fh); !ok {
				chks = appendChunk(chks, mint, maxt, chk)
				floatHistChunk := chunkenc.NewFloatHistogramChunk()
				chunkCreated = true
				if counterReset {
					floatHistChunk.SetCounterResetHeader(chunkenc.CounterReset)
				}
				chk = floatHistChunk
				chkAppender, err := chk.Appender()
				if err != nil {
					return errChunksIterator{err: err}
				}
				mint = int64(math.MaxInt64)
				i = 0
				app = NewRecodingAppender(&chk, chkAppender)
				if ok, _ := app.AppendFloatHistogram(t, fh); !ok {
					panic("unexpected error while appending float histogram")
				}
			}
			if chunkCreated && fh.CounterResetHint == histogram.GaugeType {
				chk.(*chunkenc.FloatHistogramChunk).SetCounterResetHeader(chunkenc.GaugeType)
			}
		default:
			return errChunksIterator{err: fmt.Errorf("unknown sample type %s", typ.String())}
		}

		maxt = t
		if mint == math.MaxInt64 {
			mint = t
		}
		i++
	}
	if err := seriesIter.Err(); err != nil {
		return errChunksIterator{err: err}
	}

	chks = appendChunk(chks, mint, maxt, chk)

	if existing {
		lcsi.Reset(chks...)
		return lcsi
	}
	return NewListChunkSeriesIterator(chks...)
}

func appendChunk(chks []chunks.Meta, mint, maxt int64, chk chunkenc.Chunk) []chunks.Meta {
	if chk != nil {
		chks = append(chks, chunks.Meta{
			MinTime: mint,
			MaxTime: maxt,
			Chunk:   chk,
		})
	}
	return chks
}

type errChunksIterator struct {
	err error
}

func (e errChunksIterator) At() chunks.Meta { return chunks.Meta{} }
func (e errChunksIterator) Next() bool      { return false }
func (e errChunksIterator) Err() error      { return e.err }

// ExpandSamples iterates over all samples in the iterator, buffering all in a slice.
// Optionally it takes a sample constructor, which is useful when you want to compare
// sample slices with different sample implementations. If nil, the sample type from
// this package will be used.
func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample) ([]tsdbutil.Sample, error) {
	if newSampleFn == nil {
		newSampleFn = func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample {
			switch {
			case h != nil:
				return hSample{t, h}
			case fh != nil:
				return fhSample{t, fh}
			default:
				return fSample{t, f}
			}
		}
	}

	var result []tsdbutil.Sample
	for {
		switch iter.Next() {
		case chunkenc.ValNone:
			return result, iter.Err()
		case chunkenc.ValFloat:
			t, f := iter.At()
			// NaNs can't be compared normally, so substitute for another value.
			if math.IsNaN(f) {
				f = -42
			}
			result = append(result, newSampleFn(t, f, nil, nil))
		case chunkenc.ValHistogram:
			t, h := iter.AtHistogram()
			result = append(result, newSampleFn(t, 0, h, nil))
		case chunkenc.ValFloatHistogram:
			t, fh := iter.AtFloatHistogram()
			result = append(result, newSampleFn(t, 0, nil, fh))
		}
	}
}
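
// In tests this is typically paired with the list helpers above, e.g. to round-trip
// samples through an iterator using the default sample constructor:
//
//	series := NewListSeries(labels.FromStrings("__name__", "up"), sampleSlice)
//	got, err := ExpandSamples(series.Iterator(nil), nil)
//	_, _ = got, err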

// ExpandChunks iterates over all chunks in the iterator, buffering all in a slice.
func ExpandChunks(iter chunks.Iterator) ([]chunks.Meta, error) {
	var result []chunks.Meta
	for iter.Next() {
		result = append(result, iter.At())
	}
	return result, iter.Err()
}

// RecodingAppender is an appender that recodes histogram samples if needed during appends.
// It wraps an existing chunk appender and the chunk to which samples are appended.
type RecodingAppender struct {
	chk *chunkenc.Chunk
	app chunkenc.Appender
}

func NewRecodingAppender(chk *chunkenc.Chunk, app chunkenc.Appender) *RecodingAppender {
	return &RecodingAppender{
		chk: chk,
		app: app,
	}
}
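
// A sketch of the append loop the encoder above runs, assuming h1 and h2 are
// *histogram.Histogram values built elsewhere:
//
//	var chk chunkenc.Chunk = chunkenc.NewHistogramChunk()
//	chkApp, err := chk.Appender()
//	if err != nil {
//		// handle the error
//	}
//	app := NewRecodingAppender(&chk, chkApp)
//	if ok, counterReset := app.AppendHistogram(0, h1); !ok && counterReset {
//		// a counter reset means a fresh chunk must be started
//	}
//	if ok, _ := app.AppendHistogram(60_000, h2); ok {
//		// h2 may have been recoded to match the chunk's bucket layout
//	}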

// Append appends a float sample to the appender.
func (a *RecodingAppender) Append(t int64, v float64) {
	a.app.Append(t, v)
}

// AppendHistogram appends a histogram sample to the underlying chunk.
// The method returns false if the sample cannot be appended, and a second boolean
// that is set to true when it is not appendable because of a counter reset.
// If counterReset is true, okToAppend is always false.
func (a *RecodingAppender) AppendHistogram(t int64, h *histogram.Histogram) (okToAppend, counterReset bool) {
	app, ok := a.app.(*chunkenc.HistogramAppender)
	if !ok {
		return false, false
	}

	if app.NumSamples() == 0 {
		a.app.AppendHistogram(t, h)
		return true, false
	}

	var (
		pForwardInserts, nForwardInserts   []chunkenc.Insert
		pBackwardInserts, nBackwardInserts []chunkenc.Insert
		pMergedSpans, nMergedSpans         []histogram.Span
	)
	switch h.CounterResetHint {
	case histogram.GaugeType:
		pForwardInserts, nForwardInserts,
			pBackwardInserts, nBackwardInserts,
			pMergedSpans, nMergedSpans,
			okToAppend = app.AppendableGauge(h)
	default:
		pForwardInserts, nForwardInserts, okToAppend, counterReset = app.Appendable(h)
	}
	if !okToAppend || counterReset {
		return false, counterReset
	}

	if len(pBackwardInserts)+len(nBackwardInserts) > 0 {
		h.PositiveSpans = pMergedSpans
		h.NegativeSpans = nMergedSpans
		app.RecodeHistogram(h, pBackwardInserts, nBackwardInserts)
	}
	if len(pForwardInserts) > 0 || len(nForwardInserts) > 0 {
		chk, app := app.Recode(
			pForwardInserts, nForwardInserts,
			h.PositiveSpans, h.NegativeSpans,
		)
		*a.chk = chk
		a.app = app
	}

	a.app.AppendHistogram(t, h)
	return true, counterReset
}

// AppendFloatHistogram appends a float histogram sample to the underlying chunk.
// The method returns false if the sample cannot be appended, and a second boolean
// that is set to true when it is not appendable because of a counter reset.
// If counterReset is true, okToAppend is always false.
func (a *RecodingAppender) AppendFloatHistogram(t int64, fh *histogram.FloatHistogram) (okToAppend, counterReset bool) {
	app, ok := a.app.(*chunkenc.FloatHistogramAppender)
	if !ok {
		return false, false
	}

	if app.NumSamples() == 0 {
		a.app.AppendFloatHistogram(t, fh)
		return true, false
	}

	var (
		pForwardInserts, nForwardInserts   []chunkenc.Insert
		pBackwardInserts, nBackwardInserts []chunkenc.Insert
		pMergedSpans, nMergedSpans         []histogram.Span
	)
	switch fh.CounterResetHint {
	case histogram.GaugeType:
		pForwardInserts, nForwardInserts,
			pBackwardInserts, nBackwardInserts,
			pMergedSpans, nMergedSpans,
			okToAppend = app.AppendableGauge(fh)
	default:
		pForwardInserts, nForwardInserts, okToAppend, counterReset = app.Appendable(fh)
	}

	if !okToAppend || counterReset {
		return false, counterReset
	}

	if len(pBackwardInserts)+len(nBackwardInserts) > 0 {
		fh.PositiveSpans = pMergedSpans
		fh.NegativeSpans = nMergedSpans
		app.RecodeHistogramm(fh, pBackwardInserts, nBackwardInserts)
	}

	if len(pForwardInserts) > 0 || len(nForwardInserts) > 0 {
		chunk, app := app.Recode(
			pForwardInserts, nForwardInserts,
			fh.PositiveSpans, fh.NegativeSpans,
		)
		*a.chk = chunk
		a.app = app
	}

	a.app.AppendFloatHistogram(t, fh)
	return true, counterReset
}