Mirror of https://github.com/prometheus/prometheus.git (synced 2025-03-05 20:59:13 -08:00)
Rename EstimatedChunkCount to ChunkCount

commit a0634e7408
parent a9445622ad

@@ -416,11 +416,11 @@ type ChunkSeries interface {
 	Labels
 	ChunkIterable
 
-	// EstimatedChunkCount returns an estimate of the number of chunks available from this ChunkSeries.
+	// ChunkCount returns the number of chunks available from this ChunkSeries.
 	//
-	// This estimate is used by Mimir's ingesters to report the number of chunks expected to be returned by a query,
+	// This value is used by Mimir's ingesters to report the number of chunks expected to be returned by a query,
 	// which is used by queriers to enforce the 'max chunks per query' limit.
-	EstimatedChunkCount() (int, error)
+	ChunkCount() (int, error)
 }
 
 // Labels represents an item that has labels e.g. time series.

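For orientation, the sketch below shows how a consumer such as a querier might use the renamed method to enforce a 'max chunks per query' limit before pulling any chunk data, as the comment above describes. It is written as if inside the storage package (using its ChunkSeriesSet and ChunkSeries interfaces plus the standard fmt package); the function name, limit handling, and error message are illustrative assumptions, not code from Prometheus or Mimir.

// checkChunkLimit is a hypothetical helper: it sums ChunkCount across the
// series in a ChunkSeriesSet and fails fast once the total exceeds the limit.
func checkChunkLimit(set ChunkSeriesSet, maxChunks int) error {
	total := 0
	for set.Next() {
		c, err := set.At().ChunkCount()
		if err != nil {
			return err
		}
		total += c
		if total > maxChunks {
			return fmt.Errorf("query would touch at least %d chunks, limit is %d", total, maxChunks)
		}
	}
	return set.Err()
}
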
@@ -837,7 +837,7 @@ func NewConcatenatingChunkSeriesMerger() VerticalChunkSeriesMergeFunc {
 				chunkCount := 0
 
 				for _, series := range series {
-					c, err := series.EstimatedChunkCount()
+					c, err := series.ChunkCount()
 					if err != nil {
 						return 0, err

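The hunk above only shows part of the loop; for clarity, here is the summing pattern as a standalone sketch: the chunk count of a concatenation of series is the sum of the counts reported by each underlying series. The free-function form and its name are hypothetical; the merger presumably embeds this logic in the merged series' own ChunkCount implementation rather than exposing it separately.

// concatenatedChunkCount is a hypothetical standalone version of the logic above.
func concatenatedChunkCount(series ...ChunkSeries) (int, error) {
	chunkCount := 0
	for _, s := range series {
		c, err := s.ChunkCount()
		if err != nil {
			return 0, err
		}
		chunkCount += c
	}
	return chunkCount, nil
}
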
@@ -579,7 +579,7 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
 			require.Equal(t, expErr, actErr)
 			require.Equal(t, expChks, actChks)
 
-			count, err := merged.EstimatedChunkCount()
+			count, err := merged.ChunkCount()
 			require.NoError(t, err)
 			require.Len(t, actChks, count)
 		})

@@ -717,7 +717,7 @@ func TestConcatenatingChunkSeriesMerger(t *testing.T) {
 			require.Equal(t, expErr, actErr)
 			require.Equal(t, expChks, actChks)
 
-			count, err := merged.EstimatedChunkCount()
+			count, err := merged.ChunkCount()
 			require.NoError(t, err)
 			require.Equal(t, len(expChks), count)
 		})

@@ -41,7 +41,7 @@ type ChunkSeriesEntry struct {
 
 func (s *ChunkSeriesEntry) Labels() labels.Labels { return s.Lset }
 func (s *ChunkSeriesEntry) Iterator(it chunks.Iterator) chunks.Iterator { return s.ChunkIteratorFn(it) }
-func (s *ChunkSeriesEntry) EstimatedChunkCount() (int, error) { return s.ChunkCountFn() }
+func (s *ChunkSeriesEntry) ChunkCount() (int, error) { return s.ChunkCountFn() }
 
 // NewListSeries returns series entry with iterator that allows to iterate over provided samples.
 func NewListSeries(lset labels.Labels, s []tsdbutil.Sample) *SeriesEntry {

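Since ChunkSeriesEntry simply delegates to its ChunkCountFn field, a caller-side sketch of the renamed method could look like the following. Lset, ChunkIteratorFn, and ChunkCountFn are the fields exercised above; the wrapper function, label values, and chks argument are illustrative assumptions.

// exampleChunkSeriesEntry is a hypothetical demonstration of the delegation:
// ChunkCount simply invokes the ChunkCountFn closure.
func exampleChunkSeriesEntry(chks []chunks.Meta) (int, error) {
	entry := &ChunkSeriesEntry{
		Lset: labels.FromStrings("__name__", "up", "job", "node"),
		ChunkIteratorFn: func(chunks.Iterator) chunks.Iterator {
			return NewListChunkSeriesIterator(chks...)
		},
		ChunkCountFn: func() (int, error) { return len(chks), nil },
	}
	return entry.ChunkCount() // delegates to ChunkCountFn, i.e. len(chks), nil
}
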
@@ -402,8 +402,7 @@ func (s *seriesToChunkEncoder) Iterator(it chunks.Iterator) chunks.Iterator {
 	return NewListChunkSeriesIterator(chks...)
 }
 
-// EstimatedChunkCount returns an estimate of the number of chunks produced by Iterator.
-func (s *seriesToChunkEncoder) EstimatedChunkCount() (int, error) {
+func (s *seriesToChunkEncoder) ChunkCount() (int, error) {
 	// This method is expensive, but we don't expect to ever actually use this on the ingester query path in Mimir -
 	// it's just here to ensure things don't break if this assumption ever changes.

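The retained comment notes that this method is expensive, presumably because the encoder has to materialise its chunks just to count them. Purely as an illustration of that trade-off (and not the encoder's actual implementation), an exact count can always be obtained by draining the chunk iterator:

// countChunksByIterating is a hypothetical, deliberately naive fallback: it
// walks every chunk produced by the series' Iterator and counts them, which
// is exact but costs a full pass over the data.
func countChunksByIterating(s ChunkSeries) (int, error) {
	it := s.Iterator(nil)
	count := 0
	for it.Next() {
		count++
	}
	return count, it.Err()
}
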
@@ -101,7 +101,7 @@ func TestNewListChunkSeriesFromSamples(t *testing.T) {
 	require.NoError(t, it.Err())
 	require.Len(t, chks, 2)
 
-	count, err := series.EstimatedChunkCount()
+	count, err := series.ChunkCount()
 	require.NoError(t, err)
 	require.Equal(t, len(chks), count, "should have one chunk per group of samples")
 }

@@ -224,7 +224,7 @@ func TestSeriesToChunks(t *testing.T) {
 			chks, err := ExpandChunks(encoder.Iterator(nil))
 			require.NoError(t, err)
 			require.Len(t, chks, testCase.expectedChunkCount)
-			count, err := encoder.EstimatedChunkCount()
+			count, err := encoder.ChunkCount()
 			require.NoError(t, err)
 			require.Equal(t, testCase.expectedChunkCount, count)

@@ -539,7 +539,7 @@ func testHistogramsSeriesToChunks(t *testing.T, test histogramTest) {
 	require.NoError(t, err)
 	require.Equal(t, len(test.expectedCounterResetHeaders), len(chks))
 
-	count, err := encoder.EstimatedChunkCount()
+	count, err := encoder.ChunkCount()
 	require.NoError(t, err)
 	require.Len(t, chks, count)

@@ -649,7 +649,7 @@ func (s *chunkSeriesEntry) Iterator(it chunks.Iterator) chunks.Iterator {
 	return pi
 }
 
-func (s *chunkSeriesEntry) EstimatedChunkCount() (int, error) {
+func (s *chunkSeriesEntry) ChunkCount() (int, error) {
 	return len(s.chks), nil
 }