Mirror of https://github.com/prometheus/prometheus.git, synced 2024-11-09 23:24:05 -08:00
Avoid infinite loop on duplicate NaN values. (#4275)
Fixes #4254. NaNs don't equal themselves, so a duplicate NaN would always hit the break statement and never get popped. We should not return multiple data points for the same timestamp, so don't compare values at all.

Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
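For context: under IEEE 754, NaN compares unequal to everything, including itself, which is why the old value comparison could never match a duplicate NaN. A minimal Go check (illustrative only, not part of the commit):

package main

import (
	"fmt"
	"math"
)

func main() {
	nan := math.NaN()
	fmt.Println(nan == nan) // false: NaN never equals itself
	fmt.Println(nan != nan) // true: so a value check like "nextv != currv" always held for NaNs
}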
This commit is contained in:
parent 8cd59da857
commit 78efdc6d6b
@@ -450,10 +450,10 @@ func (c *mergeIterator) Next() bool {
 		return false
 	}
 
-	currt, currv := c.At()
+	currt, _ := c.At()
 	for len(c.h) > 0 {
-		nextt, nextv := c.h[0].At()
-		if nextt != currt || nextv != currv {
+		nextt, _ := c.h[0].At()
+		if nextt != currt {
 			break
 		}
 
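A standalone sketch of why this change terminates the loop; simplified types, not the real mergeIterator, with the merge heap stubbed by a slice:

package main

import (
	"fmt"
	"math"
)

type sample struct {
	t int64
	v float64
}

func main() {
	// Two iterators both positioned at {0, NaN}, standing in for the heap.
	h := []sample{{0, math.NaN()}, {0, math.NaN()}}
	curr := h[0]

	// Old check: the value comparison means a duplicate NaN never matches,
	// the break fires immediately, and nothing is ever popped.
	oldPopped := 0
	for _, next := range h {
		if next.t != curr.t || next.v != curr.v {
			break
		}
		oldPopped++
	}

	// Fixed check: timestamps only, so duplicates are consumed.
	newPopped := 0
	for _, next := range h {
		if next.t != curr.t {
			break
		}
		newPopped++
	}

	fmt.Println(oldPopped, newPopped) // 0 2
}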
@@ -15,6 +15,7 @@ package storage
 
 import (
 	"fmt"
+	"math"
 	"testing"
 
 	"github.com/stretchr/testify/require"
@@ -97,6 +98,16 @@ func TestMergeSeriesSet(t *testing.T) {
 			newMockSeries(labels.FromStrings("foo", "bar"), []sample{{0, 0}, {1, 1}, {2, 2}, {3, 3}}),
 		),
 	},
+	{
+		input: []SeriesSet{newMockSeriesSet(
+			newMockSeries(labels.FromStrings("foo", "bar"), []sample{{0, math.NaN()}}),
+		), newMockSeriesSet(
+			newMockSeries(labels.FromStrings("foo", "bar"), []sample{{0, math.NaN()}}),
+		)},
+		expected: newMockSeriesSet(
+			newMockSeries(labels.FromStrings("foo", "bar"), []sample{{0, math.NaN()}}),
+		),
+	},
 } {
 	merged := NewMergeSeriesSet(tc.input)
 	for merged.Next() {
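The new case merges two series sets that each carry the same {0, NaN} sample and expects a single sample back. A simplified sketch of the intended outcome, reusing the test's sample type; dedupByTimestamp is a hypothetical helper, not part of the test:

// dedupByTimestamp keeps the first sample per timestamp in a time-sorted
// slice, comparing timestamps only, mirroring the fixed iterator.
func dedupByTimestamp(in []sample) []sample {
	var out []sample
	for _, s := range in {
		if len(out) > 0 && out[len(out)-1].t == s.t {
			continue // duplicate timestamp: drop, regardless of value
		}
		out = append(out, s)
	}
	return out
}

// dedupByTimestamp([]sample{{0, math.NaN()}, {0, math.NaN()}})
// => a single sample: {0, NaN}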
@@ -197,6 +208,10 @@ func drainSamples(iter SeriesIterator) []sample {
 	result := []sample{}
 	for iter.Next() {
 		t, v := iter.At()
+		// NaNs can't be compared normally, so substitute for another value.
+		if math.IsNaN(v) {
+			v = -42
+		}
 		result = append(result, sample{t, v})
 	}
 	return result
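Why the -42 substitution exists: the test compares drained samples with require.Equal, which for these types falls back to reflect.DeepEqual, and DeepEqual compares floats with ==, so two NaNs never match. A quick illustration of that assumed behavior:

package main

import (
	"fmt"
	"math"
	"reflect"
)

type sample struct {
	t int64
	v float64
}

func main() {
	a := []sample{{0, math.NaN()}}
	b := []sample{{0, math.NaN()}}
	// DeepEqual uses == on floats, and NaN == NaN is false.
	fmt.Println(reflect.DeepEqual(a, b)) // false

	// Substituting a sentinel value makes the slices comparable.
	a[0].v, b[0].v = -42, -42
	fmt.Println(reflect.DeepEqual(a, b)) // true
}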