// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package textparse

import (
	"bytes"
	"encoding/binary"
	"errors"
	"io"
	"testing"

	"github.com/gogo/protobuf/proto"
	"github.com/stretchr/testify/require"

	"github.com/prometheus/prometheus/model/exemplar"
	"github.com/prometheus/prometheus/model/histogram"
	"github.com/prometheus/prometheus/model/labels"

	dto "github.com/prometheus/prometheus/prompb/io/prometheus/client"
)

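// createTestProtoBuf encodes the text-format MetricFamily messages below into
// the length-delimited (uvarint length prefix followed by the binary message)
// protobuf exposition format that the protobuf parser consumes.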
func createTestProtoBuf(t *testing.T) *bytes.Buffer {
	testMetricFamilies := []string{
		`name: "go_build_info"
help: "Build information about the main Go module."
type: GAUGE
metric: <
  label: <
    name: "checksum"
    value: ""
  >
  label: <
    name: "path"
    value: "github.com/prometheus/client_golang"
  >
  label: <
    name: "version"
    value: "(devel)"
  >
  gauge: <
    value: 1
  >
>

`,
		`name: "go_memstats_alloc_bytes_total"
help: "Total number of bytes allocated, even if freed."
type: COUNTER
metric: <
  counter: <
    value: 1.546544e+06
    exemplar: <
      label: <
        name: "dummyID"
        value: "42"
      >
      value: 12
      timestamp: <
        seconds: 1625851151
        nanos: 233181499
      >
    >
  >
>

`,
		`name: "something_untyped"
help: "Just to test the untyped type."
type: UNTYPED
metric: <
  untyped: <
    value: 42
  >
  timestamp_ms: 1234567
>

`,
		`name: "test_histogram"
help: "Test histogram with many buckets removed to keep it manageable in size."
type: HISTOGRAM
metric: <
  histogram: <
    sample_count: 175
    sample_sum: 0.0008280461746287094
    bucket: <
      cumulative_count: 2
      upper_bound: -0.0004899999999999998
    >
    bucket: <
      cumulative_count: 4
      upper_bound: -0.0003899999999999998
      exemplar: <
        label: <
          name: "dummyID"
          value: "59727"
        >
        value: -0.00039
        timestamp: <
          seconds: 1625851155
          nanos: 146848499
        >
      >
    >
    bucket: <
      cumulative_count: 16
      upper_bound: -0.0002899999999999998
      exemplar: <
        label: <
          name: "dummyID"
          value: "5617"
        >
        value: -0.00029
      >
    >
    schema: 3
    zero_threshold: 2.938735877055719e-39
    zero_count: 2
    negative_span: <
      offset: -162
      length: 1
    >
    negative_span: <
      offset: 23
      length: 4
    >
    negative_delta: 1
    negative_delta: 3
    negative_delta: -2
    negative_delta: -1
    negative_delta: 1
    positive_span: <
      offset: -161
      length: 1
    >
    positive_span: <
      offset: 8
      length: 3
    >
    positive_delta: 1
    positive_delta: 2
    positive_delta: -1
    positive_delta: -1
  >
  timestamp_ms: 1234568
>

`,
		`name: "test_gauge_histogram"
help: "Like test_histogram but as gauge histogram."
type: GAUGE_HISTOGRAM
metric: <
  histogram: <
    sample_count: 175
    sample_sum: 0.0008280461746287094
    bucket: <
      cumulative_count: 2
      upper_bound: -0.0004899999999999998
    >
    bucket: <
      cumulative_count: 4
      upper_bound: -0.0003899999999999998
      exemplar: <
        label: <
          name: "dummyID"
          value: "59727"
        >
        value: -0.00039
        timestamp: <
          seconds: 1625851155
          nanos: 146848499
        >
      >
    >
    bucket: <
      cumulative_count: 16
      upper_bound: -0.0002899999999999998
      exemplar: <
        label: <
          name: "dummyID"
          value: "5617"
        >
        value: -0.00029
      >
    >
    schema: 3
    zero_threshold: 2.938735877055719e-39
    zero_count: 2
    negative_span: <
      offset: -162
      length: 1
    >
    negative_span: <
      offset: 23
      length: 4
    >
    negative_delta: 1
    negative_delta: 3
    negative_delta: -2
    negative_delta: -1
    negative_delta: 1
    positive_span: <
      offset: -161
      length: 1
    >
    positive_span: <
      offset: 8
      length: 3
    >
    positive_delta: 1
    positive_delta: 2
    positive_delta: -1
    positive_delta: -1
  >
  timestamp_ms: 1234568
>

`,
		`name: "test_float_histogram"
help: "Test float histogram with many buckets removed to keep it manageable in size."
type: HISTOGRAM
metric: <
  histogram: <
    sample_count_float: 175.0
    sample_sum: 0.0008280461746287094
    bucket: <
      cumulative_count_float: 2.0
      upper_bound: -0.0004899999999999998
    >
    bucket: <
      cumulative_count_float: 4.0
      upper_bound: -0.0003899999999999998
      exemplar: <
        label: <
          name: "dummyID"
          value: "59727"
        >
        value: -0.00039
        timestamp: <
          seconds: 1625851155
          nanos: 146848499
        >
      >
    >
    bucket: <
      cumulative_count_float: 16
      upper_bound: -0.0002899999999999998
      exemplar: <
        label: <
          name: "dummyID"
          value: "5617"
        >
        value: -0.00029
      >
    >
    schema: 3
    zero_threshold: 2.938735877055719e-39
    zero_count_float: 2.0
    negative_span: <
      offset: -162
      length: 1
    >
    negative_span: <
      offset: 23
      length: 4
    >
    negative_count: 1.0
    negative_count: 3.0
    negative_count: -2.0
    negative_count: -1.0
    negative_count: 1.0
    positive_span: <
      offset: -161
      length: 1
    >
    positive_span: <
      offset: 8
      length: 3
    >
    positive_count: 1.0
    positive_count: 2.0
    positive_count: -1.0
    positive_count: -1.0
  >
  timestamp_ms: 1234568
>

`,
		`name: "test_gauge_float_histogram"
help: "Like test_float_histogram but as gauge histogram."
type: GAUGE_HISTOGRAM
metric: <
  histogram: <
    sample_count_float: 175.0
    sample_sum: 0.0008280461746287094
    bucket: <
      cumulative_count_float: 2.0
      upper_bound: -0.0004899999999999998
    >
    bucket: <
      cumulative_count_float: 4.0
      upper_bound: -0.0003899999999999998
      exemplar: <
        label: <
          name: "dummyID"
          value: "59727"
        >
        value: -0.00039
        timestamp: <
          seconds: 1625851155
          nanos: 146848499
        >
      >
    >
    bucket: <
      cumulative_count_float: 16
      upper_bound: -0.0002899999999999998
      exemplar: <
        label: <
          name: "dummyID"
          value: "5617"
        >
        value: -0.00029
      >
    >
    schema: 3
    zero_threshold: 2.938735877055719e-39
    zero_count_float: 2.0
    negative_span: <
      offset: -162
      length: 1
    >
    negative_span: <
      offset: 23
      length: 4
    >
    negative_count: 1.0
    negative_count: 3.0
    negative_count: -2.0
    negative_count: -1.0
    negative_count: 1.0
    positive_span: <
      offset: -161
      length: 1
    >
    positive_span: <
      offset: 8
      length: 3
    >
    positive_count: 1.0
    positive_count: 2.0
    positive_count: -1.0
    positive_count: -1.0
  >
  timestamp_ms: 1234568
>

`,
		`name: "test_histogram2"
help: "Similar histogram as before but now without sparse buckets."
type: HISTOGRAM
metric: <
  histogram: <
    sample_count: 175
    sample_sum: 0.000828
    bucket: <
      cumulative_count: 2
      upper_bound: -0.00048
    >
    bucket: <
      cumulative_count: 4
      upper_bound: -0.00038
      exemplar: <
        label: <
          name: "dummyID"
          value: "59727"
        >
        value: -0.00038
        timestamp: <
          seconds: 1625851153
          nanos: 146848499
        >
      >
    >
    bucket: <
      cumulative_count: 16
      upper_bound: 1
      exemplar: <
        label: <
          name: "dummyID"
          value: "5617"
        >
        value: -0.000295
      >
    >
    schema: 0
    zero_threshold: 0
  >
>

`,
		`name: "rpc_durations_seconds"
help: "RPC latency distributions."
type: SUMMARY
metric: <
  label: <
    name: "service"
    value: "exponential"
  >
  summary: <
    sample_count: 262
    sample_sum: 0.00025551262820703587
    quantile: <
      quantile: 0.5
      value: 6.442786329648548e-07
    >
    quantile: <
      quantile: 0.9
      value: 1.9435742936658396e-06
    >
    quantile: <
      quantile: 0.99
      value: 4.0471608667037015e-06
    >
  >
>
`,
		`name: "without_quantiles"
help: "A summary without quantiles."
type: SUMMARY
metric: <
  summary: <
    sample_count: 42
    sample_sum: 1.234
  >
>
`,
	}

	varintBuf := make([]byte, binary.MaxVarintLen32)
	buf := &bytes.Buffer{}

	for _, tmf := range testMetricFamilies {
		pb := &dto.MetricFamily{}
		// From text to proto message.
		require.NoError(t, proto.UnmarshalText(tmf, pb))
		// From proto message to binary protobuf.
		protoBuf, err := proto.Marshal(pb)
		require.NoError(t, err)

		// Write first length, then binary protobuf.
		varintLength := binary.PutUvarint(varintBuf, uint64(len(protoBuf)))
		buf.Write(varintBuf[:varintLength])
		buf.Write(protoBuf)
	}

	return buf
}

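// TestProtobufParse runs the protobuf parser over the buffer produced by
// createTestProtoBuf, once with the classic buckets of native histograms
// ignored and once with them parsed as well, and compares every entry the
// parser emits against the expected results.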
func TestProtobufParse(t *testing.T) {
	type parseResult struct {
		lset    labels.Labels
		m       string
		t       int64
		v       float64
		typ     MetricType
		help    string
		unit    string
		comment string
		shs     *histogram.Histogram
		fhs     *histogram.FloatHistogram
		e       []exemplar.Exemplar
	}

	inputBuf := createTestProtoBuf(t)

	scenarios := []struct {
		name     string
		parser   Parser
		expected []parseResult
	}{
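		// The two scenarios below differ only in the second argument to
		// NewProtobufParser: with false the classic bucket series of native
		// histograms are skipped, with true they are emitted as well.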
		{
			name: "ignore classic buckets of native histograms",
			parser: NewProtobufParser(inputBuf.Bytes(), false),
			expected: []parseResult{
				{
					m: "go_build_info",
					help: "Build information about the main Go module.",
				},
				{
					m: "go_build_info",
					typ: MetricTypeGauge,
				},
				{
					m: "go_build_info\xFFchecksum\xFF\xFFpath\xFFgithub.com/prometheus/client_golang\xFFversion\xFF(devel)",
					v: 1,
					lset: labels.FromStrings(
						"__name__", "go_build_info",
						"checksum", "",
						"path", "github.com/prometheus/client_golang",
						"version", "(devel)",
					),
				},
				{
					m: "go_memstats_alloc_bytes_total",
					help: "Total number of bytes allocated, even if freed.",
				},
				{
					m: "go_memstats_alloc_bytes_total",
					typ: MetricTypeCounter,
				},
				{
					m: "go_memstats_alloc_bytes_total",
					v: 1.546544e+06,
					lset: labels.FromStrings(
						"__name__", "go_memstats_alloc_bytes_total",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "42"), Value: 12, HasTs: true, Ts: 1625851151233},
					},
				},
				{
					m: "something_untyped",
					help: "Just to test the untyped type.",
				},
				{
					m: "something_untyped",
					typ: MetricTypeUnknown,
				},
				{
					m: "something_untyped",
					t: 1234567,
					v: 42,
					lset: labels.FromStrings(
						"__name__", "something_untyped",
					),
				},
				{
					m: "test_histogram",
					help: "Test histogram with many buckets removed to keep it manageable in size.",
				},
				{
					m: "test_histogram",
					typ: MetricTypeHistogram,
				},
				{
					m: "test_histogram",
					t: 1234568,
					shs: &histogram.Histogram{
						Count: 175,
						ZeroCount: 2,
						Sum: 0.0008280461746287094,
						ZeroThreshold: 2.938735877055719e-39,
						Schema: 3,
						PositiveSpans: []histogram.Span{
							{Offset: -161, Length: 1},
							{Offset: 8, Length: 3},
						},
						NegativeSpans: []histogram.Span{
							{Offset: -162, Length: 1},
							{Offset: 23, Length: 4},
						},
						PositiveBuckets: []int64{1, 2, -1, -1},
						NegativeBuckets: []int64{1, 3, -2, -1, 1},
					},
					lset: labels.FromStrings(
						"__name__", "test_histogram",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{
					m: "test_gauge_histogram",
					help: "Like test_histogram but as gauge histogram.",
				},
				{
					m: "test_gauge_histogram",
					typ: MetricTypeGaugeHistogram,
				},
				{
					m: "test_gauge_histogram",
					t: 1234568,
					shs: &histogram.Histogram{
						CounterResetHint: histogram.GaugeType,
						Count: 175,
						ZeroCount: 2,
						Sum: 0.0008280461746287094,
						ZeroThreshold: 2.938735877055719e-39,
						Schema: 3,
						PositiveSpans: []histogram.Span{
							{Offset: -161, Length: 1},
							{Offset: 8, Length: 3},
						},
						NegativeSpans: []histogram.Span{
							{Offset: -162, Length: 1},
							{Offset: 23, Length: 4},
						},
						PositiveBuckets: []int64{1, 2, -1, -1},
						NegativeBuckets: []int64{1, 3, -2, -1, 1},
					},
					lset: labels.FromStrings(
						"__name__", "test_gauge_histogram",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{
					m: "test_float_histogram",
					help: "Test float histogram with many buckets removed to keep it manageable in size.",
				},
				{
					m: "test_float_histogram",
					typ: MetricTypeHistogram,
				},
				{
					m: "test_float_histogram",
					t: 1234568,
					fhs: &histogram.FloatHistogram{
						Count: 175.0,
						ZeroCount: 2.0,
						Sum: 0.0008280461746287094,
						ZeroThreshold: 2.938735877055719e-39,
						Schema: 3,
						PositiveSpans: []histogram.Span{
							{Offset: -161, Length: 1},
							{Offset: 8, Length: 3},
						},
						NegativeSpans: []histogram.Span{
							{Offset: -162, Length: 1},
							{Offset: 23, Length: 4},
						},
						PositiveBuckets: []float64{1.0, 2.0, -1.0, -1.0},
						NegativeBuckets: []float64{1.0, 3.0, -2.0, -1.0, 1.0},
					},
					lset: labels.FromStrings(
						"__name__", "test_float_histogram",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{
					m: "test_gauge_float_histogram",
					help: "Like test_float_histogram but as gauge histogram.",
				},
				{
					m: "test_gauge_float_histogram",
					typ: MetricTypeGaugeHistogram,
				},
				{
					m: "test_gauge_float_histogram",
					t: 1234568,
					fhs: &histogram.FloatHistogram{
						CounterResetHint: histogram.GaugeType,
						Count: 175.0,
						ZeroCount: 2.0,
						Sum: 0.0008280461746287094,
						ZeroThreshold: 2.938735877055719e-39,
						Schema: 3,
						PositiveSpans: []histogram.Span{
							{Offset: -161, Length: 1},
							{Offset: 8, Length: 3},
						},
						NegativeSpans: []histogram.Span{
							{Offset: -162, Length: 1},
							{Offset: 23, Length: 4},
						},
						PositiveBuckets: []float64{1.0, 2.0, -1.0, -1.0},
						NegativeBuckets: []float64{1.0, 3.0, -2.0, -1.0, 1.0},
					},
					lset: labels.FromStrings(
						"__name__", "test_gauge_float_histogram",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{
					m: "test_histogram2",
					help: "Similar histogram as before but now without sparse buckets.",
				},
				{
					m: "test_histogram2",
					typ: MetricTypeHistogram,
				},
				{
					m: "test_histogram2_count",
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_count",
					),
				},
				{
					m: "test_histogram2_sum",
					v: 0.000828,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_sum",
					),
				},
				{
					m: "test_histogram2_bucket\xffle\xff-0.00048",
					v: 2,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_bucket",
						"le", "-0.00048",
					),
				},
				{
					m: "test_histogram2_bucket\xffle\xff-0.00038",
					v: 4,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_bucket",
						"le", "-0.00038",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00038, HasTs: true, Ts: 1625851153146},
					},
				},
				{
					m: "test_histogram2_bucket\xffle\xff1.0",
					v: 16,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_bucket",
						"le", "1.0",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.000295, HasTs: false},
					},
				},
				{
					m: "test_histogram2_bucket\xffle\xff+Inf",
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_bucket",
						"le", "+Inf",
					),
				},
				{
					m: "rpc_durations_seconds",
					help: "RPC latency distributions.",
				},
				{
					m: "rpc_durations_seconds",
					typ: MetricTypeSummary,
				},
				{
					m: "rpc_durations_seconds_count\xffservice\xffexponential",
					v: 262,
					lset: labels.FromStrings(
						"__name__", "rpc_durations_seconds_count",
						"service", "exponential",
					),
				},
				{
					m: "rpc_durations_seconds_sum\xffservice\xffexponential",
					v: 0.00025551262820703587,
					lset: labels.FromStrings(
						"__name__", "rpc_durations_seconds_sum",
						"service", "exponential",
					),
				},
				{
					m: "rpc_durations_seconds\xffservice\xffexponential\xffquantile\xff0.5",
					v: 6.442786329648548e-07,
					lset: labels.FromStrings(
						"__name__", "rpc_durations_seconds",
						"quantile", "0.5",
						"service", "exponential",
					),
				},
				{
					m: "rpc_durations_seconds\xffservice\xffexponential\xffquantile\xff0.9",
					v: 1.9435742936658396e-06,
					lset: labels.FromStrings(
						"__name__", "rpc_durations_seconds",
						"quantile", "0.9",
						"service", "exponential",
					),
				},
				{
					m: "rpc_durations_seconds\xffservice\xffexponential\xffquantile\xff0.99",
					v: 4.0471608667037015e-06,
					lset: labels.FromStrings(
						"__name__", "rpc_durations_seconds",
						"quantile", "0.99",
						"service", "exponential",
					),
				},
				{
					m: "without_quantiles",
					help: "A summary without quantiles.",
				},
				{
					m: "without_quantiles",
					typ: MetricTypeSummary,
				},
				{
					m: "without_quantiles_count",
					v: 42,
					lset: labels.FromStrings(
						"__name__", "without_quantiles_count",
					),
				},
				{
					m: "without_quantiles_sum",
					v: 1.234,
					lset: labels.FromStrings(
						"__name__", "without_quantiles_sum",
					),
				},
			},
		},
		{
			name: "parse classic and native buckets",
			parser: NewProtobufParser(inputBuf.Bytes(), true),
			expected: []parseResult{
				{ // 0
					m: "go_build_info",
					help: "Build information about the main Go module.",
				},
				{ // 1
					m: "go_build_info",
					typ: MetricTypeGauge,
				},
				{ // 2
					m: "go_build_info\xFFchecksum\xFF\xFFpath\xFFgithub.com/prometheus/client_golang\xFFversion\xFF(devel)",
					v: 1,
					lset: labels.FromStrings(
						"__name__", "go_build_info",
						"checksum", "",
						"path", "github.com/prometheus/client_golang",
						"version", "(devel)",
					),
				},
				{ // 3
					m: "go_memstats_alloc_bytes_total",
					help: "Total number of bytes allocated, even if freed.",
				},
				{ // 4
					m: "go_memstats_alloc_bytes_total",
					typ: MetricTypeCounter,
				},
				{ // 5
					m: "go_memstats_alloc_bytes_total",
					v: 1.546544e+06,
					lset: labels.FromStrings(
						"__name__", "go_memstats_alloc_bytes_total",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "42"), Value: 12, HasTs: true, Ts: 1625851151233},
					},
				},
				{ // 6
					m: "something_untyped",
					help: "Just to test the untyped type.",
				},
				{ // 7
					m: "something_untyped",
					typ: MetricTypeUnknown,
				},
				{ // 8
					m: "something_untyped",
					t: 1234567,
					v: 42,
					lset: labels.FromStrings(
						"__name__", "something_untyped",
					),
				},
				{ // 9
					m: "test_histogram",
					help: "Test histogram with many buckets removed to keep it manageable in size.",
				},
				{ // 10
					m: "test_histogram",
					typ: MetricTypeHistogram,
				},
				{ // 11
					m: "test_histogram",
					t: 1234568,
					shs: &histogram.Histogram{
						Count: 175,
						ZeroCount: 2,
						Sum: 0.0008280461746287094,
						ZeroThreshold: 2.938735877055719e-39,
						Schema: 3,
						PositiveSpans: []histogram.Span{
							{Offset: -161, Length: 1},
							{Offset: 8, Length: 3},
						},
						NegativeSpans: []histogram.Span{
							{Offset: -162, Length: 1},
							{Offset: 23, Length: 4},
						},
						PositiveBuckets: []int64{1, 2, -1, -1},
						NegativeBuckets: []int64{1, 3, -2, -1, 1},
					},
					lset: labels.FromStrings(
						"__name__", "test_histogram",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{ // 12
					m: "test_histogram_count",
					t: 1234568,
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_histogram_count",
					),
				},
				{ // 13
					m: "test_histogram_sum",
					t: 1234568,
					v: 0.0008280461746287094,
					lset: labels.FromStrings(
						"__name__", "test_histogram_sum",
					),
				},
				{ // 14
					m: "test_histogram_bucket\xffle\xff-0.0004899999999999998",
					t: 1234568,
					v: 2,
					lset: labels.FromStrings(
						"__name__", "test_histogram_bucket",
						"le", "-0.0004899999999999998",
					),
				},
				{ // 15
					m: "test_histogram_bucket\xffle\xff-0.0003899999999999998",
					t: 1234568,
					v: 4,
					lset: labels.FromStrings(
						"__name__", "test_histogram_bucket",
						"le", "-0.0003899999999999998",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
					},
				},
				{ // 16
					m: "test_histogram_bucket\xffle\xff-0.0002899999999999998",
					t: 1234568,
					v: 16,
					lset: labels.FromStrings(
						"__name__", "test_histogram_bucket",
						"le", "-0.0002899999999999998",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{ // 17
					m: "test_histogram_bucket\xffle\xff+Inf",
					t: 1234568,
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_histogram_bucket",
						"le", "+Inf",
					),
				},
				{ // 18
					m: "test_gauge_histogram",
					help: "Like test_histogram but as gauge histogram.",
				},
				{ // 19
					m: "test_gauge_histogram",
					typ: MetricTypeGaugeHistogram,
				},
				{ // 20
					m: "test_gauge_histogram",
					t: 1234568,
					shs: &histogram.Histogram{
						CounterResetHint: histogram.GaugeType,
						Count: 175,
						ZeroCount: 2,
						Sum: 0.0008280461746287094,
						ZeroThreshold: 2.938735877055719e-39,
						Schema: 3,
						PositiveSpans: []histogram.Span{
							{Offset: -161, Length: 1},
							{Offset: 8, Length: 3},
						},
						NegativeSpans: []histogram.Span{
							{Offset: -162, Length: 1},
							{Offset: 23, Length: 4},
						},
						PositiveBuckets: []int64{1, 2, -1, -1},
						NegativeBuckets: []int64{1, 3, -2, -1, 1},
					},
					lset: labels.FromStrings(
						"__name__", "test_gauge_histogram",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{ // 21
					m: "test_gauge_histogram_count",
					t: 1234568,
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_gauge_histogram_count",
					),
				},
				{ // 22
					m: "test_gauge_histogram_sum",
					t: 1234568,
					v: 0.0008280461746287094,
					lset: labels.FromStrings(
						"__name__", "test_gauge_histogram_sum",
					),
				},
				{ // 23
					m: "test_gauge_histogram_bucket\xffle\xff-0.0004899999999999998",
					t: 1234568,
					v: 2,
					lset: labels.FromStrings(
						"__name__", "test_gauge_histogram_bucket",
						"le", "-0.0004899999999999998",
					),
				},
				{ // 24
					m: "test_gauge_histogram_bucket\xffle\xff-0.0003899999999999998",
					t: 1234568,
					v: 4,
					lset: labels.FromStrings(
						"__name__", "test_gauge_histogram_bucket",
						"le", "-0.0003899999999999998",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
					},
				},
				{ // 25
					m: "test_gauge_histogram_bucket\xffle\xff-0.0002899999999999998",
					t: 1234568,
					v: 16,
					lset: labels.FromStrings(
						"__name__", "test_gauge_histogram_bucket",
						"le", "-0.0002899999999999998",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{ // 26
					m: "test_gauge_histogram_bucket\xffle\xff+Inf",
					t: 1234568,
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_gauge_histogram_bucket",
						"le", "+Inf",
					),
				},
				{ // 27
					m: "test_float_histogram",
					help: "Test float histogram with many buckets removed to keep it manageable in size.",
				},
				{ // 28
					m: "test_float_histogram",
					typ: MetricTypeHistogram,
				},
				{ // 29
					m: "test_float_histogram",
					t: 1234568,
					fhs: &histogram.FloatHistogram{
						Count: 175.0,
						ZeroCount: 2.0,
						Sum: 0.0008280461746287094,
						ZeroThreshold: 2.938735877055719e-39,
						Schema: 3,
						PositiveSpans: []histogram.Span{
							{Offset: -161, Length: 1},
							{Offset: 8, Length: 3},
						},
						NegativeSpans: []histogram.Span{
							{Offset: -162, Length: 1},
							{Offset: 23, Length: 4},
						},
						PositiveBuckets: []float64{1.0, 2.0, -1.0, -1.0},
						NegativeBuckets: []float64{1.0, 3.0, -2.0, -1.0, 1.0},
					},
					lset: labels.FromStrings(
						"__name__", "test_float_histogram",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{ // 30
					m: "test_float_histogram_count",
					t: 1234568,
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_float_histogram_count",
					),
				},
				{ // 31
					m: "test_float_histogram_sum",
					t: 1234568,
					v: 0.0008280461746287094,
					lset: labels.FromStrings(
						"__name__", "test_float_histogram_sum",
					),
				},
				{ // 32
					m: "test_float_histogram_bucket\xffle\xff-0.0004899999999999998",
					t: 1234568,
					v: 2,
					lset: labels.FromStrings(
						"__name__", "test_float_histogram_bucket",
						"le", "-0.0004899999999999998",
					),
				},
				{ // 33
					m: "test_float_histogram_bucket\xffle\xff-0.0003899999999999998",
					t: 1234568,
					v: 4,
					lset: labels.FromStrings(
						"__name__", "test_float_histogram_bucket",
						"le", "-0.0003899999999999998",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
					},
				},
				{ // 34
					m: "test_float_histogram_bucket\xffle\xff-0.0002899999999999998",
					t: 1234568,
					v: 16,
					lset: labels.FromStrings(
						"__name__", "test_float_histogram_bucket",
						"le", "-0.0002899999999999998",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{ // 35
					m: "test_float_histogram_bucket\xffle\xff+Inf",
					t: 1234568,
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_float_histogram_bucket",
						"le", "+Inf",
					),
				},
				{ // 36
					m: "test_gauge_float_histogram",
					help: "Like test_float_histogram but as gauge histogram.",
				},
				{ // 37
					m: "test_gauge_float_histogram",
					typ: MetricTypeGaugeHistogram,
				},
				{ // 38
					m: "test_gauge_float_histogram",
					t: 1234568,
					fhs: &histogram.FloatHistogram{
						CounterResetHint: histogram.GaugeType,
						Count: 175.0,
						ZeroCount: 2.0,
						Sum: 0.0008280461746287094,
						ZeroThreshold: 2.938735877055719e-39,
						Schema: 3,
						PositiveSpans: []histogram.Span{
							{Offset: -161, Length: 1},
							{Offset: 8, Length: 3},
						},
						NegativeSpans: []histogram.Span{
							{Offset: -162, Length: 1},
							{Offset: 23, Length: 4},
						},
						PositiveBuckets: []float64{1.0, 2.0, -1.0, -1.0},
						NegativeBuckets: []float64{1.0, 3.0, -2.0, -1.0, 1.0},
					},
					lset: labels.FromStrings(
						"__name__", "test_gauge_float_histogram",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{ // 39
					m: "test_gauge_float_histogram_count",
					t: 1234568,
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_gauge_float_histogram_count",
					),
				},
				{ // 40
					m: "test_gauge_float_histogram_sum",
					t: 1234568,
					v: 0.0008280461746287094,
					lset: labels.FromStrings(
						"__name__", "test_gauge_float_histogram_sum",
					),
				},
				{ // 41
					m: "test_gauge_float_histogram_bucket\xffle\xff-0.0004899999999999998",
					t: 1234568,
					v: 2,
					lset: labels.FromStrings(
						"__name__", "test_gauge_float_histogram_bucket",
						"le", "-0.0004899999999999998",
					),
				},
				{ // 42
					m: "test_gauge_float_histogram_bucket\xffle\xff-0.0003899999999999998",
					t: 1234568,
					v: 4,
					lset: labels.FromStrings(
						"__name__", "test_gauge_float_histogram_bucket",
						"le", "-0.0003899999999999998",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00039, HasTs: true, Ts: 1625851155146},
					},
				},
				{ // 43
					m: "test_gauge_float_histogram_bucket\xffle\xff-0.0002899999999999998",
					t: 1234568,
					v: 16,
					lset: labels.FromStrings(
						"__name__", "test_gauge_float_histogram_bucket",
						"le", "-0.0002899999999999998",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.00029, HasTs: false},
					},
				},
				{ // 44
					m: "test_gauge_float_histogram_bucket\xffle\xff+Inf",
					t: 1234568,
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_gauge_float_histogram_bucket",
						"le", "+Inf",
					),
				},
				{ // 45
					m: "test_histogram2",
					help: "Similar histogram as before but now without sparse buckets.",
				},
				{ // 46
					m: "test_histogram2",
					typ: MetricTypeHistogram,
				},
				{ // 47
					m: "test_histogram2_count",
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_count",
					),
				},
				{ // 48
					m: "test_histogram2_sum",
					v: 0.000828,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_sum",
					),
				},
				{ // 49
					m: "test_histogram2_bucket\xffle\xff-0.00048",
					v: 2,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_bucket",
						"le", "-0.00048",
					),
				},
				{ // 50
					m: "test_histogram2_bucket\xffle\xff-0.00038",
					v: 4,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_bucket",
						"le", "-0.00038",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "59727"), Value: -0.00038, HasTs: true, Ts: 1625851153146},
					},
				},
				{ // 51
					m: "test_histogram2_bucket\xffle\xff1.0",
					v: 16,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_bucket",
						"le", "1.0",
					),
					e: []exemplar.Exemplar{
						{Labels: labels.FromStrings("dummyID", "5617"), Value: -0.000295, HasTs: false},
					},
				},
				{ // 52
					m: "test_histogram2_bucket\xffle\xff+Inf",
					v: 175,
					lset: labels.FromStrings(
						"__name__", "test_histogram2_bucket",
						"le", "+Inf",
					),
				},
				{ // 53
					m: "rpc_durations_seconds",
					help: "RPC latency distributions.",
				},
				{ // 54
					m: "rpc_durations_seconds",
					typ: MetricTypeSummary,
				},
				{ // 55
					m: "rpc_durations_seconds_count\xffservice\xffexponential",
					v: 262,
					lset: labels.FromStrings(
						"__name__", "rpc_durations_seconds_count",
						"service", "exponential",
					),
				},
				{ // 56
					m: "rpc_durations_seconds_sum\xffservice\xffexponential",
					v: 0.00025551262820703587,
					lset: labels.FromStrings(
						"__name__", "rpc_durations_seconds_sum",
						"service", "exponential",
					),
				},
				{ // 57
					m: "rpc_durations_seconds\xffservice\xffexponential\xffquantile\xff0.5",
					v: 6.442786329648548e-07,
					lset: labels.FromStrings(
						"__name__", "rpc_durations_seconds",
						"quantile", "0.5",
						"service", "exponential",
					),
				},
				{ // 58
					m: "rpc_durations_seconds\xffservice\xffexponential\xffquantile\xff0.9",
					v: 1.9435742936658396e-06,
					lset: labels.FromStrings(
						"__name__", "rpc_durations_seconds",
						"quantile", "0.9",
						"service", "exponential",
					),
				},
				{ // 59
					m: "rpc_durations_seconds\xffservice\xffexponential\xffquantile\xff0.99",
					v: 4.0471608667037015e-06,
					lset: labels.FromStrings(
						"__name__", "rpc_durations_seconds",
						"quantile", "0.99",
						"service", "exponential",
					),
				},
				{ // 60
					m: "without_quantiles",
					help: "A summary without quantiles.",
				},
				{ // 61
					m: "without_quantiles",
					typ: MetricTypeSummary,
				},
				{ // 62
					m: "without_quantiles_count",
					v: 42,
					lset: labels.FromStrings(
						"__name__", "without_quantiles_count",
					),
				},
				{ // 63
					m: "without_quantiles_sum",
					v: 1.234,
					lset: labels.FromStrings(
						"__name__", "without_quantiles_sum",
					),
				},
			},
		},
	}

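	// Run each scenario as its own subtest over the shared input buffer.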
	for _, scenario := range scenarios {
		t.Run(scenario.name, func(t *testing.T) {
			var (
				i   int
				res labels.Labels
				p   = scenario.parser
				exp = scenario.expected
			)

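			// Parse the input until io.EOF, comparing each entry, in order,
			// against the next expected parseResult.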
			for {
				et, err := p.Next()
				if errors.Is(err, io.EOF) {
					break
				}
				require.NoError(t, err)

				switch et {
				case EntrySeries:
					m, ts, v := p.Series()

					var e exemplar.Exemplar
					p.Metric(&res)
					found := p.Exemplar(&e)
					require.Equal(t, exp[i].m, string(m))
					if ts != nil {
						require.Equal(t, exp[i].t, *ts)
					} else {
						require.Equal(t, exp[i].t, int64(0))
					}
					require.Equal(t, exp[i].v, v)
					require.Equal(t, exp[i].lset, res)
					if len(exp[i].e) == 0 {
						require.Equal(t, false, found)
					} else {
						require.Equal(t, true, found)
						require.Equal(t, exp[i].e[0], e)
					}

				case EntryHistogram:
					m, ts, shs, fhs := p.Histogram()
					p.Metric(&res)
					require.Equal(t, exp[i].m, string(m))
					if ts != nil {
						require.Equal(t, exp[i].t, *ts)
					} else {
						require.Equal(t, exp[i].t, int64(0))
					}
					require.Equal(t, exp[i].lset, res)
					require.Equal(t, exp[i].m, string(m))
					if shs != nil {
						require.Equal(t, exp[i].shs, shs)
					} else {
						require.Equal(t, exp[i].fhs, fhs)
					}
					j := 0
					for e := (exemplar.Exemplar{}); p.Exemplar(&e); j++ {
						require.Equal(t, exp[i].e[j], e)
						e = exemplar.Exemplar{}
					}
					require.Equal(t, len(exp[i].e), j, "not enough exemplars found")

				case EntryType:
					m, typ := p.Type()
					require.Equal(t, exp[i].m, string(m))
					require.Equal(t, exp[i].typ, typ)

				case EntryHelp:
					m, h := p.Help()
					require.Equal(t, exp[i].m, string(m))
					require.Equal(t, exp[i].help, string(h))

				case EntryUnit:
					m, u := p.Unit()
					require.Equal(t, exp[i].m, string(m))
					require.Equal(t, exp[i].unit, string(u))

				case EntryComment:
					require.Equal(t, exp[i].comment, string(p.Comment()))
				}

				i++
			}
			require.Equal(t, len(exp), i)
		})
	}
}