textparse: Refactored benchmark (#15083)

* textparse: Refactored benchmark.

Signed-off-by: bwplotka <bwplotka@gmail.com>

* Kill sinks, tested, nothing is inlined.

Signed-off-by: bwplotka <bwplotka@gmail.com>

* Addressed comments.

Signed-off-by: bwplotka <bwplotka@gmail.com>

---------

Signed-off-by: bwplotka <bwplotka@gmail.com>
Bartlomiej Plotka 2024-10-07 10:55:26 +02:00 committed by GitHub
parent 989db7bc3a
commit 8d281c3491
7 changed files with 180 additions and 236 deletions
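The "sinks" mentioned in the commit message are the accumulator variables (total += len(m); _ = total) that the old benchmarks further down in this diff used to keep results from being optimized away. Since the Parser methods are not inlined, the refactored benchmark simply discards their return values. A minimal sketch of the two styles, with hypothetical helper names and the per-entry handling stripped down:

package textparse

import "testing"

// benchWithSink shows the old pattern: accumulate into a sink so the
// compiler cannot elide the measured work.
func benchWithSink(b *testing.B, p Parser) {
	total := 0
	for i := 0; i < b.N; i++ {
		m, _, _ := p.Series()
		total += len(m)
	}
	_ = total // sink
}

// benchWithoutSink shows the refactored pattern: Series is not inlined, so
// its results are not optimized away and can simply be discarded.
func benchWithoutSink(b *testing.B, p Parser) {
	for i := 0; i < b.N; i++ {
		_, _, _ = p.Series()
	}
}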


@@ -0,0 +1,177 @@
// Copyright 2024 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package textparse
import (
"bytes"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"testing"
"github.com/prometheus/prometheus/model/exemplar"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/common/expfmt"
"github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
)
type newParser func([]byte, *labels.SymbolTable) Parser
var newTestParserFns = map[string]newParser{
"promtext": NewPromParser,
"promproto": func(b []byte, st *labels.SymbolTable) Parser {
return NewProtobufParser(b, true, st)
},
"omtext": func(b []byte, st *labels.SymbolTable) Parser {
return NewOpenMetricsParser(b, st, WithOMParserCTSeriesSkipped())
},
}
// BenchmarkParse benchmarks parsing, mimicking how scrape/scrape.go#append uses it.
// Typically used as follows:
/*
export bench=v1 && go test ./model/textparse/... \
-run '^$' -bench '^BenchmarkParse' \
-benchtime 2s -count 6 -cpu 2 -benchmem -timeout 999m \
| tee ${bench}.txt
*/
// For profiles, add -memprofile=${bench}.mem.pprof -cpuprofile=${bench}.cpu.pprof
// options.
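// To compare two such runs, a tool like benchstat can be used, for example
// `benchstat v1.txt v2.txt` (assuming benchstat is installed).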
//
// NOTE(bwplotka): Previous iterations of this benchmark had separate cases for isolated
// Series, Series+Metrics with and without reuse, and Series+CT. Those cases are sometimes
// useful when working on a certain optimization, but it does not
// make sense to maintain them for everybody (e.g. for CI one day).
// For local iteration, feel free to adjust the cases, comment out code, etc.
// (see the sketch after this file).
//
// NOTE(bwplotka): Do not try to conclude which parser (OM, proto, prom) is the fastest,
// as the testdata has different amounts and types of metrics and features (e.g. exemplars).
func BenchmarkParse(b *testing.B) {
for _, bcase := range []struct {
dataFile string // Localized to "./testdata".
dataProto []byte
parser string
compareToExpfmtFormat expfmt.FormatType
}{
{dataFile: "promtestdata.txt", parser: "promtext", compareToExpfmtFormat: expfmt.TypeTextPlain},
{dataFile: "promtestdata.nometa.txt", parser: "promtext", compareToExpfmtFormat: expfmt.TypeTextPlain},
// We don't pass compareToExpfmtFormat: expfmt.TypeProtoDelim as expfmt does not support GAUGE_HISTOGRAM, see https://github.com/prometheus/common/issues/430.
{dataProto: createTestProtoBuf(b).Bytes(), parser: "promproto"},
// We don't pass compareToExpfmtFormat: expfmt.TypeOpenMetrics as expfmt does not support OM exemplars, see https://github.com/prometheus/common/issues/703.
{dataFile: "omtestdata.txt", parser: "omtext"},
{dataFile: "promtestdata.txt", parser: "omtext"}, // Compare how omtext parser deals with Prometheus text format vs promtext.
} {
var buf []byte
dataCase := bcase.dataFile
if len(bcase.dataProto) > 0 {
dataCase = "createTestProtoBuf()"
buf = bcase.dataProto
} else {
f, err := os.Open(filepath.Join("testdata", bcase.dataFile))
require.NoError(b, err)
b.Cleanup(func() {
_ = f.Close()
})
buf, err = io.ReadAll(f)
require.NoError(b, err)
}
b.Run(fmt.Sprintf("data=%v/parser=%v", dataCase, bcase.parser), func(b *testing.B) {
newParserFn := newTestParserFns[bcase.parser]
var (
res labels.Labels
e exemplar.Exemplar
)
b.SetBytes(int64(len(buf)))
b.ReportAllocs()
b.ResetTimer()
st := labels.NewSymbolTable()
for i := 0; i < b.N; i++ {
p := newParserFn(buf, st)
Inner:
for {
t, err := p.Next()
switch t {
case EntryInvalid:
if errors.Is(err, io.EOF) {
break Inner
}
b.Fatal(err)
case EntryType:
_, _ = p.Type()
continue
case EntryHelp:
_, _ = p.Help()
continue
case EntryUnit:
_, _ = p.Unit()
continue
case EntryComment:
continue
case EntryHistogram:
_, _, _, _ = p.Histogram()
case EntrySeries:
_, _, _ = p.Series()
default:
b.Fatal("not implemented entry", t)
}
_ = p.Metric(&res)
_ = p.CreatedTimestamp()
for hasExemplar := p.Exemplar(&e); hasExemplar; hasExemplar = p.Exemplar(&e) {
}
}
}
})
b.Run(fmt.Sprintf("data=%v/parser=xpfmt", dataCase), func(b *testing.B) {
if bcase.compareToExpfmtFormat == expfmt.TypeUnknown {
b.Skip("compareToExpfmtFormat not set")
}
b.SetBytes(int64(len(buf)))
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
decSamples := make(model.Vector, 0, 50)
sdec := expfmt.SampleDecoder{
Dec: expfmt.NewDecoder(bytes.NewReader(buf), expfmt.NewFormat(bcase.compareToExpfmtFormat)),
Opts: &expfmt.DecodeOptions{
Timestamp: model.TimeFromUnixNano(0),
},
}
for {
if err := sdec.Decode(&decSamples); err != nil {
if errors.Is(err, io.EOF) {
break
}
b.Fatal(err)
}
decSamples = decSamples[:0]
}
}
})
}
}
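As the NOTE above suggests, extra cases are meant to be added locally rather than kept in the tree. A hypothetical sketch of such a variant (the "omtext-no-ct-skip" name is made up for illustration): register one more constructor next to the existing newTestParserFns entries and add a matching case to the BenchmarkParse table.

// Hypothetical: OpenMetrics parsing without the created-timestamp skipping option.
"omtext-no-ct-skip": func(b []byte, st *labels.SymbolTable) Parser {
	return NewOpenMetricsParser(b, st)
},

// ...and a matching entry in the BenchmarkParse case table:
{dataFile: "omtestdata.txt", parser: "omtext-no-ct-skip"},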


@@ -16,7 +16,6 @@ package textparse
import (
"errors"
"io"
"os"
"testing"
"github.com/prometheus/common/model"
@@ -958,46 +957,3 @@ thing_bucket{le="+Inf"} 17`
i++
}
}
func BenchmarkOMParseCreatedTimestamp(b *testing.B) {
for parserName, parser := range map[string]func([]byte, *labels.SymbolTable) Parser{
"openmetrics": func(b []byte, st *labels.SymbolTable) Parser {
return NewOpenMetricsParser(b, st)
},
"openmetrics-skip-ct": func(b []byte, st *labels.SymbolTable) Parser {
return NewOpenMetricsParser(b, st, WithOMParserCTSeriesSkipped())
},
} {
f, err := os.Open("omtestdata.txt")
require.NoError(b, err)
defer f.Close()
buf, err := io.ReadAll(f)
require.NoError(b, err)
b.Run(parserName+"/parse-ct/"+"omtestdata.txt", func(b *testing.B) {
b.SetBytes(int64(len(buf) / promtestdataSampleCount))
b.ReportAllocs()
b.ResetTimer()
st := labels.NewSymbolTable()
for i := 0; i < b.N; i += promtestdataSampleCount {
p := parser(buf, st)
Outer:
for i < b.N {
t, err := p.Next()
switch t {
case EntryInvalid:
if errors.Is(err, io.EOF) {
break Outer
}
b.Fatal(err)
case EntrySeries:
p.CreatedTimestamp()
}
}
}
})
}
}


@@ -14,17 +14,13 @@
package textparse
import (
"bytes"
"errors"
"io"
"os"
"strings"
"testing"
"github.com/klauspost/compress/gzip"
"github.com/stretchr/testify/require"
"github.com/prometheus/common/expfmt"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/model/exemplar"
@@ -485,190 +481,3 @@ func TestPromNullByteHandling(t *testing.T) {
require.EqualError(t, err, c.err, "test %d", i)
}
}
const (
promtestdataSampleCount = 410
)
func BenchmarkPromParse(b *testing.B) {
for parserName, parser := range map[string]func([]byte, *labels.SymbolTable) Parser{
"prometheus": NewPromParser,
"openmetrics": func(b []byte, st *labels.SymbolTable) Parser {
return NewOpenMetricsParser(b, st)
},
} {
for _, fn := range []string{"promtestdata.txt", "promtestdata.nometa.txt"} {
f, err := os.Open(fn)
require.NoError(b, err)
defer f.Close()
buf, err := io.ReadAll(f)
require.NoError(b, err)
b.Run(parserName+"/no-decode-metric/"+fn, func(b *testing.B) {
total := 0
b.SetBytes(int64(len(buf) / promtestdataSampleCount))
b.ReportAllocs()
b.ResetTimer()
st := labels.NewSymbolTable()
for i := 0; i < b.N; i += promtestdataSampleCount {
p := parser(buf, st)
Outer:
for i < b.N {
t, err := p.Next()
switch t {
case EntryInvalid:
if errors.Is(err, io.EOF) {
break Outer
}
b.Fatal(err)
case EntrySeries:
m, _, _ := p.Series()
total += len(m)
i++
}
}
}
_ = total
})
b.Run(parserName+"/decode-metric/"+fn, func(b *testing.B) {
total := 0
b.SetBytes(int64(len(buf) / promtestdataSampleCount))
b.ReportAllocs()
b.ResetTimer()
st := labels.NewSymbolTable()
for i := 0; i < b.N; i += promtestdataSampleCount {
p := parser(buf, st)
Outer:
for i < b.N {
t, err := p.Next()
switch t {
case EntryInvalid:
if errors.Is(err, io.EOF) {
break Outer
}
b.Fatal(err)
case EntrySeries:
m, _, _ := p.Series()
var res labels.Labels
p.Metric(&res)
total += len(m)
i++
}
}
}
_ = total
})
b.Run(parserName+"/decode-metric-reuse/"+fn, func(b *testing.B) {
total := 0
var res labels.Labels
b.SetBytes(int64(len(buf) / promtestdataSampleCount))
b.ReportAllocs()
b.ResetTimer()
st := labels.NewSymbolTable()
for i := 0; i < b.N; i += promtestdataSampleCount {
p := parser(buf, st)
Outer:
for i < b.N {
t, err := p.Next()
switch t {
case EntryInvalid:
if errors.Is(err, io.EOF) {
break Outer
}
b.Fatal(err)
case EntrySeries:
m, _, _ := p.Series()
p.Metric(&res)
total += len(m)
i++
}
}
}
_ = total
})
b.Run("expfmt-text/"+fn, func(b *testing.B) {
if parserName != "prometheus" {
b.Skip()
}
b.SetBytes(int64(len(buf) / promtestdataSampleCount))
b.ReportAllocs()
b.ResetTimer()
total := 0
for i := 0; i < b.N; i += promtestdataSampleCount {
decSamples := make(model.Vector, 0, 50)
sdec := expfmt.SampleDecoder{
Dec: expfmt.NewDecoder(bytes.NewReader(buf), expfmt.NewFormat(expfmt.TypeTextPlain)),
Opts: &expfmt.DecodeOptions{
Timestamp: model.TimeFromUnixNano(0),
},
}
for {
if err = sdec.Decode(&decSamples); err != nil {
break
}
total += len(decSamples)
decSamples = decSamples[:0]
}
}
_ = total
})
}
}
}
func BenchmarkGzip(b *testing.B) {
for _, fn := range []string{"promtestdata.txt", "promtestdata.nometa.txt"} {
b.Run(fn, func(b *testing.B) {
f, err := os.Open(fn)
require.NoError(b, err)
defer f.Close()
var buf bytes.Buffer
gw := gzip.NewWriter(&buf)
n, err := io.Copy(gw, f)
require.NoError(b, err)
require.NoError(b, gw.Close())
gbuf, err := io.ReadAll(&buf)
require.NoError(b, err)
k := b.N / promtestdataSampleCount
b.ReportAllocs()
b.SetBytes(n / promtestdataSampleCount)
b.ResetTimer()
total := 0
for i := 0; i < k; i++ {
gr, err := gzip.NewReader(bytes.NewReader(gbuf))
require.NoError(b, err)
d, err := io.ReadAll(gr)
require.NoError(b, err)
require.NoError(b, gr.Close())
total += len(d)
}
_ = total
})
}
}


@@ -32,7 +32,9 @@ import (
dto "github.com/prometheus/prometheus/prompb/io/prometheus/client"
)
-func createTestProtoBuf(t *testing.T) *bytes.Buffer {
+func createTestProtoBuf(t testing.TB) *bytes.Buffer {
+	t.Helper()
testMetricFamilies := []string{
`name: "go_build_info"
help: "Build information about the main Go module."