// Copyright 2018 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"sort"
	"strconv"
	"strings"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/grafana/regexp"
	"github.com/nsf/jsondiff"
	"gopkg.in/yaml.v2"

	"github.com/prometheus/common/model"
	"github.com/prometheus/common/promslog"

	"github.com/prometheus/prometheus/model/histogram"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/promql/parser"
	"github.com/prometheus/prometheus/promql/promqltest"
	"github.com/prometheus/prometheus/rules"
	"github.com/prometheus/prometheus/storage"
	"github.com/prometheus/prometheus/util/junitxml"
)

// RulesUnitTest does unit testing of rules based on the unit testing files provided.
// More info about the file format can be found in the docs.
func RulesUnitTest(queryOpts promqltest.LazyLoaderOpts, runStrings []string, diffFlag, debug bool, files ...string) int {
	return RulesUnitTestResult(io.Discard, queryOpts, runStrings, diffFlag, debug, files...)
}
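
// For illustration only: a caller (such as the promtool CLI, which backs
// `promtool test rules`) might invoke this roughly as follows. The file name
// below is hypothetical.
//
//	exitCode := RulesUnitTest(promqltest.LazyLoaderOpts{}, nil, false, false, "testdata/rules_test.yml")
//	os.Exit(exitCode)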

func RulesUnitTestResult(results io.Writer, queryOpts promqltest.LazyLoaderOpts, runStrings []string, diffFlag, debug bool, files ...string) int {
	failed := false
	junit := &junitxml.JUnitXML{}

	var run *regexp.Regexp
	if runStrings != nil {
		run = regexp.MustCompile(strings.Join(runStrings, "|"))
	}

	for _, f := range files {
		if errs := ruleUnitTest(f, queryOpts, run, diffFlag, debug, junit.Suite(f)); errs != nil {
			fmt.Fprintln(os.Stderr, " FAILED:")
			for _, e := range errs {
				fmt.Fprintln(os.Stderr, e.Error())
				fmt.Println()
			}
			failed = true
		} else {
			fmt.Println(" SUCCESS")
		}
		fmt.Println()
	}
	err := junit.WriteXML(results)
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to write JUnit XML: %s\n", err)
	}
	if failed {
		return failureExitCode
	}
	return successExitCode
}

func ruleUnitTest(filename string, queryOpts promqltest.LazyLoaderOpts, run *regexp.Regexp, diffFlag, debug bool, ts *junitxml.TestSuite) []error {
	b, err := os.ReadFile(filename)
	if err != nil {
		ts.Abort(err)
		return []error{err}
	}

	var unitTestInp unitTestFile
	if err := yaml.UnmarshalStrict(b, &unitTestInp); err != nil {
		ts.Abort(err)
		return []error{err}
	}
	if err := resolveAndGlobFilepaths(filepath.Dir(filename), &unitTestInp); err != nil {
		ts.Abort(err)
		return []error{err}
	}

	if unitTestInp.EvaluationInterval == 0 {
		unitTestInp.EvaluationInterval = model.Duration(1 * time.Minute)
	}

	evalInterval := time.Duration(unitTestInp.EvaluationInterval)
	ts.Settime(time.Now().Format("2006-01-02T15:04:05"))
	// Assign a number to each group mentioned in the file to establish the evaluation order.
	// Groups with a lower number are evaluated before groups with a higher number.
	groupOrderMap := make(map[string]int)
	for i, gn := range unitTestInp.GroupEvalOrder {
		if _, ok := groupOrderMap[gn]; ok {
			err := fmt.Errorf("group name repeated in evaluation order: %s", gn)
			ts.Abort(err)
			return []error{err}
		}
		groupOrderMap[gn] = i
	}

	// Testing.
	var errs []error
	for i, t := range unitTestInp.Tests {
		if !matchesRun(t.TestGroupName, run) {
			continue
		}
		testname := t.TestGroupName
		if testname == "" {
			testname = fmt.Sprintf("unnamed#%d", i)
		}
		tc := ts.Case(testname)
		if t.Interval == 0 {
			t.Interval = unitTestInp.EvaluationInterval
		}
		ers := t.test(testname, evalInterval, groupOrderMap, queryOpts, diffFlag, debug, unitTestInp.RuleFiles...)
		if ers != nil {
			for _, e := range ers {
				tc.Fail(e.Error())
			}
			errs = append(errs, ers...)
		}
	}

	if len(errs) > 0 {
		return errs
	}
	return nil
}

func matchesRun(name string, run *regexp.Regexp) bool {
	if run == nil {
		return true
	}

	return run.MatchString(name)
}

// unitTestFile holds the contents of a single unit test file.
type unitTestFile struct {
	RuleFiles          []string       `yaml:"rule_files"`
	EvaluationInterval model.Duration `yaml:"evaluation_interval,omitempty"`
	GroupEvalOrder     []string       `yaml:"group_eval_order"`
	Tests              []testGroup    `yaml:"tests"`
}
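
// For illustration only (the authoritative format is described in the Prometheus
// docs), a minimal unit test file that unmarshals into unitTestFile might look
// like the following; the rule file, series, and alert names are hypothetical:
//
//	rule_files:
//	  - alerts.yml
//	evaluation_interval: 1m
//	tests:
//	  - interval: 1m
//	    input_series:
//	      - series: 'up{job="prometheus"}'
//	        values: '1 0 0 0'
//	    alert_rule_test:
//	      - eval_time: 3m
//	        alertname: InstanceDown
//	        exp_alerts:
//	          - exp_labels:
//	              job: prometheus
//	    promql_expr_test:
//	      - expr: up{job="prometheus"}
//	        eval_time: 2m
//	        exp_samples:
//	          - labels: 'up{job="prometheus"}'
//	            value: 0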

// resolveAndGlobFilepaths joins all relative paths in a configuration
// with a given base directory and replaces all globs with matching files.
func resolveAndGlobFilepaths(baseDir string, utf *unitTestFile) error {
	for i, rf := range utf.RuleFiles {
		if rf != "" && !filepath.IsAbs(rf) {
			utf.RuleFiles[i] = filepath.Join(baseDir, rf)
		}
	}

	var globbedFiles []string
	for _, rf := range utf.RuleFiles {
		m, err := filepath.Glob(rf)
		if err != nil {
			return err
		}
		if len(m) == 0 {
			fmt.Fprintln(os.Stderr, " WARNING: no file match pattern", rf)
		}
		globbedFiles = append(globbedFiles, m...)
	}
	utf.RuleFiles = globbedFiles
	return nil
}
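
// For example, given a base directory "testdata" and a rule_files entry "*.yml"
// (both hypothetical), the entry is first rewritten to "testdata/*.yml" and then
// replaced by whatever files match that glob.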

// testGroup is a group of input series and tests associated with it.
type testGroup struct {
	Interval        model.Duration   `yaml:"interval"`
	InputSeries     []series         `yaml:"input_series"`
	AlertRuleTests  []alertTestCase  `yaml:"alert_rule_test,omitempty"`
	PromqlExprTests []promqlTestCase `yaml:"promql_expr_test,omitempty"`
	ExternalLabels  labels.Labels    `yaml:"external_labels,omitempty"`
	ExternalURL     string           `yaml:"external_url,omitempty"`
	TestGroupName   string           `yaml:"name,omitempty"`
}

// test performs the unit tests.
func (tg *testGroup) test(testname string, evalInterval time.Duration, groupOrderMap map[string]int, queryOpts promqltest.LazyLoaderOpts, diffFlag, debug bool, ruleFiles ...string) (outErr []error) {
	if debug {
		testStart := time.Now()
		fmt.Printf("DEBUG: Starting test %s\n", testname)
		defer func() {
			fmt.Printf("DEBUG: Test %s finished, took %v\n", testname, time.Since(testStart))
		}()
	}
	// Set up the testing suite.
	suite, err := promqltest.NewLazyLoader(tg.seriesLoadingString(), queryOpts)
	if err != nil {
		return []error{err}
	}
	defer func() {
		err := suite.Close()
		if err != nil {
			outErr = append(outErr, err)
		}
	}()
	suite.SubqueryInterval = evalInterval

	// Load the rule files.
	opts := &rules.ManagerOptions{
		QueryFunc:  rules.EngineQueryFunc(suite.QueryEngine(), suite.Storage()),
		Appendable: suite.Storage(),
		Context:    context.Background(),
		NotifyFunc: func(ctx context.Context, expr string, alerts ...*rules.Alert) {},
		Logger:     promslog.NewNopLogger(),
	}
	m := rules.NewManager(opts)
	groupsMap, ers := m.LoadGroups(time.Duration(tg.Interval), tg.ExternalLabels, tg.ExternalURL, nil, ruleFiles...)
	if ers != nil {
		return ers
	}
	groups := orderedGroups(groupsMap, groupOrderMap)

	// Bounds for evaluating the rules.
	mint := time.Unix(0, 0).UTC()
	maxt := mint.Add(tg.maxEvalTime())

	// Pre-process some data for testing alerts.
	// All this preparation is so that we can test alerts as we evaluate the rules.
	// This avoids storing them in memory, as the number of evals might be high.

	// All the `eval_time` values for which we have unit tests for alerts.
	alertEvalTimesMap := map[model.Duration]struct{}{}
	// Map of all the eval_time+alertname combinations present in the unit tests.
	alertsInTest := make(map[model.Duration]map[string]struct{})
	// Map of all the unit tests for a given eval_time.
	alertTests := make(map[model.Duration][]alertTestCase)
	for _, alert := range tg.AlertRuleTests {
		if alert.Alertname == "" {
			var testGroupLog string
			if tg.TestGroupName != "" {
				testGroupLog = fmt.Sprintf(" (in TestGroup %s)", tg.TestGroupName)
			}
			return []error{fmt.Errorf("an item under alert_rule_test misses required attribute alertname at eval_time %v%s", alert.EvalTime, testGroupLog)}
		}
		alertEvalTimesMap[alert.EvalTime] = struct{}{}

		if _, ok := alertsInTest[alert.EvalTime]; !ok {
			alertsInTest[alert.EvalTime] = make(map[string]struct{})
		}
		alertsInTest[alert.EvalTime][alert.Alertname] = struct{}{}

		alertTests[alert.EvalTime] = append(alertTests[alert.EvalTime], alert)
	}
	alertEvalTimes := make([]model.Duration, 0, len(alertEvalTimesMap))
	for k := range alertEvalTimesMap {
		alertEvalTimes = append(alertEvalTimes, k)
	}
	sort.Slice(alertEvalTimes, func(i, j int) bool {
		return alertEvalTimes[i] < alertEvalTimes[j]
	})

	// Current index into alertEvalTimes that we are looking at.
	curr := 0

	for _, g := range groups {
		for _, r := range g.Rules() {
			if alertRule, ok := r.(*rules.AlertingRule); ok {
				// Mark alerting rules as restored, to ensure the ALERTS timeseries is
				// created when they run.
				alertRule.SetRestored(true)
			}
		}
	}

	var errs []error
	for ts := mint; ts.Before(maxt) || ts.Equal(maxt); ts = ts.Add(evalInterval) {
		// Collect the alerts asked for in the unit tests.
		var evalErrs []error
		suite.WithSamplesTill(ts, func(err error) {
			if err != nil {
				errs = append(errs, err)
				return
			}
			for _, g := range groups {
				g.Eval(suite.Context(), ts)
				for _, r := range g.Rules() {
					if r.LastError() != nil {
						evalErrs = append(evalErrs, fmt.Errorf(" rule: %s, time: %s, err: %w",
							r.Name(), ts.Sub(time.Unix(0, 0).UTC()), r.LastError()))
					}
				}
			}
		})
		errs = append(errs, evalErrs...)
		// Only end testing at this point if errors occurred evaluating above,
		// rather than any test failures already collected in errs.
		if len(evalErrs) > 0 {
			return errs
		}

		for {
			if !(curr < len(alertEvalTimes) && ts.Sub(mint) <= time.Duration(alertEvalTimes[curr]) &&
				time.Duration(alertEvalTimes[curr]) < ts.Add(evalInterval).Sub(mint)) {
				break
			}

			// We need to check alerts for this time.
			// If 'ts <= `eval_time=alertEvalTimes[curr]` < ts+evalInterval'
			// then we compare alerts with the Eval at `ts`.
			t := alertEvalTimes[curr]

			presentAlerts := alertsInTest[t]
			got := make(map[string]labelsAndAnnotations)

			// The same alert name can be present in multiple groups.
			// Hence we collect them all to check against the expected alerts.
			for _, g := range groups {
				grules := g.Rules()
				for _, r := range grules {
					ar, ok := r.(*rules.AlertingRule)
					if !ok {
						continue
					}
					if _, ok := presentAlerts[ar.Name()]; !ok {
						continue
					}

					var alerts labelsAndAnnotations
					for _, a := range ar.ActiveAlerts() {
						if a.State == rules.StateFiring {
							alerts = append(alerts, labelAndAnnotation{
								Labels:      a.Labels.Copy(),
								Annotations: a.Annotations.Copy(),
							})
						}
					}

					got[ar.Name()] = append(got[ar.Name()], alerts...)
				}
			}

			for _, testcase := range alertTests[t] {
				// Checking alerts.
				gotAlerts := got[testcase.Alertname]

				var expAlerts labelsAndAnnotations
				for _, a := range testcase.ExpAlerts {
					// The user gives only the labels from the alerting rule, which don't
					// include this label (added by Prometheus during Eval).
					if a.ExpLabels == nil {
						a.ExpLabels = make(map[string]string)
					}
					a.ExpLabels[labels.AlertName] = testcase.Alertname

					expAlerts = append(expAlerts, labelAndAnnotation{
						Labels:      labels.FromMap(a.ExpLabels),
						Annotations: labels.FromMap(a.ExpAnnotations),
					})
				}

				sort.Sort(gotAlerts)
				sort.Sort(expAlerts)

				if !cmp.Equal(expAlerts, gotAlerts, cmp.Comparer(labels.Equal)) {
					var testName string
					if tg.TestGroupName != "" {
						testName = fmt.Sprintf(" name: %s,\n", tg.TestGroupName)
					}
					expString := indentLines(expAlerts.String(), " ")
					gotString := indentLines(gotAlerts.String(), " ")
					if diffFlag {
						// If empty, populate with an empty value.
						if gotAlerts.Len() == 0 {
							gotAlerts = append(gotAlerts, labelAndAnnotation{
								Labels:      labels.Labels{},
								Annotations: labels.Labels{},
							})
						}
						// If empty, populate with an empty value.
						if expAlerts.Len() == 0 {
							expAlerts = append(expAlerts, labelAndAnnotation{
								Labels:      labels.Labels{},
								Annotations: labels.Labels{},
							})
						}

						diffOpts := jsondiff.DefaultConsoleOptions()
						expAlertsJSON, err := json.Marshal(expAlerts)
						if err != nil {
							errs = append(errs, fmt.Errorf("error marshaling expected %s alert: [%s]", tg.TestGroupName, err.Error()))
							continue
						}

						gotAlertsJSON, err := json.Marshal(gotAlerts)
						if err != nil {
							errs = append(errs, fmt.Errorf("error marshaling received %s alert: [%s]", tg.TestGroupName, err.Error()))
							continue
						}

						res, diff := jsondiff.Compare(expAlertsJSON, gotAlertsJSON, &diffOpts)
						if res != jsondiff.FullMatch {
							errs = append(errs, fmt.Errorf("%s alertname: %s, time: %s, \n diff: %v",
								testName, testcase.Alertname, testcase.EvalTime.String(), indentLines(diff, " ")))
						}
					} else {
						errs = append(errs, fmt.Errorf("%s alertname: %s, time: %s, \n exp:%v, \n got:%v",
							testName, testcase.Alertname, testcase.EvalTime.String(), expString, gotString))
					}
				}
			}

			curr++
		}
	}

	// Checking promql expressions.
Outer:
	for _, testCase := range tg.PromqlExprTests {
		got, err := query(suite.Context(), testCase.Expr, mint.Add(time.Duration(testCase.EvalTime)),
			suite.QueryEngine(), suite.Queryable())
		if err != nil {
			errs = append(errs, fmt.Errorf(" expr: %q, time: %s, err: %s", testCase.Expr,
				testCase.EvalTime.String(), err.Error()))
			continue
		}

		var gotSamples []parsedSample
		for _, s := range got {
			gotSamples = append(gotSamples, parsedSample{
				Labels:    s.Metric.Copy(),
				Value:     s.F,
				Histogram: promqltest.HistogramTestExpression(s.H),
			})
		}

		var expSamples []parsedSample
		for _, s := range testCase.ExpSamples {
			lb, err := parser.ParseMetric(s.Labels)
			var hist *histogram.FloatHistogram
			if err == nil && s.Histogram != "" {
				_, values, parseErr := parser.ParseSeriesDesc("{} " + s.Histogram)
				switch {
				case parseErr != nil:
					err = parseErr
				case len(values) != 1:
					err = fmt.Errorf("expected 1 value, got %d", len(values))
				case values[0].Histogram == nil:
					err = fmt.Errorf("expected histogram, got %v", values[0])
				default:
					hist = values[0].Histogram
				}
			}
			if err != nil {
				err = fmt.Errorf("labels %q: %w", s.Labels, err)
				errs = append(errs, fmt.Errorf(" expr: %q, time: %s, err: %w", testCase.Expr,
					testCase.EvalTime.String(), err))
				continue Outer
			}
			expSamples = append(expSamples, parsedSample{
				Labels:    lb,
				Value:     s.Value,
				Histogram: promqltest.HistogramTestExpression(hist),
			})
		}

		sort.Slice(expSamples, func(i, j int) bool {
			return labels.Compare(expSamples[i].Labels, expSamples[j].Labels) <= 0
		})
		sort.Slice(gotSamples, func(i, j int) bool {
			return labels.Compare(gotSamples[i].Labels, gotSamples[j].Labels) <= 0
		})
		if !cmp.Equal(expSamples, gotSamples, cmp.Comparer(labels.Equal)) {
			errs = append(errs, fmt.Errorf(" expr: %q, time: %s,\n exp: %v\n got: %v", testCase.Expr,
				testCase.EvalTime.String(), parsedSamplesString(expSamples), parsedSamplesString(gotSamples)))
		}
	}

	if debug {
		ts := tg.maxEvalTime()
		// Potentially a test can be specified at a time with fractional seconds,
		// which PromQL cannot represent, so round up to the next whole second.
		ts = (ts + time.Second).Truncate(time.Second)
		expr := fmt.Sprintf(`{__name__=~".+"}[%v]`, ts)
		q, err := suite.QueryEngine().NewInstantQuery(context.Background(), suite.Queryable(), nil, expr, mint.Add(ts))
		if err != nil {
			fmt.Printf("DEBUG: Failed querying, expr: %q, err: %v\n", expr, err)
			return errs
		}
		res := q.Exec(suite.Context())
		if res.Err != nil {
			fmt.Printf("DEBUG: Failed query exec, expr: %q, err: %v\n", expr, res.Err)
			return errs
		}
		switch v := res.Value.(type) {
		case promql.Matrix:
			fmt.Printf("DEBUG: Dump of all data (input_series and rules) at %v:\n", ts)
			fmt.Println(v.String())
		default:
			fmt.Printf("DEBUG: Got unexpected type %T\n", v)
			return errs
		}
	}

	if len(errs) > 0 {
		return errs
	}
	return nil
}

// seriesLoadingString returns the input series in PromQL notation.
func (tg *testGroup) seriesLoadingString() string {
	result := fmt.Sprintf("load %v\n", shortDuration(tg.Interval))
	for _, is := range tg.InputSeries {
		result += fmt.Sprintf(" %v %v\n", is.Series, is.Values)
	}
	return result
}
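
// For example (illustrative series and values), with a 1m interval and a single
// input series the string built above would look like:
//
//	load 1m
//	 up{job="prometheus"} 1 0 0 0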

func shortDuration(d model.Duration) string {
	s := d.String()
	if strings.HasSuffix(s, "m0s") {
		s = s[:len(s)-2]
	}
	if strings.HasSuffix(s, "h0m") {
		s = s[:len(s)-2]
	}
	return s
}
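
// For example, a rendered "5m0s" becomes "5m" and "1h0m" becomes "1h"
// (so "1h0m0s" collapses to "1h").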

// orderedGroups returns a slice of `*rules.Group` from `groupsMap` which follows the order
// mentioned by `groupOrderMap`. NOTE: This is partial ordering.
func orderedGroups(groupsMap map[string]*rules.Group, groupOrderMap map[string]int) []*rules.Group {
	groups := make([]*rules.Group, 0, len(groupsMap))
	for _, g := range groupsMap {
		groups = append(groups, g)
	}
	sort.Slice(groups, func(i, j int) bool {
		return groupOrderMap[groups[i].Name()] < groupOrderMap[groups[j].Name()]
	})
	return groups
}
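
// Note that groups not mentioned in groupOrderMap receive the zero value 0, so
// they sort ahead of (or tie with) the explicitly ordered groups; this is what
// makes the ordering above partial.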

// maxEvalTime returns the max eval time among all alert and promql unit tests.
func (tg *testGroup) maxEvalTime() time.Duration {
	var maxd model.Duration
	for _, alert := range tg.AlertRuleTests {
		if alert.EvalTime > maxd {
			maxd = alert.EvalTime
		}
	}
	for _, pet := range tg.PromqlExprTests {
		if pet.EvalTime > maxd {
			maxd = pet.EvalTime
		}
	}
	return time.Duration(maxd)
}

func query(ctx context.Context, qs string, t time.Time, engine *promql.Engine, qu storage.Queryable) (promql.Vector, error) {
	q, err := engine.NewInstantQuery(ctx, qu, nil, qs, t)
	if err != nil {
		return nil, err
	}
	res := q.Exec(ctx)
	if res.Err != nil {
		return nil, res.Err
	}
	switch v := res.Value.(type) {
	case promql.Vector:
		return v, nil
	case promql.Scalar:
		return promql.Vector{promql.Sample{
			T:      v.T,
			F:      v.V,
			Metric: labels.Labels{},
		}}, nil
	default:
		return nil, errors.New("rule result is not a vector or scalar")
	}
}

// indentLines prefixes each line in the supplied string with the given "indent"
// string.
func indentLines(lines, indent string) string {
	sb := strings.Builder{}
	n := strings.Split(lines, "\n")
	for i, l := range n {
		if i > 0 {
			sb.WriteString(indent)
		}
		sb.WriteString(l)
		if i != len(n)-1 {
			sb.WriteRune('\n')
		}
	}
	return sb.String()
}
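
// For example, indentLines("a\nb", "  ") returns "a\n  b": the first line is
// left as-is and every subsequent line gets the indent prefix.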

type labelsAndAnnotations []labelAndAnnotation

func (la labelsAndAnnotations) Len() int      { return len(la) }
func (la labelsAndAnnotations) Swap(i, j int) { la[i], la[j] = la[j], la[i] }
func (la labelsAndAnnotations) Less(i, j int) bool {
	diff := labels.Compare(la[i].Labels, la[j].Labels)
	if diff != 0 {
		return diff < 0
	}
	return labels.Compare(la[i].Annotations, la[j].Annotations) < 0
}

func (la labelsAndAnnotations) String() string {
	if len(la) == 0 {
		return "[]"
	}
	s := "[\n0:" + indentLines("\n"+la[0].String(), " ")
	for i, l := range la[1:] {
		s += ",\n" + strconv.Itoa(i+1) + ":" + indentLines("\n"+l.String(), " ")
	}
	s += "\n]"

	return s
}

type labelAndAnnotation struct {
	Labels      labels.Labels
	Annotations labels.Labels
}

func (la *labelAndAnnotation) String() string {
	return "Labels:" + la.Labels.String() + "\nAnnotations:" + la.Annotations.String()
}

type series struct {
	Series string `yaml:"series"`
	Values string `yaml:"values"`
}

type alertTestCase struct {
	EvalTime  model.Duration `yaml:"eval_time"`
	Alertname string         `yaml:"alertname"`
	ExpAlerts []alert        `yaml:"exp_alerts"`
}

type alert struct {
	ExpLabels      map[string]string `yaml:"exp_labels"`
	ExpAnnotations map[string]string `yaml:"exp_annotations"`
}

type promqlTestCase struct {
	Expr       string         `yaml:"expr"`
	EvalTime   model.Duration `yaml:"eval_time"`
	ExpSamples []sample       `yaml:"exp_samples"`
}

type sample struct {
	Labels    string  `yaml:"labels"`
	Value     float64 `yaml:"value"`
	Histogram string  `yaml:"histogram"` // A non-empty string means Value is ignored.
}

// parsedSample is a sample with parsed Labels.
type parsedSample struct {
	Labels    labels.Labels
	Value     float64
	Histogram string // TestExpression() of histogram.FloatHistogram
}

func parsedSamplesString(pss []parsedSample) string {
	if len(pss) == 0 {
		return "nil"
	}
	s := pss[0].String()
	for _, ps := range pss[1:] {
		s += ", " + ps.String()
	}
	return s
}

func (ps *parsedSample) String() string {
	if ps.Histogram != "" {
		return ps.Labels.String() + " " + ps.Histogram
	}
	return ps.Labels.String() + " " + strconv.FormatFloat(ps.Value, 'E', -1, 64)
}