refine error handling in prometheus (#5388)

i) Uses the more idiomatic Wrap and Wrapf methods for creating nested errors.
ii) Fixes some incorrect usages of fmt.Errorf where the error messages don't contain any formatting directives, replacing them with errors.New.
iii) Does away with the fmt package for error construction in favour of pkg/errors (see the sketch below).
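
As an illustration of the pattern this change applies throughout, here is a minimal, self-contained sketch; it is not code from this commit, and the checkFile helper and its message text are made up for the example. errors.Wrapf keeps the original error as a recoverable cause instead of flattening it into a new string, and errors.New covers messages with no formatting directives:

package main

import (
	"fmt"
	"os"

	"github.com/pkg/errors"
)

// checkFile is a hypothetical helper, not taken from this commit.
func checkFile(path string) error {
	if path == "" {
		// No formatting directives, so errors.New is enough.
		return errors.New("path must not be empty")
	}
	if _, err := os.Stat(path); err != nil {
		// Before: return fmt.Errorf("error checking file %q: %s", path, err)
		// After: the original error is kept as the cause and the message is prepended.
		return errors.Wrapf(err, "error checking file %q", path)
	}
	return nil
}

func main() {
	err := checkFile("does-not-exist")
	fmt.Println(err)               // e.g. error checking file "does-not-exist": stat does-not-exist: no such file or directory
	fmt.Println(errors.Cause(err)) // e.g. stat does-not-exist: no such file or directory
}

The wrapped error still prints as a single message, while errors.Cause lets callers inspect the underlying error when they need to.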

Signed-off-by: tariqibrahim <tariq181290@gmail.com>
Authored by Tariq Ibrahim on 2019-03-25 16:01:12 -07:00; committed by Julius Volz
parent 0a87dcd416
commit 8fdfa8abea
59 changed files with 396 additions and 356 deletions

View file

@@ -35,23 +35,23 @@ import (
 "github.com/go-kit/kit/log"
 "github.com/go-kit/kit/log/level"
+conntrack "github.com/mwitkow/go-conntrack"
 "github.com/oklog/oklog/pkg/group"
 "github.com/pkg/errors"
 "github.com/prometheus/client_golang/prometheus"
 "github.com/prometheus/common/model"
+"github.com/prometheus/common/promlog"
 "github.com/prometheus/common/version"
-prom_runtime "github.com/prometheus/prometheus/pkg/runtime"
-"gopkg.in/alecthomas/kingpin.v2"
+kingpin "gopkg.in/alecthomas/kingpin.v2"
 "k8s.io/klog"
-"github.com/mwitkow/go-conntrack"
-"github.com/prometheus/common/promlog"
 promlogflag "github.com/prometheus/common/promlog/flag"
 "github.com/prometheus/prometheus/config"
 "github.com/prometheus/prometheus/discovery"
 sd_config "github.com/prometheus/prometheus/discovery/config"
 "github.com/prometheus/prometheus/notifier"
 "github.com/prometheus/prometheus/pkg/relabel"
+prom_runtime "github.com/prometheus/prometheus/pkg/runtime"
 "github.com/prometheus/prometheus/promql"
 "github.com/prometheus/prometheus/rules"
 "github.com/prometheus/prometheus/scrape"
@@ -439,7 +439,7 @@ func main() {
 fs, err := filepath.Glob(pat)
 if err != nil {
 // The only error can be a bad pattern.
-return fmt.Errorf("error retrieving rule files for %s: %s", pat, err)
+return errors.Wrapf(err, "error retrieving rule files for %s", pat)
 }
 files = append(files, fs...)
 }
@@ -600,7 +600,7 @@ func main() {
 }
 if err := reloadConfig(cfg.configFile, logger, reloaders...); err != nil {
-return fmt.Errorf("error loading config from %q: %s", cfg.configFile, err)
+return errors.Wrapf(err, "error loading config from %q", cfg.configFile)
 }
 reloadReady.Close()
@@ -650,7 +650,7 @@ func main() {
 &cfg.tsdb,
 )
 if err != nil {
-return fmt.Errorf("opening storage failed: %s", err)
+return errors.Wrapf(err, "opening storage failed")
 }
 level.Info(logger).Log("msg", "TSDB started")
 level.Debug(logger).Log("msg", "TSDB options",
@@ -682,7 +682,7 @@ func main() {
 g.Add(
 func() error {
 if err := webHandler.Run(ctxWeb); err != nil {
-return fmt.Errorf("error starting web server: %s", err)
+return errors.Wrapf(err, "error starting web server")
 }
 return nil
 },
@@ -734,7 +734,7 @@ func reloadConfig(filename string, logger log.Logger, rls ...func(*config.Config
 conf, err := config.LoadFile(filename)
 if err != nil {
-return fmt.Errorf("couldn't load configuration (--config.file=%q): %v", filename, err)
+return errors.Wrapf(err, "couldn't load configuration (--config.file=%q)", filename)
 }
 failed := false
@@ -745,7 +745,7 @@ func reloadConfig(filename string, logger log.Logger, rls ...func(*config.Config
 }
 }
 if failed {
-return fmt.Errorf("one or more errors occurred while applying the new configuration (--config.file=%q)", filename)
+return errors.Errorf("one or more errors occurred while applying the new configuration (--config.file=%q)", filename)
 }
 promql.SetDefaultEvaluationInterval(time.Duration(conf.GlobalConfig.EvaluationInterval))
 level.Info(logger).Log("msg", "Completed loading of configuration file", "filename", filename)
@@ -782,7 +782,7 @@ func computeExternalURL(u, listenAddr string) (*url.URL, error) {
 }
 if startsOrEndsWithQuote(u) {
-return nil, fmt.Errorf("URL must not begin or end with quotes")
+return nil, errors.New("URL must not begin or end with quotes")
 }
 eu, err := url.Parse(u)

View file

@@ -16,8 +16,9 @@ package main
 import (
 "archive/tar"
 "compress/gzip"
-"fmt"
 "os"
+"github.com/pkg/errors"
 )
 const filePerm = 0644
@@ -31,7 +32,7 @@ type tarGzFileWriter struct {
 func newTarGzFileWriter(archiveName string) (*tarGzFileWriter, error) {
 file, err := os.Create(archiveName)
 if err != nil {
-return nil, fmt.Errorf("error creating archive %q: %s", archiveName, err)
+return nil, errors.Wrapf(err, "error creating archive %q", archiveName)
 }
 gzw := gzip.NewWriter(file)
 tw := tar.NewWriter(gzw)

View file

@@ -15,10 +15,10 @@ package main
 import (
 "context"
-"fmt"
 "net/http"
 "time"
+"github.com/pkg/errors"
 "github.com/prometheus/client_golang/api"
 )
@@ -34,7 +34,7 @@ func newPrometheusHTTPClient(serverURL string) (*prometheusHTTPClient, error) {
 Address: serverURL,
 })
 if err != nil {
-return nil, fmt.Errorf("error creating HTTP client: %s", err)
+return nil, errors.Wrapf(err, "error creating HTTP client")
 }
 return &prometheusHTTPClient{
 requestTimeout: defaultTimeout,

View file

@@ -26,15 +26,15 @@ import (
 "strings"
 "time"
-"gopkg.in/alecthomas/kingpin.v2"
 "github.com/google/pprof/profile"
 "github.com/pkg/errors"
 "github.com/prometheus/client_golang/api"
-"github.com/prometheus/client_golang/api/prometheus/v1"
+v1 "github.com/prometheus/client_golang/api/prometheus/v1"
 config_util "github.com/prometheus/common/config"
 "github.com/prometheus/common/model"
 "github.com/prometheus/common/version"
+kingpin "gopkg.in/alecthomas/kingpin.v2"
 "github.com/prometheus/prometheus/config"
 "github.com/prometheus/prometheus/pkg/rulefmt"
 "github.com/prometheus/prometheus/util/promlint"
@@ -202,10 +202,10 @@ func checkConfig(filename string) ([]string, error) {
 // If an explicit file was given, error if it is not accessible.
 if !strings.Contains(rf, "*") {
 if len(rfs) == 0 {
-return nil, fmt.Errorf("%q does not point to an existing file", rf)
+return nil, errors.Errorf("%q does not point to an existing file", rf)
 }
 if err := checkFileExists(rfs[0]); err != nil {
-return nil, fmt.Errorf("error checking rule file %q: %s", rfs[0], err)
+return nil, errors.Wrapf(err, "error checking rule file %q", rfs[0])
 }
 }
 ruleFiles = append(ruleFiles, rfs...)
@@ -213,7 +213,7 @@ func checkConfig(filename string) ([]string, error) {
 for _, scfg := range cfg.ScrapeConfigs {
 if err := checkFileExists(scfg.HTTPClientConfig.BearerTokenFile); err != nil {
-return nil, fmt.Errorf("error checking bearer token file %q: %s", scfg.HTTPClientConfig.BearerTokenFile, err)
+return nil, errors.Wrapf(err, "error checking bearer token file %q", scfg.HTTPClientConfig.BearerTokenFile)
 }
 if err := checkTLSConfig(scfg.HTTPClientConfig.TLSConfig); err != nil {
@@ -247,17 +247,17 @@ func checkConfig(filename string) ([]string, error) {
 func checkTLSConfig(tlsConfig config_util.TLSConfig) error {
 if err := checkFileExists(tlsConfig.CertFile); err != nil {
-return fmt.Errorf("error checking client cert file %q: %s", tlsConfig.CertFile, err)
+return errors.Wrapf(err, "error checking client cert file %q", tlsConfig.CertFile)
 }
 if err := checkFileExists(tlsConfig.KeyFile); err != nil {
-return fmt.Errorf("error checking client key file %q: %s", tlsConfig.KeyFile, err)
+return errors.Wrapf(err, "error checking client key file %q", tlsConfig.KeyFile)
 }
 if len(tlsConfig.CertFile) > 0 && len(tlsConfig.KeyFile) == 0 {
-return fmt.Errorf("client cert file %q specified without client key file", tlsConfig.CertFile)
+return errors.Errorf("client cert file %q specified without client key file", tlsConfig.CertFile)
 }
 if len(tlsConfig.KeyFile) > 0 && len(tlsConfig.CertFile) == 0 {
-return fmt.Errorf("client key file %q specified without client cert file", tlsConfig.KeyFile)
+return errors.Errorf("client key file %q specified without client cert file", tlsConfig.KeyFile)
 }
 return nil
@@ -510,7 +510,7 @@ func parseTime(s string) (time.Time, error) {
 if t, err := time.Parse(time.RFC3339Nano, s); err == nil {
 return t, nil
 }
-return time.Time{}, fmt.Errorf("cannot parse %q to a valid timestamp", s)
+return time.Time{}, errors.Errorf("cannot parse %q to a valid timestamp", s)
 }
 type endpointsGroup struct {

View file

@@ -24,7 +24,8 @@ import (
 "strings"
 "time"
-"gopkg.in/yaml.v2"
+"github.com/pkg/errors"
+yaml "gopkg.in/yaml.v2"
 "github.com/prometheus/prometheus/pkg/labels"
 "github.com/prometheus/prometheus/promql"
@@ -84,7 +85,7 @@ func ruleUnitTest(filename string) []error {
 groupOrderMap := make(map[string]int)
 for i, gn := range unitTestInp.GroupEvalOrder {
 if _, ok := groupOrderMap[gn]; ok {
-return []error{fmt.Errorf("group name repeated in evaluation order: %s", gn)}
+return []error{errors.Errorf("group name repeated in evaluation order: %s", gn)}
 }
 groupOrderMap[gn] = i
 }
@@ -261,14 +262,14 @@ func (tg *testGroup) test(mint, maxt time.Time, evalInterval time.Duration, grou
 }
 if gotAlerts.Len() != expAlerts.Len() {
-errs = append(errs, fmt.Errorf(" alertname:%s, time:%s, \n exp:%#v, \n got:%#v",
+errs = append(errs, errors.Errorf(" alertname:%s, time:%s, \n exp:%#v, \n got:%#v",
 testcase.Alertname, testcase.EvalTime.String(), expAlerts.String(), gotAlerts.String()))
 } else {
 sort.Sort(gotAlerts)
 sort.Sort(expAlerts)
 if !reflect.DeepEqual(expAlerts, gotAlerts) {
-errs = append(errs, fmt.Errorf(" alertname:%s, time:%s, \n exp:%#v, \n got:%#v",
+errs = append(errs, errors.Errorf(" alertname:%s, time:%s, \n exp:%#v, \n got:%#v",
 testcase.Alertname, testcase.EvalTime.String(), expAlerts.String(), gotAlerts.String()))
 }
 }
@@ -284,7 +285,7 @@ Outer:
 got, err := query(suite.Context(), testCase.Expr, mint.Add(testCase.EvalTime),
 suite.QueryEngine(), suite.Queryable())
 if err != nil {
-errs = append(errs, fmt.Errorf(" expr:'%s', time:%s, err:%s", testCase.Expr,
+errs = append(errs, errors.Errorf(" expr:'%s', time:%s, err:%s", testCase.Expr,
 testCase.EvalTime.String(), err.Error()))
 continue
 }
@@ -301,7 +302,7 @@ Outer:
 for _, s := range testCase.ExpSamples {
 lb, err := promql.ParseMetric(s.Labels)
 if err != nil {
-errs = append(errs, fmt.Errorf(" expr:'%s', time:%s, err:%s", testCase.Expr,
+errs = append(errs, errors.Errorf(" expr:'%s', time:%s, err:%s", testCase.Expr,
 testCase.EvalTime.String(), err.Error()))
 continue Outer
 }
@@ -318,7 +319,7 @@ Outer:
 return labels.Compare(gotSamples[i].Labels, gotSamples[j].Labels) <= 0
 })
 if !reflect.DeepEqual(expSamples, gotSamples) {
-errs = append(errs, fmt.Errorf(" expr:'%s', time:%s, \n exp:%#v, \n got:%#v", testCase.Expr,
+errs = append(errs, errors.Errorf(" expr:'%s', time:%s, \n exp:%#v, \n got:%#v", testCase.Expr,
 testCase.EvalTime.String(), parsedSamplesString(expSamples), parsedSamplesString(gotSamples)))
 }
 }
@@ -397,7 +398,7 @@ func query(ctx context.Context, qs string, t time.Time, engine *promql.Engine, q
 Metric: labels.Labels{},
 }}, nil
 default:
-return nil, fmt.Errorf("rule result is not a vector or scalar")
+return nil, errors.New("rule result is not a vector or scalar")
 }
 }

View file

@@ -22,13 +22,14 @@ import (
 "strings"
 "time"
-"github.com/prometheus/prometheus/pkg/labels"
-"github.com/prometheus/prometheus/pkg/relabel"
+"github.com/pkg/errors"
 config_util "github.com/prometheus/common/config"
 "github.com/prometheus/common/model"
+yaml "gopkg.in/yaml.v2"
 sd_config "github.com/prometheus/prometheus/discovery/config"
-"gopkg.in/yaml.v2"
+"github.com/prometheus/prometheus/pkg/labels"
+"github.com/prometheus/prometheus/pkg/relabel"
 )
 var (
@@ -59,7 +60,7 @@ func LoadFile(filename string) (*Config, error) {
 }
 cfg, err := Load(string(content))
 if err != nil {
-return nil, fmt.Errorf("parsing YAML file %s: %v", filename, err)
+return nil, errors.Wrapf(err, "parsing YAML file %s", filename)
 }
 resolveFilepaths(filepath.Dir(filename), cfg)
 return cfg, nil
@@ -234,14 +235,14 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
 for _, rf := range c.RuleFiles {
 if !patRulePath.MatchString(rf) {
-return fmt.Errorf("invalid rule file path %q", rf)
+return errors.Errorf("invalid rule file path %q", rf)
 }
 }
 // Do global overrides and validate unique names.
 jobNames := map[string]struct{}{}
 for _, scfg := range c.ScrapeConfigs {
 if scfg == nil {
-return fmt.Errorf("empty or null scrape config section")
+return errors.New("empty or null scrape config section")
 }
 // First set the correct scrape interval, then check that the timeout
 // (inferred or explicit) is not greater than that.
@@ -249,7 +250,7 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
 scfg.ScrapeInterval = c.GlobalConfig.ScrapeInterval
 }
 if scfg.ScrapeTimeout > scfg.ScrapeInterval {
-return fmt.Errorf("scrape timeout greater than scrape interval for scrape config with job name %q", scfg.JobName)
+return errors.Errorf("scrape timeout greater than scrape interval for scrape config with job name %q", scfg.JobName)
 }
 if scfg.ScrapeTimeout == 0 {
 if c.GlobalConfig.ScrapeTimeout > scfg.ScrapeInterval {
@@ -260,18 +261,18 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
 }
 if _, ok := jobNames[scfg.JobName]; ok {
-return fmt.Errorf("found multiple scrape configs with job name %q", scfg.JobName)
+return errors.Errorf("found multiple scrape configs with job name %q", scfg.JobName)
 }
 jobNames[scfg.JobName] = struct{}{}
 }
 for _, rwcfg := range c.RemoteWriteConfigs {
 if rwcfg == nil {
-return fmt.Errorf("empty or null remote write config section")
+return errors.New("empty or null remote write config section")
 }
 }
 for _, rrcfg := range c.RemoteReadConfigs {
 if rrcfg == nil {
-return fmt.Errorf("empty or null remote read config section")
+return errors.New("empty or null remote read config section")
 }
 }
 return nil
@@ -302,10 +303,10 @@ func (c *GlobalConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 for _, l := range gc.ExternalLabels {
 if !model.LabelName(l.Name).IsValid() {
-return fmt.Errorf("%q is not a valid label name", l.Name)
+return errors.Errorf("%q is not a valid label name", l.Name)
 }
 if !model.LabelValue(l.Value).IsValid() {
-return fmt.Errorf("%q is not a valid label value", l.Value)
+return errors.Errorf("%q is not a valid label value", l.Value)
 }
 }
@@ -315,7 +316,7 @@ func (c *GlobalConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 gc.ScrapeInterval = DefaultGlobalConfig.ScrapeInterval
 }
 if gc.ScrapeTimeout > gc.ScrapeInterval {
-return fmt.Errorf("global scrape timeout greater than scrape interval")
+return errors.New("global scrape timeout greater than scrape interval")
 }
 if gc.ScrapeTimeout == 0 {
 if DefaultGlobalConfig.ScrapeTimeout > gc.ScrapeInterval {
@@ -381,7 +382,7 @@ func (c *ScrapeConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 return err
 }
 if len(c.JobName) == 0 {
-return fmt.Errorf("job_name is empty")
+return errors.New("job_name is empty")
 }
 // The UnmarshalYAML method of HTTPClientConfig is not being called because it's not a pointer.
@@ -411,12 +412,12 @@ func (c *ScrapeConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 for _, rlcfg := range c.RelabelConfigs {
 if rlcfg == nil {
-return fmt.Errorf("empty or null target relabeling rule in scrape config")
+return errors.New("empty or null target relabeling rule in scrape config")
 }
 }
 for _, rlcfg := range c.MetricRelabelConfigs {
 if rlcfg == nil {
-return fmt.Errorf("empty or null metric relabeling rule in scrape config")
+return errors.New("empty or null metric relabeling rule in scrape config")
 }
 }
@@ -447,7 +448,7 @@ func (c *AlertingConfig) UnmarshalYAML(unmarshal func(interface{}) error) error
 for _, rlcfg := range c.AlertRelabelConfigs {
 if rlcfg == nil {
-return fmt.Errorf("empty or null alert relabeling rule")
+return errors.New("empty or null alert relabeling rule")
 }
 }
 return nil
@@ -507,7 +508,7 @@ func (c *AlertmanagerConfig) UnmarshalYAML(unmarshal func(interface{}) error) er
 for _, rlcfg := range c.RelabelConfigs {
 if rlcfg == nil {
-return fmt.Errorf("empty or null Alertmanager target relabeling rule")
+return errors.New("empty or null Alertmanager target relabeling rule")
 }
 }
@@ -524,7 +525,7 @@ func (c *AlertmanagerConfig) UnmarshalYAML(unmarshal func(interface{}) error) er
 func CheckTargetAddress(address model.LabelValue) error {
 // For now check for a URL, we may want to expand this later.
 if strings.Contains(string(address), "/") {
-return fmt.Errorf("%q is not a valid hostname", address)
+return errors.Errorf("%q is not a valid hostname", address)
 }
 return nil
 }
@@ -561,11 +562,11 @@ func (c *RemoteWriteConfig) UnmarshalYAML(unmarshal func(interface{}) error) err
 return err
 }
 if c.URL == nil {
-return fmt.Errorf("url for remote_write is empty")
+return errors.New("url for remote_write is empty")
 }
 for _, rlcfg := range c.WriteRelabelConfigs {
 if rlcfg == nil {
-return fmt.Errorf("empty or null relabeling rule in remote write config")
+return errors.New("empty or null relabeling rule in remote write config")
 }
 }
@@ -623,7 +624,7 @@ func (c *RemoteReadConfig) UnmarshalYAML(unmarshal func(interface{}) error) erro
 return err
 }
 if c.URL == nil {
-return fmt.Errorf("url for remote_read is empty")
+return errors.New("url for remote_read is empty")
 }
 // The UnmarshalYAML method of HTTPClientConfig is not being called because it's not a pointer.
 // We cannot make it a pointer as the parser panics for inlined pointer structs.

View file

@@ -29,6 +29,7 @@ import (
 "github.com/Azure/go-autorest/autorest/azure"
 "github.com/go-kit/kit/log"
 "github.com/go-kit/kit/log/level"
+"github.com/pkg/errors"
 config_util "github.com/prometheus/common/config"
 "github.com/prometheus/common/model"
@@ -76,7 +77,7 @@ type SDConfig struct {
 func validateAuthParam(param, name string) error {
 if len(param) == 0 {
-return fmt.Errorf("azure SD configuration requires a %s", name)
+return errors.Errorf("azure SD configuration requires a %s", name)
 }
 return nil
 }
@@ -107,7 +108,7 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 }
 if c.AuthenticationMethod != authMethodOAuth && c.AuthenticationMethod != authMethodManagedIdentity {
-return fmt.Errorf("unknown authentication_type %q. Supported types are %q or %q", c.AuthenticationMethod, authMethodOAuth, authMethodManagedIdentity)
+return errors.Errorf("unknown authentication_type %q. Supported types are %q or %q", c.AuthenticationMethod, authMethodOAuth, authMethodManagedIdentity)
 }
 return nil
@@ -227,7 +228,7 @@ func newAzureResourceFromID(id string, logger log.Logger) (azureResource, error)
 // /subscriptions/SUBSCRIPTION_ID/resourceGroups/RESOURCE_GROUP/providers/PROVIDER/TYPE/NAME/TYPE/NAME
 s := strings.Split(id, "/")
 if len(s) != 9 && len(s) != 11 {
-err := fmt.Errorf("invalid ID '%s'. Refusing to create azureResource", id)
+err := errors.Errorf("invalid ID '%s'. Refusing to create azureResource", id)
 level.Error(logger).Log("err", err)
 return azureResource{}, err
 }
@@ -243,12 +244,12 @@ func (d *Discovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) {
 client, err := createAzureClient(*d.cfg)
 if err != nil {
-return nil, fmt.Errorf("could not create Azure client: %s", err)
+return nil, errors.Wrap(err, "could not create Azure client")
 }
 machines, err := client.getVMs(ctx)
 if err != nil {
-return nil, fmt.Errorf("could not get virtual machines: %s", err)
+return nil, errors.Wrap(err, "could not get virtual machines")
 }
 level.Debug(d.logger).Log("msg", "Found virtual machines during Azure discovery.", "count", len(machines))
@@ -256,13 +257,13 @@ func (d *Discovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) {
 // Load the vms managed by scale sets.
 scaleSets, err := client.getScaleSets(ctx)
 if err != nil {
-return nil, fmt.Errorf("could not get virtual machine scale sets: %s", err)
+return nil, errors.Wrap(err, "could not get virtual machine scale sets")
 }
 for _, scaleSet := range scaleSets {
 scaleSetVms, err := client.getScaleSetVMs(ctx, scaleSet)
 if err != nil {
-return nil, fmt.Errorf("could not get virtual machine scale set vms: %s", err)
+return nil, errors.Wrap(err, "could not get virtual machine scale set vms")
 }
 machines = append(machines, scaleSetVms...)
 }
@@ -342,7 +343,7 @@ func (d *Discovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) {
 }
 // If we made it here, we don't have a private IP which should be impossible.
 // Return an empty target and error to ensure an all or nothing situation.
-err = fmt.Errorf("unable to find a private IP for VM %s", vm.Name)
+err = errors.Errorf("unable to find a private IP for VM %s", vm.Name)
 ch <- target{labelSet: nil, err: err}
 return
 }
@@ -357,7 +358,7 @@ func (d *Discovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) {
 var tg targetgroup.Group
 for tgt := range ch {
 if tgt.err != nil {
-return nil, fmt.Errorf("unable to complete Azure service discovery: %s", err)
+return nil, errors.Wrap(err, "unable to complete Azure service discovery")
 }
 if tgt.labelSet != nil {
 tg.Targets = append(tg.Targets, tgt.labelSet)
@@ -371,7 +372,7 @@ func (client *azureClient) getVMs(ctx context.Context) ([]virtualMachine, error)
 var vms []virtualMachine
 result, err := client.vm.ListAll(ctx)
 if err != nil {
-return nil, fmt.Errorf("could not list virtual machines: %s", err.Error())
+return nil, errors.Wrap(err, "could not list virtual machines")
 }
 for result.NotDone() {
 for _, vm := range result.Values() {
@@ -379,7 +380,7 @@ func (client *azureClient) getVMs(ctx context.Context) ([]virtualMachine, error)
 }
 err = result.NextWithContext(ctx)
 if err != nil {
-return nil, fmt.Errorf("could not list virtual machines: %s", err.Error())
+return nil, errors.Wrap(err, "could not list virtual machines")
 }
 }
@@ -390,13 +391,13 @@ func (client *azureClient) getScaleSets(ctx context.Context) ([]compute.VirtualM
 var scaleSets []compute.VirtualMachineScaleSet
 result, err := client.vmss.ListAll(ctx)
 if err != nil {
-return nil, fmt.Errorf("could not list virtual machine scale sets: %s", err.Error())
+return nil, errors.Wrap(err, "could not list virtual machine scale sets")
 }
 for result.NotDone() {
 scaleSets = append(scaleSets, result.Values()...)
 err = result.NextWithContext(ctx)
 if err != nil {
-return nil, fmt.Errorf("could not list virtual machine scale sets: %s", err.Error())
+return nil, errors.Wrap(err, "could not list virtual machine scale sets")
 }
 }
@@ -409,12 +410,12 @@ func (client *azureClient) getScaleSetVMs(ctx context.Context, scaleSet compute.
 r, err := newAzureResourceFromID(*scaleSet.ID, nil)
 if err != nil {
-return nil, fmt.Errorf("could not parse scale set ID: %s", err)
+return nil, errors.Wrap(err, "could not parse scale set ID")
 }
 result, err := client.vmssvm.List(ctx, r.ResourceGroup, *(scaleSet.Name), "", "", "")
 if err != nil {
-return nil, fmt.Errorf("could not list virtual machine scale set vms: %s", err.Error())
+return nil, errors.Wrap(err, "could not list virtual machine scale set vms")
 }
 for result.NotDone() {
 for _, vm := range result.Values() {
@@ -422,7 +423,7 @@ func (client *azureClient) getScaleSetVMs(ctx context.Context, scaleSet compute.
 }
 err = result.NextWithContext(ctx)
 if err != nil {
-return nil, fmt.Errorf("could not list virtual machine scale set vms: %s", err.Error())
+return nil, errors.Wrap(err, "could not list virtual machine scale set vms")
 }
 }

View file

@@ -14,7 +14,7 @@
 package config
 import (
-"fmt"
+"github.com/pkg/errors"
 "github.com/prometheus/prometheus/discovery/azure"
 "github.com/prometheus/prometheus/discovery/consul"
@@ -64,62 +64,62 @@ type ServiceDiscoveryConfig struct {
 func (c *ServiceDiscoveryConfig) Validate() error {
 for _, cfg := range c.AzureSDConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in azure_sd_configs")
+return errors.New("empty or null section in azure_sd_configs")
 }
 }
 for _, cfg := range c.ConsulSDConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in consul_sd_configs")
+return errors.New("empty or null section in consul_sd_configs")
 }
 }
 for _, cfg := range c.DNSSDConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in dns_sd_configs")
+return errors.New("empty or null section in dns_sd_configs")
 }
 }
 for _, cfg := range c.EC2SDConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in ec2_sd_configs")
+return errors.New("empty or null section in ec2_sd_configs")
 }
 }
 for _, cfg := range c.FileSDConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in file_sd_configs")
+return errors.New("empty or null section in file_sd_configs")
 }
 }
 for _, cfg := range c.GCESDConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in gce_sd_configs")
+return errors.New("empty or null section in gce_sd_configs")
 }
 }
 for _, cfg := range c.KubernetesSDConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in kubernetes_sd_configs")
+return errors.New("empty or null section in kubernetes_sd_configs")
 }
 }
 for _, cfg := range c.MarathonSDConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in marathon_sd_configs")
+return errors.New("empty or null section in marathon_sd_configs")
 }
 }
 for _, cfg := range c.NerveSDConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in nerve_sd_configs")
+return errors.New("empty or null section in nerve_sd_configs")
 }
 }
 for _, cfg := range c.OpenstackSDConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in openstack_sd_configs")
+return errors.New("empty or null section in openstack_sd_configs")
 }
 }
 for _, cfg := range c.ServersetSDConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in serverset_sd_configs")
+return errors.New("empty or null section in serverset_sd_configs")
 }
 }
 for _, cfg := range c.StaticConfigs {
 if cfg == nil {
-return fmt.Errorf("empty or null section in static_configs")
+return errors.New("empty or null section in static_configs")
 }
 }
 return nil

View file

@@ -25,10 +25,12 @@ import (
 "github.com/go-kit/kit/log"
 "github.com/go-kit/kit/log/level"
 consul "github.com/hashicorp/consul/api"
-"github.com/mwitkow/go-conntrack"
+conntrack "github.com/mwitkow/go-conntrack"
+"github.com/pkg/errors"
 "github.com/prometheus/client_golang/prometheus"
 config_util "github.com/prometheus/common/config"
 "github.com/prometheus/common/model"
 "github.com/prometheus/prometheus/discovery/targetgroup"
 "github.com/prometheus/prometheus/util/strutil"
 )
@@ -130,7 +132,7 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 return err
 }
 if strings.TrimSpace(c.Server) == "" {
-return fmt.Errorf("consul SD configuration requires a server address")
+return errors.New("consul SD configuration requires a server address")
 }
 return nil
 }
@@ -270,7 +272,7 @@ func (d *Discovery) getDatacenter() error {
 dc, ok := info["Config"]["Datacenter"].(string)
 if !ok {
-err := fmt.Errorf("invalid value '%v' for Config.Datacenter", info["Config"]["Datacenter"])
+err := errors.Errorf("invalid value '%v' for Config.Datacenter", info["Config"]["Datacenter"])
 level.Error(d.logger).Log("msg", "Error retrieving datacenter name", "err", err)
 return err
 }

View file

@@ -24,8 +24,10 @@ import (
 "github.com/go-kit/kit/log"
 "github.com/go-kit/kit/log/level"
 "github.com/miekg/dns"
+"github.com/pkg/errors"
 "github.com/prometheus/client_golang/prometheus"
 "github.com/prometheus/common/model"
 "github.com/prometheus/prometheus/discovery/refresh"
 "github.com/prometheus/prometheus/discovery/targetgroup"
 )
@@ -77,16 +79,16 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 return err
 }
 if len(c.Names) == 0 {
-return fmt.Errorf("DNS-SD config must contain at least one SRV record name")
+return errors.New("DNS-SD config must contain at least one SRV record name")
 }
 switch strings.ToUpper(c.Type) {
 case "SRV":
 case "A", "AAAA":
 if c.Port == 0 {
-return fmt.Errorf("a port is required in DNS-SD configs for all record types except SRV")
+return errors.New("a port is required in DNS-SD configs for all record types except SRV")
 }
 default:
-return fmt.Errorf("invalid DNS-SD records type %s", c.Type)
+return errors.Errorf("invalid DNS-SD records type %s", c.Type)
 }
 return nil
 }
@@ -239,7 +241,7 @@ func (d *Discovery) refreshOne(ctx context.Context, name string, ch chan<- *targ
 func lookupWithSearchPath(name string, qtype uint16, logger log.Logger) (*dns.Msg, error) {
 conf, err := dns.ClientConfigFromFile(resolvConf)
 if err != nil {
-return nil, fmt.Errorf("could not load resolv.conf: %s", err)
+return nil, errors.Wrap(err, "could not load resolv.conf")
 }
 allResponsesValid := true
@@ -265,7 +267,7 @@ func lookupWithSearchPath(name string, qtype uint16, logger log.Logger) (*dns.Ms
 return &dns.Msg{}, nil
 }
 // Outcome 3: boned.
-return nil, fmt.Errorf("could not resolve %q: all servers responded with errors to at least one search domain", name)
+return nil, errors.Errorf("could not resolve %q: all servers responded with errors to at least one search domain", name)
 }
 // lookupFromAnyServer uses all configured servers to try and resolve a specific
@@ -301,7 +303,7 @@ func lookupFromAnyServer(name string, qtype uint16, conf *dns.ClientConfig, logg
 }
 }
-return nil, fmt.Errorf("could not resolve %s: no servers returned a viable answer", name)
+return nil, errors.Errorf("could not resolve %s: no servers returned a viable answer", name)
 }
 // askServerForName makes a request to a specific DNS server for a specific
@@ -323,7 +325,7 @@ func askServerForName(name string, queryType uint16, client *dns.Client, servAdd
 if response.Truncated {
 if client.Net == "tcp" {
-return nil, fmt.Errorf("got truncated message on TCP (64kiB limit exceeded?)")
+return nil, errors.New("got truncated message on TCP (64kiB limit exceeded?)")
 }
 client.Net = "tcp"

View file

@@ -25,11 +25,12 @@ import (
 "github.com/aws/aws-sdk-go/aws/credentials/stscreds"
 "github.com/aws/aws-sdk-go/aws/ec2metadata"
 "github.com/aws/aws-sdk-go/aws/session"
+"github.com/aws/aws-sdk-go/service/ec2"
 "github.com/go-kit/kit/log"
+"github.com/pkg/errors"
+config_util "github.com/prometheus/common/config"
 "github.com/prometheus/common/model"
-"github.com/aws/aws-sdk-go/service/ec2"
-config_util "github.com/prometheus/common/config"
 "github.com/prometheus/prometheus/discovery/refresh"
 "github.com/prometheus/prometheus/discovery/targetgroup"
 "github.com/prometheus/prometheus/util/strutil"
@@ -95,13 +96,13 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 metadata := ec2metadata.New(sess)
 region, err := metadata.Region()
 if err != nil {
-return fmt.Errorf("EC2 SD configuration requires a region")
+return errors.New("EC2 SD configuration requires a region")
 }
 c.Region = region
 }
 for _, f := range c.Filters {
 if len(f.Values) == 0 {
-return fmt.Errorf("EC2 SD configuration filter values cannot be empty")
+return errors.New("EC2 SD configuration filter values cannot be empty")
 }
 }
 return nil
@@ -155,7 +156,7 @@ func (d *Discovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) {
 Profile: d.profile,
 })
 if err != nil {
-return nil, fmt.Errorf("could not create aws session: %s", err)
+return nil, errors.Wrap(err, "could not create aws session")
 }
 var ec2s *ec2.EC2
@@ -247,7 +248,7 @@ func (d *Discovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) {
 }
 return true
 }); err != nil {
-return nil, fmt.Errorf("could not describe instances: %s", err)
+return nil, errors.Wrap(err, "could not describe instances")
 }
 return []*targetgroup.Group{tg}, nil
 }

View file

@@ -16,7 +16,6 @@ package file
 import (
 "context"
 "encoding/json"
-"errors"
 "fmt"
 "io/ioutil"
 "os"
@@ -28,11 +27,13 @@ import (
 "github.com/go-kit/kit/log"
 "github.com/go-kit/kit/log/level"
+"github.com/pkg/errors"
 "github.com/prometheus/client_golang/prometheus"
 "github.com/prometheus/common/model"
+fsnotify "gopkg.in/fsnotify/fsnotify.v1"
+yaml "gopkg.in/yaml.v2"
 "github.com/prometheus/prometheus/discovery/targetgroup"
-"gopkg.in/fsnotify/fsnotify.v1"
-"gopkg.in/yaml.v2"
 )
 var (
@@ -59,11 +60,11 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 return err
 }
 if len(c.Files) == 0 {
-return fmt.Errorf("file service discovery config must contain at least one path name")
+return errors.New("file service discovery config must contain at least one path name")
 }
 for _, name := range c.Files {
 if !patFileSDName.MatchString(name) {
-return fmt.Errorf("path name %q is not valid for file discovery", name)
+return errors.Errorf("path name %q is not valid for file discovery", name)
 }
 }
 return nil
@@ -382,7 +383,7 @@ func (d *Discovery) readFile(filename string) ([]*targetgroup.Group, error) {
 return nil, err
 }
 default:
-panic(fmt.Errorf("discovery.File.readFile: unhandled file extension %q", ext))
+panic(errors.Errorf("discovery.File.readFile: unhandled file extension %q", ext))
 }
 for i, tg := range targetGroups {

View file

@@ -22,6 +22,7 @@ import (
 "time"
 "github.com/go-kit/kit/log"
+"github.com/pkg/errors"
 "github.com/prometheus/common/model"
 "golang.org/x/oauth2/google"
 compute "google.golang.org/api/compute/v1"
@@ -83,10 +84,10 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 return err
 }
 if c.Project == "" {
-return fmt.Errorf("GCE SD configuration requires a project")
+return errors.New("GCE SD configuration requires a project")
 }
 if c.Zone == "" {
-return fmt.Errorf("GCE SD configuration requires a zone")
+return errors.New("GCE SD configuration requires a zone")
 }
 return nil
 }
@@ -117,11 +118,11 @@ func NewDiscovery(conf SDConfig, logger log.Logger) (*Discovery, error) {
 var err error
 d.client, err = google.DefaultClient(context.Background(), compute.ComputeReadonlyScope)
 if err != nil {
-return nil, fmt.Errorf("error setting up communication with GCE service: %s", err)
+return nil, errors.Wrap(err, "error setting up communication with GCE service")
 }
 d.svc, err = compute.New(d.client)
 if err != nil {
-return nil, fmt.Errorf("error setting up communication with GCE service: %s", err)
+return nil, errors.Wrap(err, "error setting up communication with GCE service")
 }
 d.isvc = compute.NewInstancesService(d.svc)
@@ -200,7 +201,7 @@ func (d *Discovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) {
 return nil
 })
 if err != nil {
-return nil, fmt.Errorf("error retrieving refresh targets from gce: %s", err)
+return nil, errors.Wrap(err, "error retrieving refresh targets from gce")
 }
 return []*targetgroup.Group{tg}, nil
 }

View file

@@ -15,17 +15,18 @@ package kubernetes
 import (
 "context"
-"fmt"
 "net"
 "strconv"
 "github.com/go-kit/kit/log"
 "github.com/go-kit/kit/log/level"
+"github.com/pkg/errors"
 "github.com/prometheus/common/model"
-"github.com/prometheus/prometheus/discovery/targetgroup"
 apiv1 "k8s.io/api/core/v1"
 "k8s.io/client-go/tools/cache"
 "k8s.io/client-go/util/workqueue"
+"github.com/prometheus/prometheus/discovery/targetgroup"
 )
 // Endpoints discovers new endpoint targets.
@@ -178,7 +179,7 @@ func convertToEndpoints(o interface{}) (*apiv1.Endpoints, error) {
 return endpoints, nil
 }
-return nil, fmt.Errorf("received unexpected object: %v", o)
+return nil, errors.Errorf("received unexpected object: %v", o)
 }
 func endpointsSource(ep *apiv1.Endpoints) string {

View file

@@ -15,16 +15,17 @@ package kubernetes
 import (
 "context"
-"fmt"
 "github.com/go-kit/kit/log"
 "github.com/go-kit/kit/log/level"
+"github.com/pkg/errors"
 "github.com/prometheus/common/model"
-"github.com/prometheus/prometheus/discovery/targetgroup"
-"github.com/prometheus/prometheus/util/strutil"
 "k8s.io/api/extensions/v1beta1"
 "k8s.io/client-go/tools/cache"
 "k8s.io/client-go/util/workqueue"
+"github.com/prometheus/prometheus/discovery/targetgroup"
+"github.com/prometheus/prometheus/util/strutil"
 )
 // Ingress implements discovery of Kubernetes ingresss.
@@ -118,7 +119,7 @@ func convertToIngress(o interface{}) (*v1beta1.Ingress, error) {
 return ingress, nil
 }
-return nil, fmt.Errorf("received unexpected object: %v", o)
+return nil, errors.Errorf("received unexpected object: %v", o)
 }
 func ingressSource(s *v1beta1.Ingress) string {

View file

@@ -15,13 +15,13 @@ package kubernetes
 import (
 "context"
-"fmt"
 "reflect"
 "sync"
 "time"
 "github.com/go-kit/kit/log"
 "github.com/go-kit/kit/log/level"
+"github.com/pkg/errors"
 "github.com/prometheus/client_golang/prometheus"
 config_util "github.com/prometheus/common/config"
 "github.com/prometheus/common/model"
@@ -80,7 +80,7 @@ func (c *Role) UnmarshalYAML(unmarshal func(interface{}) error) error {
 case RoleNode, RolePod, RoleService, RoleEndpoint, RoleIngress:
 return nil
 default:
-return fmt.Errorf("unknown Kubernetes SD role %q", *c)
+return errors.Errorf("unknown Kubernetes SD role %q", *c)
 }
 }
@@ -101,14 +101,14 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 return err
 }
 if c.Role == "" {
-return fmt.Errorf("role missing (one of: pod, service, endpoints, node, ingress)")
+return errors.Errorf("role missing (one of: pod, service, endpoints, node, ingress)")
 }
 err = c.HTTPClientConfig.Validate()
 if err != nil {
 return err
 }
 if c.APIServer.URL == nil && !reflect.DeepEqual(c.HTTPClientConfig, config_util.HTTPClientConfig{}) {
-return fmt.Errorf("to use custom HTTP client configuration please provide the 'api_server' URL explicitly")
+return errors.Errorf("to use custom HTTP client configuration please provide the 'api_server' URL explicitly")
 }
 return nil
 }

View file

@@ -15,18 +15,19 @@ package kubernetes
 import (
 "context"
-"fmt"
 "net"
 "strconv"
 "github.com/go-kit/kit/log"
 "github.com/go-kit/kit/log/level"
+"github.com/pkg/errors"
 "github.com/prometheus/common/model"
-"github.com/prometheus/prometheus/discovery/targetgroup"
-"github.com/prometheus/prometheus/util/strutil"
 apiv1 "k8s.io/api/core/v1"
 "k8s.io/client-go/tools/cache"
 "k8s.io/client-go/util/workqueue"
+"github.com/prometheus/prometheus/discovery/targetgroup"
+"github.com/prometheus/prometheus/util/strutil"
 )
 const (
@@ -127,7 +128,7 @@ func convertToNode(o interface{}) (*apiv1.Node, error) {
 return node, nil
 }
-return nil, fmt.Errorf("received unexpected object: %v", o)
+return nil, errors.Errorf("received unexpected object: %v", o)
 }
 func nodeSource(n *apiv1.Node) string {
@@ -214,5 +215,5 @@ func nodeAddress(node *apiv1.Node) (string, map[apiv1.NodeAddressType][]string,
 if addresses, ok := m[apiv1.NodeHostName]; ok {
 return addresses[0], m, nil
 }
-return "", m, fmt.Errorf("host address unknown")
+return "", m, errors.New("host address unknown")
 }

View file

@@ -15,13 +15,13 @@ package kubernetes
 import (
 "context"
-"fmt"
 "net"
 "strconv"
 "strings"
 "github.com/go-kit/kit/log"
 "github.com/go-kit/kit/log/level"
+"github.com/pkg/errors"
 "github.com/prometheus/common/model"
 apiv1 "k8s.io/api/core/v1"
 metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
@@ -131,7 +131,7 @@ func convertToPod(o interface{}) (*apiv1.Pod, error) {
 return pod, nil
 }
-return nil, fmt.Errorf("received unexpected object: %v", o)
+return nil, errors.Errorf("received unexpected object: %v", o)
 }
 const (

View file

@@ -15,12 +15,12 @@ package kubernetes
 import (
 "context"
-"fmt"
 "net"
 "strconv"
 "github.com/go-kit/kit/log"
 "github.com/go-kit/kit/log/level"
+"github.com/pkg/errors"
 "github.com/prometheus/common/model"
 apiv1 "k8s.io/api/core/v1"
 "k8s.io/client-go/tools/cache"
@@ -123,7 +123,7 @@ func convertToService(o interface{}) (*apiv1.Service, error) {
 if ok {
 return service, nil
 }
-return nil, fmt.Errorf("received unexpected object: %v", o)
+return nil, errors.Errorf("received unexpected object: %v", o)
 }
 func serviceSource(s *apiv1.Service) string {

View file

@ -26,6 +26,7 @@ import (
"time" "time"
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/pkg/errors"
config_util "github.com/prometheus/common/config" config_util "github.com/prometheus/common/config"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
@ -79,16 +80,16 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
return err return err
} }
if len(c.Servers) == 0 { if len(c.Servers) == 0 {
return fmt.Errorf("marathon_sd: must contain at least one Marathon server") return errors.New("marathon_sd: must contain at least one Marathon server")
} }
if len(c.AuthToken) > 0 && len(c.AuthTokenFile) > 0 { if len(c.AuthToken) > 0 && len(c.AuthTokenFile) > 0 {
return fmt.Errorf("marathon_sd: at most one of auth_token & auth_token_file must be configured") return errors.New("marathon_sd: at most one of auth_token & auth_token_file must be configured")
} }
if c.HTTPClientConfig.BasicAuth != nil && (len(c.AuthToken) > 0 || len(c.AuthTokenFile) > 0) { if c.HTTPClientConfig.BasicAuth != nil && (len(c.AuthToken) > 0 || len(c.AuthTokenFile) > 0) {
return fmt.Errorf("marathon_sd: at most one of basic_auth, auth_token & auth_token_file must be configured") return errors.New("marathon_sd: at most one of basic_auth, auth_token & auth_token_file must be configured")
} }
if (len(c.HTTPClientConfig.BearerToken) > 0 || len(c.HTTPClientConfig.BearerTokenFile) > 0) && (len(c.AuthToken) > 0 || len(c.AuthTokenFile) > 0) { if (len(c.HTTPClientConfig.BearerToken) > 0 || len(c.HTTPClientConfig.BearerTokenFile) > 0) && (len(c.AuthToken) > 0 || len(c.AuthTokenFile) > 0) {
return fmt.Errorf("marathon_sd: at most one of bearer_token, bearer_token_file, auth_token & auth_token_file must be configured") return errors.New("marathon_sd: at most one of bearer_token, bearer_token_file, auth_token & auth_token_file must be configured")
} }
return c.HTTPClientConfig.Validate() return c.HTTPClientConfig.Validate()
} }
@ -163,7 +164,7 @@ func newAuthTokenFileRoundTripper(tokenFile string, rt http.RoundTripper) (http.
// fail-fast if we can't read the file. // fail-fast if we can't read the file.
_, err := ioutil.ReadFile(tokenFile) _, err := ioutil.ReadFile(tokenFile)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to read auth token file %s: %s", tokenFile, err) return nil, errors.Wrapf(err, "unable to read auth token file %s", tokenFile)
} }
return &authTokenFileRoundTripper{tokenFile, rt}, nil return &authTokenFileRoundTripper{tokenFile, rt}, nil
} }
@ -171,7 +172,7 @@ func newAuthTokenFileRoundTripper(tokenFile string, rt http.RoundTripper) (http.
func (rt *authTokenFileRoundTripper) RoundTrip(request *http.Request) (*http.Response, error) { func (rt *authTokenFileRoundTripper) RoundTrip(request *http.Request) (*http.Response, error) {
b, err := ioutil.ReadFile(rt.authTokenFile) b, err := ioutil.ReadFile(rt.authTokenFile)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to read auth token file %s: %s", rt.authTokenFile, err) return nil, errors.Wrapf(err, "unable to read auth token file %s", rt.authTokenFile)
} }
authToken := strings.TrimSpace(string(b)) authToken := strings.TrimSpace(string(b))
@ -308,13 +309,13 @@ func fetchApps(ctx context.Context, client *http.Client, url string) (*appList,
defer resp.Body.Close() defer resp.Body.Close()
if (resp.StatusCode < 200) || (resp.StatusCode >= 300) { if (resp.StatusCode < 200) || (resp.StatusCode >= 300) {
return nil, fmt.Errorf("non 2xx status '%v' response during marathon service discovery", resp.StatusCode) return nil, errors.Errorf("non 2xx status '%v' response during marathon service discovery", resp.StatusCode)
} }
var apps appList var apps appList
err = json.NewDecoder(resp.Body).Decode(&apps) err = json.NewDecoder(resp.Body).Decode(&apps)
if err != nil { if err != nil {
return nil, fmt.Errorf("%q: %v", url, err) return nil, errors.Wrapf(err, "%q", url)
} }
return &apps, nil return &apps, nil
} }
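The Wrapf calls in the hunks above (around ioutil.ReadFile and the JSON decode) keep the underlying error attached as a cause, which the old fmt.Errorf("...: %s", err) pattern flattened into plain text. A small self-contained sketch, with an assumed token path, of what that buys a caller via errors.Cause:

    package main

    import (
        "fmt"
        "io/ioutil"
        "os"

        "github.com/pkg/errors"
    )

    func readAuthToken(path string) (string, error) {
        b, err := ioutil.ReadFile(path)
        if err != nil {
            // Prints as "unable to read auth token file <path>: <cause>"
            // while keeping the original error reachable.
            return "", errors.Wrapf(err, "unable to read auth token file %s", path)
        }
        return string(b), nil
    }

    func main() {
        _, err := readAuthToken("/nonexistent/token")
        fmt.Println(err)
        // The cause is still the *os.PathError from ReadFile, so callers
        // can branch on the underlying failure.
        fmt.Println(os.IsNotExist(errors.Cause(err)))
    }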

View file

@ -23,7 +23,9 @@ import (
"github.com/gophercloud/gophercloud/openstack" "github.com/gophercloud/gophercloud/openstack"
"github.com/gophercloud/gophercloud/openstack/compute/v2/extensions/hypervisors" "github.com/gophercloud/gophercloud/openstack/compute/v2/extensions/hypervisors"
"github.com/gophercloud/gophercloud/pagination" "github.com/gophercloud/gophercloud/pagination"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/discovery/targetgroup" "github.com/prometheus/prometheus/discovery/targetgroup"
) )
@ -55,13 +57,13 @@ func (h *HypervisorDiscovery) refresh(ctx context.Context) ([]*targetgroup.Group
h.provider.Context = ctx h.provider.Context = ctx
err := openstack.Authenticate(h.provider, *h.authOpts) err := openstack.Authenticate(h.provider, *h.authOpts)
if err != nil { if err != nil {
return nil, fmt.Errorf("could not authenticate to OpenStack: %s", err) return nil, errors.Wrap(err, "could not authenticate to OpenStack")
} }
client, err := openstack.NewComputeV2(h.provider, gophercloud.EndpointOpts{ client, err := openstack.NewComputeV2(h.provider, gophercloud.EndpointOpts{
Region: h.region, Region: h.region,
}) })
if err != nil { if err != nil {
return nil, fmt.Errorf("could not create OpenStack compute session: %s", err) return nil, errors.Wrap(err, "could not create OpenStack compute session")
} }
tg := &targetgroup.Group{ tg := &targetgroup.Group{
@ -73,7 +75,7 @@ func (h *HypervisorDiscovery) refresh(ctx context.Context) ([]*targetgroup.Group
err = pagerHypervisors.EachPage(func(page pagination.Page) (bool, error) { err = pagerHypervisors.EachPage(func(page pagination.Page) (bool, error) {
hypervisorList, err := hypervisors.ExtractHypervisors(page) hypervisorList, err := hypervisors.ExtractHypervisors(page)
if err != nil { if err != nil {
return false, fmt.Errorf("could not extract hypervisors: %s", err) return false, errors.Wrap(err, "could not extract hypervisors")
} }
for _, hypervisor := range hypervisorList { for _, hypervisor := range hypervisorList {
labels := model.LabelSet{} labels := model.LabelSet{}

View file

@ -25,6 +25,7 @@ import (
"github.com/gophercloud/gophercloud/openstack/compute/v2/extensions/floatingips" "github.com/gophercloud/gophercloud/openstack/compute/v2/extensions/floatingips"
"github.com/gophercloud/gophercloud/openstack/compute/v2/servers" "github.com/gophercloud/gophercloud/openstack/compute/v2/servers"
"github.com/gophercloud/gophercloud/pagination" "github.com/gophercloud/gophercloud/pagination"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/discovery/targetgroup" "github.com/prometheus/prometheus/discovery/targetgroup"
@ -72,13 +73,13 @@ func (i *InstanceDiscovery) refresh(ctx context.Context) ([]*targetgroup.Group,
i.provider.Context = ctx i.provider.Context = ctx
err := openstack.Authenticate(i.provider, *i.authOpts) err := openstack.Authenticate(i.provider, *i.authOpts)
if err != nil { if err != nil {
return nil, fmt.Errorf("could not authenticate to OpenStack: %s", err) return nil, errors.Wrap(err, "could not authenticate to OpenStack")
} }
client, err := openstack.NewComputeV2(i.provider, gophercloud.EndpointOpts{ client, err := openstack.NewComputeV2(i.provider, gophercloud.EndpointOpts{
Region: i.region, Region: i.region,
}) })
if err != nil { if err != nil {
return nil, fmt.Errorf("could not create OpenStack compute session: %s", err) return nil, errors.Wrap(err, "could not create OpenStack compute session")
} }
// OpenStack API reference // OpenStack API reference
@ -89,7 +90,7 @@ func (i *InstanceDiscovery) refresh(ctx context.Context) ([]*targetgroup.Group,
err = pagerFIP.EachPage(func(page pagination.Page) (bool, error) { err = pagerFIP.EachPage(func(page pagination.Page) (bool, error) {
result, err := floatingips.ExtractFloatingIPs(page) result, err := floatingips.ExtractFloatingIPs(page)
if err != nil { if err != nil {
return false, fmt.Errorf("could not extract floatingips: %s", err) return false, errors.Wrap(err, "could not extract floatingips")
} }
for _, ip := range result { for _, ip := range result {
// Skip not associated ips // Skip not associated ips
@ -116,11 +117,11 @@ func (i *InstanceDiscovery) refresh(ctx context.Context) ([]*targetgroup.Group,
} }
err = pager.EachPage(func(page pagination.Page) (bool, error) { err = pager.EachPage(func(page pagination.Page) (bool, error) {
if ctx.Err() != nil { if ctx.Err() != nil {
return false, fmt.Errorf("could not extract instances: %s", ctx.Err()) return false, errors.Wrap(ctx.Err(), "could not extract instances")
} }
instanceList, err := servers.ExtractServers(page) instanceList, err := servers.ExtractServers(page)
if err != nil { if err != nil {
return false, fmt.Errorf("could not extract instances: %s", err) return false, errors.Wrap(err, "could not extract instances")
} }
for _, s := range instanceList { for _, s := range instanceList {

View file

@ -15,15 +15,14 @@ package openstack
import ( import (
"context" "context"
"errors"
"fmt"
"net/http" "net/http"
"time" "time"
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/gophercloud/gophercloud" "github.com/gophercloud/gophercloud"
"github.com/gophercloud/gophercloud/openstack" "github.com/gophercloud/gophercloud/openstack"
"github.com/mwitkow/go-conntrack" conntrack "github.com/mwitkow/go-conntrack"
"github.com/pkg/errors"
config_util "github.com/prometheus/common/config" config_util "github.com/prometheus/common/config"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
@ -80,7 +79,7 @@ func (c *Role) UnmarshalYAML(unmarshal func(interface{}) error) error {
case OpenStackRoleHypervisor, OpenStackRoleInstance: case OpenStackRoleHypervisor, OpenStackRoleInstance:
return nil return nil
default: default:
return fmt.Errorf("unknown OpenStack SD role %q", *c) return errors.Errorf("unknown OpenStack SD role %q", *c)
} }
} }
@ -93,10 +92,10 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
return err return err
} }
if c.Role == "" { if c.Role == "" {
return fmt.Errorf("role missing (one of: instance, hypervisor)") return errors.New("role missing (one of: instance, hypervisor)")
} }
if c.Region == "" { if c.Region == "" {
return fmt.Errorf("openstack SD configuration requires a region") return errors.New("openstack SD configuration requires a region")
} }
return nil return nil
} }

View file

@ -24,7 +24,8 @@ import (
"time" "time"
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/mwitkow/go-conntrack" conntrack "github.com/mwitkow/go-conntrack"
"github.com/pkg/errors"
config_util "github.com/prometheus/common/config" config_util "github.com/prometheus/common/config"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
@ -70,16 +71,16 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
return err return err
} }
if c.Account == "" { if c.Account == "" {
return fmt.Errorf("triton SD configuration requires an account") return errors.New("triton SD configuration requires an account")
} }
if c.DNSSuffix == "" { if c.DNSSuffix == "" {
return fmt.Errorf("triton SD configuration requires a dns_suffix") return errors.New("triton SD configuration requires a dns_suffix")
} }
if c.Endpoint == "" { if c.Endpoint == "" {
return fmt.Errorf("triton SD configuration requires an endpoint") return errors.New("triton SD configuration requires an endpoint")
} }
if c.RefreshInterval <= 0 { if c.RefreshInterval <= 0 {
return fmt.Errorf("triton SD configuration requires RefreshInterval to be a positive integer") return errors.New("triton SD configuration requires RefreshInterval to be a positive integer")
} }
return nil return nil
} }
@ -153,20 +154,20 @@ func (d *Discovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) {
req = req.WithContext(ctx) req = req.WithContext(ctx)
resp, err := d.client.Do(req) resp, err := d.client.Do(req)
if err != nil { if err != nil {
return nil, fmt.Errorf("an error occurred when requesting targets from the discovery endpoint: %s", err) return nil, errors.Wrap(err, "an error occurred when requesting targets from the discovery endpoint")
} }
defer resp.Body.Close() defer resp.Body.Close()
data, err := ioutil.ReadAll(resp.Body) data, err := ioutil.ReadAll(resp.Body)
if err != nil { if err != nil {
return nil, fmt.Errorf("an error occurred when reading the response body: %s", err) return nil, errors.Wrap(err, "an error occurred when reading the response body")
} }
dr := discoveryResponse{} dr := discoveryResponse{}
err = json.Unmarshal(data, &dr) err = json.Unmarshal(data, &dr)
if err != nil { if err != nil {
return nil, fmt.Errorf("an error occurred unmarshaling the discovery response json: %s", err) return nil, errors.Wrap(err, "an error occurred unmarshaling the discovery response json")
} }
for _, container := range dr.Containers { for _, container := range dr.Containers {

View file

@ -23,6 +23,7 @@ import (
"time" "time"
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/samuel/go-zookeeper/zk" "github.com/samuel/go-zookeeper/zk"
@ -58,14 +59,14 @@ func (c *ServersetSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) err
return err return err
} }
if len(c.Servers) == 0 { if len(c.Servers) == 0 {
return fmt.Errorf("serverset SD config must contain at least one Zookeeper server") return errors.New("serverset SD config must contain at least one Zookeeper server")
} }
if len(c.Paths) == 0 { if len(c.Paths) == 0 {
return fmt.Errorf("serverset SD config must contain at least one path") return errors.New("serverset SD config must contain at least one path")
} }
for _, path := range c.Paths { for _, path := range c.Paths {
if !strings.HasPrefix(path, "/") { if !strings.HasPrefix(path, "/") {
return fmt.Errorf("serverset SD config paths must begin with '/': %s", path) return errors.Errorf("serverset SD config paths must begin with '/': %s", path)
} }
} }
return nil return nil
@ -87,14 +88,14 @@ func (c *NerveSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
return err return err
} }
if len(c.Servers) == 0 { if len(c.Servers) == 0 {
return fmt.Errorf("nerve SD config must contain at least one Zookeeper server") return errors.New("nerve SD config must contain at least one Zookeeper server")
} }
if len(c.Paths) == 0 { if len(c.Paths) == 0 {
return fmt.Errorf("nerve SD config must contain at least one path") return errors.New("nerve SD config must contain at least one path")
} }
for _, path := range c.Paths { for _, path := range c.Paths {
if !strings.HasPrefix(path, "/") { if !strings.HasPrefix(path, "/") {
return fmt.Errorf("nerve SD config paths must begin with '/': %s", path) return errors.Errorf("nerve SD config paths must begin with '/': %s", path)
} }
} }
return nil return nil
@ -223,7 +224,7 @@ func parseServersetMember(data []byte, path string) (model.LabelSet, error) {
member := serversetMember{} member := serversetMember{}
if err := json.Unmarshal(data, &member); err != nil { if err := json.Unmarshal(data, &member); err != nil {
return nil, fmt.Errorf("error unmarshaling serverset member %q: %s", path, err) return nil, errors.Wrapf(err, "error unmarshaling serverset member %q", path)
} }
labels := model.LabelSet{} labels := model.LabelSet{}
@ -265,7 +266,7 @@ func parseNerveMember(data []byte, path string) (model.LabelSet, error) {
member := nerveMember{} member := nerveMember{}
err := json.Unmarshal(data, &member) err := json.Unmarshal(data, &member)
if err != nil { if err != nil {
return nil, fmt.Errorf("error unmarshaling nerve member %q: %s", path, err) return nil, errors.Wrapf(err, "error unmarshaling nerve member %q", path)
} }
labels := model.LabelSet{} labels := model.LabelSet{}
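The validation hunks in this and the neighbouring service-discovery files swap fmt.Errorf for errors.New wherever the message contains no formatting directives. A short sketch of the same shape, with an invented config type; note that errors.New from pkg/errors, unlike the standard library's, also captures a stack trace at the call site:

    package main

    import (
        "fmt"

        "github.com/pkg/errors"
    )

    type sdConfig struct {
        Servers []string
        Paths   []string
    }

    func (c *sdConfig) validate() error {
        if len(c.Servers) == 0 {
            // A fixed message needs no formatting, so errors.New suffices.
            return errors.New("sd config must contain at least one server")
        }
        if len(c.Paths) == 0 {
            return errors.New("sd config must contain at least one path")
        }
        return nil
    }

    func main() {
        fmt.Println((&sdConfig{}).validate())
    }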

View file

@ -22,11 +22,12 @@ import (
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level" "github.com/go-kit/kit/log/level"
influx "github.com/influxdata/influxdb/client/v2"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/prompb"
influx "github.com/influxdata/influxdb/client/v2" "github.com/prometheus/prometheus/prompb"
) )
// Client allows sending batches of Prometheus samples to InfluxDB. // Client allows sending batches of Prometheus samples to InfluxDB.
@ -125,7 +126,7 @@ func (c *Client) Read(req *prompb.ReadRequest) (*prompb.ReadResponse, error) {
return nil, err return nil, err
} }
if resp.Err != "" { if resp.Err != "" {
return nil, fmt.Errorf(resp.Err) return nil, errors.New(resp.Err)
} }
if err = mergeResult(labelsToSeries, resp.Results); err != nil { if err = mergeResult(labelsToSeries, resp.Results); err != nil {
@ -158,7 +159,7 @@ func (c *Client) buildCommand(q *prompb.Query) (string, error) {
from = fmt.Sprintf("FROM %q./^%s$/", c.retentionPolicy, escapeSlashes(m.Value)) from = fmt.Sprintf("FROM %q./^%s$/", c.retentionPolicy, escapeSlashes(m.Value))
default: default:
// TODO: Figure out how to support these efficiently. // TODO: Figure out how to support these efficiently.
return "", fmt.Errorf("non-equal or regex-non-equal matchers are not supported on the metric name yet") return "", errors.New("non-equal or regex-non-equal matchers are not supported on the metric name yet")
} }
continue continue
} }
@ -173,7 +174,7 @@ func (c *Client) buildCommand(q *prompb.Query) (string, error) {
case prompb.LabelMatcher_NRE: case prompb.LabelMatcher_NRE:
matchers = append(matchers, fmt.Sprintf("%q !~ /^%s$/", m.Name, escapeSlashes(m.Value))) matchers = append(matchers, fmt.Sprintf("%q !~ /^%s$/", m.Name, escapeSlashes(m.Value)))
default: default:
return "", fmt.Errorf("unknown match type %v", m.Type) return "", errors.Errorf("unknown match type %v", m.Type)
} }
} }
matchers = append(matchers, fmt.Sprintf("time >= %vms", q.StartTimestampMs)) matchers = append(matchers, fmt.Sprintf("time >= %vms", q.StartTimestampMs))
@ -252,27 +253,27 @@ func valuesToSamples(values [][]interface{}) ([]prompb.Sample, error) {
samples := make([]prompb.Sample, 0, len(values)) samples := make([]prompb.Sample, 0, len(values))
for _, v := range values { for _, v := range values {
if len(v) != 2 { if len(v) != 2 {
return nil, fmt.Errorf("bad sample tuple length, expected [<timestamp>, <value>], got %v", v) return nil, errors.Errorf("bad sample tuple length, expected [<timestamp>, <value>], got %v", v)
} }
jsonTimestamp, ok := v[0].(json.Number) jsonTimestamp, ok := v[0].(json.Number)
if !ok { if !ok {
return nil, fmt.Errorf("bad timestamp: %v", v[0]) return nil, errors.Errorf("bad timestamp: %v", v[0])
} }
jsonValue, ok := v[1].(json.Number) jsonValue, ok := v[1].(json.Number)
if !ok { if !ok {
return nil, fmt.Errorf("bad sample value: %v", v[1]) return nil, errors.Errorf("bad sample value: %v", v[1])
} }
timestamp, err := jsonTimestamp.Int64() timestamp, err := jsonTimestamp.Int64()
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to convert sample timestamp to int64: %v", err) return nil, errors.Wrap(err, "unable to convert sample timestamp to int64")
} }
value, err := jsonValue.Float64() value, err := jsonValue.Float64()
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to convert sample value to float64: %v", err) return nil, errors.Wrap(err, "unable to convert sample value to float64")
} }
samples = append(samples, prompb.Sample{ samples = append(samples, prompb.Sample{
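One change above deserves a note: fmt.Errorf(resp.Err) becomes errors.New(resp.Err). Passing a string the program does not control as a format string is error-prone, since any % in it is read as a formatting verb. A tiny sketch with an invented response message:

    package main

    import (
        "fmt"

        "github.com/pkg/errors"
    )

    func main() {
        // Imagine this text came back from the remote database.
        respErr := "retention policy exceeds 100% of quota"

        // As a format string, the % sequence is misinterpreted and the
        // message comes out mangled.
        fmt.Println(fmt.Errorf(respErr))

        // errors.New keeps the text verbatim.
        fmt.Println(errors.New(respErr))
    }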

View file

@ -17,7 +17,6 @@ import (
"bytes" "bytes"
"context" "context"
"encoding/json" "encoding/json"
"fmt"
"io/ioutil" "io/ioutil"
"math" "math"
"net/http" "net/http"
@ -26,6 +25,7 @@ import (
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level" "github.com/go-kit/kit/log/level"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
) )
@ -133,7 +133,7 @@ func (c *Client) Write(samples model.Samples) error {
if err := json.Unmarshal(buf, &r); err != nil { if err := json.Unmarshal(buf, &r); err != nil {
return err return err
} }
return fmt.Errorf("failed to write %d samples to OpenTSDB, %d succeeded", r["failed"], r["success"]) return errors.Errorf("failed to write %d samples to OpenTSDB, %d succeeded", r["failed"], r["success"])
} }
// Name identifies the client as an OpenTSDB client. // Name identifies the client as an OpenTSDB client.

View file

@ -17,6 +17,7 @@ import (
"bytes" "bytes"
"fmt" "fmt"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
) )
@ -97,13 +98,13 @@ func (tv *TagValue) UnmarshalJSON(json []byte) error {
for i, b := range json { for i, b := range json {
if i == 0 { if i == 0 {
if b != '"' { if b != '"' {
return fmt.Errorf("expected '\"', got %q", b) return errors.Errorf("expected '\"', got %q", b)
} }
continue continue
} }
if i == len(json)-1 { if i == len(json)-1 {
if b != '"' { if b != '"' {
return fmt.Errorf("expected '\"', got %q", b) return errors.Errorf("expected '\"', got %q", b)
} }
break break
} }
@ -129,7 +130,7 @@ func (tv *TagValue) UnmarshalJSON(json []byte) error {
parsedByte = (b - 55) << 4 parsedByte = (b - 55) << 4
escapeLevel = 2 escapeLevel = 2
default: default:
return fmt.Errorf( return errors.Errorf(
"illegal escape sequence at byte %d (%c)", "illegal escape sequence at byte %d (%c)",
i, b, i, b,
) )
@ -141,7 +142,7 @@ func (tv *TagValue) UnmarshalJSON(json []byte) error {
case b >= 'A' && b <= 'F': // A-F case b >= 'A' && b <= 'F': // A-F
parsedByte += b - 55 parsedByte += b - 55
default: default:
return fmt.Errorf( return errors.Errorf(
"illegal escape sequence at byte %d (%c)", "illegal escape sequence at byte %d (%c)",
i, b, i, b,
) )

View file

@ -30,6 +30,7 @@ import (
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level" "github.com/go-kit/kit/log/level"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
config_util "github.com/prometheus/common/config" config_util "github.com/prometheus/common/config"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
@ -511,7 +512,7 @@ func (n *Manager) sendOne(ctx context.Context, c *http.Client, url string, b []b
// Any HTTP status 2xx is OK. // Any HTTP status 2xx is OK.
if resp.StatusCode/100 != 2 { if resp.StatusCode/100 != 2 {
return fmt.Errorf("bad response status %v", resp.Status) return errors.Errorf("bad response status %s", resp.Status)
} }
return err return err
} }
@ -662,7 +663,7 @@ func alertmanagerFromGroup(tg *targetgroup.Group, cfg *config.AlertmanagerConfig
case "https": case "https":
addr = addr + ":443" addr = addr + ":443"
default: default:
return nil, nil, fmt.Errorf("invalid scheme: %q", cfg.Scheme) return nil, nil, errors.Errorf("invalid scheme: %q", cfg.Scheme)
} }
lb.Set(model.AddressLabel, addr) lb.Set(model.AddressLabel, addr)
} }

View file

@ -19,6 +19,7 @@ import (
"regexp" "regexp"
"strings" "strings"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
@ -66,7 +67,7 @@ func (a *Action) UnmarshalYAML(unmarshal func(interface{}) error) error {
*a = act *a = act
return nil return nil
} }
return fmt.Errorf("unknown relabel action %q", s) return errors.Errorf("unknown relabel action %q", s)
} }
// Config is the configuration for relabeling of target label sets. // Config is the configuration for relabeling of target label sets.
@ -100,19 +101,19 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
c.Regex = MustNewRegexp("") c.Regex = MustNewRegexp("")
} }
if c.Modulus == 0 && c.Action == HashMod { if c.Modulus == 0 && c.Action == HashMod {
return fmt.Errorf("relabel configuration for hashmod requires non-zero modulus") return errors.Errorf("relabel configuration for hashmod requires non-zero modulus")
} }
if (c.Action == Replace || c.Action == HashMod) && c.TargetLabel == "" { if (c.Action == Replace || c.Action == HashMod) && c.TargetLabel == "" {
return fmt.Errorf("relabel configuration for %s action requires 'target_label' value", c.Action) return errors.Errorf("relabel configuration for %s action requires 'target_label' value", c.Action)
} }
if c.Action == Replace && !relabelTarget.MatchString(c.TargetLabel) { if c.Action == Replace && !relabelTarget.MatchString(c.TargetLabel) {
return fmt.Errorf("%q is invalid 'target_label' for %s action", c.TargetLabel, c.Action) return errors.Errorf("%q is invalid 'target_label' for %s action", c.TargetLabel, c.Action)
} }
if c.Action == LabelMap && !relabelTarget.MatchString(c.Replacement) { if c.Action == LabelMap && !relabelTarget.MatchString(c.Replacement) {
return fmt.Errorf("%q is invalid 'replacement' for %s action", c.Replacement, c.Action) return errors.Errorf("%q is invalid 'replacement' for %s action", c.Replacement, c.Action)
} }
if c.Action == HashMod && !model.LabelName(c.TargetLabel).IsValid() { if c.Action == HashMod && !model.LabelName(c.TargetLabel).IsValid() {
return fmt.Errorf("%q is invalid 'target_label' for %s action", c.TargetLabel, c.Action) return errors.Errorf("%q is invalid 'target_label' for %s action", c.TargetLabel, c.Action)
} }
if c.Action == LabelDrop || c.Action == LabelKeep { if c.Action == LabelDrop || c.Action == LabelKeep {
@ -121,7 +122,7 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
c.Modulus != DefaultRelabelConfig.Modulus || c.Modulus != DefaultRelabelConfig.Modulus ||
c.Separator != DefaultRelabelConfig.Separator || c.Separator != DefaultRelabelConfig.Separator ||
c.Replacement != DefaultRelabelConfig.Replacement { c.Replacement != DefaultRelabelConfig.Replacement {
return fmt.Errorf("%s action requires only 'regex', and no other fields", c.Action) return errors.Errorf("%s action requires only 'regex', and no other fields", c.Action)
} }
} }
@ -247,7 +248,7 @@ func relabel(lset labels.Labels, cfg *Config) labels.Labels {
} }
} }
default: default:
panic(fmt.Errorf("relabel: unknown relabel action type %q", cfg.Action)) panic(errors.Errorf("relabel: unknown relabel action type %q", cfg.Action))
} }
return lb.Labels() return lb.Labels()

View file

@ -15,16 +15,16 @@ package rulefmt
import ( import (
"context" "context"
"fmt"
"io/ioutil" "io/ioutil"
"time" "time"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
yaml "gopkg.in/yaml.v2"
"github.com/prometheus/prometheus/pkg/timestamp" "github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/template" "github.com/prometheus/prometheus/template"
"gopkg.in/yaml.v2"
) )
// Error represents semantical errors on parsing rule groups. // Error represents semantical errors on parsing rule groups.
@ -174,7 +174,7 @@ func testTemplateParsing(rl *Rule) (errs []error) {
for _, val := range rl.Labels { for _, val := range rl.Labels {
err := parseTest(val) err := parseTest(val)
if err != nil { if err != nil {
errs = append(errs, fmt.Errorf("msg=%s", err.Error())) errs = append(errs, errors.Errorf("msg=%s", err.Error()))
} }
} }
@ -182,7 +182,7 @@ func testTemplateParsing(rl *Rule) (errs []error) {
for _, val := range rl.Annotations { for _, val := range rl.Annotations {
err := parseTest(val) err := parseTest(val)
if err != nil { if err != nil {
errs = append(errs, fmt.Errorf("msg=%s", err.Error())) errs = append(errs, errors.Errorf("msg=%s", err.Error()))
} }
} }

View file

@ -16,7 +16,7 @@
package textparse package textparse
import ( import (
"fmt" "github.com/pkg/errors"
) )
// Lex is called by the parser generated by "go tool yacc" to obtain each // Lex is called by the parser generated by "go tool yacc" to obtain each
@ -33,7 +33,7 @@ yystate0:
switch yyt := l.state; yyt { switch yyt := l.state; yyt {
default: default:
panic(fmt.Errorf(`invalid start condition %d`, yyt)) panic(errors.Errorf(`invalid start condition %d`, yyt))
case 0: // start condition: INITIAL case 0: // start condition: INITIAL
goto yystart1 goto yystart1
case 1: // start condition: sComment case 1: // start condition: sComment

View file

@ -17,8 +17,6 @@
package textparse package textparse
import ( import (
"errors"
"fmt"
"io" "io"
"math" "math"
"sort" "sort"
@ -26,6 +24,8 @@ import (
"strings" "strings"
"unicode/utf8" "unicode/utf8"
"github.com/pkg/errors"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/value" "github.com/prometheus/prometheus/pkg/value"
) )
@ -185,7 +185,7 @@ func (p *OpenMetricsParser) Next() (Entry, error) {
switch t := p.nextToken(); t { switch t := p.nextToken(); t {
case tEofWord: case tEofWord:
if t := p.nextToken(); t != tEOF { if t := p.nextToken(); t != tEOF {
return EntryInvalid, fmt.Errorf("unexpected data after # EOF") return EntryInvalid, errors.New("unexpected data after # EOF")
} }
return EntryInvalid, io.EOF return EntryInvalid, io.EOF
case tEOF: case tEOF:
@ -227,11 +227,11 @@ func (p *OpenMetricsParser) Next() (Entry, error) {
case "unknown": case "unknown":
p.mtype = MetricTypeUnknown p.mtype = MetricTypeUnknown
default: default:
return EntryInvalid, fmt.Errorf("invalid metric type %q", s) return EntryInvalid, errors.Errorf("invalid metric type %q", s)
} }
case tHelp: case tHelp:
if !utf8.Valid(p.text) { if !utf8.Valid(p.text) {
return EntryInvalid, fmt.Errorf("help text is not a valid utf8 string") return EntryInvalid, errors.New("help text is not a valid utf8 string")
} }
} }
switch t { switch t {
@ -244,7 +244,7 @@ func (p *OpenMetricsParser) Next() (Entry, error) {
u := yoloString(p.text) u := yoloString(p.text)
if len(u) > 0 { if len(u) > 0 {
if !strings.HasSuffix(m, u) || len(m) < len(u)+1 || p.l.b[p.offsets[1]-len(u)-1] != '_' { if !strings.HasSuffix(m, u) || len(m) < len(u)+1 || p.l.b[p.offsets[1]-len(u)-1] != '_' {
return EntryInvalid, fmt.Errorf("unit not a suffix of metric %q", m) return EntryInvalid, errors.Errorf("unit not a suffix of metric %q", m)
} }
} }
return EntryUnit, nil return EntryUnit, nil
@ -293,7 +293,7 @@ func (p *OpenMetricsParser) Next() (Entry, error) {
return EntrySeries, nil return EntrySeries, nil
default: default:
err = fmt.Errorf("%q %q is not a valid start token", t, string(p.l.cur())) err = errors.Errorf("%q %q is not a valid start token", t, string(p.l.cur()))
} }
return EntryInvalid, err return EntryInvalid, err
} }
@ -336,7 +336,7 @@ func (p *OpenMetricsParser) parseLVals() error {
return parseError("expected label value", t) return parseError("expected label value", t)
} }
if !utf8.Valid(p.l.buf()) { if !utf8.Valid(p.l.buf()) {
return fmt.Errorf("invalid UTF-8 label value") return errors.New("invalid UTF-8 label value")
} }
// The openMetricsLexer ensures the value string is quoted. Strip first // The openMetricsLexer ensures the value string is quoted. Strip first

View file

@ -16,7 +16,7 @@
package textparse package textparse
import ( import (
"fmt" "github.com/pkg/errors"
) )
const ( const (
@ -44,7 +44,7 @@ yystate0:
switch yyt := l.state; yyt { switch yyt := l.state; yyt {
default: default:
panic(fmt.Errorf(`invalid start condition %d`, yyt)) panic(errors.Errorf(`invalid start condition %d`, yyt))
case 0: // start condition: INITIAL case 0: // start condition: INITIAL
goto yystart1 goto yystart1
case 1: // start condition: sComment case 1: // start condition: sComment

View file

@ -17,7 +17,6 @@
package textparse package textparse
import ( import (
"errors"
"fmt" "fmt"
"io" "io"
"math" "math"
@ -27,6 +26,8 @@ import (
"unicode/utf8" "unicode/utf8"
"unsafe" "unsafe"
"github.com/pkg/errors"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/value" "github.com/prometheus/prometheus/pkg/value"
) )
@ -244,7 +245,7 @@ func (p *PromParser) nextToken() token {
} }
func parseError(exp string, got token) error { func parseError(exp string, got token) error {
return fmt.Errorf("%s, got %q", exp, got) return errors.Errorf("%s, got %q", exp, got)
} }
// Next advances the parser to the next sample. It returns false if no // Next advances the parser to the next sample. It returns false if no
@ -293,11 +294,11 @@ func (p *PromParser) Next() (Entry, error) {
case "untyped": case "untyped":
p.mtype = MetricTypeUnknown p.mtype = MetricTypeUnknown
default: default:
return EntryInvalid, fmt.Errorf("invalid metric type %q", s) return EntryInvalid, errors.Errorf("invalid metric type %q", s)
} }
case tHelp: case tHelp:
if !utf8.Valid(p.text) { if !utf8.Valid(p.text) {
return EntryInvalid, fmt.Errorf("help text is not a valid utf8 string") return EntryInvalid, errors.Errorf("help text is not a valid utf8 string")
} }
} }
if t := p.nextToken(); t != tLinebreak { if t := p.nextToken(); t != tLinebreak {
@ -356,7 +357,7 @@ func (p *PromParser) Next() (Entry, error) {
return EntrySeries, nil return EntrySeries, nil
default: default:
err = fmt.Errorf("%q is not a valid start token", t) err = errors.Errorf("%q is not a valid start token", t)
} }
return EntryInvalid, err return EntryInvalid, err
} }
@ -380,7 +381,7 @@ func (p *PromParser) parseLVals() error {
return parseError("expected label value", t) return parseError("expected label value", t)
} }
if !utf8.Valid(p.l.buf()) { if !utf8.Valid(p.l.buf()) {
return fmt.Errorf("invalid UTF-8 label value") return errors.Errorf("invalid UTF-8 label value")
} }
// The promlexer ensures the value string is quoted. Strip first // The promlexer ensures the value string is quoted. Strip first

View file

@ -14,9 +14,10 @@
package promql package promql
import ( import (
"fmt"
"time" "time"
"github.com/pkg/errors"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
) )
@ -301,7 +302,7 @@ func Walk(v Visitor, node Node, path []Node) error {
// nothing to do // nothing to do
default: default:
panic(fmt.Errorf("promql.Walk: unhandled node type %T", node)) panic(errors.Errorf("promql.Walk: unhandled node type %T", node))
} }
_, err = v.Visit(nil, nil) _, err = v.Visit(nil, nil)

View file

@ -29,8 +29,10 @@ import (
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level" "github.com/go-kit/kit/log/level"
opentracing "github.com/opentracing/opentracing-go" opentracing "github.com/opentracing/opentracing-go"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/gate" "github.com/prometheus/prometheus/pkg/gate"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/timestamp" "github.com/prometheus/prometheus/pkg/timestamp"
@ -307,7 +309,7 @@ func (ng *Engine) NewRangeQuery(q storage.Queryable, qs string, start, end time.
return nil, err return nil, err
} }
if expr.Type() != ValueTypeVector && expr.Type() != ValueTypeScalar { if expr.Type() != ValueTypeVector && expr.Type() != ValueTypeScalar {
return nil, fmt.Errorf("invalid expression type %q for range query, must be Scalar or instant Vector", documentedType(expr.Type())) return nil, errors.Errorf("invalid expression type %q for range query, must be Scalar or instant Vector", documentedType(expr.Type()))
} }
qry := ng.newQuery(q, expr, start, end, interval) qry := ng.newQuery(q, expr, start, end, interval)
qry.q = qs qry.q = qs
@ -391,7 +393,7 @@ func (ng *Engine) exec(ctx context.Context, q *query) (Value, storage.Warnings,
return nil, nil, s(ctx) return nil, nil, s(ctx)
} }
panic(fmt.Errorf("promql.Engine.exec: unhandled statement of type %T", q.Statement())) panic(errors.Errorf("promql.Engine.exec: unhandled statement of type %T", q.Statement()))
} }
func timeMilliseconds(t time.Time) int64 { func timeMilliseconds(t time.Time) int64 {
@ -441,7 +443,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
mat, ok := val.(Matrix) mat, ok := val.(Matrix)
if !ok { if !ok {
panic(fmt.Errorf("promql.Engine.exec: invalid expression type %q", val.Type())) panic(errors.Errorf("promql.Engine.exec: invalid expression type %q", val.Type()))
} }
query.matrix = mat query.matrix = mat
switch s.Expr.Type() { switch s.Expr.Type() {
@ -459,7 +461,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
case ValueTypeMatrix: case ValueTypeMatrix:
return mat, warnings, nil return mat, warnings, nil
default: default:
panic(fmt.Errorf("promql.Engine.exec: unexpected expression type %q", s.Expr.Type())) panic(errors.Errorf("promql.Engine.exec: unexpected expression type %q", s.Expr.Type()))
} }
} }
@ -482,7 +484,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
mat, ok := val.(Matrix) mat, ok := val.(Matrix)
if !ok { if !ok {
panic(fmt.Errorf("promql.Engine.exec: invalid expression type %q", val.Type())) panic(errors.Errorf("promql.Engine.exec: invalid expression type %q", val.Type()))
} }
query.matrix = mat query.matrix = mat
@ -666,7 +668,7 @@ type evaluator struct {
// errorf causes a panic with the input formatted into an error. // errorf causes a panic with the input formatted into an error.
func (ev *evaluator) errorf(format string, args ...interface{}) { func (ev *evaluator) errorf(format string, args ...interface{}) {
ev.error(fmt.Errorf(format, args...)) ev.error(errors.Errorf(format, args...))
} }
// error causes a panic with the given error. // error causes a panic with the given error.
@ -686,7 +688,7 @@ func (ev *evaluator) recover(errp *error) {
buf = buf[:runtime.Stack(buf, false)] buf = buf[:runtime.Stack(buf, false)]
level.Error(ev.logger).Log("msg", "runtime panic in parser", "err", e, "stacktrace", string(buf)) level.Error(ev.logger).Log("msg", "runtime panic in parser", "err", e, "stacktrace", string(buf))
*errp = fmt.Errorf("unexpected error: %s", err) *errp = errors.Wrap(err, "unexpected error")
} else { } else {
*errp = e.(error) *errp = e.(error)
} }
@ -1127,7 +1129,7 @@ func (ev *evaluator) eval(expr Expr) Value {
case *MatrixSelector: case *MatrixSelector:
if ev.startTimestamp != ev.endTimestamp { if ev.startTimestamp != ev.endTimestamp {
panic(fmt.Errorf("cannot do range evaluation of matrix selector")) panic(errors.New("cannot do range evaluation of matrix selector"))
} }
return ev.matrixSelector(e) return ev.matrixSelector(e)
@ -1160,7 +1162,7 @@ func (ev *evaluator) eval(expr Expr) Value {
return res return res
} }
panic(fmt.Errorf("unhandled expression of type: %T", expr)) panic(errors.Errorf("unhandled expression of type: %T", expr))
} }
func durationToInt64Millis(d time.Duration) int64 { func durationToInt64Millis(d time.Duration) int64 {
@ -1629,7 +1631,7 @@ func scalarBinop(op ItemType, lhs, rhs float64) float64 {
case ItemLTE: case ItemLTE:
return btos(lhs <= rhs) return btos(lhs <= rhs)
} }
panic(fmt.Errorf("operator %q not allowed for Scalar operations", op)) panic(errors.Errorf("operator %q not allowed for Scalar operations", op))
} }
// vectorElemBinop evaluates a binary operation between two Vector elements. // vectorElemBinop evaluates a binary operation between two Vector elements.
@ -1660,7 +1662,7 @@ func vectorElemBinop(op ItemType, lhs, rhs float64) (float64, bool) {
case ItemLTE: case ItemLTE:
return lhs, lhs <= rhs return lhs, lhs <= rhs
} }
panic(fmt.Errorf("operator %q not allowed for operations between Vectors", op)) panic(errors.Errorf("operator %q not allowed for operations between Vectors", op))
} }
type groupedAggregation struct { type groupedAggregation struct {
@ -1824,7 +1826,7 @@ func (ev *evaluator) aggregation(op ItemType, grouping []string, without bool, p
group.heap = append(group.heap, s) group.heap = append(group.heap, s)
default: default:
panic(fmt.Errorf("expected aggregation operator but got %q", op)) panic(errors.Errorf("expected aggregation operator but got %q", op))
} }
} }

View file

@ -15,12 +15,13 @@ package promql
import ( import (
"context" "context"
"fmt"
"reflect" "reflect"
"testing" "testing"
"time" "time"
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/pkg/errors"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/util/testutil" "github.com/prometheus/prometheus/util/testutil"
@ -194,7 +195,7 @@ func TestQueryError(t *testing.T) {
Timeout: 10 * time.Second, Timeout: 10 * time.Second,
} }
engine := NewEngine(opts) engine := NewEngine(opts)
errStorage := ErrStorage{fmt.Errorf("storage error")} errStorage := ErrStorage{errors.New("storage error")}
queryable := storage.QueryableFunc(func(ctx context.Context, mint, maxt int64) (storage.Querier, error) { queryable := storage.QueryableFunc(func(ctx context.Context, mint, maxt int64) (storage.Querier, error) {
return &errQuerier{err: errStorage}, nil return &errQuerier{err: errStorage}, nil
}) })
@ -641,7 +642,7 @@ func TestRecoverEvaluatorError(t *testing.T) {
ev := &evaluator{logger: log.NewNopLogger()} ev := &evaluator{logger: log.NewNopLogger()}
var err error var err error
e := fmt.Errorf("custom error") e := errors.New("custom error")
defer func() { defer func() {
if err.Error() != e.Error() { if err.Error() != e.Error() {

View file

@ -14,7 +14,6 @@
package promql package promql
import ( import (
"fmt"
"math" "math"
"regexp" "regexp"
"sort" "sort"
@ -22,7 +21,9 @@ import (
"strings" "strings"
"time" "time"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
) )
@ -232,10 +233,10 @@ func funcHoltWinters(vals []Value, args Expressions, enh *EvalNodeHelper) Vector
// Sanity check the input. // Sanity check the input.
if sf <= 0 || sf >= 1 { if sf <= 0 || sf >= 1 {
panic(fmt.Errorf("invalid smoothing factor. Expected: 0 < sf < 1, got: %f", sf)) panic(errors.Errorf("invalid smoothing factor. Expected: 0 < sf < 1, got: %f", sf))
} }
if tf <= 0 || tf >= 1 { if tf <= 0 || tf >= 1 {
panic(fmt.Errorf("invalid trend factor. Expected: 0 < tf < 1, got: %f", tf)) panic(errors.Errorf("invalid trend factor. Expected: 0 < tf < 1, got: %f", tf))
} }
var l int var l int
@ -730,10 +731,10 @@ func funcLabelReplace(vals []Value, args Expressions, enh *EvalNodeHelper) Vecto
var err error var err error
enh.regex, err = regexp.Compile("^(?:" + regexStr + ")$") enh.regex, err = regexp.Compile("^(?:" + regexStr + ")$")
if err != nil { if err != nil {
panic(fmt.Errorf("invalid regular expression in label_replace(): %s", regexStr)) panic(errors.Errorf("invalid regular expression in label_replace(): %s", regexStr))
} }
if !model.LabelNameRE.MatchString(dst) { if !model.LabelNameRE.MatchString(dst) {
panic(fmt.Errorf("invalid destination label name in label_replace(): %s", dst)) panic(errors.Errorf("invalid destination label name in label_replace(): %s", dst))
} }
enh.dmn = make(map[uint64]labels.Labels, len(enh.out)) enh.dmn = make(map[uint64]labels.Labels, len(enh.out))
} }
@ -795,13 +796,13 @@ func funcLabelJoin(vals []Value, args Expressions, enh *EvalNodeHelper) Vector {
for i := 3; i < len(args); i++ { for i := 3; i < len(args); i++ {
src := args[i].(*StringLiteral).Val src := args[i].(*StringLiteral).Val
if !model.LabelName(src).IsValid() { if !model.LabelName(src).IsValid() {
panic(fmt.Errorf("invalid source label name in label_join(): %s", src)) panic(errors.Errorf("invalid source label name in label_join(): %s", src))
} }
srcLabels[i-3] = src srcLabels[i-3] = src
} }
if !model.LabelName(dst).IsValid() { if !model.LabelName(dst).IsValid() {
panic(fmt.Errorf("invalid destination label name in label_join(): %s", dst)) panic(errors.Errorf("invalid destination label name in label_join(): %s", dst))
} }
srcVals := make([]string, len(srcLabels)) srcVals := make([]string, len(srcLabels))

View file

@ -23,7 +23,9 @@ import (
"strings" "strings"
"time" "time"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/value" "github.com/prometheus/prometheus/pkg/value"
"github.com/prometheus/prometheus/util/strutil" "github.com/prometheus/prometheus/util/strutil"
@ -285,7 +287,7 @@ func (p *parser) backup() {
// errorf formats the error and terminates processing. // errorf formats the error and terminates processing.
func (p *parser) errorf(format string, args ...interface{}) { func (p *parser) errorf(format string, args ...interface{}) {
p.error(fmt.Errorf(format, args...)) p.error(errors.Errorf(format, args...))
} }
// error terminates processing. // error terminates processing.
@ -319,7 +321,7 @@ func (p *parser) expectOneOf(exp1, exp2 ItemType, context string) item {
return token return token
} }
var errUnexpected = fmt.Errorf("unexpected error") var errUnexpected = errors.New("unexpected error")
// recover is the handler that turns panics into returns from the top level of Parse. // recover is the handler that turns panics into returns from the top level of Parse.
func (p *parser) recover(errp *error) { func (p *parser) recover(errp *error) {
@ -1070,7 +1072,7 @@ func parseDuration(ds string) (time.Duration, error) {
return 0, err return 0, err
} }
if dur == 0 { if dur == 0 {
return 0, fmt.Errorf("duration must be greater than 0") return 0, errors.New("duration must be greater than 0")
} }
return time.Duration(dur), nil return time.Duration(dur), nil
} }

View file

@ -14,16 +14,17 @@
package promql package promql
import ( import (
"fmt"
"math" "math"
"reflect" "reflect"
"strings" "strings"
"testing" "testing"
"time" "time"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/prometheus/prometheus/pkg/labels"
) )
var testExpr = []struct { var testExpr = []struct {
@ -1644,7 +1645,7 @@ func mustLabelMatcher(mt labels.MatchType, name, val string) *labels.Matcher {
func mustGetFunction(name string) *Function { func mustGetFunction(name string) *Function {
f, ok := getFunction(name) f, ok := getFunction(name)
if !ok { if !ok {
panic(fmt.Errorf("function %q does not exist", name)) panic(errors.Errorf("function %q does not exist", name))
} }
return f return f
} }
@ -1795,7 +1796,7 @@ func TestRecoverParserError(t *testing.T) {
p := newParser("foo bar") p := newParser("foo bar")
var err error var err error
e := fmt.Errorf("custom error") e := errors.New("custom error")
defer func() { defer func() {
if err.Error() != e.Error() { if err.Error() != e.Error() {

View file

@ -15,7 +15,6 @@ package promql
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"io/ioutil" "io/ioutil"
"math" "math"
@ -24,6 +23,7 @@ import (
"strings" "strings"
"time" "time"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
@ -102,7 +102,7 @@ func (t *Test) Storage() storage.Storage {
func raise(line int, format string, v ...interface{}) error { func raise(line int, format string, v ...interface{}) error {
return &ParseErr{ return &ParseErr{
Line: line + 1, Line: line + 1,
Err: fmt.Errorf(format, v...), Err: errors.Errorf(format, v...),
} }
} }
@ -356,21 +356,21 @@ func (ev *evalCmd) expect(pos int, m labels.Labels, vals ...sequenceValue) {
func (ev *evalCmd) compareResult(result Value) error { func (ev *evalCmd) compareResult(result Value) error {
switch val := result.(type) { switch val := result.(type) {
case Matrix: case Matrix:
return fmt.Errorf("received range result on instant evaluation") return errors.New("received range result on instant evaluation")
case Vector: case Vector:
seen := map[uint64]bool{} seen := map[uint64]bool{}
for pos, v := range val { for pos, v := range val {
fp := v.Metric.Hash() fp := v.Metric.Hash()
if _, ok := ev.metrics[fp]; !ok { if _, ok := ev.metrics[fp]; !ok {
return fmt.Errorf("unexpected metric %s in result", v.Metric) return errors.Errorf("unexpected metric %s in result", v.Metric)
} }
exp := ev.expected[fp] exp := ev.expected[fp]
if ev.ordered && exp.pos != pos+1 { if ev.ordered && exp.pos != pos+1 {
return fmt.Errorf("expected metric %s with %v at position %d but was at %d", v.Metric, exp.vals, exp.pos, pos+1) return errors.Errorf("expected metric %s with %v at position %d but was at %d", v.Metric, exp.vals, exp.pos, pos+1)
} }
if !almostEqual(exp.vals[0].value, v.V) { if !almostEqual(exp.vals[0].value, v.V) {
return fmt.Errorf("expected %v for %s but got %v", exp.vals[0].value, v.Metric, v.V) return errors.Errorf("expected %v for %s but got %v", exp.vals[0].value, v.Metric, v.V)
} }
seen[fp] = true seen[fp] = true
@ -381,17 +381,17 @@ func (ev *evalCmd) compareResult(result Value) error {
for _, ss := range val { for _, ss := range val {
fmt.Println(" ", ss.Metric, ss.Point) fmt.Println(" ", ss.Metric, ss.Point)
} }
return fmt.Errorf("expected metric %s with %v not found", ev.metrics[fp], expVals) return errors.Errorf("expected metric %s with %v not found", ev.metrics[fp], expVals)
} }
} }
case Scalar: case Scalar:
if !almostEqual(ev.expected[0].vals[0].value, val.V) { if !almostEqual(ev.expected[0].vals[0].value, val.V) {
return fmt.Errorf("expected Scalar %v but got %v", val.V, ev.expected[0].vals[0].value) return errors.Errorf("expected Scalar %v but got %v", val.V, ev.expected[0].vals[0].value)
} }
default: default:
panic(fmt.Errorf("promql.Test.compareResult: unexpected result type %T", result)) panic(errors.Errorf("promql.Test.compareResult: unexpected result type %T", result))
} }
return nil return nil
} }
@ -447,16 +447,16 @@ func (t *Test) exec(tc testCommand) error {
if cmd.fail { if cmd.fail {
return nil return nil
} }
return fmt.Errorf("error evaluating query %q (line %d): %s", cmd.expr, cmd.line, res.Err) return errors.Wrapf(res.Err, "error evaluating query %q (line %d)", cmd.expr, cmd.line)
} }
defer q.Close() defer q.Close()
if res.Err == nil && cmd.fail { if res.Err == nil && cmd.fail {
return fmt.Errorf("expected error evaluating query %q (line %d) but got none", cmd.expr, cmd.line) return errors.Errorf("expected error evaluating query %q (line %d) but got none", cmd.expr, cmd.line)
} }
err = cmd.compareResult(res.Value) err = cmd.compareResult(res.Value)
if err != nil { if err != nil {
return fmt.Errorf("error in %s %s: %s", cmd, cmd.expr, err) return errors.Wrapf(err, "error in %s %s", cmd, cmd.expr)
} }
// Check query returns same result in range mode, // Check query returns same result in range mode,
@ -467,7 +467,7 @@ func (t *Test) exec(tc testCommand) error {
} }
rangeRes := q.Exec(t.context) rangeRes := q.Exec(t.context)
if rangeRes.Err != nil { if rangeRes.Err != nil {
return fmt.Errorf("error evaluating query %q (line %d) in range mode: %s", cmd.expr, cmd.line, rangeRes.Err) return errors.Wrapf(rangeRes.Err, "error evaluating query %q (line %d) in range mode", cmd.expr, cmd.line)
} }
defer q.Close() defer q.Close()
if cmd.ordered { if cmd.ordered {
@ -490,7 +490,7 @@ func (t *Test) exec(tc testCommand) error {
err = cmd.compareResult(vec) err = cmd.compareResult(vec)
} }
if err != nil { if err != nil {
return fmt.Errorf("error in %s %s (line %d) rande mode: %s", cmd, cmd.expr, cmd.line, err) return errors.Wrapf(err, "error in %s %s (line %d) rande mode", cmd, cmd.expr, cmd.line)
} }
default: default:
@ -561,7 +561,7 @@ func parseNumber(s string) (float64, error) {
f, err = strconv.ParseFloat(s, 64) f, err = strconv.ParseFloat(s, 64)
} }
if err != nil { if err != nil {
return 0, fmt.Errorf("error parsing number: %s", err) return 0, errors.Wrap(err, "error parsing number")
} }
return f, nil return f, nil
} }
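The test-harness hunks above also drop the trailing ": %s" from messages when moving to Wrap and Wrapf. That suffix is redundant: pkg/errors joins the wrapping message and the cause with ": " when Error() is called. A simplified sketch of the parseNumber case (the special-case handling of the real function is omitted):

    package main

    import (
        "fmt"
        "strconv"

        "github.com/pkg/errors"
    )

    func parseNumber(s string) (float64, error) {
        f, err := strconv.ParseFloat(s, 64)
        if err != nil {
            // Prints as "error parsing number: <cause>", the same text the
            // old fmt.Errorf("error parsing number: %s", err) produced,
            // but with err still attached as the cause.
            return 0, errors.Wrap(err, "error parsing number")
        }
        return f, nil
    }

    func main() {
        _, err := parseNumber("not-a-number")
        fmt.Println(err)
    }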

View file

@ -19,6 +19,8 @@ import (
"strconv" "strconv"
"strings" "strings"
"github.com/pkg/errors"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
) )
@ -215,7 +217,7 @@ func (r *Result) Vector() (Vector, error) {
} }
v, ok := r.Value.(Vector) v, ok := r.Value.(Vector)
if !ok { if !ok {
return nil, fmt.Errorf("query result is not a Vector") return nil, errors.New("query result is not a Vector")
} }
return v, nil return v, nil
} }
@ -228,7 +230,7 @@ func (r *Result) Matrix() (Matrix, error) {
} }
v, ok := r.Value.(Matrix) v, ok := r.Value.(Matrix)
if !ok { if !ok {
return nil, fmt.Errorf("query result is not a range Vector") return nil, errors.New("query result is not a range Vector")
} }
return v, nil return v, nil
} }
@ -241,7 +243,7 @@ func (r *Result) Scalar() (Scalar, error) {
} }
v, ok := r.Value.(Scalar) v, ok := r.Value.(Scalar)
if !ok { if !ok {
return Scalar{}, fmt.Errorf("query result is not a Scalar") return Scalar{}, errors.New("query result is not a Scalar")
} }
return v, nil return v, nil
} }

View file

@ -26,6 +26,7 @@ import (
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level" "github.com/go-kit/kit/log/level"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
@ -71,7 +72,7 @@ func (s AlertState) String() string {
case StateFiring: case StateFiring:
return "firing" return "firing"
} }
panic(fmt.Errorf("unknown alert state: %v", s.String())) panic(errors.Errorf("unknown alert state: %s", s.String()))
} }
// Alert is the user-level representation of a single instance of an alerting rule. // Alert is the user-level representation of a single instance of an alerting rule.

View file

@ -16,7 +16,6 @@ package rules
import ( import (
"context" "context"
"errors" "errors"
"fmt"
"math" "math"
"net/url" "net/url"
"sort" "sort"
@ -29,8 +28,8 @@ import (
"github.com/go-kit/kit/log/level" "github.com/go-kit/kit/log/level"
opentracing "github.com/opentracing/opentracing-go" opentracing "github.com/opentracing/opentracing-go"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/rulefmt" "github.com/prometheus/prometheus/pkg/rulefmt"
"github.com/prometheus/prometheus/pkg/timestamp" "github.com/prometheus/prometheus/pkg/timestamp"
@ -180,7 +179,7 @@ func EngineQueryFunc(engine *promql.Engine, q storage.Queryable) QueryFunc {
Metric: labels.Labels{}, Metric: labels.Labels{},
}}, nil }}, nil
default: default:
return nil, fmt.Errorf("rule result is not a vector or scalar") return nil, errors.New("rule result is not a vector or scalar")
} }
} }
} }

View file

@ -25,6 +25,7 @@ import (
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level" "github.com/go-kit/kit/log/level"
"github.com/pkg/errors"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/discovery/targetgroup" "github.com/prometheus/prometheus/discovery/targetgroup"
@ -199,7 +200,7 @@ func (m *Manager) ApplyConfig(cfg *config.Config) error {
} }
if failed { if failed {
return fmt.Errorf("failed to apply the new configuration") return errors.New("failed to apply the new configuration")
} }
return nil return nil
} }

View file

@ -14,18 +14,19 @@
package scrape package scrape
import ( import (
"fmt"
"strconv" "strconv"
"testing" "testing"
"time" "time"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
yaml "gopkg.in/yaml.v2"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/discovery/targetgroup" "github.com/prometheus/prometheus/discovery/targetgroup"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/pkg/relabel" "github.com/prometheus/prometheus/pkg/relabel"
"github.com/prometheus/prometheus/util/testutil" "github.com/prometheus/prometheus/util/testutil"
yaml "gopkg.in/yaml.v2"
) )
func TestPopulateLabels(t *testing.T) { func TestPopulateLabels(t *testing.T) {
@ -127,7 +128,7 @@ func TestPopulateLabels(t *testing.T) {
}, },
res: nil, res: nil,
resOrig: nil, resOrig: nil,
err: fmt.Errorf("no address"), err: errors.New("no address"),
}, },
// Address label missing, but added in relabelling. // Address label missing, but added in relabelling.
{ {
@ -206,14 +207,14 @@ func TestPopulateLabels(t *testing.T) {
}, },
res: nil, res: nil,
resOrig: nil, resOrig: nil,
err: fmt.Errorf("invalid label value for \"custom\": \"\\xbd\""), err: errors.New("invalid label value for \"custom\": \"\\xbd\""),
}, },
} }
for _, c := range cases { for _, c := range cases {
in := c.in.Copy() in := c.in.Copy()
res, orig, err := populateLabels(c.in, c.cfg) res, orig, err := populateLabels(c.in, c.cfg)
testutil.Equals(t, c.err, err) testutil.ErrorEqual(err, c.err)
testutil.Equals(t, c.in, in) testutil.Equals(t, c.in, in)
testutil.Equals(t, c.res, res) testutil.Equals(t, c.res, res)
testutil.Equals(t, c.resOrig, orig) testutil.Equals(t, c.resOrig, orig)
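The test above switches the error assertion from testutil.Equals to testutil.ErrorEqual, presumably because pkg/errors values carry stack traces and therefore never satisfy reflect.DeepEqual even when their messages match. A minimal sketch of that comparison style; the helper below is an illustrative stand-in, not the actual testutil implementation:

    package main

    import (
        "fmt"
        "reflect"

        "github.com/pkg/errors"
    )

    // errorEqual treats two errors as equal when both are nil or when
    // their messages match.
    func errorEqual(a, b error) bool {
        if a == nil || b == nil {
            return a == b
        }
        return a.Error() == b.Error()
    }

    func main() {
        want := errors.New("no address")
        got := errors.New("no address")

        // Different recorded stacks make DeepEqual fail despite equal messages.
        fmt.Println(reflect.DeepEqual(want, got)) // false
        fmt.Println(errorEqual(want, got))        // true
    }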

View file

@ -536,7 +536,7 @@ func (s *targetScraper) scrape(ctx context.Context, w io.Writer) (string, error)
defer resp.Body.Close() defer resp.Body.Close()
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
return "", fmt.Errorf("server returned HTTP status %s", resp.Status) return "", errors.Errorf("server returned HTTP status %s", resp.Status)
} }
if resp.Header.Get("Content-Encoding") != "gzip" { if resp.Header.Get("Content-Encoding") != "gzip" {

View file

@ -29,6 +29,7 @@ import (
"testing" "testing"
"time" "time"
"github.com/pkg/errors"
dto "github.com/prometheus/client_model/go" dto "github.com/prometheus/client_model/go"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -690,7 +691,7 @@ func TestScrapeLoopRunCreatesStaleMarkersOnFailedScrape(t *testing.T) {
} else if numScrapes == 5 { } else if numScrapes == 5 {
cancel() cancel()
} }
return fmt.Errorf("scrape failed") return errors.New("scrape failed")
} }
go func() { go func() {
@ -752,7 +753,7 @@ func TestScrapeLoopRunCreatesStaleMarkersOnParseFailure(t *testing.T) {
} else if numScrapes == 3 { } else if numScrapes == 3 {
cancel() cancel()
} }
return fmt.Errorf("scrape failed") return errors.New("scrape failed")
} }
go func() { go func() {
@ -1079,7 +1080,7 @@ func TestScrapeLoopRunReportsTargetDownOnScrapeError(t *testing.T) {
scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error { scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error {
cancel() cancel()
return fmt.Errorf("scrape failed") return errors.New("scrape failed")
} }
sl.run(10*time.Millisecond, time.Hour, nil) sl.run(10*time.Millisecond, time.Hour, nil)
@ -1292,9 +1293,9 @@ func TestTargetScrapeScrapeCancel(t *testing.T) {
go func() { go func() {
_, err := ts.scrape(ctx, ioutil.Discard) _, err := ts.scrape(ctx, ioutil.Discard)
if err == nil { if err == nil {
errc <- fmt.Errorf("Expected error but got nil") errc <- errors.New("Expected error but got nil")
} else if ctx.Err() != context.Canceled { } else if ctx.Err() != context.Canceled {
errc <- fmt.Errorf("Expected context cancelation error but got: %s", ctx.Err()) errc <- errors.Errorf("Expected context cancelation error but got: %s", ctx.Err())
} }
close(errc) close(errc)
}() }()
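
The cancellation test above reports failures from the scraping goroutine through an error channel instead of calling the testing API off the test goroutine. A minimal, self-contained sketch of that pattern follows; doWork and the messages are illustrative stand-ins, not code from this commit.

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/pkg/errors"
)

// doWork blocks until its context is cancelled, then returns the context error.
func doWork(ctx context.Context) error {
	<-ctx.Done()
	return ctx.Err()
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	errc := make(chan error, 1)

	go func() {
		defer close(errc)
		if err := doWork(ctx); err == nil {
			errc <- errors.New("expected error but got nil")
		} else if ctx.Err() != context.Canceled {
			errc <- errors.Errorf("expected context cancelation error but got: %s", ctx.Err())
		}
	}()

	cancel()
	select {
	case err, ok := <-errc:
		if ok && err != nil {
			fmt.Println("failure:", err) // the real test would call t.Fatalf here
		} else {
			fmt.Println("ok: goroutine observed the cancellation")
		}
	case <-time.After(time.Second):
		fmt.Println("timed out waiting for the goroutine")
	}
}
```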

View file

@ -14,7 +14,6 @@
package scrape package scrape
import ( import (
"errors"
"fmt" "fmt"
"hash/fnv" "hash/fnv"
"net" "net"
@ -23,6 +22,7 @@ import (
"sync" "sync"
"time" "time"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
@ -345,7 +345,7 @@ func populateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig lab
return nil, preRelabelLabels, nil return nil, preRelabelLabels, nil
} }
if v := lset.Get(model.AddressLabel); v == "" { if v := lset.Get(model.AddressLabel); v == "" {
return nil, nil, fmt.Errorf("no address") return nil, nil, errors.New("no address")
} }
lb = labels.NewBuilder(lset) lb = labels.NewBuilder(lset)
@ -372,7 +372,7 @@ func populateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig lab
case "https": case "https":
addr = addr + ":443" addr = addr + ":443"
default: default:
return nil, nil, fmt.Errorf("invalid scheme: %q", cfg.Scheme) return nil, nil, errors.Errorf("invalid scheme: %q", cfg.Scheme)
} }
lb.Set(model.AddressLabel, addr) lb.Set(model.AddressLabel, addr)
} }
@ -398,7 +398,7 @@ func populateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig lab
for _, l := range res { for _, l := range res {
// Check label values are valid, drop the target if not. // Check label values are valid, drop the target if not.
if !model.LabelValue(l.Value).IsValid() { if !model.LabelValue(l.Value).IsValid() {
return nil, nil, fmt.Errorf("invalid label value for %q: %q", l.Name, l.Value) return nil, nil, errors.Errorf("invalid label value for %q: %q", l.Name, l.Value)
} }
} }
return res, preRelabelLabels, nil return res, preRelabelLabels, nil
@ -424,7 +424,7 @@ func targetsFromGroup(tg *targetgroup.Group, cfg *config.ScrapeConfig) ([]*Targe
lbls, origLabels, err := populateLabels(lset, cfg) lbls, origLabels, err := populateLabels(lset, cfg)
if err != nil { if err != nil {
return nil, fmt.Errorf("instance %d in group %s: %s", i, tg, err) return nil, errors.Wrapf(err, "instance %d in group %s", i, tg)
} }
if lbls != nil || origLabels != nil { if lbls != nil || origLabels != nil {
targets = append(targets, NewTarget(lbls, origLabels, cfg.Params)) targets = append(targets, NewTarget(lbls, origLabels, cfg.Params))
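
The targetsFromGroup change above is the core Wrapf migration in this commit: fmt.Errorf("...: %s", ..., err) flattened the underlying error into a string, whereas errors.Wrapf produces the same rendered message while keeping the cause recoverable. A small sketch under assumed stand-ins (populateLabels here is a fake that always fails, not the real function):

```go
package main

import (
	"fmt"

	"github.com/pkg/errors"
)

var errNoAddress = errors.New("no address")

// populateLabels is a stand-in for the real function; it always fails here.
func populateLabels() error { return errNoAddress }

func targetsFromGroup(i int, group string) error {
	if err := populateLabels(); err != nil {
		// Wrapf prepends context to the message and records err as the cause.
		return errors.Wrapf(err, "instance %d in group %s", i, group)
	}
	return nil
}

func main() {
	err := targetsFromGroup(0, "example-group")
	fmt.Println(err)                               // instance 0 in group example-group: no address
	fmt.Println(errors.Cause(err) == errNoAddress) // true: the cause survives wrapping
}
```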

View file

@ -25,10 +25,11 @@ import (
"github.com/gogo/protobuf/proto" "github.com/gogo/protobuf/proto"
"github.com/golang/snappy" "github.com/golang/snappy"
"github.com/pkg/errors"
config_util "github.com/prometheus/common/config"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/common/version" "github.com/prometheus/common/version"
config_util "github.com/prometheus/common/config"
"github.com/prometheus/prometheus/prompb" "github.com/prometheus/prometheus/prompb"
) )
@ -102,7 +103,7 @@ func (c *Client) Store(ctx context.Context, req []byte) error {
if scanner.Scan() { if scanner.Scan() {
line = scanner.Text() line = scanner.Text()
} }
err = fmt.Errorf("server returned HTTP status %s: %s", httpResp.Status, line) err = errors.Errorf("server returned HTTP status %s: %s", httpResp.Status, line)
} }
if httpResp.StatusCode/100 == 5 { if httpResp.StatusCode/100 == 5 {
return recoverableError{err} return recoverableError{err}
@ -126,13 +127,13 @@ func (c *Client) Read(ctx context.Context, query *prompb.Query) (*prompb.QueryRe
} }
data, err := proto.Marshal(req) data, err := proto.Marshal(req)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to marshal read request: %v", err) return nil, errors.Wrapf(err, "unable to marshal read request")
} }
compressed := snappy.Encode(nil, data) compressed := snappy.Encode(nil, data)
httpReq, err := http.NewRequest("POST", c.url.String(), bytes.NewReader(compressed)) httpReq, err := http.NewRequest("POST", c.url.String(), bytes.NewReader(compressed))
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to create request: %v", err) return nil, errors.Wrap(err, "unable to create request")
} }
httpReq.Header.Add("Content-Encoding", "snappy") httpReq.Header.Add("Content-Encoding", "snappy")
httpReq.Header.Add("Accept-Encoding", "snappy") httpReq.Header.Add("Accept-Encoding", "snappy")
@ -145,31 +146,31 @@ func (c *Client) Read(ctx context.Context, query *prompb.Query) (*prompb.QueryRe
httpResp, err := c.client.Do(httpReq.WithContext(ctx)) httpResp, err := c.client.Do(httpReq.WithContext(ctx))
if err != nil { if err != nil {
return nil, fmt.Errorf("error sending request: %v", err) return nil, errors.Wrap(err, "error sending request")
} }
defer httpResp.Body.Close() defer httpResp.Body.Close()
if httpResp.StatusCode/100 != 2 { if httpResp.StatusCode/100 != 2 {
return nil, fmt.Errorf("server returned HTTP status %s", httpResp.Status) return nil, errors.Errorf("server returned HTTP status %s", httpResp.Status)
} }
compressed, err = ioutil.ReadAll(httpResp.Body) compressed, err = ioutil.ReadAll(httpResp.Body)
if err != nil { if err != nil {
return nil, fmt.Errorf("error reading response: %v", err) return nil, errors.Wrap(err, "error reading response")
} }
uncompressed, err := snappy.Decode(nil, compressed) uncompressed, err := snappy.Decode(nil, compressed)
if err != nil { if err != nil {
return nil, fmt.Errorf("error reading response: %v", err) return nil, errors.Wrap(err, "error reading response")
} }
var resp prompb.ReadResponse var resp prompb.ReadResponse
err = proto.Unmarshal(uncompressed, &resp) err = proto.Unmarshal(uncompressed, &resp)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to unmarshal response body: %v", err) return nil, errors.Wrap(err, "unable to unmarshal response body")
} }
if len(resp.Results) != len(req.Queries) { if len(resp.Results) != len(req.Queries) {
return nil, fmt.Errorf("responses: want %d, got %d", len(req.Queries), len(resp.Results)) return nil, errors.Errorf("responses: want %d, got %d", len(req.Queries), len(resp.Results))
} }
return resp.Results[0], nil return resp.Results[0], nil
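
The remote read/write client above ends up using all four pkg/errors constructors, which makes it a convenient place to state the rule of thumb this commit applies: New for fixed messages, Errorf when values need formatting, Wrap/Wrapf when there is an underlying cause. The sketch below is illustrative only; readBody and its messages are made up for the example, not taken from the diff.

```go
package main

import (
	"fmt"
	"io"
	"io/ioutil"
	"strings"

	"github.com/pkg/errors"
)

// readBody is a made-up helper showing which constructor fits which case.
func readBody(r io.Reader, want, got int) ([]byte, error) {
	if r == nil {
		// No cause, no formatting directives: errors.New (not fmt.Errorf).
		return nil, errors.New("no response body")
	}
	if want != got {
		// No cause, but values to interpolate: errors.Errorf.
		return nil, errors.Errorf("responses: want %d, got %d", want, got)
	}
	b, err := ioutil.ReadAll(r)
	if err != nil {
		// Underlying cause with a fixed message: errors.Wrap.
		// With values to format as well, errors.Wrapf(err, "...", ...) applies.
		return nil, errors.Wrap(err, "error reading response")
	}
	return b, nil
}

func main() {
	b, err := readBody(strings.NewReader("ok"), 1, 1)
	fmt.Println(string(b), err) // ok <nil>
}
```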

View file

@ -15,17 +15,18 @@ package remote
import ( import (
"context" "context"
"fmt"
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
"net/url" "net/url"
"reflect"
"strings" "strings"
"testing" "testing"
"time" "time"
"github.com/pkg/errors"
config_util "github.com/prometheus/common/config" config_util "github.com/prometheus/common/config"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/util/testutil"
) )
var longErrMessage = strings.Repeat("error message", maxErrMsgLen) var longErrMessage = strings.Repeat("error message", maxErrMsgLen)
@ -41,15 +42,15 @@ func TestStoreHTTPErrorHandling(t *testing.T) {
}, },
{ {
code: 300, code: 300,
err: fmt.Errorf("server returned HTTP status 300 Multiple Choices: " + longErrMessage[:maxErrMsgLen]), err: errors.New("server returned HTTP status 300 Multiple Choices: " + longErrMessage[:maxErrMsgLen]),
}, },
{ {
code: 404, code: 404,
err: fmt.Errorf("server returned HTTP status 404 Not Found: " + longErrMessage[:maxErrMsgLen]), err: errors.New("server returned HTTP status 404 Not Found: " + longErrMessage[:maxErrMsgLen]),
}, },
{ {
code: 500, code: 500,
err: recoverableError{fmt.Errorf("server returned HTTP status 500 Internal Server Error: " + longErrMessage[:maxErrMsgLen])}, err: recoverableError{errors.New("server returned HTTP status 500 Internal Server Error: " + longErrMessage[:maxErrMsgLen])},
}, },
} }
@ -74,7 +75,7 @@ func TestStoreHTTPErrorHandling(t *testing.T) {
} }
err = c.Store(context.Background(), []byte{}) err = c.Store(context.Background(), []byte{})
if !reflect.DeepEqual(err, test.err) { if !testutil.ErrorEqual(err, test.err) {
t.Errorf("%d. Unexpected error; want %v, got %v", i, test.err, err) t.Errorf("%d. Unexpected error; want %v, got %v", i, test.err, err)
} }

View file

@ -22,6 +22,7 @@ import (
"github.com/gogo/protobuf/proto" "github.com/gogo/protobuf/proto"
"github.com/golang/snappy" "github.com/golang/snappy"
"github.com/pkg/errors"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/pkg/labels"
@ -280,13 +281,13 @@ func (c *concreteSeriesIterator) Err() error {
func validateLabelsAndMetricName(ls labels.Labels) error { func validateLabelsAndMetricName(ls labels.Labels) error {
for _, l := range ls { for _, l := range ls {
if l.Name == labels.MetricName && !model.IsValidMetricName(model.LabelValue(l.Value)) { if l.Name == labels.MetricName && !model.IsValidMetricName(model.LabelValue(l.Value)) {
return fmt.Errorf("invalid metric name: %v", l.Value) return errors.Errorf("invalid metric name: %v", l.Value)
} }
if !model.LabelName(l.Name).IsValid() { if !model.LabelName(l.Name).IsValid() {
return fmt.Errorf("invalid label name: %v", l.Name) return errors.Errorf("invalid label name: %v", l.Name)
} }
if !model.LabelValue(l.Value).IsValid() { if !model.LabelValue(l.Value).IsValid() {
return fmt.Errorf("invalid label value: %v", l.Value) return errors.Errorf("invalid label value: %v", l.Value)
} }
} }
return nil return nil
@ -306,7 +307,7 @@ func toLabelMatchers(matchers []*labels.Matcher) ([]*prompb.LabelMatcher, error)
case labels.MatchNotRegexp: case labels.MatchNotRegexp:
mType = prompb.LabelMatcher_NRE mType = prompb.LabelMatcher_NRE
default: default:
return nil, fmt.Errorf("invalid matcher type") return nil, errors.New("invalid matcher type")
} }
pbMatchers = append(pbMatchers, &prompb.LabelMatcher{ pbMatchers = append(pbMatchers, &prompb.LabelMatcher{
Type: mType, Type: mType,
@ -331,7 +332,7 @@ func fromLabelMatchers(matchers []*prompb.LabelMatcher) ([]*labels.Matcher, erro
case prompb.LabelMatcher_NRE: case prompb.LabelMatcher_NRE:
mtype = labels.MatchNotRegexp mtype = labels.MatchNotRegexp
default: default:
return nil, fmt.Errorf("invalid matcher type") return nil, errors.New("invalid matcher type")
} }
matcher, err := labels.NewMatcher(mtype, matcher.Name, matcher.Value) matcher, err := labels.NewMatcher(mtype, matcher.Name, matcher.Value)
if err != nil { if err != nil {

View file

@ -27,12 +27,12 @@ import (
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level" "github.com/go-kit/kit/log/level"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/tsdb" "github.com/prometheus/tsdb"
"github.com/prometheus/tsdb/fileutil" "github.com/prometheus/tsdb/fileutil"
"github.com/prometheus/tsdb/wal" "github.com/prometheus/tsdb/wal"
"github.com/prometheus/prometheus/pkg/timestamp"
) )
const ( const (

View file

@ -16,7 +16,6 @@ package template
import ( import (
"bytes" "bytes"
"context" "context"
"errors"
"fmt" "fmt"
"math" "math"
"net/url" "net/url"
@ -28,9 +27,10 @@ import (
html_template "html/template" html_template "html/template"
text_template "text/template" text_template "text/template"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/util/strutil" "github.com/prometheus/prometheus/util/strutil"
) )
@ -288,7 +288,7 @@ func (te Expander) Expand() (result string, resultErr error) {
var ok bool var ok bool
resultErr, ok = r.(error) resultErr, ok = r.(error)
if !ok { if !ok {
resultErr = fmt.Errorf("panic expanding template %v: %v", te.name, r) resultErr = errors.Errorf("panic expanding template %v: %v", te.name, r)
} }
} }
if resultErr != nil { if resultErr != nil {
@ -300,12 +300,12 @@ func (te Expander) Expand() (result string, resultErr error) {
tmpl, err := text_template.New(te.name).Funcs(te.funcMap).Option("missingkey=zero").Parse(te.text) tmpl, err := text_template.New(te.name).Funcs(te.funcMap).Option("missingkey=zero").Parse(te.text)
if err != nil { if err != nil {
return "", fmt.Errorf("error parsing template %v: %v", te.name, err) return "", errors.Wrapf(err, "error parsing template %v", te.name)
} }
var buffer bytes.Buffer var buffer bytes.Buffer
err = tmpl.Execute(&buffer, te.data) err = tmpl.Execute(&buffer, te.data)
if err != nil { if err != nil {
return "", fmt.Errorf("error executing template %v: %v", te.name, err) return "", errors.Wrapf(err, "error executing template %v", te.name)
} }
return buffer.String(), nil return buffer.String(), nil
} }
@ -317,7 +317,7 @@ func (te Expander) ExpandHTML(templateFiles []string) (result string, resultErr
var ok bool var ok bool
resultErr, ok = r.(error) resultErr, ok = r.(error)
if !ok { if !ok {
resultErr = fmt.Errorf("panic expanding template %v: %v", te.name, r) resultErr = errors.Errorf("panic expanding template %s: %v", te.name, r)
} }
} }
}() }()
@ -333,18 +333,18 @@ func (te Expander) ExpandHTML(templateFiles []string) (result string, resultErr
}) })
tmpl, err := tmpl.Parse(te.text) tmpl, err := tmpl.Parse(te.text)
if err != nil { if err != nil {
return "", fmt.Errorf("error parsing template %v: %v", te.name, err) return "", errors.Wrapf(err, "error parsing template %v", te.name)
} }
if len(templateFiles) > 0 { if len(templateFiles) > 0 {
_, err = tmpl.ParseFiles(templateFiles...) _, err = tmpl.ParseFiles(templateFiles...)
if err != nil { if err != nil {
return "", fmt.Errorf("error parsing template files for %v: %v", te.name, err) return "", errors.Wrapf(err, "error parsing template files for %v", te.name)
} }
} }
var buffer bytes.Buffer var buffer bytes.Buffer
err = tmpl.Execute(&buffer, te.data) err = tmpl.Execute(&buffer, te.data)
if err != nil { if err != nil {
return "", fmt.Errorf("error executing template %v: %v", te.name, err) return "", errors.Wrapf(err, "error executing template %v", te.name)
} }
return buffer.String(), nil return buffer.String(), nil
} }
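
The Expander changes above keep the recover-based control flow and only swap the error constructors: parse and execute failures are wrapped with the template name, and a non-error panic value is converted with errors.Errorf. A condensed, self-contained sketch of that shape (expand is a stand-in, not the real Expander method):

```go
package main

import (
	"bytes"
	"fmt"
	text_template "text/template"

	"github.com/pkg/errors"
)

// expand parses and executes a text template, turning panics and
// parse/execute failures into errors annotated with the template name.
func expand(name, text string, data interface{}) (result string, resultErr error) {
	defer func() {
		if r := recover(); r != nil {
			var ok bool
			resultErr, ok = r.(error)
			if !ok {
				resultErr = errors.Errorf("panic expanding template %v: %v", name, r)
			}
		}
	}()

	tmpl, err := text_template.New(name).Option("missingkey=zero").Parse(text)
	if err != nil {
		return "", errors.Wrapf(err, "error parsing template %v", name)
	}
	var buf bytes.Buffer
	if err := tmpl.Execute(&buf, data); err != nil {
		return "", errors.Wrapf(err, "error executing template %v", name)
	}
	return buf.String(), nil
}

func main() {
	out, err := expand("demo", "value is {{.}}", 42)
	fmt.Println(out, err) // value is 42 <nil>
}
```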

View file

@ -21,6 +21,7 @@ import (
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level" "github.com/go-kit/kit/log/level"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/samuel/go-zookeeper/zk" "github.com/samuel/go-zookeeper/zk"
) )
@ -195,7 +196,7 @@ func (tc *ZookeeperTreeCache) recursiveNodeUpdate(path string, node *zookeeperTr
if err == zk.ErrNoNode { if err == zk.ErrNoNode {
tc.recursiveDelete(path, node) tc.recursiveDelete(path, node)
if node == tc.head { if node == tc.head {
return fmt.Errorf("path %s does not exist", path) return errors.Errorf("path %s does not exist", path)
} }
return nil return nil
} else if err != nil { } else if err != nil {

View file

@ -15,7 +15,6 @@ package v1
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"math" "math"
"math/rand" "math/rand"
@ -32,6 +31,7 @@ import (
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level" "github.com/go-kit/kit/log/level"
jsoniter "github.com/json-iterator/go" jsoniter "github.com/json-iterator/go"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/common/route" "github.com/prometheus/common/route"
@ -271,7 +271,7 @@ func (api *API) query(r *http.Request) apiFuncResult {
var err error var err error
ts, err = parseTime(t) ts, err = parseTime(t)
if err != nil { if err != nil {
err = fmt.Errorf("invalid parameter 'time': %s", err) err = errors.Wrapf(err, "invalid parameter 'time'")
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
} }
} else { } else {
@ -283,7 +283,7 @@ func (api *API) query(r *http.Request) apiFuncResult {
var cancel context.CancelFunc var cancel context.CancelFunc
timeout, err := parseDuration(to) timeout, err := parseDuration(to)
if err != nil { if err != nil {
err = fmt.Errorf("invalid parameter 'timeout': %s", err) err = errors.Wrapf(err, "invalid parameter 'timeout'")
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
} }
@ -293,7 +293,7 @@ func (api *API) query(r *http.Request) apiFuncResult {
qry, err := api.QueryEngine.NewInstantQuery(api.Queryable, r.FormValue("query"), ts) qry, err := api.QueryEngine.NewInstantQuery(api.Queryable, r.FormValue("query"), ts)
if err != nil { if err != nil {
err = fmt.Errorf("invalid parameter 'query': %s", err) err = errors.Wrapf(err, "invalid parameter 'query'")
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
} }
@ -318,12 +318,12 @@ func (api *API) query(r *http.Request) apiFuncResult {
func (api *API) queryRange(r *http.Request) apiFuncResult { func (api *API) queryRange(r *http.Request) apiFuncResult {
start, err := parseTime(r.FormValue("start")) start, err := parseTime(r.FormValue("start"))
if err != nil { if err != nil {
err = fmt.Errorf("invalid parameter 'start': %s", err) err = errors.Wrapf(err, "invalid parameter 'start'")
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
} }
end, err := parseTime(r.FormValue("end")) end, err := parseTime(r.FormValue("end"))
if err != nil { if err != nil {
err = fmt.Errorf("invalid parameter 'end': %s", err) err = errors.Wrapf(err, "invalid parameter 'end'")
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
} }
if end.Before(start) { if end.Before(start) {
@ -333,7 +333,7 @@ func (api *API) queryRange(r *http.Request) apiFuncResult {
step, err := parseDuration(r.FormValue("step")) step, err := parseDuration(r.FormValue("step"))
if err != nil { if err != nil {
err = fmt.Errorf("invalid parameter 'step': %s", err) err = errors.Wrapf(err, "invalid parameter 'step'")
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
} }
@ -354,7 +354,7 @@ func (api *API) queryRange(r *http.Request) apiFuncResult {
var cancel context.CancelFunc var cancel context.CancelFunc
timeout, err := parseDuration(to) timeout, err := parseDuration(to)
if err != nil { if err != nil {
err = fmt.Errorf("invalid parameter 'timeout': %s", err) err = errors.Wrap(err, "invalid parameter 'timeout'")
return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
} }
@ -421,7 +421,7 @@ func (api *API) labelValues(r *http.Request) apiFuncResult {
name := route.Param(ctx, "name") name := route.Param(ctx, "name")
if !model.LabelNameRE.MatchString(name) { if !model.LabelNameRE.MatchString(name) {
return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("invalid label name: %q", name)}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, errors.Errorf("invalid label name: %q", name)}, nil, nil}
} }
q, err := api.Queryable.Querier(ctx, math.MinInt64, math.MaxInt64) q, err := api.Queryable.Querier(ctx, math.MinInt64, math.MaxInt64)
if err != nil { if err != nil {
@ -444,10 +444,10 @@ var (
func (api *API) series(r *http.Request) apiFuncResult { func (api *API) series(r *http.Request) apiFuncResult {
if err := r.ParseForm(); err != nil { if err := r.ParseForm(); err != nil {
return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("error parsing form values: %v", err)}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, errors.Wrapf(err, "error parsing form values")}, nil, nil}
} }
if len(r.Form["match[]"]) == 0 { if len(r.Form["match[]"]) == 0 {
return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, errors.New("no match[] parameter provided")}, nil, nil}
} }
var start time.Time var start time.Time
@ -511,7 +511,7 @@ func (api *API) series(r *http.Request) apiFuncResult {
} }
func (api *API) dropSeries(r *http.Request) apiFuncResult { func (api *API) dropSeries(r *http.Request) apiFuncResult {
return apiFuncResult{nil, &apiError{errorInternal, fmt.Errorf("not implemented")}, nil, nil} return apiFuncResult{nil, &apiError{errorInternal, errors.New("not implemented")}, nil, nil}
} }
// Target has the information for one target. // Target has the information for one target.
@ -599,7 +599,7 @@ func (api *API) targetMetadata(r *http.Request) apiFuncResult {
if s := r.FormValue("limit"); s != "" { if s := r.FormValue("limit"); s != "" {
var err error var err error
if limit, err = strconv.Atoi(s); err != nil { if limit, err = strconv.Atoi(s); err != nil {
return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("limit must be a number")}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, errors.New("limit must be a number")}, nil, nil}
} }
} }
@ -810,7 +810,7 @@ func (api *API) rules(r *http.Request) apiFuncResult {
Type: "recording", Type: "recording",
} }
default: default:
err := fmt.Errorf("failed to assert type of rule '%v'", rule.Name()) err := errors.Errorf("failed to assert type of rule '%v'", rule.Name())
return apiFuncResult{nil, &apiError{errorInternal, err}, nil, nil} return apiFuncResult{nil, &apiError{errorInternal, err}, nil, nil}
} }
@ -933,10 +933,10 @@ func (api *API) deleteSeries(r *http.Request) apiFuncResult {
} }
if err := r.ParseForm(); err != nil { if err := r.ParseForm(); err != nil {
return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("error parsing form values: %v", err)}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, errors.Wrap(err, "error parsing form values")}, nil, nil}
} }
if len(r.Form["match[]"]) == 0 { if len(r.Form["match[]"]) == 0 {
return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, errors.New("no match[] parameter provided")}, nil, nil}
} }
var start time.Time var start time.Time
@ -991,7 +991,7 @@ func (api *API) snapshot(r *http.Request) apiFuncResult {
if r.FormValue("skip_head") != "" { if r.FormValue("skip_head") != "" {
skipHead, err = strconv.ParseBool(r.FormValue("skip_head")) skipHead, err = strconv.ParseBool(r.FormValue("skip_head"))
if err != nil { if err != nil {
return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("unable to parse boolean 'skip_head' argument: %v", err)}, nil, nil} return apiFuncResult{nil, &apiError{errorBadData, errors.Wrapf(err, "unable to parse boolean 'skip_head' argument")}, nil, nil}
} }
} }
@ -1008,10 +1008,10 @@ func (api *API) snapshot(r *http.Request) apiFuncResult {
dir = filepath.Join(snapdir, name) dir = filepath.Join(snapdir, name)
) )
if err := os.MkdirAll(dir, 0777); err != nil { if err := os.MkdirAll(dir, 0777); err != nil {
return apiFuncResult{nil, &apiError{errorInternal, fmt.Errorf("create snapshot directory: %s", err)}, nil, nil} return apiFuncResult{nil, &apiError{errorInternal, errors.Wrap(err, "create snapshot directory")}, nil, nil}
} }
if err := db.Snapshot(dir, !skipHead); err != nil { if err := db.Snapshot(dir, !skipHead); err != nil {
return apiFuncResult{nil, &apiError{errorInternal, fmt.Errorf("create snapshot: %s", err)}, nil, nil} return apiFuncResult{nil, &apiError{errorInternal, errors.Wrap(err, "create snapshot")}, nil, nil}
} }
return apiFuncResult{struct { return apiFuncResult{struct {
@ -1158,21 +1158,21 @@ func parseTime(s string) (time.Time, error) {
if t, err := time.Parse(time.RFC3339Nano, s); err == nil { if t, err := time.Parse(time.RFC3339Nano, s); err == nil {
return t, nil return t, nil
} }
return time.Time{}, fmt.Errorf("cannot parse %q to a valid timestamp", s) return time.Time{}, errors.Errorf("cannot parse %q to a valid timestamp", s)
} }
func parseDuration(s string) (time.Duration, error) { func parseDuration(s string) (time.Duration, error) {
if d, err := strconv.ParseFloat(s, 64); err == nil { if d, err := strconv.ParseFloat(s, 64); err == nil {
ts := d * float64(time.Second) ts := d * float64(time.Second)
if ts > float64(math.MaxInt64) || ts < float64(math.MinInt64) { if ts > float64(math.MaxInt64) || ts < float64(math.MinInt64) {
return 0, fmt.Errorf("cannot parse %q to a valid duration. It overflows int64", s) return 0, errors.Errorf("cannot parse %q to a valid duration. It overflows int64", s)
} }
return time.Duration(ts), nil return time.Duration(ts), nil
} }
if d, err := model.ParseDuration(s); err == nil { if d, err := model.ParseDuration(s); err == nil {
return time.Duration(d), nil return time.Duration(d), nil
} }
return 0, fmt.Errorf("cannot parse %q to a valid duration", s) return 0, errors.Errorf("cannot parse %q to a valid duration", s)
} }
func marshalPointJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) { func marshalPointJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) {

View file

@ -38,6 +38,7 @@ import (
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/common/promlog" "github.com/prometheus/common/promlog"
"github.com/prometheus/common/route" "github.com/prometheus/common/route"
tsdbLabels "github.com/prometheus/tsdb/labels"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/pkg/gate" "github.com/prometheus/prometheus/pkg/gate"
@ -50,7 +51,6 @@ import (
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/remote" "github.com/prometheus/prometheus/storage/remote"
"github.com/prometheus/prometheus/util/testutil" "github.com/prometheus/prometheus/util/testutil"
tsdbLabels "github.com/prometheus/tsdb/labels"
) )
type testTargetRetriever struct{} type testTargetRetriever struct{}
@ -1026,7 +1026,7 @@ func (f *fakeDB) Dir() string {
func (f *fakeDB) Snapshot(dir string, withHead bool) error { return f.err } func (f *fakeDB) Snapshot(dir string, withHead bool) error { return f.err }
func TestAdminEndpoints(t *testing.T) { func TestAdminEndpoints(t *testing.T) {
tsdb, tsdbWithError := &fakeDB{}, &fakeDB{err: fmt.Errorf("some error")} tsdb, tsdbWithError := &fakeDB{}, &fakeDB{err: errors.New("some error")}
snapshotAPI := func(api *API) apiFunc { return api.snapshot } snapshotAPI := func(api *API) apiFunc { return api.snapshot }
cleanAPI := func(api *API) apiFunc { return api.cleanTombstones } cleanAPI := func(api *API) apiFunc { return api.cleanTombstones }
deleteAPI := func(api *API) apiFunc { return api.deleteSeries } deleteAPI := func(api *API) apiFunc { return api.deleteSeries }

View file

@ -34,26 +34,24 @@ import (
"strings" "strings"
"sync" "sync"
"sync/atomic" "sync/atomic"
"time"
"google.golang.org/grpc"
template_text "text/template" template_text "text/template"
"time"
"github.com/cockroachdb/cmux" "github.com/cockroachdb/cmux"
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level" "github.com/go-kit/kit/log/level"
"github.com/mwitkow/go-conntrack" conntrack "github.com/mwitkow/go-conntrack"
"github.com/opentracing-contrib/go-stdlib/nethttp" "github.com/opentracing-contrib/go-stdlib/nethttp"
"github.com/opentracing/opentracing-go" opentracing "github.com/opentracing/opentracing-go"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp" "github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/prometheus/client_model/go" io_prometheus_client "github.com/prometheus/client_model/go"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
"github.com/prometheus/common/route" "github.com/prometheus/common/route"
prometheus_tsdb "github.com/prometheus/prometheus/storage/tsdb"
"github.com/prometheus/tsdb" "github.com/prometheus/tsdb"
"golang.org/x/net/netutil" "golang.org/x/net/netutil"
"google.golang.org/grpc"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/notifier" "github.com/prometheus/prometheus/notifier"
@ -61,6 +59,7 @@ import (
"github.com/prometheus/prometheus/rules" "github.com/prometheus/prometheus/rules"
"github.com/prometheus/prometheus/scrape" "github.com/prometheus/prometheus/scrape"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
prometheus_tsdb "github.com/prometheus/prometheus/storage/tsdb"
"github.com/prometheus/prometheus/template" "github.com/prometheus/prometheus/template"
"github.com/prometheus/prometheus/util/httputil" "github.com/prometheus/prometheus/util/httputil"
api_v1 "github.com/prometheus/prometheus/web/api/v1" api_v1 "github.com/prometheus/prometheus/web/api/v1"
@ -875,11 +874,11 @@ func (h *Handler) getTemplate(name string) (string, error) {
err := appendf("_base.html") err := appendf("_base.html")
if err != nil { if err != nil {
return "", fmt.Errorf("error reading base template: %s", err) return "", errors.Wrap(err, "error reading base template")
} }
err = appendf(name) err = appendf(name)
if err != nil { if err != nil {
return "", fmt.Errorf("error reading page template %s: %s", name, err) return "", errors.Wrapf(err, "error reading page template %s", name)
} }
return tmpl, nil return tmpl, nil