Unify duration parsing
Duration parsing currently happens in several places (and for flags we use the standard Go time.Duration format). This at least reduces all our home-grown parsing to one place (in model).
This commit is contained in:
parent cfc15cf103
commit a7408bfb47
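Illustrative sketch only, not part of the diff: what the new pattern looks like from a config struct's point of view. It assumes the github.com/prometheus/common/model package of this vintage, whose Duration type parses strings such as "15s" or "5m" and implements the YAML (un)marshalling that the diff below deletes from config/config.go.

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
	"gopkg.in/yaml.v2"
)

// exampleConfig is a hypothetical stand-in for config.GlobalConfig; the field
// name and tag mirror the pattern used in config/config.go after this commit.
type exampleConfig struct {
	ScrapeInterval model.Duration `yaml:"scrape_interval,omitempty"`
}

func main() {
	var c exampleConfig
	// model.Duration unmarshals the YAML string itself (via model.ParseDuration),
	// so the home-grown config.Duration wrapper is no longer needed.
	if err := yaml.Unmarshal([]byte("scrape_interval: 15s"), &c); err != nil {
		panic(err)
	}
	fmt.Println(time.Duration(c.ScrapeInterval)) // 15s
}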
config/config.go (115 changed lines)
@@ -25,8 +25,6 @@ import (
 	"github.com/prometheus/common/model"
 	"gopkg.in/yaml.v2"
-
-	"github.com/prometheus/prometheus/util/strutil"
 )
 
 var (

@@ -75,9 +73,9 @@ var (
 	// DefaultGlobalConfig is the default global configuration.
 	DefaultGlobalConfig = GlobalConfig{
-		ScrapeInterval:     Duration(1 * time.Minute),
-		ScrapeTimeout:      Duration(10 * time.Second),
-		EvaluationInterval: Duration(1 * time.Minute),
+		ScrapeInterval:     model.Duration(1 * time.Minute),
+		ScrapeTimeout:      model.Duration(10 * time.Second),
+		EvaluationInterval: model.Duration(1 * time.Minute),
 	}
 
 	// DefaultScrapeConfig is the default scrape configuration.

@@ -99,13 +97,13 @@ var (
 	// DefaultDNSSDConfig is the default DNS SD configuration.
 	DefaultDNSSDConfig = DNSSDConfig{
-		RefreshInterval: Duration(30 * time.Second),
+		RefreshInterval: model.Duration(30 * time.Second),
 		Type:            "SRV",
 	}
 
 	// DefaultFileSDConfig is the default file SD configuration.
 	DefaultFileSDConfig = FileSDConfig{
-		RefreshInterval: Duration(5 * time.Minute),
+		RefreshInterval: model.Duration(5 * time.Minute),
 	}
 
 	// DefaultConsulSDConfig is the default Consul SD configuration.

@@ -116,30 +114,30 @@ var (
 	// DefaultServersetSDConfig is the default Serverset SD configuration.
 	DefaultServersetSDConfig = ServersetSDConfig{
-		Timeout: Duration(10 * time.Second),
+		Timeout: model.Duration(10 * time.Second),
 	}
 
 	// DefaultNerveSDConfig is the default Nerve SD configuration.
 	DefaultNerveSDConfig = NerveSDConfig{
-		Timeout: Duration(10 * time.Second),
+		Timeout: model.Duration(10 * time.Second),
 	}
 
 	// DefaultMarathonSDConfig is the default Marathon SD configuration.
 	DefaultMarathonSDConfig = MarathonSDConfig{
-		RefreshInterval: Duration(30 * time.Second),
+		RefreshInterval: model.Duration(30 * time.Second),
 	}
 
 	// DefaultKubernetesSDConfig is the default Kubernetes SD configuration
 	DefaultKubernetesSDConfig = KubernetesSDConfig{
 		KubeletPort:    10255,
-		RequestTimeout: Duration(10 * time.Second),
-		RetryInterval:  Duration(1 * time.Second),
+		RequestTimeout: model.Duration(10 * time.Second),
+		RetryInterval:  model.Duration(1 * time.Second),
 	}
 
 	// DefaultEC2SDConfig is the default EC2 SD configuration.
 	DefaultEC2SDConfig = EC2SDConfig{
 		Port:            80,
-		RefreshInterval: Duration(60 * time.Second),
+		RefreshInterval: model.Duration(60 * time.Second),
 	}
 )

@@ -281,11 +279,11 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
 // objects.
 type GlobalConfig struct {
 	// How frequently to scrape targets by default.
-	ScrapeInterval Duration `yaml:"scrape_interval,omitempty"`
+	ScrapeInterval model.Duration `yaml:"scrape_interval,omitempty"`
 	// The default timeout when scraping targets.
-	ScrapeTimeout Duration `yaml:"scrape_timeout,omitempty"`
+	ScrapeTimeout model.Duration `yaml:"scrape_timeout,omitempty"`
 	// How frequently to evaluate rules by default.
-	EvaluationInterval Duration `yaml:"evaluation_interval,omitempty"`
+	EvaluationInterval model.Duration `yaml:"evaluation_interval,omitempty"`
 	// The labels to add to any timeseries that this Prometheus instance scrapes.
 	ExternalLabels model.LabelSet `yaml:"external_labels,omitempty"`

@@ -344,9 +342,9 @@ type ScrapeConfig struct {
 	// A set of query parameters with which the target is scraped.
 	Params url.Values `yaml:"params,omitempty"`
 	// How frequently to scrape the targets of this scrape config.
-	ScrapeInterval Duration `yaml:"scrape_interval,omitempty"`
+	ScrapeInterval model.Duration `yaml:"scrape_interval,omitempty"`
 	// The timeout for scraping targets of this config.
-	ScrapeTimeout Duration `yaml:"scrape_timeout,omitempty"`
+	ScrapeTimeout model.Duration `yaml:"scrape_timeout,omitempty"`
 	// The HTTP resource path on which to fetch metrics from targets.
 	MetricsPath string `yaml:"metrics_path,omitempty"`
 	// The URL scheme with which to fetch metrics from targets.

@@ -532,10 +530,10 @@ func (tg *TargetGroup) UnmarshalJSON(b []byte) error {
 // DNSSDConfig is the configuration for DNS based service discovery.
 type DNSSDConfig struct {
-	Names           []string `yaml:"names"`
-	RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
-	Type            string   `yaml:"type"`
-	Port            int      `yaml:"port"` // Ignored for SRV records
+	Names           []string       `yaml:"names"`
+	RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
+	Type            string         `yaml:"type"`
+	Port            int            `yaml:"port"` // Ignored for SRV records
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`
 }

@@ -565,8 +563,8 @@ func (c *DNSSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 // FileSDConfig is the configuration for file based discovery.
 type FileSDConfig struct {
-	Names           []string `yaml:"names"`
-	RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
+	Names           []string       `yaml:"names"`
+	RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
 
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`

@@ -624,9 +622,9 @@ func (c *ConsulSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error
 // ServersetSDConfig is the configuration for Twitter serversets in Zookeeper based discovery.
 type ServersetSDConfig struct {
-	Servers []string `yaml:"servers"`
-	Paths   []string `yaml:"paths"`
-	Timeout Duration `yaml:"timeout,omitempty"`
+	Servers []string       `yaml:"servers"`
+	Paths   []string       `yaml:"paths"`
+	Timeout model.Duration `yaml:"timeout,omitempty"`
 
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`

@@ -656,9 +654,9 @@ func (c *ServersetSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) err
 // NerveSDConfig is the configuration for AirBnB's Nerve in Zookeeper based discovery.
 type NerveSDConfig struct {
-	Servers []string `yaml:"servers"`
-	Paths   []string `yaml:"paths"`
-	Timeout Duration `yaml:"timeout,omitempty"`
+	Servers []string       `yaml:"servers"`
+	Paths   []string       `yaml:"paths"`
+	Timeout model.Duration `yaml:"timeout,omitempty"`
 
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`

@@ -688,8 +686,8 @@ func (c *NerveSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 // MarathonSDConfig is the configuration for services running on Marathon.
 type MarathonSDConfig struct {
-	Servers         []string `yaml:"servers,omitempty"`
-	RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
+	Servers         []string       `yaml:"servers,omitempty"`
+	RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
 
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`

@@ -712,15 +710,15 @@ func (c *MarathonSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) erro
 // KubernetesSDConfig is the configuration for Kubernetes service discovery.
 type KubernetesSDConfig struct {
-	APIServers      []URL      `yaml:"api_servers"`
-	KubeletPort     int        `yaml:"kubelet_port,omitempty"`
-	InCluster       bool       `yaml:"in_cluster,omitempty"`
-	BasicAuth       *BasicAuth `yaml:"basic_auth,omitempty"`
-	BearerToken     string     `yaml:"bearer_token,omitempty"`
-	BearerTokenFile string     `yaml:"bearer_token_file,omitempty"`
-	RetryInterval   Duration   `yaml:"retry_interval,omitempty"`
-	RequestTimeout  Duration   `yaml:"request_timeout,omitempty"`
-	TLSConfig       TLSConfig  `yaml:"tls_config,omitempty"`
+	APIServers      []URL          `yaml:"api_servers"`
+	KubeletPort     int            `yaml:"kubelet_port,omitempty"`
+	InCluster       bool           `yaml:"in_cluster,omitempty"`
+	BasicAuth       *BasicAuth     `yaml:"basic_auth,omitempty"`
+	BearerToken     string         `yaml:"bearer_token,omitempty"`
+	BearerTokenFile string         `yaml:"bearer_token_file,omitempty"`
+	RetryInterval   model.Duration `yaml:"retry_interval,omitempty"`
+	RequestTimeout  model.Duration `yaml:"request_timeout,omitempty"`
+	TLSConfig       TLSConfig      `yaml:"tls_config,omitempty"`
 
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`

@@ -749,11 +747,11 @@ func (c *KubernetesSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) er
 // EC2SDConfig is the configuration for EC2 based service discovery.
 type EC2SDConfig struct {
-	Region          string   `yaml:"region"`
-	AccessKey       string   `yaml:"access_key,omitempty"`
-	SecretKey       string   `yaml:"secret_key,omitempty"`
-	RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
-	Port            int      `yaml:"port"`
+	Region          string         `yaml:"region"`
+	AccessKey       string         `yaml:"access_key,omitempty"`
+	SecretKey       string         `yaml:"secret_key,omitempty"`
+	RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
+	Port            int            `yaml:"port"`
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`
 }

@@ -883,28 +881,3 @@ func (re Regexp) MarshalYAML() (interface{}, error) {
 	}
 	return nil, nil
 }
-
-// Duration encapsulates a time.Duration and makes it YAML marshallable.
-//
-// TODO(fabxc): Since we have custom types for most things, including timestamps,
-// we might want to move this into our model as well, eventually.
-type Duration time.Duration
-
-// UnmarshalYAML implements the yaml.Unmarshaler interface.
-func (d *Duration) UnmarshalYAML(unmarshal func(interface{}) error) error {
-	var s string
-	if err := unmarshal(&s); err != nil {
-		return err
-	}
-	dur, err := strutil.StringToDuration(s)
-	if err != nil {
-		return err
-	}
-	*d = Duration(dur)
-	return nil
-}
-
-// MarshalYAML implements the yaml.Marshaler interface.
-func (d Duration) MarshalYAML() (interface{}, error) {
-	return strutil.DurationToString(time.Duration(d)), nil
-}
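The deleted config.Duration type above was only a thin wrapper around time.Duration. With it gone, call sites convert between model.Duration and time.Duration with plain casts, which is the pattern the test and promql changes below follow. A minimal sketch of that pattern (not part of the diff; the printed outputs assume the model package's single-unit duration formatting):

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	// model.Duration and time.Duration share the same underlying int64,
	// so converting between them is a plain cast in either direction.
	md := model.Duration(5 * time.Minute)
	td := time.Duration(md)

	fmt.Println(md) // 5m    (model's formatting)
	fmt.Println(td) // 5m0s  (standard library formatting)
}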
@@ -28,9 +28,9 @@ import (
 var expectedConf = &Config{
 	GlobalConfig: GlobalConfig{
-		ScrapeInterval:     Duration(15 * time.Second),
+		ScrapeInterval:     model.Duration(15 * time.Second),
 		ScrapeTimeout:      DefaultGlobalConfig.ScrapeTimeout,
-		EvaluationInterval: Duration(30 * time.Second),
+		EvaluationInterval: model.Duration(30 * time.Second),
 
 		ExternalLabels: model.LabelSet{
 			"monitor": "codelab",

@@ -49,7 +49,7 @@ var expectedConf = &Config{
 			JobName: "prometheus",
 
 			HonorLabels:    true,
-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout:  DefaultGlobalConfig.ScrapeTimeout,
 
 			MetricsPath: DefaultScrapeConfig.MetricsPath,

@@ -73,11 +73,11 @@ var expectedConf = &Config{
 			FileSDConfigs: []*FileSDConfig{
 				{
 					Names:           []string{"foo/*.slow.json", "foo/*.slow.yml", "single/file.yml"},
-					RefreshInterval: Duration(10 * time.Minute),
+					RefreshInterval: model.Duration(10 * time.Minute),
 				},
 				{
 					Names:           []string{"bar/*.yaml"},
-					RefreshInterval: Duration(5 * time.Minute),
+					RefreshInterval: model.Duration(5 * time.Minute),
 				},
 			},

@@ -108,8 +108,8 @@ var expectedConf = &Config{
 		{
 			JobName: "service-x",
 
-			ScrapeInterval: Duration(50 * time.Second),
-			ScrapeTimeout:  Duration(5 * time.Second),
+			ScrapeInterval: model.Duration(50 * time.Second),
+			ScrapeTimeout:  model.Duration(5 * time.Second),
 
 			BasicAuth: &BasicAuth{
 				Username: "admin_name",

@@ -124,14 +124,14 @@ var expectedConf = &Config{
 						"first.dns.address.domain.com",
 						"second.dns.address.domain.com",
 					},
-					RefreshInterval: Duration(15 * time.Second),
+					RefreshInterval: model.Duration(15 * time.Second),
 					Type:            "SRV",
 				},
 				{
 					Names: []string{
 						"first.dns.address.domain.com",
 					},
-					RefreshInterval: Duration(30 * time.Second),
+					RefreshInterval: model.Duration(30 * time.Second),
 					Type:            "SRV",
 				},
 			},

@@ -180,7 +180,7 @@ var expectedConf = &Config{
 		{
 			JobName: "service-y",
 
-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout:  DefaultGlobalConfig.ScrapeTimeout,
 
 			MetricsPath: DefaultScrapeConfig.MetricsPath,

@@ -198,8 +198,8 @@ var expectedConf = &Config{
 		{
 			JobName: "service-z",
 
-			ScrapeInterval: Duration(15 * time.Second),
-			ScrapeTimeout:  Duration(10 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
+			ScrapeTimeout:  model.Duration(10 * time.Second),
 
 			MetricsPath: "/metrics",
 			Scheme:      "http",

@@ -214,7 +214,7 @@ var expectedConf = &Config{
 		{
 			JobName: "service-kubernetes",
 
-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout:  DefaultGlobalConfig.ScrapeTimeout,
 
 			MetricsPath: DefaultScrapeConfig.MetricsPath,

@@ -228,15 +228,15 @@ var expectedConf = &Config{
 						Password: "mypassword",
 					},
 					KubeletPort:    10255,
-					RequestTimeout: Duration(10 * time.Second),
-					RetryInterval:  Duration(1 * time.Second),
+					RequestTimeout: model.Duration(10 * time.Second),
+					RetryInterval:  model.Duration(1 * time.Second),
 				},
 			},
 		},
 		{
 			JobName: "service-marathon",
 
-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout:  DefaultGlobalConfig.ScrapeTimeout,
 
 			MetricsPath: DefaultScrapeConfig.MetricsPath,

@@ -247,14 +247,14 @@ var expectedConf = &Config{
 					Servers: []string{
 						"http://marathon.example.com:8080",
 					},
-					RefreshInterval: Duration(30 * time.Second),
+					RefreshInterval: model.Duration(30 * time.Second),
 				},
 			},
 		},
 		{
 			JobName: "service-ec2",
 
-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout:  DefaultGlobalConfig.ScrapeTimeout,
 
 			MetricsPath: DefaultScrapeConfig.MetricsPath,

@@ -265,7 +265,7 @@ var expectedConf = &Config{
 					Region:          "us-east-1",
 					AccessKey:       "access",
 					SecretKey:       "secret",
-					RefreshInterval: Duration(60 * time.Second),
+					RefreshInterval: model.Duration(60 * time.Second),
 					Port:            80,
 				},
 			},

@@ -273,7 +273,7 @@ var expectedConf = &Config{
 		{
 			JobName: "service-nerve",
 
-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout:  DefaultGlobalConfig.ScrapeTimeout,
 
 			MetricsPath: DefaultScrapeConfig.MetricsPath,

@@ -283,7 +283,7 @@ var expectedConf = &Config{
 				{
 					Servers: []string{"localhost"},
 					Paths:   []string{"/monitoring"},
-					Timeout: Duration(10 * time.Second),
+					Timeout: model.Duration(10 * time.Second),
 				},
 			},
 		},
@@ -1140,12 +1140,12 @@ func (p *parser) unquoteString(s string) string {
 }
 
 func parseDuration(ds string) (time.Duration, error) {
-	dur, err := strutil.StringToDuration(ds)
+	dur, err := model.ParseDuration(ds)
 	if err != nil {
 		return 0, err
 	}
 	if dur == 0 {
 		return 0, fmt.Errorf("duration must be greater than 0")
 	}
-	return dur, nil
+	return time.Duration(dur), nil
 }
@@ -22,7 +22,6 @@ import (
 	"github.com/prometheus/common/model"
 
 	"github.com/prometheus/prometheus/storage/metric"
-	"github.com/prometheus/prometheus/util/strutil"
 )
 
 // Tree returns a string of the tree structure of the given node.

@@ -104,7 +103,7 @@ func (node *AlertStmt) String() string {
 	s := fmt.Sprintf("ALERT %s", node.Name)
 	s += fmt.Sprintf("\n\tIF %s", node.Expr)
 	if node.Duration > 0 {
-		s += fmt.Sprintf("\n\tFOR %s", strutil.DurationToString(node.Duration))
+		s += fmt.Sprintf("\n\tFOR %s", model.Duration(node.Duration))
 	}
 	if len(node.Labels) > 0 {
 		s += fmt.Sprintf("\n\tLABELS %s", node.Labels)

@@ -178,9 +177,9 @@ func (node *MatrixSelector) String() string {
 	}
 	offset := ""
 	if node.Offset != time.Duration(0) {
-		offset = fmt.Sprintf(" OFFSET %s", strutil.DurationToString(node.Offset))
+		offset = fmt.Sprintf(" OFFSET %s", model.Duration(node.Offset))
 	}
-	return fmt.Sprintf("%s[%s]%s", vecSelector.String(), strutil.DurationToString(node.Range), offset)
+	return fmt.Sprintf("%s[%s]%s", vecSelector.String(), model.Duration(node.Range), offset)
 }
 
 func (node *NumberLiteral) String() string {

@@ -210,7 +209,7 @@ func (node *VectorSelector) String() string {
 	}
 	offset := ""
 	if node.Offset != time.Duration(0) {
-		offset = fmt.Sprintf(" OFFSET %s", strutil.DurationToString(node.Offset))
+		offset = fmt.Sprintf(" OFFSET %s", model.Duration(node.Offset))
 	}
 
 	if len(labelStrings) == 0 {
@@ -26,7 +26,6 @@ import (
 	"github.com/prometheus/prometheus/storage"
 	"github.com/prometheus/prometheus/storage/local"
-	"github.com/prometheus/prometheus/util/strutil"
 	"github.com/prometheus/prometheus/util/testutil"
 )
 

@@ -98,11 +97,11 @@ func (t *Test) parseLoad(lines []string, i int) (int, *loadCmd, error) {
 	}
 	parts := patLoad.FindStringSubmatch(lines[i])
 
-	gap, err := strutil.StringToDuration(parts[1])
+	gap, err := model.ParseDuration(parts[1])
 	if err != nil {
 		return i, nil, raise(i, "invalid step definition %q: %s", parts[1], err)
 	}
-	cmd := newLoadCmd(gap)
+	cmd := newLoadCmd(time.Duration(gap))
 	for i+1 < len(lines) {
 		i++
 		defLine := lines[i]

@@ -141,11 +140,11 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
 		return i, nil, err
 	}
 
-	offset, err := strutil.StringToDuration(at)
+	offset, err := model.ParseDuration(at)
 	if err != nil {
 		return i, nil, raise(i, "invalid step definition %q: %s", parts[1], err)
 	}
-	ts := testStartTime.Add(offset)
+	ts := testStartTime.Add(time.Duration(offset))
 
 	cmd := newEvalCmd(expr, ts, ts, 0)
 	switch mod {
@@ -7,6 +7,8 @@ import (
 	"testing"
 	"time"
 
+	"github.com/prometheus/common/model"
+
 	"github.com/prometheus/prometheus/config"
 )

@@ -22,7 +24,7 @@ func testFileSD(t *testing.T, ext string) {
 	// whether file watches work as expected.
 	var conf config.FileSDConfig
 	conf.Names = []string{"fixtures/_*" + ext}
-	conf.RefreshInterval = config.Duration(1 * time.Hour)
+	conf.RefreshInterval = model.Duration(1 * time.Hour)
 
 	var (
 		fsd = NewFileDiscovery(&conf)
@@ -420,8 +420,8 @@ func TestURLParams(t *testing.T) {
 	target := NewTarget(
 		&config.ScrapeConfig{
 			JobName:        "test_job1",
-			ScrapeInterval: config.Duration(1 * time.Minute),
-			ScrapeTimeout:  config.Duration(1 * time.Second),
+			ScrapeInterval: model.Duration(1 * time.Minute),
+			ScrapeTimeout:  model.Duration(1 * time.Second),
 			Scheme:         serverURL.Scheme,
 			Params: url.Values{
 				"foo": []string{"bar", "baz"},

@@ -441,7 +441,7 @@ func TestURLParams(t *testing.T) {
 
 func newTestTarget(targetURL string, deadline time.Duration, baseLabels model.LabelSet) *Target {
 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout: config.Duration(deadline),
+		ScrapeTimeout: model.Duration(deadline),
 	}
 	c, _ := newHTTPClient(cfg)
 	t := &Target{

@@ -481,7 +481,7 @@ func TestNewHTTPBearerToken(t *testing.T) {
 	defer server.Close()
 
 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout: config.Duration(1 * time.Second),
+		ScrapeTimeout: model.Duration(1 * time.Second),
 		BearerToken:   "1234",
 	}
 	c, err := newHTTPClient(cfg)

@@ -509,7 +509,7 @@ func TestNewHTTPBearerTokenFile(t *testing.T) {
 	defer server.Close()
 
 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout:   config.Duration(1 * time.Second),
+		ScrapeTimeout:   model.Duration(1 * time.Second),
 		BearerTokenFile: "testdata/bearertoken.txt",
 	}
 	c, err := newHTTPClient(cfg)

@@ -536,7 +536,7 @@ func TestNewHTTPBasicAuth(t *testing.T) {
 	defer server.Close()
 
 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout: config.Duration(1 * time.Second),
+		ScrapeTimeout: model.Duration(1 * time.Second),
 		BasicAuth: &config.BasicAuth{
 			Username: "user",
 			Password: "password123",

@@ -566,7 +566,7 @@ func TestNewHTTPCACert(t *testing.T) {
 	defer server.Close()
 
 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout: config.Duration(1 * time.Second),
+		ScrapeTimeout: model.Duration(1 * time.Second),
 		TLSConfig: config.TLSConfig{
 			CAFile: "testdata/ca.cer",
 		},

@@ -599,7 +599,7 @@ func TestNewHTTPClientCert(t *testing.T) {
 	defer server.Close()
 
 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout: config.Duration(1 * time.Second),
+		ScrapeTimeout: model.Duration(1 * time.Second),
 		TLSConfig: config.TLSConfig{
 			CAFile:   "testdata/ca.cer",
 			CertFile: "testdata/client.cer",
@@ -75,7 +75,7 @@ func TestPrefixedTargetProvider(t *testing.T) {
 func TestTargetManagerChan(t *testing.T) {
 	testJob1 := &config.ScrapeConfig{
 		JobName:        "test_job1",
-		ScrapeInterval: config.Duration(1 * time.Minute),
+		ScrapeInterval: model.Duration(1 * time.Minute),
 		TargetGroups: []*config.TargetGroup{{
 			Targets: []model.LabelSet{
 				{model.AddressLabel: "example.org:80"},

@@ -204,7 +204,7 @@ func TestTargetManagerChan(t *testing.T) {
 func TestTargetManagerConfigUpdate(t *testing.T) {
 	testJob1 := &config.ScrapeConfig{
 		JobName:        "test_job1",
-		ScrapeInterval: config.Duration(1 * time.Minute),
+		ScrapeInterval: model.Duration(1 * time.Minute),
 		Params: url.Values{
 			"testParam": []string{"paramValue", "secondValue"},
 		},

@@ -234,7 +234,7 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
 	}
 	testJob2 := &config.ScrapeConfig{
 		JobName:        "test_job2",
-		ScrapeInterval: config.Duration(1 * time.Minute),
+		ScrapeInterval: model.Duration(1 * time.Minute),
 		TargetGroups: []*config.TargetGroup{
 			{
 				Targets: []model.LabelSet{

@@ -288,7 +288,7 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
 	// Test that targets without host:port addresses are dropped.
 	testJob3 := &config.ScrapeConfig{
 		JobName:        "test_job1",
-		ScrapeInterval: config.Duration(1 * time.Minute),
+		ScrapeInterval: model.Duration(1 * time.Minute),
 		TargetGroups: []*config.TargetGroup{{
 			Targets: []model.LabelSet{
 				{model.AddressLabel: "example.net:80"},
@@ -58,7 +58,7 @@ func (s AlertState) String() string {
 	case StateFiring:
 		return "firing"
 	}
-	panic(fmt.Errorf("unknown alert state: %v", s))
+	panic(fmt.Errorf("unknown alert state: %v", s.String()))
 }
 
 // Alert is the user-level representation of a single instance of an alerting rule.

@@ -255,7 +255,7 @@ func (rule *AlertingRule) String() string {
 	s := fmt.Sprintf("ALERT %s", rule.name)
 	s += fmt.Sprintf("\n\tIF %s", rule.vector)
 	if rule.holdDuration > 0 {
-		s += fmt.Sprintf("\n\tFOR %s", strutil.DurationToString(rule.holdDuration))
+		s += fmt.Sprintf("\n\tFOR %s", model.Duration(rule.holdDuration))
 	}
 	if len(rule.labels) > 0 {
 		s += fmt.Sprintf("\n\tLABELS %s", rule.labels)

@@ -277,7 +277,7 @@ func (rule *AlertingRule) HTMLSnippet(pathPrefix string) template.HTML {
 	s := fmt.Sprintf("ALERT <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(alertMetric.String()), rule.name)
 	s += fmt.Sprintf("\n IF <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(rule.vector.String()), rule.vector)
 	if rule.holdDuration > 0 {
-		s += fmt.Sprintf("\n FOR %s", strutil.DurationToString(rule.holdDuration))
+		s += fmt.Sprintf("\n FOR %s", model.Duration(rule.holdDuration))
 	}
 	if len(rule.labels) > 0 {
 		s += fmt.Sprintf("\n LABELS %s", rule.labels)
@@ -17,75 +17,13 @@ import (
 	"fmt"
 	"net/url"
 	"regexp"
-	"strconv"
 	"strings"
-	"time"
 )
 
 var (
-	durationRE         = regexp.MustCompile("^([0-9]+)([ywdhms]+)$")
 	invalidLabelCharRE = regexp.MustCompile(`[^a-zA-Z0-9_]`)
 )
 
-// DurationToString formats a time.Duration as a string with the assumption that
-// a year always has 365 days and a day always has 24h. (The former doesn't work
-// in leap years, the latter is broken by DST switches, not to speak about leap
-// seconds, but those are not even treated properly by the duration strings in
-// the standard library.)
-func DurationToString(duration time.Duration) string {
-	seconds := int64(duration / time.Second)
-	factors := map[string]int64{
-		"y": 60 * 60 * 24 * 365,
-		"d": 60 * 60 * 24,
-		"h": 60 * 60,
-		"m": 60,
-		"s": 1,
-	}
-	unit := "s"
-	switch int64(0) {
-	case seconds % factors["y"]:
-		unit = "y"
-	case seconds % factors["d"]:
-		unit = "d"
-	case seconds % factors["h"]:
-		unit = "h"
-	case seconds % factors["m"]:
-		unit = "m"
-	}
-	return fmt.Sprintf("%v%v", seconds/factors[unit], unit)
-}
-
-// StringToDuration parses a string into a time.Duration, assuming that a year
-// always has 365d, a week 7d, a day 24h. See DurationToString for problems with
-// that.
-func StringToDuration(durationStr string) (duration time.Duration, err error) {
-	matches := durationRE.FindStringSubmatch(durationStr)
-	if len(matches) != 3 {
-		err = fmt.Errorf("not a valid duration string: %q", durationStr)
-		return
-	}
-	durationSeconds, _ := strconv.Atoi(matches[1])
-	duration = time.Duration(durationSeconds) * time.Second
-	unit := matches[2]
-	switch unit {
-	case "y":
-		duration *= 60 * 60 * 24 * 365
-	case "w":
-		duration *= 60 * 60 * 24 * 7
-	case "d":
-		duration *= 60 * 60 * 24
-	case "h":
-		duration *= 60 * 60
-	case "m":
-		duration *= 60
-	case "s":
-		duration *= 1
-	default:
-		return 0, fmt.Errorf("invalid time unit in duration string: %q", unit)
-	}
-	return
-}
-
 // TableLinkForExpression creates an escaped relative link to the table view of
 // the provided expression.
 func TableLinkForExpression(expr string) string {
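The two helpers removed above survive, in spirit, as model.ParseDuration and model.Duration's String method in github.com/prometheus/common, which the rest of this commit calls instead. A hedged sketch of the replacement calls (assuming that package keeps the same single-unit string format, e.g. "2h"):

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	// Replaces strutil.StringToDuration: parse "2h" into a duration value.
	d, err := model.ParseDuration("2h")
	if err != nil {
		panic(err)
	}
	fmt.Println(time.Duration(d)) // 2h0m0s

	// Replaces strutil.DurationToString: format a duration back to "2h".
	fmt.Println(model.Duration(2 * time.Hour).String()) // 2h
}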
@@ -18,7 +18,6 @@ import (
 	"github.com/prometheus/prometheus/storage/local"
 	"github.com/prometheus/prometheus/storage/metric"
 	"github.com/prometheus/prometheus/util/httputil"
-	"github.com/prometheus/prometheus/util/strutil"
 )
 
 type status string

@@ -324,8 +323,8 @@ func parseDuration(s string) (time.Duration, error) {
 	if d, err := strconv.ParseFloat(s, 64); err == nil {
 		return time.Duration(d * float64(time.Second)), nil
 	}
-	if d, err := strutil.StringToDuration(s); err == nil {
-		return d, nil
+	if d, err := model.ParseDuration(s); err == nil {
+		return time.Duration(d), nil
 	}
 	return 0, fmt.Errorf("cannot parse %q to a valid duration", s)
 }