Unify duration parsing

Duration parsing actually happens in several places (and for flags, we
use the standard Go time.Duration format). This change at least reduces
all our home-grown parsing to one place (in model).
beorn7 2016-01-29 15:23:11 +01:00
parent cfc15cf103
commit a7408bfb47
11 changed files with 95 additions and 185 deletions
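
For context, here is a minimal sketch of the shared entry point in github.com/prometheus/common/model that the changed files migrate to. The input string is illustrative; the conversion back to time.Duration mirrors what the hunks below do.

```go
package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	// model.ParseDuration understands the Prometheus notation ("30s", "5m", "1h", ...).
	d, err := model.ParseDuration("5m")
	if err != nil {
		panic(err)
	}

	// model.Duration is a time.Duration underneath, so callers that need the
	// standard library type convert explicitly, as the changed code does.
	fmt.Println(time.Duration(d)) // 5m0s (Go's own formatting)
	fmt.Println(d)                // 5m   (Prometheus notation via String)
}
```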

View file

@@ -25,8 +25,6 @@ import (
 	"github.com/prometheus/common/model"
 	"gopkg.in/yaml.v2"
-
-	"github.com/prometheus/prometheus/util/strutil"
 )

 var (
@@ -75,9 +73,9 @@ var (
 	// DefaultGlobalConfig is the default global configuration.
 	DefaultGlobalConfig = GlobalConfig{
-		ScrapeInterval: Duration(1 * time.Minute),
-		ScrapeTimeout: Duration(10 * time.Second),
-		EvaluationInterval: Duration(1 * time.Minute),
+		ScrapeInterval: model.Duration(1 * time.Minute),
+		ScrapeTimeout: model.Duration(10 * time.Second),
+		EvaluationInterval: model.Duration(1 * time.Minute),
 	}

 	// DefaultScrapeConfig is the default scrape configuration.
@@ -99,13 +97,13 @@ var (
 	// DefaultDNSSDConfig is the default DNS SD configuration.
 	DefaultDNSSDConfig = DNSSDConfig{
-		RefreshInterval: Duration(30 * time.Second),
+		RefreshInterval: model.Duration(30 * time.Second),
 		Type: "SRV",
 	}

 	// DefaultFileSDConfig is the default file SD configuration.
 	DefaultFileSDConfig = FileSDConfig{
-		RefreshInterval: Duration(5 * time.Minute),
+		RefreshInterval: model.Duration(5 * time.Minute),
 	}

 	// DefaultConsulSDConfig is the default Consul SD configuration.
@@ -116,30 +114,30 @@ var (
 	// DefaultServersetSDConfig is the default Serverset SD configuration.
 	DefaultServersetSDConfig = ServersetSDConfig{
-		Timeout: Duration(10 * time.Second),
+		Timeout: model.Duration(10 * time.Second),
 	}

 	// DefaultNerveSDConfig is the default Nerve SD configuration.
 	DefaultNerveSDConfig = NerveSDConfig{
-		Timeout: Duration(10 * time.Second),
+		Timeout: model.Duration(10 * time.Second),
 	}

 	// DefaultMarathonSDConfig is the default Marathon SD configuration.
 	DefaultMarathonSDConfig = MarathonSDConfig{
-		RefreshInterval: Duration(30 * time.Second),
+		RefreshInterval: model.Duration(30 * time.Second),
 	}

 	// DefaultKubernetesSDConfig is the default Kubernetes SD configuration
 	DefaultKubernetesSDConfig = KubernetesSDConfig{
 		KubeletPort: 10255,
-		RequestTimeout: Duration(10 * time.Second),
-		RetryInterval: Duration(1 * time.Second),
+		RequestTimeout: model.Duration(10 * time.Second),
+		RetryInterval: model.Duration(1 * time.Second),
 	}

 	// DefaultEC2SDConfig is the default EC2 SD configuration.
 	DefaultEC2SDConfig = EC2SDConfig{
 		Port: 80,
-		RefreshInterval: Duration(60 * time.Second),
+		RefreshInterval: model.Duration(60 * time.Second),
 	}
 )
@@ -281,11 +279,11 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
 // objects.
 type GlobalConfig struct {
 	// How frequently to scrape targets by default.
-	ScrapeInterval Duration `yaml:"scrape_interval,omitempty"`
+	ScrapeInterval model.Duration `yaml:"scrape_interval,omitempty"`
 	// The default timeout when scraping targets.
-	ScrapeTimeout Duration `yaml:"scrape_timeout,omitempty"`
+	ScrapeTimeout model.Duration `yaml:"scrape_timeout,omitempty"`
 	// How frequently to evaluate rules by default.
-	EvaluationInterval Duration `yaml:"evaluation_interval,omitempty"`
+	EvaluationInterval model.Duration `yaml:"evaluation_interval,omitempty"`
 	// The labels to add to any timeseries that this Prometheus instance scrapes.
 	ExternalLabels model.LabelSet `yaml:"external_labels,omitempty"`
@@ -344,9 +342,9 @@ type ScrapeConfig struct {
 	// A set of query parameters with which the target is scraped.
 	Params url.Values `yaml:"params,omitempty"`
 	// How frequently to scrape the targets of this scrape config.
-	ScrapeInterval Duration `yaml:"scrape_interval,omitempty"`
+	ScrapeInterval model.Duration `yaml:"scrape_interval,omitempty"`
 	// The timeout for scraping targets of this config.
-	ScrapeTimeout Duration `yaml:"scrape_timeout,omitempty"`
+	ScrapeTimeout model.Duration `yaml:"scrape_timeout,omitempty"`
 	// The HTTP resource path on which to fetch metrics from targets.
 	MetricsPath string `yaml:"metrics_path,omitempty"`
 	// The URL scheme with which to fetch metrics from targets.
@@ -532,10 +530,10 @@ func (tg *TargetGroup) UnmarshalJSON(b []byte) error {
 // DNSSDConfig is the configuration for DNS based service discovery.
 type DNSSDConfig struct {
 	Names []string `yaml:"names"`
-	RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
+	RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
 	Type string `yaml:"type"`
 	Port int `yaml:"port"` // Ignored for SRV records
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`
 }
@@ -565,8 +563,8 @@ func (c *DNSSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 // FileSDConfig is the configuration for file based discovery.
 type FileSDConfig struct {
 	Names []string `yaml:"names"`
-	RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
+	RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`
@@ -624,9 +622,9 @@ func (c *ConsulSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error
 // ServersetSDConfig is the configuration for Twitter serversets in Zookeeper based discovery.
 type ServersetSDConfig struct {
 	Servers []string `yaml:"servers"`
 	Paths []string `yaml:"paths"`
-	Timeout Duration `yaml:"timeout,omitempty"`
+	Timeout model.Duration `yaml:"timeout,omitempty"`
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`
@@ -656,9 +654,9 @@ func (c *ServersetSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) err
 // NerveSDConfig is the configuration for AirBnB's Nerve in Zookeeper based discovery.
 type NerveSDConfig struct {
 	Servers []string `yaml:"servers"`
 	Paths []string `yaml:"paths"`
-	Timeout Duration `yaml:"timeout,omitempty"`
+	Timeout model.Duration `yaml:"timeout,omitempty"`
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`
@@ -688,8 +686,8 @@ func (c *NerveSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 // MarathonSDConfig is the configuration for services running on Marathon.
 type MarathonSDConfig struct {
 	Servers []string `yaml:"servers,omitempty"`
-	RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
+	RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`
@@ -712,15 +710,15 @@ func (c *MarathonSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) erro
 // KubernetesSDConfig is the configuration for Kubernetes service discovery.
 type KubernetesSDConfig struct {
 	APIServers []URL `yaml:"api_servers"`
 	KubeletPort int `yaml:"kubelet_port,omitempty"`
 	InCluster bool `yaml:"in_cluster,omitempty"`
 	BasicAuth *BasicAuth `yaml:"basic_auth,omitempty"`
 	BearerToken string `yaml:"bearer_token,omitempty"`
 	BearerTokenFile string `yaml:"bearer_token_file,omitempty"`
-	RetryInterval Duration `yaml:"retry_interval,omitempty"`
-	RequestTimeout Duration `yaml:"request_timeout,omitempty"`
+	RetryInterval model.Duration `yaml:"retry_interval,omitempty"`
+	RequestTimeout model.Duration `yaml:"request_timeout,omitempty"`
 	TLSConfig TLSConfig `yaml:"tls_config,omitempty"`
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`
@@ -749,11 +747,11 @@ func (c *KubernetesSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) er
 // EC2SDConfig is the configuration for EC2 based service discovery.
 type EC2SDConfig struct {
 	Region string `yaml:"region"`
 	AccessKey string `yaml:"access_key,omitempty"`
 	SecretKey string `yaml:"secret_key,omitempty"`
-	RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
+	RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
 	Port int `yaml:"port"`
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`
 }
@@ -883,28 +881,3 @@ func (re Regexp) MarshalYAML() (interface{}, error) {
 	}
 	return nil, nil
 }
-
-// Duration encapsulates a time.Duration and makes it YAML marshallable.
-//
-// TODO(fabxc): Since we have custom types for most things, including timestamps,
-// we might want to move this into our model as well, eventually.
-type Duration time.Duration
-
-// UnmarshalYAML implements the yaml.Unmarshaler interface.
-func (d *Duration) UnmarshalYAML(unmarshal func(interface{}) error) error {
-	var s string
-	if err := unmarshal(&s); err != nil {
-		return err
-	}
-	dur, err := strutil.StringToDuration(s)
-	if err != nil {
-		return err
-	}
-	*d = Duration(dur)
-	return nil
-}
-
-// MarshalYAML implements the yaml.Marshaler interface.
-func (d Duration) MarshalYAML() (interface{}, error) {
-	return strutil.DurationToString(time.Duration(d)), nil
-}
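
With the package-local config.Duration wrapper and its YAML hooks removed above, fields declared as model.Duration rely on the (un)marshalling that model.Duration itself provides. A minimal sketch, where the cfg struct is a hypothetical stand-in for GlobalConfig:

```go
package main

import (
	"fmt"

	"github.com/prometheus/common/model"
	"gopkg.in/yaml.v2"
)

// cfg stands in for GlobalConfig: model.Duration implements yaml.Unmarshaler
// and yaml.Marshaler itself, so no local Duration type is needed anymore.
type cfg struct {
	ScrapeInterval model.Duration `yaml:"scrape_interval,omitempty"`
}

func main() {
	var c cfg
	if err := yaml.Unmarshal([]byte("scrape_interval: 15s\n"), &c); err != nil {
		panic(err)
	}
	fmt.Println(c.ScrapeInterval) // 15s

	out, _ := yaml.Marshal(c)
	fmt.Print(string(out)) // scrape_interval: 15s
}
```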

View file

@@ -28,9 +28,9 @@ import (
 var expectedConf = &Config{
 	GlobalConfig: GlobalConfig{
-		ScrapeInterval: Duration(15 * time.Second),
+		ScrapeInterval: model.Duration(15 * time.Second),
 		ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
-		EvaluationInterval: Duration(30 * time.Second),
+		EvaluationInterval: model.Duration(30 * time.Second),

 		ExternalLabels: model.LabelSet{
 			"monitor": "codelab",
@@ -49,7 +49,7 @@ var expectedConf = &Config{
 			JobName: "prometheus",

 			HonorLabels: true,
-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,

 			MetricsPath: DefaultScrapeConfig.MetricsPath,
@@ -73,11 +73,11 @@ var expectedConf = &Config{
 			FileSDConfigs: []*FileSDConfig{
 				{
 					Names: []string{"foo/*.slow.json", "foo/*.slow.yml", "single/file.yml"},
-					RefreshInterval: Duration(10 * time.Minute),
+					RefreshInterval: model.Duration(10 * time.Minute),
 				},
 				{
 					Names: []string{"bar/*.yaml"},
-					RefreshInterval: Duration(5 * time.Minute),
+					RefreshInterval: model.Duration(5 * time.Minute),
 				},
 			},
@@ -108,8 +108,8 @@ var expectedConf = &Config{
 		{
 			JobName: "service-x",

-			ScrapeInterval: Duration(50 * time.Second),
-			ScrapeTimeout: Duration(5 * time.Second),
+			ScrapeInterval: model.Duration(50 * time.Second),
+			ScrapeTimeout: model.Duration(5 * time.Second),

 			BasicAuth: &BasicAuth{
 				Username: "admin_name",
@@ -124,14 +124,14 @@ var expectedConf = &Config{
 						"first.dns.address.domain.com",
 						"second.dns.address.domain.com",
 					},
-					RefreshInterval: Duration(15 * time.Second),
+					RefreshInterval: model.Duration(15 * time.Second),
 					Type: "SRV",
 				},
 				{
 					Names: []string{
 						"first.dns.address.domain.com",
 					},
-					RefreshInterval: Duration(30 * time.Second),
+					RefreshInterval: model.Duration(30 * time.Second),
 					Type: "SRV",
 				},
 			},
@@ -180,7 +180,7 @@ var expectedConf = &Config{
 		{
 			JobName: "service-y",

-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,

 			MetricsPath: DefaultScrapeConfig.MetricsPath,
@@ -198,8 +198,8 @@ var expectedConf = &Config{
 		{
 			JobName: "service-z",

-			ScrapeInterval: Duration(15 * time.Second),
-			ScrapeTimeout: Duration(10 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
+			ScrapeTimeout: model.Duration(10 * time.Second),

 			MetricsPath: "/metrics",
 			Scheme: "http",
@@ -214,7 +214,7 @@ var expectedConf = &Config{
 		{
 			JobName: "service-kubernetes",

-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,

 			MetricsPath: DefaultScrapeConfig.MetricsPath,
@@ -228,15 +228,15 @@ var expectedConf = &Config{
 						Password: "mypassword",
 					},
 					KubeletPort: 10255,
-					RequestTimeout: Duration(10 * time.Second),
-					RetryInterval: Duration(1 * time.Second),
+					RequestTimeout: model.Duration(10 * time.Second),
+					RetryInterval: model.Duration(1 * time.Second),
 				},
 			},
 		},
 		{
 			JobName: "service-marathon",

-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,

 			MetricsPath: DefaultScrapeConfig.MetricsPath,
@@ -247,14 +247,14 @@ var expectedConf = &Config{
 					Servers: []string{
 						"http://marathon.example.com:8080",
 					},
-					RefreshInterval: Duration(30 * time.Second),
+					RefreshInterval: model.Duration(30 * time.Second),
 				},
 			},
 		},
 		{
 			JobName: "service-ec2",

-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,

 			MetricsPath: DefaultScrapeConfig.MetricsPath,
@@ -265,7 +265,7 @@ var expectedConf = &Config{
 					Region: "us-east-1",
 					AccessKey: "access",
 					SecretKey: "secret",
-					RefreshInterval: Duration(60 * time.Second),
+					RefreshInterval: model.Duration(60 * time.Second),
 					Port: 80,
 				},
 			},
@@ -273,7 +273,7 @@ var expectedConf = &Config{
 		{
 			JobName: "service-nerve",

-			ScrapeInterval: Duration(15 * time.Second),
+			ScrapeInterval: model.Duration(15 * time.Second),
 			ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,

 			MetricsPath: DefaultScrapeConfig.MetricsPath,
@@ -283,7 +283,7 @@ var expectedConf = &Config{
 				{
 					Servers: []string{"localhost"},
 					Paths: []string{"/monitoring"},
-					Timeout: Duration(10 * time.Second),
+					Timeout: model.Duration(10 * time.Second),
 				},
 			},
 		},

View file

@@ -1140,12 +1140,12 @@ func (p *parser) unquoteString(s string) string {
 }

 func parseDuration(ds string) (time.Duration, error) {
-	dur, err := strutil.StringToDuration(ds)
+	dur, err := model.ParseDuration(ds)
 	if err != nil {
 		return 0, err
 	}
 	if dur == 0 {
 		return 0, fmt.Errorf("duration must be greater than 0")
 	}
-	return dur, nil
+	return time.Duration(dur), nil
 }

View file

@@ -22,7 +22,6 @@ import (
 	"github.com/prometheus/common/model"
 	"github.com/prometheus/prometheus/storage/metric"
-	"github.com/prometheus/prometheus/util/strutil"
 )

 // Tree returns a string of the tree structure of the given node.
@@ -104,7 +103,7 @@ func (node *AlertStmt) String() string {
 	s := fmt.Sprintf("ALERT %s", node.Name)
 	s += fmt.Sprintf("\n\tIF %s", node.Expr)
 	if node.Duration > 0 {
-		s += fmt.Sprintf("\n\tFOR %s", strutil.DurationToString(node.Duration))
+		s += fmt.Sprintf("\n\tFOR %s", model.Duration(node.Duration))
 	}
 	if len(node.Labels) > 0 {
 		s += fmt.Sprintf("\n\tLABELS %s", node.Labels)
@@ -178,9 +177,9 @@ func (node *MatrixSelector) String() string {
 	}
 	offset := ""
 	if node.Offset != time.Duration(0) {
-		offset = fmt.Sprintf(" OFFSET %s", strutil.DurationToString(node.Offset))
+		offset = fmt.Sprintf(" OFFSET %s", model.Duration(node.Offset))
 	}
-	return fmt.Sprintf("%s[%s]%s", vecSelector.String(), strutil.DurationToString(node.Range), offset)
+	return fmt.Sprintf("%s[%s]%s", vecSelector.String(), model.Duration(node.Range), offset)
 }

 func (node *NumberLiteral) String() string {
@@ -210,7 +209,7 @@ func (node *VectorSelector) String() string {
 	}
 	offset := ""
 	if node.Offset != time.Duration(0) {
-		offset = fmt.Sprintf(" OFFSET %s", strutil.DurationToString(node.Offset))
+		offset = fmt.Sprintf(" OFFSET %s", model.Duration(node.Offset))
 	}
 	if len(labelStrings) == 0 {
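
The printer changes work because handing a model.Duration to a %s verb invokes its String method, which yields the compact Prometheus notation instead of Go's default duration formatting. A small illustration, with a made-up 5 minute offset:

```go
package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	offset := 5 * time.Minute

	// What the old strutil.DurationToString call and the new model.Duration
	// conversion both aim for: "OFFSET 5m" rather than "OFFSET 5m0s".
	fmt.Printf("OFFSET %s\n", model.Duration(offset))
	fmt.Printf("OFFSET %s\n", offset) // Go's own formatting, for comparison
}
```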

View file

@@ -26,7 +26,6 @@ import (
 	"github.com/prometheus/prometheus/storage"
 	"github.com/prometheus/prometheus/storage/local"
-	"github.com/prometheus/prometheus/util/strutil"
 	"github.com/prometheus/prometheus/util/testutil"
 )
@@ -98,11 +97,11 @@ func (t *Test) parseLoad(lines []string, i int) (int, *loadCmd, error) {
 	}
 	parts := patLoad.FindStringSubmatch(lines[i])

-	gap, err := strutil.StringToDuration(parts[1])
+	gap, err := model.ParseDuration(parts[1])
 	if err != nil {
 		return i, nil, raise(i, "invalid step definition %q: %s", parts[1], err)
 	}
-	cmd := newLoadCmd(gap)
+	cmd := newLoadCmd(time.Duration(gap))
 	for i+1 < len(lines) {
 		i++
 		defLine := lines[i]
@@ -141,11 +140,11 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
 		return i, nil, err
 	}

-	offset, err := strutil.StringToDuration(at)
+	offset, err := model.ParseDuration(at)
 	if err != nil {
 		return i, nil, raise(i, "invalid step definition %q: %s", parts[1], err)
 	}
-	ts := testStartTime.Add(offset)
+	ts := testStartTime.Add(time.Duration(offset))

 	cmd := newEvalCmd(expr, ts, ts, 0)
 	switch mod {

View file

@@ -7,6 +7,8 @@ import (
 	"testing"
 	"time"

+	"github.com/prometheus/common/model"
+
 	"github.com/prometheus/prometheus/config"
 )
@@ -22,7 +24,7 @@ func testFileSD(t *testing.T, ext string) {
 	// whether file watches work as expected.
 	var conf config.FileSDConfig
 	conf.Names = []string{"fixtures/_*" + ext}
-	conf.RefreshInterval = config.Duration(1 * time.Hour)
+	conf.RefreshInterval = model.Duration(1 * time.Hour)

 	var (
 		fsd = NewFileDiscovery(&conf)

View file

@@ -420,8 +420,8 @@ func TestURLParams(t *testing.T) {
 	target := NewTarget(
 		&config.ScrapeConfig{
 			JobName: "test_job1",
-			ScrapeInterval: config.Duration(1 * time.Minute),
-			ScrapeTimeout: config.Duration(1 * time.Second),
+			ScrapeInterval: model.Duration(1 * time.Minute),
+			ScrapeTimeout: model.Duration(1 * time.Second),
 			Scheme: serverURL.Scheme,
 			Params: url.Values{
 				"foo": []string{"bar", "baz"},
@@ -441,7 +441,7 @@ func TestURLParams(t *testing.T) {
 func newTestTarget(targetURL string, deadline time.Duration, baseLabels model.LabelSet) *Target {
 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout: config.Duration(deadline),
+		ScrapeTimeout: model.Duration(deadline),
 	}
 	c, _ := newHTTPClient(cfg)
 	t := &Target{
@@ -481,7 +481,7 @@ func TestNewHTTPBearerToken(t *testing.T) {
 	defer server.Close()

 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout: config.Duration(1 * time.Second),
+		ScrapeTimeout: model.Duration(1 * time.Second),
 		BearerToken: "1234",
 	}
 	c, err := newHTTPClient(cfg)
@@ -509,7 +509,7 @@ func TestNewHTTPBearerTokenFile(t *testing.T) {
 	defer server.Close()

 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout: config.Duration(1 * time.Second),
+		ScrapeTimeout: model.Duration(1 * time.Second),
 		BearerTokenFile: "testdata/bearertoken.txt",
 	}
 	c, err := newHTTPClient(cfg)
@@ -536,7 +536,7 @@ func TestNewHTTPBasicAuth(t *testing.T) {
 	defer server.Close()

 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout: config.Duration(1 * time.Second),
+		ScrapeTimeout: model.Duration(1 * time.Second),
 		BasicAuth: &config.BasicAuth{
 			Username: "user",
 			Password: "password123",
@@ -566,7 +566,7 @@ func TestNewHTTPCACert(t *testing.T) {
 	defer server.Close()

 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout: config.Duration(1 * time.Second),
+		ScrapeTimeout: model.Duration(1 * time.Second),
 		TLSConfig: config.TLSConfig{
 			CAFile: "testdata/ca.cer",
 		},
@@ -599,7 +599,7 @@ func TestNewHTTPClientCert(t *testing.T) {
 	defer server.Close()

 	cfg := &config.ScrapeConfig{
-		ScrapeTimeout: config.Duration(1 * time.Second),
+		ScrapeTimeout: model.Duration(1 * time.Second),
 		TLSConfig: config.TLSConfig{
 			CAFile: "testdata/ca.cer",
 			CertFile: "testdata/client.cer",

View file

@@ -75,7 +75,7 @@ func TestPrefixedTargetProvider(t *testing.T) {
 func TestTargetManagerChan(t *testing.T) {
 	testJob1 := &config.ScrapeConfig{
 		JobName: "test_job1",
-		ScrapeInterval: config.Duration(1 * time.Minute),
+		ScrapeInterval: model.Duration(1 * time.Minute),
 		TargetGroups: []*config.TargetGroup{{
 			Targets: []model.LabelSet{
 				{model.AddressLabel: "example.org:80"},
@@ -204,7 +204,7 @@ func TestTargetManagerChan(t *testing.T) {
 func TestTargetManagerConfigUpdate(t *testing.T) {
 	testJob1 := &config.ScrapeConfig{
 		JobName: "test_job1",
-		ScrapeInterval: config.Duration(1 * time.Minute),
+		ScrapeInterval: model.Duration(1 * time.Minute),
 		Params: url.Values{
 			"testParam": []string{"paramValue", "secondValue"},
 		},
@@ -234,7 +234,7 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
 	}
 	testJob2 := &config.ScrapeConfig{
 		JobName: "test_job2",
-		ScrapeInterval: config.Duration(1 * time.Minute),
+		ScrapeInterval: model.Duration(1 * time.Minute),
 		TargetGroups: []*config.TargetGroup{
 			{
 				Targets: []model.LabelSet{
@@ -288,7 +288,7 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
 	// Test that targets without host:port addresses are dropped.
 	testJob3 := &config.ScrapeConfig{
 		JobName: "test_job1",
-		ScrapeInterval: config.Duration(1 * time.Minute),
+		ScrapeInterval: model.Duration(1 * time.Minute),
 		TargetGroups: []*config.TargetGroup{{
 			Targets: []model.LabelSet{
 				{model.AddressLabel: "example.net:80"},

View file

@@ -58,7 +58,7 @@ func (s AlertState) String() string {
 	case StateFiring:
 		return "firing"
 	}
-	panic(fmt.Errorf("unknown alert state: %v", s))
+	panic(fmt.Errorf("unknown alert state: %v", s.String()))
 }

 // Alert is the user-level representation of a single instance of an alerting rule.
@@ -255,7 +255,7 @@ func (rule *AlertingRule) String() string {
 	s := fmt.Sprintf("ALERT %s", rule.name)
 	s += fmt.Sprintf("\n\tIF %s", rule.vector)
 	if rule.holdDuration > 0 {
-		s += fmt.Sprintf("\n\tFOR %s", strutil.DurationToString(rule.holdDuration))
+		s += fmt.Sprintf("\n\tFOR %s", model.Duration(rule.holdDuration))
 	}
 	if len(rule.labels) > 0 {
 		s += fmt.Sprintf("\n\tLABELS %s", rule.labels)
@@ -277,7 +277,7 @@ func (rule *AlertingRule) HTMLSnippet(pathPrefix string) template.HTML {
 	s := fmt.Sprintf("ALERT <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(alertMetric.String()), rule.name)
 	s += fmt.Sprintf("\n IF <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(rule.vector.String()), rule.vector)
 	if rule.holdDuration > 0 {
-		s += fmt.Sprintf("\n FOR %s", strutil.DurationToString(rule.holdDuration))
+		s += fmt.Sprintf("\n FOR %s", model.Duration(rule.holdDuration))
 	}
 	if len(rule.labels) > 0 {
 		s += fmt.Sprintf("\n LABELS %s", rule.labels)

View file

@@ -17,75 +17,13 @@ import (
 	"fmt"
 	"net/url"
 	"regexp"
-	"strconv"
 	"strings"
-	"time"
 )

 var (
-	durationRE         = regexp.MustCompile("^([0-9]+)([ywdhms]+)$")
 	invalidLabelCharRE = regexp.MustCompile(`[^a-zA-Z0-9_]`)
 )

-// DurationToString formats a time.Duration as a string with the assumption that
-// a year always has 365 days and a day always has 24h. (The former doesn't work
-// in leap years, the latter is broken by DST switches, not to speak about leap
-// seconds, but those are not even treated properly by the duration strings in
-// the standard library.)
-func DurationToString(duration time.Duration) string {
-	seconds := int64(duration / time.Second)
-	factors := map[string]int64{
-		"y": 60 * 60 * 24 * 365,
-		"d": 60 * 60 * 24,
-		"h": 60 * 60,
-		"m": 60,
-		"s": 1,
-	}
-	unit := "s"
-	switch int64(0) {
-	case seconds % factors["y"]:
-		unit = "y"
-	case seconds % factors["d"]:
-		unit = "d"
-	case seconds % factors["h"]:
-		unit = "h"
-	case seconds % factors["m"]:
-		unit = "m"
-	}
-	return fmt.Sprintf("%v%v", seconds/factors[unit], unit)
-}
-
-// StringToDuration parses a string into a time.Duration, assuming that a year
-// always has 365d, a week 7d, a day 24h. See DurationToString for problems with
-// that.
-func StringToDuration(durationStr string) (duration time.Duration, err error) {
-	matches := durationRE.FindStringSubmatch(durationStr)
-	if len(matches) != 3 {
-		err = fmt.Errorf("not a valid duration string: %q", durationStr)
-		return
-	}
-	durationSeconds, _ := strconv.Atoi(matches[1])
-	duration = time.Duration(durationSeconds) * time.Second
-	unit := matches[2]
-	switch unit {
-	case "y":
-		duration *= 60 * 60 * 24 * 365
-	case "w":
-		duration *= 60 * 60 * 24 * 7
-	case "d":
-		duration *= 60 * 60 * 24
-	case "h":
-		duration *= 60 * 60
-	case "m":
-		duration *= 60
-	case "s":
-		duration *= 1
-	default:
-		return 0, fmt.Errorf("invalid time unit in duration string: %q", unit)
-	}
-	return
-}
-
 // TableLinkForExpression creates an escaped relative link to the table view of
 // the provided expression.
 func TableLinkForExpression(expr string) string {
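
The deleted helpers fixed a year at 365 days, a week at 7 days and a day at 24 hours. Assuming the model package keeps the same single-unit notation (y, w, d, h, m, s), the replacements map roughly as sketched below; the inputs are illustrative:

```go
package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	// Replaces strutil.StringToDuration; "w" is assumed to still mean 7*24h.
	w, err := model.ParseDuration("1w")
	if err != nil {
		panic(err)
	}
	fmt.Println(time.Duration(w) == 7*24*time.Hour) // true under that assumption

	// Replaces strutil.DurationToString.
	fmt.Println(model.Duration(48 * time.Hour)) // 2d
}
```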

View file

@@ -18,7 +18,6 @@ import (
 	"github.com/prometheus/prometheus/storage/local"
 	"github.com/prometheus/prometheus/storage/metric"
 	"github.com/prometheus/prometheus/util/httputil"
-	"github.com/prometheus/prometheus/util/strutil"
 )

 type status string
@@ -324,8 +323,8 @@ func parseDuration(s string) (time.Duration, error) {
 	if d, err := strconv.ParseFloat(s, 64); err == nil {
 		return time.Duration(d * float64(time.Second)), nil
 	}
-	if d, err := strutil.StringToDuration(s); err == nil {
-		return d, nil
+	if d, err := model.ParseDuration(s); err == nil {
+		return time.Duration(d), nil
 	}
 	return 0, fmt.Errorf("cannot parse %q to a valid duration", s)
 }