Merge pull request #1357 from prometheus/beorn7/duration

Unify duration parsing
Björn Rabenstein 2016-01-29 19:19:25 +01:00
commit b555be85e2
13 changed files with 109 additions and 189 deletions
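The common thread of the diff below: the hand-rolled strutil.StringToDuration / strutil.DurationToString helpers and the YAML-only config.Duration wrapper are dropped in favour of model.Duration from github.com/prometheus/common/model. A minimal sketch of the new call pattern (the printed values assume the vendored ParseDuration/String behaviour shown further down):

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	// Parsing: previously strutil.StringToDuration("1h"), now model.ParseDuration.
	d, err := model.ParseDuration("1h")
	if err != nil {
		panic(err)
	}
	// model.Duration converts to time.Duration with a plain type conversion.
	fmt.Println(time.Duration(d)) // 1h0m0s

	// Formatting: previously strutil.DurationToString, now model.Duration's String method.
	fmt.Println(model.Duration(90 * time.Minute)) // 90m
}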


@ -25,8 +25,6 @@ import (
"github.com/prometheus/common/model"
"gopkg.in/yaml.v2"
"github.com/prometheus/prometheus/util/strutil"
)
var (
@ -75,9 +73,9 @@ var (
// DefaultGlobalConfig is the default global configuration.
DefaultGlobalConfig = GlobalConfig{
ScrapeInterval: Duration(1 * time.Minute),
ScrapeTimeout: Duration(10 * time.Second),
EvaluationInterval: Duration(1 * time.Minute),
ScrapeInterval: model.Duration(1 * time.Minute),
ScrapeTimeout: model.Duration(10 * time.Second),
EvaluationInterval: model.Duration(1 * time.Minute),
}
// DefaultScrapeConfig is the default scrape configuration.
@ -99,13 +97,13 @@ var (
// DefaultDNSSDConfig is the default DNS SD configuration.
DefaultDNSSDConfig = DNSSDConfig{
RefreshInterval: Duration(30 * time.Second),
RefreshInterval: model.Duration(30 * time.Second),
Type: "SRV",
}
// DefaultFileSDConfig is the default file SD configuration.
DefaultFileSDConfig = FileSDConfig{
RefreshInterval: Duration(5 * time.Minute),
RefreshInterval: model.Duration(5 * time.Minute),
}
// DefaultConsulSDConfig is the default Consul SD configuration.
@ -116,30 +114,30 @@ var (
// DefaultServersetSDConfig is the default Serverset SD configuration.
DefaultServersetSDConfig = ServersetSDConfig{
Timeout: Duration(10 * time.Second),
Timeout: model.Duration(10 * time.Second),
}
// DefaultNerveSDConfig is the default Nerve SD configuration.
DefaultNerveSDConfig = NerveSDConfig{
Timeout: Duration(10 * time.Second),
Timeout: model.Duration(10 * time.Second),
}
// DefaultMarathonSDConfig is the default Marathon SD configuration.
DefaultMarathonSDConfig = MarathonSDConfig{
RefreshInterval: Duration(30 * time.Second),
RefreshInterval: model.Duration(30 * time.Second),
}
// DefaultKubernetesSDConfig is the default Kubernetes SD configuration
DefaultKubernetesSDConfig = KubernetesSDConfig{
KubeletPort: 10255,
RequestTimeout: Duration(10 * time.Second),
RetryInterval: Duration(1 * time.Second),
RequestTimeout: model.Duration(10 * time.Second),
RetryInterval: model.Duration(1 * time.Second),
}
// DefaultEC2SDConfig is the default EC2 SD configuration.
DefaultEC2SDConfig = EC2SDConfig{
Port: 80,
RefreshInterval: Duration(60 * time.Second),
RefreshInterval: model.Duration(60 * time.Second),
}
)
@ -281,11 +279,11 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
// objects.
type GlobalConfig struct {
// How frequently to scrape targets by default.
ScrapeInterval Duration `yaml:"scrape_interval,omitempty"`
ScrapeInterval model.Duration `yaml:"scrape_interval,omitempty"`
// The default timeout when scraping targets.
ScrapeTimeout Duration `yaml:"scrape_timeout,omitempty"`
ScrapeTimeout model.Duration `yaml:"scrape_timeout,omitempty"`
// How frequently to evaluate rules by default.
EvaluationInterval Duration `yaml:"evaluation_interval,omitempty"`
EvaluationInterval model.Duration `yaml:"evaluation_interval,omitempty"`
// The labels to add to any timeseries that this Prometheus instance scrapes.
ExternalLabels model.LabelSet `yaml:"external_labels,omitempty"`
@ -344,9 +342,9 @@ type ScrapeConfig struct {
// A set of query parameters with which the target is scraped.
Params url.Values `yaml:"params,omitempty"`
// How frequently to scrape the targets of this scrape config.
ScrapeInterval Duration `yaml:"scrape_interval,omitempty"`
ScrapeInterval model.Duration `yaml:"scrape_interval,omitempty"`
// The timeout for scraping targets of this config.
ScrapeTimeout Duration `yaml:"scrape_timeout,omitempty"`
ScrapeTimeout model.Duration `yaml:"scrape_timeout,omitempty"`
// The HTTP resource path on which to fetch metrics from targets.
MetricsPath string `yaml:"metrics_path,omitempty"`
// The URL scheme with which to fetch metrics from targets.
@ -532,10 +530,10 @@ func (tg *TargetGroup) UnmarshalJSON(b []byte) error {
// DNSSDConfig is the configuration for DNS based service discovery.
type DNSSDConfig struct {
Names []string `yaml:"names"`
RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
Type string `yaml:"type"`
Port int `yaml:"port"` // Ignored for SRV records
Names []string `yaml:"names"`
RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
Type string `yaml:"type"`
Port int `yaml:"port"` // Ignored for SRV records
// Catches all undefined fields and must be empty after parsing.
XXX map[string]interface{} `yaml:",inline"`
}
@ -565,8 +563,8 @@ func (c *DNSSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
// FileSDConfig is the configuration for file based discovery.
type FileSDConfig struct {
Names []string `yaml:"names"`
RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
Names []string `yaml:"names"`
RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
// Catches all undefined fields and must be empty after parsing.
XXX map[string]interface{} `yaml:",inline"`
@ -624,9 +622,9 @@ func (c *ConsulSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error
// ServersetSDConfig is the configuration for Twitter serversets in Zookeeper based discovery.
type ServersetSDConfig struct {
Servers []string `yaml:"servers"`
Paths []string `yaml:"paths"`
Timeout Duration `yaml:"timeout,omitempty"`
Servers []string `yaml:"servers"`
Paths []string `yaml:"paths"`
Timeout model.Duration `yaml:"timeout,omitempty"`
// Catches all undefined fields and must be empty after parsing.
XXX map[string]interface{} `yaml:",inline"`
@ -656,9 +654,9 @@ func (c *ServersetSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) err
// NerveSDConfig is the configuration for AirBnB's Nerve in Zookeeper based discovery.
type NerveSDConfig struct {
Servers []string `yaml:"servers"`
Paths []string `yaml:"paths"`
Timeout Duration `yaml:"timeout,omitempty"`
Servers []string `yaml:"servers"`
Paths []string `yaml:"paths"`
Timeout model.Duration `yaml:"timeout,omitempty"`
// Catches all undefined fields and must be empty after parsing.
XXX map[string]interface{} `yaml:",inline"`
@ -688,8 +686,8 @@ func (c *NerveSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
// MarathonSDConfig is the configuration for services running on Marathon.
type MarathonSDConfig struct {
Servers []string `yaml:"servers,omitempty"`
RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
Servers []string `yaml:"servers,omitempty"`
RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
// Catches all undefined fields and must be empty after parsing.
XXX map[string]interface{} `yaml:",inline"`
@ -712,15 +710,15 @@ func (c *MarathonSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) erro
// KubernetesSDConfig is the configuration for Kubernetes service discovery.
type KubernetesSDConfig struct {
APIServers []URL `yaml:"api_servers"`
KubeletPort int `yaml:"kubelet_port,omitempty"`
InCluster bool `yaml:"in_cluster,omitempty"`
BasicAuth *BasicAuth `yaml:"basic_auth,omitempty"`
BearerToken string `yaml:"bearer_token,omitempty"`
BearerTokenFile string `yaml:"bearer_token_file,omitempty"`
RetryInterval Duration `yaml:"retry_interval,omitempty"`
RequestTimeout Duration `yaml:"request_timeout,omitempty"`
TLSConfig TLSConfig `yaml:"tls_config,omitempty"`
APIServers []URL `yaml:"api_servers"`
KubeletPort int `yaml:"kubelet_port,omitempty"`
InCluster bool `yaml:"in_cluster,omitempty"`
BasicAuth *BasicAuth `yaml:"basic_auth,omitempty"`
BearerToken string `yaml:"bearer_token,omitempty"`
BearerTokenFile string `yaml:"bearer_token_file,omitempty"`
RetryInterval model.Duration `yaml:"retry_interval,omitempty"`
RequestTimeout model.Duration `yaml:"request_timeout,omitempty"`
TLSConfig TLSConfig `yaml:"tls_config,omitempty"`
// Catches all undefined fields and must be empty after parsing.
XXX map[string]interface{} `yaml:",inline"`
@ -749,11 +747,11 @@ func (c *KubernetesSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) er
// EC2SDConfig is the configuration for EC2 based service discovery.
type EC2SDConfig struct {
Region string `yaml:"region"`
AccessKey string `yaml:"access_key,omitempty"`
SecretKey string `yaml:"secret_key,omitempty"`
RefreshInterval Duration `yaml:"refresh_interval,omitempty"`
Port int `yaml:"port"`
Region string `yaml:"region"`
AccessKey string `yaml:"access_key,omitempty"`
SecretKey string `yaml:"secret_key,omitempty"`
RefreshInterval model.Duration `yaml:"refresh_interval,omitempty"`
Port int `yaml:"port"`
// Catches all undefined fields and must be empty after parsing.
XXX map[string]interface{} `yaml:",inline"`
}
@ -883,28 +881,3 @@ func (re Regexp) MarshalYAML() (interface{}, error) {
}
return nil, nil
}
// Duration encapsulates a time.Duration and makes it YAML marshallable.
//
// TODO(fabxc): Since we have custom types for most things, including timestamps,
// we might want to move this into our model as well, eventually.
type Duration time.Duration
// UnmarshalYAML implements the yaml.Unmarshaler interface.
func (d *Duration) UnmarshalYAML(unmarshal func(interface{}) error) error {
var s string
if err := unmarshal(&s); err != nil {
return err
}
dur, err := strutil.StringToDuration(s)
if err != nil {
return err
}
*d = Duration(dur)
return nil
}
// MarshalYAML implements the yaml.Marshaler interface.
func (d Duration) MarshalYAML() (interface{}, error) {
return strutil.DurationToString(time.Duration(d)), nil
}
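With config.Duration and its UnmarshalYAML/MarshalYAML removed above, the config structs rely on model.Duration handling YAML itself; that method set is not part of this diff, so the sketch below treats it as an assumption. globalConfig is a hypothetical, trimmed-down stand-in for GlobalConfig whose field types and yaml tags mirror the diff above:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
	"gopkg.in/yaml.v2"
)

// globalConfig is a hypothetical, trimmed-down stand-in for GlobalConfig.
type globalConfig struct {
	ScrapeInterval model.Duration `yaml:"scrape_interval,omitempty"`
	ScrapeTimeout  model.Duration `yaml:"scrape_timeout,omitempty"`
}

func main() {
	var cfg globalConfig
	// Assumes model.Duration implements yaml.Unmarshaler; that code is not part of this diff.
	if err := yaml.Unmarshal([]byte("scrape_interval: 15s\nscrape_timeout: 10s\n"), &cfg); err != nil {
		panic(err)
	}
	fmt.Println(time.Duration(cfg.ScrapeInterval), time.Duration(cfg.ScrapeTimeout)) // 15s 10s
}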


@ -28,9 +28,9 @@ import (
var expectedConf = &Config{
GlobalConfig: GlobalConfig{
ScrapeInterval: Duration(15 * time.Second),
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
EvaluationInterval: Duration(30 * time.Second),
EvaluationInterval: model.Duration(30 * time.Second),
ExternalLabels: model.LabelSet{
"monitor": "codelab",
@ -49,7 +49,7 @@ var expectedConf = &Config{
JobName: "prometheus",
HonorLabels: true,
ScrapeInterval: Duration(15 * time.Second),
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
@ -73,11 +73,11 @@ var expectedConf = &Config{
FileSDConfigs: []*FileSDConfig{
{
Names: []string{"foo/*.slow.json", "foo/*.slow.yml", "single/file.yml"},
RefreshInterval: Duration(10 * time.Minute),
RefreshInterval: model.Duration(10 * time.Minute),
},
{
Names: []string{"bar/*.yaml"},
RefreshInterval: Duration(5 * time.Minute),
RefreshInterval: model.Duration(5 * time.Minute),
},
},
@ -108,8 +108,8 @@ var expectedConf = &Config{
{
JobName: "service-x",
ScrapeInterval: Duration(50 * time.Second),
ScrapeTimeout: Duration(5 * time.Second),
ScrapeInterval: model.Duration(50 * time.Second),
ScrapeTimeout: model.Duration(5 * time.Second),
BasicAuth: &BasicAuth{
Username: "admin_name",
@ -124,14 +124,14 @@ var expectedConf = &Config{
"first.dns.address.domain.com",
"second.dns.address.domain.com",
},
RefreshInterval: Duration(15 * time.Second),
RefreshInterval: model.Duration(15 * time.Second),
Type: "SRV",
},
{
Names: []string{
"first.dns.address.domain.com",
},
RefreshInterval: Duration(30 * time.Second),
RefreshInterval: model.Duration(30 * time.Second),
Type: "SRV",
},
},
@ -180,7 +180,7 @@ var expectedConf = &Config{
{
JobName: "service-y",
ScrapeInterval: Duration(15 * time.Second),
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
@ -198,8 +198,8 @@ var expectedConf = &Config{
{
JobName: "service-z",
ScrapeInterval: Duration(15 * time.Second),
ScrapeTimeout: Duration(10 * time.Second),
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: model.Duration(10 * time.Second),
MetricsPath: "/metrics",
Scheme: "http",
@ -214,7 +214,7 @@ var expectedConf = &Config{
{
JobName: "service-kubernetes",
ScrapeInterval: Duration(15 * time.Second),
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
@ -228,15 +228,15 @@ var expectedConf = &Config{
Password: "mypassword",
},
KubeletPort: 10255,
RequestTimeout: Duration(10 * time.Second),
RetryInterval: Duration(1 * time.Second),
RequestTimeout: model.Duration(10 * time.Second),
RetryInterval: model.Duration(1 * time.Second),
},
},
},
{
JobName: "service-marathon",
ScrapeInterval: Duration(15 * time.Second),
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
@ -247,14 +247,14 @@ var expectedConf = &Config{
Servers: []string{
"http://marathon.example.com:8080",
},
RefreshInterval: Duration(30 * time.Second),
RefreshInterval: model.Duration(30 * time.Second),
},
},
},
{
JobName: "service-ec2",
ScrapeInterval: Duration(15 * time.Second),
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
@ -265,7 +265,7 @@ var expectedConf = &Config{
Region: "us-east-1",
AccessKey: "access",
SecretKey: "secret",
RefreshInterval: Duration(60 * time.Second),
RefreshInterval: model.Duration(60 * time.Second),
Port: 80,
},
},
@ -273,7 +273,7 @@ var expectedConf = &Config{
{
JobName: "service-nerve",
ScrapeInterval: Duration(15 * time.Second),
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
MetricsPath: DefaultScrapeConfig.MetricsPath,
@ -283,7 +283,7 @@ var expectedConf = &Config{
{
Servers: []string{"localhost"},
Paths: []string{"/monitoring"},
Timeout: Duration(10 * time.Second),
Timeout: model.Duration(10 * time.Second),
},
},
},


@ -1140,12 +1140,12 @@ func (p *parser) unquoteString(s string) string {
}
func parseDuration(ds string) (time.Duration, error) {
dur, err := strutil.StringToDuration(ds)
dur, err := model.ParseDuration(ds)
if err != nil {
return 0, err
}
if dur == 0 {
return 0, fmt.Errorf("duration must be greater than 0")
}
return dur, nil
return time.Duration(dur), nil
}
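The PromQL parser keeps its own wrapper so that zero durations are still rejected after the switch to model.ParseDuration. A hedged sketch of the same check as a standalone helper (checkDuration is a made-up name; the real function is the unexported parseDuration above):

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

// checkDuration mirrors the parser's rule: the string must parse and the result must be > 0.
func checkDuration(ds string) (time.Duration, error) {
	dur, err := model.ParseDuration(ds)
	if err != nil {
		return 0, err
	}
	if dur == 0 {
		return 0, fmt.Errorf("duration must be greater than 0")
	}
	return time.Duration(dur), nil
}

func main() {
	fmt.Println(checkDuration("5m")) // 5m0s <nil>
	fmt.Println(checkDuration("0s")) // 0s duration must be greater than 0
}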


@ -22,7 +22,6 @@ import (
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/util/strutil"
)
// Tree returns a string of the tree structure of the given node.
@ -104,7 +103,7 @@ func (node *AlertStmt) String() string {
s := fmt.Sprintf("ALERT %s", node.Name)
s += fmt.Sprintf("\n\tIF %s", node.Expr)
if node.Duration > 0 {
s += fmt.Sprintf("\n\tFOR %s", strutil.DurationToString(node.Duration))
s += fmt.Sprintf("\n\tFOR %s", model.Duration(node.Duration))
}
if len(node.Labels) > 0 {
s += fmt.Sprintf("\n\tLABELS %s", node.Labels)
@ -178,9 +177,9 @@ func (node *MatrixSelector) String() string {
}
offset := ""
if node.Offset != time.Duration(0) {
offset = fmt.Sprintf(" OFFSET %s", strutil.DurationToString(node.Offset))
offset = fmt.Sprintf(" OFFSET %s", model.Duration(node.Offset))
}
return fmt.Sprintf("%s[%s]%s", vecSelector.String(), strutil.DurationToString(node.Range), offset)
return fmt.Sprintf("%s[%s]%s", vecSelector.String(), model.Duration(node.Range), offset)
}
func (node *NumberLiteral) String() string {
@ -210,7 +209,7 @@ func (node *VectorSelector) String() string {
}
offset := ""
if node.Offset != time.Duration(0) {
offset = fmt.Sprintf(" OFFSET %s", strutil.DurationToString(node.Offset))
offset = fmt.Sprintf(" OFFSET %s", model.Duration(node.Offset))
}
if len(labelStrings) == 0 {


@ -26,7 +26,6 @@ import (
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/local"
"github.com/prometheus/prometheus/util/strutil"
"github.com/prometheus/prometheus/util/testutil"
)
@ -98,11 +97,11 @@ func (t *Test) parseLoad(lines []string, i int) (int, *loadCmd, error) {
}
parts := patLoad.FindStringSubmatch(lines[i])
gap, err := strutil.StringToDuration(parts[1])
gap, err := model.ParseDuration(parts[1])
if err != nil {
return i, nil, raise(i, "invalid step definition %q: %s", parts[1], err)
}
cmd := newLoadCmd(gap)
cmd := newLoadCmd(time.Duration(gap))
for i+1 < len(lines) {
i++
defLine := lines[i]
@ -141,11 +140,11 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
return i, nil, err
}
offset, err := strutil.StringToDuration(at)
offset, err := model.ParseDuration(at)
if err != nil {
return i, nil, raise(i, "invalid step definition %q: %s", parts[1], err)
}
ts := testStartTime.Add(offset)
ts := testStartTime.Add(time.Duration(offset))
cmd := newEvalCmd(expr, ts, ts, 0)
switch mod {


@ -7,6 +7,8 @@ import (
"testing"
"time"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
)
@ -22,7 +24,7 @@ func testFileSD(t *testing.T, ext string) {
// whether file watches work as expected.
var conf config.FileSDConfig
conf.Names = []string{"fixtures/_*" + ext}
conf.RefreshInterval = config.Duration(1 * time.Hour)
conf.RefreshInterval = model.Duration(1 * time.Hour)
var (
fsd = NewFileDiscovery(&conf)


@ -420,8 +420,8 @@ func TestURLParams(t *testing.T) {
target := NewTarget(
&config.ScrapeConfig{
JobName: "test_job1",
ScrapeInterval: config.Duration(1 * time.Minute),
ScrapeTimeout: config.Duration(1 * time.Second),
ScrapeInterval: model.Duration(1 * time.Minute),
ScrapeTimeout: model.Duration(1 * time.Second),
Scheme: serverURL.Scheme,
Params: url.Values{
"foo": []string{"bar", "baz"},
@ -441,7 +441,7 @@ func TestURLParams(t *testing.T) {
func newTestTarget(targetURL string, deadline time.Duration, baseLabels model.LabelSet) *Target {
cfg := &config.ScrapeConfig{
ScrapeTimeout: config.Duration(deadline),
ScrapeTimeout: model.Duration(deadline),
}
c, _ := newHTTPClient(cfg)
t := &Target{
@ -481,7 +481,7 @@ func TestNewHTTPBearerToken(t *testing.T) {
defer server.Close()
cfg := &config.ScrapeConfig{
ScrapeTimeout: config.Duration(1 * time.Second),
ScrapeTimeout: model.Duration(1 * time.Second),
BearerToken: "1234",
}
c, err := newHTTPClient(cfg)
@ -509,7 +509,7 @@ func TestNewHTTPBearerTokenFile(t *testing.T) {
defer server.Close()
cfg := &config.ScrapeConfig{
ScrapeTimeout: config.Duration(1 * time.Second),
ScrapeTimeout: model.Duration(1 * time.Second),
BearerTokenFile: "testdata/bearertoken.txt",
}
c, err := newHTTPClient(cfg)
@ -536,7 +536,7 @@ func TestNewHTTPBasicAuth(t *testing.T) {
defer server.Close()
cfg := &config.ScrapeConfig{
ScrapeTimeout: config.Duration(1 * time.Second),
ScrapeTimeout: model.Duration(1 * time.Second),
BasicAuth: &config.BasicAuth{
Username: "user",
Password: "password123",
@ -566,7 +566,7 @@ func TestNewHTTPCACert(t *testing.T) {
defer server.Close()
cfg := &config.ScrapeConfig{
ScrapeTimeout: config.Duration(1 * time.Second),
ScrapeTimeout: model.Duration(1 * time.Second),
TLSConfig: config.TLSConfig{
CAFile: "testdata/ca.cer",
},
@ -599,7 +599,7 @@ func TestNewHTTPClientCert(t *testing.T) {
defer server.Close()
cfg := &config.ScrapeConfig{
ScrapeTimeout: config.Duration(1 * time.Second),
ScrapeTimeout: model.Duration(1 * time.Second),
TLSConfig: config.TLSConfig{
CAFile: "testdata/ca.cer",
CertFile: "testdata/client.cer",


@ -75,7 +75,7 @@ func TestPrefixedTargetProvider(t *testing.T) {
func TestTargetManagerChan(t *testing.T) {
testJob1 := &config.ScrapeConfig{
JobName: "test_job1",
ScrapeInterval: config.Duration(1 * time.Minute),
ScrapeInterval: model.Duration(1 * time.Minute),
TargetGroups: []*config.TargetGroup{{
Targets: []model.LabelSet{
{model.AddressLabel: "example.org:80"},
@ -204,7 +204,7 @@ func TestTargetManagerChan(t *testing.T) {
func TestTargetManagerConfigUpdate(t *testing.T) {
testJob1 := &config.ScrapeConfig{
JobName: "test_job1",
ScrapeInterval: config.Duration(1 * time.Minute),
ScrapeInterval: model.Duration(1 * time.Minute),
Params: url.Values{
"testParam": []string{"paramValue", "secondValue"},
},
@ -234,7 +234,7 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
}
testJob2 := &config.ScrapeConfig{
JobName: "test_job2",
ScrapeInterval: config.Duration(1 * time.Minute),
ScrapeInterval: model.Duration(1 * time.Minute),
TargetGroups: []*config.TargetGroup{
{
Targets: []model.LabelSet{
@ -288,7 +288,7 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
// Test that targets without host:port addresses are dropped.
testJob3 := &config.ScrapeConfig{
JobName: "test_job1",
ScrapeInterval: config.Duration(1 * time.Minute),
ScrapeInterval: model.Duration(1 * time.Minute),
TargetGroups: []*config.TargetGroup{{
Targets: []model.LabelSet{
{model.AddressLabel: "example.net:80"},


@ -58,7 +58,7 @@ func (s AlertState) String() string {
case StateFiring:
return "firing"
}
panic(fmt.Errorf("unknown alert state: %v", s))
panic(fmt.Errorf("unknown alert state: %v", s.String()))
}
// Alert is the user-level representation of a single instance of an alerting rule.
@ -255,7 +255,7 @@ func (rule *AlertingRule) String() string {
s := fmt.Sprintf("ALERT %s", rule.name)
s += fmt.Sprintf("\n\tIF %s", rule.vector)
if rule.holdDuration > 0 {
s += fmt.Sprintf("\n\tFOR %s", strutil.DurationToString(rule.holdDuration))
s += fmt.Sprintf("\n\tFOR %s", model.Duration(rule.holdDuration))
}
if len(rule.labels) > 0 {
s += fmt.Sprintf("\n\tLABELS %s", rule.labels)
@ -277,7 +277,7 @@ func (rule *AlertingRule) HTMLSnippet(pathPrefix string) template.HTML {
s := fmt.Sprintf("ALERT <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(alertMetric.String()), rule.name)
s += fmt.Sprintf("\n IF <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(rule.vector.String()), rule.vector)
if rule.holdDuration > 0 {
s += fmt.Sprintf("\n FOR %s", strutil.DurationToString(rule.holdDuration))
s += fmt.Sprintf("\n FOR %s", model.Duration(rule.holdDuration))
}
if len(rule.labels) > 0 {
s += fmt.Sprintf("\n LABELS %s", rule.labels)


@ -17,75 +17,13 @@ import (
"fmt"
"net/url"
"regexp"
"strconv"
"strings"
"time"
)
var (
durationRE = regexp.MustCompile("^([0-9]+)([ywdhms]+)$")
invalidLabelCharRE = regexp.MustCompile(`[^a-zA-Z0-9_]`)
)
// DurationToString formats a time.Duration as a string with the assumption that
// a year always has 365 days and a day always has 24h. (The former doesn't work
// in leap years, the latter is broken by DST switches, not to speak about leap
// seconds, but those are not even treated properly by the duration strings in
// the standard library.)
func DurationToString(duration time.Duration) string {
seconds := int64(duration / time.Second)
factors := map[string]int64{
"y": 60 * 60 * 24 * 365,
"d": 60 * 60 * 24,
"h": 60 * 60,
"m": 60,
"s": 1,
}
unit := "s"
switch int64(0) {
case seconds % factors["y"]:
unit = "y"
case seconds % factors["d"]:
unit = "d"
case seconds % factors["h"]:
unit = "h"
case seconds % factors["m"]:
unit = "m"
}
return fmt.Sprintf("%v%v", seconds/factors[unit], unit)
}
// StringToDuration parses a string into a time.Duration, assuming that a year
// always has 365d, a week 7d, a day 24h. See DurationToString for problems with
// that.
func StringToDuration(durationStr string) (duration time.Duration, err error) {
matches := durationRE.FindStringSubmatch(durationStr)
if len(matches) != 3 {
err = fmt.Errorf("not a valid duration string: %q", durationStr)
return
}
durationSeconds, _ := strconv.Atoi(matches[1])
duration = time.Duration(durationSeconds) * time.Second
unit := matches[2]
switch unit {
case "y":
duration *= 60 * 60 * 24 * 365
case "w":
duration *= 60 * 60 * 24 * 7
case "d":
duration *= 60 * 60 * 24
case "h":
duration *= 60 * 60
case "m":
duration *= 60
case "s":
duration *= 1
default:
return 0, fmt.Errorf("invalid time unit in duration string: %q", unit)
}
return
}
// TableLinkForExpression creates an escaped relative link to the table view of
// the provided expression.
func TableLinkForExpression(expr string) string {
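The deleted strutil helpers and the model implementation use the same fixed conversions (1y = 365d, 1w = 7d, 1d = 24h, ignoring leap years and DST), so the switch should not change parsed values. A quick check of that equivalence:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	// Both the deleted strutil code and model use 1y = 365d, 1w = 7d, 1d = 24h.
	y, _ := model.ParseDuration("1y")
	w, _ := model.ParseDuration("1w")
	fmt.Println(time.Duration(y) == 365*24*time.Hour) // true
	fmt.Println(time.Duration(w) == 7*24*time.Hour)   // true
}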


@ -163,10 +163,10 @@ func (t *Time) UnmarshalJSON(b []byte) error {
// This type should not propagate beyond the scope of input/output processing.
type Duration time.Duration
var durationRE = regexp.MustCompile("^([0-9]+)(d|h|m|s|ms)$")
var durationRE = regexp.MustCompile("^([0-9]+)(y|w|d|h|m|s|ms)$")
// StringToDuration parses a string into a time.Duration, assuming that a year
// a day always has 24h.
// always has 365d, a week always has 7d, and a day always has 24h.
func ParseDuration(durationStr string) (Duration, error) {
matches := durationRE.FindStringSubmatch(durationStr)
if len(matches) != 3 {
@ -177,6 +177,10 @@ func ParseDuration(durationStr string) (Duration, error) {
dur = time.Duration(n) * time.Millisecond
)
switch unit := matches[2]; unit {
case "y":
dur *= 1000 * 60 * 60 * 24 * 365
case "w":
dur *= 1000 * 60 * 60 * 24 * 7
case "d":
dur *= 1000 * 60 * 60 * 24
case "h":
@ -199,6 +203,8 @@ func (d Duration) String() string {
unit = "ms"
)
factors := map[string]int64{
"y": 1000 * 60 * 60 * 24 * 365,
"w": 1000 * 60 * 60 * 24 * 7,
"d": 1000 * 60 * 60 * 24,
"h": 1000 * 60 * 60,
"m": 1000 * 60,
@ -207,6 +213,10 @@ func (d Duration) String() string {
}
switch int64(0) {
case ms % factors["y"]:
unit = "y"
case ms % factors["w"]:
unit = "w"
case ms % factors["d"]:
unit = "d"
case ms % factors["h"]:
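The vendored model.Duration now understands y and w in both ParseDuration and String, matching what strutil supported before. A small round-trip sketch:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	// Round trip through the extended regexp ^([0-9]+)(y|w|d|h|m|s|ms)$.
	for _, s := range []string{"2y", "3w", "90m", "500ms"} {
		d, err := model.ParseDuration(s)
		if err != nil {
			panic(err)
		}
		fmt.Printf("%s -> %s\n", s, d) // each value formats back to its input
	}
}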

vendor/vendor.json

@ -174,8 +174,8 @@
},
{
"path": "github.com/prometheus/common/model",
"revision": "b0d797186bfbaf6d785031c6c2d32f75c720007d",
"revisionTime": "2016-01-22T12:15:42+01:00"
"revision": "0e53cc19aa67dd2e8587a26e28643cb152f5403d",
"revisionTime": "2016-01-29T15:16:16+01:00"
},
{
"path": "github.com/prometheus/common/route",


@ -18,7 +18,6 @@ import (
"github.com/prometheus/prometheus/storage/local"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/util/httputil"
"github.com/prometheus/prometheus/util/strutil"
)
type status string
@ -324,8 +323,8 @@ func parseDuration(s string) (time.Duration, error) {
if d, err := strconv.ParseFloat(s, 64); err == nil {
return time.Duration(d * float64(time.Second)), nil
}
if d, err := strutil.StringToDuration(s); err == nil {
return d, nil
if d, err := model.ParseDuration(s); err == nil {
return time.Duration(d), nil
}
return 0, fmt.Errorf("cannot parse %q to a valid duration", s)
}
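The web API helper still accepts either a float number of seconds or a Prometheus duration string; only the string branch changed. A sketch of the same dual parsing as a standalone function (parseAPIDuration is a made-up name; the real helper is the unexported parseDuration above):

package main

import (
	"fmt"
	"strconv"
	"time"

	"github.com/prometheus/common/model"
)

// parseAPIDuration is a hypothetical copy of the helper above: try a float
// number of seconds first, then fall back to a Prometheus duration string.
func parseAPIDuration(s string) (time.Duration, error) {
	if d, err := strconv.ParseFloat(s, 64); err == nil {
		return time.Duration(d * float64(time.Second)), nil
	}
	if d, err := model.ParseDuration(s); err == nil {
		return time.Duration(d), nil
	}
	return 0, fmt.Errorf("cannot parse %q to a valid duration", s)
}

func main() {
	fmt.Println(parseAPIDuration("30"))  // 30s <nil>
	fmt.Println(parseAPIDuration("1.5")) // 1.5s <nil>
	fmt.Println(parseAPIDuration("2h"))  // 2h0m0s <nil>
}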