Mirror of https://github.com/prometheus/prometheus.git (synced 2024-11-09 23:24:05 -08:00)
Commit 867f2ac979
@@ -9,7 +9,7 @@ RUN apk add --update -t build-deps go git mercurial vim \
     && go get github.com/tools/godep \
     && cd /go/src/github.com/prometheus/prometheus \
     && $GOPATH/bin/godep restore && go get -d \
-    && ./utility/embed-static.sh web/static web/templates | gofmt > web/blob/files.go \
+    && ./scripts/embed-static.sh web/static web/templates | gofmt > web/blob/files.go \
     && go build -ldflags " \
         -X main.buildVersion $(cat VERSION) \
         -X main.buildRevision $(git rev-parse --short HEAD) \
@@ -12,12 +12,13 @@ import (
 
     clientmodel "github.com/prometheus/client_golang/model"
 
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/util/strutil"
 )
 
 var (
     patJobName    = regexp.MustCompile(`^[a-zA-Z_][a-zA-Z0-9_-]*$`)
     patFileSDName = regexp.MustCompile(`^[^*]*(\*[^/]*)?\.(json|yml|yaml|JSON|YML|YAML)$`)
+    patRulePath   = regexp.MustCompile(`^[^*]*(\*[^/]*)?$`)
 )
 
 // Load parses the YAML input s into a Config.
@@ -106,12 +107,17 @@ func (c Config) String() string {
     return string(b)
 }
 
-// UnmarshalYAML implements the yaml.Unmarshaller interface.
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
     c.DefaultedConfig = DefaultConfig
     if err := unmarshal(&c.DefaultedConfig); err != nil {
         return err
     }
+    for _, rf := range c.RuleFiles {
+        if !patRulePath.MatchString(rf) {
+            return fmt.Errorf("invalid rule file path %q", rf)
+        }
+    }
     // Do global overrides and validate unique names.
     jobNames := map[string]struct{}{}
     for _, scfg := range c.ScrapeConfigs {
@@ -144,7 +150,7 @@ type GlobalConfig struct {
     DefaultedGlobalConfig `yaml:",inline"`
 }
 
-// UnmarshalYAML implements the yaml.Unmarshaller interface.
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (c *GlobalConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
     c.DefaultedGlobalConfig = DefaultGlobalConfig
     if err := unmarshal(&c.DefaultedGlobalConfig); err != nil {
@@ -172,7 +178,7 @@ type ScrapeConfig struct {
     DefaultedScrapeConfig `yaml:",inline"`
 }
 
-// UnmarshalYAML implements the yaml.Unmarshaller interface.
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (c *ScrapeConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
     c.DefaultedScrapeConfig = DefaultScrapeConfig
     err := unmarshal(&c.DefaultedScrapeConfig)
@@ -234,7 +240,7 @@ func (tg TargetGroup) String() string {
     return tg.Source
 }
 
-// UnmarshalYAML implements the yaml.Unmarshaller interface.
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (tg *TargetGroup) UnmarshalYAML(unmarshal func(interface{}) error) error {
     g := struct {
         Targets []string `yaml:"targets"`
@@ -256,7 +262,7 @@ func (tg *TargetGroup) UnmarshalYAML(unmarshal func(interface{}) error) error {
     return nil
 }
 
-// MarshalYAML implements the yaml.Marshaller interface.
+// MarshalYAML implements the yaml.Marshaler interface.
 func (tg TargetGroup) MarshalYAML() (interface{}, error) {
     g := &struct {
         Targets []string `yaml:"targets"`
@@ -271,7 +277,7 @@ func (tg TargetGroup) MarshalYAML() (interface{}, error) {
     return g, nil
 }
 
-// UnmarshalJSON implements the json.Unmarshaller interface.
+// UnmarshalJSON implements the json.Unmarshaler interface.
 func (tg *TargetGroup) UnmarshalJSON(b []byte) error {
     g := struct {
         Targets []string `yaml:"targets"`
@@ -299,7 +305,7 @@ type DNSSDConfig struct {
     DefaultedDNSSDConfig `yaml:",inline"`
 }
 
-// UnmarshalYAML implements the yaml.Unmarshaller interface.
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (c *DNSSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
     c.DefaultedDNSSDConfig = DefaultDNSSDConfig
     err := unmarshal(&c.DefaultedDNSSDConfig)
@@ -324,7 +330,7 @@ type FileSDConfig struct {
     DefaultedFileSDConfig `yaml:",inline"`
 }
 
-// UnmarshalYAML implements the yaml.Unmarshaller interface.
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (c *FileSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
     c.DefaultedFileSDConfig = DefaultFileSDConfig
     err := unmarshal(&c.DefaultedFileSDConfig)
@@ -354,7 +360,7 @@ type ConsulSDConfig struct {
     DefaultedConsulSDConfig `yaml:",inline"`
 }
 
-// UnmarshalYAML implements the yaml.Unmarshaller interface.
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (c *ConsulSDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
     c.DefaultedConsulSDConfig = DefaultConsulSDConfig
     err := unmarshal(&c.DefaultedConsulSDConfig)
@@ -394,7 +400,7 @@ const (
     RelabelDrop = "drop"
 )
 
-// UnmarshalYAML implements the yaml.Unmarshaller interface.
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (a *RelabelAction) UnmarshalYAML(unmarshal func(interface{}) error) error {
     var s string
     if err := unmarshal(&s); err != nil {
@@ -414,7 +420,7 @@ type RelabelConfig struct {
     DefaultedRelabelConfig `yaml:",inline"`
 }
 
-// UnmarshalYAML implements the yaml.Unmarshaller interface.
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (c *RelabelConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
     c.DefaultedRelabelConfig = DefaultRelabelConfig
     return unmarshal(&c.DefaultedRelabelConfig)
@@ -442,7 +448,7 @@ type Regexp struct {
     regexp.Regexp
 }
 
-// UnmarshalYAML implements the yaml.Unmarshaller interface.
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (re *Regexp) UnmarshalYAML(unmarshal func(interface{}) error) error {
     var s string
     if err := unmarshal(&s); err != nil {
@@ -456,7 +462,7 @@ func (re *Regexp) UnmarshalYAML(unmarshal func(interface{}) error) error {
     return nil
 }
 
-// MarshalYAML implements the yaml.Marshaller interface.
+// MarshalYAML implements the yaml.Marshaler interface.
 func (re Regexp) MarshalYAML() (interface{}, error) {
     return re.String(), nil
 }
@@ -467,13 +473,13 @@ func (re Regexp) MarshalYAML() (interface{}, error) {
 // we might want to move this into our model as well, eventually.
 type Duration time.Duration
 
-// UnmarshalYAML implements the yaml.Unmarshaller interface.
+// UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (d *Duration) UnmarshalYAML(unmarshal func(interface{}) error) error {
     var s string
     if err := unmarshal(&s); err != nil {
         return err
     }
-    dur, err := utility.StringToDuration(s)
+    dur, err := strutil.StringToDuration(s)
     if err != nil {
         return err
     }
@@ -481,7 +487,7 @@ func (d *Duration) UnmarshalYAML(unmarshal func(interface{}) error) error {
     return nil
 }
 
-// MarshalYAML implements the yaml.Marshaller interface.
+// MarshalYAML implements the yaml.Marshaler interface.
 func (d Duration) MarshalYAML() (interface{}, error) {
-    return utility.DurationToString(time.Duration(d)), nil
+    return strutil.DurationToString(time.Duration(d)), nil
 }
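Note: the new `patRulePath` pattern allows at most one `*` glob, and only in the final path segment of a `rule_files` entry; anything else is rejected when the config is loaded. A minimal standalone sketch of that check (the regular expression is copied from the diff, the sample paths are illustrative only):

```go
package main

import (
	"fmt"
	"regexp"
)

// patRulePath is copied from the diff: an optional single '*' glob,
// permitted only in the last path segment.
var patRulePath = regexp.MustCompile(`^[^*]*(\*[^/]*)?$`)

func main() {
	for _, p := range []string{"first.rules", "my/*.rules", "my/*/rule"} {
		fmt.Printf("%-14q valid=%v\n", p, patRulePath.MatchString(p))
	}
	// "first.rules" and "my/*.rules" match; "my/*/rule" does not,
	// which is exactly the case testdata/rules.bad.yml exercises.
}
```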
@@ -27,6 +27,7 @@ var expectedConf = &Config{DefaultedConfig{
     RuleFiles: []string{
         "first.rules",
         "second.rules",
+        "my/*.rules",
     },
 
     ScrapeConfigs: []*ScrapeConfig{
@@ -151,6 +152,9 @@ var expectedErrors = []struct {
     }, {
         filename: "regex.bad.yml",
         errMsg:   "error parsing regexp",
+    }, {
+        filename: "rules.bad.yml",
+        errMsg:   "invalid rule file path",
     },
 }
 
@@ -159,6 +163,7 @@ func TestBadConfigs(t *testing.T) {
         _, err := LoadFromFile("testdata/" + ee.filename)
         if err == nil {
             t.Errorf("Expected error parsing %s but got none", ee.filename)
+            continue
         }
         if !strings.Contains(err.Error(), ee.errMsg) {
             t.Errorf("Expected error for %s to contain %q but got: %s", ee.filename, ee.errMsg, err)
config/testdata/conf.good.yml (vendored)
@@ -11,6 +11,7 @@ global:
 rule_files:
   - "first.rules"
   - "second.rules"
+  - "my/*.rules"
 
 scrape_configs:
   - job_name: prometheus
config/testdata/rules.bad.yml (vendored, new file)
@@ -0,0 +1,3 @@
+rule_files:
+  - 'my_rule'   # fine
+  - 'my/*/rule' # bad
@@ -28,7 +28,7 @@ import (
 
     clientmodel "github.com/prometheus/client_golang/model"
 
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/util/httputil"
 )
 
 const (
@@ -99,7 +99,7 @@ func NewNotificationHandler(alertmanagerURL string, notificationQueueCapacity in
         alertmanagerURL:      strings.TrimRight(alertmanagerURL, "/"),
         pendingNotifications: make(chan NotificationReqs, notificationQueueCapacity),
 
-        httpClient: utility.NewDeadlineClient(*deadline),
+        httpClient: httputil.NewDeadlineClient(*deadline),
 
         notificationLatency: prometheus.NewSummary(prometheus.SummaryOpts{
             Namespace: namespace,
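Note: `NewDeadlineClient` keeps its role after the move from `utility` to the new `util/httputil` package: it builds an HTTP client whose requests are bounded by a deadline. A minimal usage sketch, assuming (as the assignments to `httpClient` fields in this diff suggest) that it returns a value usable like a plain `*http.Client`; the URL is illustrative:

```go
package main

import (
	"log"
	"time"

	"github.com/prometheus/prometheus/util/httputil"
)

func main() {
	// Assumption: NewDeadlineClient(timeout) returns an *http.Client-like
	// client whose requests fail once the deadline is exceeded.
	client := httputil.NewDeadlineClient(10 * time.Second)

	resp, err := client.Get("http://localhost:9090/metrics")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	log.Println(resp.Status)
}
```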
@@ -25,9 +25,9 @@ import (
 
     clientmodel "github.com/prometheus/client_golang/model"
 
-    "github.com/prometheus/prometheus/stats"
     "github.com/prometheus/prometheus/storage/local"
     "github.com/prometheus/prometheus/storage/metric"
+    "github.com/prometheus/prometheus/util/stats"
 )
 
 var (
@@ -22,7 +22,7 @@ import (
 
     clientmodel "github.com/prometheus/client_golang/model"
     "github.com/prometheus/prometheus/storage/metric"
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/util/strutil"
 )
 
 type parser struct {
@@ -982,7 +982,7 @@ func (p *parser) checkType(node Node) (typ ExprType) {
 }
 
 func parseDuration(ds string) (time.Duration, error) {
-    dur, err := utility.StringToDuration(ds)
+    dur, err := strutil.StringToDuration(ds)
     if err != nil {
         return 0, err
     }
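Note: `strutil.StringToDuration` (formerly `utility.StringToDuration`) parses the PromQL-style duration strings such as `5m` used in range selectors and rule definitions. A minimal sketch of the call shape taken from this diff (string in, `time.Duration` and error out); the `"5m"` literal is only an illustration:

```go
package main

import (
	"fmt"
	"log"

	"github.com/prometheus/prometheus/util/strutil"
)

func main() {
	// Call shape as used in promql/parse.go above.
	d, err := strutil.StringToDuration("5m")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(d) // a time.Duration, e.g. 5m0s
}
```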
@@ -22,7 +22,7 @@ import (
     clientmodel "github.com/prometheus/client_golang/model"
 
     "github.com/prometheus/prometheus/storage/metric"
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/util/strutil"
 )
 
 func (matrix Matrix) String() string {
@@ -145,7 +145,7 @@ func (node *AlertStmt) String() string {
     s := fmt.Sprintf("ALERT %s", node.Name)
     s += fmt.Sprintf("\n\tIF %s", node.Expr)
     if node.Duration > 0 {
-        s += fmt.Sprintf("\n\tFOR %s", utility.DurationToString(node.Duration))
+        s += fmt.Sprintf("\n\tFOR %s", strutil.DurationToString(node.Duration))
     }
     if len(node.Labels) > 0 {
         s += fmt.Sprintf("\n\tWITH %s", node.Labels)
@@ -207,7 +207,7 @@ func (node *MatrixSelector) String() string {
         Name:          node.Name,
         LabelMatchers: node.LabelMatchers,
     }
-    return fmt.Sprintf("%s[%s]", vecSelector.String(), utility.DurationToString(node.Range))
+    return fmt.Sprintf("%s[%s]", vecSelector.String(), strutil.DurationToString(node.Range))
 }
 
 func (node *NumberLiteral) String() string {
@@ -260,7 +260,7 @@ func (node *AlertStmt) DotGraph() string {
         %#p -> %x;
         %s
     }`,
-        node, node.Name, utility.DurationToString(node.Duration),
+        node, node.Name, strutil.DurationToString(node.Duration),
         node, reflect.ValueOf(node.Expr).Pointer(),
         node.Expr.DotGraph(),
     )
@@ -27,9 +27,8 @@ import (
     "github.com/prometheus/prometheus/storage"
     "github.com/prometheus/prometheus/storage/local"
     "github.com/prometheus/prometheus/storage/metric"
-    "github.com/prometheus/prometheus/utility"
-
-    testutil "github.com/prometheus/prometheus/utility/test"
+    "github.com/prometheus/prometheus/util/strutil"
+    "github.com/prometheus/prometheus/util/testutil"
 )
 
 var (
@@ -91,7 +90,7 @@ func (t *Test) parseLoad(lines []string, i int) (int, *loadCmd, error) {
     }
     parts := patLoad.FindStringSubmatch(lines[i])
 
-    gap, err := utility.StringToDuration(parts[1])
+    gap, err := strutil.StringToDuration(parts[1])
     if err != nil {
         return i, nil, raise(i, "invalid step definition %q: %s", parts[1], err)
     }
@@ -132,7 +131,7 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
         return i, nil, perr
     }
 
-    offset, err := utility.StringToDuration(at)
+    offset, err := strutil.StringToDuration(at)
     if err != nil {
         return i, nil, raise(i, "invalid step definition %q: %s", parts[1], err)
     }
@@ -31,7 +31,7 @@ import (
 
     "github.com/prometheus/prometheus/config"
     "github.com/prometheus/prometheus/storage"
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/util/httputil"
 )
 
 const (
@@ -197,7 +197,7 @@ func (t *Target) Update(cfg *config.ScrapeConfig, baseLabels clientmodel.LabelSe
 
     t.scrapeInterval = time.Duration(cfg.ScrapeInterval)
     t.deadline = time.Duration(cfg.ScrapeTimeout)
-    t.httpClient = utility.NewDeadlineClient(time.Duration(cfg.ScrapeTimeout))
+    t.httpClient = httputil.NewDeadlineClient(time.Duration(cfg.ScrapeTimeout))
 
     t.baseLabels = clientmodel.LabelSet{}
     // All remaining internal labels will not be part of the label set.
@@ -26,7 +26,7 @@ import (
 
     clientmodel "github.com/prometheus/client_golang/model"
 
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/util/httputil"
 )
 
 func TestBaseLabels(t *testing.T) {
@@ -242,7 +242,7 @@ func newTestTarget(targetURL string, deadline time.Duration, baseLabels clientmo
         deadline:        deadline,
         status:          &TargetStatus{},
         scrapeInterval:  1 * time.Millisecond,
-        httpClient:      utility.NewDeadlineClient(deadline),
+        httpClient:      httputil.NewDeadlineClient(deadline),
         scraperStopping: make(chan struct{}),
         scraperStopped:  make(chan struct{}),
     }
@@ -23,7 +23,7 @@ import (
     clientmodel "github.com/prometheus/client_golang/model"
 
     "github.com/prometheus/prometheus/promql"
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/util/strutil"
 )
 
 const (
@@ -148,10 +148,10 @@ func (rule *AlertingRule) Eval(timestamp clientmodel.Timestamp, engine *promql.E
 
     // Create pending alerts for any new vector elements in the alert expression
     // or update the expression value for existing elements.
-    resultFingerprints := utility.Set{}
+    resultFPs := map[clientmodel.Fingerprint]struct{}{}
     for _, sample := range exprResult {
         fp := sample.Metric.Metric.Fingerprint()
-        resultFingerprints.Add(fp)
+        resultFPs[fp] = struct{}{}
 
         if alert, ok := rule.activeAlerts[fp]; !ok {
             labels := clientmodel.LabelSet{}
@@ -176,7 +176,7 @@ func (rule *AlertingRule) Eval(timestamp clientmodel.Timestamp, engine *promql.E
 
     // Check if any pending alerts should be removed or fire now. Write out alert timeseries.
     for fp, activeAlert := range rule.activeAlerts {
-        if !resultFingerprints.Has(fp) {
+        if _, ok := resultFPs[fp]; !ok {
             vector = append(vector, activeAlert.sample(timestamp, 0))
             delete(rule.activeAlerts, fp)
             continue
@@ -201,7 +201,7 @@ func (rule *AlertingRule) DotGraph() string {
         %#p -> %x;
         %s
     }`,
-        &rule, rule.name, utility.DurationToString(rule.holdDuration),
+        &rule, rule.name, strutil.DurationToString(rule.holdDuration),
         &rule, reflect.ValueOf(rule.Vector).Pointer(),
         rule.Vector.DotGraph(),
     )
@@ -209,7 +209,7 @@ func (rule *AlertingRule) DotGraph() string {
 }
 
 func (rule *AlertingRule) String() string {
-    return fmt.Sprintf("ALERT %s IF %s FOR %s WITH %s", rule.name, rule.Vector, utility.DurationToString(rule.holdDuration), rule.Labels)
+    return fmt.Sprintf("ALERT %s IF %s FOR %s WITH %s", rule.name, rule.Vector, strutil.DurationToString(rule.holdDuration), rule.Labels)
 }
 
 // HTMLSnippet returns an HTML snippet representing this alerting rule.
@@ -220,11 +220,11 @@ func (rule *AlertingRule) HTMLSnippet(pathPrefix string) template.HTML {
     }
     return template.HTML(fmt.Sprintf(
         `ALERT <a href="%s">%s</a> IF <a href="%s">%s</a> FOR %s WITH %s`,
-        pathPrefix+utility.GraphLinkForExpression(alertMetric.String()),
+        pathPrefix+strutil.GraphLinkForExpression(alertMetric.String()),
         rule.name,
-        pathPrefix+utility.GraphLinkForExpression(rule.Vector.String()),
+        pathPrefix+strutil.GraphLinkForExpression(rule.Vector.String()),
         rule.Vector,
-        utility.DurationToString(rule.holdDuration),
+        strutil.DurationToString(rule.holdDuration),
         rule.Labels))
 }
 
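Note: with `utility.Set` deleted later in this commit, the alerting rule tracks active fingerprints with a plain `map[clientmodel.Fingerprint]struct{}`. A minimal sketch of the membership pattern, with a placeholder fingerprint type standing in for `clientmodel.Fingerprint`:

```go
package main

import "fmt"

// Fingerprint stands in for clientmodel.Fingerprint in this sketch.
type Fingerprint uint64

func main() {
	resultFPs := map[Fingerprint]struct{}{}

	// Add: the empty struct value carries no payload per entry.
	resultFPs[Fingerprint(42)] = struct{}{}

	// Has: the comma-ok lookup replaces utility.Set's Has method.
	if _, ok := resultFPs[Fingerprint(42)]; ok {
		fmt.Println("fingerprint is in the result vector")
	}
	if _, ok := resultFPs[Fingerprint(7)]; !ok {
		fmt.Println("fingerprint 7 was not in the result vector")
	}
}
```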
@@ -16,6 +16,7 @@ package rules
 import (
     "fmt"
     "io/ioutil"
+    "path/filepath"
     "strings"
     "sync"
     "time"
@@ -29,8 +30,8 @@
     "github.com/prometheus/prometheus/notification"
     "github.com/prometheus/prometheus/promql"
     "github.com/prometheus/prometheus/storage"
-    "github.com/prometheus/prometheus/templates"
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/template"
+    "github.com/prometheus/prometheus/util/strutil"
 )
 
 // Constants for instrumentation.
@@ -194,8 +195,8 @@ func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp clientmo
     defs := "{{$labels := .Labels}}{{$value := .Value}}"
 
     expand := func(text string) string {
-        template := templates.NewTemplateExpander(defs+text, "__alert_"+rule.Name(), tmplData, timestamp, m.queryEngine, m.pathPrefix)
-        result, err := template.Expand()
+        tmpl := template.NewTemplateExpander(defs+text, "__alert_"+rule.Name(), tmplData, timestamp, m.queryEngine, m.pathPrefix)
+        result, err := tmpl.Expand()
         if err != nil {
             result = err.Error()
             log.Warnf("Error expanding alert template %v with data '%v': %v", rule.Name(), tmplData, err)
@@ -212,7 +213,7 @@ func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp clientmo
             Value:        aa.Value,
             ActiveSince:  aa.ActiveSince.Time(),
             RuleString:   rule.String(),
-            GeneratorURL: m.prometheusURL + strings.TrimLeft(utility.GraphLinkForExpression(rule.Vector.String()), "/"),
+            GeneratorURL: m.prometheusURL + strings.TrimLeft(strutil.GraphLinkForExpression(rule.Vector.String()), "/"),
         })
     }
     m.notificationHandler.SubmitReqs(notifications)
@@ -281,7 +282,16 @@ func (m *Manager) ApplyConfig(conf *config.Config) {
     copy(rulesSnapshot, m.rules)
     m.rules = m.rules[:0]
 
-    if err := m.loadRuleFiles(conf.RuleFiles...); err != nil {
+    var files []string
+    for _, pat := range conf.RuleFiles {
+        fs, err := filepath.Glob(pat)
+        if err != nil {
+            // The only error can be a bad pattern.
+            log.Errorf("Error retrieving rule files for %s: %s", pat, err)
+        }
+        files = append(files, fs...)
+    }
+    if err := m.loadRuleFiles(files...); err != nil {
        // If loading the new rules failed, restore the old rule set.
        m.rules = rulesSnapshot
        log.Errorf("Error loading rules, previous rule set restored: %s", err)
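Note: since `rule_files` entries may now be glob patterns, `ApplyConfig` expands them with `filepath.Glob` before loading. A minimal standalone sketch of that expansion step (the patterns are illustrative):

```go
package main

import (
	"fmt"
	"log"
	"path/filepath"
)

func main() {
	// Patterns as they might appear under rule_files: in the config.
	patterns := []string{"first.rules", "my/*.rules"}

	var files []string
	for _, pat := range patterns {
		fs, err := filepath.Glob(pat)
		if err != nil {
			// filepath.Glob only fails on a malformed pattern.
			log.Printf("Error retrieving rule files for %s: %s", pat, err)
			continue
		}
		files = append(files, fs...)
	}
	fmt.Println("rule files to load:", files)
}
```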
@@ -21,7 +21,7 @@ import (
     clientmodel "github.com/prometheus/client_golang/model"
 
     "github.com/prometheus/prometheus/promql"
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/util/strutil"
 )
 
 // A RecordingRule records its vector expression into new timeseries.
@@ -89,9 +89,9 @@ func (rule RecordingRule) HTMLSnippet(pathPrefix string) template.HTML {
     ruleExpr := rule.vector.String()
     return template.HTML(fmt.Sprintf(
         `<a href="%s">%s</a>%s = <a href="%s">%s</a>`,
-        pathPrefix+utility.GraphLinkForExpression(rule.name),
+        pathPrefix+strutil.GraphLinkForExpression(rule.name),
         rule.name,
         rule.labels,
-        pathPrefix+utility.GraphLinkForExpression(ruleExpr),
+        pathPrefix+strutil.GraphLinkForExpression(ruleExpr),
         ruleExpr))
 }
@@ -47,6 +47,9 @@ type Storage interface {
     // The iterator will never return samples older than retention time,
     // relative to the time NewIterator was called.
     NewIterator(clientmodel.Fingerprint) SeriesIterator
+    // Drop all time series associated with the given fingerprints. This operation
+    // will not show up in the series operations metrics.
+    DropMetricsForFingerprints(...clientmodel.Fingerprint)
     // Run the various maintenance loops in goroutines. Returns when the
     // storage is ready to use. Keeps everything running in the background
     // until Stop is called.
@@ -34,9 +34,9 @@ import (
     clientmodel "github.com/prometheus/client_golang/model"
 
     "github.com/prometheus/prometheus/storage/local/codable"
-    "github.com/prometheus/prometheus/storage/local/flock"
     "github.com/prometheus/prometheus/storage/local/index"
     "github.com/prometheus/prometheus/storage/metric"
+    "github.com/prometheus/prometheus/util/flock"
 )
 
 const (
@@ -23,7 +23,7 @@ import (
     "github.com/prometheus/prometheus/storage/local/codable"
     "github.com/prometheus/prometheus/storage/local/index"
     "github.com/prometheus/prometheus/storage/metric"
-    "github.com/prometheus/prometheus/utility/test"
+    "github.com/prometheus/prometheus/util/testutil"
 )
 
 var (
@@ -34,16 +34,16 @@ var (
     m5 = clientmodel.Metric{"label": "value5"}
 )
 
-func newTestPersistence(t *testing.T, encoding chunkEncoding) (*persistence, test.Closer) {
+func newTestPersistence(t *testing.T, encoding chunkEncoding) (*persistence, testutil.Closer) {
     *defaultChunkEncoding = int(encoding)
-    dir := test.NewTemporaryDirectory("test_persistence", t)
+    dir := testutil.NewTemporaryDirectory("test_persistence", t)
     p, err := newPersistence(dir.Path(), false, false, func() bool { return false })
     if err != nil {
         dir.Close()
         t.Fatal(err)
     }
     go p.run()
-    return p, test.NewCallbackCloser(func() {
+    return p, testutil.NewCallbackCloser(func() {
         p.close()
         dir.Close()
     })
@@ -430,6 +430,26 @@ func (s *memorySeriesStorage) MetricForFingerprint(fp clientmodel.Fingerprint) c
     }
 }
 
+// DropMetric implements Storage.
+func (s *memorySeriesStorage) DropMetricsForFingerprints(fps ...clientmodel.Fingerprint) {
+    for _, fp := range fps {
+        s.fpLocker.Lock(fp)
+
+        if series, ok := s.fpToSeries.get(fp); ok {
+            s.fpToSeries.del(fp)
+            s.numSeries.Dec()
+            s.persistence.unindexMetric(fp, series.metric)
+            if _, err := s.persistence.deleteSeriesFile(fp); err != nil {
+                log.Errorf("Error deleting series file for %v: %v", fp, err)
+            }
+        } else if err := s.persistence.purgeArchivedMetric(fp); err != nil {
+            log.Errorf("Error purging metric with fingerprint %v: %v", fp, err)
+        }
+
+        s.fpLocker.Unlock(fp)
+    }
+}
+
 // Append implements Storage.
 func (s *memorySeriesStorage) Append(sample *clientmodel.Sample) {
     if s.getNumChunksToPersist() >= s.maxChunksToPersist {
@@ -694,7 +714,7 @@ func (s *memorySeriesStorage) cycleThroughArchivedFingerprints() chan clientmode
 
     for {
         archivedFPs, err := s.persistence.fingerprintsModifiedBefore(
-            clientmodel.TimestampFromTime(time.Now()).Add(-s.dropAfter),
+            clientmodel.Now().Add(-s.dropAfter),
         )
         if err != nil {
             log.Error("Failed to lookup archived fingerprint ranges: ", err)
@@ -750,7 +770,7 @@ loop:
             dirtySeriesCount = 0
             checkpointTimer.Reset(s.checkpointInterval)
         case fp := <-memoryFingerprints:
-            if s.maintainMemorySeries(fp, clientmodel.TimestampFromTime(time.Now()).Add(-s.dropAfter)) {
+            if s.maintainMemorySeries(fp, clientmodel.Now().Add(-s.dropAfter)) {
                 dirtySeriesCount++
                 // Check if we have enough "dirty" series so that we need an early checkpoint.
                 // However, if we are already behind persisting chunks, creating a checkpoint
@@ -764,7 +784,7 @@ loop:
             }
         }
         case fp := <-archivedFingerprints:
-            s.maintainArchivedSeries(fp, clientmodel.TimestampFromTime(time.Now()).Add(-s.dropAfter))
+            s.maintainArchivedSeries(fp, clientmodel.Now().Add(-s.dropAfter))
         }
     }
     // Wait until both channels are closed.
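Note: the new `Storage.DropMetricsForFingerprints` method is exercised in the tests by first resolving fingerprints through `FingerprintsForLabelMatchers` and then dropping them. A condensed sketch of that call sequence, following `TestDropMetrics` below and assuming those accessors are exposed on `local.Storage` as the test suggests:

```go
package example

import (
	clientmodel "github.com/prometheus/client_golang/model"

	"github.com/prometheus/prometheus/storage/local"
	"github.com/prometheus/prometheus/storage/metric"
)

// dropTestSeries mirrors TestDropMetrics from this commit: resolve the
// fingerprints behind a metric name, then drop every series behind them.
func dropTestSeries(s local.Storage) {
	matcher := metric.LabelMatchers{{
		Type:  metric.Equal,
		Name:  clientmodel.MetricNameLabel,
		Value: "test", // illustrative metric name
	}}

	fps := s.FingerprintsForLabelMatchers(matcher)
	s.DropMetricsForFingerprints(fps...)
	s.WaitForIndexing()
}
```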
@@ -25,7 +25,7 @@ import (
     clientmodel "github.com/prometheus/client_golang/model"
 
     "github.com/prometheus/prometheus/storage/metric"
-    "github.com/prometheus/prometheus/utility/test"
+    "github.com/prometheus/prometheus/util/testutil"
 )
 
 func TestFingerprintsForLabelMatchers(t *testing.T) {
@@ -152,8 +152,7 @@ func TestRetentionCutoff(t *testing.T) {
 
     s.dropAfter = 1 * time.Hour
 
-    samples := make(clientmodel.Samples, 120)
-    for i := range samples {
+    for i := 0; i < 120; i++ {
         smpl := &clientmodel.Sample{
             Metric:    clientmodel.Metric{"job": "test"},
             Timestamp: insertStart.Add(time.Duration(i) * time.Minute), // 1 minute intervals.
@@ -204,6 +203,85 @@ func TestRetentionCutoff(t *testing.T) {
     }
 }
 
+func TestDropMetrics(t *testing.T) {
+    now := clientmodel.Now()
+    insertStart := now.Add(-2 * time.Hour)
+
+    s, closer := NewTestStorage(t, 1)
+    defer closer.Close()
+
+    m1 := clientmodel.Metric{clientmodel.MetricNameLabel: "test", "n1": "v1"}
+    m2 := clientmodel.Metric{clientmodel.MetricNameLabel: "test", "n1": "v2"}
+
+    N := 120000
+
+    for j, m := range []clientmodel.Metric{m1, m2} {
+        for i := 0; i < N; i++ {
+            smpl := &clientmodel.Sample{
+                Metric:    m,
+                Timestamp: insertStart.Add(time.Duration(i) * time.Millisecond), // 1 minute intervals.
+                Value:     clientmodel.SampleValue(j),
+            }
+            s.Append(smpl)
+        }
+    }
+    s.WaitForIndexing()
+
+    matcher := metric.LabelMatchers{{
+        Type:  metric.Equal,
+        Name:  clientmodel.MetricNameLabel,
+        Value: "test",
+    }}
+
+    fps := s.FingerprintsForLabelMatchers(matcher)
+    if len(fps) != 2 {
+        t.Fatalf("unexpected number of fingerprints: %d", len(fps))
+    }
+
+    it := s.NewIterator(fps[0])
+    if vals := it.RangeValues(metric.Interval{insertStart, now}); len(vals) != N {
+        t.Fatalf("unexpected number of samples: %d", len(vals))
+    }
+    it = s.NewIterator(fps[1])
+    if vals := it.RangeValues(metric.Interval{insertStart, now}); len(vals) != N {
+        t.Fatalf("unexpected number of samples: %d", len(vals))
+    }
+
+    s.DropMetricsForFingerprints(fps[0])
+    s.WaitForIndexing()
+
+    fps2 := s.FingerprintsForLabelMatchers(matcher)
+    if len(fps2) != 1 {
+        t.Fatalf("unexpected number of fingerprints: %d", len(fps2))
+    }
+
+    it = s.NewIterator(fps[0])
+    if vals := it.RangeValues(metric.Interval{insertStart, now}); len(vals) != 0 {
+        t.Fatalf("unexpected number of samples: %d", len(vals))
+    }
+    it = s.NewIterator(fps[1])
+    if vals := it.RangeValues(metric.Interval{insertStart, now}); len(vals) != N {
+        t.Fatalf("unexpected number of samples: %d", len(vals))
+    }
+
+    s.DropMetricsForFingerprints(fps...)
+    s.WaitForIndexing()
+
+    fps3 := s.FingerprintsForLabelMatchers(matcher)
+    if len(fps3) != 0 {
+        t.Fatalf("unexpected number of fingerprints: %d", len(fps3))
+    }
+
+    it = s.NewIterator(fps[0])
+    if vals := it.RangeValues(metric.Interval{insertStart, now}); len(vals) != 0 {
+        t.Fatalf("unexpected number of samples: %d", len(vals))
+    }
+    it = s.NewIterator(fps[1])
+    if vals := it.RangeValues(metric.Interval{insertStart, now}); len(vals) != 0 {
+        t.Fatalf("unexpected number of samples: %d", len(vals))
+    }
+}
+
 // TestLoop is just a smoke test for the loop method, if we can switch it on and
 // off without disaster.
 func TestLoop(t *testing.T) {
@@ -217,7 +295,7 @@ func TestLoop(t *testing.T) {
             Value:     clientmodel.SampleValue(float64(i) * 0.2),
         }
     }
-    directory := test.NewTemporaryDirectory("test_storage", t)
+    directory := testutil.NewTemporaryDirectory("test_storage", t)
     defer directory.Close()
     o := &MemorySeriesStorageOptions{
         MemoryChunks: 50,
@@ -902,7 +980,7 @@ func benchmarkFuzz(b *testing.B, encoding chunkEncoding) {
     *defaultChunkEncoding = int(encoding)
     const samplesPerRun = 100000
     rand.Seed(42)
-    directory := test.NewTemporaryDirectory("test_storage", b)
+    directory := testutil.NewTemporaryDirectory("test_storage", b)
     defer directory.Close()
     o := &MemorySeriesStorageOptions{
         MemoryChunks: 100,
@@ -21,12 +21,12 @@ package local
 import (
     "time"
 
-    "github.com/prometheus/prometheus/utility/test"
+    "github.com/prometheus/prometheus/util/testutil"
 )
 
 type testStorageCloser struct {
     storage   Storage
-    directory test.Closer
+    directory testutil.Closer
 }
 
 func (t *testStorageCloser) Close() {
@@ -37,9 +37,9 @@ func (t *testStorageCloser) Close() {
 // NewTestStorage creates a storage instance backed by files in a temporary
 // directory. The returned storage is already in serving state. Upon closing the
 // returned test.Closer, the temporary directory is cleaned up.
-func NewTestStorage(t test.T, encoding chunkEncoding) (*memorySeriesStorage, test.Closer) {
+func NewTestStorage(t testutil.T, encoding chunkEncoding) (*memorySeriesStorage, testutil.Closer) {
     *defaultChunkEncoding = int(encoding)
-    directory := test.NewTemporaryDirectory("test_storage", t)
+    directory := testutil.NewTemporaryDirectory("test_storage", t)
     o := &MemorySeriesStorageOptions{
         MemoryChunks:       1000000,
         MaxChunksToPersist: 1000000,
@@ -28,7 +28,7 @@ import (
 
     clientmodel "github.com/prometheus/client_golang/model"
 
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/util/httputil"
 )
 
 const (
@@ -51,7 +51,7 @@ type Client struct {
 func NewClient(url string, timeout time.Duration) *Client {
     return &Client{
         url:        url,
-        httpClient: utility.NewDeadlineClient(timeout),
+        httpClient: httputil.NewDeadlineClient(timeout),
     }
 }
 
@@ -28,7 +28,7 @@ import (
 
     clientmodel "github.com/prometheus/client_golang/model"
 
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/util/httputil"
 )
 
 const (
@@ -50,7 +50,7 @@ type Client struct {
 func NewClient(url string, timeout time.Duration) *Client {
     return &Client{
         url:        url,
-        httpClient: utility.NewDeadlineClient(timeout),
+        httpClient: httputil.NewDeadlineClient(timeout),
     }
 }
 
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package templates
+package template
 
 import (
     "bytes"
@@ -28,7 +28,7 @@ import (
     clientmodel "github.com/prometheus/client_golang/model"
 
     "github.com/prometheus/prometheus/promql"
-    "github.com/prometheus/prometheus/utility"
+    "github.com/prometheus/prometheus/util/strutil"
 )
 
 // A version of vector that's easier to use from templates.
@@ -152,8 +152,8 @@ func NewTemplateExpander(text string, name string, data interface{}, timestamp c
         },
         "match": regexp.MatchString,
         "title": strings.Title,
-        "graphLink": utility.GraphLinkForExpression,
-        "tableLink": utility.TableLinkForExpression,
+        "graphLink": strutil.GraphLinkForExpression,
+        "tableLink": strutil.TableLinkForExpression,
         "sortByLabel": func(label string, v queryResult) queryResult {
             sorter := queryResultByLabelSorter{v[:], label}
             sort.Stable(sorter)
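Note: after the rename from `templates` to `template`, callers bind a local `tmpl` variable to avoid shadowing the package name, as the rules manager and console handler in this commit do. A sketch of that call shape; the engine type and path prefix are assumptions based on how `m.queryEngine` and `h.PathPrefix` are used above, not a verified signature:

```go
package example

import (
	clientmodel "github.com/prometheus/client_golang/model"

	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/template"
)

// expandAlertText follows the NewTemplateExpander call shape shown in
// rules/manager.go; "__alert_example" and "/" are placeholder arguments.
func expandAlertText(text string, data interface{}, engine *promql.Engine) (string, error) {
	tmpl := template.NewTemplateExpander(text, "__alert_example", data, clientmodel.Now(), engine, "/")
	return tmpl.Expand()
}
```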
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package templates
+package template
 
 import (
     "math"
@@ -5,11 +5,11 @@ import (
     "path/filepath"
     "testing"
 
-    "github.com/prometheus/prometheus/utility/test"
+    "github.com/prometheus/prometheus/util/testutil"
 )
 
 func TestLocking(t *testing.T) {
-    dir := test.NewTemporaryDirectory("test_flock", t)
+    dir := testutil.NewTemporaryDirectory("test_flock", t)
     defer dir.Close()
 
     fileName := filepath.Join(dir.Path(), "LOCK")
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package httputils
+package httputil
 
 import (
     "compress/gzip"
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package utility
+package httputil
 
 import (
     "net"
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package httputils
+package httputil
 
 import (
     "encoding/json"
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package utility
+package strutil
 
 import (
     "fmt"
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package test
+package testutil
 
 import (
     "io/ioutil"
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package test
+package testutil
 
 // ErrorEqual compares Go errors for equality.
 func ErrorEqual(left, right error) bool {
@@ -1,58 +0,0 @@
-// Copyright 2013 The Prometheus Authors
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package utility
-
-// Set is a type which models a set.
-type Set map[interface{}]struct{}
-
-// Add adds an item to the set.
-func (s Set) Add(v interface{}) {
-    s[v] = struct{}{}
-}
-
-// Remove removes an item from the set.
-func (s Set) Remove(v interface{}) {
-    delete(s, v)
-}
-
-// Elements returns a slice containing all elements in the set.
-func (s Set) Elements() []interface{} {
-    result := make([]interface{}, 0, len(s))
-
-    for k := range s {
-        result = append(result, k)
-    }
-
-    return result
-}
-
-// Has returns true if an element is contained in the set.
-func (s Set) Has(v interface{}) bool {
-    _, p := s[v]
-
-    return p
-}
-
-// Intersection returns a new set with items that exist in both sets.
-func (s Set) Intersection(o Set) Set {
-    result := Set{}
-
-    for k := range s {
-        if o.Has(k) {
-            result[k] = struct{}{}
-        }
-    }
-
-    return result
-}
@@ -1,122 +0,0 @@
-// Copyright 2013 The Prometheus Authors
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package utility
-
-import (
-    "testing"
-    "testing/quick"
-)
-
-func TestSetEqualMemberships(t *testing.T) {
-    f := func(x int) bool {
-        first := make(Set)
-        second := make(Set)
-
-        first.Add(x)
-        second.Add(x)
-
-        intersection := first.Intersection(second)
-
-        members := intersection.Elements()
-
-        return members != nil && len(members) == 1 && members[0] == x
-    }
-
-    if err := quick.Check(f, nil); err != nil {
-        t.Error(err)
-    }
-}
-
-func TestSetInequalMemberships(t *testing.T) {
-    f := func(x int) bool {
-        first := make(Set)
-        second := make(Set)
-
-        first.Add(x)
-
-        intersection := first.Intersection(second)
-
-        members := intersection.Elements()
-
-        return members != nil && len(members) == 0
-    }
-
-    if err := quick.Check(f, nil); err != nil {
-        t.Error(err)
-    }
-}
-
-func TestSetAsymmetricMemberships(t *testing.T) {
-    f := func(x int) bool {
-        first := make(Set)
-        second := make(Set)
-
-        first.Add(x)
-        second.Add(x)
-        first.Add(x + 1)
-        second.Add(x + 1)
-        second.Add(x + 2)
-        first.Add(x + 2)
-        first.Add(x + 3)
-        second.Add(x + 4)
-
-        intersection := first.Intersection(second)
-
-        members := intersection.Elements()
-
-        return members != nil && len(members) == 3
-    }
-
-    if err := quick.Check(f, nil); err != nil {
-        t.Error(err)
-    }
-}
-
-func TestSetRemoval(t *testing.T) {
-    f := func(x int) bool {
-        first := make(Set)
-
-        first.Add(x)
-        first.Remove(x)
-
-        members := first.Elements()
-
-        return members != nil && len(members) == 0
-    }
-
-    if err := quick.Check(f, nil); err != nil {
-        t.Error(err)
-    }
-}
-
-func TestSetAdditionAndRemoval(t *testing.T) {
-    f := func(x int) bool {
-        first := make(Set)
-        second := make(Set)
-
-        first.Add(x)
-        second.Add(x)
-        first.Add(x + 1)
-        first.Remove(x + 1)
-
-        intersection := first.Intersection(second)
-        members := intersection.Elements()
-
-        return members != nil && len(members) == 1 && members[0] == x
-    }
-
-    if err := quick.Check(f, nil); err != nil {
-        t.Error(err)
-    }
-}
@@ -20,7 +20,7 @@ SUFFIXES:
 blob/files.go: $(shell find templates/ static/ -type f)
     # Note that embed-static.sh excludes map files and the
     # non-minified bootstrap files.
-    ../utility/embed-static.sh static templates | $(GOFMT) > $@
+    ../scripts/embed-static.sh static templates | $(GOFMT) > $@
 
 clean:
     -rm -f blob/files.go
@@ -22,7 +22,7 @@ import (
 
     "github.com/prometheus/prometheus/promql"
     "github.com/prometheus/prometheus/storage/local"
-    "github.com/prometheus/prometheus/web/httputils"
+    "github.com/prometheus/prometheus/util/httputil"
 )
 
 // MetricsService manages the /api HTTP endpoint.
@@ -35,7 +35,7 @@ type MetricsService struct {
 // RegisterHandler registers the handler for the various endpoints below /api.
 func (msrv *MetricsService) RegisterHandler(pathPrefix string) {
     handler := func(h func(http.ResponseWriter, *http.Request)) http.Handler {
-        return httputils.CompressionHandler{
+        return httputil.CompressionHandler{
             Handler: http.HandlerFunc(h),
         }
     }
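Note: `CompressionHandler` keeps its role after the move from `web/httputils` to `util/httputil`: it wraps an `http.Handler` so responses can be served compressed. A minimal wiring sketch following the `RegisterHandler` pattern above; the `/hello` path and handler body are illustrative:

```go
package main

import (
	"net/http"

	"github.com/prometheus/prometheus/util/httputil"
)

func main() {
	hello := func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("hello"))
	}

	// Wrap a plain handler func as web/api/api.go does after this change.
	http.Handle("/hello", httputil.CompressionHandler{
		Handler: http.HandlerFunc(hello),
	})
	http.ListenAndServe(":8080", nil)
}
```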
@@ -26,7 +26,7 @@ import (
 
     clientmodel "github.com/prometheus/client_golang/model"
 
-    "github.com/prometheus/prometheus/web/httputils"
+    "github.com/prometheus/prometheus/util/httputil"
 )
 
 // Enables cross-site script calls.
@@ -39,7 +39,7 @@ func setAccessControlHeaders(w http.ResponseWriter) {
 
 func httpJSONError(w http.ResponseWriter, err error, code int) {
     w.WriteHeader(code)
-    httputils.ErrorJSON(w, err)
+    httputil.ErrorJSON(w, err)
 }
 
 func parseTimestampOrNow(t string, now clientmodel.Timestamp) (clientmodel.Timestamp, error) {
@@ -67,7 +67,7 @@ func (serv MetricsService) Query(w http.ResponseWriter, r *http.Request) {
     setAccessControlHeaders(w)
     w.Header().Set("Content-Type", "application/json")
 
-    params := httputils.GetQueryParams(r)
+    params := httputil.GetQueryParams(r)
     expr := params.Get("expr")
 
     timestamp, err := parseTimestampOrNow(params.Get("timestamp"), serv.Now())
@@ -88,7 +88,7 @@ func (serv MetricsService) Query(w http.ResponseWriter, r *http.Request) {
     }
     log.Debugf("Instant query: %s\nQuery stats:\n%s\n", expr, query.Stats())
 
-    httputils.RespondJSON(w, res.Value)
+    httputil.RespondJSON(w, res.Value)
 }
 
 // QueryRange handles the /api/query_range endpoint.
@@ -96,7 +96,7 @@ func (serv MetricsService) QueryRange(w http.ResponseWriter, r *http.Request) {
     setAccessControlHeaders(w)
     w.Header().Set("Content-Type", "application/json")
 
-    params := httputils.GetQueryParams(r)
+    params := httputil.GetQueryParams(r)
     expr := params.Get("expr")
 
     duration, err := parseDuration(params.Get("range"))
@@ -148,7 +148,7 @@ func (serv MetricsService) QueryRange(w http.ResponseWriter, r *http.Request) {
     }
 
     log.Debugf("Range query: %s\nQuery stats:\n%s\n", expr, query.Stats())
-    httputils.RespondJSON(w, matrix)
+    httputil.RespondJSON(w, matrix)
 }
 
 // Metrics handles the /api/metrics endpoint.
@@ -23,7 +23,7 @@ import (
 
     clientmodel "github.com/prometheus/client_golang/model"
     "github.com/prometheus/prometheus/promql"
-    "github.com/prometheus/prometheus/templates"
+    "github.com/prometheus/prometheus/template"
 )
 
 var (
@@ -70,13 +70,13 @@ func (h *ConsolesHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
         Path: r.URL.Path,
     }
 
-    template := templates.NewTemplateExpander(string(text), "__console_"+r.URL.Path, data, clientmodel.Now(), h.QueryEngine, h.PathPrefix)
+    tmpl := template.NewTemplateExpander(string(text), "__console_"+r.URL.Path, data, clientmodel.Now(), h.QueryEngine, h.PathPrefix)
     filenames, err := filepath.Glob(*consoleLibrariesPath + "/*.lib")
     if err != nil {
         http.Error(w, err.Error(), http.StatusInternalServerError)
         return
     }
-    result, err := template.ExpandHTML(filenames)
+    result, err := tmpl.ExpandHTML(filenames)
     if err != nil {
         http.Error(w, err.Error(), http.StatusInternalServerError)
         return