Switch from client_golang/model to common/model

Fabian Reinartz 2015-08-20 17:18:46 +02:00
parent 7a6d12a44c
commit 306e8468a0
72 changed files with 1417 additions and 1417 deletions
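
The change is mechanical throughout: the clientmodel alias for github.com/prometheus/client_golang/model is dropped and github.com/prometheus/common/model is imported directly, with its types referenced as model.*. A minimal standalone sketch of the new pattern (the label values here are illustrative, not taken from the diff):

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	// Before: clientmodel "github.com/prometheus/client_golang/model" and
	// clientmodel.LabelSet. After: the same types come from common/model.
	ls := model.LabelSet{"job": "node", "instance": "localhost:9100"}
	fmt.Println(ls)
}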


@@ -25,7 +25,7 @@ import (
 "gopkg.in/yaml.v2"
-clientmodel "github.com/prometheus/client_golang/model"
+"github.com/prometheus/common/model"
 "github.com/prometheus/prometheus/util/strutil"
 )
@@ -270,7 +270,7 @@ type GlobalConfig struct {
 // How frequently to evaluate rules by default.
 EvaluationInterval Duration `yaml:"evaluation_interval,omitempty"`
 // The labels to add to any timeseries that this Prometheus instance scrapes.
-Labels clientmodel.LabelSet `yaml:"labels,omitempty"`
+Labels model.LabelSet `yaml:"labels,omitempty"`
 // Catches all undefined fields and must be empty after parsing.
 XXX map[string]interface{} `yaml:",inline"`
@@ -399,9 +399,9 @@ func (a *BasicAuth) UnmarshalYAML(unmarshal func(interface{}) error) error {
 type TargetGroup struct {
 // Targets is a list of targets identified by a label set. Each target is
 // uniquely identifiable in the group by its address label.
-Targets []clientmodel.LabelSet
+Targets []model.LabelSet
 // Labels is a set of labels that is common across all targets in the group.
-Labels clientmodel.LabelSet
+Labels model.LabelSet
 // Source is an identifier that describes a group of targets.
 Source string
@@ -415,19 +415,19 @@ func (tg TargetGroup) String() string {
 func (tg *TargetGroup) UnmarshalYAML(unmarshal func(interface{}) error) error {
 g := struct {
 Targets []string `yaml:"targets"`
-Labels clientmodel.LabelSet `yaml:"labels"`
+Labels model.LabelSet `yaml:"labels"`
 XXX map[string]interface{} `yaml:",inline"`
 }{}
 if err := unmarshal(&g); err != nil {
 return err
 }
-tg.Targets = make([]clientmodel.LabelSet, 0, len(g.Targets))
+tg.Targets = make([]model.LabelSet, 0, len(g.Targets))
 for _, t := range g.Targets {
 if strings.Contains(t, "/") {
 return fmt.Errorf("%q is not a valid hostname", t)
 }
-tg.Targets = append(tg.Targets, clientmodel.LabelSet{
-clientmodel.AddressLabel: clientmodel.LabelValue(t),
+tg.Targets = append(tg.Targets, model.LabelSet{
+model.AddressLabel: model.LabelValue(t),
 })
 }
 tg.Labels = g.Labels
@@ -437,14 +437,14 @@ func (tg *TargetGroup) UnmarshalYAML(unmarshal func(interface{}) error) error {
 // MarshalYAML implements the yaml.Marshaler interface.
 func (tg TargetGroup) MarshalYAML() (interface{}, error) {
 g := &struct {
 Targets []string `yaml:"targets"`
-Labels clientmodel.LabelSet `yaml:"labels,omitempty"`
+Labels model.LabelSet `yaml:"labels,omitempty"`
 }{
 Targets: make([]string, 0, len(tg.Targets)),
 Labels: tg.Labels,
 }
 for _, t := range tg.Targets {
-g.Targets = append(g.Targets, string(t[clientmodel.AddressLabel]))
+g.Targets = append(g.Targets, string(t[model.AddressLabel]))
 }
 return g, nil
 }
@@ -452,19 +452,19 @@ func (tg TargetGroup) MarshalYAML() (interface{}, error) {
 // UnmarshalJSON implements the json.Unmarshaler interface.
 func (tg *TargetGroup) UnmarshalJSON(b []byte) error {
 g := struct {
 Targets []string `json:"targets"`
-Labels clientmodel.LabelSet `json:"labels"`
+Labels model.LabelSet `json:"labels"`
 }{}
 if err := json.Unmarshal(b, &g); err != nil {
 return err
 }
-tg.Targets = make([]clientmodel.LabelSet, 0, len(g.Targets))
+tg.Targets = make([]model.LabelSet, 0, len(g.Targets))
 for _, t := range g.Targets {
 if strings.Contains(t, "/") {
 return fmt.Errorf("%q is not a valid hostname", t)
 }
-tg.Targets = append(tg.Targets, clientmodel.LabelSet{
-clientmodel.AddressLabel: clientmodel.LabelValue(t),
+tg.Targets = append(tg.Targets, model.LabelSet{
+model.AddressLabel: model.LabelValue(t),
 })
 }
 tg.Labels = g.Labels
@@ -686,7 +686,7 @@ func (a *RelabelAction) UnmarshalYAML(unmarshal func(interface{}) error) error {
 type RelabelConfig struct {
 // A list of labels from which values are taken and concatenated
 // with the configured separator in order.
-SourceLabels clientmodel.LabelNames `yaml:"source_labels,flow"`
+SourceLabels model.LabelNames `yaml:"source_labels,flow"`
 // Separator is the string between concatenated values from the source labels.
 Separator string `yaml:"separator,omitempty"`
 // Regex against which the concatenation is matched.
@@ -694,7 +694,7 @@ type RelabelConfig struct {
 // Modulus to take of the hash of concatenated values from the source labels.
 Modulus uint64 `yaml:"modulus,omitempty"`
 // The label to which the resulting string is written in a replacement.
-TargetLabel clientmodel.LabelName `yaml:"target_label,omitempty"`
+TargetLabel model.LabelName `yaml:"target_label,omitempty"`
 // Replacement is the regex replacement pattern to be used.
 Replacement string `yaml:"replacement,omitempty"`
 // Action is the action to be performed for the relabeling.
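
The TargetGroup hunks above map the targets/labels YAML shape onto model.LabelSet values keyed by model.AddressLabel. A self-contained sketch of that mapping with gopkg.in/yaml.v2; the group struct mirrors the anonymous struct in UnmarshalYAML and is defined here only for illustration:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
	"gopkg.in/yaml.v2"
)

// group mirrors the anonymous struct used in TargetGroup.UnmarshalYAML above.
type group struct {
	Targets []string       `yaml:"targets"`
	Labels  model.LabelSet `yaml:"labels"`
}

func main() {
	data := []byte("targets: ['localhost:9090', 'localhost:9191']\nlabels:\n  my: label\n")
	var g group
	if err := yaml.Unmarshal(data, &g); err != nil {
		panic(err)
	}
	// Each address becomes a one-entry label set keyed by model.AddressLabel,
	// as the config package does after validating the target strings.
	targets := make([]model.LabelSet, 0, len(g.Targets))
	for _, t := range g.Targets {
		targets = append(targets, model.LabelSet{model.AddressLabel: model.LabelValue(t)})
	}
	fmt.Println(targets, g.Labels)
}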


@@ -24,7 +24,7 @@ import (
 "gopkg.in/yaml.v2"
-clientmodel "github.com/prometheus/client_golang/model"
+"github.com/prometheus/common/model"
 )
 var expectedConf = &Config{
@@ -33,7 +33,7 @@ var expectedConf = &Config{
 ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
 EvaluationInterval: Duration(30 * time.Second),
-Labels: clientmodel.LabelSet{
+Labels: model.LabelSet{
 "monitor": "codelab",
 "foo": "bar",
 },
@@ -60,11 +60,11 @@ var expectedConf = &Config{
 TargetGroups: []*TargetGroup{
 {
-Targets: []clientmodel.LabelSet{
-{clientmodel.AddressLabel: "localhost:9090"},
-{clientmodel.AddressLabel: "localhost:9191"},
+Targets: []model.LabelSet{
+{model.AddressLabel: "localhost:9090"},
+{model.AddressLabel: "localhost:9191"},
 },
-Labels: clientmodel.LabelSet{
+Labels: model.LabelSet{
 "my": "label",
 "your": "label",
 },
@@ -84,7 +84,7 @@ var expectedConf = &Config{
 RelabelConfigs: []*RelabelConfig{
 {
-SourceLabels: clientmodel.LabelNames{"job", "__meta_dns_srv_name"},
+SourceLabels: model.LabelNames{"job", "__meta_dns_srv_name"},
 TargetLabel: "job",
 Separator: ";",
 Regex: &Regexp{*regexp.MustCompile("(.*)some-[regex]$")},
@@ -126,20 +126,20 @@ var expectedConf = &Config{
 RelabelConfigs: []*RelabelConfig{
 {
-SourceLabels: clientmodel.LabelNames{"job"},
+SourceLabels: model.LabelNames{"job"},
 Regex: &Regexp{*regexp.MustCompile("(.*)some-[regex]$")},
 Separator: ";",
 Action: RelabelDrop,
 },
 {
-SourceLabels: clientmodel.LabelNames{"__address__"},
+SourceLabels: model.LabelNames{"__address__"},
 TargetLabel: "__tmp_hash",
 Modulus: 8,
 Separator: ";",
 Action: RelabelHashMod,
 },
 {
-SourceLabels: clientmodel.LabelNames{"__tmp_hash"},
+SourceLabels: model.LabelNames{"__tmp_hash"},
 Regex: &Regexp{*regexp.MustCompile("^1$")},
 Separator: ";",
 Action: RelabelKeep,
@@ -147,7 +147,7 @@ var expectedConf = &Config{
 },
 MetricRelabelConfigs: []*RelabelConfig{
 {
-SourceLabels: clientmodel.LabelNames{"__name__"},
+SourceLabels: model.LabelNames{"__name__"},
 Regex: &Regexp{*regexp.MustCompile("expensive_metric.*$")},
 Separator: ";",
 Action: RelabelDrop,


@@ -25,7 +25,7 @@ import (
 "github.com/prometheus/client_golang/prometheus"
 "github.com/prometheus/log"
-clientmodel "github.com/prometheus/client_golang/model"
+"github.com/prometheus/common/model"
 "github.com/prometheus/prometheus/util/httputil"
 )
@@ -51,9 +51,9 @@ type NotificationReq struct {
 // A reference to the runbook for the alert.
 Runbook string
 // Labels associated with this alert notification, including alert name.
-Labels clientmodel.LabelSet
+Labels model.LabelSet
 // Current value of alert
-Value clientmodel.SampleValue
+Value model.SampleValue
 // Since when this alert has been active (pending or firing).
 ActiveSince time.Time
 // A textual representation of the rule that triggered the alert.
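
NotificationReq now carries model.LabelSet and model.SampleValue. A small illustrative sketch of building such a payload; alertReq is a stand-in defined for this example, not the notification package's type:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

// alertReq mirrors the NotificationReq fields shown in the hunk above.
type alertReq struct {
	Labels      model.LabelSet
	Value       model.SampleValue
	ActiveSince time.Time
}

func main() {
	r := alertReq{
		Labels:      model.LabelSet{"alertname": "HighErrorRate", "instance": "testinstance"},
		Value:       model.SampleValue(1.0 / 3.0),
		ActiveSince: time.Now(),
	}
	fmt.Printf("%s => %v (active since %v)\n", r.Labels, r.Value, r.ActiveSince)
}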


@@ -21,7 +21,7 @@ import (
 "testing"
 "time"
-clientmodel "github.com/prometheus/client_golang/model"
+"github.com/prometheus/common/model"
 )
 type testHTTPPoster struct {
@@ -65,10 +65,10 @@ func (s *testNotificationScenario) test(i int, t *testing.T) {
 Summary: s.summary,
 Description: s.description,
 Runbook: s.runbook,
-Labels: clientmodel.LabelSet{
-clientmodel.LabelName("instance"): clientmodel.LabelValue("testinstance"),
+Labels: model.LabelSet{
+model.LabelName("instance"): model.LabelValue("testinstance"),
 },
-Value: clientmodel.SampleValue(1.0 / 3.0),
+Value: model.SampleValue(1.0 / 3.0),
 ActiveSince: time.Time{},
 RuleString: "Test rule string",
 GeneratorURL: "prometheus_url",


@@ -19,7 +19,7 @@ import (
 "golang.org/x/net/context"
-clientmodel "github.com/prometheus/client_golang/model"
+"github.com/prometheus/common/model"
 "github.com/prometheus/prometheus/storage/local"
 )
@@ -32,7 +32,7 @@ type Analyzer struct {
 // The expression being analyzed.
 Expr Expr
 // The time range for evaluation of Expr.
-Start, End clientmodel.Timestamp
+Start, End model.Time
 // The preload times for different query time offsets.
 offsetPreloadTimes map[time.Duration]preloadTimes
@@ -45,11 +45,11 @@ type preloadTimes struct {
 // Instants require single samples to be loaded along the entire query
 // range, with intervals between the samples corresponding to the query
 // resolution.
-instants map[clientmodel.Fingerprint]struct{}
+instants map[model.Fingerprint]struct{}
 // Ranges require loading a range of samples at each resolution step,
 // stretching backwards from the current evaluation timestamp. The length of
 // the range into the past is given by the duration, as in "foo[5m]".
-ranges map[clientmodel.Fingerprint]time.Duration
+ranges map[model.Fingerprint]time.Duration
 }
 // Analyze the provided expression and attach metrics and fingerprints to data-selecting
@@ -60,8 +60,8 @@ func (a *Analyzer) Analyze(ctx context.Context) error {
 getPreloadTimes := func(offset time.Duration) preloadTimes {
 if _, ok := a.offsetPreloadTimes[offset]; !ok {
 a.offsetPreloadTimes[offset] = preloadTimes{
-instants: map[clientmodel.Fingerprint]struct{}{},
-ranges: map[clientmodel.Fingerprint]time.Duration{},
+instants: map[model.Fingerprint]struct{}{},
+ranges: map[model.Fingerprint]time.Duration{},
 }
 }
 return a.offsetPreloadTimes[offset]
@@ -73,7 +73,7 @@ func (a *Analyzer) Analyze(ctx context.Context) error {
 switch n := node.(type) {
 case *VectorSelector:
 n.metrics = a.Storage.MetricsForLabelMatchers(n.LabelMatchers...)
-n.iterators = make(map[clientmodel.Fingerprint]local.SeriesIterator, len(n.metrics))
+n.iterators = make(map[model.Fingerprint]local.SeriesIterator, len(n.metrics))
 pt := getPreloadTimes(n.Offset)
 for fp := range n.metrics {
@@ -86,7 +86,7 @@ func (a *Analyzer) Analyze(ctx context.Context) error {
 }
 case *MatrixSelector:
 n.metrics = a.Storage.MetricsForLabelMatchers(n.LabelMatchers...)
-n.iterators = make(map[clientmodel.Fingerprint]local.SeriesIterator, len(n.metrics))
+n.iterators = make(map[model.Fingerprint]local.SeriesIterator, len(n.metrics))
 pt := getPreloadTimes(n.Offset)
 for fp := range n.metrics {
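
clientmodel.Timestamp becomes model.Time across the PromQL packages. A brief sketch of the model.Time helpers the Analyzer's Start/End fields rely on, assuming the common/model API of this era (Now, Add, Sub, Before, Time):

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	// model.Time is a millisecond-resolution timestamp that converts to and
	// from time.Time and mixes with time.Duration in Add/Sub.
	end := model.Now()
	start := end.Add(-5 * time.Minute)
	fmt.Println(start.Before(end), end.Sub(start), start.Time().UTC())
}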


@@ -18,7 +18,7 @@ import (
 "fmt"
 "time"
-clientmodel "github.com/prometheus/client_golang/model"
+"github.com/prometheus/common/model"
 "github.com/prometheus/prometheus/storage/local"
 "github.com/prometheus/prometheus/storage/metric"
@@ -59,7 +59,7 @@ type AlertStmt struct {
 Name string
 Expr Expr
 Duration time.Duration
-Labels clientmodel.LabelSet
+Labels model.LabelSet
 Summary string
 Description string
 Runbook string
@@ -72,7 +72,7 @@ type EvalStmt struct {
 // The time boundaries for the evaluation. If Start equals End an instant
 // is evaluated.
-Start, End clientmodel.Timestamp
+Start, End model.Time
 // Time between two evaluated instants for the range [Start:End].
 Interval time.Duration
 }
@@ -81,7 +81,7 @@ type EvalStmt struct {
 type RecordStmt struct {
 Name string
 Expr Expr
-Labels clientmodel.LabelSet
+Labels model.LabelSet
 }
 func (*AlertStmt) stmt() {}
@@ -136,10 +136,10 @@ type Expressions []Expr
 // AggregateExpr represents an aggregation operation on a vector.
 type AggregateExpr struct {
 Op itemType // The used aggregation operation.
 Expr Expr // The vector expression over which is aggregated.
-Grouping clientmodel.LabelNames // The labels by which to group the vector.
+Grouping model.LabelNames // The labels by which to group the vector.
 KeepExtraLabels bool // Whether to keep extra labels common among result elements.
 }
 // BinaryExpr represents a binary expression between two child expressions.
@@ -166,13 +166,13 @@ type MatrixSelector struct {
 LabelMatchers metric.LabelMatchers
 // The series iterators are populated at query analysis time.
-iterators map[clientmodel.Fingerprint]local.SeriesIterator
-metrics map[clientmodel.Fingerprint]clientmodel.COWMetric
+iterators map[model.Fingerprint]local.SeriesIterator
+metrics map[model.Fingerprint]model.COWMetric
 }
 // NumberLiteral represents a number.
 type NumberLiteral struct {
-Val clientmodel.SampleValue
+Val model.SampleValue
 }
 // ParenExpr wraps an expression so it cannot be disassembled as a consequence
@@ -200,8 +200,8 @@ type VectorSelector struct {
 LabelMatchers metric.LabelMatchers
 // The series iterators are populated at query analysis time.
-iterators map[clientmodel.Fingerprint]local.SeriesIterator
-metrics map[clientmodel.Fingerprint]clientmodel.COWMetric
+iterators map[model.Fingerprint]local.SeriesIterator
+metrics map[model.Fingerprint]model.COWMetric
 }
 func (e *AggregateExpr) Type() ExprType { return ExprVector }
@@ -262,10 +262,10 @@ type VectorMatching struct {
 Card VectorMatchCardinality
 // On contains the labels which define equality of a pair
 // of elements from the vectors.
-On clientmodel.LabelNames
+On model.LabelNames
 // Include contains additional labels that should be included in
 // the result from the side with the higher cardinality.
-Include clientmodel.LabelNames
+Include model.LabelNames
 }
 // A Visitor's Visit method is invoked for each node encountered by Walk.


@@ -25,7 +25,7 @@ import (
 "github.com/prometheus/log"
 "golang.org/x/net/context"
-clientmodel "github.com/prometheus/client_golang/model"
+"github.com/prometheus/common/model"
 "github.com/prometheus/prometheus/storage/local"
 "github.com/prometheus/prometheus/storage/metric"
@@ -34,22 +34,22 @@ import (
 // SampleStream is a stream of Values belonging to an attached COWMetric.
 type SampleStream struct {
-Metric clientmodel.COWMetric `json:"metric"`
+Metric model.COWMetric `json:"metric"`
 Values metric.Values `json:"values"`
 }
 // Sample is a single sample belonging to a COWMetric.
 type Sample struct {
-Metric clientmodel.COWMetric `json:"metric"`
-Value clientmodel.SampleValue `json:"value"`
-Timestamp clientmodel.Timestamp `json:"timestamp"`
+Metric model.COWMetric `json:"metric"`
+Value model.SampleValue `json:"value"`
+Timestamp model.Time `json:"timestamp"`
 }
 // MarshalJSON implements json.Marshaler.
 func (s *Sample) MarshalJSON() ([]byte, error) {
 v := struct {
-Metric clientmodel.COWMetric `json:"metric"`
+Metric model.COWMetric `json:"metric"`
 Value metric.SamplePair `json:"value"`
 }{
 Metric: s.Metric,
 Value: metric.SamplePair{
@@ -63,8 +63,8 @@ func (s *Sample) MarshalJSON() ([]byte, error) {
 // Scalar is a scalar value evaluated at the set timestamp.
 type Scalar struct {
-Value clientmodel.SampleValue `json:"value"`
-Timestamp clientmodel.Timestamp `json:"timestamp"`
+Value model.SampleValue `json:"value"`
+Timestamp model.Time `json:"timestamp"`
 }
 func (s *Scalar) String() string {
@@ -79,8 +79,8 @@ func (s *Scalar) MarshalJSON() ([]byte, error) {
 // String is a string value evaluated at the set timestamp.
 type String struct {
 Value string `json:"value"`
-Timestamp clientmodel.Timestamp `json:"timestamp"`
+Timestamp model.Time `json:"timestamp"`
 }
 // MarshalJSON implements json.Marshaler.
@@ -92,7 +92,7 @@ func (s *String) String() string {
 return s.Value
 }
-// Vector is basically only an alias for clientmodel.Samples, but the
+// Vector is basically only an alias for model.Samples, but the
 // contract is that in a Vector, all Samples have the same timestamp.
 type Vector []*Sample
@@ -309,7 +309,7 @@ func (ng *Engine) Stop() {
 }
 // NewInstantQuery returns an evaluation query for the given expression at the given time.
-func (ng *Engine) NewInstantQuery(qs string, ts clientmodel.Timestamp) (Query, error) {
+func (ng *Engine) NewInstantQuery(qs string, ts model.Time) (Query, error) {
 expr, err := ParseExpr(qs)
 if err != nil {
 return nil, err
@@ -322,7 +322,7 @@ func (ng *Engine) NewInstantQuery(qs string, ts clientmodel.Timestamp) (Query, e
 // NewRangeQuery returns an evaluation query for the given time range and with
 // the resolution set by the interval.
-func (ng *Engine) NewRangeQuery(qs string, start, end clientmodel.Timestamp, interval time.Duration) (Query, error) {
+func (ng *Engine) NewRangeQuery(qs string, start, end model.Time, interval time.Duration) (Query, error) {
 expr, err := ParseExpr(qs)
 if err != nil {
 return nil, err
@@ -336,7 +336,7 @@ func (ng *Engine) NewRangeQuery(qs string, start, end clientmodel.Timestamp, int
 return qry, nil
 }
-func (ng *Engine) newQuery(expr Expr, start, end clientmodel.Timestamp, interval time.Duration) *query {
+func (ng *Engine) newQuery(expr Expr, start, end model.Time, interval time.Duration) *query {
 es := &EvalStmt{
 Expr: expr,
 Start: start,
@@ -459,7 +459,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
 numSteps := int(s.End.Sub(s.Start) / s.Interval)
 // Range evaluation.
-sampleStreams := map[clientmodel.Fingerprint]*SampleStream{}
+sampleStreams := map[model.Fingerprint]*SampleStream{}
 for ts := s.Start; !ts.After(s.End); ts = ts.Add(s.Interval) {
 if err := contextDone(ctx, "range evaluation"); err != nil {
@@ -538,7 +538,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
 type evaluator struct {
 ctx context.Context
-Timestamp clientmodel.Timestamp
+Timestamp model.Time
 }
 // fatalf causes a panic with the input formatted into an error.
@@ -902,7 +902,7 @@ func (ev *evaluator) vectorBinop(op itemType, lhs, rhs Vector, matching *VectorM
 // In many-to-one matching the grouping labels have to ensure a unique metric
 // for the result vector. Check whether those labels have already been added for
 // the same matching labels.
-insertSig := clientmodel.SignatureForLabels(metric.Metric, matching.Include)
+insertSig := model.SignatureForLabels(metric.Metric, matching.Include...)
 if !exists {
 insertedSigs = map[uint64]struct{}{}
 matchedSigs[sig] = insertedSigs
@@ -923,36 +923,36 @@ func (ev *evaluator) vectorBinop(op itemType, lhs, rhs Vector, matching *VectorM
 // signatureFunc returns a function that calculates the signature for a metric
 // based on the provided labels.
-func signatureFunc(labels ...clientmodel.LabelName) func(m clientmodel.COWMetric) uint64 {
+func signatureFunc(labels ...model.LabelName) func(m model.COWMetric) uint64 {
 if len(labels) == 0 {
-return func(m clientmodel.COWMetric) uint64 {
-m.Delete(clientmodel.MetricNameLabel)
+return func(m model.COWMetric) uint64 {
+m.Del(model.MetricNameLabel)
 return uint64(m.Metric.Fingerprint())
 }
 }
-return func(m clientmodel.COWMetric) uint64 {
-return clientmodel.SignatureForLabels(m.Metric, labels)
+return func(m model.COWMetric) uint64 {
+return model.SignatureForLabels(m.Metric, labels...)
 }
 }
 // resultMetric returns the metric for the given sample(s) based on the vector
 // binary operation and the matching options.
-func resultMetric(met clientmodel.COWMetric, op itemType, labels ...clientmodel.LabelName) clientmodel.COWMetric {
+func resultMetric(met model.COWMetric, op itemType, labels ...model.LabelName) model.COWMetric {
 if len(labels) == 0 {
 if shouldDropMetricName(op) {
-met.Delete(clientmodel.MetricNameLabel)
+met.Del(model.MetricNameLabel)
 }
 return met
 }
 // As we definitly write, creating a new metric is the easiest solution.
-m := clientmodel.Metric{}
+m := model.Metric{}
 for _, ln := range labels {
 // Included labels from the `group_x` modifier are taken from the "many"-side.
 if v, ok := met.Metric[ln]; ok {
 m[ln] = v
 }
 }
-return clientmodel.COWMetric{Metric: m, Copied: false}
+return model.COWMetric{Metric: m, Copied: false}
 }
 // vectorScalarBinop evaluates a binary operation between a vector and a scalar.
@@ -970,7 +970,7 @@ func (ev *evaluator) vectorScalarBinop(op itemType, lhs Vector, rhs *Scalar, swa
 if keep {
 lhsSample.Value = value
 if shouldDropMetricName(op) {
-lhsSample.Metric.Delete(clientmodel.MetricNameLabel)
+lhsSample.Metric.Del(model.MetricNameLabel)
 }
 vector = append(vector, lhsSample)
 }
@@ -979,7 +979,7 @@ func (ev *evaluator) vectorScalarBinop(op itemType, lhs Vector, rhs *Scalar, swa
 }
 // scalarBinop evaluates a binary operation between two scalars.
-func scalarBinop(op itemType, lhs, rhs clientmodel.SampleValue) clientmodel.SampleValue {
+func scalarBinop(op itemType, lhs, rhs model.SampleValue) model.SampleValue {
 switch op {
 case itemADD:
 return lhs + rhs
@@ -991,9 +991,9 @@ func scalarBinop(op itemType, lhs, rhs clientmodel.SampleValue) clientmodel.Samp
 return lhs / rhs
 case itemMOD:
 if rhs != 0 {
-return clientmodel.SampleValue(int(lhs) % int(rhs))
+return model.SampleValue(int(lhs) % int(rhs))
 }
-return clientmodel.SampleValue(math.NaN())
+return model.SampleValue(math.NaN())
 case itemEQL:
 return btos(lhs == rhs)
 case itemNEQ:
@@ -1011,7 +1011,7 @@ func scalarBinop(op itemType, lhs, rhs clientmodel.SampleValue) clientmodel.Samp
 }
 // vectorElemBinop evaluates a binary operation between two vector elements.
-func vectorElemBinop(op itemType, lhs, rhs clientmodel.SampleValue) (clientmodel.SampleValue, bool) {
+func vectorElemBinop(op itemType, lhs, rhs model.SampleValue) (model.SampleValue, bool) {
 switch op {
 case itemADD:
 return lhs + rhs, true
@@ -1023,9 +1023,9 @@ func vectorElemBinop(op itemType, lhs, rhs clientmodel.SampleValue) (clientmodel
 return lhs / rhs, true
 case itemMOD:
 if rhs != 0 {
-return clientmodel.SampleValue(int(lhs) % int(rhs)), true
+return model.SampleValue(int(lhs) % int(rhs)), true
 }
-return clientmodel.SampleValue(math.NaN()), true
+return model.SampleValue(math.NaN()), true
 case itemEQL:
 return lhs, lhs == rhs
 case itemNEQ:
@@ -1043,40 +1043,40 @@ func vectorElemBinop(op itemType, lhs, rhs clientmodel.SampleValue) (clientmodel
 }
 // labelIntersection returns the metric of common label/value pairs of two input metrics.
-func labelIntersection(metric1, metric2 clientmodel.COWMetric) clientmodel.COWMetric {
+func labelIntersection(metric1, metric2 model.COWMetric) model.COWMetric {
 for label, value := range metric1.Metric {
 if metric2.Metric[label] != value {
-metric1.Delete(label)
+metric1.Del(label)
 }
 }
 return metric1
 }
 type groupedAggregation struct {
-labels clientmodel.COWMetric
-value clientmodel.SampleValue
-valuesSquaredSum clientmodel.SampleValue
+labels model.COWMetric
+value model.SampleValue
+valuesSquaredSum model.SampleValue
 groupCount int
 }
 // aggregation evaluates an aggregation operation on a vector.
-func (ev *evaluator) aggregation(op itemType, grouping clientmodel.LabelNames, keepExtra bool, vector Vector) Vector {
+func (ev *evaluator) aggregation(op itemType, grouping model.LabelNames, keepExtra bool, vector Vector) Vector {
 result := map[uint64]*groupedAggregation{}
 for _, sample := range vector {
-groupingKey := clientmodel.SignatureForLabels(sample.Metric.Metric, grouping)
+groupingKey := model.SignatureForLabels(sample.Metric.Metric, grouping...)
 groupedResult, ok := result[groupingKey]
 // Add a new group if it doesn't exist.
 if !ok {
-var m clientmodel.COWMetric
+var m model.COWMetric
 if keepExtra {
 m = sample.Metric
-m.Delete(clientmodel.MetricNameLabel)
+m.Del(model.MetricNameLabel)
 } else {
-m = clientmodel.COWMetric{
-Metric: clientmodel.Metric{},
+m = model.COWMetric{
+Metric: model.Metric{},
 Copied: true,
 }
 for _, l := range grouping {
@@ -1129,15 +1129,15 @@ func (ev *evaluator) aggregation(op itemType, grouping clientmodel.LabelNames, k
 for _, aggr := range result {
 switch op {
 case itemAvg:
-aggr.value = aggr.value / clientmodel.SampleValue(aggr.groupCount)
+aggr.value = aggr.value / model.SampleValue(aggr.groupCount)
 case itemCount:
-aggr.value = clientmodel.SampleValue(aggr.groupCount)
+aggr.value = model.SampleValue(aggr.groupCount)
 case itemStdvar:
 avg := float64(aggr.value) / float64(aggr.groupCount)
-aggr.value = clientmodel.SampleValue(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg)
+aggr.value = model.SampleValue(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg)
 case itemStddev:
 avg := float64(aggr.value) / float64(aggr.groupCount)
-aggr.value = clientmodel.SampleValue(math.Sqrt(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg))
+aggr.value = model.SampleValue(math.Sqrt(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg))
 default:
 // For other aggregations, we already have the right value.
 }
@@ -1152,7 +1152,7 @@ func (ev *evaluator) aggregation(op itemType, grouping clientmodel.LabelNames, k
 }
 // btos returns 1 if b is true, 0 otherwise.
-func btos(b bool) clientmodel.SampleValue {
+func btos(b bool) model.SampleValue {
 if b {
 return 1
 }
@@ -1178,7 +1178,7 @@ var StalenessDelta = 5 * time.Minute
 // surrounding a given target time. If samples are found both before and after
 // the target time, the sample value is interpolated between these. Otherwise,
 // the single closest sample is returned verbatim.
-func chooseClosestSample(samples metric.Values, timestamp clientmodel.Timestamp) *metric.SamplePair {
+func chooseClosestSample(samples metric.Values, timestamp model.Time) *metric.SamplePair {
 var closestBefore *metric.SamplePair
 var closestAfter *metric.SamplePair
 for _, candidate := range samples {
@@ -1224,12 +1224,12 @@ func chooseClosestSample(samples metric.Values, timestamp clientmodel.Timestamp)
 // interpolateSamples interpolates a value at a target time between two
 // provided sample pairs.
-func interpolateSamples(first, second *metric.SamplePair, timestamp clientmodel.Timestamp) *metric.SamplePair {
+func interpolateSamples(first, second *metric.SamplePair, timestamp model.Time) *metric.SamplePair {
 dv := second.Value - first.Value
 dt := second.Timestamp.Sub(first.Timestamp)
-dDt := dv / clientmodel.SampleValue(dt)
-offset := clientmodel.SampleValue(timestamp.Sub(first.Timestamp))
+dDt := dv / model.SampleValue(dt)
+offset := model.SampleValue(timestamp.Sub(first.Timestamp))
 return &metric.SamplePair{
 Value: first.Value + (offset * dDt),
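
Two API details stand out in the engine hunks: SignatureForLabels is now variadic (hence labels... and matching.Include...), and COWMetric.Delete becomes Del. A standalone sketch using the plain model.Metric type; on a plain metric the equivalent of Del is a map delete, and the metric below is made up for the example:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	m := model.Metric{
		model.MetricNameLabel: "http_requests_total",
		"job":                 "api",
		"instance":            "a:80",
	}

	// Variadic signature, as in model.SignatureForLabels(m.Metric, labels...).
	on := model.LabelNames{"job", "instance"}
	fmt.Println(model.SignatureForLabels(m, on...))

	// Dropping the metric name before fingerprinting, as signatureFunc does.
	delete(m, model.MetricNameLabel)
	fmt.Println(m.Fingerprint())
}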


@ -21,7 +21,7 @@ import (
"strconv" "strconv"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
) )
@ -36,10 +36,10 @@ type Function struct {
Call func(ev *evaluator, args Expressions) Value Call func(ev *evaluator, args Expressions) Value
} }
// === time() clientmodel.SampleValue === // === time() model.SampleValue ===
func funcTime(ev *evaluator, args Expressions) Value { func funcTime(ev *evaluator, args Expressions) Value {
return &Scalar{ return &Scalar{
Value: clientmodel.SampleValue(ev.Timestamp.Unix()), Value: model.SampleValue(ev.Timestamp.Unix()),
Timestamp: ev.Timestamp, Timestamp: ev.Timestamp,
} }
} }
@ -65,8 +65,8 @@ func funcDelta(ev *evaluator, args Expressions) Value {
continue continue
} }
counterCorrection := clientmodel.SampleValue(0) counterCorrection := model.SampleValue(0)
lastValue := clientmodel.SampleValue(0) lastValue := model.SampleValue(0)
for _, sample := range samples.Values { for _, sample := range samples.Values {
currentValue := sample.Value currentValue := sample.Value
if isCounter && currentValue < lastValue { if isCounter && currentValue < lastValue {
@ -90,7 +90,7 @@ func funcDelta(ev *evaluator, args Expressions) Value {
// them. Depending on how many samples are found under a target interval, // them. Depending on how many samples are found under a target interval,
// the delta results are distorted and temporal aliasing occurs (ugly // the delta results are distorted and temporal aliasing occurs (ugly
// bumps). This effect is corrected for below. // bumps). This effect is corrected for below.
intervalCorrection := clientmodel.SampleValue(targetInterval) / clientmodel.SampleValue(sampledInterval) intervalCorrection := model.SampleValue(targetInterval) / model.SampleValue(sampledInterval)
resultValue *= intervalCorrection resultValue *= intervalCorrection
resultSample := &Sample{ resultSample := &Sample{
@ -98,7 +98,7 @@ func funcDelta(ev *evaluator, args Expressions) Value {
Value: resultValue, Value: resultValue,
Timestamp: ev.Timestamp, Timestamp: ev.Timestamp,
} }
resultSample.Metric.Delete(clientmodel.MetricNameLabel) resultSample.Metric.Del(model.MetricNameLabel)
resultVector = append(resultVector, resultSample) resultVector = append(resultVector, resultSample)
} }
return resultVector return resultVector
@ -114,7 +114,7 @@ func funcRate(ev *evaluator, args Expressions) Value {
// matrix, such as looking at the samples themselves. // matrix, such as looking at the samples themselves.
interval := args[0].(*MatrixSelector).Range interval := args[0].(*MatrixSelector).Range
for i := range vector { for i := range vector {
vector[i].Value /= clientmodel.SampleValue(interval / time.Second) vector[i].Value /= model.SampleValue(interval / time.Second)
} }
return vector return vector
} }
@ -191,10 +191,10 @@ func funcDropCommonLabels(ev *evaluator, args Expressions) Value {
if len(vector) < 1 { if len(vector) < 1 {
return Vector{} return Vector{}
} }
common := clientmodel.LabelSet{} common := model.LabelSet{}
for k, v := range vector[0].Metric.Metric { for k, v := range vector[0].Metric.Metric {
// TODO(julius): Should we also drop common metric names? // TODO(julius): Should we also drop common metric names?
if k == clientmodel.MetricNameLabel { if k == model.MetricNameLabel {
continue continue
} }
common[k] = v common[k] = v
@ -215,7 +215,7 @@ func funcDropCommonLabels(ev *evaluator, args Expressions) Value {
for _, el := range vector { for _, el := range vector {
for k := range el.Metric.Metric { for k := range el.Metric.Metric {
if _, ok := common[k]; ok { if _, ok := common[k]; ok {
el.Metric.Delete(k) el.Metric.Del(k)
} }
} }
} }
@ -235,8 +235,8 @@ func funcRound(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0]) vector := ev.evalVector(args[0])
for _, el := range vector { for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel) el.Metric.Del(model.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Floor(float64(el.Value)*toNearestInverse+0.5) / toNearestInverse) el.Value = model.SampleValue(math.Floor(float64(el.Value)*toNearestInverse+0.5) / toNearestInverse)
} }
return vector return vector
} }
@ -245,20 +245,20 @@ func funcRound(ev *evaluator, args Expressions) Value {
func funcScalar(ev *evaluator, args Expressions) Value { func funcScalar(ev *evaluator, args Expressions) Value {
v := ev.evalVector(args[0]) v := ev.evalVector(args[0])
if len(v) != 1 { if len(v) != 1 {
return &Scalar{clientmodel.SampleValue(math.NaN()), ev.Timestamp} return &Scalar{model.SampleValue(math.NaN()), ev.Timestamp}
} }
return &Scalar{clientmodel.SampleValue(v[0].Value), ev.Timestamp} return &Scalar{model.SampleValue(v[0].Value), ev.Timestamp}
} }
// === count_scalar(vector ExprVector) model.SampleValue === // === count_scalar(vector ExprVector) model.SampleValue ===
func funcCountScalar(ev *evaluator, args Expressions) Value { func funcCountScalar(ev *evaluator, args Expressions) Value {
return &Scalar{ return &Scalar{
Value: clientmodel.SampleValue(len(ev.evalVector(args[0]))), Value: model.SampleValue(len(ev.evalVector(args[0]))),
Timestamp: ev.Timestamp, Timestamp: ev.Timestamp,
} }
} }
func aggrOverTime(ev *evaluator, args Expressions, aggrFn func(metric.Values) clientmodel.SampleValue) Value { func aggrOverTime(ev *evaluator, args Expressions, aggrFn func(metric.Values) model.SampleValue) Value {
matrix := ev.evalMatrix(args[0]) matrix := ev.evalMatrix(args[0])
resultVector := Vector{} resultVector := Vector{}
@ -267,7 +267,7 @@ func aggrOverTime(ev *evaluator, args Expressions, aggrFn func(metric.Values) cl
continue continue
} }
el.Metric.Delete(clientmodel.MetricNameLabel) el.Metric.Del(model.MetricNameLabel)
resultVector = append(resultVector, &Sample{ resultVector = append(resultVector, &Sample{
Metric: el.Metric, Metric: el.Metric,
Value: aggrFn(el.Values), Value: aggrFn(el.Values),
@ -279,19 +279,19 @@ func aggrOverTime(ev *evaluator, args Expressions, aggrFn func(metric.Values) cl
// === avg_over_time(matrix ExprMatrix) Vector === // === avg_over_time(matrix ExprMatrix) Vector ===
func funcAvgOverTime(ev *evaluator, args Expressions) Value { func funcAvgOverTime(ev *evaluator, args Expressions) Value {
return aggrOverTime(ev, args, func(values metric.Values) clientmodel.SampleValue { return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue {
var sum clientmodel.SampleValue var sum model.SampleValue
for _, v := range values { for _, v := range values {
sum += v.Value sum += v.Value
} }
return sum / clientmodel.SampleValue(len(values)) return sum / model.SampleValue(len(values))
}) })
} }
// === count_over_time(matrix ExprMatrix) Vector === // === count_over_time(matrix ExprMatrix) Vector ===
func funcCountOverTime(ev *evaluator, args Expressions) Value { func funcCountOverTime(ev *evaluator, args Expressions) Value {
return aggrOverTime(ev, args, func(values metric.Values) clientmodel.SampleValue { return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue {
return clientmodel.SampleValue(len(values)) return model.SampleValue(len(values))
}) })
} }
@ -299,38 +299,38 @@ func funcCountOverTime(ev *evaluator, args Expressions) Value {
func funcFloor(ev *evaluator, args Expressions) Value { func funcFloor(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0]) vector := ev.evalVector(args[0])
for _, el := range vector { for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel) el.Metric.Del(model.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Floor(float64(el.Value))) el.Value = model.SampleValue(math.Floor(float64(el.Value)))
} }
return vector return vector
} }
// === max_over_time(matrix ExprMatrix) Vector === // === max_over_time(matrix ExprMatrix) Vector ===
func funcMaxOverTime(ev *evaluator, args Expressions) Value { func funcMaxOverTime(ev *evaluator, args Expressions) Value {
return aggrOverTime(ev, args, func(values metric.Values) clientmodel.SampleValue { return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue {
max := math.Inf(-1) max := math.Inf(-1)
for _, v := range values { for _, v := range values {
max = math.Max(max, float64(v.Value)) max = math.Max(max, float64(v.Value))
} }
return clientmodel.SampleValue(max) return model.SampleValue(max)
}) })
} }
// === min_over_time(matrix ExprMatrix) Vector === // === min_over_time(matrix ExprMatrix) Vector ===
func funcMinOverTime(ev *evaluator, args Expressions) Value { func funcMinOverTime(ev *evaluator, args Expressions) Value {
return aggrOverTime(ev, args, func(values metric.Values) clientmodel.SampleValue { return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue {
min := math.Inf(1) min := math.Inf(1)
for _, v := range values { for _, v := range values {
min = math.Min(min, float64(v.Value)) min = math.Min(min, float64(v.Value))
} }
return clientmodel.SampleValue(min) return model.SampleValue(min)
}) })
} }
// === sum_over_time(matrix ExprMatrix) Vector === // === sum_over_time(matrix ExprMatrix) Vector ===
func funcSumOverTime(ev *evaluator, args Expressions) Value { func funcSumOverTime(ev *evaluator, args Expressions) Value {
return aggrOverTime(ev, args, func(values metric.Values) clientmodel.SampleValue { return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue {
var sum clientmodel.SampleValue var sum model.SampleValue
for _, v := range values { for _, v := range values {
sum += v.Value sum += v.Value
} }
@ -342,8 +342,8 @@ func funcSumOverTime(ev *evaluator, args Expressions) Value {
func funcAbs(ev *evaluator, args Expressions) Value { func funcAbs(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0]) vector := ev.evalVector(args[0])
for _, el := range vector { for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel) el.Metric.Del(model.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Abs(float64(el.Value))) el.Value = model.SampleValue(math.Abs(float64(el.Value)))
} }
return vector return vector
} }
@ -353,17 +353,17 @@ func funcAbsent(ev *evaluator, args Expressions) Value {
if len(ev.evalVector(args[0])) > 0 { if len(ev.evalVector(args[0])) > 0 {
return Vector{} return Vector{}
} }
m := clientmodel.Metric{} m := model.Metric{}
if vs, ok := args[0].(*VectorSelector); ok { if vs, ok := args[0].(*VectorSelector); ok {
for _, matcher := range vs.LabelMatchers { for _, matcher := range vs.LabelMatchers {
if matcher.Type == metric.Equal && matcher.Name != clientmodel.MetricNameLabel { if matcher.Type == metric.Equal && matcher.Name != model.MetricNameLabel {
m[matcher.Name] = matcher.Value m[matcher.Name] = matcher.Value
} }
} }
} }
return Vector{ return Vector{
&Sample{ &Sample{
Metric: clientmodel.COWMetric{ Metric: model.COWMetric{
Metric: m, Metric: m,
Copied: true, Copied: true,
}, },
@ -377,8 +377,8 @@ func funcAbsent(ev *evaluator, args Expressions) Value {
func funcCeil(ev *evaluator, args Expressions) Value { func funcCeil(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0]) vector := ev.evalVector(args[0])
for _, el := range vector { for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel) el.Metric.Del(model.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Ceil(float64(el.Value))) el.Value = model.SampleValue(math.Ceil(float64(el.Value)))
} }
return vector return vector
} }
@ -387,8 +387,8 @@ func funcCeil(ev *evaluator, args Expressions) Value {
func funcExp(ev *evaluator, args Expressions) Value { func funcExp(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0]) vector := ev.evalVector(args[0])
for _, el := range vector { for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel) el.Metric.Del(model.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Exp(float64(el.Value))) el.Value = model.SampleValue(math.Exp(float64(el.Value)))
} }
return vector return vector
} }
@ -397,8 +397,8 @@ func funcExp(ev *evaluator, args Expressions) Value {
func funcSqrt(ev *evaluator, args Expressions) Value { func funcSqrt(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0]) vector := ev.evalVector(args[0])
for _, el := range vector { for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel) el.Metric.Del(model.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Sqrt(float64(el.Value))) el.Value = model.SampleValue(math.Sqrt(float64(el.Value)))
} }
return vector return vector
} }
@ -407,8 +407,8 @@ func funcSqrt(ev *evaluator, args Expressions) Value {
func funcLn(ev *evaluator, args Expressions) Value { func funcLn(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0]) vector := ev.evalVector(args[0])
for _, el := range vector { for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel) el.Metric.Del(model.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Log(float64(el.Value))) el.Value = model.SampleValue(math.Log(float64(el.Value)))
} }
return vector return vector
} }
@ -417,8 +417,8 @@ func funcLn(ev *evaluator, args Expressions) Value {
func funcLog2(ev *evaluator, args Expressions) Value { func funcLog2(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0]) vector := ev.evalVector(args[0])
for _, el := range vector { for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel) el.Metric.Del(model.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Log2(float64(el.Value))) el.Value = model.SampleValue(math.Log2(float64(el.Value)))
} }
return vector return vector
} }
@ -427,8 +427,8 @@ func funcLog2(ev *evaluator, args Expressions) Value {
func funcLog10(ev *evaluator, args Expressions) Value { func funcLog10(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0]) vector := ev.evalVector(args[0])
for _, el := range vector { for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel) el.Metric.Del(model.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Log10(float64(el.Value))) el.Value = model.SampleValue(math.Log10(float64(el.Value)))
} }
return vector return vector
} }
@ -446,13 +446,13 @@ func funcDeriv(ev *evaluator, args Expressions) Value {
} }
// Least squares. // Least squares.
n := clientmodel.SampleValue(0) var (
sumY := clientmodel.SampleValue(0) n model.SampleValue
sumX := clientmodel.SampleValue(0) sumX, sumY model.SampleValue
sumXY := clientmodel.SampleValue(0) sumXY, sumX2 model.SampleValue
sumX2 := clientmodel.SampleValue(0) )
for _, sample := range samples.Values { for _, sample := range samples.Values {
x := clientmodel.SampleValue(sample.Timestamp.UnixNano() / 1e9) x := model.SampleValue(sample.Timestamp.UnixNano() / 1e9)
n += 1.0 n += 1.0
sumY += sample.Value sumY += sample.Value
sumX += x sumX += x
@ -469,7 +469,7 @@ func funcDeriv(ev *evaluator, args Expressions) Value {
Value: resultValue, Value: resultValue,
Timestamp: ev.Timestamp, Timestamp: ev.Timestamp,
} }
resultSample.Metric.Delete(clientmodel.MetricNameLabel) resultSample.Metric.Del(model.MetricNameLabel)
resultVector = append(resultVector, resultSample) resultVector = append(resultVector, resultSample)
} }
return resultVector return resultVector
@ -478,16 +478,16 @@ func funcDeriv(ev *evaluator, args Expressions) Value {
// === predict_linear(node ExprMatrix, k ExprScalar) Vector === // === predict_linear(node ExprMatrix, k ExprScalar) Vector ===
func funcPredictLinear(ev *evaluator, args Expressions) Value { func funcPredictLinear(ev *evaluator, args Expressions) Value {
vector := funcDeriv(ev, args[0:1]).(Vector) vector := funcDeriv(ev, args[0:1]).(Vector)
duration := clientmodel.SampleValue(clientmodel.SampleValue(ev.evalFloat(args[1]))) duration := model.SampleValue(model.SampleValue(ev.evalFloat(args[1])))
excludedLabels := map[clientmodel.LabelName]struct{}{ excludedLabels := map[model.LabelName]struct{}{
clientmodel.MetricNameLabel: {}, model.MetricNameLabel: {},
} }
// Calculate predicted delta over the duration. // Calculate predicted delta over the duration.
signatureToDelta := map[uint64]clientmodel.SampleValue{} signatureToDelta := map[uint64]model.SampleValue{}
for _, el := range vector { for _, el := range vector {
signature := clientmodel.SignatureWithoutLabels(el.Metric.Metric, excludedLabels) signature := model.SignatureWithoutLabels(el.Metric.Metric, excludedLabels)
signatureToDelta[signature] = el.Value * duration signatureToDelta[signature] = el.Value * duration
} }
@ -498,10 +498,10 @@ func funcPredictLinear(ev *evaluator, args Expressions) Value {
if len(samples.Values) < 2 { if len(samples.Values) < 2 {
continue continue
} }
signature := clientmodel.SignatureWithoutLabels(samples.Metric.Metric, excludedLabels) signature := model.SignatureWithoutLabels(samples.Metric.Metric, excludedLabels)
delta, ok := signatureToDelta[signature] delta, ok := signatureToDelta[signature]
if ok { if ok {
samples.Metric.Delete(clientmodel.MetricNameLabel) samples.Metric.Del(model.MetricNameLabel)
outVec = append(outVec, &Sample{ outVec = append(outVec, &Sample{
Metric: samples.Metric, Metric: samples.Metric,
Value: delta + samples.Values[1].Value, Value: delta + samples.Values[1].Value,
@ -514,25 +514,25 @@ func funcPredictLinear(ev *evaluator, args Expressions) Value {
// === histogram_quantile(k ExprScalar, vector ExprVector) Vector === // === histogram_quantile(k ExprScalar, vector ExprVector) Vector ===
func funcHistogramQuantile(ev *evaluator, args Expressions) Value { func funcHistogramQuantile(ev *evaluator, args Expressions) Value {
q := clientmodel.SampleValue(ev.evalFloat(args[0])) q := model.SampleValue(ev.evalFloat(args[0]))
inVec := ev.evalVector(args[1]) inVec := ev.evalVector(args[1])
outVec := Vector{} outVec := Vector{}
signatureToMetricWithBuckets := map[uint64]*metricWithBuckets{} signatureToMetricWithBuckets := map[uint64]*metricWithBuckets{}
for _, el := range inVec { for _, el := range inVec {
upperBound, err := strconv.ParseFloat( upperBound, err := strconv.ParseFloat(
string(el.Metric.Metric[clientmodel.BucketLabel]), 64, string(el.Metric.Metric[model.BucketLabel]), 64,
) )
if err != nil { if err != nil {
// Oops, no bucket label or malformed label value. Skip. // Oops, no bucket label or malformed label value. Skip.
// TODO(beorn7): Issue a warning somehow. // TODO(beorn7): Issue a warning somehow.
continue continue
} }
signature := clientmodel.SignatureWithoutLabels(el.Metric.Metric, excludedLabels) signature := model.SignatureWithoutLabels(el.Metric.Metric, excludedLabels)
mb, ok := signatureToMetricWithBuckets[signature] mb, ok := signatureToMetricWithBuckets[signature]
if !ok { if !ok {
el.Metric.Delete(clientmodel.BucketLabel) el.Metric.Del(model.BucketLabel)
el.Metric.Delete(clientmodel.MetricNameLabel) el.Metric.Del(model.MetricNameLabel)
mb = &metricWithBuckets{el.Metric, nil} mb = &metricWithBuckets{el.Metric, nil}
signatureToMetricWithBuckets[signature] = mb signatureToMetricWithBuckets[signature] = mb
} }
@ -542,7 +542,7 @@ func funcHistogramQuantile(ev *evaluator, args Expressions) Value {
for _, mb := range signatureToMetricWithBuckets { for _, mb := range signatureToMetricWithBuckets {
outVec = append(outVec, &Sample{ outVec = append(outVec, &Sample{
Metric: mb.metric, Metric: mb.metric,
Value: clientmodel.SampleValue(quantile(q, mb.buckets)), Value: model.SampleValue(quantile(q, mb.buckets)),
Timestamp: ev.Timestamp, Timestamp: ev.Timestamp,
}) })
} }
@ -557,7 +557,7 @@ func funcResets(ev *evaluator, args Expressions) Value {
for _, samples := range in { for _, samples := range in {
resets := 0 resets := 0
prev := clientmodel.SampleValue(samples.Values[0].Value) prev := model.SampleValue(samples.Values[0].Value)
for _, sample := range samples.Values[1:] { for _, sample := range samples.Values[1:] {
current := sample.Value current := sample.Value
if current < prev { if current < prev {
@ -568,10 +568,10 @@ func funcResets(ev *evaluator, args Expressions) Value {
rs := &Sample{ rs := &Sample{
Metric: samples.Metric, Metric: samples.Metric,
Value: clientmodel.SampleValue(resets), Value: model.SampleValue(resets),
Timestamp: ev.Timestamp, Timestamp: ev.Timestamp,
} }
rs.Metric.Delete(clientmodel.MetricNameLabel) rs.Metric.Del(model.MetricNameLabel)
out = append(out, rs) out = append(out, rs)
} }
return out return out
@ -584,7 +584,7 @@ func funcChanges(ev *evaluator, args Expressions) Value {
for _, samples := range in { for _, samples := range in {
changes := 0 changes := 0
prev := clientmodel.SampleValue(samples.Values[0].Value) prev := model.SampleValue(samples.Values[0].Value)
for _, sample := range samples.Values[1:] { for _, sample := range samples.Values[1:] {
current := sample.Value current := sample.Value
if current != prev { if current != prev {
@ -595,10 +595,10 @@ func funcChanges(ev *evaluator, args Expressions) Value {
rs := &Sample{ rs := &Sample{
Metric: samples.Metric, Metric: samples.Metric,
Value: clientmodel.SampleValue(changes), Value: model.SampleValue(changes),
Timestamp: ev.Timestamp, Timestamp: ev.Timestamp,
} }
rs.Metric.Delete(clientmodel.MetricNameLabel) rs.Metric.Del(model.MetricNameLabel)
out = append(out, rs) out = append(out, rs)
} }
return out return out
@ -608,9 +608,9 @@ func funcChanges(ev *evaluator, args Expressions) Value {
func funcLabelReplace(ev *evaluator, args Expressions) Value { func funcLabelReplace(ev *evaluator, args Expressions) Value {
var ( var (
vector = ev.evalVector(args[0]) vector = ev.evalVector(args[0])
dst = clientmodel.LabelName(ev.evalString(args[1]).Value) dst = model.LabelName(ev.evalString(args[1]).Value)
repl = ev.evalString(args[2]).Value repl = ev.evalString(args[2]).Value
src = clientmodel.LabelName(ev.evalString(args[3]).Value) src = model.LabelName(ev.evalString(args[3]).Value)
regexStr = ev.evalString(args[4]).Value regexStr = ev.evalString(args[4]).Value
) )
@ -618,11 +618,11 @@ func funcLabelReplace(ev *evaluator, args Expressions) Value {
if err != nil { if err != nil {
ev.errorf("invalid regular expression in label_replace(): %s", regexStr) ev.errorf("invalid regular expression in label_replace(): %s", regexStr)
} }
if !clientmodel.LabelNameRE.MatchString(string(dst)) { if !model.LabelNameRE.MatchString(string(dst)) {
ev.errorf("invalid destination label name in label_replace(): %s", dst) ev.errorf("invalid destination label name in label_replace(): %s", dst)
} }
outSet := make(map[clientmodel.Fingerprint]struct{}, len(vector)) outSet := make(map[model.Fingerprint]struct{}, len(vector))
for _, el := range vector { for _, el := range vector {
srcVal := string(el.Metric.Metric[src]) srcVal := string(el.Metric.Metric[src])
indexes := regex.FindStringSubmatchIndex(srcVal) indexes := regex.FindStringSubmatchIndex(srcVal)
@ -632,9 +632,9 @@ func funcLabelReplace(ev *evaluator, args Expressions) Value {
} }
res := regex.ExpandString([]byte{}, repl, srcVal, indexes) res := regex.ExpandString([]byte{}, repl, srcVal, indexes)
if len(res) == 0 { if len(res) == 0 {
el.Metric.Delete(dst) el.Metric.Del(dst)
} else { } else {
el.Metric.Set(dst, clientmodel.LabelValue(res)) el.Metric.Set(dst, model.LabelValue(res))
} }
fp := el.Metric.Metric.Fingerprint() fp := el.Metric.Metric.Fingerprint()
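
The functions above all key series by a label signature that ignores __name__ and le, and that pattern survives the rename unchanged. A minimal, self-contained sketch of the new API, assuming only github.com/prometheus/common/model; the metric and map names are illustrative:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	// Labels ignored when computing the grouping signature, mirroring the
	// excludedLabels set used by the functions above.
	excluded := map[model.LabelName]struct{}{
		model.MetricNameLabel: {},
		model.BucketLabel:     {},
	}

	m := model.Metric{
		model.MetricNameLabel: "http_requests_total",
		"job":                 "api",
		model.BucketLabel:     "0.5",
	}

	// Series that differ only in the excluded labels share one signature.
	sig := model.SignatureWithoutLabels(m, excluded)
	fmt.Println(sig)

	// Values remain model.SampleValue (a float64-based type) and support
	// ordinary arithmetic.
	perSignature := map[uint64]model.SampleValue{sig: 42}
	perSignature[sig] *= 2
	fmt.Println(perSignature[sig])
}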

View file

@ -22,7 +22,7 @@ import (
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/util/strutil" "github.com/prometheus/prometheus/util/strutil"
@ -74,7 +74,7 @@ func ParseExpr(input string) (Expr, error) {
} }
// ParseMetric parses the input into a metric // ParseMetric parses the input into a metric
func ParseMetric(input string) (m clientmodel.Metric, err error) { func ParseMetric(input string) (m model.Metric, err error) {
p := newParser(input) p := newParser(input)
defer p.recover(&err) defer p.recover(&err)
@ -103,7 +103,7 @@ func ParseMetricSelector(input string) (m metric.LabelMatchers, err error) {
} }
// parseSeriesDesc parses the description of a time series. // parseSeriesDesc parses the description of a time series.
func parseSeriesDesc(input string) (clientmodel.Metric, []sequenceValue, error) { func parseSeriesDesc(input string) (model.Metric, []sequenceValue, error) {
p := newParser(input) p := newParser(input)
p.lex.seriesDesc = true p.lex.seriesDesc = true
@ -154,7 +154,7 @@ func (p *parser) parseExpr() (expr Expr, err error) {
// sequenceValue is an omittable value in a sequence of time series values. // sequenceValue is an omittable value in a sequence of time series values.
type sequenceValue struct { type sequenceValue struct {
value clientmodel.SampleValue value model.SampleValue
omitted bool omitted bool
} }
@ -166,7 +166,7 @@ func (v sequenceValue) String() string {
} }
// parseSeriesDesc parses a description of a time series into its metric and value sequence. // parseSeriesDesc parses a description of a time series into its metric and value sequence.
func (p *parser) parseSeriesDesc() (m clientmodel.Metric, vals []sequenceValue, err error) { func (p *parser) parseSeriesDesc() (m model.Metric, vals []sequenceValue, err error) {
defer p.recover(&err) defer p.recover(&err)
m = p.metric() m = p.metric()
@ -203,7 +203,7 @@ func (p *parser) parseSeriesDesc() (m clientmodel.Metric, vals []sequenceValue,
} }
k := sign * p.number(p.expect(itemNumber, ctx).val) k := sign * p.number(p.expect(itemNumber, ctx).val)
vals = append(vals, sequenceValue{ vals = append(vals, sequenceValue{
value: clientmodel.SampleValue(k), value: model.SampleValue(k),
}) })
// If there are no offset repetitions specified, proceed with the next value. // If there are no offset repetitions specified, proceed with the next value.
@ -231,7 +231,7 @@ func (p *parser) parseSeriesDesc() (m clientmodel.Metric, vals []sequenceValue,
for i := uint64(0); i < times; i++ { for i := uint64(0); i < times; i++ {
k += offset k += offset
vals = append(vals, sequenceValue{ vals = append(vals, sequenceValue{
value: clientmodel.SampleValue(k), value: model.SampleValue(k),
}) })
} }
} }
@ -384,7 +384,7 @@ func (p *parser) alertStmt() *AlertStmt {
} }
} }
lset := clientmodel.LabelSet{} lset := model.LabelSet{}
if p.peek().typ == itemWith { if p.peek().typ == itemWith {
p.expect(itemWith, ctx) p.expect(itemWith, ctx)
lset = p.labelSet() lset = p.labelSet()
@ -447,7 +447,7 @@ func (p *parser) recordStmt() *RecordStmt {
name := p.expectOneOf(itemIdentifier, itemMetricIdentifier, ctx).val name := p.expectOneOf(itemIdentifier, itemMetricIdentifier, ctx).val
var lset clientmodel.LabelSet var lset model.LabelSet
if p.peek().typ == itemLeftBrace { if p.peek().typ == itemLeftBrace {
lset = p.labelSet() lset = p.labelSet()
} }
@ -638,7 +638,7 @@ func (p *parser) primaryExpr() Expr {
switch t := p.next(); { switch t := p.next(); {
case t.typ == itemNumber: case t.typ == itemNumber:
f := p.number(t.val) f := p.number(t.val)
return &NumberLiteral{clientmodel.SampleValue(f)} return &NumberLiteral{model.SampleValue(f)}
case t.typ == itemString: case t.typ == itemString:
s := t.val[1 : len(t.val)-1] s := t.val[1 : len(t.val)-1]
@ -673,15 +673,15 @@ func (p *parser) primaryExpr() Expr {
// //
// '(' <label_name>, ... ')' // '(' <label_name>, ... ')'
// //
func (p *parser) labels() clientmodel.LabelNames { func (p *parser) labels() model.LabelNames {
const ctx = "grouping opts" const ctx = "grouping opts"
p.expect(itemLeftParen, ctx) p.expect(itemLeftParen, ctx)
labels := clientmodel.LabelNames{} labels := model.LabelNames{}
for { for {
id := p.expect(itemIdentifier, ctx) id := p.expect(itemIdentifier, ctx)
labels = append(labels, clientmodel.LabelName(id.val)) labels = append(labels, model.LabelName(id.val))
if p.peek().typ != itemComma { if p.peek().typ != itemComma {
break break
@ -705,7 +705,7 @@ func (p *parser) aggrExpr() *AggregateExpr {
if !agop.typ.isAggregator() { if !agop.typ.isAggregator() {
p.errorf("expected aggregation operator but got %s", agop) p.errorf("expected aggregation operator but got %s", agop)
} }
var grouping clientmodel.LabelNames var grouping model.LabelNames
var keepExtra bool var keepExtra bool
modifiersFirst := false modifiersFirst := false
@ -788,8 +788,8 @@ func (p *parser) call(name string) *Call {
// //
// '{' [ <labelname> '=' <match_string>, ... ] '}' // '{' [ <labelname> '=' <match_string>, ... ] '}'
// //
func (p *parser) labelSet() clientmodel.LabelSet { func (p *parser) labelSet() model.LabelSet {
set := clientmodel.LabelSet{} set := model.LabelSet{}
for _, lm := range p.labelMatchers(itemEQL) { for _, lm := range p.labelMatchers(itemEQL) {
set[lm.Name] = lm.Value set[lm.Name] = lm.Value
} }
@ -849,8 +849,8 @@ func (p *parser) labelMatchers(operators ...itemType) metric.LabelMatchers {
m, err := metric.NewLabelMatcher( m, err := metric.NewLabelMatcher(
matchType, matchType,
clientmodel.LabelName(label.val), model.LabelName(label.val),
clientmodel.LabelValue(val), model.LabelValue(val),
) )
if err != nil { if err != nil {
p.error(err) p.error(err)
@ -875,9 +875,9 @@ func (p *parser) labelMatchers(operators ...itemType) metric.LabelMatchers {
// <label_set> // <label_set>
// <metric_identifier> [<label_set>] // <metric_identifier> [<label_set>]
// //
func (p *parser) metric() clientmodel.Metric { func (p *parser) metric() model.Metric {
name := "" name := ""
m := clientmodel.Metric{} m := model.Metric{}
t := p.peek().typ t := p.peek().typ
if t == itemIdentifier || t == itemMetricIdentifier { if t == itemIdentifier || t == itemMetricIdentifier {
@ -888,10 +888,10 @@ func (p *parser) metric() clientmodel.Metric {
p.errorf("missing metric name or metric selector") p.errorf("missing metric name or metric selector")
} }
if t == itemLeftBrace { if t == itemLeftBrace {
m = clientmodel.Metric(p.labelSet()) m = model.Metric(p.labelSet())
} }
if name != "" { if name != "" {
m[clientmodel.MetricNameLabel] = clientmodel.LabelValue(name) m[model.MetricNameLabel] = model.LabelValue(name)
} }
return m return m
} }
@ -912,15 +912,15 @@ func (p *parser) vectorSelector(name string) *VectorSelector {
// Metric name must not be set both as the selector name and in the label matchers. // Metric name must not be set both as the selector name and in the label matchers.
if name != "" { if name != "" {
for _, m := range matchers { for _, m := range matchers {
if m.Name == clientmodel.MetricNameLabel { if m.Name == model.MetricNameLabel {
p.errorf("metric name must not be set twice: %q or %q", name, m.Value) p.errorf("metric name must not be set twice: %q or %q", name, m.Value)
} }
} }
// Set name label matching. // Set name label matching.
matchers = append(matchers, &metric.LabelMatcher{ matchers = append(matchers, &metric.LabelMatcher{
Type: metric.Equal, Type: metric.Equal,
Name: clientmodel.MetricNameLabel, Name: model.MetricNameLabel,
Value: clientmodel.LabelValue(name), Value: model.LabelValue(name),
}) })
} }

View file

@ -21,7 +21,7 @@ import (
"testing" "testing"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
) )
@ -37,10 +37,10 @@ var testExpr = []struct {
expected: &NumberLiteral{1}, expected: &NumberLiteral{1},
}, { }, {
input: "+Inf", input: "+Inf",
expected: &NumberLiteral{clientmodel.SampleValue(math.Inf(1))}, expected: &NumberLiteral{model.SampleValue(math.Inf(1))},
}, { }, {
input: "-Inf", input: "-Inf",
expected: &NumberLiteral{clientmodel.SampleValue(math.Inf(-1))}, expected: &NumberLiteral{model.SampleValue(math.Inf(-1))},
}, { }, {
input: ".5", input: ".5",
expected: &NumberLiteral{0.5}, expected: &NumberLiteral{0.5},
@ -129,7 +129,7 @@ var testExpr = []struct {
Expr: &VectorSelector{ Expr: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
}, },
@ -139,7 +139,7 @@ var testExpr = []struct {
Expr: &VectorSelector{ Expr: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
}, },
@ -232,13 +232,13 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "foo", Name: "foo",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
RHS: &VectorSelector{ RHS: &VectorSelector{
Name: "bar", Name: "bar",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
VectorMatching: &VectorMatching{Card: CardOneToOne}, VectorMatching: &VectorMatching{Card: CardOneToOne},
@ -250,7 +250,7 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "foo", Name: "foo",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
RHS: &NumberLiteral{1}, RHS: &NumberLiteral{1},
@ -263,7 +263,7 @@ var testExpr = []struct {
RHS: &VectorSelector{ RHS: &VectorSelector{
Name: "bar", Name: "bar",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
}, },
@ -274,13 +274,13 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "foo", Name: "foo",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
RHS: &VectorSelector{ RHS: &VectorSelector{
Name: "bar", Name: "bar",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
VectorMatching: &VectorMatching{Card: CardManyToMany}, VectorMatching: &VectorMatching{Card: CardManyToMany},
@ -292,13 +292,13 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "foo", Name: "foo",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
RHS: &VectorSelector{ RHS: &VectorSelector{
Name: "bar", Name: "bar",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
VectorMatching: &VectorMatching{Card: CardManyToMany}, VectorMatching: &VectorMatching{Card: CardManyToMany},
@ -313,13 +313,13 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "foo", Name: "foo",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
RHS: &VectorSelector{ RHS: &VectorSelector{
Name: "bar", Name: "bar",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
VectorMatching: &VectorMatching{Card: CardOneToOne}, VectorMatching: &VectorMatching{Card: CardOneToOne},
@ -329,13 +329,13 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "bla", Name: "bla",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bla"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bla"},
}, },
}, },
RHS: &VectorSelector{ RHS: &VectorSelector{
Name: "blub", Name: "blub",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "blub"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "blub"},
}, },
}, },
VectorMatching: &VectorMatching{Card: CardManyToMany}, VectorMatching: &VectorMatching{Card: CardManyToMany},
@ -350,7 +350,7 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "bar", Name: "bar",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
RHS: &BinaryExpr{ RHS: &BinaryExpr{
@ -358,24 +358,24 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "bla", Name: "bla",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bla"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bla"},
}, },
}, },
RHS: &VectorSelector{ RHS: &VectorSelector{
Name: "blub", Name: "blub",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "blub"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "blub"},
}, },
}, },
VectorMatching: &VectorMatching{ VectorMatching: &VectorMatching{
Card: CardOneToMany, Card: CardOneToMany,
On: clientmodel.LabelNames{"baz", "buz"}, On: model.LabelNames{"baz", "buz"},
Include: clientmodel.LabelNames{"test"}, Include: model.LabelNames{"test"},
}, },
}, },
VectorMatching: &VectorMatching{ VectorMatching: &VectorMatching{
Card: CardOneToOne, Card: CardOneToOne,
On: clientmodel.LabelNames{"foo"}, On: model.LabelNames{"foo"},
}, },
}, },
}, { }, {
@ -385,18 +385,18 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "foo", Name: "foo",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
RHS: &VectorSelector{ RHS: &VectorSelector{
Name: "bar", Name: "bar",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
VectorMatching: &VectorMatching{ VectorMatching: &VectorMatching{
Card: CardOneToOne, Card: CardOneToOne,
On: clientmodel.LabelNames{"test", "blub"}, On: model.LabelNames{"test", "blub"},
}, },
}, },
}, { }, {
@ -406,18 +406,18 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "foo", Name: "foo",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
RHS: &VectorSelector{ RHS: &VectorSelector{
Name: "bar", Name: "bar",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
VectorMatching: &VectorMatching{ VectorMatching: &VectorMatching{
Card: CardManyToMany, Card: CardManyToMany,
On: clientmodel.LabelNames{"test", "blub"}, On: model.LabelNames{"test", "blub"},
}, },
}, },
}, { }, {
@ -427,19 +427,19 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "foo", Name: "foo",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
RHS: &VectorSelector{ RHS: &VectorSelector{
Name: "bar", Name: "bar",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
VectorMatching: &VectorMatching{ VectorMatching: &VectorMatching{
Card: CardManyToOne, Card: CardManyToOne,
On: clientmodel.LabelNames{"test", "blub"}, On: model.LabelNames{"test", "blub"},
Include: clientmodel.LabelNames{"bar"}, Include: model.LabelNames{"bar"},
}, },
}, },
}, { }, {
@ -449,19 +449,19 @@ var testExpr = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "foo", Name: "foo",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
RHS: &VectorSelector{ RHS: &VectorSelector{
Name: "bar", Name: "bar",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
VectorMatching: &VectorMatching{ VectorMatching: &VectorMatching{
Card: CardOneToMany, Card: CardOneToMany,
On: clientmodel.LabelNames{"test", "blub"}, On: model.LabelNames{"test", "blub"},
Include: clientmodel.LabelNames{"bar", "foo"}, Include: model.LabelNames{"bar", "foo"},
}, },
}, },
}, { }, {
@ -520,7 +520,7 @@ var testExpr = []struct {
Name: "foo", Name: "foo",
Offset: 0, Offset: 0,
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
}, { }, {
@ -529,7 +529,7 @@ var testExpr = []struct {
Name: "foo", Name: "foo",
Offset: 5 * time.Minute, Offset: 5 * time.Minute,
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
}, { }, {
@ -539,7 +539,7 @@ var testExpr = []struct {
Offset: 0, Offset: 0,
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: "a", Value: "bc"}, {Type: metric.Equal, Name: "a", Value: "bc"},
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo:bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo:bar"},
}, },
}, },
}, { }, {
@ -549,7 +549,7 @@ var testExpr = []struct {
Offset: 0, Offset: 0,
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: "NaN", Value: "bc"}, {Type: metric.Equal, Name: "NaN", Value: "bc"},
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
}, { }, {
@ -562,7 +562,7 @@ var testExpr = []struct {
{Type: metric.NotEqual, Name: "foo", Value: "bar"}, {Type: metric.NotEqual, Name: "foo", Value: "bar"},
mustLabelMatcher(metric.RegexMatch, "test", "test"), mustLabelMatcher(metric.RegexMatch, "test", "test"),
mustLabelMatcher(metric.RegexNoMatch, "bar", "baz"), mustLabelMatcher(metric.RegexNoMatch, "bar", "baz"),
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
}, { }, {
@ -644,7 +644,7 @@ var testExpr = []struct {
Offset: 0, Offset: 0,
Range: 5 * time.Second, Range: 5 * time.Second,
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
}, },
}, },
}, { }, {
@ -654,7 +654,7 @@ var testExpr = []struct {
Offset: 0, Offset: 0,
Range: 5 * time.Minute, Range: 5 * time.Minute,
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
}, },
}, },
}, { }, {
@ -664,7 +664,7 @@ var testExpr = []struct {
Offset: 5 * time.Minute, Offset: 5 * time.Minute,
Range: 5 * time.Hour, Range: 5 * time.Hour,
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
}, },
}, },
}, { }, {
@ -674,7 +674,7 @@ var testExpr = []struct {
Offset: 10 * time.Second, Offset: 10 * time.Second,
Range: 5 * 24 * time.Hour, Range: 5 * 24 * time.Hour,
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
}, },
}, },
}, { }, {
@ -684,7 +684,7 @@ var testExpr = []struct {
Offset: 14 * 24 * time.Hour, Offset: 14 * 24 * time.Hour,
Range: 5 * 7 * 24 * time.Hour, Range: 5 * 7 * 24 * time.Hour,
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
}, },
}, },
}, { }, {
@ -695,7 +695,7 @@ var testExpr = []struct {
Range: 5 * 365 * 24 * time.Hour, Range: 5 * 365 * 24 * time.Hour,
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: "a", Value: "b"}, {Type: metric.Equal, Name: "a", Value: "b"},
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
}, },
}, },
}, { }, {
@ -750,10 +750,10 @@ var testExpr = []struct {
Expr: &VectorSelector{ Expr: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
Grouping: clientmodel.LabelNames{"foo"}, Grouping: model.LabelNames{"foo"},
}, },
}, { }, {
input: "sum by (foo) keep_common (some_metric)", input: "sum by (foo) keep_common (some_metric)",
@ -763,10 +763,10 @@ var testExpr = []struct {
Expr: &VectorSelector{ Expr: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
Grouping: clientmodel.LabelNames{"foo"}, Grouping: model.LabelNames{"foo"},
}, },
}, { }, {
input: "sum (some_metric) by (foo,bar) keep_common", input: "sum (some_metric) by (foo,bar) keep_common",
@ -776,10 +776,10 @@ var testExpr = []struct {
Expr: &VectorSelector{ Expr: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
Grouping: clientmodel.LabelNames{"foo", "bar"}, Grouping: model.LabelNames{"foo", "bar"},
}, },
}, { }, {
input: "avg by (foo)(some_metric)", input: "avg by (foo)(some_metric)",
@ -788,10 +788,10 @@ var testExpr = []struct {
Expr: &VectorSelector{ Expr: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
Grouping: clientmodel.LabelNames{"foo"}, Grouping: model.LabelNames{"foo"},
}, },
}, { }, {
input: "COUNT by (foo) keep_common (some_metric)", input: "COUNT by (foo) keep_common (some_metric)",
@ -800,10 +800,10 @@ var testExpr = []struct {
Expr: &VectorSelector{ Expr: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
Grouping: clientmodel.LabelNames{"foo"}, Grouping: model.LabelNames{"foo"},
KeepExtraLabels: true, KeepExtraLabels: true,
}, },
}, { }, {
@ -813,10 +813,10 @@ var testExpr = []struct {
Expr: &VectorSelector{ Expr: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
Grouping: clientmodel.LabelNames{"foo"}, Grouping: model.LabelNames{"foo"},
KeepExtraLabels: true, KeepExtraLabels: true,
}, },
}, { }, {
@ -826,10 +826,10 @@ var testExpr = []struct {
Expr: &VectorSelector{ Expr: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
Grouping: clientmodel.LabelNames{"foo"}, Grouping: model.LabelNames{"foo"},
}, },
}, { }, {
input: "stddev(some_metric)", input: "stddev(some_metric)",
@ -838,7 +838,7 @@ var testExpr = []struct {
Expr: &VectorSelector{ Expr: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
}, },
@ -849,10 +849,10 @@ var testExpr = []struct {
Expr: &VectorSelector{ Expr: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
Grouping: clientmodel.LabelNames{"foo"}, Grouping: model.LabelNames{"foo"},
}, },
}, { }, {
input: `sum some_metric by (test)`, input: `sum some_metric by (test)`,
@ -902,7 +902,7 @@ var testExpr = []struct {
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.NotEqual, Name: "foo", Value: "bar"}, {Type: metric.NotEqual, Name: "foo", Value: "bar"},
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
}, },
@ -915,7 +915,7 @@ var testExpr = []struct {
&MatrixSelector{ &MatrixSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
Range: 5 * time.Minute, Range: 5 * time.Minute,
}, },
@ -929,7 +929,7 @@ var testExpr = []struct {
&VectorSelector{ &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
}, },
@ -942,7 +942,7 @@ var testExpr = []struct {
&VectorSelector{ &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
&NumberLiteral{5}, &NumberLiteral{5},
@ -1090,14 +1090,14 @@ var testStatement = []struct {
Name: "dc:http_request:rate5m", Name: "dc:http_request:rate5m",
Expr: &AggregateExpr{ Expr: &AggregateExpr{
Op: itemSum, Op: itemSum,
Grouping: clientmodel.LabelNames{"dc"}, Grouping: model.LabelNames{"dc"},
Expr: &Call{ Expr: &Call{
Func: mustGetFunction("rate"), Func: mustGetFunction("rate"),
Args: Expressions{ Args: Expressions{
&MatrixSelector{ &MatrixSelector{
Name: "http_request_count", Name: "http_request_count",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "http_request_count"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "http_request_count"},
}, },
Range: 5 * time.Minute, Range: 5 * time.Minute,
}, },
@ -1113,12 +1113,12 @@ var testStatement = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "dc:http_request:rate5m", Name: "dc:http_request:rate5m",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "dc:http_request:rate5m"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "dc:http_request:rate5m"},
}, },
}, },
RHS: &NumberLiteral{10000}, RHS: &NumberLiteral{10000},
}}, }},
Labels: clientmodel.LabelSet{"service": "testservice"}, Labels: model.LabelSet{"service": "testservice"},
Duration: 5 * time.Minute, Duration: 5 * time.Minute,
Summary: "Global request rate low", Summary: "Global request rate low",
Description: "The global request rate is low", Description: "The global request rate is low",
@ -1129,7 +1129,7 @@ var testStatement = []struct {
Name: "bar", Name: "bar",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: "label1", Value: "value1"}, {Type: metric.Equal, Name: "label1", Value: "value1"},
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
Labels: nil, Labels: nil,
@ -1141,12 +1141,12 @@ var testStatement = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "foo", Name: "foo",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
}, },
}, },
RHS: &NumberLiteral{10}, RHS: &NumberLiteral{10},
}, },
Labels: clientmodel.LabelSet{}, Labels: model.LabelSet{},
Summary: "Baz", Summary: "Baz",
Description: "BazAlert", Description: "BazAlert",
Runbook: "http://my.url", Runbook: "http://my.url",
@ -1162,10 +1162,10 @@ var testStatement = []struct {
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: "a", Value: "b"}, {Type: metric.Equal, Name: "a", Value: "b"},
mustLabelMatcher(metric.RegexMatch, "x", "y"), mustLabelMatcher(metric.RegexMatch, "x", "y"),
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
}, },
}, },
Labels: clientmodel.LabelSet{"x": "", "a": "z"}, Labels: model.LabelSet{"x": "", "a": "z"},
}, },
}, },
}, { }, {
@ -1181,12 +1181,12 @@ var testStatement = []struct {
LHS: &VectorSelector{ LHS: &VectorSelector{
Name: "some_metric", Name: "some_metric",
LabelMatchers: metric.LabelMatchers{ LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"}, {Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
}, },
}, },
RHS: &NumberLiteral{1}, RHS: &NumberLiteral{1},
}, },
Labels: clientmodel.LabelSet{}, Labels: model.LabelSet{},
Summary: "Global request rate low", Summary: "Global request rate low",
Description: "The global request rate is low", Description: "The global request rate is low",
}, },
@ -1311,7 +1311,7 @@ func TestParseStatements(t *testing.T) {
} }
} }
func mustLabelMatcher(mt metric.MatchType, name clientmodel.LabelName, val clientmodel.LabelValue) *metric.LabelMatcher { func mustLabelMatcher(mt metric.MatchType, name model.LabelName, val model.LabelValue) *metric.LabelMatcher {
m, err := metric.NewLabelMatcher(mt, name, val) m, err := metric.NewLabelMatcher(mt, name, val)
if err != nil { if err != nil {
panic(err) panic(err)
@ -1329,57 +1329,57 @@ func mustGetFunction(name string) *Function {
var testSeries = []struct { var testSeries = []struct {
input string input string
expectedMetric clientmodel.Metric expectedMetric model.Metric
expectedValues []sequenceValue expectedValues []sequenceValue
fail bool fail bool
}{ }{
{ {
input: `{} 1 2 3`, input: `{} 1 2 3`,
expectedMetric: clientmodel.Metric{}, expectedMetric: model.Metric{},
expectedValues: newSeq(1, 2, 3), expectedValues: newSeq(1, 2, 3),
}, { }, {
input: `{a="b"} -1 2 3`, input: `{a="b"} -1 2 3`,
expectedMetric: clientmodel.Metric{ expectedMetric: model.Metric{
"a": "b", "a": "b",
}, },
expectedValues: newSeq(-1, 2, 3), expectedValues: newSeq(-1, 2, 3),
}, { }, {
input: `my_metric 1 2 3`, input: `my_metric 1 2 3`,
expectedMetric: clientmodel.Metric{ expectedMetric: model.Metric{
clientmodel.MetricNameLabel: "my_metric", model.MetricNameLabel: "my_metric",
}, },
expectedValues: newSeq(1, 2, 3), expectedValues: newSeq(1, 2, 3),
}, { }, {
input: `my_metric{} 1 2 3`, input: `my_metric{} 1 2 3`,
expectedMetric: clientmodel.Metric{ expectedMetric: model.Metric{
clientmodel.MetricNameLabel: "my_metric", model.MetricNameLabel: "my_metric",
}, },
expectedValues: newSeq(1, 2, 3), expectedValues: newSeq(1, 2, 3),
}, { }, {
input: `my_metric{a="b"} 1 2 3`, input: `my_metric{a="b"} 1 2 3`,
expectedMetric: clientmodel.Metric{ expectedMetric: model.Metric{
clientmodel.MetricNameLabel: "my_metric", model.MetricNameLabel: "my_metric",
"a": "b", "a": "b",
}, },
expectedValues: newSeq(1, 2, 3), expectedValues: newSeq(1, 2, 3),
}, { }, {
input: `my_metric{a="b"} 1 2 3-10x4`, input: `my_metric{a="b"} 1 2 3-10x4`,
expectedMetric: clientmodel.Metric{ expectedMetric: model.Metric{
clientmodel.MetricNameLabel: "my_metric", model.MetricNameLabel: "my_metric",
"a": "b", "a": "b",
}, },
expectedValues: newSeq(1, 2, 3, -7, -17, -27, -37), expectedValues: newSeq(1, 2, 3, -7, -17, -27, -37),
}, { }, {
input: `my_metric{a="b"} 1 2 3-0x4`, input: `my_metric{a="b"} 1 2 3-0x4`,
expectedMetric: clientmodel.Metric{ expectedMetric: model.Metric{
clientmodel.MetricNameLabel: "my_metric", model.MetricNameLabel: "my_metric",
"a": "b", "a": "b",
}, },
expectedValues: newSeq(1, 2, 3, 3, 3, 3, 3), expectedValues: newSeq(1, 2, 3, 3, 3, 3, 3),
}, { }, {
input: `my_metric{a="b"} 1 3 _ 5 _x4`, input: `my_metric{a="b"} 1 3 _ 5 _x4`,
expectedMetric: clientmodel.Metric{ expectedMetric: model.Metric{
clientmodel.MetricNameLabel: "my_metric", model.MetricNameLabel: "my_metric",
"a": "b", "a": "b",
}, },
expectedValues: newSeq(1, 3, none, 5, none, none, none, none), expectedValues: newSeq(1, 3, none, 5, none, none, none, none),
@ -1397,7 +1397,7 @@ func newSeq(vals ...float64) (res []sequenceValue) {
if v == none { if v == none {
res = append(res, sequenceValue{omitted: true}) res = append(res, sequenceValue{omitted: true})
} else { } else {
res = append(res, sequenceValue{value: clientmodel.SampleValue(v)}) res = append(res, sequenceValue{value: model.SampleValue(v)})
} }
} }
return res return res
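
The expansion notation in the series descriptions above (for example `3-10x4`) is untouched by the rename; only the value type becomes model.SampleValue. A hedged sketch of the arithmetic, with an illustrative helper named expand:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// expand mimics what parseSeriesDesc does for `<start><offset>x<times>`:
// it starts at start and appends times further values, each shifted by offset.
func expand(start, offset model.SampleValue, times int) []model.SampleValue {
	vals := []model.SampleValue{start}
	k := start
	for i := 0; i < times; i++ {
		k += offset
		vals = append(vals, k)
	}
	return vals
}

func main() {
	// `3-10x4` after the literal values 1 and 2 yields 3, -7, -17, -27, -37,
	// matching the expectation for `my_metric{a="b"} 1 2 3-10x4` above.
	fmt.Println(expand(3, -10, 4)) // [3 -7 -17 -27 -37]
}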

View file

@ -18,7 +18,7 @@ import (
"sort" "sort"
"strings" "strings"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/util/strutil" "github.com/prometheus/prometheus/util/strutil"
@ -27,14 +27,14 @@ import (
func (matrix Matrix) String() string { func (matrix Matrix) String() string {
metricStrings := make([]string, 0, len(matrix)) metricStrings := make([]string, 0, len(matrix))
for _, sampleStream := range matrix { for _, sampleStream := range matrix {
metricName, hasName := sampleStream.Metric.Metric[clientmodel.MetricNameLabel] metricName, hasName := sampleStream.Metric.Metric[model.MetricNameLabel]
numLabels := len(sampleStream.Metric.Metric) numLabels := len(sampleStream.Metric.Metric)
if hasName { if hasName {
numLabels-- numLabels--
} }
labelStrings := make([]string, 0, numLabels) labelStrings := make([]string, 0, numLabels)
for label, value := range sampleStream.Metric.Metric { for label, value := range sampleStream.Metric.Metric {
if label != clientmodel.MetricNameLabel { if label != model.MetricNameLabel {
labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value)) labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value))
} }
} }
@ -233,7 +233,7 @@ func (node *VectorSelector) String() string {
labelStrings := make([]string, 0, len(node.LabelMatchers)-1) labelStrings := make([]string, 0, len(node.LabelMatchers)-1)
for _, matcher := range node.LabelMatchers { for _, matcher := range node.LabelMatchers {
// Only include the __name__ label if it is not an equality match. // Only include the __name__ label if it is not an equality match.
if matcher.Name == clientmodel.MetricNameLabel && matcher.Type == metric.Equal { if matcher.Name == model.MetricNameLabel && matcher.Type == metric.Equal {
continue continue
} }
labelStrings = append(labelStrings, matcher.String()) labelStrings = append(labelStrings, matcher.String())
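
Printing still treats __name__ specially: it is pulled out as the metric name and skipped when the remaining labels are rendered. Below is a standalone sketch of that formatting, not the package's own String method; formatMetric and the sample labels are illustrative:

package main

import (
	"fmt"
	"sort"
	"strings"

	"github.com/prometheus/common/model"
)

// formatMetric renders the metric name first and the remaining labels,
// sorted, inside braces, skipping the __name__ label itself.
func formatMetric(m model.Metric) string {
	name := string(m[model.MetricNameLabel])
	labels := make([]string, 0, len(m))
	for ln, lv := range m {
		if ln == model.MetricNameLabel {
			continue // the name is not repeated as a label
		}
		labels = append(labels, fmt.Sprintf("%s=%q", ln, lv))
	}
	sort.Strings(labels)
	return name + "{" + strings.Join(labels, ", ") + "}"
}

func main() {
	m := model.Metric{
		model.MetricNameLabel: "http_requests_total",
		"job":                 "api",
		"code":                "200",
	}
	fmt.Println(formatMetric(m)) // http_requests_total{code="200", job="api"}
}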

View file

@ -17,21 +17,21 @@ import (
"math" "math"
"sort" "sort"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
// Helpers to calculate quantiles. // Helpers to calculate quantiles.
// excludedLabels are the labels to exclude from signature calculation for // excludedLabels are the labels to exclude from signature calculation for
// quantiles. // quantiles.
var excludedLabels = map[clientmodel.LabelName]struct{}{ var excludedLabels = map[model.LabelName]struct{}{
clientmodel.MetricNameLabel: {}, model.MetricNameLabel: {},
clientmodel.BucketLabel: {}, model.BucketLabel: {},
} }
type bucket struct { type bucket struct {
upperBound float64 upperBound float64
count clientmodel.SampleValue count model.SampleValue
} }
// buckets implements sort.Interface. // buckets implements sort.Interface.
@ -42,7 +42,7 @@ func (b buckets) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
func (b buckets) Less(i, j int) bool { return b[i].upperBound < b[j].upperBound } func (b buckets) Less(i, j int) bool { return b[i].upperBound < b[j].upperBound }
type metricWithBuckets struct { type metricWithBuckets struct {
metric clientmodel.COWMetric metric model.COWMetric
buckets buckets buckets buckets
} }
@ -68,7 +68,7 @@ type metricWithBuckets struct {
// If q<0, -Inf is returned. // If q<0, -Inf is returned.
// //
// If q>1, +Inf is returned. // If q>1, +Inf is returned.
func quantile(q clientmodel.SampleValue, buckets buckets) float64 { func quantile(q model.SampleValue, buckets buckets) float64 {
if q < 0 { if q < 0 {
return math.Inf(-1) return math.Inf(-1)
} }
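
In quantile.go only the type prefixes change; the calculation is untouched. As a reminder of what it computes, here is a simplified, self-contained sketch of bucket interpolation over cumulative counts; it is not the full implementation and omits the edge cases around invalid q, the +Inf bucket, and the lowest bucket:

package main

import (
	"fmt"
	"sort"

	"github.com/prometheus/common/model"
)

type bucket struct {
	upperBound float64
	count      model.SampleValue // cumulative count up to upperBound
}

// quantileSketch locates the bucket containing the requested rank and
// interpolates linearly between that bucket's bounds.
func quantileSketch(q model.SampleValue, bs []bucket) float64 {
	sort.Slice(bs, func(i, j int) bool { return bs[i].upperBound < bs[j].upperBound })

	rank := q * bs[len(bs)-1].count
	b := sort.Search(len(bs), func(i int) bool { return bs[i].count >= rank })

	lower, below := 0.0, model.SampleValue(0)
	if b > 0 {
		lower, below = bs[b-1].upperBound, bs[b-1].count
	}
	upper := bs[b].upperBound
	inBucket := bs[b].count - below
	return lower + (upper-lower)*float64((rank-below)/inBucket)
}

func main() {
	bs := []bucket{{0.1, 50}, {0.5, 90}, {1, 100}}
	// rank = 0.9 * 100 = 90, which falls exactly on the 0.5 bucket.
	fmt.Println(quantileSketch(0.9, bs)) // 0.5
}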

View file

@ -22,7 +22,7 @@ import (
"strings" "strings"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/local" "github.com/prometheus/prometheus/storage/local"
@ -40,7 +40,7 @@ var (
) )
const ( const (
testStartTime = clientmodel.Timestamp(0) testStartTime = model.Time(0)
epsilon = 0.000001 // Relative error allowed for sample values. epsilon = 0.000001 // Relative error allowed for sample values.
maxErrorCount = 10 maxErrorCount = 10
) )
@ -165,7 +165,7 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
break break
} }
if f, err := parseNumber(defLine); err == nil { if f, err := parseNumber(defLine); err == nil {
cmd.expect(0, nil, sequenceValue{value: clientmodel.SampleValue(f)}) cmd.expect(0, nil, sequenceValue{value: model.SampleValue(f)})
break break
} }
metric, vals, err := parseSeriesDesc(defLine) metric, vals, err := parseSeriesDesc(defLine)
@ -238,15 +238,15 @@ func (*evalCmd) testCmd() {}
// metrics into the storage. // metrics into the storage.
type loadCmd struct { type loadCmd struct {
gap time.Duration gap time.Duration
metrics map[clientmodel.Fingerprint]clientmodel.Metric metrics map[model.Fingerprint]model.Metric
defs map[clientmodel.Fingerprint]metric.Values defs map[model.Fingerprint]metric.Values
} }
func newLoadCmd(gap time.Duration) *loadCmd { func newLoadCmd(gap time.Duration) *loadCmd {
return &loadCmd{ return &loadCmd{
gap: gap, gap: gap,
metrics: map[clientmodel.Fingerprint]clientmodel.Metric{}, metrics: map[model.Fingerprint]model.Metric{},
defs: map[clientmodel.Fingerprint]metric.Values{}, defs: map[model.Fingerprint]metric.Values{},
} }
} }
@ -255,7 +255,7 @@ func (cmd loadCmd) String() string {
} }
// set a sequence of sample values for the given metric. // set a sequence of sample values for the given metric.
func (cmd *loadCmd) set(m clientmodel.Metric, vals ...sequenceValue) { func (cmd *loadCmd) set(m model.Metric, vals ...sequenceValue) {
fp := m.Fingerprint() fp := m.Fingerprint()
samples := make(metric.Values, 0, len(vals)) samples := make(metric.Values, 0, len(vals))
@ -278,7 +278,7 @@ func (cmd *loadCmd) append(a storage.SampleAppender) {
for fp, samples := range cmd.defs { for fp, samples := range cmd.defs {
met := cmd.metrics[fp] met := cmd.metrics[fp]
for _, smpl := range samples { for _, smpl := range samples {
s := &clientmodel.Sample{ s := &model.Sample{
Metric: met, Metric: met,
Value: smpl.Value, Value: smpl.Value,
Timestamp: smpl.Timestamp, Timestamp: smpl.Timestamp,
@ -292,14 +292,14 @@ func (cmd *loadCmd) append(a storage.SampleAppender) {
// and expects a specific result. // and expects a specific result.
type evalCmd struct { type evalCmd struct {
expr Expr expr Expr
start, end clientmodel.Timestamp start, end model.Time
interval time.Duration interval time.Duration
instant bool instant bool
fail, ordered bool fail, ordered bool
metrics map[clientmodel.Fingerprint]clientmodel.Metric metrics map[model.Fingerprint]model.Metric
expected map[clientmodel.Fingerprint]entry expected map[model.Fingerprint]entry
} }
type entry struct { type entry struct {
@ -311,7 +311,7 @@ func (e entry) String() string {
return fmt.Sprintf("%d: %s", e.pos, e.vals) return fmt.Sprintf("%d: %s", e.pos, e.vals)
} }
func newEvalCmd(expr Expr, start, end clientmodel.Timestamp, interval time.Duration) *evalCmd { func newEvalCmd(expr Expr, start, end model.Time, interval time.Duration) *evalCmd {
return &evalCmd{ return &evalCmd{
expr: expr, expr: expr,
start: start, start: start,
@ -319,8 +319,8 @@ func newEvalCmd(expr Expr, start, end clientmodel.Timestamp, interval time.Durat
interval: interval, interval: interval,
instant: start == end && interval == 0, instant: start == end && interval == 0,
metrics: map[clientmodel.Fingerprint]clientmodel.Metric{}, metrics: map[model.Fingerprint]model.Metric{},
expected: map[clientmodel.Fingerprint]entry{}, expected: map[model.Fingerprint]entry{},
} }
} }
@ -330,7 +330,7 @@ func (ev *evalCmd) String() string {
// expect adds a new metric with a sequence of values to the set of expected // expect adds a new metric with a sequence of values to the set of expected
// results for the query. // results for the query.
func (ev *evalCmd) expect(pos int, m clientmodel.Metric, vals ...sequenceValue) { func (ev *evalCmd) expect(pos int, m model.Metric, vals ...sequenceValue) {
if m == nil { if m == nil {
ev.expected[0] = entry{pos: pos, vals: vals} ev.expected[0] = entry{pos: pos, vals: vals}
return return
@ -347,7 +347,7 @@ func (ev *evalCmd) compareResult(result Value) error {
if ev.instant { if ev.instant {
return fmt.Errorf("received range result on instant evaluation") return fmt.Errorf("received range result on instant evaluation")
} }
seen := map[clientmodel.Fingerprint]bool{} seen := map[model.Fingerprint]bool{}
for pos, v := range val { for pos, v := range val {
fp := v.Metric.Metric.Fingerprint() fp := v.Metric.Metric.Fingerprint()
if _, ok := ev.metrics[fp]; !ok { if _, ok := ev.metrics[fp]; !ok {
@ -374,7 +374,7 @@ func (ev *evalCmd) compareResult(result Value) error {
if !ev.instant { if !ev.instant {
return fmt.Errorf("received instant result on range evaluation") return fmt.Errorf("received instant result on range evaluation")
} }
seen := map[clientmodel.Fingerprint]bool{} seen := map[model.Fingerprint]bool{}
for pos, v := range val { for pos, v := range val {
fp := v.Metric.Metric.Fingerprint() fp := v.Metric.Metric.Fingerprint()
if _, ok := ev.metrics[fp]; !ok { if _, ok := ev.metrics[fp]; !ok {
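
The test harness shows the remaining renames a caller sees: clientmodel.Timestamp becomes model.Time, and samples and fingerprints move with it. A minimal sketch, assuming only the common/model package; the metric and values are made up:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	// model.Time replaces clientmodel.Timestamp; it counts milliseconds
	// since the epoch.
	start := model.Time(0)

	m := model.Metric{
		model.MetricNameLabel: "some_metric",
		"a":                   "b",
	}

	s := &model.Sample{
		Metric:    m,
		Value:     model.SampleValue(1),
		Timestamp: start.Add(5 * time.Minute),
	}

	// Fingerprints still key the bookkeeping maps, as in loadCmd and evalCmd.
	seen := map[model.Fingerprint]*model.Sample{}
	seen[m.Fingerprint()] = s

	fmt.Println(len(seen), s.Timestamp)
}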

View file

@ -24,7 +24,7 @@ import (
"github.com/prometheus/log" "github.com/prometheus/log"
consul "github.com/hashicorp/consul/api" consul "github.com/hashicorp/consul/api"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
) )
@ -34,21 +34,21 @@ const (
consulRetryInterval = 15 * time.Second consulRetryInterval = 15 * time.Second
// ConsulAddressLabel is the name for the label containing a target's address. // ConsulAddressLabel is the name for the label containing a target's address.
ConsulAddressLabel = clientmodel.MetaLabelPrefix + "consul_address" ConsulAddressLabel = model.MetaLabelPrefix + "consul_address"
// ConsulNodeLabel is the name for the label containing a target's node name. // ConsulNodeLabel is the name for the label containing a target's node name.
ConsulNodeLabel = clientmodel.MetaLabelPrefix + "consul_node" ConsulNodeLabel = model.MetaLabelPrefix + "consul_node"
// ConsulTagsLabel is the name of the label containing the tags assigned to the target. // ConsulTagsLabel is the name of the label containing the tags assigned to the target.
ConsulTagsLabel = clientmodel.MetaLabelPrefix + "consul_tags" ConsulTagsLabel = model.MetaLabelPrefix + "consul_tags"
// ConsulServiceLabel is the name of the label containing the service name. // ConsulServiceLabel is the name of the label containing the service name.
ConsulServiceLabel = clientmodel.MetaLabelPrefix + "consul_service" ConsulServiceLabel = model.MetaLabelPrefix + "consul_service"
// ConsulServiceAddressLabel is the name of the label containing the (optional) service address. // ConsulServiceAddressLabel is the name of the label containing the (optional) service address.
ConsulServiceAddressLabel = clientmodel.MetaLabelPrefix + "consul_service_address" ConsulServiceAddressLabel = model.MetaLabelPrefix + "consul_service_address"
// ConsulServicePortLabel is the name of the label containing the service port. // ConsulServicePortLabel is the name of the label containing the service port.
ConsulServicePortLabel = clientmodel.MetaLabelPrefix + "consul_service_port" ConsulServicePortLabel = model.MetaLabelPrefix + "consul_service_port"
// ConsulDCLabel is the name of the label containing the datacenter ID. // ConsulDCLabel is the name of the label containing the datacenter ID.
ConsulDCLabel = clientmodel.MetaLabelPrefix + "consul_dc" ConsulDCLabel = model.MetaLabelPrefix + "consul_dc"
// ConsulServiceIDLabel is the name of the label containing the service ID. // ConsulServiceIDLabel is the name of the label containing the service ID.
ConsulServiceIDLabel = clientmodel.MetaLabelPrefix + "consul_service_id" ConsulServiceIDLabel = model.MetaLabelPrefix + "consul_service_id"
) )
// ConsulDiscovery retrieves target information from a Consul server // ConsulDiscovery retrieves target information from a Consul server
@ -226,9 +226,9 @@ func (cd *ConsulDiscovery) watchServices(update chan<- *consulService, done <-ch
srv.tgroup.Source = name srv.tgroup.Source = name
cd.services[name] = srv cd.services[name] = srv
} }
srv.tgroup.Labels = clientmodel.LabelSet{ srv.tgroup.Labels = model.LabelSet{
ConsulServiceLabel: clientmodel.LabelValue(name), ConsulServiceLabel: model.LabelValue(name),
ConsulDCLabel: clientmodel.LabelValue(cd.clientDatacenter), ConsulDCLabel: model.LabelValue(cd.clientDatacenter),
} }
update <- srv update <- srv
} }
@ -263,7 +263,7 @@ func (cd *ConsulDiscovery) watchService(srv *consulService, ch chan<- *config.Ta
continue continue
} }
srv.lastIndex = meta.LastIndex srv.lastIndex = meta.LastIndex
srv.tgroup.Targets = make([]clientmodel.LabelSet, 0, len(nodes)) srv.tgroup.Targets = make([]model.LabelSet, 0, len(nodes))
for _, node := range nodes { for _, node := range nodes {
addr := fmt.Sprintf("%s:%d", node.Address, node.ServicePort) addr := fmt.Sprintf("%s:%d", node.Address, node.ServicePort)
@ -271,14 +271,14 @@ func (cd *ConsulDiscovery) watchService(srv *consulService, ch chan<- *config.Ta
// in relabeling rules don't have to consider tag positions. // in relabeling rules don't have to consider tag positions.
tags := cd.tagSeparator + strings.Join(node.ServiceTags, cd.tagSeparator) + cd.tagSeparator tags := cd.tagSeparator + strings.Join(node.ServiceTags, cd.tagSeparator) + cd.tagSeparator
srv.tgroup.Targets = append(srv.tgroup.Targets, clientmodel.LabelSet{ srv.tgroup.Targets = append(srv.tgroup.Targets, model.LabelSet{
clientmodel.AddressLabel: clientmodel.LabelValue(addr), model.AddressLabel: model.LabelValue(addr),
ConsulAddressLabel: clientmodel.LabelValue(node.Address), ConsulAddressLabel: model.LabelValue(node.Address),
ConsulNodeLabel: clientmodel.LabelValue(node.Node), ConsulNodeLabel: model.LabelValue(node.Node),
ConsulTagsLabel: clientmodel.LabelValue(tags), ConsulTagsLabel: model.LabelValue(tags),
ConsulServiceAddressLabel: clientmodel.LabelValue(node.ServiceAddress), ConsulServiceAddressLabel: model.LabelValue(node.ServiceAddress),
ConsulServicePortLabel: clientmodel.LabelValue(strconv.Itoa(node.ServicePort)), ConsulServicePortLabel: model.LabelValue(strconv.Itoa(node.ServicePort)),
ConsulServiceIDLabel: clientmodel.LabelValue(node.ServiceID), ConsulServiceIDLabel: model.LabelValue(node.ServiceID),
}) })
} }
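
Discovery code now assembles its target groups from common/model types as well. A small sketch of the same pattern with a made-up address, service name, and meta label:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// A discovery-specific meta label built from the shared prefix, in the same
// way ConsulServiceLabel and friends are defined above (the name is made up).
const exampleServiceLabel = model.MetaLabelPrefix + "example_service"

func main() {
	// One discovered target: the scrape address plus meta labels that
	// relabeling rules can act on later.
	target := model.LabelSet{
		model.AddressLabel:  model.LabelValue("10.0.0.1:8080"),
		exampleServiceLabel: model.LabelValue("web"),
	}

	// Labels shared by every target in the group.
	groupLabels := model.LabelSet{
		exampleServiceLabel: "web",
	}

	fmt.Println(target, groupLabels)
}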

View file

@ -24,7 +24,7 @@ import (
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
) )
@ -32,7 +32,7 @@ import (
const ( const (
resolvConf = "/etc/resolv.conf" resolvConf = "/etc/resolv.conf"
DNSNameLabel = clientmodel.MetaLabelPrefix + "dns_name" DNSNameLabel = model.MetaLabelPrefix + "dns_name"
// Constants for instrumentation. // Constants for instrumentation.
namespace = "prometheus" namespace = "prometheus"
@ -144,25 +144,25 @@ func (dd *DNSDiscovery) refresh(name string, ch chan<- *config.TargetGroup) erro
tg := &config.TargetGroup{} tg := &config.TargetGroup{}
for _, record := range response.Answer { for _, record := range response.Answer {
target := clientmodel.LabelValue("") target := model.LabelValue("")
switch addr := record.(type) { switch addr := record.(type) {
case *dns.SRV: case *dns.SRV:
// Remove the final dot from rooted DNS names to make them look more usual. // Remove the final dot from rooted DNS names to make them look more usual.
addr.Target = strings.TrimRight(addr.Target, ".") addr.Target = strings.TrimRight(addr.Target, ".")
target = clientmodel.LabelValue(fmt.Sprintf("%s:%d", addr.Target, addr.Port)) target = model.LabelValue(fmt.Sprintf("%s:%d", addr.Target, addr.Port))
case *dns.A: case *dns.A:
target = clientmodel.LabelValue(fmt.Sprintf("%s:%d", addr.A, dd.port)) target = model.LabelValue(fmt.Sprintf("%s:%d", addr.A, dd.port))
case *dns.AAAA: case *dns.AAAA:
target = clientmodel.LabelValue(fmt.Sprintf("%s:%d", addr.AAAA, dd.port)) target = model.LabelValue(fmt.Sprintf("%s:%d", addr.AAAA, dd.port))
default: default:
log.Warnf("%q is not a valid SRV record", record) log.Warnf("%q is not a valid SRV record", record)
continue continue
} }
tg.Targets = append(tg.Targets, clientmodel.LabelSet{ tg.Targets = append(tg.Targets, model.LabelSet{
clientmodel.AddressLabel: target, model.AddressLabel: target,
DNSNameLabel: clientmodel.LabelValue(name), DNSNameLabel: model.LabelValue(name),
}) })
} }
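A hedged sketch of the record-to-target mapping above, assuming the github.com/miekg/dns record types already imported by this file; dnsNameLabel and the sample A record are illustrative stand-ins.

package main

import (
	"fmt"
	"net"

	"github.com/miekg/dns"
	"github.com/prometheus/common/model"
)

// Local stand-in for the DNSNameLabel constant defined in this file.
const dnsNameLabel = model.MetaLabelPrefix + "dns_name"

// targetFromRecord mirrors the switch above: SRV records carry their own
// port, A records are combined with a default port.
func targetFromRecord(record dns.RR, defaultPort int) (model.LabelValue, bool) {
	switch addr := record.(type) {
	case *dns.SRV:
		return model.LabelValue(fmt.Sprintf("%s:%d", addr.Target, addr.Port)), true
	case *dns.A:
		return model.LabelValue(fmt.Sprintf("%s:%d", addr.A, defaultPort)), true
	default:
		return "", false
	}
}

func main() {
	rec := &dns.A{A: net.ParseIP("192.0.2.10")}
	if target, ok := targetFromRecord(rec, 9100); ok {
		fmt.Println(model.LabelSet{
			model.AddressLabel: target,
			dnsNameLabel:       "example.internal",
		})
	}
}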


@ -25,12 +25,12 @@ import (
"gopkg.in/fsnotify.v1" "gopkg.in/fsnotify.v1"
"gopkg.in/yaml.v2" "gopkg.in/yaml.v2"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
) )
const FileSDFilepathLabel = clientmodel.MetaLabelPrefix + "filepath" const FileSDFilepathLabel = model.MetaLabelPrefix + "filepath"
// FileDiscovery provides service discovery functionality based // FileDiscovery provides service discovery functionality based
// on files that contain target groups in JSON or YAML format. Refreshing // on files that contain target groups in JSON or YAML format. Refreshing
@ -244,9 +244,9 @@ func readFile(filename string) ([]*config.TargetGroup, error) {
for i, tg := range targetGroups { for i, tg := range targetGroups {
tg.Source = fileSource(filename, i) tg.Source = fileSource(filename, i)
if tg.Labels == nil { if tg.Labels == nil {
tg.Labels = clientmodel.LabelSet{} tg.Labels = model.LabelSet{}
} }
tg.Labels[FileSDFilepathLabel] = clientmodel.LabelValue(filename) tg.Labels[FileSDFilepathLabel] = model.LabelValue(filename)
} }
return targetGroups, nil return targetGroups, nil
} }
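A small self-contained sketch of the labelling step above, using only common/model; fileSDFilepathLabel is a local copy of the constant defined in this file, and the config.TargetGroup wrapper is elided.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// Local copy of the FileSDFilepathLabel constant from this file.
const fileSDFilepathLabel = model.MetaLabelPrefix + "filepath"

// tagGroupLabels ensures a group label set exists and records the source
// file, mirroring the readFile loop above.
func tagGroupLabels(labels model.LabelSet, filename string) model.LabelSet {
	if labels == nil {
		labels = model.LabelSet{}
	}
	labels[fileSDFilepathLabel] = model.LabelValue(filename)
	return labels
}

func main() {
	fmt.Println(tagGroupLabels(nil, "/etc/prometheus/targets/web.json"))
}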


@ -25,7 +25,7 @@ import (
"sync" "sync"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/log" "github.com/prometheus/log"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/util/httputil" "github.com/prometheus/prometheus/util/httputil"
@ -37,7 +37,7 @@ const (
// kubernetesMetaLabelPrefix is the meta prefix used for all meta labels. // kubernetesMetaLabelPrefix is the meta prefix used for all meta labels.
// in this discovery. // in this discovery.
metaLabelPrefix = clientmodel.MetaLabelPrefix + "kubernetes_" metaLabelPrefix = model.MetaLabelPrefix + "kubernetes_"
// nodeLabel is the name for the label containing a target's node name. // nodeLabel is the name for the label containing a target's node name.
nodeLabel = metaLabelPrefix + "node" nodeLabel = metaLabelPrefix + "node"
// serviceNamespaceLabel is the name for the label containing a target's service namespace. // serviceNamespaceLabel is the name for the label containing a target's service namespace.
@ -224,13 +224,13 @@ func (kd *KubernetesDiscovery) updateNodesTargetGroup() *config.TargetGroup {
for nodeName, node := range kd.nodes { for nodeName, node := range kd.nodes {
address := fmt.Sprintf("%s:%d", node.Status.Addresses[0].Address, kd.Conf.KubeletPort) address := fmt.Sprintf("%s:%d", node.Status.Addresses[0].Address, kd.Conf.KubeletPort)
t := clientmodel.LabelSet{ t := model.LabelSet{
clientmodel.AddressLabel: clientmodel.LabelValue(address), model.AddressLabel: model.LabelValue(address),
nodeLabel: clientmodel.LabelValue(nodeName), nodeLabel: model.LabelValue(nodeName),
} }
for k, v := range node.ObjectMeta.Labels { for k, v := range node.ObjectMeta.Labels {
labelName := strutil.SanitizeLabelName(nodeLabelPrefix + k) labelName := strutil.SanitizeLabelName(nodeLabelPrefix + k)
t[clientmodel.LabelName(labelName)] = clientmodel.LabelValue(v) t[model.LabelName(labelName)] = model.LabelValue(v)
} }
tg.Targets = append(tg.Targets, t) tg.Targets = append(tg.Targets, t)
} }
@ -397,20 +397,20 @@ func (kd *KubernetesDiscovery) addService(service *Service) *config.TargetGroup
func (kd *KubernetesDiscovery) updateServiceTargetGroup(service *Service, endpoints *Endpoints) *config.TargetGroup { func (kd *KubernetesDiscovery) updateServiceTargetGroup(service *Service, endpoints *Endpoints) *config.TargetGroup {
tg := &config.TargetGroup{ tg := &config.TargetGroup{
Source: serviceSource(service), Source: serviceSource(service),
Labels: clientmodel.LabelSet{ Labels: model.LabelSet{
serviceNamespaceLabel: clientmodel.LabelValue(service.ObjectMeta.Namespace), serviceNamespaceLabel: model.LabelValue(service.ObjectMeta.Namespace),
serviceNameLabel: clientmodel.LabelValue(service.ObjectMeta.Name), serviceNameLabel: model.LabelValue(service.ObjectMeta.Name),
}, },
} }
for k, v := range service.ObjectMeta.Labels { for k, v := range service.ObjectMeta.Labels {
labelName := strutil.SanitizeLabelName(serviceLabelPrefix + k) labelName := strutil.SanitizeLabelName(serviceLabelPrefix + k)
tg.Labels[clientmodel.LabelName(labelName)] = clientmodel.LabelValue(v) tg.Labels[model.LabelName(labelName)] = model.LabelValue(v)
} }
for k, v := range service.ObjectMeta.Annotations { for k, v := range service.ObjectMeta.Annotations {
labelName := strutil.SanitizeLabelName(serviceAnnotationPrefix + k) labelName := strutil.SanitizeLabelName(serviceAnnotationPrefix + k)
tg.Labels[clientmodel.LabelName(labelName)] = clientmodel.LabelValue(v) tg.Labels[model.LabelName(labelName)] = model.LabelValue(v)
} }
// Now let's loop through the endpoints & add them to the target group with appropriate labels. // Now let's loop through the endpoints & add them to the target group with appropriate labels.
@ -424,7 +424,7 @@ func (kd *KubernetesDiscovery) updateServiceTargetGroup(service *Service, endpoi
} }
address := fmt.Sprintf("%s:%d", ipAddr, epPort) address := fmt.Sprintf("%s:%d", ipAddr, epPort)
t := clientmodel.LabelSet{clientmodel.AddressLabel: clientmodel.LabelValue(address)} t := model.LabelSet{model.AddressLabel: model.LabelValue(address)}
tg.Targets = append(tg.Targets, t) tg.Targets = append(tg.Targets, t)
} }
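A brief sketch of how a node target is assembled with the common/model types, mirroring updateNodesTargetGroup above; the label constants and node values are stand-ins, while strutil.SanitizeLabelName is the helper already used in this file.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/util/strutil"
)

// Local stand-ins for the discovery label constants used above.
const (
	nodeLabel       = model.MetaLabelPrefix + "kubernetes_node"
	nodeLabelPrefix = model.MetaLabelPrefix + "kubernetes_node_label_"
)

func main() {
	// Node name, kubelet address and object labels (stand-in values).
	nodeName, address := "node-1", "10.0.0.3:10255"
	objectLabels := map[string]string{"kubernetes.io/hostname": "node-1"}

	t := model.LabelSet{
		model.AddressLabel: model.LabelValue(address),
		nodeLabel:          model.LabelValue(nodeName),
	}
	for k, v := range objectLabels {
		labelName := strutil.SanitizeLabelName(nodeLabelPrefix + k)
		t[model.LabelName(labelName)] = model.LabelValue(v)
	}
	fmt.Println(t)
}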


@ -14,19 +14,19 @@
package marathon package marathon
import ( import (
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
const ( const (
// metaLabelPrefix is the meta prefix used for all meta labels in this discovery. // metaLabelPrefix is the meta prefix used for all meta labels in this discovery.
metaLabelPrefix = clientmodel.MetaLabelPrefix + "marathon_" metaLabelPrefix = model.MetaLabelPrefix + "marathon_"
// appLabelPrefix is the prefix for the application labels. // appLabelPrefix is the prefix for the application labels.
appLabelPrefix = metaLabelPrefix + "app_label_" appLabelPrefix = metaLabelPrefix + "app_label_"
// appLabel is used for the name of the app in Marathon. // appLabel is used for the name of the app in Marathon.
appLabel clientmodel.LabelName = metaLabelPrefix + "app" appLabel model.LabelName = metaLabelPrefix + "app"
// imageLabel is the label that is used for the docker image running the service. // imageLabel is the label that is used for the docker image running the service.
imageLabel clientmodel.LabelName = metaLabelPrefix + "image" imageLabel model.LabelName = metaLabelPrefix + "image"
// taskLabel contains the mesos task name of the app instance. // taskLabel contains the mesos task name of the app instance.
taskLabel clientmodel.LabelName = metaLabelPrefix + "task" taskLabel model.LabelName = metaLabelPrefix + "task"
) )


@ -16,7 +16,7 @@ package marathon
import ( import (
"fmt" "fmt"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
) )
@ -34,12 +34,12 @@ func AppsToTargetGroups(apps *AppList) map[string]*config.TargetGroup {
func createTargetGroup(app *App) *config.TargetGroup { func createTargetGroup(app *App) *config.TargetGroup {
var ( var (
targets = targetsForApp(app) targets = targetsForApp(app)
appName = clientmodel.LabelValue(app.ID) appName = model.LabelValue(app.ID)
image = clientmodel.LabelValue(app.Container.Docker.Image) image = model.LabelValue(app.Container.Docker.Image)
) )
tg := &config.TargetGroup{ tg := &config.TargetGroup{
Targets: targets, Targets: targets,
Labels: clientmodel.LabelSet{ Labels: model.LabelSet{
appLabel: appName, appLabel: appName,
imageLabel: image, imageLabel: image,
}, },
@ -48,19 +48,19 @@ func createTargetGroup(app *App) *config.TargetGroup {
for ln, lv := range app.Labels { for ln, lv := range app.Labels {
ln = appLabelPrefix + ln ln = appLabelPrefix + ln
tg.Labels[clientmodel.LabelName(ln)] = clientmodel.LabelValue(lv) tg.Labels[model.LabelName(ln)] = model.LabelValue(lv)
} }
return tg return tg
} }
func targetsForApp(app *App) []clientmodel.LabelSet { func targetsForApp(app *App) []model.LabelSet {
targets := make([]clientmodel.LabelSet, 0, len(app.Tasks)) targets := make([]model.LabelSet, 0, len(app.Tasks))
for _, t := range app.Tasks { for _, t := range app.Tasks {
target := targetForTask(&t) target := targetForTask(&t)
targets = append(targets, clientmodel.LabelSet{ targets = append(targets, model.LabelSet{
clientmodel.AddressLabel: clientmodel.LabelValue(target), model.AddressLabel: model.LabelValue(target),
taskLabel: clientmodel.LabelValue(t.ID), taskLabel: model.LabelValue(t.ID),
}) })
} }
return targets return targets
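A compact sketch of the per-task target construction in targetsForApp above; the task struct and taskLabel constant are trimmed stand-ins for the Marathon types.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// Trimmed stand-in for the Marathon Task type used above.
type task struct {
	ID    string
	Host  string
	Ports []int
}

// Local stand-in for the taskLabel constant.
const taskLabel model.LabelName = model.MetaLabelPrefix + "marathon_task"

func targetsForTasks(tasks []task) []model.LabelSet {
	targets := make([]model.LabelSet, 0, len(tasks))
	for _, t := range tasks {
		addr := fmt.Sprintf("%s:%d", t.Host, t.Ports[0])
		targets = append(targets, model.LabelSet{
			model.AddressLabel: model.LabelValue(addr),
			taskLabel:          model.LabelValue(t.ID),
		})
	}
	return targets
}

func main() {
	fmt.Println(targetsForTasks([]task{{ID: "web.1", Host: "mesos-slave1", Ports: []int{31000}}}))
}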


@ -18,7 +18,7 @@ import (
"testing" "testing"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/retrieval/discovery/marathon" "github.com/prometheus/prometheus/retrieval/discovery/marathon"
@ -104,8 +104,8 @@ func TestMarathonSDSendGroup(t *testing.T) {
t.Fatalf("Wrong number of targets: %v", tg.Targets) t.Fatalf("Wrong number of targets: %v", tg.Targets)
} }
tgt := tg.Targets[0] tgt := tg.Targets[0]
if tgt[clientmodel.AddressLabel] != "mesos-slave1:31000" { if tgt[model.AddressLabel] != "mesos-slave1:31000" {
t.Fatalf("Wrong target address: %s", tgt[clientmodel.AddressLabel]) t.Fatalf("Wrong target address: %s", tgt[model.AddressLabel])
} }
default: default:
t.Fatal("Did not get a target group.") t.Fatal("Did not get a target group.")


@ -24,7 +24,7 @@ import (
"github.com/prometheus/log" "github.com/prometheus/log"
"github.com/samuel/go-zookeeper/zk" "github.com/samuel/go-zookeeper/zk"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/util/strutil" "github.com/prometheus/prometheus/util/strutil"
@ -33,7 +33,7 @@ import (
const ( const (
serversetNodePrefix = "member_" serversetNodePrefix = "member_"
serversetLabelPrefix = clientmodel.MetaLabelPrefix + "serverset_" serversetLabelPrefix = model.MetaLabelPrefix + "serverset_"
serversetStatusLabel = serversetLabelPrefix + "status" serversetStatusLabel = serversetLabelPrefix + "status"
serversetPathLabel = serversetLabelPrefix + "path" serversetPathLabel = serversetLabelPrefix + "path"
serversetEndpointLabelPrefix = serversetLabelPrefix + "endpoint" serversetEndpointLabelPrefix = serversetLabelPrefix + "endpoint"
@ -110,7 +110,7 @@ func (sd *ServersetDiscovery) processUpdates() {
if event.Data != nil { if event.Data != nil {
labelSet, err := parseServersetMember(*event.Data, event.Path) labelSet, err := parseServersetMember(*event.Data, event.Path)
if err == nil { if err == nil {
tg.Targets = []clientmodel.LabelSet{*labelSet} tg.Targets = []model.LabelSet{*labelSet}
sd.sources[event.Path] = tg sd.sources[event.Path] = tg
} else { } else {
delete(sd.sources, event.Path) delete(sd.sources, event.Path)
@ -144,31 +144,31 @@ func (sd *ServersetDiscovery) Run(ch chan<- *config.TargetGroup, done <-chan str
sd.treeCache.Stop() sd.treeCache.Stop()
} }
func parseServersetMember(data []byte, path string) (*clientmodel.LabelSet, error) { func parseServersetMember(data []byte, path string) (*model.LabelSet, error) {
member := serversetMember{} member := serversetMember{}
err := json.Unmarshal(data, &member) err := json.Unmarshal(data, &member)
if err != nil { if err != nil {
return nil, fmt.Errorf("error unmarshaling serverset member %q: %s", path, err) return nil, fmt.Errorf("error unmarshaling serverset member %q: %s", path, err)
} }
labels := clientmodel.LabelSet{} labels := model.LabelSet{}
labels[serversetPathLabel] = clientmodel.LabelValue(path) labels[serversetPathLabel] = model.LabelValue(path)
labels[clientmodel.AddressLabel] = clientmodel.LabelValue( labels[model.AddressLabel] = model.LabelValue(
fmt.Sprintf("%s:%d", member.ServiceEndpoint.Host, member.ServiceEndpoint.Port)) fmt.Sprintf("%s:%d", member.ServiceEndpoint.Host, member.ServiceEndpoint.Port))
labels[serversetEndpointLabelPrefix+"_host"] = clientmodel.LabelValue(member.ServiceEndpoint.Host) labels[serversetEndpointLabelPrefix+"_host"] = model.LabelValue(member.ServiceEndpoint.Host)
labels[serversetEndpointLabelPrefix+"_port"] = clientmodel.LabelValue(fmt.Sprintf("%d", member.ServiceEndpoint.Port)) labels[serversetEndpointLabelPrefix+"_port"] = model.LabelValue(fmt.Sprintf("%d", member.ServiceEndpoint.Port))
for name, endpoint := range member.AdditionalEndpoints { for name, endpoint := range member.AdditionalEndpoints {
cleanName := clientmodel.LabelName(strutil.SanitizeLabelName(name)) cleanName := model.LabelName(strutil.SanitizeLabelName(name))
labels[serversetEndpointLabelPrefix+"_host_"+cleanName] = clientmodel.LabelValue( labels[serversetEndpointLabelPrefix+"_host_"+cleanName] = model.LabelValue(
endpoint.Host) endpoint.Host)
labels[serversetEndpointLabelPrefix+"_port_"+cleanName] = clientmodel.LabelValue( labels[serversetEndpointLabelPrefix+"_port_"+cleanName] = model.LabelValue(
fmt.Sprintf("%d", endpoint.Port)) fmt.Sprintf("%d", endpoint.Port))
} }
labels[serversetStatusLabel] = clientmodel.LabelValue(member.Status) labels[serversetStatusLabel] = model.LabelValue(member.Status)
return &labels, nil return &labels, nil
} }
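A self-contained sketch of decoding a serverset member into a model.LabelSet as parseServersetMember does above; the JSON field names and the trimmed serversetMember struct are assumptions for illustration.

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/prometheus/common/model"
)

// Trimmed assumption of the serverset member JSON shape used above.
type serversetEndpoint struct {
	Host string `json:"host"`
	Port int    `json:"port"`
}

type serversetMember struct {
	ServiceEndpoint serversetEndpoint `json:"serviceEndpoint"`
	Status          string            `json:"status"`
}

const serversetStatusLabel = model.MetaLabelPrefix + "serverset_status"

func main() {
	data := []byte(`{"serviceEndpoint":{"host":"10.0.0.7","port":9090},"status":"ALIVE"}`)

	var member serversetMember
	if err := json.Unmarshal(data, &member); err != nil {
		log.Fatalf("error unmarshaling serverset member: %s", err)
	}

	labels := model.LabelSet{
		model.AddressLabel: model.LabelValue(
			fmt.Sprintf("%s:%d", member.ServiceEndpoint.Host, member.ServiceEndpoint.Port)),
		serversetStatusLabel: model.LabelValue(member.Status),
	}
	fmt.Println(labels)
}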


@ -16,28 +16,28 @@ package retrieval
import ( import (
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
) )
type nopAppender struct{} type nopAppender struct{}
func (a nopAppender) Append(*clientmodel.Sample) { func (a nopAppender) Append(*model.Sample) {
} }
type slowAppender struct{} type slowAppender struct{}
func (a slowAppender) Append(*clientmodel.Sample) { func (a slowAppender) Append(*model.Sample) {
time.Sleep(time.Millisecond) time.Sleep(time.Millisecond)
return return
} }
type collectResultAppender struct { type collectResultAppender struct {
result clientmodel.Samples result model.Samples
} }
func (a *collectResultAppender) Append(s *clientmodel.Sample) { func (a *collectResultAppender) Append(s *model.Sample) {
for ln, lv := range s.Metric { for ln, lv := range s.Metric {
if len(lv) == 0 { if len(lv) == 0 {
delete(s.Metric, ln) delete(s.Metric, ln)


@ -5,7 +5,7 @@ import (
"fmt" "fmt"
"strings" "strings"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
) )
@ -13,8 +13,8 @@ import (
// Relabel returns a relabeled copy of the given label set. The relabel configurations // Relabel returns a relabeled copy of the given label set. The relabel configurations
// are applied in order of input. // are applied in order of input.
// If a label set is dropped, nil is returned. // If a label set is dropped, nil is returned.
func Relabel(labels clientmodel.LabelSet, cfgs ...*config.RelabelConfig) (clientmodel.LabelSet, error) { func Relabel(labels model.LabelSet, cfgs ...*config.RelabelConfig) (model.LabelSet, error) {
out := clientmodel.LabelSet{} out := model.LabelSet{}
for ln, lv := range labels { for ln, lv := range labels {
out[ln] = lv out[ln] = lv
} }
@ -30,7 +30,7 @@ func Relabel(labels clientmodel.LabelSet, cfgs ...*config.RelabelConfig) (client
return out, nil return out, nil
} }
func relabel(labels clientmodel.LabelSet, cfg *config.RelabelConfig) (clientmodel.LabelSet, error) { func relabel(labels model.LabelSet, cfg *config.RelabelConfig) (model.LabelSet, error) {
values := make([]string, 0, len(cfg.SourceLabels)) values := make([]string, 0, len(cfg.SourceLabels))
for _, ln := range cfg.SourceLabels { for _, ln := range cfg.SourceLabels {
values = append(values, string(labels[ln])) values = append(values, string(labels[ln]))
@ -56,13 +56,13 @@ func relabel(labels clientmodel.LabelSet, cfg *config.RelabelConfig) (clientmode
if len(res) == 0 { if len(res) == 0 {
delete(labels, cfg.TargetLabel) delete(labels, cfg.TargetLabel)
} else { } else {
labels[cfg.TargetLabel] = clientmodel.LabelValue(res) labels[cfg.TargetLabel] = model.LabelValue(res)
} }
case config.RelabelHashMod: case config.RelabelHashMod:
mod := sum64(md5.Sum([]byte(val))) % cfg.Modulus mod := sum64(md5.Sum([]byte(val))) % cfg.Modulus
labels[cfg.TargetLabel] = clientmodel.LabelValue(fmt.Sprintf("%d", mod)) labels[cfg.TargetLabel] = model.LabelValue(fmt.Sprintf("%d", mod))
case config.RelabelLabelMap: case config.RelabelLabelMap:
out := make(clientmodel.LabelSet, len(labels)) out := make(model.LabelSet, len(labels))
// Take a copy to avoid infinite loops. // Take a copy to avoid infinite loops.
for ln, lv := range labels { for ln, lv := range labels {
out[ln] = lv out[ln] = lv
@ -70,7 +70,7 @@ func relabel(labels clientmodel.LabelSet, cfg *config.RelabelConfig) (clientmode
for ln, lv := range labels { for ln, lv := range labels {
if cfg.Regex.MatchString(string(ln)) { if cfg.Regex.MatchString(string(ln)) {
res := cfg.Regex.ReplaceAllString(string(ln), cfg.Replacement) res := cfg.Regex.ReplaceAllString(string(ln), cfg.Replacement)
out[clientmodel.LabelName(res)] = lv out[model.LabelName(res)] = lv
} }
} }
labels = out labels = out
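A standalone sketch of the labelmap step above: copy the label set, then add a regex-renamed entry for every matching label name. The helper name and sample input are illustrative only.

package main

import (
	"fmt"
	"regexp"

	"github.com/prometheus/common/model"
)

// labelMap mirrors the RelabelLabelMap case above: work on a copy and add a
// renamed entry for every label whose name matches the regex.
func labelMap(labels model.LabelSet, re *regexp.Regexp, replacement string) model.LabelSet {
	out := make(model.LabelSet, len(labels))
	for ln, lv := range labels {
		out[ln] = lv
	}
	for ln, lv := range labels {
		if re.MatchString(string(ln)) {
			res := re.ReplaceAllString(string(ln), replacement)
			out[model.LabelName(res)] = lv
		}
	}
	return out
}

func main() {
	in := model.LabelSet{"__meta_my_bar": "aaa", "__meta_my_baz": "bbb", "a": "foo"}
	fmt.Println(labelMap(in, regexp.MustCompile("__meta_(my.*)"), "${1}"))
}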


@ -5,34 +5,34 @@ import (
"regexp" "regexp"
"testing" "testing"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
) )
func TestRelabel(t *testing.T) { func TestRelabel(t *testing.T) {
tests := []struct { tests := []struct {
input clientmodel.LabelSet input model.LabelSet
relabel []*config.RelabelConfig relabel []*config.RelabelConfig
output clientmodel.LabelSet output model.LabelSet
}{ }{
{ {
input: clientmodel.LabelSet{ input: model.LabelSet{
"a": "foo", "a": "foo",
"b": "bar", "b": "bar",
"c": "baz", "c": "baz",
}, },
relabel: []*config.RelabelConfig{ relabel: []*config.RelabelConfig{
{ {
SourceLabels: clientmodel.LabelNames{"a"}, SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("f(.*)")}, Regex: &config.Regexp{*regexp.MustCompile("f(.*)")},
TargetLabel: clientmodel.LabelName("d"), TargetLabel: model.LabelName("d"),
Separator: ";", Separator: ";",
Replacement: "ch${1}-ch${1}", Replacement: "ch${1}-ch${1}",
Action: config.RelabelReplace, Action: config.RelabelReplace,
}, },
}, },
output: clientmodel.LabelSet{ output: model.LabelSet{
"a": "foo", "a": "foo",
"b": "bar", "b": "bar",
"c": "baz", "c": "baz",
@ -40,30 +40,30 @@ func TestRelabel(t *testing.T) {
}, },
}, },
{ {
input: clientmodel.LabelSet{ input: model.LabelSet{
"a": "foo", "a": "foo",
"b": "bar", "b": "bar",
"c": "baz", "c": "baz",
}, },
relabel: []*config.RelabelConfig{ relabel: []*config.RelabelConfig{
{ {
SourceLabels: clientmodel.LabelNames{"a", "b"}, SourceLabels: model.LabelNames{"a", "b"},
Regex: &config.Regexp{*regexp.MustCompile("^f(.*);(.*)r$")}, Regex: &config.Regexp{*regexp.MustCompile("^f(.*);(.*)r$")},
TargetLabel: clientmodel.LabelName("a"), TargetLabel: model.LabelName("a"),
Separator: ";", Separator: ";",
Replacement: "b${1}${2}m", // boobam Replacement: "b${1}${2}m", // boobam
Action: config.RelabelReplace, Action: config.RelabelReplace,
}, },
{ {
SourceLabels: clientmodel.LabelNames{"c", "a"}, SourceLabels: model.LabelNames{"c", "a"},
Regex: &config.Regexp{*regexp.MustCompile("(b).*b(.*)ba(.*)")}, Regex: &config.Regexp{*regexp.MustCompile("(b).*b(.*)ba(.*)")},
TargetLabel: clientmodel.LabelName("d"), TargetLabel: model.LabelName("d"),
Separator: ";", Separator: ";",
Replacement: "$1$2$2$3", Replacement: "$1$2$2$3",
Action: config.RelabelReplace, Action: config.RelabelReplace,
}, },
}, },
output: clientmodel.LabelSet{ output: model.LabelSet{
"a": "boobam", "a": "boobam",
"b": "bar", "b": "bar",
"c": "baz", "c": "baz",
@ -71,18 +71,18 @@ func TestRelabel(t *testing.T) {
}, },
}, },
{ {
input: clientmodel.LabelSet{ input: model.LabelSet{
"a": "foo", "a": "foo",
}, },
relabel: []*config.RelabelConfig{ relabel: []*config.RelabelConfig{
{ {
SourceLabels: clientmodel.LabelNames{"a"}, SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("o$")}, Regex: &config.Regexp{*regexp.MustCompile("o$")},
Action: config.RelabelDrop, Action: config.RelabelDrop,
}, { }, {
SourceLabels: clientmodel.LabelNames{"a"}, SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("f(.*)")}, Regex: &config.Regexp{*regexp.MustCompile("f(.*)")},
TargetLabel: clientmodel.LabelName("d"), TargetLabel: model.LabelName("d"),
Separator: ";", Separator: ";",
Replacement: "ch$1-ch$1", Replacement: "ch$1-ch$1",
Action: config.RelabelReplace, Action: config.RelabelReplace,
@ -91,46 +91,46 @@ func TestRelabel(t *testing.T) {
output: nil, output: nil,
}, },
{ {
input: clientmodel.LabelSet{ input: model.LabelSet{
"a": "abc", "a": "abc",
}, },
relabel: []*config.RelabelConfig{ relabel: []*config.RelabelConfig{
{ {
SourceLabels: clientmodel.LabelNames{"a"}, SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("(b)")}, Regex: &config.Regexp{*regexp.MustCompile("(b)")},
TargetLabel: clientmodel.LabelName("d"), TargetLabel: model.LabelName("d"),
Separator: ";", Separator: ";",
Replacement: "$1", Replacement: "$1",
Action: config.RelabelReplace, Action: config.RelabelReplace,
}, },
}, },
output: clientmodel.LabelSet{ output: model.LabelSet{
"a": "abc", "a": "abc",
"d": "b", "d": "b",
}, },
}, },
{ {
input: clientmodel.LabelSet{ input: model.LabelSet{
"a": "foo", "a": "foo",
}, },
relabel: []*config.RelabelConfig{ relabel: []*config.RelabelConfig{
{ {
SourceLabels: clientmodel.LabelNames{"a"}, SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("no-match")}, Regex: &config.Regexp{*regexp.MustCompile("no-match")},
Action: config.RelabelDrop, Action: config.RelabelDrop,
}, },
}, },
output: clientmodel.LabelSet{ output: model.LabelSet{
"a": "foo", "a": "foo",
}, },
}, },
{ {
input: clientmodel.LabelSet{ input: model.LabelSet{
"a": "foo", "a": "foo",
}, },
relabel: []*config.RelabelConfig{ relabel: []*config.RelabelConfig{
{ {
SourceLabels: clientmodel.LabelNames{"a"}, SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("no-match")}, Regex: &config.Regexp{*regexp.MustCompile("no-match")},
Action: config.RelabelKeep, Action: config.RelabelKeep,
}, },
@ -138,54 +138,54 @@ func TestRelabel(t *testing.T) {
output: nil, output: nil,
}, },
{ {
input: clientmodel.LabelSet{ input: model.LabelSet{
"a": "foo", "a": "foo",
}, },
relabel: []*config.RelabelConfig{ relabel: []*config.RelabelConfig{
{ {
SourceLabels: clientmodel.LabelNames{"a"}, SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("^f")}, Regex: &config.Regexp{*regexp.MustCompile("^f")},
Action: config.RelabelKeep, Action: config.RelabelKeep,
}, },
}, },
output: clientmodel.LabelSet{ output: model.LabelSet{
"a": "foo", "a": "foo",
}, },
}, },
{ {
// No replacement must be applied if there is no match. // No replacement must be applied if there is no match.
input: clientmodel.LabelSet{ input: model.LabelSet{
"a": "boo", "a": "boo",
}, },
relabel: []*config.RelabelConfig{ relabel: []*config.RelabelConfig{
{ {
SourceLabels: clientmodel.LabelNames{"a"}, SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("^f")}, Regex: &config.Regexp{*regexp.MustCompile("^f")},
TargetLabel: clientmodel.LabelName("b"), TargetLabel: model.LabelName("b"),
Replacement: "bar", Replacement: "bar",
Action: config.RelabelReplace, Action: config.RelabelReplace,
}, },
}, },
output: clientmodel.LabelSet{ output: model.LabelSet{
"a": "boo", "a": "boo",
}, },
}, },
{ {
input: clientmodel.LabelSet{ input: model.LabelSet{
"a": "foo", "a": "foo",
"b": "bar", "b": "bar",
"c": "baz", "c": "baz",
}, },
relabel: []*config.RelabelConfig{ relabel: []*config.RelabelConfig{
{ {
SourceLabels: clientmodel.LabelNames{"c"}, SourceLabels: model.LabelNames{"c"},
TargetLabel: clientmodel.LabelName("d"), TargetLabel: model.LabelName("d"),
Separator: ";", Separator: ";",
Action: config.RelabelHashMod, Action: config.RelabelHashMod,
Modulus: 1000, Modulus: 1000,
}, },
}, },
output: clientmodel.LabelSet{ output: model.LabelSet{
"a": "foo", "a": "foo",
"b": "bar", "b": "bar",
"c": "baz", "c": "baz",
@ -193,7 +193,7 @@ func TestRelabel(t *testing.T) {
}, },
}, },
{ {
input: clientmodel.LabelSet{ input: model.LabelSet{
"a": "foo", "a": "foo",
"b1": "bar", "b1": "bar",
"b2": "baz", "b2": "baz",
@ -205,7 +205,7 @@ func TestRelabel(t *testing.T) {
Action: config.RelabelLabelMap, Action: config.RelabelLabelMap,
}, },
}, },
output: clientmodel.LabelSet{ output: model.LabelSet{
"a": "foo", "a": "foo",
"b1": "bar", "b1": "bar",
"b2": "baz", "b2": "baz",
@ -214,7 +214,7 @@ func TestRelabel(t *testing.T) {
}, },
}, },
{ {
input: clientmodel.LabelSet{ input: model.LabelSet{
"a": "foo", "a": "foo",
"__meta_my_bar": "aaa", "__meta_my_bar": "aaa",
"__meta_my_baz": "bbb", "__meta_my_baz": "bbb",
@ -227,7 +227,7 @@ func TestRelabel(t *testing.T) {
Action: config.RelabelLabelMap, Action: config.RelabelLabelMap,
}, },
}, },
output: clientmodel.LabelSet{ output: model.LabelSet{
"a": "foo", "a": "foo",
"__meta_my_bar": "aaa", "__meta_my_bar": "aaa",
"__meta_my_baz": "bbb", "__meta_my_baz": "bbb",


@ -30,7 +30,7 @@ import (
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
@ -40,10 +40,10 @@ import (
const ( const (
// ScrapeHealthMetricName is the metric name for the synthetic health // ScrapeHealthMetricName is the metric name for the synthetic health
// variable. // variable.
scrapeHealthMetricName clientmodel.LabelValue = "up" scrapeHealthMetricName model.LabelValue = "up"
// ScrapeTimeMetricName is the metric name for the synthetic scrape duration // ScrapeTimeMetricName is the metric name for the synthetic scrape duration
// variable. // variable.
scrapeDurationMetricName clientmodel.LabelValue = "scrape_duration_seconds" scrapeDurationMetricName model.LabelValue = "scrape_duration_seconds"
// Capacity of the channel to buffer samples during ingestion. // Capacity of the channel to buffer samples during ingestion.
ingestedSamplesCap = 256 ingestedSamplesCap = 256
@ -150,7 +150,7 @@ type Target struct {
// Closing scraperStopped signals that scraping has been stopped. // Closing scraperStopped signals that scraping has been stopped.
scraperStopped chan struct{} scraperStopped chan struct{}
// Channel to buffer ingested samples. // Channel to buffer ingested samples.
ingestedSamples chan clientmodel.Samples ingestedSamples chan model.Samples
// Mutex protects the members below. // Mutex protects the members below.
sync.RWMutex sync.RWMutex
@ -159,9 +159,9 @@ type Target struct {
// url is the URL to be scraped. Its host is immutable. // url is the URL to be scraped. Its host is immutable.
url *url.URL url *url.URL
// Labels before any processing. // Labels before any processing.
metaLabels clientmodel.LabelSet metaLabels model.LabelSet
// Any base labels that are added to this target and its metrics. // Any base labels that are added to this target and its metrics.
baseLabels clientmodel.LabelSet baseLabels model.LabelSet
// What is the deadline for the HTTP or HTTPS against this endpoint. // What is the deadline for the HTTP or HTTPS against this endpoint.
deadline time.Duration deadline time.Duration
// The time between two scrapes. // The time between two scrapes.
@ -174,11 +174,11 @@ type Target struct {
} }
// NewTarget creates a reasonably configured target for querying. // NewTarget creates a reasonably configured target for querying.
func NewTarget(cfg *config.ScrapeConfig, baseLabels, metaLabels clientmodel.LabelSet) *Target { func NewTarget(cfg *config.ScrapeConfig, baseLabels, metaLabels model.LabelSet) *Target {
t := &Target{ t := &Target{
url: &url.URL{ url: &url.URL{
Scheme: string(baseLabels[clientmodel.SchemeLabel]), Scheme: string(baseLabels[model.SchemeLabel]),
Host: string(baseLabels[clientmodel.AddressLabel]), Host: string(baseLabels[model.AddressLabel]),
}, },
status: &TargetStatus{}, status: &TargetStatus{},
scraperStopping: make(chan struct{}), scraperStopping: make(chan struct{}),
@ -195,7 +195,7 @@ func (t *Target) Status() *TargetStatus {
// Update overwrites settings in the target that are derived from the job config // Update overwrites settings in the target that are derived from the job config
// it belongs to. // it belongs to.
func (t *Target) Update(cfg *config.ScrapeConfig, baseLabels, metaLabels clientmodel.LabelSet) { func (t *Target) Update(cfg *config.ScrapeConfig, baseLabels, metaLabels model.LabelSet) {
t.Lock() t.Lock()
defer t.Unlock() defer t.Unlock()
@ -206,19 +206,19 @@ func (t *Target) Update(cfg *config.ScrapeConfig, baseLabels, metaLabels clientm
} }
t.httpClient = httpClient t.httpClient = httpClient
t.url.Scheme = string(baseLabels[clientmodel.SchemeLabel]) t.url.Scheme = string(baseLabels[model.SchemeLabel])
t.url.Path = string(baseLabels[clientmodel.MetricsPathLabel]) t.url.Path = string(baseLabels[model.MetricsPathLabel])
params := url.Values{} params := url.Values{}
for k, v := range cfg.Params { for k, v := range cfg.Params {
params[k] = make([]string, len(v)) params[k] = make([]string, len(v))
copy(params[k], v) copy(params[k], v)
} }
for k, v := range baseLabels { for k, v := range baseLabels {
if strings.HasPrefix(string(k), clientmodel.ParamLabelPrefix) { if strings.HasPrefix(string(k), model.ParamLabelPrefix) {
if len(params[string(k[len(clientmodel.ParamLabelPrefix):])]) > 0 { if len(params[string(k[len(model.ParamLabelPrefix):])]) > 0 {
params[string(k[len(clientmodel.ParamLabelPrefix):])][0] = string(v) params[string(k[len(model.ParamLabelPrefix):])][0] = string(v)
} else { } else {
params[string(k[len(clientmodel.ParamLabelPrefix):])] = []string{string(v)} params[string(k[len(model.ParamLabelPrefix):])] = []string{string(v)}
} }
} }
} }
@ -229,15 +229,15 @@ func (t *Target) Update(cfg *config.ScrapeConfig, baseLabels, metaLabels clientm
t.honorLabels = cfg.HonorLabels t.honorLabels = cfg.HonorLabels
t.metaLabels = metaLabels t.metaLabels = metaLabels
t.baseLabels = clientmodel.LabelSet{} t.baseLabels = model.LabelSet{}
// All remaining internal labels will not be part of the label set. // All remaining internal labels will not be part of the label set.
for name, val := range baseLabels { for name, val := range baseLabels {
if !strings.HasPrefix(string(name), clientmodel.ReservedLabelPrefix) { if !strings.HasPrefix(string(name), model.ReservedLabelPrefix) {
t.baseLabels[name] = val t.baseLabels[name] = val
} }
} }
if _, ok := t.baseLabels[clientmodel.InstanceLabel]; !ok { if _, ok := t.baseLabels[model.InstanceLabel]; !ok {
t.baseLabels[clientmodel.InstanceLabel] = clientmodel.LabelValue(t.InstanceIdentifier()) t.baseLabels[model.InstanceLabel] = model.LabelValue(t.InstanceIdentifier())
} }
t.metricRelabelConfigs = cfg.MetricRelabelConfigs t.metricRelabelConfigs = cfg.MetricRelabelConfigs
} }
@ -302,7 +302,7 @@ func (t *Target) String() string {
} }
// Ingest implements an extraction.Ingester. // Ingest implements an extraction.Ingester.
func (t *Target) Ingest(s clientmodel.Samples) error { func (t *Target) Ingest(s model.Samples) error {
t.RLock() t.RLock()
deadline := t.deadline deadline := t.deadline
t.RUnlock() t.RUnlock()
@ -416,7 +416,7 @@ func (t *Target) scrape(sampleAppender storage.SampleAppender) (err error) {
defer func() { defer func() {
t.status.setLastError(err) t.status.setLastError(err)
recordScrapeHealth(sampleAppender, clientmodel.TimestampFromTime(start), baseLabels, t.status.Health(), time.Since(start)) recordScrapeHealth(sampleAppender, model.TimeFromTime(start), baseLabels, t.status.Health(), time.Since(start))
}() }()
req, err := http.NewRequest("GET", t.URL().String(), nil) req, err := http.NewRequest("GET", t.URL().String(), nil)
@ -439,10 +439,10 @@ func (t *Target) scrape(sampleAppender storage.SampleAppender) (err error) {
return err return err
} }
t.ingestedSamples = make(chan clientmodel.Samples, ingestedSamplesCap) t.ingestedSamples = make(chan model.Samples, ingestedSamplesCap)
processOptions := &extraction.ProcessOptions{ processOptions := &extraction.ProcessOptions{
Timestamp: clientmodel.TimestampFromTime(start), Timestamp: model.TimeFromTime(start),
} }
go func() { go func() {
err = processor.ProcessSingle(resp.Body, t, processOptions) err = processor.ProcessSingle(resp.Body, t, processOptions)
@ -464,14 +464,14 @@ func (t *Target) scrape(sampleAppender storage.SampleAppender) (err error) {
// value of the label is stored in a label prefixed with the exported prefix. // value of the label is stored in a label prefixed with the exported prefix.
for ln, lv := range baseLabels { for ln, lv := range baseLabels {
if v, ok := s.Metric[ln]; ok && v != "" { if v, ok := s.Metric[ln]; ok && v != "" {
s.Metric[clientmodel.ExportedLabelPrefix+ln] = v s.Metric[model.ExportedLabelPrefix+ln] = v
} }
s.Metric[ln] = lv s.Metric[ln] = lv
} }
} }
// Avoid the copy in Relabel if there are no configs. // Avoid the copy in Relabel if there are no configs.
if len(metricRelabelConfigs) > 0 { if len(metricRelabelConfigs) > 0 {
labels, err := Relabel(clientmodel.LabelSet(s.Metric), metricRelabelConfigs...) labels, err := Relabel(model.LabelSet(s.Metric), metricRelabelConfigs...)
if err != nil { if err != nil {
log.Errorf("Error while relabeling metric %s of instance %s: %s", s.Metric, req.URL, err) log.Errorf("Error while relabeling metric %s of instance %s: %s", s.Metric, req.URL, err)
continue continue
@ -480,7 +480,7 @@ func (t *Target) scrape(sampleAppender storage.SampleAppender) (err error) {
if labels == nil { if labels == nil {
continue continue
} }
s.Metric = clientmodel.Metric(labels) s.Metric = model.Metric(labels)
} }
sampleAppender.Append(s) sampleAppender.Append(s)
} }
@ -503,24 +503,24 @@ func (t *Target) InstanceIdentifier() string {
} }
// fullLabels returns the base labels plus internal labels defining the target. // fullLabels returns the base labels plus internal labels defining the target.
func (t *Target) fullLabels() clientmodel.LabelSet { func (t *Target) fullLabels() model.LabelSet {
t.RLock() t.RLock()
defer t.RUnlock() defer t.RUnlock()
lset := make(clientmodel.LabelSet, len(t.baseLabels)+2) lset := make(model.LabelSet, len(t.baseLabels)+2)
for ln, lv := range t.baseLabels { for ln, lv := range t.baseLabels {
lset[ln] = lv lset[ln] = lv
} }
lset[clientmodel.MetricsPathLabel] = clientmodel.LabelValue(t.url.Path) lset[model.MetricsPathLabel] = model.LabelValue(t.url.Path)
lset[clientmodel.AddressLabel] = clientmodel.LabelValue(t.url.Host) lset[model.AddressLabel] = model.LabelValue(t.url.Host)
lset[clientmodel.SchemeLabel] = clientmodel.LabelValue(t.url.Scheme) lset[model.SchemeLabel] = model.LabelValue(t.url.Scheme)
return lset return lset
} }
// BaseLabels returns a copy of the target's base labels. // BaseLabels returns a copy of the target's base labels.
func (t *Target) BaseLabels() clientmodel.LabelSet { func (t *Target) BaseLabels() model.LabelSet {
t.RLock() t.RLock()
defer t.RUnlock() defer t.RUnlock()
lset := make(clientmodel.LabelSet, len(t.baseLabels)) lset := make(model.LabelSet, len(t.baseLabels))
for ln, lv := range t.baseLabels { for ln, lv := range t.baseLabels {
lset[ln] = lv lset[ln] = lv
} }
@ -528,10 +528,10 @@ func (t *Target) BaseLabels() clientmodel.LabelSet {
} }
// MetaLabels returns a copy of the target's labels before any processing. // MetaLabels returns a copy of the target's labels before any processing.
func (t *Target) MetaLabels() clientmodel.LabelSet { func (t *Target) MetaLabels() model.LabelSet {
t.RLock() t.RLock()
defer t.RUnlock() defer t.RUnlock()
lset := make(clientmodel.LabelSet, len(t.metaLabels)) lset := make(model.LabelSet, len(t.metaLabels))
for ln, lv := range t.metaLabels { for ln, lv := range t.metaLabels {
lset[ln] = lv lset[ln] = lv
} }
@ -540,36 +540,36 @@ func (t *Target) MetaLabels() clientmodel.LabelSet {
func recordScrapeHealth( func recordScrapeHealth(
sampleAppender storage.SampleAppender, sampleAppender storage.SampleAppender,
timestamp clientmodel.Timestamp, timestamp model.Time,
baseLabels clientmodel.LabelSet, baseLabels model.LabelSet,
health TargetHealth, health TargetHealth,
scrapeDuration time.Duration, scrapeDuration time.Duration,
) { ) {
healthMetric := make(clientmodel.Metric, len(baseLabels)+1) healthMetric := make(model.Metric, len(baseLabels)+1)
durationMetric := make(clientmodel.Metric, len(baseLabels)+1) durationMetric := make(model.Metric, len(baseLabels)+1)
healthMetric[clientmodel.MetricNameLabel] = clientmodel.LabelValue(scrapeHealthMetricName) healthMetric[model.MetricNameLabel] = model.LabelValue(scrapeHealthMetricName)
durationMetric[clientmodel.MetricNameLabel] = clientmodel.LabelValue(scrapeDurationMetricName) durationMetric[model.MetricNameLabel] = model.LabelValue(scrapeDurationMetricName)
for label, value := range baseLabels { for label, value := range baseLabels {
healthMetric[label] = value healthMetric[label] = value
durationMetric[label] = value durationMetric[label] = value
} }
healthValue := clientmodel.SampleValue(0) healthValue := model.SampleValue(0)
if health == HealthGood { if health == HealthGood {
healthValue = clientmodel.SampleValue(1) healthValue = model.SampleValue(1)
} }
healthSample := &clientmodel.Sample{ healthSample := &model.Sample{
Metric: healthMetric, Metric: healthMetric,
Timestamp: timestamp, Timestamp: timestamp,
Value: healthValue, Value: healthValue,
} }
durationSample := &clientmodel.Sample{ durationSample := &model.Sample{
Metric: durationMetric, Metric: durationMetric,
Timestamp: timestamp, Timestamp: timestamp,
Value: clientmodel.SampleValue(float64(scrapeDuration) / float64(time.Second)), Value: model.SampleValue(float64(scrapeDuration) / float64(time.Second)),
} }
sampleAppender.Append(healthSample) sampleAppender.Append(healthSample)
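A minimal sketch of how the synthetic health sample is built from common/model types, following recordScrapeHealth above; the helper name upSample and the hard-coded "up" metric name are illustrative.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// upSample builds the synthetic health sample the function above appends,
// copying the base labels into the metric and setting the value to 0 or 1.
func upSample(baseLabels model.LabelSet, healthy bool, ts model.Time) *model.Sample {
	metric := make(model.Metric, len(baseLabels)+1)
	metric[model.MetricNameLabel] = "up"
	for ln, lv := range baseLabels {
		metric[ln] = lv
	}
	value := model.SampleValue(0)
	if healthy {
		value = 1
	}
	return &model.Sample{Metric: metric, Timestamp: ts, Value: value}
}

func main() {
	base := model.LabelSet{model.JobLabel: "testjob", model.InstanceLabel: "example.url:80"}
	fmt.Println(upSample(base, true, model.Now()))
}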


@ -17,7 +17,7 @@ import (
"crypto/tls" "crypto/tls"
"crypto/x509" "crypto/x509"
"errors" "errors"
"fmt" // "fmt"
"io/ioutil" "io/ioutil"
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
@ -28,17 +28,17 @@ import (
"testing" "testing"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
) )
func TestBaseLabels(t *testing.T) { func TestBaseLabels(t *testing.T) {
target := newTestTarget("example.com:80", 0, clientmodel.LabelSet{"job": "some_job", "foo": "bar"}) target := newTestTarget("example.com:80", 0, model.LabelSet{"job": "some_job", "foo": "bar"})
want := clientmodel.LabelSet{ want := model.LabelSet{
clientmodel.JobLabel: "some_job", model.JobLabel: "some_job",
clientmodel.InstanceLabel: "example.com:80", model.InstanceLabel: "example.com:80",
"foo": "bar", "foo": "bar",
} }
got := target.BaseLabels() got := target.BaseLabels()
if !reflect.DeepEqual(want, got) { if !reflect.DeepEqual(want, got) {
@ -49,8 +49,8 @@ func TestBaseLabels(t *testing.T) {
func TestOverwriteLabels(t *testing.T) { func TestOverwriteLabels(t *testing.T) {
type test struct { type test struct {
metric string metric string
resultNormal clientmodel.Metric resultNormal model.Metric
resultHonor clientmodel.Metric resultHonor model.Metric
} }
var tests []test var tests []test
@ -66,40 +66,40 @@ func TestOverwriteLabels(t *testing.T) {
), ),
) )
defer server.Close() defer server.Close()
addr := clientmodel.LabelValue(strings.Split(server.URL, "://")[1]) addr := model.LabelValue(strings.Split(server.URL, "://")[1])
tests = []test{ tests = []test{
{ {
metric: `foo{}`, metric: `foo{}`,
resultNormal: clientmodel.Metric{ resultNormal: model.Metric{
clientmodel.MetricNameLabel: "foo", model.MetricNameLabel: "foo",
clientmodel.InstanceLabel: addr, model.InstanceLabel: addr,
}, },
resultHonor: clientmodel.Metric{ resultHonor: model.Metric{
clientmodel.MetricNameLabel: "foo", model.MetricNameLabel: "foo",
clientmodel.InstanceLabel: addr, model.InstanceLabel: addr,
}, },
}, },
{ {
metric: `foo{instance=""}`, metric: `foo{instance=""}`,
resultNormal: clientmodel.Metric{ resultNormal: model.Metric{
clientmodel.MetricNameLabel: "foo", model.MetricNameLabel: "foo",
clientmodel.InstanceLabel: addr, model.InstanceLabel: addr,
}, },
resultHonor: clientmodel.Metric{ resultHonor: model.Metric{
clientmodel.MetricNameLabel: "foo", model.MetricNameLabel: "foo",
}, },
}, },
{ {
metric: `foo{instance="other_instance"}`, metric: `foo{instance="other_instance"}`,
resultNormal: clientmodel.Metric{ resultNormal: model.Metric{
clientmodel.MetricNameLabel: "foo", model.MetricNameLabel: "foo",
clientmodel.InstanceLabel: addr, model.InstanceLabel: addr,
clientmodel.ExportedLabelPrefix + clientmodel.InstanceLabel: "other_instance", model.ExportedLabelPrefix + model.InstanceLabel: "other_instance",
}, },
resultHonor: clientmodel.Metric{ resultHonor: model.Metric{
clientmodel.MetricNameLabel: "foo", model.MetricNameLabel: "foo",
clientmodel.InstanceLabel: "other_instance", model.InstanceLabel: "other_instance",
}, },
}, },
} }
@ -140,31 +140,31 @@ func TestTargetScrapeUpdatesState(t *testing.T) {
} }
} }
func TestTargetScrapeWithFullChannel(t *testing.T) { // func TestTargetScrapeWithFullChannel(t *testing.T) {
server := httptest.NewServer( // server := httptest.NewServer(
http.HandlerFunc( // http.HandlerFunc(
func(w http.ResponseWriter, r *http.Request) { // func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", `text/plain; version=0.0.4`) // w.Header().Set("Content-Type", `text/plain; version=0.0.4`)
for i := 0; i < 2*ingestedSamplesCap; i++ { // for i := 0; i < 2*ingestedSamplesCap; i++ {
w.Write([]byte( // w.Write([]byte(
fmt.Sprintf("test_metric_%d{foo=\"bar\"} 123.456\n", i), // fmt.Sprintf("test_metric_%d{foo=\"bar\"} 123.456\n", i),
)) // ))
} // }
}, // },
), // ),
) // )
defer server.Close() // defer server.Close()
testTarget := newTestTarget(server.URL, 10*time.Millisecond, clientmodel.LabelSet{"dings": "bums"}) // testTarget := newTestTarget(server.URL, 10*time.Millisecond, model.LabelSet{"dings": "bums"})
testTarget.scrape(slowAppender{}) // testTarget.scrape(slowAppender{})
if testTarget.status.Health() != HealthBad { // if testTarget.status.Health() != HealthBad {
t.Errorf("Expected target state %v, actual: %v", HealthBad, testTarget.status.Health()) // t.Errorf("Expected target state %v, actual: %v", HealthBad, testTarget.status.Health())
} // }
if testTarget.status.LastError() != errIngestChannelFull { // if testTarget.status.LastError() != errIngestChannelFull {
t.Errorf("Expected target error %q, actual: %q", errIngestChannelFull, testTarget.status.LastError()) // t.Errorf("Expected target error %q, actual: %q", errIngestChannelFull, testTarget.status.LastError())
} // }
} // }
func TestTargetScrapeMetricRelabelConfigs(t *testing.T) { func TestTargetScrapeMetricRelabelConfigs(t *testing.T) {
server := httptest.NewServer( server := httptest.NewServer(
@ -177,15 +177,15 @@ func TestTargetScrapeMetricRelabelConfigs(t *testing.T) {
), ),
) )
defer server.Close() defer server.Close()
testTarget := newTestTarget(server.URL, 10*time.Millisecond, clientmodel.LabelSet{}) testTarget := newTestTarget(server.URL, 10*time.Millisecond, model.LabelSet{})
testTarget.metricRelabelConfigs = []*config.RelabelConfig{ testTarget.metricRelabelConfigs = []*config.RelabelConfig{
{ {
SourceLabels: clientmodel.LabelNames{"__name__"}, SourceLabels: model.LabelNames{"__name__"},
Regex: &config.Regexp{*regexp.MustCompile(".*drop.*")}, Regex: &config.Regexp{*regexp.MustCompile(".*drop.*")},
Action: config.RelabelDrop, Action: config.RelabelDrop,
}, },
{ {
SourceLabels: clientmodel.LabelNames{"__name__"}, SourceLabels: model.LabelNames{"__name__"},
Regex: &config.Regexp{*regexp.MustCompile(".*(relabel|up).*")}, Regex: &config.Regexp{*regexp.MustCompile(".*(relabel|up).*")},
TargetLabel: "foo", TargetLabel: "foo",
Replacement: "bar", Replacement: "bar",
@ -202,29 +202,29 @@ func TestTargetScrapeMetricRelabelConfigs(t *testing.T) {
sample.Value = 0 sample.Value = 0
} }
expected := []*clientmodel.Sample{ expected := []*model.Sample{
{ {
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: "test_metric_relabel", model.MetricNameLabel: "test_metric_relabel",
"foo": "bar", "foo": "bar",
clientmodel.InstanceLabel: clientmodel.LabelValue(testTarget.url.Host), model.InstanceLabel: model.LabelValue(testTarget.url.Host),
}, },
Timestamp: 0, Timestamp: 0,
Value: 0, Value: 0,
}, },
// The metrics about the scrape are not affected. // The metrics about the scrape are not affected.
{ {
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: scrapeHealthMetricName, model.MetricNameLabel: scrapeHealthMetricName,
clientmodel.InstanceLabel: clientmodel.LabelValue(testTarget.url.Host), model.InstanceLabel: model.LabelValue(testTarget.url.Host),
}, },
Timestamp: 0, Timestamp: 0,
Value: 0, Value: 0,
}, },
{ {
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: scrapeDurationMetricName, model.MetricNameLabel: scrapeDurationMetricName,
clientmodel.InstanceLabel: clientmodel.LabelValue(testTarget.url.Host), model.InstanceLabel: model.LabelValue(testTarget.url.Host),
}, },
Timestamp: 0, Timestamp: 0,
Value: 0, Value: 0,
@ -238,12 +238,12 @@ func TestTargetScrapeMetricRelabelConfigs(t *testing.T) {
} }
func TestTargetRecordScrapeHealth(t *testing.T) { func TestTargetRecordScrapeHealth(t *testing.T) {
testTarget := newTestTarget("example.url:80", 0, clientmodel.LabelSet{clientmodel.JobLabel: "testjob"}) testTarget := newTestTarget("example.url:80", 0, model.LabelSet{model.JobLabel: "testjob"})
now := clientmodel.Now() now := model.Now()
appender := &collectResultAppender{} appender := &collectResultAppender{}
testTarget.status.setLastError(nil) testTarget.status.setLastError(nil)
recordScrapeHealth(appender, now, testTarget.BaseLabels(), testTarget.status.Health(), 2*time.Second) recordScrapeHealth(appender, now.Time(), testTarget.BaseLabels(), testTarget.status.Health(), 2*time.Second)
result := appender.result result := appender.result
@ -252,11 +252,11 @@ func TestTargetRecordScrapeHealth(t *testing.T) {
} }
actual := result[0] actual := result[0]
expected := &clientmodel.Sample{ expected := &model.Sample{
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: scrapeHealthMetricName, model.MetricNameLabel: scrapeHealthMetricName,
clientmodel.InstanceLabel: "example.url:80", model.InstanceLabel: "example.url:80",
clientmodel.JobLabel: "testjob", model.JobLabel: "testjob",
}, },
Timestamp: now, Timestamp: now,
Value: 1, Value: 1,
@ -267,11 +267,11 @@ func TestTargetRecordScrapeHealth(t *testing.T) {
} }
actual = result[1] actual = result[1]
expected = &clientmodel.Sample{ expected = &model.Sample{
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: scrapeDurationMetricName, model.MetricNameLabel: scrapeDurationMetricName,
clientmodel.InstanceLabel: "example.url:80", model.InstanceLabel: "example.url:80",
clientmodel.JobLabel: "testjob", model.JobLabel: "testjob",
}, },
Timestamp: now, Timestamp: now,
Value: 2.0, Value: 2.0,
@ -295,7 +295,7 @@ func TestTargetScrapeTimeout(t *testing.T) {
) )
defer server.Close() defer server.Close()
testTarget := newTestTarget(server.URL, 50*time.Millisecond, clientmodel.LabelSet{}) testTarget := newTestTarget(server.URL, 50*time.Millisecond, model.LabelSet{})
appender := nopAppender{} appender := nopAppender{}
@ -338,7 +338,7 @@ func TestTargetScrape404(t *testing.T) {
) )
defer server.Close() defer server.Close()
testTarget := newTestTarget(server.URL, 10*time.Millisecond, clientmodel.LabelSet{}) testTarget := newTestTarget(server.URL, 10*time.Millisecond, model.LabelSet{})
appender := nopAppender{} appender := nopAppender{}
want := errors.New("server returned HTTP status 404 Not Found") want := errors.New("server returned HTTP status 404 Not Found")
@ -381,7 +381,7 @@ func BenchmarkScrape(b *testing.B) {
) )
defer server.Close() defer server.Close()
testTarget := newTestTarget(server.URL, 100*time.Millisecond, clientmodel.LabelSet{"dings": "bums"}) testTarget := newTestTarget(server.URL, 100*time.Millisecond, model.LabelSet{"dings": "bums"})
appender := nopAppender{} appender := nopAppender{}
b.ResetTimer() b.ResetTimer()
@ -424,10 +424,10 @@ func TestURLParams(t *testing.T) {
"foo": []string{"bar", "baz"}, "foo": []string{"bar", "baz"},
}, },
}, },
clientmodel.LabelSet{ model.LabelSet{
clientmodel.SchemeLabel: clientmodel.LabelValue(serverURL.Scheme), model.SchemeLabel: model.LabelValue(serverURL.Scheme),
clientmodel.AddressLabel: clientmodel.LabelValue(serverURL.Host), model.AddressLabel: model.LabelValue(serverURL.Host),
"__param_foo": "bar", "__param_foo": "bar",
}, },
nil) nil)
app := &collectResultAppender{} app := &collectResultAppender{}
@ -436,7 +436,7 @@ func TestURLParams(t *testing.T) {
} }
} }
func newTestTarget(targetURL string, deadline time.Duration, baseLabels clientmodel.LabelSet) *Target { func newTestTarget(targetURL string, deadline time.Duration, baseLabels model.LabelSet) *Target {
cfg := &config.ScrapeConfig{ cfg := &config.ScrapeConfig{
ScrapeTimeout: config.Duration(deadline), ScrapeTimeout: config.Duration(deadline),
} }
@ -454,8 +454,8 @@ func newTestTarget(targetURL string, deadline time.Duration, baseLabels clientmo
scraperStopping: make(chan struct{}), scraperStopping: make(chan struct{}),
scraperStopped: make(chan struct{}), scraperStopped: make(chan struct{}),
} }
t.baseLabels = clientmodel.LabelSet{ t.baseLabels = model.LabelSet{
clientmodel.InstanceLabel: clientmodel.LabelValue(t.InstanceIdentifier()), model.InstanceLabel: model.LabelValue(t.InstanceIdentifier()),
} }
for baseLabel, baseValue := range baseLabels { for baseLabel, baseValue := range baseLabels {
t.baseLabels[baseLabel] = baseValue t.baseLabels[baseLabel] = baseValue


@ -20,7 +20,7 @@ import (
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/retrieval/discovery" "github.com/prometheus/prometheus/retrieval/discovery"
@ -52,7 +52,7 @@ type TargetProvider interface {
// target providers. // target providers.
type TargetManager struct { type TargetManager struct {
mtx sync.RWMutex mtx sync.RWMutex
globalLabels clientmodel.LabelSet globalLabels model.LabelSet
sampleAppender storage.SampleAppender sampleAppender storage.SampleAppender
running bool running bool
done chan struct{} done chan struct{}
@ -325,7 +325,7 @@ func (tm *TargetManager) Pools() map[string][]*Target {
for _, ts := range tm.targets { for _, ts := range tm.targets {
for _, t := range ts { for _, t := range ts {
job := string(t.BaseLabels()[clientmodel.JobLabel]) job := string(t.BaseLabels()[model.JobLabel])
pools[job] = append(pools[job], t) pools[job] = append(pools[job], t)
} }
} }
@ -452,7 +452,7 @@ func (tm *TargetManager) targetsFromGroup(tg *config.TargetGroup, cfg *config.Sc
targets := make([]*Target, 0, len(tg.Targets)) targets := make([]*Target, 0, len(tg.Targets))
for i, labels := range tg.Targets { for i, labels := range tg.Targets {
addr := string(labels[clientmodel.AddressLabel]) addr := string(labels[model.AddressLabel])
// If no port was provided, infer it based on the used scheme. // If no port was provided, infer it based on the used scheme.
if !strings.Contains(addr, ":") { if !strings.Contains(addr, ":") {
switch cfg.Scheme { switch cfg.Scheme {
@ -463,21 +463,21 @@ func (tm *TargetManager) targetsFromGroup(tg *config.TargetGroup, cfg *config.Sc
default: default:
panic(fmt.Errorf("targetsFromGroup: invalid scheme %q", cfg.Scheme)) panic(fmt.Errorf("targetsFromGroup: invalid scheme %q", cfg.Scheme))
} }
labels[clientmodel.AddressLabel] = clientmodel.LabelValue(addr) labels[model.AddressLabel] = model.LabelValue(addr)
} }
for k, v := range cfg.Params { for k, v := range cfg.Params {
if len(v) > 0 { if len(v) > 0 {
labels[clientmodel.LabelName(clientmodel.ParamLabelPrefix+k)] = clientmodel.LabelValue(v[0]) labels[model.LabelName(model.ParamLabelPrefix+k)] = model.LabelValue(v[0])
} }
} }
// Copy labels into the labelset for the target if they are not // Copy labels into the labelset for the target if they are not
// set already. Apply the labelsets in order of decreasing precedence. // set already. Apply the labelsets in order of decreasing precedence.
labelsets := []clientmodel.LabelSet{ labelsets := []model.LabelSet{
tg.Labels, tg.Labels,
{ {
clientmodel.SchemeLabel: clientmodel.LabelValue(cfg.Scheme), model.SchemeLabel: model.LabelValue(cfg.Scheme),
clientmodel.MetricsPathLabel: clientmodel.LabelValue(cfg.MetricsPath), model.MetricsPathLabel: model.LabelValue(cfg.MetricsPath),
clientmodel.JobLabel: clientmodel.LabelValue(cfg.JobName), model.JobLabel: model.LabelValue(cfg.JobName),
}, },
tm.globalLabels, tm.globalLabels,
} }
@ -489,7 +489,7 @@ func (tm *TargetManager) targetsFromGroup(tg *config.TargetGroup, cfg *config.Sc
} }
} }
if _, ok := labels[clientmodel.AddressLabel]; !ok { if _, ok := labels[model.AddressLabel]; !ok {
return nil, fmt.Errorf("instance %d in target group %s has no address", i, tg) return nil, fmt.Errorf("instance %d in target group %s has no address", i, tg)
} }
@ -507,7 +507,7 @@ func (tm *TargetManager) targetsFromGroup(tg *config.TargetGroup, cfg *config.Sc
for ln := range labels { for ln := range labels {
// Meta labels are deleted after relabelling. Other internal labels propagate to // Meta labels are deleted after relabelling. Other internal labels propagate to
// the target which decides whether they will be part of their label set. // the target which decides whether they will be part of their label set.
if strings.HasPrefix(string(ln), clientmodel.MetaLabelPrefix) { if strings.HasPrefix(string(ln), model.MetaLabelPrefix) {
delete(labels, ln) delete(labels, ln)
} }
} }
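A short sketch of the label layering performed above: defaults are applied in decreasing precedence, then all __meta_* labels are dropped before the target is built. The helper name and sample label sets are stand-ins.

package main

import (
	"fmt"
	"strings"

	"github.com/prometheus/common/model"
)

// finalizeLabels mirrors the two steps above: fill in defaults from label sets
// of decreasing precedence, then strip all __meta_* labels.
func finalizeLabels(labels model.LabelSet, defaults ...model.LabelSet) model.LabelSet {
	for _, lset := range defaults {
		for ln, lv := range lset {
			if _, ok := labels[ln]; !ok {
				labels[ln] = lv
			}
		}
	}
	for ln := range labels {
		if strings.HasPrefix(string(ln), model.MetaLabelPrefix) {
			delete(labels, ln)
		}
	}
	return labels
}

func main() {
	labels := model.LabelSet{
		model.AddressLabel:           "test-1:1234",
		model.MetaLabelPrefix + "dc": "us-east",
	}
	group := model.LabelSet{"group": "label"}
	job := model.LabelSet{model.JobLabel: "test_job1", model.SchemeLabel: "http"}
	fmt.Println(finalizeLabels(labels, group, job))
}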


@ -20,7 +20,7 @@ import (
"testing" "testing"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
) )
@ -28,12 +28,12 @@ import (
func TestPrefixedTargetProvider(t *testing.T) { func TestPrefixedTargetProvider(t *testing.T) {
targetGroups := []*config.TargetGroup{ targetGroups := []*config.TargetGroup{
{ {
Targets: []clientmodel.LabelSet{ Targets: []model.LabelSet{
{clientmodel.AddressLabel: "test-1:1234"}, {model.AddressLabel: "test-1:1234"},
}, },
}, { }, {
Targets: []clientmodel.LabelSet{ Targets: []model.LabelSet{
{clientmodel.AddressLabel: "test-1:1235"}, {model.AddressLabel: "test-1:1235"},
}, },
}, },
} }
@ -78,9 +78,9 @@ func TestTargetManagerChan(t *testing.T) {
JobName: "test_job1", JobName: "test_job1",
ScrapeInterval: config.Duration(1 * time.Minute), ScrapeInterval: config.Duration(1 * time.Minute),
TargetGroups: []*config.TargetGroup{{ TargetGroups: []*config.TargetGroup{{
Targets: []clientmodel.LabelSet{ Targets: []model.LabelSet{
{clientmodel.AddressLabel: "example.org:80"}, {model.AddressLabel: "example.org:80"},
{clientmodel.AddressLabel: "example.com:80"}, {model.AddressLabel: "example.com:80"},
}, },
}}, }},
} }
@ -101,72 +101,72 @@ func TestTargetManagerChan(t *testing.T) {
sequence := []struct { sequence := []struct {
tgroup *config.TargetGroup tgroup *config.TargetGroup
expected map[string][]clientmodel.LabelSet expected map[string][]model.LabelSet
}{ }{
{ {
tgroup: &config.TargetGroup{ tgroup: &config.TargetGroup{
Source: "src1", Source: "src1",
Targets: []clientmodel.LabelSet{ Targets: []model.LabelSet{
{clientmodel.AddressLabel: "test-1:1234"}, {model.AddressLabel: "test-1:1234"},
{clientmodel.AddressLabel: "test-2:1234", "label": "set"}, {model.AddressLabel: "test-2:1234", "label": "set"},
{clientmodel.AddressLabel: "test-3:1234"}, {model.AddressLabel: "test-3:1234"},
}, },
}, },
expected: map[string][]clientmodel.LabelSet{ expected: map[string][]model.LabelSet{
"src1": { "src1": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-1:1234"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-2:1234", "label": "set"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1234", "label": "set"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-3:1234"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
}, },
}, },
}, { }, {
tgroup: &config.TargetGroup{ tgroup: &config.TargetGroup{
Source: "src2", Source: "src2",
Targets: []clientmodel.LabelSet{ Targets: []model.LabelSet{
{clientmodel.AddressLabel: "test-1:1235"}, {model.AddressLabel: "test-1:1235"},
{clientmodel.AddressLabel: "test-2:1235"}, {model.AddressLabel: "test-2:1235"},
{clientmodel.AddressLabel: "test-3:1235"}, {model.AddressLabel: "test-3:1235"},
}, },
Labels: clientmodel.LabelSet{"group": "label"}, Labels: model.LabelSet{"group": "label"},
}, },
expected: map[string][]clientmodel.LabelSet{ expected: map[string][]model.LabelSet{
"src1": { "src1": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-1:1234"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-2:1234", "label": "set"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1234", "label": "set"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-3:1234"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
}, },
"src2": { "src2": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-1:1235", "group": "label"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1235", "group": "label"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-2:1235", "group": "label"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1235", "group": "label"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-3:1235", "group": "label"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1235", "group": "label"},
}, },
}, },
}, { }, {
tgroup: &config.TargetGroup{ tgroup: &config.TargetGroup{
Source: "src2", Source: "src2",
Targets: []clientmodel.LabelSet{}, Targets: []model.LabelSet{},
}, },
expected: map[string][]clientmodel.LabelSet{ expected: map[string][]model.LabelSet{
"src1": { "src1": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-1:1234"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-2:1234", "label": "set"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1234", "label": "set"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-3:1234"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
}, },
}, },
}, { }, {
tgroup: &config.TargetGroup{ tgroup: &config.TargetGroup{
Source: "src1", Source: "src1",
Targets: []clientmodel.LabelSet{ Targets: []model.LabelSet{
{clientmodel.AddressLabel: "test-1:1234", "added": "label"}, {model.AddressLabel: "test-1:1234", "added": "label"},
{clientmodel.AddressLabel: "test-3:1234"}, {model.AddressLabel: "test-3:1234"},
{clientmodel.AddressLabel: "test-4:1234", "fancy": "label"}, {model.AddressLabel: "test-4:1234", "fancy": "label"},
}, },
}, },
expected: map[string][]clientmodel.LabelSet{ expected: map[string][]model.LabelSet{
"src1": { "src1": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-1:1234", "added": "label"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234", "added": "label"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-3:1234"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-4:1234", "fancy": "label"}, {model.JobLabel: "test_job1", model.InstanceLabel: "test-4:1234", "fancy": "label"},
}, },
}, },
}, },
@ -210,15 +210,15 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
"testParam": []string{"paramValue", "secondValue"}, "testParam": []string{"paramValue", "secondValue"},
}, },
TargetGroups: []*config.TargetGroup{{ TargetGroups: []*config.TargetGroup{{
Targets: []clientmodel.LabelSet{ Targets: []model.LabelSet{
{clientmodel.AddressLabel: "example.org:80"}, {model.AddressLabel: "example.org:80"},
{clientmodel.AddressLabel: "example.com:80"}, {model.AddressLabel: "example.com:80"},
}, },
}}, }},
RelabelConfigs: []*config.RelabelConfig{ RelabelConfigs: []*config.RelabelConfig{
{ {
// Copy out the URL parameter. // Copy out the URL parameter.
SourceLabels: clientmodel.LabelNames{"__param_testParam"}, SourceLabels: model.LabelNames{"__param_testParam"},
Regex: &config.Regexp{*regexp.MustCompile("^(.*)$")}, Regex: &config.Regexp{*regexp.MustCompile("^(.*)$")},
TargetLabel: "testParam", TargetLabel: "testParam",
Replacement: "$1", Replacement: "$1",
@ -231,38 +231,38 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
ScrapeInterval: config.Duration(1 * time.Minute), ScrapeInterval: config.Duration(1 * time.Minute),
TargetGroups: []*config.TargetGroup{ TargetGroups: []*config.TargetGroup{
{ {
Targets: []clientmodel.LabelSet{ Targets: []model.LabelSet{
{clientmodel.AddressLabel: "example.org:8080"}, {model.AddressLabel: "example.org:8080"},
{clientmodel.AddressLabel: "example.com:8081"}, {model.AddressLabel: "example.com:8081"},
}, },
Labels: clientmodel.LabelSet{ Labels: model.LabelSet{
"foo": "bar", "foo": "bar",
"boom": "box", "boom": "box",
}, },
}, },
{ {
Targets: []clientmodel.LabelSet{ Targets: []model.LabelSet{
{clientmodel.AddressLabel: "test.com:1234"}, {model.AddressLabel: "test.com:1234"},
}, },
}, },
{ {
Targets: []clientmodel.LabelSet{ Targets: []model.LabelSet{
{clientmodel.AddressLabel: "test.com:1235"}, {model.AddressLabel: "test.com:1235"},
}, },
Labels: clientmodel.LabelSet{"instance": "fixed"}, Labels: model.LabelSet{"instance": "fixed"},
}, },
}, },
RelabelConfigs: []*config.RelabelConfig{ RelabelConfigs: []*config.RelabelConfig{
{ {
SourceLabels: clientmodel.LabelNames{clientmodel.AddressLabel}, SourceLabels: model.LabelNames{model.AddressLabel},
Regex: &config.Regexp{*regexp.MustCompile(`^test\.(.*?):(.*)`)}, Regex: &config.Regexp{*regexp.MustCompile(`^test\.(.*?):(.*)`)},
Replacement: "foo.${1}:${2}", Replacement: "foo.${1}:${2}",
TargetLabel: clientmodel.AddressLabel, TargetLabel: model.AddressLabel,
Action: config.RelabelReplace, Action: config.RelabelReplace,
}, },
{ {
// Add a new label for example.* targets. // Add a new label for example.* targets.
SourceLabels: clientmodel.LabelNames{clientmodel.AddressLabel, "boom", "foo"}, SourceLabels: model.LabelNames{model.AddressLabel, "boom", "foo"},
Regex: &config.Regexp{*regexp.MustCompile("^example.*?-b([a-z-]+)r$")}, Regex: &config.Regexp{*regexp.MustCompile("^example.*?-b([a-z-]+)r$")},
TargetLabel: "new", TargetLabel: "new",
Replacement: "$1", Replacement: "$1",
@ -271,7 +271,7 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
}, },
{ {
// Drop an existing label. // Drop an existing label.
SourceLabels: clientmodel.LabelNames{"boom"}, SourceLabels: model.LabelNames{"boom"},
Regex: &config.Regexp{*regexp.MustCompile(".*")}, Regex: &config.Regexp{*regexp.MustCompile(".*")},
TargetLabel: "boom", TargetLabel: "boom",
Replacement: "", Replacement: "",
@ -282,57 +282,57 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
sequence := []struct { sequence := []struct {
scrapeConfigs []*config.ScrapeConfig scrapeConfigs []*config.ScrapeConfig
expected map[string][]clientmodel.LabelSet expected map[string][]model.LabelSet
}{ }{
{ {
scrapeConfigs: []*config.ScrapeConfig{testJob1}, scrapeConfigs: []*config.ScrapeConfig{testJob1},
expected: map[string][]clientmodel.LabelSet{ expected: map[string][]model.LabelSet{
"test_job1:static:0:0": { "test_job1:static:0:0": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.org:80", "testParam": "paramValue"}, {model.JobLabel: "test_job1", model.InstanceLabel: "example.org:80", "testParam": "paramValue"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.com:80", "testParam": "paramValue"}, {model.JobLabel: "test_job1", model.InstanceLabel: "example.com:80", "testParam": "paramValue"},
}, },
}, },
}, { }, {
scrapeConfigs: []*config.ScrapeConfig{testJob1}, scrapeConfigs: []*config.ScrapeConfig{testJob1},
expected: map[string][]clientmodel.LabelSet{ expected: map[string][]model.LabelSet{
"test_job1:static:0:0": { "test_job1:static:0:0": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.org:80", "testParam": "paramValue"}, {model.JobLabel: "test_job1", model.InstanceLabel: "example.org:80", "testParam": "paramValue"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.com:80", "testParam": "paramValue"}, {model.JobLabel: "test_job1", model.InstanceLabel: "example.com:80", "testParam": "paramValue"},
}, },
}, },
}, { }, {
scrapeConfigs: []*config.ScrapeConfig{testJob1, testJob2}, scrapeConfigs: []*config.ScrapeConfig{testJob1, testJob2},
expected: map[string][]clientmodel.LabelSet{ expected: map[string][]model.LabelSet{
"test_job1:static:0:0": { "test_job1:static:0:0": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.org:80", "testParam": "paramValue"}, {model.JobLabel: "test_job1", model.InstanceLabel: "example.org:80", "testParam": "paramValue"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.com:80", "testParam": "paramValue"}, {model.JobLabel: "test_job1", model.InstanceLabel: "example.com:80", "testParam": "paramValue"},
}, },
"test_job2:static:0:0": { "test_job2:static:0:0": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "example.org:8080", "foo": "bar", "new": "ox-ba"}, {model.JobLabel: "test_job2", model.InstanceLabel: "example.org:8080", "foo": "bar", "new": "ox-ba"},
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "example.com:8081", "foo": "bar", "new": "ox-ba"}, {model.JobLabel: "test_job2", model.InstanceLabel: "example.com:8081", "foo": "bar", "new": "ox-ba"},
}, },
"test_job2:static:0:1": { "test_job2:static:0:1": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "foo.com:1234"}, {model.JobLabel: "test_job2", model.InstanceLabel: "foo.com:1234"},
}, },
"test_job2:static:0:2": { "test_job2:static:0:2": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "fixed"}, {model.JobLabel: "test_job2", model.InstanceLabel: "fixed"},
}, },
}, },
}, { }, {
scrapeConfigs: []*config.ScrapeConfig{}, scrapeConfigs: []*config.ScrapeConfig{},
expected: map[string][]clientmodel.LabelSet{}, expected: map[string][]model.LabelSet{},
}, { }, {
scrapeConfigs: []*config.ScrapeConfig{testJob2}, scrapeConfigs: []*config.ScrapeConfig{testJob2},
expected: map[string][]clientmodel.LabelSet{ expected: map[string][]model.LabelSet{
"test_job2:static:0:0": { "test_job2:static:0:0": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "example.org:8080", "foo": "bar", "new": "ox-ba"}, {model.JobLabel: "test_job2", model.InstanceLabel: "example.org:8080", "foo": "bar", "new": "ox-ba"},
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "example.com:8081", "foo": "bar", "new": "ox-ba"}, {model.JobLabel: "test_job2", model.InstanceLabel: "example.com:8081", "foo": "bar", "new": "ox-ba"},
}, },
"test_job2:static:0:1": { "test_job2:static:0:1": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "foo.com:1234"}, {model.JobLabel: "test_job2", model.InstanceLabel: "foo.com:1234"},
}, },
"test_job2:static:0:2": { "test_job2:static:0:2": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "fixed"}, {model.JobLabel: "test_job2", model.InstanceLabel: "fixed"},
}, },
}, },
}, },

View file

@ -19,7 +19,7 @@ import (
"sync" "sync"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/util/strutil" "github.com/prometheus/prometheus/util/strutil"
@ -27,12 +27,12 @@ import (
const ( const (
// AlertMetricName is the metric name for synthetic alert timeseries. // AlertMetricName is the metric name for synthetic alert timeseries.
alertMetricName clientmodel.LabelValue = "ALERTS" alertMetricName model.LabelValue = "ALERTS"
// AlertNameLabel is the label name indicating the name of an alert. // AlertNameLabel is the label name indicating the name of an alert.
alertNameLabel clientmodel.LabelName = "alertname" alertNameLabel model.LabelName = "alertname"
// AlertStateLabel is the label name indicating the state of an alert. // AlertStateLabel is the label name indicating the state of an alert.
alertStateLabel clientmodel.LabelName = "alertstate" alertStateLabel model.LabelName = "alertstate"
) )
// AlertState denotes the state of an active alert. // AlertState denotes the state of an active alert.
@ -67,28 +67,28 @@ type Alert struct {
// The name of the alert. // The name of the alert.
Name string Name string
// The vector element labelset triggering this alert. // The vector element labelset triggering this alert.
Labels clientmodel.LabelSet Labels model.LabelSet
// The state of the alert (Pending or Firing). // The state of the alert (Pending or Firing).
State AlertState State AlertState
// The time when the alert first transitioned into Pending state. // The time when the alert first transitioned into Pending state.
ActiveSince clientmodel.Timestamp ActiveSince model.Time
// The value of the alert expression for this vector element. // The value of the alert expression for this vector element.
Value clientmodel.SampleValue Value model.SampleValue
} }
// sample returns a Sample suitable for recording the alert. // sample returns a Sample suitable for recording the alert.
func (a Alert) sample(timestamp clientmodel.Timestamp, value clientmodel.SampleValue) *promql.Sample { func (a Alert) sample(timestamp model.Time, value model.SampleValue) *promql.Sample {
recordedMetric := clientmodel.Metric{} recordedMetric := model.Metric{}
for label, value := range a.Labels { for label, value := range a.Labels {
recordedMetric[label] = value recordedMetric[label] = value
} }
recordedMetric[clientmodel.MetricNameLabel] = alertMetricName recordedMetric[model.MetricNameLabel] = alertMetricName
recordedMetric[alertNameLabel] = clientmodel.LabelValue(a.Name) recordedMetric[alertNameLabel] = model.LabelValue(a.Name)
recordedMetric[alertStateLabel] = clientmodel.LabelValue(a.State.String()) recordedMetric[alertStateLabel] = model.LabelValue(a.State.String())
return &promql.Sample{ return &promql.Sample{
Metric: clientmodel.COWMetric{ Metric: model.COWMetric{
Metric: recordedMetric, Metric: recordedMetric,
Copied: true, Copied: true,
}, },
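Alert.sample above records an active alert as a synthetic ALERTS timeseries: the labels of the triggering vector element plus the metric name, alert name and alert state. A small sketch of the resulting metric, using a plain model.Metric instead of the COWMetric wrapper and invented alert labels.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	// Labels of the vector element that triggered a hypothetical alert.
	alertLabels := model.LabelSet{"instance": "example.org:80", "severity": "critical"}

	recorded := model.Metric{}
	for ln, lv := range alertLabels {
		recorded[ln] = lv
	}
	recorded[model.MetricNameLabel] = "ALERTS"
	recorded["alertname"] = "HTTPRequestRateLow"
	recorded["alertstate"] = "pending"

	// Prints the synthetic series: ALERTS plus alertname/alertstate and the
	// element labels.
	fmt.Println(recorded)
}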
@ -107,7 +107,7 @@ type AlertingRule struct {
// output vector before an alert transitions from Pending to Firing state. // output vector before an alert transitions from Pending to Firing state.
holdDuration time.Duration holdDuration time.Duration
// Extra labels to attach to the resulting alert sample vectors. // Extra labels to attach to the resulting alert sample vectors.
labels clientmodel.LabelSet labels model.LabelSet
// Short alert summary, suitable for email subjects. // Short alert summary, suitable for email subjects.
summary string summary string
// More detailed alert description. // More detailed alert description.
@ -119,7 +119,7 @@ type AlertingRule struct {
mutex sync.Mutex mutex sync.Mutex
// A map of alerts which are currently active (Pending or Firing), keyed by // A map of alerts which are currently active (Pending or Firing), keyed by
// the fingerprint of the labelset they correspond to. // the fingerprint of the labelset they correspond to.
activeAlerts map[clientmodel.Fingerprint]*Alert activeAlerts map[model.Fingerprint]*Alert
} }
// NewAlertingRule constructs a new AlertingRule. // NewAlertingRule constructs a new AlertingRule.
@ -127,7 +127,7 @@ func NewAlertingRule(
name string, name string,
vector promql.Expr, vector promql.Expr,
holdDuration time.Duration, holdDuration time.Duration,
labels clientmodel.LabelSet, labels model.LabelSet,
summary string, summary string,
description string, description string,
runbook string, runbook string,
@ -141,7 +141,7 @@ func NewAlertingRule(
description: description, description: description,
runbook: runbook, runbook: runbook,
activeAlerts: map[clientmodel.Fingerprint]*Alert{}, activeAlerts: map[model.Fingerprint]*Alert{},
} }
} }
@ -152,7 +152,7 @@ func (rule *AlertingRule) Name() string {
// eval evaluates the rule expression and then creates pending alerts and fires // eval evaluates the rule expression and then creates pending alerts and fires
// or removes previously pending alerts accordingly. // or removes previously pending alerts accordingly.
func (rule *AlertingRule) eval(timestamp clientmodel.Timestamp, engine *promql.Engine) (promql.Vector, error) { func (rule *AlertingRule) eval(timestamp model.Time, engine *promql.Engine) (promql.Vector, error) {
query, err := engine.NewInstantQuery(rule.vector.String(), timestamp) query, err := engine.NewInstantQuery(rule.vector.String(), timestamp)
if err != nil { if err != nil {
return nil, err return nil, err
@ -167,17 +167,16 @@ func (rule *AlertingRule) eval(timestamp clientmodel.Timestamp, engine *promql.E
// Create pending alerts for any new vector elements in the alert expression // Create pending alerts for any new vector elements in the alert expression
// or update the expression value for existing elements. // or update the expression value for existing elements.
resultFPs := map[clientmodel.Fingerprint]struct{}{} resultFPs := map[model.Fingerprint]struct{}{}
for _, sample := range exprResult { for _, sample := range exprResult {
fp := sample.Metric.Metric.Fingerprint() fp := sample.Metric.Metric.Fingerprint()
resultFPs[fp] = struct{}{} resultFPs[fp] = struct{}{}
if alert, ok := rule.activeAlerts[fp]; !ok { if alert, ok := rule.activeAlerts[fp]; !ok {
labels := clientmodel.LabelSet{} labels := model.LabelSet(sample.Metric.Metric.Clone())
labels.MergeFromMetric(sample.Metric.Metric)
labels = labels.Merge(rule.labels) labels = labels.Merge(rule.labels)
if _, ok := labels[clientmodel.MetricNameLabel]; ok { if _, ok := labels[model.MetricNameLabel]; ok {
delete(labels, clientmodel.MetricNameLabel) delete(labels, model.MetricNameLabel)
} }
rule.activeAlerts[fp] = &Alert{ rule.activeAlerts[fp] = &Alert{
Name: rule.name, Name: rule.name,
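One behavioural detail of the migration shows up above: in common/model a Metric is itself a label map, so the old MergeFromMetric call becomes a plain conversion of a cloned Metric to a LabelSet, followed by merging in the rule labels and dropping __name__. A small stand-alone sketch of that conversion with invented label values.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	sampleMetric := model.Metric{
		model.MetricNameLabel: "http_requests_total",
		"instance":            "example.org:80",
	}
	ruleLabels := model.LabelSet{"severity": "critical"}

	// Clone so the original sample metric is not mutated, convert to a
	// LabelSet, merge the rule's extra labels and drop the metric name.
	labels := model.LabelSet(sampleMetric.Clone())
	labels = labels.Merge(ruleLabels)
	delete(labels, model.MetricNameLabel)

	fmt.Println(labels) // instance and severity remain, __name__ is gone
}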
@ -231,9 +230,9 @@ func (rule *AlertingRule) String() string {
// resulting snippet is expected to be presented in a <pre> element, so that // resulting snippet is expected to be presented in a <pre> element, so that
// line breaks and other returned whitespace are respected. // line breaks and other returned whitespace are respected.
func (rule *AlertingRule) HTMLSnippet(pathPrefix string) template.HTML { func (rule *AlertingRule) HTMLSnippet(pathPrefix string) template.HTML {
alertMetric := clientmodel.Metric{ alertMetric := model.Metric{
clientmodel.MetricNameLabel: alertMetricName, model.MetricNameLabel: alertMetricName,
alertNameLabel: clientmodel.LabelValue(rule.name), alertNameLabel: model.LabelValue(rule.name),
} }
s := fmt.Sprintf("ALERT <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(alertMetric.String()), rule.name) s := fmt.Sprintf("ALERT <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(alertMetric.String()), rule.name)
s += fmt.Sprintf("\n IF <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(rule.vector.String()), rule.vector) s += fmt.Sprintf("\n IF <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(rule.vector.String()), rule.vector)

View file

@ -26,7 +26,7 @@ import (
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/notification" "github.com/prometheus/prometheus/notification"
@ -81,7 +81,7 @@ type Rule interface {
// Name returns the name of the rule. // Name returns the name of the rule.
Name() string Name() string
// Eval evaluates the rule, including any associated recording or alerting actions. // Eval evaluates the rule, including any associated recording or alerting actions.
eval(clientmodel.Timestamp, *promql.Engine) (promql.Vector, error) eval(model.Time, *promql.Engine) (promql.Vector, error)
// String returns a human-readable string representation of the rule. // String returns a human-readable string representation of the rule.
String() string String() string
// HTMLSnippet returns a human-readable string representation of the rule, // HTMLSnippet returns a human-readable string representation of the rule,
@ -179,7 +179,7 @@ func (m *Manager) Stop() {
m.done <- true m.done <- true
} }
func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp clientmodel.Timestamp) { func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp model.Time) {
activeAlerts := rule.ActiveAlerts() activeAlerts := rule.ActiveAlerts()
if len(activeAlerts) == 0 { if len(activeAlerts) == 0 {
return return
@ -199,7 +199,7 @@ func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp clientmo
} }
tmplData := struct { tmplData := struct {
Labels map[string]string Labels map[string]string
Value clientmodel.SampleValue Value model.SampleValue
}{ }{
Labels: l, Labels: l,
Value: aa.Value, Value: aa.Value,
@ -222,8 +222,8 @@ func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp clientmo
Summary: expand(rule.summary), Summary: expand(rule.summary),
Description: expand(rule.description), Description: expand(rule.description),
Runbook: rule.runbook, Runbook: rule.runbook,
Labels: aa.Labels.Merge(clientmodel.LabelSet{ Labels: aa.Labels.Merge(model.LabelSet{
alertNameLabel: clientmodel.LabelValue(rule.Name()), alertNameLabel: model.LabelValue(rule.Name()),
}), }),
Value: aa.Value, Value: aa.Value,
ActiveSince: aa.ActiveSince.Time(), ActiveSince: aa.ActiveSince.Time(),
@ -235,7 +235,7 @@ func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp clientmo
} }
func (m *Manager) runIteration() { func (m *Manager) runIteration() {
now := clientmodel.Now() now := model.Now()
wg := sync.WaitGroup{} wg := sync.WaitGroup{}
m.Lock() m.Lock()
@ -274,7 +274,7 @@ func (m *Manager) runIteration() {
} }
for _, s := range vector { for _, s := range vector {
m.sampleAppender.Append(&clientmodel.Sample{ m.sampleAppender.Append(&model.Sample{
Metric: s.Metric.Metric, Metric: s.Metric.Metric,
Value: s.Value, Value: s.Value,
Timestamp: s.Timestamp, Timestamp: s.Timestamp,
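runIteration above hands every element of the evaluated vector to the storage as a *model.Sample. A minimal sketch of the value that gets appended; the printAppender type is an illustrative stand-in, not Prometheus' real sample appender interface.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// printAppender is an illustrative stand-in for the storage's sample appender.
type printAppender struct{}

func (printAppender) Append(s *model.Sample) {
	fmt.Printf("append %v = %v @ %v\n", s.Metric, s.Value, s.Timestamp)
}

func main() {
	var app printAppender
	app.Append(&model.Sample{
		Metric: model.Metric{
			model.MetricNameLabel: "job:http_requests:rate5m",
			"job":                 "node",
		},
		Value:     42,
		Timestamp: model.Now(),
	})
}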

View file

@ -20,7 +20,7 @@ import (
"testing" "testing"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
) )
@ -55,7 +55,7 @@ func TestAlertingRule(t *testing.T) {
"HTTPRequestRateLow", "HTTPRequestRateLow",
expr, expr,
time.Minute, time.Minute,
clientmodel.LabelSet{"severity": "critical"}, model.LabelSet{"severity": "critical"},
"summary", "description", "runbook", "summary", "description", "runbook",
) )
@ -95,7 +95,7 @@ func TestAlertingRule(t *testing.T) {
} }
for i, test := range tests { for i, test := range tests {
evalTime := clientmodel.Timestamp(0).Add(test.time) evalTime := model.Time(0).Add(test.time)
res, err := rule.eval(evalTime, suite.QueryEngine()) res, err := rule.eval(evalTime, suite.QueryEngine())
if err != nil { if err != nil {
@ -131,7 +131,7 @@ func TestAlertingRule(t *testing.T) {
} }
} }
func annotateWithTime(lines []string, timestamp clientmodel.Timestamp) []string { func annotateWithTime(lines []string, timestamp model.Time) []string {
annotatedLines := []string{} annotatedLines := []string{}
for _, line := range lines { for _, line := range lines {
annotatedLines = append(annotatedLines, fmt.Sprintf(line, timestamp)) annotatedLines = append(annotatedLines, fmt.Sprintf(line, timestamp))
@ -149,7 +149,7 @@ func TestTransferAlertState(t *testing.T) {
arule := AlertingRule{ arule := AlertingRule{
name: "test", name: "test",
activeAlerts: map[clientmodel.Fingerprint]*Alert{}, activeAlerts: map[model.Fingerprint]*Alert{},
} }
aruleCopy := arule aruleCopy := arule
@ -166,7 +166,7 @@ func TestTransferAlertState(t *testing.T) {
m.rules = []Rule{ m.rules = []Rule{
&AlertingRule{ &AlertingRule{
name: "test_other", name: "test_other",
activeAlerts: map[clientmodel.Fingerprint]*Alert{}, activeAlerts: map[model.Fingerprint]*Alert{},
}, },
&aruleCopy, &aruleCopy,
} }

View file

@ -17,7 +17,7 @@ import (
"fmt" "fmt"
"html/template" "html/template"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/util/strutil" "github.com/prometheus/prometheus/util/strutil"
@ -27,11 +27,11 @@ import (
type RecordingRule struct { type RecordingRule struct {
name string name string
vector promql.Expr vector promql.Expr
labels clientmodel.LabelSet labels model.LabelSet
} }
// NewRecordingRule returns a new recording rule. // NewRecordingRule returns a new recording rule.
func NewRecordingRule(name string, vector promql.Expr, labels clientmodel.LabelSet) *RecordingRule { func NewRecordingRule(name string, vector promql.Expr, labels model.LabelSet) *RecordingRule {
return &RecordingRule{ return &RecordingRule{
name: name, name: name,
vector: vector, vector: vector,
@ -43,7 +43,7 @@ func NewRecordingRule(name string, vector promql.Expr, labels clientmodel.LabelS
func (rule RecordingRule) Name() string { return rule.name } func (rule RecordingRule) Name() string { return rule.name }
// eval evaluates the rule and then overrides the metric names and labels accordingly. // eval evaluates the rule and then overrides the metric names and labels accordingly.
func (rule RecordingRule) eval(timestamp clientmodel.Timestamp, engine *promql.Engine) (promql.Vector, error) { func (rule RecordingRule) eval(timestamp model.Time, engine *promql.Engine) (promql.Vector, error) {
query, err := engine.NewInstantQuery(rule.vector.String(), timestamp) query, err := engine.NewInstantQuery(rule.vector.String(), timestamp)
if err != nil { if err != nil {
return nil, err return nil, err
@ -69,10 +69,10 @@ func (rule RecordingRule) eval(timestamp clientmodel.Timestamp, engine *promql.E
// Override the metric name and labels. // Override the metric name and labels.
for _, sample := range vector { for _, sample := range vector {
sample.Metric.Set(clientmodel.MetricNameLabel, clientmodel.LabelValue(rule.name)) sample.Metric.Set(model.MetricNameLabel, model.LabelValue(rule.name))
for label, value := range rule.labels { for label, value := range rule.labels {
if value == "" { if value == "" {
sample.Metric.Delete(label) sample.Metric.Del(label)
} else { } else {
sample.Metric.Set(label, value) sample.Metric.Set(label, value)
} }
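Besides the package rename, the hunk above also picks up the new method name on the copy-on-write metric (Del instead of Delete). The underlying behaviour is sketched here with a plain model.Metric instead of the COWMetric wrapper: the metric name is overwritten with the rule name, and a rule label with an empty value removes that label from the result. All concrete names and values below are invented.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	result := model.Metric{
		model.MetricNameLabel: "http_requests_total",
		"instance":            "example.org:80",
		"boom":                "box",
	}
	ruleName := model.LabelValue("job:http_requests:rate5m")
	ruleLabels := model.LabelSet{"foo": "bar", "boom": ""}

	// Override the metric name and apply the rule labels; an empty value
	// deletes the label, mirroring RecordingRule.eval above.
	result[model.MetricNameLabel] = ruleName
	for ln, lv := range ruleLabels {
		if lv == "" {
			delete(result, ln)
		} else {
			result[ln] = lv
		}
	}
	fmt.Println(result)
}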

View file

@ -20,7 +20,7 @@ import (
"sync" "sync"
"sync/atomic" "sync/atomic"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
) )
@ -59,8 +59,8 @@ type chunkDesc struct {
sync.Mutex sync.Mutex
c chunk // nil if chunk is evicted. c chunk // nil if chunk is evicted.
rCnt int rCnt int
chunkFirstTime clientmodel.Timestamp // Used if chunk is evicted. chunkFirstTime model.Time // Used if chunk is evicted.
chunkLastTime clientmodel.Timestamp // Used if chunk is evicted. chunkLastTime model.Time // Used if chunk is evicted.
// evictListElement is nil if the chunk is not in the evict list. // evictListElement is nil if the chunk is not in the evict list.
// evictListElement is _not_ protected by the chunkDesc mutex. // evictListElement is _not_ protected by the chunkDesc mutex.
@ -123,7 +123,7 @@ func (cd *chunkDesc) refCount() int {
return cd.rCnt return cd.rCnt
} }
func (cd *chunkDesc) firstTime() clientmodel.Timestamp { func (cd *chunkDesc) firstTime() model.Time {
cd.Lock() cd.Lock()
defer cd.Unlock() defer cd.Unlock()
@ -133,7 +133,7 @@ func (cd *chunkDesc) firstTime() clientmodel.Timestamp {
return cd.c.firstTime() return cd.c.firstTime()
} }
func (cd *chunkDesc) lastTime() clientmodel.Timestamp { func (cd *chunkDesc) lastTime() model.Time {
cd.Lock() cd.Lock()
defer cd.Unlock() defer cd.Unlock()
@ -164,7 +164,7 @@ func (cd *chunkDesc) isEvicted() bool {
return cd.c == nil return cd.c == nil
} }
func (cd *chunkDesc) contains(t clientmodel.Timestamp) bool { func (cd *chunkDesc) contains(t model.Time) bool {
return !t.Before(cd.firstTime()) && !t.After(cd.lastTime()) return !t.Before(cd.firstTime()) && !t.After(cd.lastTime())
} }
@ -217,7 +217,7 @@ type chunk interface {
// the relevant one and discard the original chunk. // the relevant one and discard the original chunk.
add(sample *metric.SamplePair) []chunk add(sample *metric.SamplePair) []chunk
clone() chunk clone() chunk
firstTime() clientmodel.Timestamp firstTime() model.Time
newIterator() chunkIterator newIterator() chunkIterator
marshal(io.Writer) error marshal(io.Writer) error
unmarshal(io.Reader) error unmarshal(io.Reader) error
@ -232,24 +232,24 @@ type chunkIterator interface {
// length returns the number of samples in the chunk. // length returns the number of samples in the chunk.
length() int length() int
// Gets the timestamp of the n-th sample in the chunk. // Gets the timestamp of the n-th sample in the chunk.
timestampAtIndex(int) clientmodel.Timestamp timestampAtIndex(int) model.Time
// Gets the last timestamp in the chunk. // Gets the last timestamp in the chunk.
lastTimestamp() clientmodel.Timestamp lastTimestamp() model.Time
// Gets the sample value of the n-th sample in the chunk. // Gets the sample value of the n-th sample in the chunk.
sampleValueAtIndex(int) clientmodel.SampleValue sampleValueAtIndex(int) model.SampleValue
// Gets the last sample value in the chunk. // Gets the last sample value in the chunk.
lastSampleValue() clientmodel.SampleValue lastSampleValue() model.SampleValue
// Gets the two values that are immediately adjacent to a given time. In // Gets the two values that are immediately adjacent to a given time. In
// case a value exists at precisely the given time, only that single // case a value exists at precisely the given time, only that single
// value is returned. Only the first or last value is returned (as a // value is returned. Only the first or last value is returned (as a
// single value), if the given time is before or after the first or last // single value), if the given time is before or after the first or last
// value, respectively. // value, respectively.
valueAtTime(clientmodel.Timestamp) metric.Values valueAtTime(model.Time) metric.Values
// Gets all values contained within a given interval. // Gets all values contained within a given interval.
rangeValues(metric.Interval) metric.Values rangeValues(metric.Interval) metric.Values
// Whether a given timestamp is contained between first and last value // Whether a given timestamp is contained between first and last value
// in the chunk. // in the chunk.
contains(clientmodel.Timestamp) bool contains(model.Time) bool
// values returns a channel, from which all sample values in the chunk // values returns a channel, from which all sample values in the chunk
// can be received in order. The channel is closed after the last // can be received in order. The channel is closed after the last
// one. It is generally not safe to mutate the chunk while the channel // one. It is generally not safe to mutate the chunk while the channel
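The valueAtTime contract described above (an exact hit yields a single value, a time between two samples yields both neighbours, and a time outside the chunk is clamped to the first or last value) can be illustrated with a stand-alone sketch over a sorted slice. samplePair is a local stand-in for metric.SamplePair, and the clamping behaviour is my reading of the comment, not a copy of the real iterator.

package main

import (
	"fmt"
	"sort"

	"github.com/prometheus/common/model"
)

// samplePair is a local stand-in for metric.SamplePair.
type samplePair struct {
	Timestamp model.Time
	Value     model.SampleValue
}

// valueAtTime mimics the chunkIterator contract sketched in the comments:
// an exact match returns one value, an in-between time returns the two
// neighbours, and times before the first or after the last sample return
// just that edge value.
func valueAtTime(samples []samplePair, t model.Time) []samplePair {
	i := sort.Search(len(samples), func(i int) bool {
		return !samples[i].Timestamp.Before(t)
	})
	switch {
	case i == len(samples):
		return samples[len(samples)-1:]
	case samples[i].Timestamp.Equal(t) || i == 0:
		return samples[i : i+1]
	default:
		return samples[i-1 : i+1]
	}
}

func main() {
	samples := []samplePair{{1000, 1}, {2000, 2}, {3000, 4}}
	fmt.Println(valueAtTime(samples, 2000)) // exact match: one value
	fmt.Println(valueAtTime(samples, 2500)) // in between: two neighbours
	fmt.Println(valueAtTime(samples, 10))   // before the first sample: first value only
}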

View file

@ -37,7 +37,7 @@ import (
"io" "io"
"sync" "sync"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
) )
@ -150,9 +150,9 @@ func decodeString(b byteReader) (string, error) {
return string(buf), nil return string(buf), nil
} }
// A Metric is a clientmodel.Metric that implements // A Metric is a model.Metric that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. // encoding.BinaryMarshaler and encoding.BinaryUnmarshaler.
type Metric clientmodel.Metric type Metric model.Metric
// MarshalBinary implements encoding.BinaryMarshaler. // MarshalBinary implements encoding.BinaryMarshaler.
func (m Metric) MarshalBinary() ([]byte, error) { func (m Metric) MarshalBinary() ([]byte, error) {
@ -196,16 +196,16 @@ func (m *Metric) UnmarshalFromReader(r byteReader) error {
if err != nil { if err != nil {
return err return err
} }
(*m)[clientmodel.LabelName(ln)] = clientmodel.LabelValue(lv) (*m)[model.LabelName(ln)] = model.LabelValue(lv)
} }
return nil return nil
} }
// A Fingerprint is a clientmodel.Fingerprint that implements // A Fingerprint is a model.Fingerprint that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. The implementation // encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. The implementation
// depends on clientmodel.Fingerprint to be convertible to uint64. It encodes // depends on model.Fingerprint to be convertible to uint64. It encodes
// the fingerprint as a big-endian uint64. // the fingerprint as a big-endian uint64.
type Fingerprint clientmodel.Fingerprint type Fingerprint model.Fingerprint
// MarshalBinary implements encoding.BinaryMarshaler. // MarshalBinary implements encoding.BinaryMarshaler.
func (fp Fingerprint) MarshalBinary() ([]byte, error) { func (fp Fingerprint) MarshalBinary() ([]byte, error) {
@ -220,10 +220,10 @@ func (fp *Fingerprint) UnmarshalBinary(buf []byte) error {
return nil return nil
} }
// FingerprintSet is a map[clientmodel.Fingerprint]struct{} that // FingerprintSet is a map[model.Fingerprint]struct{} that
// implements encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its // implements encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its
// binary form is identical to that of Fingerprints. // binary form is identical to that of Fingerprints.
type FingerprintSet map[clientmodel.Fingerprint]struct{} type FingerprintSet map[model.Fingerprint]struct{}
// MarshalBinary implements encoding.BinaryMarshaler. // MarshalBinary implements encoding.BinaryMarshaler.
func (fps FingerprintSet) MarshalBinary() ([]byte, error) { func (fps FingerprintSet) MarshalBinary() ([]byte, error) {
@ -247,15 +247,15 @@ func (fps *FingerprintSet) UnmarshalBinary(buf []byte) error {
*fps = make(FingerprintSet, numFPs) *fps = make(FingerprintSet, numFPs)
for i := 0; i < int(numFPs); i++ { for i := 0; i < int(numFPs); i++ {
(*fps)[clientmodel.Fingerprint(binary.BigEndian.Uint64(buf[offset+i*8:]))] = struct{}{} (*fps)[model.Fingerprint(binary.BigEndian.Uint64(buf[offset+i*8:]))] = struct{}{}
} }
return nil return nil
} }
// Fingerprints is a clientmodel.Fingerprints that implements // Fingerprints is a model.Fingerprints that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its binary form is // encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its binary form is
// identical to that of FingerprintSet. // identical to that of FingerprintSet.
type Fingerprints clientmodel.Fingerprints type Fingerprints model.Fingerprints
// MarshalBinary implements encoding.BinaryMarshaler. // MarshalBinary implements encoding.BinaryMarshaler.
func (fps Fingerprints) MarshalBinary() ([]byte, error) { func (fps Fingerprints) MarshalBinary() ([]byte, error) {
@ -277,7 +277,7 @@ func (fps *Fingerprints) UnmarshalBinary(buf []byte) error {
*fps = make(Fingerprints, numFPs) *fps = make(Fingerprints, numFPs)
for i := range *fps { for i := range *fps {
(*fps)[i] = clientmodel.Fingerprint(binary.BigEndian.Uint64(buf[offset+i*8:])) (*fps)[i] = model.Fingerprint(binary.BigEndian.Uint64(buf[offset+i*8:]))
} }
return nil return nil
} }
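The comments above pin down the wire format: a fingerprint is serialized as a single big-endian uint64, and Fingerprints/FingerprintSet are simply sequences of such values. A short round-trip sketch using only encoding/binary and model.Fingerprint.

package main

import (
	"encoding/binary"
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	fp := model.Fingerprint(0x1234567890abcdef)

	// Encode as a big-endian uint64, as codable.Fingerprint does.
	buf := make([]byte, 8)
	binary.BigEndian.PutUint64(buf, uint64(fp))

	// Decode it back.
	decoded := model.Fingerprint(binary.BigEndian.Uint64(buf))
	fmt.Printf("%x -> % x -> %x\n", uint64(fp), buf, uint64(decoded))
}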
@ -309,14 +309,14 @@ func (lp *LabelPair) UnmarshalBinary(buf []byte) error {
if err != nil { if err != nil {
return err return err
} }
lp.Name = clientmodel.LabelName(n) lp.Name = model.LabelName(n)
lp.Value = clientmodel.LabelValue(v) lp.Value = model.LabelValue(v)
return nil return nil
} }
// LabelName is a clientmodel.LabelName that implements // LabelName is a model.LabelName that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. // encoding.BinaryMarshaler and encoding.BinaryUnmarshaler.
type LabelName clientmodel.LabelName type LabelName model.LabelName
// MarshalBinary implements encoding.BinaryMarshaler. // MarshalBinary implements encoding.BinaryMarshaler.
func (l LabelName) MarshalBinary() ([]byte, error) { func (l LabelName) MarshalBinary() ([]byte, error) {
@ -338,10 +338,10 @@ func (l *LabelName) UnmarshalBinary(buf []byte) error {
return nil return nil
} }
// LabelValueSet is a map[clientmodel.LabelValue]struct{} that implements // LabelValueSet is a map[model.LabelValue]struct{} that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its binary form is // encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its binary form is
// identical to that of LabelValues. // identical to that of LabelValues.
type LabelValueSet map[clientmodel.LabelValue]struct{} type LabelValueSet map[model.LabelValue]struct{}
// MarshalBinary implements encoding.BinaryMarshaler. // MarshalBinary implements encoding.BinaryMarshaler.
func (vs LabelValueSet) MarshalBinary() ([]byte, error) { func (vs LabelValueSet) MarshalBinary() ([]byte, error) {
@ -371,15 +371,15 @@ func (vs *LabelValueSet) UnmarshalBinary(buf []byte) error {
if err != nil { if err != nil {
return err return err
} }
(*vs)[clientmodel.LabelValue(v)] = struct{}{} (*vs)[model.LabelValue(v)] = struct{}{}
} }
return nil return nil
} }
// LabelValues is a clientmodel.LabelValues that implements // LabelValues is a model.LabelValues that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its binary form is // encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its binary form is
// identical to that of LabelValueSet. // identical to that of LabelValueSet.
type LabelValues clientmodel.LabelValues type LabelValues model.LabelValues
// MarshalBinary implements encoding.BinaryMarshaler. // MarshalBinary implements encoding.BinaryMarshaler.
func (vs LabelValues) MarshalBinary() ([]byte, error) { func (vs LabelValues) MarshalBinary() ([]byte, error) {
@ -409,7 +409,7 @@ func (vs *LabelValues) UnmarshalBinary(buf []byte) error {
if err != nil { if err != nil {
return err return err
} }
(*vs)[i] = clientmodel.LabelValue(v) (*vs)[i] = model.LabelValue(v)
} }
return nil return nil
} }
@ -417,7 +417,7 @@ func (vs *LabelValues) UnmarshalBinary(buf []byte) error {
// TimeRange is used to define a time range and implements // TimeRange is used to define a time range and implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. // encoding.BinaryMarshaler and encoding.BinaryUnmarshaler.
type TimeRange struct { type TimeRange struct {
First, Last clientmodel.Timestamp First, Last model.Time
} }
// MarshalBinary implements encoding.BinaryMarshaler. // MarshalBinary implements encoding.BinaryMarshaler.
@ -443,7 +443,7 @@ func (tr *TimeRange) UnmarshalBinary(buf []byte) error {
if err != nil { if err != nil {
return err return err
} }
tr.First = clientmodel.Timestamp(first) tr.First = model.Time(first)
tr.Last = clientmodel.Timestamp(last) tr.Last = model.Time(last)
return nil return nil
} }

View file

@ -23,7 +23,7 @@ import (
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/local/codable" "github.com/prometheus/prometheus/storage/local/codable"
"github.com/prometheus/prometheus/storage/local/index" "github.com/prometheus/prometheus/storage/local/index"
@ -34,12 +34,12 @@ import (
// an error or because the persistence was dirty from the start). Not goroutine // an error or because the persistence was dirty from the start). Not goroutine
// safe. Only call before anything else is running (except index processing // safe. Only call before anything else is running (except index processing
// queue as started by newPersistence). // queue as started by newPersistence).
func (p *persistence) recoverFromCrash(fingerprintToSeries map[clientmodel.Fingerprint]*memorySeries) error { func (p *persistence) recoverFromCrash(fingerprintToSeries map[model.Fingerprint]*memorySeries) error {
// TODO(beorn): We need proper tests for the crash recovery. // TODO(beorn): We need proper tests for the crash recovery.
log.Warn("Starting crash recovery. Prometheus is inoperative until complete.") log.Warn("Starting crash recovery. Prometheus is inoperative until complete.")
log.Warn("To avoid crash recovery in the future, shut down Prometheus with SIGTERM or a HTTP POST to /-/quit.") log.Warn("To avoid crash recovery in the future, shut down Prometheus with SIGTERM or a HTTP POST to /-/quit.")
fpsSeen := map[clientmodel.Fingerprint]struct{}{} fpsSeen := map[model.Fingerprint]struct{}{}
count := 0 count := 0
seriesDirNameFmt := fmt.Sprintf("%%0%dx", seriesDirNameLen) seriesDirNameFmt := fmt.Sprintf("%%0%dx", seriesDirNameLen)
@ -171,9 +171,9 @@ func (p *persistence) recoverFromCrash(fingerprintToSeries map[clientmodel.Finge
// be found there, it is moved into the orphaned directory. // be found there, it is moved into the orphaned directory.
func (p *persistence) sanitizeSeries( func (p *persistence) sanitizeSeries(
dirname string, fi os.FileInfo, dirname string, fi os.FileInfo,
fingerprintToSeries map[clientmodel.Fingerprint]*memorySeries, fingerprintToSeries map[model.Fingerprint]*memorySeries,
fpm fpMappings, fpm fpMappings,
) (clientmodel.Fingerprint, bool) { ) (model.Fingerprint, bool) {
filename := path.Join(dirname, fi.Name()) filename := path.Join(dirname, fi.Name())
purge := func() { purge := func() {
var err error var err error
@ -194,14 +194,16 @@ func (p *persistence) sanitizeSeries(
} }
} }
var fp clientmodel.Fingerprint var fp model.Fingerprint
var err error
if len(fi.Name()) != fpLen-seriesDirNameLen+len(seriesFileSuffix) || if len(fi.Name()) != fpLen-seriesDirNameLen+len(seriesFileSuffix) ||
!strings.HasSuffix(fi.Name(), seriesFileSuffix) { !strings.HasSuffix(fi.Name(), seriesFileSuffix) {
log.Warnf("Unexpected series file name %s.", filename) log.Warnf("Unexpected series file name %s.", filename)
purge() purge()
return fp, false return fp, false
} }
if err := fp.LoadFromString(path.Base(dirname) + fi.Name()[:fpLen-seriesDirNameLen]); err != nil { if fp, err = model.FingerprintFromString(path.Base(dirname) + fi.Name()[:fpLen-seriesDirNameLen]); err != nil {
log.Warnf("Error parsing file name %s: %s", filename, err) log.Warnf("Error parsing file name %s: %s", filename, err)
purge() purge()
return fp, false return fp, false
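The old Fingerprint.LoadFromString method is replaced above by the package-level model.FingerprintFromString, which parses the hexadecimal form that series file paths are built from (the directory name's hex digits plus the hex prefix of the file name). A small usage sketch; the directory and file names are made up.

package main

import (
	"fmt"
	"log"
	"path"
	"strings"

	"github.com/prometheus/common/model"
)

func main() {
	// A made-up series file: the first two hex digits form the directory
	// name, the remaining digits plus ".db" form the file name.
	dirname, filename := "12", "34567890abcdef.db"

	hexFP := path.Base(dirname) + strings.TrimSuffix(filename, ".db")
	fp, err := model.FingerprintFromString(hexFP)
	if err != nil {
		log.Fatalf("parsing fingerprint from %q: %s", hexFP, err)
	}
	fmt.Println(fp)
}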
@ -353,8 +355,8 @@ func (p *persistence) sanitizeSeries(
} }
func (p *persistence) cleanUpArchiveIndexes( func (p *persistence) cleanUpArchiveIndexes(
fpToSeries map[clientmodel.Fingerprint]*memorySeries, fpToSeries map[model.Fingerprint]*memorySeries,
fpsSeen map[clientmodel.Fingerprint]struct{}, fpsSeen map[model.Fingerprint]struct{},
fpm fpMappings, fpm fpMappings,
) error { ) error {
log.Info("Cleaning up archive indexes.") log.Info("Cleaning up archive indexes.")
@ -369,17 +371,17 @@ func (p *persistence) cleanUpArchiveIndexes(
if err := kv.Key(&fp); err != nil { if err := kv.Key(&fp); err != nil {
return err return err
} }
_, fpSeen := fpsSeen[clientmodel.Fingerprint(fp)] _, fpSeen := fpsSeen[model.Fingerprint(fp)]
inMemory := false inMemory := false
if fpSeen { if fpSeen {
_, inMemory = fpToSeries[clientmodel.Fingerprint(fp)] _, inMemory = fpToSeries[model.Fingerprint(fp)]
} }
if !fpSeen || inMemory { if !fpSeen || inMemory {
if inMemory { if inMemory {
log.Warnf("Archive clean-up: Fingerprint %v is not archived. Purging from archive indexes.", clientmodel.Fingerprint(fp)) log.Warnf("Archive clean-up: Fingerprint %v is not archived. Purging from archive indexes.", model.Fingerprint(fp))
} }
if !fpSeen { if !fpSeen {
log.Warnf("Archive clean-up: Fingerprint %v is unknown. Purging from archive indexes.", clientmodel.Fingerprint(fp)) log.Warnf("Archive clean-up: Fingerprint %v is unknown. Purging from archive indexes.", model.Fingerprint(fp))
} }
// It's fine if the fp is not in the archive indexes. // It's fine if the fp is not in the archive indexes.
if _, err := p.archivedFingerprintToMetrics.Delete(fp); err != nil { if _, err := p.archivedFingerprintToMetrics.Delete(fp); err != nil {
@ -393,7 +395,7 @@ func (p *persistence) cleanUpArchiveIndexes(
if err := kv.Value(&m); err != nil { if err := kv.Value(&m); err != nil {
return err return err
} }
maybeAddMapping(clientmodel.Fingerprint(fp), clientmodel.Metric(m), fpm) maybeAddMapping(model.Fingerprint(fp), model.Metric(m), fpm)
// Make sure it is in timerange index, too. // Make sure it is in timerange index, too.
has, err := p.archivedFingerprintToTimeRange.Has(fp) has, err := p.archivedFingerprintToTimeRange.Has(fp)
if err != nil { if err != nil {
@ -407,12 +409,12 @@ func (p *persistence) cleanUpArchiveIndexes(
if _, err := p.archivedFingerprintToMetrics.Delete(fp); err != nil { if _, err := p.archivedFingerprintToMetrics.Delete(fp); err != nil {
return err return err
} }
cds, err := p.loadChunkDescs(clientmodel.Fingerprint(fp), 0) cds, err := p.loadChunkDescs(model.Fingerprint(fp), 0)
if err != nil { if err != nil {
return err return err
} }
series := newMemorySeries(clientmodel.Metric(m), cds, p.seriesFileModTime(clientmodel.Fingerprint(fp))) series := newMemorySeries(model.Metric(m), cds, p.seriesFileModTime(model.Fingerprint(fp)))
fpToSeries[clientmodel.Fingerprint(fp)] = series fpToSeries[model.Fingerprint(fp)] = series
return nil return nil
}); err != nil { }); err != nil {
return err return err
@ -450,7 +452,7 @@ func (p *persistence) cleanUpArchiveIndexes(
} }
func (p *persistence) rebuildLabelIndexes( func (p *persistence) rebuildLabelIndexes(
fpToSeries map[clientmodel.Fingerprint]*memorySeries, fpToSeries map[model.Fingerprint]*memorySeries,
) error { ) error {
count := 0 count := 0
log.Info("Rebuilding label indexes.") log.Info("Rebuilding label indexes.")
@ -472,7 +474,7 @@ func (p *persistence) rebuildLabelIndexes(
if err := kv.Value(&m); err != nil { if err := kv.Value(&m); err != nil {
return err return err
} }
p.indexMetric(clientmodel.Fingerprint(fp), clientmodel.Metric(m)) p.indexMetric(model.Fingerprint(fp), model.Metric(m))
count++ count++
if count%10000 == 0 { if count%10000 == 0 {
log.Infof("%d metrics queued for indexing.", count) log.Infof("%d metrics queued for indexing.", count)
@ -486,7 +488,7 @@ func (p *persistence) rebuildLabelIndexes(
} }
// maybeAddMapping adds a fingerprint mapping to fpm if the FastFingerprint of m is different from fp. // maybeAddMapping adds a fingerprint mapping to fpm if the FastFingerprint of m is different from fp.
func maybeAddMapping(fp clientmodel.Fingerprint, m clientmodel.Metric, fpm fpMappings) { func maybeAddMapping(fp model.Fingerprint, m model.Metric, fpm fpMappings) {
if rawFP := m.FastFingerprint(); rawFP != fp { if rawFP := m.FastFingerprint(); rawFP != fp {
log.Warnf( log.Warnf(
"Metric %v with fingerprint %v is mapped from raw fingerprint %v.", "Metric %v with fingerprint %v is mapped from raw fingerprint %v.",
@ -495,7 +497,7 @@ func maybeAddMapping(fp clientmodel.Fingerprint, m clientmodel.Metric, fpm fpMap
if mappedFPs, ok := fpm[rawFP]; ok { if mappedFPs, ok := fpm[rawFP]; ok {
mappedFPs[metricToUniqueString(m)] = fp mappedFPs[metricToUniqueString(m)] = fp
} else { } else {
fpm[rawFP] = map[string]clientmodel.Fingerprint{ fpm[rawFP] = map[string]model.Fingerprint{
metricToUniqueString(m): fp, metricToUniqueString(m): fp,
} }
} }

View file

@ -20,7 +20,7 @@ import (
"math" "math"
"sort" "sort"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
) )
@ -112,7 +112,7 @@ func (c deltaEncodedChunk) add(s *metric.SamplePair) []chunk {
// int->float. // int->float.
nvb = d4 nvb = d4
nInt = false nInt = false
} else if !isInt && vb == d4 && baseValue+clientmodel.SampleValue(float32(dv)) != s.Value { } else if !isInt && vb == d4 && baseValue+model.SampleValue(float32(dv)) != s.Value {
// float32->float64. // float32->float64.
nvb = d8 nvb = d8
} else { } else {
@ -189,7 +189,7 @@ func (c deltaEncodedChunk) clone() chunk {
} }
// firstTime implements chunk. // firstTime implements chunk.
func (c deltaEncodedChunk) firstTime() clientmodel.Timestamp { func (c deltaEncodedChunk) firstTime() model.Time {
return c.baseTime() return c.baseTime()
} }
@ -255,12 +255,12 @@ func (c deltaEncodedChunk) isInt() bool {
return c[deltaHeaderIsIntOffset] == 1 return c[deltaHeaderIsIntOffset] == 1
} }
func (c deltaEncodedChunk) baseTime() clientmodel.Timestamp { func (c deltaEncodedChunk) baseTime() model.Time {
return clientmodel.Timestamp(binary.LittleEndian.Uint64(c[deltaHeaderBaseTimeOffset:])) return model.Time(binary.LittleEndian.Uint64(c[deltaHeaderBaseTimeOffset:]))
} }
func (c deltaEncodedChunk) baseValue() clientmodel.SampleValue { func (c deltaEncodedChunk) baseValue() model.SampleValue {
return clientmodel.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(c[deltaHeaderBaseValueOffset:]))) return model.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(c[deltaHeaderBaseValueOffset:])))
} }
func (c deltaEncodedChunk) sampleSize() int { func (c deltaEncodedChunk) sampleSize() int {
@ -278,8 +278,8 @@ func (c deltaEncodedChunk) len() int {
type deltaEncodedChunkIterator struct { type deltaEncodedChunkIterator struct {
c deltaEncodedChunk c deltaEncodedChunk
len int len int
baseT clientmodel.Timestamp baseT model.Time
baseV clientmodel.SampleValue baseV model.SampleValue
tBytes, vBytes deltaBytes tBytes, vBytes deltaBytes
isInt bool isInt bool
} }
@ -288,7 +288,7 @@ type deltaEncodedChunkIterator struct {
func (it *deltaEncodedChunkIterator) length() int { return it.len } func (it *deltaEncodedChunkIterator) length() int { return it.len }
// valueAtTime implements chunkIterator. // valueAtTime implements chunkIterator.
func (it *deltaEncodedChunkIterator) valueAtTime(t clientmodel.Timestamp) metric.Values { func (it *deltaEncodedChunkIterator) valueAtTime(t model.Time) metric.Values {
i := sort.Search(it.len, func(i int) bool { i := sort.Search(it.len, func(i int) bool {
return !it.timestampAtIndex(i).Before(t) return !it.timestampAtIndex(i).Before(t)
}) })
@ -350,7 +350,7 @@ func (it *deltaEncodedChunkIterator) rangeValues(in metric.Interval) metric.Valu
} }
// contains implements chunkIterator. // contains implements chunkIterator.
func (it *deltaEncodedChunkIterator) contains(t clientmodel.Timestamp) bool { func (it *deltaEncodedChunkIterator) contains(t model.Time) bool {
return !t.Before(it.baseT) && !t.After(it.timestampAtIndex(it.len-1)) return !t.Before(it.baseT) && !t.After(it.timestampAtIndex(it.len-1))
} }
@ -370,31 +370,31 @@ func (it *deltaEncodedChunkIterator) values() <-chan *metric.SamplePair {
} }
// timestampAtIndex implements chunkIterator. // timestampAtIndex implements chunkIterator.
func (it *deltaEncodedChunkIterator) timestampAtIndex(idx int) clientmodel.Timestamp { func (it *deltaEncodedChunkIterator) timestampAtIndex(idx int) model.Time {
offset := deltaHeaderBytes + idx*int(it.tBytes+it.vBytes) offset := deltaHeaderBytes + idx*int(it.tBytes+it.vBytes)
switch it.tBytes { switch it.tBytes {
case d1: case d1:
return it.baseT + clientmodel.Timestamp(uint8(it.c[offset])) return it.baseT + model.Time(uint8(it.c[offset]))
case d2: case d2:
return it.baseT + clientmodel.Timestamp(binary.LittleEndian.Uint16(it.c[offset:])) return it.baseT + model.Time(binary.LittleEndian.Uint16(it.c[offset:]))
case d4: case d4:
return it.baseT + clientmodel.Timestamp(binary.LittleEndian.Uint32(it.c[offset:])) return it.baseT + model.Time(binary.LittleEndian.Uint32(it.c[offset:]))
case d8: case d8:
// Take absolute value for d8. // Take absolute value for d8.
return clientmodel.Timestamp(binary.LittleEndian.Uint64(it.c[offset:])) return model.Time(binary.LittleEndian.Uint64(it.c[offset:]))
default: default:
panic("invalid number of bytes for time delta") panic("invalid number of bytes for time delta")
} }
} }
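timestampAtIndex above reconstructs each timestamp from the chunk's base time plus an unsigned little-endian delta of 1, 2, 4 or 8 bytes; only the 8-byte case stores the absolute time. A stand-alone sketch of the same decoding for 2-byte deltas; the byte layout here is simplified and illustrative, not the real chunk format.

package main

import (
	"encoding/binary"
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	baseT := model.Time(1400000000000) // chunk base time in milliseconds

	// Three samples stored as 2-byte little-endian deltas from baseT.
	deltas := []byte{
		0x00, 0x00, // +0ms
		0xe8, 0x03, // +1000ms
		0xd0, 0x07, // +2000ms
	}

	const tBytes = 2
	for idx := 0; idx < len(deltas)/tBytes; idx++ {
		offset := idx * tBytes
		t := baseT + model.Time(binary.LittleEndian.Uint16(deltas[offset:]))
		fmt.Println(t)
	}
}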
// lastTimestamp implements chunkIterator. // lastTimestamp implements chunkIterator.
func (it *deltaEncodedChunkIterator) lastTimestamp() clientmodel.Timestamp { func (it *deltaEncodedChunkIterator) lastTimestamp() model.Time {
return it.timestampAtIndex(it.len - 1) return it.timestampAtIndex(it.len - 1)
} }
// sampleValueAtIndex implements chunkIterator. // sampleValueAtIndex implements chunkIterator.
func (it *deltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmodel.SampleValue { func (it *deltaEncodedChunkIterator) sampleValueAtIndex(idx int) model.SampleValue {
offset := deltaHeaderBytes + idx*int(it.tBytes+it.vBytes) + int(it.tBytes) offset := deltaHeaderBytes + idx*int(it.tBytes+it.vBytes) + int(it.tBytes)
if it.isInt { if it.isInt {
@ -402,11 +402,11 @@ func (it *deltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmodel.Sam
case d0: case d0:
return it.baseV return it.baseV
case d1: case d1:
return it.baseV + clientmodel.SampleValue(int8(it.c[offset])) return it.baseV + model.SampleValue(int8(it.c[offset]))
case d2: case d2:
return it.baseV + clientmodel.SampleValue(int16(binary.LittleEndian.Uint16(it.c[offset:]))) return it.baseV + model.SampleValue(int16(binary.LittleEndian.Uint16(it.c[offset:])))
case d4: case d4:
return it.baseV + clientmodel.SampleValue(int32(binary.LittleEndian.Uint32(it.c[offset:]))) return it.baseV + model.SampleValue(int32(binary.LittleEndian.Uint32(it.c[offset:])))
// No d8 for ints. // No d8 for ints.
default: default:
panic("invalid number of bytes for integer delta") panic("invalid number of bytes for integer delta")
@ -414,10 +414,10 @@ func (it *deltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmodel.Sam
} else { } else {
switch it.vBytes { switch it.vBytes {
case d4: case d4:
return it.baseV + clientmodel.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(it.c[offset:]))) return it.baseV + model.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(it.c[offset:])))
case d8: case d8:
// Take absolute value for d8. // Take absolute value for d8.
return clientmodel.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(it.c[offset:]))) return model.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(it.c[offset:])))
default: default:
panic("invalid number of bytes for floating point delta") panic("invalid number of bytes for floating point delta")
} }
@ -425,6 +425,6 @@ func (it *deltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmodel.Sam
} }
// lastSampleValue implements chunkIterator. // lastSampleValue implements chunkIterator.
func (it *deltaEncodedChunkIterator) lastSampleValue() clientmodel.SampleValue { func (it *deltaEncodedChunkIterator) lastSampleValue() model.SampleValue {
return it.sampleValueAtIndex(it.len - 1) return it.sampleValueAtIndex(it.len - 1)
} }

View file

@ -16,7 +16,7 @@ package local
import ( import (
"math" "math"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
type deltaBytes byte type deltaBytes byte
@ -29,7 +29,7 @@ const (
d8 deltaBytes = 8 d8 deltaBytes = 8
) )
func bytesNeededForUnsignedTimestampDelta(deltaT clientmodel.Timestamp) deltaBytes { func bytesNeededForUnsignedTimestampDelta(deltaT model.Time) deltaBytes {
switch { switch {
case deltaT > math.MaxUint32: case deltaT > math.MaxUint32:
return d8 return d8
@ -42,7 +42,7 @@ func bytesNeededForUnsignedTimestampDelta(deltaT clientmodel.Timestamp) deltaByt
} }
} }
func bytesNeededForSignedTimestampDelta(deltaT clientmodel.Timestamp) deltaBytes { func bytesNeededForSignedTimestampDelta(deltaT model.Time) deltaBytes {
switch { switch {
case deltaT > math.MaxInt32 || deltaT < math.MinInt32: case deltaT > math.MaxInt32 || deltaT < math.MinInt32:
return d8 return d8
@ -55,7 +55,7 @@ func bytesNeededForSignedTimestampDelta(deltaT clientmodel.Timestamp) deltaBytes
} }
} }
func bytesNeededForIntegerSampleValueDelta(deltaV clientmodel.SampleValue) deltaBytes { func bytesNeededForIntegerSampleValueDelta(deltaV model.SampleValue) deltaBytes {
switch { switch {
case deltaV < math.MinInt32 || deltaV > math.MaxInt32: case deltaV < math.MinInt32 || deltaV > math.MaxInt32:
return d8 return d8
@ -78,7 +78,7 @@ func max(a, b deltaBytes) deltaBytes {
} }
// isInt64 returns true if v can be represented as an int64. // isInt64 returns true if v can be represented as an int64.
func isInt64(v clientmodel.SampleValue) bool { func isInt64(v model.SampleValue) bool {
// Note: Using math.Modf is slower than the conversion approach below. // Note: Using math.Modf is slower than the conversion approach below.
return clientmodel.SampleValue(int64(v)) == v return model.SampleValue(int64(v)) == v
} }

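The bytesNeededFor… helpers above all follow the same pattern: pick the narrowest of d1/d2/d4/d8 that still holds the delta. The isInt64 comment also notes that a round-trip conversion beats math.Modf. A standalone sketch of both ideas with plain int64/float64 instead of the model types:

package main

import (
	"fmt"
	"math"
)

// bytesForSignedDelta returns the number of bytes needed to store d as a
// signed little-endian integer. The thresholds are the int8/int16/int32 ranges.
func bytesForSignedDelta(d int64) int {
	switch {
	case d < math.MinInt32 || d > math.MaxInt32:
		return 8
	case d < math.MinInt16 || d > math.MaxInt16:
		return 4
	case d < math.MinInt8 || d > math.MaxInt8:
		return 2
	default:
		return 1
	}
}

// isInt64 reports whether v has no fractional part, using the round-trip
// conversion trick mentioned in the comment above instead of math.Modf.
func isInt64(v float64) bool {
	return float64(int64(v)) == v
}

func main() {
	fmt.Println(bytesForSignedDelta(100))     // 1
	fmt.Println(bytesForSignedDelta(40_000))  // 4
	fmt.Println(bytesForSignedDelta(1 << 40)) // 8
	fmt.Println(isInt64(42), isInt64(42.5))   // true false
}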

@ -20,7 +20,7 @@ import (
"math" "math"
"sort" "sort"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
) )
@ -105,10 +105,10 @@ func (c doubleDeltaEncodedChunk) add(s *metric.SamplePair) []chunk {
return []chunk{&c, overflowChunks[0]} return []chunk{&c, overflowChunks[0]}
} }
projectedTime := c.baseTime() + clientmodel.Timestamp(c.len())*c.baseTimeDelta() projectedTime := c.baseTime() + model.Time(c.len())*c.baseTimeDelta()
ddt := s.Timestamp - projectedTime ddt := s.Timestamp - projectedTime
projectedValue := c.baseValue() + clientmodel.SampleValue(c.len())*c.baseValueDelta() projectedValue := c.baseValue() + model.SampleValue(c.len())*c.baseValueDelta()
ddv := s.Value - projectedValue ddv := s.Value - projectedValue
ntb, nvb, nInt := tb, vb, c.isInt() ntb, nvb, nInt := tb, vb, c.isInt()
@ -118,7 +118,7 @@ func (c doubleDeltaEncodedChunk) add(s *metric.SamplePair) []chunk {
// int->float. // int->float.
nvb = d4 nvb = d4
nInt = false nInt = false
} else if !c.isInt() && vb == d4 && projectedValue+clientmodel.SampleValue(float32(ddv)) != s.Value { } else if !c.isInt() && vb == d4 && projectedValue+model.SampleValue(float32(ddv)) != s.Value {
// float32->float64. // float32->float64.
nvb = d8 nvb = d8
} else { } else {
@ -195,7 +195,7 @@ func (c doubleDeltaEncodedChunk) clone() chunk {
} }
// firstTime implements chunk. // firstTime implements chunk.
func (c doubleDeltaEncodedChunk) firstTime() clientmodel.Timestamp { func (c doubleDeltaEncodedChunk) firstTime() model.Time {
return c.baseTime() return c.baseTime()
} }
@ -251,16 +251,16 @@ func (c *doubleDeltaEncodedChunk) unmarshalFromBuf(buf []byte) {
// encoding implements chunk. // encoding implements chunk.
func (c doubleDeltaEncodedChunk) encoding() chunkEncoding { return doubleDelta } func (c doubleDeltaEncodedChunk) encoding() chunkEncoding { return doubleDelta }
func (c doubleDeltaEncodedChunk) baseTime() clientmodel.Timestamp { func (c doubleDeltaEncodedChunk) baseTime() model.Time {
return clientmodel.Timestamp( return model.Time(
binary.LittleEndian.Uint64( binary.LittleEndian.Uint64(
c[doubleDeltaHeaderBaseTimeOffset:], c[doubleDeltaHeaderBaseTimeOffset:],
), ),
) )
} }
func (c doubleDeltaEncodedChunk) baseValue() clientmodel.SampleValue { func (c doubleDeltaEncodedChunk) baseValue() model.SampleValue {
return clientmodel.SampleValue( return model.SampleValue(
math.Float64frombits( math.Float64frombits(
binary.LittleEndian.Uint64( binary.LittleEndian.Uint64(
c[doubleDeltaHeaderBaseValueOffset:], c[doubleDeltaHeaderBaseValueOffset:],
@ -269,22 +269,22 @@ func (c doubleDeltaEncodedChunk) baseValue() clientmodel.SampleValue {
) )
} }
func (c doubleDeltaEncodedChunk) baseTimeDelta() clientmodel.Timestamp { func (c doubleDeltaEncodedChunk) baseTimeDelta() model.Time {
if len(c) < doubleDeltaHeaderBaseTimeDeltaOffset+8 { if len(c) < doubleDeltaHeaderBaseTimeDeltaOffset+8 {
return 0 return 0
} }
return clientmodel.Timestamp( return model.Time(
binary.LittleEndian.Uint64( binary.LittleEndian.Uint64(
c[doubleDeltaHeaderBaseTimeDeltaOffset:], c[doubleDeltaHeaderBaseTimeDeltaOffset:],
), ),
) )
} }
func (c doubleDeltaEncodedChunk) baseValueDelta() clientmodel.SampleValue { func (c doubleDeltaEncodedChunk) baseValueDelta() model.SampleValue {
if len(c) < doubleDeltaHeaderBaseValueDeltaOffset+8 { if len(c) < doubleDeltaHeaderBaseValueDeltaOffset+8 {
return 0 return 0
} }
return clientmodel.SampleValue( return model.SampleValue(
math.Float64frombits( math.Float64frombits(
binary.LittleEndian.Uint64( binary.LittleEndian.Uint64(
c[doubleDeltaHeaderBaseValueDeltaOffset:], c[doubleDeltaHeaderBaseValueDeltaOffset:],
@ -384,8 +384,8 @@ func (c doubleDeltaEncodedChunk) addSecondSample(s *metric.SamplePair, tb, vb de
type doubleDeltaEncodedChunkIterator struct { type doubleDeltaEncodedChunkIterator struct {
c doubleDeltaEncodedChunk c doubleDeltaEncodedChunk
len int len int
baseT, baseΔT clientmodel.Timestamp baseT, baseΔT model.Time
baseV, baseΔV clientmodel.SampleValue baseV, baseΔV model.SampleValue
tBytes, vBytes deltaBytes tBytes, vBytes deltaBytes
isInt bool isInt bool
} }
@ -394,7 +394,7 @@ type doubleDeltaEncodedChunkIterator struct {
func (it *doubleDeltaEncodedChunkIterator) length() int { return it.len } func (it *doubleDeltaEncodedChunkIterator) length() int { return it.len }
// valueAtTime implements chunkIterator. // valueAtTime implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) valueAtTime(t clientmodel.Timestamp) metric.Values { func (it *doubleDeltaEncodedChunkIterator) valueAtTime(t model.Time) metric.Values {
i := sort.Search(it.len, func(i int) bool { i := sort.Search(it.len, func(i int) bool {
return !it.timestampAtIndex(i).Before(t) return !it.timestampAtIndex(i).Before(t)
}) })
@ -456,7 +456,7 @@ func (it *doubleDeltaEncodedChunkIterator) rangeValues(in metric.Interval) metri
} }
// contains implements chunkIterator. // contains implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) contains(t clientmodel.Timestamp) bool { func (it *doubleDeltaEncodedChunkIterator) contains(t model.Time) bool {
return !t.Before(it.baseT) && !t.After(it.timestampAtIndex(it.len-1)) return !t.Before(it.baseT) && !t.After(it.timestampAtIndex(it.len-1))
} }
@ -476,7 +476,7 @@ func (it *doubleDeltaEncodedChunkIterator) values() <-chan *metric.SamplePair {
} }
// timestampAtIndex implements chunkIterator. // timestampAtIndex implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) timestampAtIndex(idx int) clientmodel.Timestamp { func (it *doubleDeltaEncodedChunkIterator) timestampAtIndex(idx int) model.Time {
if idx == 0 { if idx == 0 {
return it.baseT return it.baseT
} }
@ -494,31 +494,31 @@ func (it *doubleDeltaEncodedChunkIterator) timestampAtIndex(idx int) clientmodel
switch it.tBytes { switch it.tBytes {
case d1: case d1:
return it.baseT + return it.baseT +
clientmodel.Timestamp(idx)*it.baseΔT + model.Time(idx)*it.baseΔT +
clientmodel.Timestamp(int8(it.c[offset])) model.Time(int8(it.c[offset]))
case d2: case d2:
return it.baseT + return it.baseT +
clientmodel.Timestamp(idx)*it.baseΔT + model.Time(idx)*it.baseΔT +
clientmodel.Timestamp(int16(binary.LittleEndian.Uint16(it.c[offset:]))) model.Time(int16(binary.LittleEndian.Uint16(it.c[offset:])))
case d4: case d4:
return it.baseT + return it.baseT +
clientmodel.Timestamp(idx)*it.baseΔT + model.Time(idx)*it.baseΔT +
clientmodel.Timestamp(int32(binary.LittleEndian.Uint32(it.c[offset:]))) model.Time(int32(binary.LittleEndian.Uint32(it.c[offset:])))
case d8: case d8:
// Take absolute value for d8. // Take absolute value for d8.
return clientmodel.Timestamp(binary.LittleEndian.Uint64(it.c[offset:])) return model.Time(binary.LittleEndian.Uint64(it.c[offset:]))
default: default:
panic("invalid number of bytes for time delta") panic("invalid number of bytes for time delta")
} }
} }
// lastTimestamp implements chunkIterator. // lastTimestamp implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) lastTimestamp() clientmodel.Timestamp { func (it *doubleDeltaEncodedChunkIterator) lastTimestamp() model.Time {
return it.timestampAtIndex(it.len - 1) return it.timestampAtIndex(it.len - 1)
} }
// sampleValueAtIndex implements chunkIterator. // sampleValueAtIndex implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmodel.SampleValue { func (it *doubleDeltaEncodedChunkIterator) sampleValueAtIndex(idx int) model.SampleValue {
if idx == 0 { if idx == 0 {
return it.baseV return it.baseV
} }
@ -537,19 +537,19 @@ func (it *doubleDeltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmod
switch it.vBytes { switch it.vBytes {
case d0: case d0:
return it.baseV + return it.baseV +
clientmodel.SampleValue(idx)*it.baseΔV model.SampleValue(idx)*it.baseΔV
case d1: case d1:
return it.baseV + return it.baseV +
clientmodel.SampleValue(idx)*it.baseΔV + model.SampleValue(idx)*it.baseΔV +
clientmodel.SampleValue(int8(it.c[offset])) model.SampleValue(int8(it.c[offset]))
case d2: case d2:
return it.baseV + return it.baseV +
clientmodel.SampleValue(idx)*it.baseΔV + model.SampleValue(idx)*it.baseΔV +
clientmodel.SampleValue(int16(binary.LittleEndian.Uint16(it.c[offset:]))) model.SampleValue(int16(binary.LittleEndian.Uint16(it.c[offset:])))
case d4: case d4:
return it.baseV + return it.baseV +
clientmodel.SampleValue(idx)*it.baseΔV + model.SampleValue(idx)*it.baseΔV +
clientmodel.SampleValue(int32(binary.LittleEndian.Uint32(it.c[offset:]))) model.SampleValue(int32(binary.LittleEndian.Uint32(it.c[offset:])))
// No d8 for ints. // No d8 for ints.
default: default:
panic("invalid number of bytes for integer delta") panic("invalid number of bytes for integer delta")
@ -558,11 +558,11 @@ func (it *doubleDeltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmod
switch it.vBytes { switch it.vBytes {
case d4: case d4:
return it.baseV + return it.baseV +
clientmodel.SampleValue(idx)*it.baseΔV + model.SampleValue(idx)*it.baseΔV +
clientmodel.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(it.c[offset:]))) model.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(it.c[offset:])))
case d8: case d8:
// Take absolute value for d8. // Take absolute value for d8.
return clientmodel.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(it.c[offset:]))) return model.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(it.c[offset:])))
default: default:
panic("invalid number of bytes for floating point delta") panic("invalid number of bytes for floating point delta")
} }
@ -570,6 +570,6 @@ func (it *doubleDeltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmod
} }
// lastSampleValue implements chunkIterator. // lastSampleValue implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) lastSampleValue() clientmodel.SampleValue { func (it *doubleDeltaEncodedChunkIterator) lastSampleValue() model.SampleValue {
return it.sampleValueAtIndex(it.len - 1) return it.sampleValueAtIndex(it.len - 1)
} }

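The add and timestampAtIndex code above is easier to follow once the double-delta idea is stated explicitly: the chunk keeps a base time/value and a base delta, projects sample i linearly as base + i*baseΔ, and stores only the (usually tiny) deviation from that projection. A self-contained sketch of that arithmetic with plain Go types rather than the chunk byte layout:

package main

import "fmt"

type sample struct {
	t int64
	v float64
}

func main() {
	samples := []sample{{1000, 10}, {1010, 12}, {1021, 13.5}, {1030, 16}}

	baseT, baseV := samples[0].t, samples[0].v
	baseDT := samples[1].t - samples[0].t // base time delta
	baseDV := samples[1].v - samples[0].v // base value delta

	// Encode: deviation of each sample from the linear projection.
	ddt := make([]int64, len(samples))
	ddv := make([]float64, len(samples))
	for i, s := range samples {
		projT := baseT + int64(i)*baseDT
		projV := baseV + float64(i)*baseDV
		ddt[i] = s.t - projT
		ddv[i] = s.v - projV
	}

	// Decode: reverse the projection. For a perfectly regular series the
	// stored deviations are all zero, which is what makes the scheme cheap.
	for i := range samples {
		t := baseT + int64(i)*baseDT + ddt[i]
		v := baseV + float64(i)*baseDV + ddv[i]
		fmt.Printf("i=%d t=%d v=%g (stored ddt=%d ddv=%g)\n", i, t, v, ddt[i], ddv[i])
	}
}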

@ -20,7 +20,7 @@ import (
"os" "os"
"path" "path"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/local/codable" "github.com/prometheus/prometheus/storage/local/codable"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
@ -42,7 +42,7 @@ var (
) )
// FingerprintMetricMapping is an in-memory map of fingerprints to metrics. // FingerprintMetricMapping is an in-memory map of fingerprints to metrics.
type FingerprintMetricMapping map[clientmodel.Fingerprint]clientmodel.Metric type FingerprintMetricMapping map[model.Fingerprint]model.Metric
// FingerprintMetricIndex models a database mapping fingerprints to metrics. // FingerprintMetricIndex models a database mapping fingerprints to metrics.
type FingerprintMetricIndex struct { type FingerprintMetricIndex struct {
@ -83,7 +83,7 @@ func (i *FingerprintMetricIndex) UnindexBatch(mapping FingerprintMetricMapping)
// fingerprint is not an error. In that case, (nil, false, nil) is returned. // fingerprint is not an error. In that case, (nil, false, nil) is returned.
// //
// This method is goroutine-safe. // This method is goroutine-safe.
func (i *FingerprintMetricIndex) Lookup(fp clientmodel.Fingerprint) (metric clientmodel.Metric, ok bool, err error) { func (i *FingerprintMetricIndex) Lookup(fp model.Fingerprint) (metric model.Metric, ok bool, err error) {
ok, err = i.Get(codable.Fingerprint(fp), (*codable.Metric)(&metric)) ok, err = i.Get(codable.Fingerprint(fp), (*codable.Metric)(&metric))
return return
} }
@ -105,7 +105,7 @@ func NewFingerprintMetricIndex(basePath string) (*FingerprintMetricIndex, error)
// LabelNameLabelValuesMapping is an in-memory map of label names to // LabelNameLabelValuesMapping is an in-memory map of label names to
// label values. // label values.
type LabelNameLabelValuesMapping map[clientmodel.LabelName]codable.LabelValueSet type LabelNameLabelValuesMapping map[model.LabelName]codable.LabelValueSet
// LabelNameLabelValuesIndex is a KeyValueStore that maps existing label names // LabelNameLabelValuesIndex is a KeyValueStore that maps existing label names
// to all label values stored for that label name. // to all label values stored for that label name.
@ -138,11 +138,11 @@ func (i *LabelNameLabelValuesIndex) IndexBatch(b LabelNameLabelValuesMapping) er
} }
// Lookup looks up all label values for a given label name and returns them as // Lookup looks up all label values for a given label name and returns them as
// clientmodel.LabelValues (which is a slice). Looking up a non-existing label // model.LabelValues (which is a slice). Looking up a non-existing label
// name is not an error. In that case, (nil, false, nil) is returned. // name is not an error. In that case, (nil, false, nil) is returned.
// //
// This method is goroutine-safe. // This method is goroutine-safe.
func (i *LabelNameLabelValuesIndex) Lookup(l clientmodel.LabelName) (values clientmodel.LabelValues, ok bool, err error) { func (i *LabelNameLabelValuesIndex) Lookup(l model.LabelName) (values model.LabelValues, ok bool, err error) {
ok, err = i.Get(codable.LabelName(l), (*codable.LabelValues)(&values)) ok, err = i.Get(codable.LabelName(l), (*codable.LabelValues)(&values))
return return
} }
@ -152,10 +152,10 @@ func (i *LabelNameLabelValuesIndex) Lookup(l clientmodel.LabelName) (values clie
// (nil, false, nil) is returned. // (nil, false, nil) is returned.
// //
// This method is goroutine-safe. // This method is goroutine-safe.
func (i *LabelNameLabelValuesIndex) LookupSet(l clientmodel.LabelName) (values map[clientmodel.LabelValue]struct{}, ok bool, err error) { func (i *LabelNameLabelValuesIndex) LookupSet(l model.LabelName) (values map[model.LabelValue]struct{}, ok bool, err error) {
ok, err = i.Get(codable.LabelName(l), (*codable.LabelValueSet)(&values)) ok, err = i.Get(codable.LabelName(l), (*codable.LabelValueSet)(&values))
if values == nil { if values == nil {
values = map[clientmodel.LabelValue]struct{}{} values = map[model.LabelValue]struct{}{}
} }
return return
} }
@ -216,7 +216,7 @@ func (i *LabelPairFingerprintIndex) IndexBatch(m LabelPairFingerprintsMapping) e
// returned. // returned.
// //
// This method is goroutine-safe. // This method is goroutine-safe.
func (i *LabelPairFingerprintIndex) Lookup(p metric.LabelPair) (fps clientmodel.Fingerprints, ok bool, err error) { func (i *LabelPairFingerprintIndex) Lookup(p metric.LabelPair) (fps model.Fingerprints, ok bool, err error) {
ok, err = i.Get((codable.LabelPair)(p), (*codable.Fingerprints)(&fps)) ok, err = i.Get((codable.LabelPair)(p), (*codable.Fingerprints)(&fps))
return return
} }
@ -226,10 +226,10 @@ func (i *LabelPairFingerprintIndex) Lookup(p metric.LabelPair) (fps clientmodel.
// returned. // returned.
// //
// This method is goroutine-safe. // This method is goroutine-safe.
func (i *LabelPairFingerprintIndex) LookupSet(p metric.LabelPair) (fps map[clientmodel.Fingerprint]struct{}, ok bool, err error) { func (i *LabelPairFingerprintIndex) LookupSet(p metric.LabelPair) (fps map[model.Fingerprint]struct{}, ok bool, err error) {
ok, err = i.Get((codable.LabelPair)(p), (*codable.FingerprintSet)(&fps)) ok, err = i.Get((codable.LabelPair)(p), (*codable.FingerprintSet)(&fps))
if fps == nil { if fps == nil {
fps = map[clientmodel.Fingerprint]struct{}{} fps = map[model.Fingerprint]struct{}{}
} }
return return
} }
@ -266,7 +266,7 @@ type FingerprintTimeRangeIndex struct {
// returned. // returned.
// //
// This method is goroutine-safe. // This method is goroutine-safe.
func (i *FingerprintTimeRangeIndex) Lookup(fp clientmodel.Fingerprint) (firstTime, lastTime clientmodel.Timestamp, ok bool, err error) { func (i *FingerprintTimeRangeIndex) Lookup(fp model.Fingerprint) (firstTime, lastTime model.Time, ok bool, err error) {
var tr codable.TimeRange var tr codable.TimeRange
ok, err = i.Get(codable.Fingerprint(fp), &tr) ok, err = i.Get(codable.Fingerprint(fp), &tr)
return tr.First, tr.Last, ok, err return tr.First, tr.Last, ok, err

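All of the Lookup and LookupSet methods above share one convention worth spelling out: a missing key is reported through the ok return, never through err, so callers have to branch on both. A tiny sketch of that calling pattern, using an in-memory map as a stand-in for the on-disk KeyValueStore:

package main

import "fmt"

type fingerprintMetricIndex map[uint64]map[string]string

// lookup returns the metric for fp. (nil, false, nil) means "not indexed",
// which is not an error.
func (i fingerprintMetricIndex) lookup(fp uint64) (metric map[string]string, ok bool, err error) {
	metric, ok = i[fp]
	return metric, ok, nil
}

func main() {
	idx := fingerprintMetricIndex{
		42: {"__name__": "http_requests_total", "job": "api"},
	}
	for _, fp := range []uint64{42, 7} {
		m, ok, err := idx.lookup(fp)
		switch {
		case err != nil:
			fmt.Println("lookup failed:", err)
		case !ok:
			fmt.Printf("fingerprint %d not indexed (no error)\n", fp)
		default:
			fmt.Printf("fingerprint %d -> %v\n", fp, m)
		}
	}
}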

@ -18,7 +18,7 @@ import (
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
) )
@ -34,29 +34,29 @@ type Storage interface {
// processing.) The implementation might remove labels with empty value // processing.) The implementation might remove labels with empty value
// from the provided Sample as those labels are considered equivalent to // from the provided Sample as those labels are considered equivalent to
// a label not present at all. // a label not present at all.
Append(*clientmodel.Sample) Append(*model.Sample)
// NewPreloader returns a new Preloader which allows preloading and pinning // NewPreloader returns a new Preloader which allows preloading and pinning
// series data into memory for use within a query. // series data into memory for use within a query.
NewPreloader() Preloader NewPreloader() Preloader
// MetricsForLabelMatchers returns the metrics from storage that satisfy the given // MetricsForLabelMatchers returns the metrics from storage that satisfy the given
// label matchers. At least one label matcher must be specified that does not // label matchers. At least one label matcher must be specified that does not
// match the empty string. // match the empty string.
MetricsForLabelMatchers(...*metric.LabelMatcher) map[clientmodel.Fingerprint]clientmodel.COWMetric MetricsForLabelMatchers(...*metric.LabelMatcher) map[model.Fingerprint]model.COWMetric
// LastSamplePairForFingerprint returns the last sample pair for the // LastSamplePairForFingerprint returns the last sample pair for the
// provided fingerprint. If the respective time series does not exist or // provided fingerprint. If the respective time series does not exist or
// has an evicted head chunk, nil is returned. // has an evicted head chunk, nil is returned.
LastSamplePairForFingerprint(clientmodel.Fingerprint) *metric.SamplePair LastSamplePairForFingerprint(model.Fingerprint) *metric.SamplePair
// Get all of the label values that are associated with a given label name. // Get all of the label values that are associated with a given label name.
LabelValuesForLabelName(clientmodel.LabelName) clientmodel.LabelValues LabelValuesForLabelName(model.LabelName) model.LabelValues
// Get the metric associated with the provided fingerprint. // Get the metric associated with the provided fingerprint.
MetricForFingerprint(clientmodel.Fingerprint) clientmodel.COWMetric MetricForFingerprint(model.Fingerprint) model.COWMetric
// Construct an iterator for a given fingerprint. // Construct an iterator for a given fingerprint.
// The iterator will never return samples older than retention time, // The iterator will never return samples older than retention time,
// relative to the time NewIterator was called. // relative to the time NewIterator was called.
NewIterator(clientmodel.Fingerprint) SeriesIterator NewIterator(model.Fingerprint) SeriesIterator
// Drop all time series associated with the given fingerprints. This operation // Drop all time series associated with the given fingerprints. This operation
// will not show up in the series operations metrics. // will not show up in the series operations metrics.
DropMetricsForFingerprints(...clientmodel.Fingerprint) DropMetricsForFingerprints(...model.Fingerprint)
// Run the various maintenance loops in goroutines. Returns when the // Run the various maintenance loops in goroutines. Returns when the
// storage is ready to use. Keeps everything running in the background // storage is ready to use. Keeps everything running in the background
// until Stop is called. // until Stop is called.
@ -81,7 +81,7 @@ type SeriesIterator interface {
// value is returned. Only the first or last value is returned (as a // value is returned. Only the first or last value is returned (as a
// single value), if the given time is before or after the first or last // single value), if the given time is before or after the first or last
// value, respectively. // value, respectively.
ValueAtTime(clientmodel.Timestamp) metric.Values ValueAtTime(model.Time) metric.Values
// Gets the boundary values of an interval: the first and last value // Gets the boundary values of an interval: the first and last value
// within a given interval. // within a given interval.
BoundaryValues(metric.Interval) metric.Values BoundaryValues(metric.Interval) metric.Values
@ -94,8 +94,8 @@ type SeriesIterator interface {
// goroutine-safe. // goroutine-safe.
type Preloader interface { type Preloader interface {
PreloadRange( PreloadRange(
fp clientmodel.Fingerprint, fp model.Fingerprint,
from clientmodel.Timestamp, through clientmodel.Timestamp, from model.Time, through model.Time,
stalenessDelta time.Duration, stalenessDelta time.Duration,
) error ) error
// Close unpins any previously requested series data from memory. // Close unpins any previously requested series data from memory.

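Since this interface is the seam between ingestion and the local storage, a small usage sketch may help. The snippet below implements only Append and LabelValuesForLabelName against an in-memory map, just to show the call pattern with the common/model types this commit switches to; it is not the real storage implementation, and the miniStorage interface is an invention of the sketch:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

type miniStorage interface {
	Append(*model.Sample)
	LabelValuesForLabelName(model.LabelName) model.LabelValues
}

// memStorage records, per label name, which label values have been seen.
type memStorage struct {
	values map[model.LabelName]map[model.LabelValue]struct{}
}

func newMemStorage() *memStorage {
	return &memStorage{values: map[model.LabelName]map[model.LabelValue]struct{}{}}
}

func (s *memStorage) Append(smpl *model.Sample) {
	for ln, lv := range smpl.Metric {
		if s.values[ln] == nil {
			s.values[ln] = map[model.LabelValue]struct{}{}
		}
		s.values[ln][lv] = struct{}{}
	}
}

func (s *memStorage) LabelValuesForLabelName(ln model.LabelName) model.LabelValues {
	var lvs model.LabelValues
	for lv := range s.values[ln] {
		lvs = append(lvs, lv)
	}
	return lvs
}

func main() {
	var st miniStorage = newMemStorage()
	st.Append(&model.Sample{
		Metric:    model.Metric{"__name__": "up", "job": "api"},
		Value:     1,
		Timestamp: model.Now(),
	})
	fmt.Println(st.LabelValuesForLabelName("job")) // [api]
}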

@ -3,7 +3,7 @@ package local
import ( import (
"sync" "sync"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
// fingerprintLocker allows locking individual fingerprints. To limit the number // fingerprintLocker allows locking individual fingerprints. To limit the number
@ -33,11 +33,11 @@ func newFingerprintLocker(preallocatedMutexes int) *fingerprintLocker {
} }
// Lock locks the given fingerprint. // Lock locks the given fingerprint.
func (l *fingerprintLocker) Lock(fp clientmodel.Fingerprint) { func (l *fingerprintLocker) Lock(fp model.Fingerprint) {
l.fpMtxs[uint(fp)%l.numFpMtxs].Lock() l.fpMtxs[uint(fp)%l.numFpMtxs].Lock()
} }
// Unlock unlocks the given fingerprint. // Unlock unlocks the given fingerprint.
func (l *fingerprintLocker) Unlock(fp clientmodel.Fingerprint) { func (l *fingerprintLocker) Unlock(fp model.Fingerprint) {
l.fpMtxs[uint(fp)%l.numFpMtxs].Unlock() l.fpMtxs[uint(fp)%l.numFpMtxs].Unlock()
} }

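The fingerprintLocker above avoids one mutex per fingerprint by keeping a fixed pool of mutexes and picking one by fingerprint modulo pool size; unrelated fingerprints may share a mutex, which is safe but occasionally serializes unrelated work. A minimal sketch of that sharding idea (pool size and types are illustrative):

package main

import (
	"fmt"
	"sync"
)

type fpLocker struct {
	mtxs []sync.Mutex
}

func newFPLocker(n int) *fpLocker {
	return &fpLocker{mtxs: make([]sync.Mutex, n)}
}

func (l *fpLocker) Lock(fp uint64)   { l.mtxs[fp%uint64(len(l.mtxs))].Lock() }
func (l *fpLocker) Unlock(fp uint64) { l.mtxs[fp%uint64(len(l.mtxs))].Unlock() }

func main() {
	locker := newFPLocker(16)
	var wg sync.WaitGroup
	counts := make([]int, 4)
	for fp := uint64(0); fp < 4; fp++ {
		for i := 0; i < 100; i++ {
			wg.Add(1)
			go func(fp uint64) {
				defer wg.Done()
				locker.Lock(fp)
				counts[fp]++ // protected by the mutex this fingerprint maps to
				locker.Unlock(fp)
			}(fp)
		}
	}
	wg.Wait()
	fmt.Println(counts) // [100 100 100 100]
}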

@ -4,7 +4,7 @@ import (
"sync" "sync"
"testing" "testing"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
func BenchmarkFingerprintLockerParallel(b *testing.B) { func BenchmarkFingerprintLockerParallel(b *testing.B) {
@ -19,8 +19,8 @@ func BenchmarkFingerprintLockerParallel(b *testing.B) {
wg.Add(1) wg.Add(1)
go func(i int) { go func(i int) {
for j := 0; j < numLockOps; j++ { for j := 0; j < numLockOps; j++ {
fp1 := clientmodel.Fingerprint(j % numFingerprints) fp1 := model.Fingerprint(j % numFingerprints)
fp2 := clientmodel.Fingerprint(j%numFingerprints + numFingerprints) fp2 := model.Fingerprint(j%numFingerprints + numFingerprints)
locker.Lock(fp1) locker.Lock(fp1)
locker.Lock(fp2) locker.Lock(fp2)
locker.Unlock(fp2) locker.Unlock(fp2)
@ -38,7 +38,7 @@ func BenchmarkFingerprintLockerSerial(b *testing.B) {
b.ResetTimer() b.ResetTimer()
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
fp := clientmodel.Fingerprint(i % numFingerprints) fp := model.Fingerprint(i % numFingerprints)
locker.Lock(fp) locker.Lock(fp)
locker.Unlock(fp) locker.Unlock(fp)
} }


@ -10,22 +10,22 @@ import (
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
const maxMappedFP = 1 << 20 // About 1M fingerprints reserved for mapping. const maxMappedFP = 1 << 20 // About 1M fingerprints reserved for mapping.
var separatorString = string([]byte{clientmodel.SeparatorByte}) var separatorString = string([]byte{model.SeparatorByte})
// fpMappings maps original fingerprints to a map of string representations of // fpMappings maps original fingerprints to a map of string representations of
// metrics to the truly unique fingerprint. // metrics to the truly unique fingerprint.
type fpMappings map[clientmodel.Fingerprint]map[string]clientmodel.Fingerprint type fpMappings map[model.Fingerprint]map[string]model.Fingerprint
// fpMapper is used to map fingerprints in order to work around fingerprint // fpMapper is used to map fingerprints in order to work around fingerprint
// collisions. // collisions.
type fpMapper struct { type fpMapper struct {
// highestMappedFP has to be aligned for atomic operations. // highestMappedFP has to be aligned for atomic operations.
highestMappedFP clientmodel.Fingerprint highestMappedFP model.Fingerprint
mtx sync.RWMutex // Protects mappings. mtx sync.RWMutex // Protects mappings.
mappings fpMappings mappings fpMappings
@ -65,7 +65,7 @@ func newFPMapper(fpToSeries *seriesMap, p *persistence) (*fpMapper, error) {
// //
// If an error is encountered, it is returned together with the unchanged raw // If an error is encountered, it is returned together with the unchanged raw
// fingerprint. // fingerprint.
func (m *fpMapper) mapFP(fp clientmodel.Fingerprint, metric clientmodel.Metric) (clientmodel.Fingerprint, error) { func (m *fpMapper) mapFP(fp model.Fingerprint, metric model.Metric) (model.Fingerprint, error) {
// First check if we are in the reserved FP space, in which case this is // First check if we are in the reserved FP space, in which case this is
// automatically a collision that has to be mapped. // automatically a collision that has to be mapped.
if fp <= maxMappedFP { if fp <= maxMappedFP {
@ -125,9 +125,9 @@ func (m *fpMapper) mapFP(fp clientmodel.Fingerprint, metric clientmodel.Metric)
// adds it to the collisions map if not yet there. In any case, it returns the // adds it to the collisions map if not yet there. In any case, it returns the
// truly unique fingerprint for the colliding metric. // truly unique fingerprint for the colliding metric.
func (m *fpMapper) maybeAddMapping( func (m *fpMapper) maybeAddMapping(
fp clientmodel.Fingerprint, fp model.Fingerprint,
collidingMetric clientmodel.Metric, collidingMetric model.Metric,
) (clientmodel.Fingerprint, error) { ) (model.Fingerprint, error) {
ms := metricToUniqueString(collidingMetric) ms := metricToUniqueString(collidingMetric)
m.mtx.RLock() m.mtx.RLock()
mappedFPs, ok := m.mappings[fp] mappedFPs, ok := m.mappings[fp]
@ -153,7 +153,7 @@ func (m *fpMapper) maybeAddMapping(
} }
// This is the first collision for fp. // This is the first collision for fp.
mappedFP := m.nextMappedFP() mappedFP := m.nextMappedFP()
mappedFPs = map[string]clientmodel.Fingerprint{ms: mappedFP} mappedFPs = map[string]model.Fingerprint{ms: mappedFP}
m.mtx.Lock() m.mtx.Lock()
m.mappings[fp] = mappedFPs m.mappings[fp] = mappedFPs
m.mappingsCounter.Inc() m.mappingsCounter.Inc()
@ -167,8 +167,8 @@ func (m *fpMapper) maybeAddMapping(
return mappedFP, err return mappedFP, err
} }
func (m *fpMapper) nextMappedFP() clientmodel.Fingerprint { func (m *fpMapper) nextMappedFP() model.Fingerprint {
mappedFP := clientmodel.Fingerprint(atomic.AddUint64((*uint64)(&m.highestMappedFP), 1)) mappedFP := model.Fingerprint(atomic.AddUint64((*uint64)(&m.highestMappedFP), 1))
if mappedFP > maxMappedFP { if mappedFP > maxMappedFP {
panic(fmt.Errorf("more than %v fingerprints mapped in collision detection", maxMappedFP)) panic(fmt.Errorf("more than %v fingerprints mapped in collision detection", maxMappedFP))
} }
@ -192,7 +192,7 @@ func (m *fpMapper) Collect(ch chan<- prometheus.Metric) {
// FastFingerprint function, and its result is not suitable as a key for maps // FastFingerprint function, and its result is not suitable as a key for maps
// and indexes as it might become really large, causing a lot of hashing effort // and indexes as it might become really large, causing a lot of hashing effort
// in maps and a lot of storage overhead in indexes. // in maps and a lot of storage overhead in indexes.
func metricToUniqueString(m clientmodel.Metric) string { func metricToUniqueString(m model.Metric) string {
parts := make([]string, 0, len(m)) parts := make([]string, 0, len(m))
for ln, lv := range m { for ln, lv := range m {
parts = append(parts, string(ln)+separatorString+string(lv)) parts = append(parts, string(ln)+separatorString+string(lv))

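The collision handling above hinges on two pieces: an exact, order-independent string form of the metric (metricToUniqueString) that can serve as a map key where the lossy fingerprint hash cannot, and a reserved fingerprint range (up to maxMappedFP) from which substitutes are handed out. The sketch below condenses that idea; it deliberately ignores the series map and archive checks of the real mapper, and every name in it is illustrative:

package main

import (
	"fmt"
	"sort"
	"strings"
)

const maxMapped = 1 << 20 // reserved fingerprint space for mappings (assumed)

type metric map[string]string

// uniqueString is an exact, order-independent representation of a metric,
// built from sorted, separator-joined label/value pairs.
func uniqueString(m metric) string {
	parts := make([]string, 0, len(m))
	for ln, lv := range m {
		parts = append(parts, ln+"\xff"+lv)
	}
	sort.Strings(parts)
	return strings.Join(parts, "\xff")
}

type mapper struct {
	next     uint64                       // next substitute fingerprint to hand out
	mappings map[uint64]map[string]uint64 // raw fp -> exact metric string -> mapped fp
}

func (m *mapper) mapFP(fp uint64, mt metric) uint64 {
	byMetric, ok := m.mappings[fp]
	if !ok {
		byMetric = map[string]uint64{}
		m.mappings[fp] = byMetric
	}
	ms := uniqueString(mt)
	if mapped, ok := byMetric[ms]; ok {
		return mapped
	}
	if len(byMetric) == 0 {
		// The first metric seen for this raw fingerprint keeps it.
		byMetric[ms] = fp
		return fp
	}
	// Collision: hand out a fingerprint from the reserved range.
	m.next++
	if m.next > maxMapped {
		panic("reserved fingerprint space exhausted")
	}
	byMetric[ms] = m.next
	return m.next
}

func main() {
	mp := &mapper{mappings: map[uint64]map[string]uint64{}}
	a := metric{"foo": "bar"}
	b := metric{"bar": "foo"} // pretend this collides with a on the same raw fp
	fmt.Println(mp.mapFP(5000000, a)) // 5000000 (keeps its raw fingerprint)
	fmt.Println(mp.mapFP(5000000, b)) // 1 (substitute from the reserved range)
	fmt.Println(mp.mapFP(5000000, a)) // 5000000 again (mapping is stable)
}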

@ -3,7 +3,7 @@ package local
import ( import (
"testing" "testing"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
var ( var (
@ -13,31 +13,31 @@ var (
// Note that fingerprints are set and not actually calculated. // Note that fingerprints are set and not actually calculated.
// The collision detection is independent from the actually used // The collision detection is independent from the actually used
// fingerprinting algorithm. // fingerprinting algorithm.
fp1 = clientmodel.Fingerprint(maxMappedFP + 1) fp1 = model.Fingerprint(maxMappedFP + 1)
fp2 = clientmodel.Fingerprint(maxMappedFP + 2) fp2 = model.Fingerprint(maxMappedFP + 2)
fp3 = clientmodel.Fingerprint(1) fp3 = model.Fingerprint(1)
cm11 = clientmodel.Metric{ cm11 = model.Metric{
"foo": "bar", "foo": "bar",
"dings": "bumms", "dings": "bumms",
} }
cm12 = clientmodel.Metric{ cm12 = model.Metric{
"bar": "foo", "bar": "foo",
} }
cm13 = clientmodel.Metric{ cm13 = model.Metric{
"foo": "bar", "foo": "bar",
} }
cm21 = clientmodel.Metric{ cm21 = model.Metric{
"foo": "bumms", "foo": "bumms",
"dings": "bar", "dings": "bar",
} }
cm22 = clientmodel.Metric{ cm22 = model.Metric{
"dings": "foo", "dings": "foo",
"bar": "bumms", "bar": "bumms",
} }
cm31 = clientmodel.Metric{ cm31 = model.Metric{
"bumms": "dings", "bumms": "dings",
} }
cm32 = clientmodel.Metric{ cm32 = model.Metric{
"bumms": "dings", "bumms": "dings",
"bar": "foo", "bar": "foo",
} }
@ -84,12 +84,12 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP { if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
// The mapped cm12 is added to sm, too. That should not change the outcome. // The mapped cm12 is added to sm, too. That should not change the outcome.
sm.put(clientmodel.Fingerprint(1), &memorySeries{metric: cm12}) sm.put(model.Fingerprint(1), &memorySeries{metric: cm12})
gotFP, err = mapper.mapFP(fp1, cm11) gotFP, err = mapper.mapFP(fp1, cm11)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
@ -101,7 +101,7 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP { if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
@ -110,19 +110,19 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP { if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp1, cm13) gotFP, err = mapper.mapFP(fp1, cm13)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP { if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
// Add cm13 to sm. Should not change anything. // Add cm13 to sm. Should not change anything.
sm.put(clientmodel.Fingerprint(2), &memorySeries{metric: cm13}) sm.put(model.Fingerprint(2), &memorySeries{metric: cm13})
gotFP, err = mapper.mapFP(fp1, cm11) gotFP, err = mapper.mapFP(fp1, cm11)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
@ -134,14 +134,14 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP { if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp1, cm13) gotFP, err = mapper.mapFP(fp1, cm13)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP { if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
@ -165,10 +165,10 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP { if wantFP := model.Fingerprint(3); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
sm.put(clientmodel.Fingerprint(3), &memorySeries{metric: cm22}) sm.put(model.Fingerprint(3), &memorySeries{metric: cm22})
gotFP, err = mapper.mapFP(fp2, cm21) gotFP, err = mapper.mapFP(fp2, cm21)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
@ -180,7 +180,7 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP { if wantFP := model.Fingerprint(3); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
@ -189,20 +189,20 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(4); gotFP != wantFP { if wantFP := model.Fingerprint(4); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
sm.put(clientmodel.Fingerprint(4), &memorySeries{metric: cm31}) sm.put(model.Fingerprint(4), &memorySeries{metric: cm31})
// Map cm32, which is now mapped for two reasons... // Map cm32, which is now mapped for two reasons...
gotFP, err = mapper.mapFP(fp3, cm32) gotFP, err = mapper.mapFP(fp3, cm32)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(5); gotFP != wantFP { if wantFP := model.Fingerprint(5); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
sm.put(clientmodel.Fingerprint(5), &memorySeries{metric: cm32}) sm.put(model.Fingerprint(5), &memorySeries{metric: cm32})
// Now check ALL the mappings, just to be sure. // Now check ALL the mappings, just to be sure.
gotFP, err = mapper.mapFP(fp1, cm11) gotFP, err = mapper.mapFP(fp1, cm11)
@ -216,14 +216,14 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP { if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp1, cm13) gotFP, err = mapper.mapFP(fp1, cm13)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP { if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp2, cm21) gotFP, err = mapper.mapFP(fp2, cm21)
@ -237,21 +237,21 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP { if wantFP := model.Fingerprint(3); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp3, cm31) gotFP, err = mapper.mapFP(fp3, cm31)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(4); gotFP != wantFP { if wantFP := model.Fingerprint(4); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp3, cm32) gotFP, err = mapper.mapFP(fp3, cm32)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(5); gotFP != wantFP { if wantFP := model.Fingerprint(5); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
@ -271,14 +271,14 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP { if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp1, cm13) gotFP, err = mapper.mapFP(fp1, cm13)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP { if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp2, cm21) gotFP, err = mapper.mapFP(fp2, cm21)
@ -292,21 +292,21 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP { if wantFP := model.Fingerprint(3); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp3, cm31) gotFP, err = mapper.mapFP(fp3, cm31)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(4); gotFP != wantFP { if wantFP := model.Fingerprint(4); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp3, cm32) gotFP, err = mapper.mapFP(fp3, cm32)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(5); gotFP != wantFP { if wantFP := model.Fingerprint(5); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
@ -327,14 +327,14 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP { if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp1, cm13) gotFP, err = mapper.mapFP(fp1, cm13)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP { if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp2, cm21) gotFP, err = mapper.mapFP(fp2, cm21)
@ -348,21 +348,21 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP { if wantFP := model.Fingerprint(3); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp3, cm31) gotFP, err = mapper.mapFP(fp3, cm31)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(4); gotFP != wantFP { if wantFP := model.Fingerprint(4); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
gotFP, err = mapper.mapFP(fp3, cm32) gotFP, err = mapper.mapFP(fp3, cm32)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(5); gotFP != wantFP { if wantFP := model.Fingerprint(5); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
@ -384,7 +384,7 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP { // Old mapping still applied. if wantFP := model.Fingerprint(3); gotFP != wantFP { // Old mapping still applied.
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }
@ -396,7 +396,7 @@ func TestFPMapper(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if wantFP := clientmodel.Fingerprint(6); gotFP != wantFP { if wantFP := model.Fingerprint(6); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP) t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
} }


@ -31,7 +31,7 @@ import (
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/local/codable" "github.com/prometheus/prometheus/storage/local/codable"
"github.com/prometheus/prometheus/storage/local/index" "github.com/prometheus/prometheus/storage/local/index"
@ -76,7 +76,7 @@ const (
indexingQueueCapacity = 1024 * 16 indexingQueueCapacity = 1024 * 16
) )
var fpLen = len(clientmodel.Fingerprint(0).String()) // Length of a fingerprint as string. var fpLen = len(model.Fingerprint(0).String()) // Length of a fingerprint as string.
const ( const (
flagHeadChunkPersisted byte = 1 << iota flagHeadChunkPersisted byte = 1 << iota
@ -93,8 +93,8 @@ const (
) )
type indexingOp struct { type indexingOp struct {
fingerprint clientmodel.Fingerprint fingerprint model.Fingerprint
metric clientmodel.Metric metric model.Metric
opType indexingOpType opType indexingOpType
} }
@ -335,7 +335,7 @@ func (p *persistence) setDirty(dirty bool) {
// pair. This method is goroutine-safe but take into account that metrics queued // pair. This method is goroutine-safe but take into account that metrics queued
// for indexing with IndexMetric might not have made it into the index // for indexing with IndexMetric might not have made it into the index
// yet. (Same applies correspondingly to UnindexMetric.) // yet. (Same applies correspondingly to UnindexMetric.)
func (p *persistence) fingerprintsForLabelPair(lp metric.LabelPair) (clientmodel.Fingerprints, error) { func (p *persistence) fingerprintsForLabelPair(lp metric.LabelPair) (model.Fingerprints, error) {
fps, _, err := p.labelPairToFingerprints.Lookup(lp) fps, _, err := p.labelPairToFingerprints.Lookup(lp)
if err != nil { if err != nil {
return nil, err return nil, err
@ -347,7 +347,7 @@ func (p *persistence) fingerprintsForLabelPair(lp metric.LabelPair) (clientmodel
// name. This method is goroutine-safe but take into account that metrics queued // name. This method is goroutine-safe but take into account that metrics queued
// for indexing with IndexMetric might not have made it into the index // for indexing with IndexMetric might not have made it into the index
// yet. (Same applies correspondingly to UnindexMetric.) // yet. (Same applies correspondingly to UnindexMetric.)
func (p *persistence) labelValuesForLabelName(ln clientmodel.LabelName) (clientmodel.LabelValues, error) { func (p *persistence) labelValuesForLabelName(ln model.LabelName) (model.LabelValues, error) {
lvs, _, err := p.labelNameToLabelValues.Lookup(ln) lvs, _, err := p.labelNameToLabelValues.Lookup(ln)
if err != nil { if err != nil {
return nil, err return nil, err
@ -361,7 +361,7 @@ func (p *persistence) labelValuesForLabelName(ln clientmodel.LabelName) (clientm
// the (zero-based) index of the first persisted chunk within the series // the (zero-based) index of the first persisted chunk within the series
// file. In case of an error, the returned index is -1 (to avoid the // file. In case of an error, the returned index is -1 (to avoid the
// misconception that the chunk was written at position 0). // misconception that the chunk was written at position 0).
func (p *persistence) persistChunks(fp clientmodel.Fingerprint, chunks []chunk) (index int, err error) { func (p *persistence) persistChunks(fp model.Fingerprint, chunks []chunk) (index int, err error) {
defer func() { defer func() {
if err != nil { if err != nil {
log.Error("Error persisting chunks: ", err) log.Error("Error persisting chunks: ", err)
@ -397,7 +397,7 @@ func (p *persistence) persistChunks(fp clientmodel.Fingerprint, chunks []chunk)
// incrementally larger indexes. The indexOffset denotes the offset to be added to // incrementally larger indexes. The indexOffset denotes the offset to be added to
// each index in indexes. It is the caller's responsibility to not persist or // each index in indexes. It is the caller's responsibility to not persist or
// drop anything for the same fingerprint concurrently. // drop anything for the same fingerprint concurrently.
func (p *persistence) loadChunks(fp clientmodel.Fingerprint, indexes []int, indexOffset int) ([]chunk, error) { func (p *persistence) loadChunks(fp model.Fingerprint, indexes []int, indexOffset int) ([]chunk, error) {
f, err := p.openChunkFileForReading(fp) f, err := p.openChunkFileForReading(fp)
if err != nil { if err != nil {
return nil, err return nil, err
@ -448,7 +448,7 @@ func (p *persistence) loadChunks(fp clientmodel.Fingerprint, indexes []int, inde
// the number of chunkDescs to skip from the end of the series file. It is the // the number of chunkDescs to skip from the end of the series file. It is the
// caller's responsibility to not persist or drop anything for the same // caller's responsibility to not persist or drop anything for the same
// fingerprint concurrently. // fingerprint concurrently.
func (p *persistence) loadChunkDescs(fp clientmodel.Fingerprint, offsetFromEnd int) ([]*chunkDesc, error) { func (p *persistence) loadChunkDescs(fp model.Fingerprint, offsetFromEnd int) ([]*chunkDesc, error) {
f, err := p.openChunkFileForReading(fp) f, err := p.openChunkFileForReading(fp)
if os.IsNotExist(err) { if os.IsNotExist(err) {
return nil, nil return nil, nil
@ -484,8 +484,8 @@ func (p *persistence) loadChunkDescs(fp clientmodel.Fingerprint, offsetFromEnd i
return nil, err return nil, err
} }
cds[i] = &chunkDesc{ cds[i] = &chunkDesc{
chunkFirstTime: clientmodel.Timestamp(binary.LittleEndian.Uint64(chunkTimesBuf)), chunkFirstTime: model.Time(binary.LittleEndian.Uint64(chunkTimesBuf)),
chunkLastTime: clientmodel.Timestamp(binary.LittleEndian.Uint64(chunkTimesBuf[8:])), chunkLastTime: model.Time(binary.LittleEndian.Uint64(chunkTimesBuf[8:])),
} }
} }
chunkDescOps.WithLabelValues(load).Add(float64(len(cds))) chunkDescOps.WithLabelValues(load).Add(float64(len(cds)))
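The loadChunkDescs hunk above rebuilds each chunkDesc from a 16-byte record: the first timestamp in bytes 0 through 7 and the last timestamp in bytes 8 through 15, both little-endian uint64. A small round-trip sketch of that encoding with plain int64 millisecond timestamps standing in for the model types:

package main

import (
	"encoding/binary"
	"fmt"
)

type chunkDesc struct {
	firstTime, lastTime int64
}

// encodeTimes packs the two timestamps into the 16-byte layout described above.
func encodeTimes(d chunkDesc) []byte {
	buf := make([]byte, 16)
	binary.LittleEndian.PutUint64(buf, uint64(d.firstTime))
	binary.LittleEndian.PutUint64(buf[8:], uint64(d.lastTime))
	return buf
}

// decodeTimes is the read path: first time from bytes 0-7, last time from 8-15.
func decodeTimes(buf []byte) chunkDesc {
	return chunkDesc{
		firstTime: int64(binary.LittleEndian.Uint64(buf)),
		lastTime:  int64(binary.LittleEndian.Uint64(buf[8:])),
	}
}

func main() {
	in := chunkDesc{firstTime: 1440000000000, lastTime: 1440000300000}
	out := decodeTimes(encodeTimes(in))
	fmt.Println(out == in) // true
}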
@ -681,7 +681,7 @@ func (p *persistence) checkpointSeriesMapAndHeads(fingerprintToSeries *seriesMap
// utterly goroutine-unsafe. // utterly goroutine-unsafe.
func (p *persistence) loadSeriesMapAndHeads() (sm *seriesMap, chunksToPersist int64, err error) { func (p *persistence) loadSeriesMapAndHeads() (sm *seriesMap, chunksToPersist int64, err error) {
var chunkDescsTotal int64 var chunkDescsTotal int64
fingerprintToSeries := make(map[clientmodel.Fingerprint]*memorySeries) fingerprintToSeries := make(map[model.Fingerprint]*memorySeries)
sm = &seriesMap{m: fingerprintToSeries} sm = &seriesMap{m: fingerprintToSeries}
defer func() { defer func() {
@ -819,8 +819,8 @@ func (p *persistence) loadSeriesMapAndHeads() (sm *seriesMap, chunksToPersist in
return sm, chunksToPersist, nil return sm, chunksToPersist, nil
} }
chunkDescs[i] = &chunkDesc{ chunkDescs[i] = &chunkDesc{
chunkFirstTime: clientmodel.Timestamp(firstTime), chunkFirstTime: model.Time(firstTime),
chunkLastTime: clientmodel.Timestamp(lastTime), chunkLastTime: model.Time(lastTime),
} }
chunkDescsTotal++ chunkDescsTotal++
} else { } else {
@ -842,13 +842,13 @@ func (p *persistence) loadSeriesMapAndHeads() (sm *seriesMap, chunksToPersist in
} }
} }
fingerprintToSeries[clientmodel.Fingerprint(fp)] = &memorySeries{ fingerprintToSeries[model.Fingerprint(fp)] = &memorySeries{
metric: clientmodel.Metric(metric), metric: model.Metric(metric),
chunkDescs: chunkDescs, chunkDescs: chunkDescs,
persistWatermark: int(persistWatermark), persistWatermark: int(persistWatermark),
modTime: modTime, modTime: modTime,
chunkDescsOffset: int(chunkDescsOffset), chunkDescsOffset: int(chunkDescsOffset),
savedFirstTime: clientmodel.Timestamp(savedFirstTime), savedFirstTime: model.Time(savedFirstTime),
lastTime: chunkDescs[len(chunkDescs)-1].lastTime(), lastTime: chunkDescs[len(chunkDescs)-1].lastTime(),
headChunkClosed: persistWatermark >= numChunkDescs, headChunkClosed: persistWatermark >= numChunkDescs,
} }
@ -866,9 +866,9 @@ func (p *persistence) loadSeriesMapAndHeads() (sm *seriesMap, chunksToPersist in
// It is the caller's responsibility to make sure nothing is persisted or loaded // It is the caller's responsibility to make sure nothing is persisted or loaded
// for the same fingerprint concurrently. // for the same fingerprint concurrently.
func (p *persistence) dropAndPersistChunks( func (p *persistence) dropAndPersistChunks(
fp clientmodel.Fingerprint, beforeTime clientmodel.Timestamp, chunks []chunk, fp model.Fingerprint, beforeTime model.Time, chunks []chunk,
) ( ) (
firstTimeNotDropped clientmodel.Timestamp, firstTimeNotDropped model.Time,
offset int, offset int,
numDropped int, numDropped int,
allDropped bool, allDropped bool,
@ -952,11 +952,11 @@ func (p *persistence) dropAndPersistChunks(
if err != nil { if err != nil {
return return
} }
lastTime := clientmodel.Timestamp( lastTime := model.Time(
binary.LittleEndian.Uint64(headerBuf[chunkHeaderLastTimeOffset:]), binary.LittleEndian.Uint64(headerBuf[chunkHeaderLastTimeOffset:]),
) )
if !lastTime.Before(beforeTime) { if !lastTime.Before(beforeTime) {
firstTimeNotDropped = clientmodel.Timestamp( firstTimeNotDropped = model.Time(
binary.LittleEndian.Uint64(headerBuf[chunkHeaderFirstTimeOffset:]), binary.LittleEndian.Uint64(headerBuf[chunkHeaderFirstTimeOffset:]),
) )
chunkOps.WithLabelValues(drop).Add(float64(numDropped)) chunkOps.WithLabelValues(drop).Add(float64(numDropped))
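The drop loop in dropAndPersistChunks scans chunk headers in the series file, drops every leading chunk whose last timestamp lies before the purge horizon, and remembers the first timestamp of the first chunk it keeps. The sketch below reproduces just that decision over in-memory headers; the struct and field names are illustrative, not the on-disk layout:

package main

import "fmt"

type chunkHeader struct {
	firstTime, lastTime int64
}

// dropBefore returns how many leading chunks can be dropped and, if any chunk
// survives, the first timestamp that is kept.
func dropBefore(headers []chunkHeader, beforeTime int64) (numDropped int, firstKept int64, allDropped bool) {
	for _, h := range headers {
		if h.lastTime >= beforeTime {
			return numDropped, h.firstTime, false
		}
		numDropped++
	}
	return numDropped, 0, true
}

func main() {
	headers := []chunkHeader{
		{firstTime: 100, lastTime: 199},
		{firstTime: 200, lastTime: 299},
		{firstTime: 300, lastTime: 399},
	}
	n, first, all := dropBefore(headers, 250)
	fmt.Println(n, first, all) // 1 200 false
}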
@ -1008,7 +1008,7 @@ func (p *persistence) dropAndPersistChunks(
// deleteSeriesFile deletes a series file belonging to the provided // deleteSeriesFile deletes a series file belonging to the provided
// fingerprint. It returns the number of chunks that were contained in the // fingerprint. It returns the number of chunks that were contained in the
// deleted file. // deleted file.
func (p *persistence) deleteSeriesFile(fp clientmodel.Fingerprint) (int, error) { func (p *persistence) deleteSeriesFile(fp model.Fingerprint) (int, error) {
fname := p.fileNameForFingerprint(fp) fname := p.fileNameForFingerprint(fp)
fi, err := os.Stat(fname) fi, err := os.Stat(fname)
if os.IsNotExist(err) { if os.IsNotExist(err) {
@ -1029,7 +1029,7 @@ func (p *persistence) deleteSeriesFile(fp clientmodel.Fingerprint) (int, error)
// seriesFileModTime returns the modification time of the series file belonging // seriesFileModTime returns the modification time of the series file belonging
// to the provided fingerprint. In case of an error, the zero value of time.Time // to the provided fingerprint. In case of an error, the zero value of time.Time
// is returned. // is returned.
func (p *persistence) seriesFileModTime(fp clientmodel.Fingerprint) time.Time { func (p *persistence) seriesFileModTime(fp model.Fingerprint) time.Time {
var modTime time.Time var modTime time.Time
if fi, err := os.Stat(p.fileNameForFingerprint(fp)); err == nil { if fi, err := os.Stat(p.fileNameForFingerprint(fp)); err == nil {
return fi.ModTime() return fi.ModTime()
@ -1041,7 +1041,7 @@ func (p *persistence) seriesFileModTime(fp clientmodel.Fingerprint) time.Time {
// fingerprintsForLabelPair, labelValuesForLabelName, and // fingerprintsForLabelPair, labelValuesForLabelName, and
// fingerprintsModifiedBefore. If the queue is full, this method blocks until // fingerprintsModifiedBefore. If the queue is full, this method blocks until
// the metric can be queued. This method is goroutine-safe. // the metric can be queued. This method is goroutine-safe.
func (p *persistence) indexMetric(fp clientmodel.Fingerprint, m clientmodel.Metric) { func (p *persistence) indexMetric(fp model.Fingerprint, m model.Metric) {
p.indexingQueue <- indexingOp{fp, m, add} p.indexingQueue <- indexingOp{fp, m, add}
} }
@ -1052,7 +1052,7 @@ func (p *persistence) indexMetric(fp clientmodel.Fingerprint, m clientmodel.Metr
// archived metric. To purge an archived metric, call purgeArchivedFingerprint.) // archived metric. To purge an archived metric, call purgeArchivedFingerprint.)
// If the queue is full, this method blocks until the metric can be queued. This // If the queue is full, this method blocks until the metric can be queued. This
// method is goroutine-safe. // method is goroutine-safe.
func (p *persistence) unindexMetric(fp clientmodel.Fingerprint, m clientmodel.Metric) { func (p *persistence) unindexMetric(fp model.Fingerprint, m model.Metric) {
p.indexingQueue <- indexingOp{fp, m, remove} p.indexingQueue <- indexingOp{fp, m, remove}
} }
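indexMetric and unindexMetric above only enqueue an indexingOp on the indexingQueue channel; a background goroutine in the persistence layer (not shown in these hunks) drains the queue and applies the adds and removes to the indexes. A minimal, self-contained version of that producer/consumer shape, with plain Go types standing in for the fingerprint and metric:

package main

import (
	"fmt"
	"sync"
)

type opType int

const (
	add opType = iota
	remove
)

type indexingOp struct {
	fingerprint uint64
	labels      map[string]string
	op          opType
}

func main() {
	queue := make(chan indexingOp, 16)
	index := map[uint64]map[string]string{}

	var wg sync.WaitGroup
	wg.Add(1)
	go func() {
		defer wg.Done()
		// Drain the queue and apply each operation to the index.
		for op := range queue {
			switch op.op {
			case add:
				index[op.fingerprint] = op.labels
			case remove:
				delete(index, op.fingerprint)
			}
		}
	}()

	queue <- indexingOp{1, map[string]string{"job": "api"}, add}
	queue <- indexingOp{2, map[string]string{"job": "db"}, add}
	queue <- indexingOp{1, nil, remove}
	close(queue)
	wg.Wait()
	fmt.Println(index) // map[2:map[job:db]]
}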
@ -1074,7 +1074,7 @@ func (p *persistence) waitForIndexing() {
// metric, together with the first and last timestamp of the series belonging to // metric, together with the first and last timestamp of the series belonging to
// the metric. The caller must have locked the fingerprint. // the metric. The caller must have locked the fingerprint.
func (p *persistence) archiveMetric( func (p *persistence) archiveMetric(
fp clientmodel.Fingerprint, m clientmodel.Metric, first, last clientmodel.Timestamp, fp model.Fingerprint, m model.Metric, first, last model.Time,
) error { ) error {
if err := p.archivedFingerprintToMetrics.Put(codable.Fingerprint(fp), codable.Metric(m)); err != nil { if err := p.archivedFingerprintToMetrics.Put(codable.Fingerprint(fp), codable.Metric(m)); err != nil {
p.setDirty(true) p.setDirty(true)
@ -1090,8 +1090,8 @@ func (p *persistence) archiveMetric(
// hasArchivedMetric returns whether the archived metric for the given // hasArchivedMetric returns whether the archived metric for the given
// fingerprint exists and if yes, what the first and last timestamp in the // fingerprint exists and if yes, what the first and last timestamp in the
// corresponding series is. This method is goroutine-safe. // corresponding series is. This method is goroutine-safe.
func (p *persistence) hasArchivedMetric(fp clientmodel.Fingerprint) ( func (p *persistence) hasArchivedMetric(fp model.Fingerprint) (
hasMetric bool, firstTime, lastTime clientmodel.Timestamp, err error, hasMetric bool, firstTime, lastTime model.Time, err error,
) { ) {
firstTime, lastTime, hasMetric, err = p.archivedFingerprintToTimeRange.Lookup(fp) firstTime, lastTime, hasMetric, err = p.archivedFingerprintToTimeRange.Lookup(fp)
return return
@ -1101,7 +1101,7 @@ func (p *persistence) hasArchivedMetric(fp clientmodel.Fingerprint) (
// sure that the fingerprint is currently archived (the time range will // sure that the fingerprint is currently archived (the time range will
// otherwise be added without the corresponding metric in the archive). // otherwise be added without the corresponding metric in the archive).
func (p *persistence) updateArchivedTimeRange( func (p *persistence) updateArchivedTimeRange(
fp clientmodel.Fingerprint, first, last clientmodel.Timestamp, fp model.Fingerprint, first, last model.Time,
) error { ) error {
return p.archivedFingerprintToTimeRange.Put(codable.Fingerprint(fp), codable.TimeRange{First: first, Last: last}) return p.archivedFingerprintToTimeRange.Put(codable.Fingerprint(fp), codable.TimeRange{First: first, Last: last})
} }
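
archiveMetric, hasArchivedMetric and updateArchivedTimeRange now carry first/last timestamps as model.Time (millisecond resolution) instead of clientmodel.Timestamp. A minimal sketch of that API, with illustrative values:

    package main

    import (
        "fmt"
        "time"

        "github.com/prometheus/common/model"
    )

    func main() {
        first := model.TimeFromUnix(0)          // Unix epoch as model.Time
        last := model.Now().Add(-1 * time.Hour) // one hour ago
        fmt.Println(first.Before(last))         // true
        fmt.Println(last.Time().UTC())          // convert back to time.Time
    }
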
@ -1109,10 +1109,10 @@ func (p *persistence) updateArchivedTimeRange(
// fingerprintsModifiedBefore returns the fingerprints of archived timeseries // fingerprintsModifiedBefore returns the fingerprints of archived timeseries
// that have live samples before the provided timestamp. This method is // that have live samples before the provided timestamp. This method is
// goroutine-safe. // goroutine-safe.
func (p *persistence) fingerprintsModifiedBefore(beforeTime clientmodel.Timestamp) ([]clientmodel.Fingerprint, error) { func (p *persistence) fingerprintsModifiedBefore(beforeTime model.Time) ([]model.Fingerprint, error) {
var fp codable.Fingerprint var fp codable.Fingerprint
var tr codable.TimeRange var tr codable.TimeRange
fps := []clientmodel.Fingerprint{} fps := []model.Fingerprint{}
p.archivedFingerprintToTimeRange.ForEach(func(kv index.KeyValueAccessor) error { p.archivedFingerprintToTimeRange.ForEach(func(kv index.KeyValueAccessor) error {
if err := kv.Value(&tr); err != nil { if err := kv.Value(&tr); err != nil {
return err return err
@ -1121,7 +1121,7 @@ func (p *persistence) fingerprintsModifiedBefore(beforeTime clientmodel.Timestam
if err := kv.Key(&fp); err != nil { if err := kv.Key(&fp); err != nil {
return err return err
} }
fps = append(fps, clientmodel.Fingerprint(fp)) fps = append(fps, model.Fingerprint(fp))
} }
return nil return nil
}) })
@ -1130,7 +1130,7 @@ func (p *persistence) fingerprintsModifiedBefore(beforeTime clientmodel.Timestam
// archivedMetric retrieves the archived metric with the given fingerprint. This // archivedMetric retrieves the archived metric with the given fingerprint. This
// method is goroutine-safe. // method is goroutine-safe.
func (p *persistence) archivedMetric(fp clientmodel.Fingerprint) (clientmodel.Metric, error) { func (p *persistence) archivedMetric(fp model.Fingerprint) (model.Metric, error) {
metric, _, err := p.archivedFingerprintToMetrics.Lookup(fp) metric, _, err := p.archivedFingerprintToMetrics.Lookup(fp)
return metric, err return metric, err
} }
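
The filtering done by fingerprintsModifiedBefore boils down to comparing each archived series' first timestamp against a cutoff. A simplified sketch of the same idea over a plain in-memory map (the data is made up):

    package main

    import (
        "fmt"

        "github.com/prometheus/common/model"
    )

    func main() {
        // First-sample timestamp per archived series, keyed by fingerprint.
        firstTimes := map[model.Fingerprint]model.Time{1: 2, 2: 1, 3: 5}
        cutoff := model.Time(3)

        var fps []model.Fingerprint
        for fp, ts := range firstTimes {
            if ts.Before(cutoff) {
                fps = append(fps, fp)
            }
        }
        fmt.Println(fps) // fingerprints 1 and 2, in arbitrary map order
    }
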
@ -1139,7 +1139,7 @@ func (p *persistence) archivedMetric(fp clientmodel.Fingerprint) (clientmodel.Me
// metric entirely. It also queues the metric for un-indexing (no need to call // metric entirely. It also queues the metric for un-indexing (no need to call
// unindexMetric for the deleted metric.) It does not touch the series file, // unindexMetric for the deleted metric.) It does not touch the series file,
// though. The caller must have locked the fingerprint. // though. The caller must have locked the fingerprint.
func (p *persistence) purgeArchivedMetric(fp clientmodel.Fingerprint) (err error) { func (p *persistence) purgeArchivedMetric(fp model.Fingerprint) (err error) {
defer func() { defer func() {
if err != nil { if err != nil {
p.setDirty(true) p.setDirty(true)
@ -1172,7 +1172,7 @@ func (p *persistence) purgeArchivedMetric(fp clientmodel.Fingerprint) (err error
// contrast to purgeArchivedMetric) does not un-index the metric. If a metric // contrast to purgeArchivedMetric) does not un-index the metric. If a metric
// was actually deleted, the method returns true and the first time and last // was actually deleted, the method returns true and the first time and last
// time of the deleted metric. The caller must have locked the fingerprint. // time of the deleted metric. The caller must have locked the fingerprint.
func (p *persistence) unarchiveMetric(fp clientmodel.Fingerprint) (deletedAnything bool, err error) { func (p *persistence) unarchiveMetric(fp model.Fingerprint) (deletedAnything bool, err error) {
defer func() { defer func() {
if err != nil { if err != nil {
p.setDirty(true) p.setDirty(true)
@ -1232,22 +1232,22 @@ func (p *persistence) close() error {
return lastError return lastError
} }
func (p *persistence) dirNameForFingerprint(fp clientmodel.Fingerprint) string { func (p *persistence) dirNameForFingerprint(fp model.Fingerprint) string {
fpStr := fp.String() fpStr := fp.String()
return path.Join(p.basePath, fpStr[0:seriesDirNameLen]) return path.Join(p.basePath, fpStr[0:seriesDirNameLen])
} }
func (p *persistence) fileNameForFingerprint(fp clientmodel.Fingerprint) string { func (p *persistence) fileNameForFingerprint(fp model.Fingerprint) string {
fpStr := fp.String() fpStr := fp.String()
return path.Join(p.basePath, fpStr[0:seriesDirNameLen], fpStr[seriesDirNameLen:]+seriesFileSuffix) return path.Join(p.basePath, fpStr[0:seriesDirNameLen], fpStr[seriesDirNameLen:]+seriesFileSuffix)
} }
func (p *persistence) tempFileNameForFingerprint(fp clientmodel.Fingerprint) string { func (p *persistence) tempFileNameForFingerprint(fp model.Fingerprint) string {
fpStr := fp.String() fpStr := fp.String()
return path.Join(p.basePath, fpStr[0:seriesDirNameLen], fpStr[seriesDirNameLen:]+seriesTempFileSuffix) return path.Join(p.basePath, fpStr[0:seriesDirNameLen], fpStr[seriesDirNameLen:]+seriesTempFileSuffix)
} }
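
The series file layout hangs off the fingerprint's fixed-width hex form: the first few characters name the directory, the rest the file. A sketch of that split; dirNameLen and the suffix below are illustrative stand-ins, not the package's constants:

    package main

    import (
        "fmt"
        "path"

        "github.com/prometheus/common/model"
    )

    func main() {
        const (
            dirNameLen = 2     // stand-in for seriesDirNameLen
            suffix     = ".db" // stand-in for seriesFileSuffix
        )
        fp := model.Metric{"job": "api-server"}.FastFingerprint()
        s := fp.String() // 16-character, zero-padded hex
        fmt.Println(path.Join("data", s[:dirNameLen], s[dirNameLen:]+suffix))
    }
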
func (p *persistence) openChunkFileForWriting(fp clientmodel.Fingerprint) (*os.File, error) { func (p *persistence) openChunkFileForWriting(fp model.Fingerprint) (*os.File, error) {
if err := os.MkdirAll(p.dirNameForFingerprint(fp), 0700); err != nil { if err := os.MkdirAll(p.dirNameForFingerprint(fp), 0700); err != nil {
return nil, err return nil, err
} }
@ -1272,7 +1272,7 @@ func (p *persistence) closeChunkFile(f *os.File) {
} }
} }
func (p *persistence) openChunkFileForReading(fp clientmodel.Fingerprint) (*os.File, error) { func (p *persistence) openChunkFileForReading(fp model.Fingerprint) (*os.File, error) {
return os.Open(p.fileNameForFingerprint(fp)) return os.Open(p.fileNameForFingerprint(fp))
} }
@ -1481,9 +1481,9 @@ func (p *persistence) checkpointFPMappings(fpm fpMappings) (err error) {
// mapped fingerprint and any error encountered. If p.mappingsFileName is not // mapped fingerprint and any error encountered. If p.mappingsFileName is not
// found, the method returns (fpMappings{}, 0, nil). Do not call concurrently // found, the method returns (fpMappings{}, 0, nil). Do not call concurrently
// with checkpointFPMappings. // with checkpointFPMappings.
func (p *persistence) loadFPMappings() (fpMappings, clientmodel.Fingerprint, error) { func (p *persistence) loadFPMappings() (fpMappings, model.Fingerprint, error) {
fpm := fpMappings{} fpm := fpMappings{}
var highestMappedFP clientmodel.Fingerprint var highestMappedFP model.Fingerprint
f, err := os.Open(p.mappingsFileName()) f, err := os.Open(p.mappingsFileName())
if os.IsNotExist(err) { if os.IsNotExist(err) {
@ -1523,7 +1523,7 @@ func (p *persistence) loadFPMappings() (fpMappings, clientmodel.Fingerprint, err
if err != nil { if err != nil {
return nil, 0, err return nil, 0, err
} }
mappings := make(map[string]clientmodel.Fingerprint, numMappings) mappings := make(map[string]model.Fingerprint, numMappings)
for ; numMappings > 0; numMappings-- { for ; numMappings > 0; numMappings-- {
lenMS, err := binary.ReadUvarint(r) lenMS, err := binary.ReadUvarint(r)
if err != nil { if err != nil {
@ -1537,13 +1537,13 @@ func (p *persistence) loadFPMappings() (fpMappings, clientmodel.Fingerprint, err
if err != nil { if err != nil {
return nil, 0, err return nil, 0, err
} }
mappedFP := clientmodel.Fingerprint(fp) mappedFP := model.Fingerprint(fp)
if mappedFP > highestMappedFP { if mappedFP > highestMappedFP {
highestMappedFP = mappedFP highestMappedFP = mappedFP
} }
mappings[string(buf)] = mappedFP mappings[string(buf)] = mappedFP
} }
fpm[clientmodel.Fingerprint(rawFP)] = mappings fpm[model.Fingerprint(rawFP)] = mappings
} }
return fpm, highestMappedFP, nil return fpm, highestMappedFP, nil
} }
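
loadFPMappings turns raw uint64 values from the mappings file into model.Fingerprint by plain conversion; parsing the canonical hex form goes through model.FingerprintFromString, which replaces the old Fingerprint.LoadFromString method used further down in the benchmarks. A minimal sketch:

    package main

    import (
        "fmt"

        "github.com/prometheus/common/model"
    )

    func main() {
        // From a raw uint64, as when decoding the mappings file.
        fp := model.Fingerprint(0x00ff)

        // Round-trip through the canonical 16-character hex representation.
        parsed, err := model.FingerprintFromString(fp.String())
        if err != nil {
            panic(err)
        }
        fmt.Println(fp == parsed) // true
    }
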

View file

@ -19,7 +19,7 @@ import (
"testing" "testing"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/local/codable" "github.com/prometheus/prometheus/storage/local/codable"
"github.com/prometheus/prometheus/storage/local/index" "github.com/prometheus/prometheus/storage/local/index"
@ -28,11 +28,11 @@ import (
) )
var ( var (
m1 = clientmodel.Metric{"label": "value1"} m1 = model.Metric{"label": "value1"}
m2 = clientmodel.Metric{"label": "value2"} m2 = model.Metric{"label": "value2"}
m3 = clientmodel.Metric{"label": "value3"} m3 = model.Metric{"label": "value3"}
m4 = clientmodel.Metric{"label": "value4"} m4 = model.Metric{"label": "value4"}
m5 = clientmodel.Metric{"label": "value5"} m5 = model.Metric{"label": "value5"}
) )
func newTestPersistence(t *testing.T, encoding chunkEncoding) (*persistence, testutil.Closer) { func newTestPersistence(t *testing.T, encoding chunkEncoding) (*persistence, testutil.Closer) {
@ -50,20 +50,20 @@ func newTestPersistence(t *testing.T, encoding chunkEncoding) (*persistence, tes
}) })
} }
func buildTestChunks(encoding chunkEncoding) map[clientmodel.Fingerprint][]chunk { func buildTestChunks(encoding chunkEncoding) map[model.Fingerprint][]chunk {
fps := clientmodel.Fingerprints{ fps := model.Fingerprints{
m1.FastFingerprint(), m1.FastFingerprint(),
m2.FastFingerprint(), m2.FastFingerprint(),
m3.FastFingerprint(), m3.FastFingerprint(),
} }
fpToChunks := map[clientmodel.Fingerprint][]chunk{} fpToChunks := map[model.Fingerprint][]chunk{}
for _, fp := range fps { for _, fp := range fps {
fpToChunks[fp] = make([]chunk, 0, 10) fpToChunks[fp] = make([]chunk, 0, 10)
for i := 0; i < 10; i++ { for i := 0; i < 10; i++ {
fpToChunks[fp] = append(fpToChunks[fp], newChunkForEncoding(encoding).add(&metric.SamplePair{ fpToChunks[fp] = append(fpToChunks[fp], newChunkForEncoding(encoding).add(&metric.SamplePair{
Timestamp: clientmodel.Timestamp(i), Timestamp: model.Time(i),
Value: clientmodel.SampleValue(fp), Value: model.SampleValue(fp),
})[0]) })[0])
} }
} }
@ -89,11 +89,11 @@ func testPersistLoadDropChunks(t *testing.T, encoding chunkEncoding) {
for fp, chunks := range fpToChunks { for fp, chunks := range fpToChunks {
firstTimeNotDropped, offset, numDropped, allDropped, err := firstTimeNotDropped, offset, numDropped, allDropped, err :=
p.dropAndPersistChunks(fp, clientmodel.Earliest, chunks) p.dropAndPersistChunks(fp, model.Earliest, chunks)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if got, want := firstTimeNotDropped, clientmodel.Timestamp(0); got != want { if got, want := firstTimeNotDropped, model.Time(0); got != want {
t.Errorf("Want firstTimeNotDropped %v, got %v.", got, want) t.Errorf("Want firstTimeNotDropped %v, got %v.", got, want)
} }
if got, want := offset, 0; got != want { if got, want := offset, 0; got != want {
@ -127,7 +127,7 @@ func testPersistLoadDropChunks(t *testing.T, encoding chunkEncoding) {
t.Errorf("Got %d chunkDescs, want %d.", len(actualChunkDescs), 10) t.Errorf("Got %d chunkDescs, want %d.", len(actualChunkDescs), 10)
} }
for i, cd := range actualChunkDescs { for i, cd := range actualChunkDescs {
if cd.firstTime() != clientmodel.Timestamp(i) || cd.lastTime() != clientmodel.Timestamp(i) { if cd.firstTime() != model.Time(i) || cd.lastTime() != model.Time(i) {
t.Errorf( t.Errorf(
"Want ts=%v, got firstTime=%v, lastTime=%v.", "Want ts=%v, got firstTime=%v, lastTime=%v.",
i, cd.firstTime(), cd.lastTime(), i, cd.firstTime(), cd.lastTime(),
@ -141,7 +141,7 @@ func testPersistLoadDropChunks(t *testing.T, encoding chunkEncoding) {
t.Errorf("Got %d chunkDescs, want %d.", len(actualChunkDescs), 5) t.Errorf("Got %d chunkDescs, want %d.", len(actualChunkDescs), 5)
} }
for i, cd := range actualChunkDescs { for i, cd := range actualChunkDescs {
if cd.firstTime() != clientmodel.Timestamp(i) || cd.lastTime() != clientmodel.Timestamp(i) { if cd.firstTime() != model.Time(i) || cd.lastTime() != model.Time(i) {
t.Errorf( t.Errorf(
"Want ts=%v, got firstTime=%v, lastTime=%v.", "Want ts=%v, got firstTime=%v, lastTime=%v.",
i, cd.firstTime(), cd.lastTime(), i, cd.firstTime(), cd.lastTime(),
@ -204,11 +204,11 @@ func testPersistLoadDropChunks(t *testing.T, encoding chunkEncoding) {
// Re-add first two of the chunks. // Re-add first two of the chunks.
for fp, chunks := range fpToChunks { for fp, chunks := range fpToChunks {
firstTimeNotDropped, offset, numDropped, allDropped, err := firstTimeNotDropped, offset, numDropped, allDropped, err :=
p.dropAndPersistChunks(fp, clientmodel.Earliest, chunks[:2]) p.dropAndPersistChunks(fp, model.Earliest, chunks[:2])
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if got, want := firstTimeNotDropped, clientmodel.Timestamp(0); got != want { if got, want := firstTimeNotDropped, model.Time(0); got != want {
t.Errorf("Want firstTimeNotDropped %v, got %v.", got, want) t.Errorf("Want firstTimeNotDropped %v, got %v.", got, want)
} }
if got, want := offset, 0; got != want { if got, want := offset, 0; got != want {
@ -366,12 +366,12 @@ func testCheckpointAndLoadSeriesMapAndHeads(t *testing.T, encoding chunkEncoding
s3.persistWatermark = 1 s3.persistWatermark = 1
for i := 0; i < 10000; i++ { for i := 0; i < 10000; i++ {
s4.add(&metric.SamplePair{ s4.add(&metric.SamplePair{
Timestamp: clientmodel.Timestamp(i), Timestamp: model.Time(i),
Value: clientmodel.SampleValue(i) / 2, Value: model.SampleValue(i) / 2,
}) })
s5.add(&metric.SamplePair{ s5.add(&metric.SamplePair{
Timestamp: clientmodel.Timestamp(i), Timestamp: model.Time(i),
Value: clientmodel.SampleValue(i * i), Value: model.SampleValue(i * i),
}) })
} }
s5.persistWatermark = 3 s5.persistWatermark = 3
@ -491,11 +491,11 @@ func TestCheckpointAndLoadFPMappings(t *testing.T) {
defer closer.Close() defer closer.Close()
in := fpMappings{ in := fpMappings{
1: map[string]clientmodel.Fingerprint{ 1: map[string]model.Fingerprint{
"foo": 1, "foo": 1,
"bar": 2, "bar": 2,
}, },
3: map[string]clientmodel.Fingerprint{ 3: map[string]model.Fingerprint{
"baz": 4, "baz": 4,
}, },
} }
@ -508,7 +508,7 @@ func TestCheckpointAndLoadFPMappings(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if got, want := fp, clientmodel.Fingerprint(4); got != want { if got, want := fp, model.Fingerprint(4); got != want {
t.Errorf("got highest FP %v, want %v", got, want) t.Errorf("got highest FP %v, want %v", got, want)
} }
if !reflect.DeepEqual(in, out) { if !reflect.DeepEqual(in, out) {
@ -520,14 +520,14 @@ func testFingerprintsModifiedBefore(t *testing.T, encoding chunkEncoding) {
p, closer := newTestPersistence(t, encoding) p, closer := newTestPersistence(t, encoding)
defer closer.Close() defer closer.Close()
m1 := clientmodel.Metric{"n1": "v1"} m1 := model.Metric{"n1": "v1"}
m2 := clientmodel.Metric{"n2": "v2"} m2 := model.Metric{"n2": "v2"}
m3 := clientmodel.Metric{"n1": "v2"} m3 := model.Metric{"n1": "v2"}
p.archiveMetric(1, m1, 2, 4) p.archiveMetric(1, m1, 2, 4)
p.archiveMetric(2, m2, 1, 6) p.archiveMetric(2, m2, 1, 6)
p.archiveMetric(3, m3, 5, 5) p.archiveMetric(3, m3, 5, 5)
expectedFPs := map[clientmodel.Timestamp][]clientmodel.Fingerprint{ expectedFPs := map[model.Time][]model.Fingerprint{
0: {}, 0: {},
1: {}, 1: {},
2: {2}, 2: {2},
@ -562,7 +562,7 @@ func testFingerprintsModifiedBefore(t *testing.T, encoding chunkEncoding) {
t.Error("expected no unarchival") t.Error("expected no unarchival")
} }
expectedFPs = map[clientmodel.Timestamp][]clientmodel.Fingerprint{ expectedFPs = map[model.Time][]model.Fingerprint{
0: {}, 0: {},
1: {}, 1: {},
2: {2}, 2: {2},
@ -595,8 +595,8 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) {
p, closer := newTestPersistence(t, encoding) p, closer := newTestPersistence(t, encoding)
defer closer.Close() defer closer.Close()
m1 := clientmodel.Metric{"n1": "v1"} m1 := model.Metric{"n1": "v1"}
m2 := clientmodel.Metric{"n2": "v2"} m2 := model.Metric{"n2": "v2"}
p.archiveMetric(1, m1, 2, 4) p.archiveMetric(1, m1, 2, 4)
p.archiveMetric(2, m2, 1, 6) p.archiveMetric(2, m2, 1, 6)
p.indexMetric(1, m1) p.indexMetric(1, m1)
@ -607,7 +607,7 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
want := clientmodel.Fingerprints{1} want := model.Fingerprints{1}
if !reflect.DeepEqual(outFPs, want) { if !reflect.DeepEqual(outFPs, want) {
t.Errorf("want %#v, got %#v", want, outFPs) t.Errorf("want %#v, got %#v", want, outFPs)
} }
@ -615,7 +615,7 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
want = clientmodel.Fingerprints{2} want = model.Fingerprints{2}
if !reflect.DeepEqual(outFPs, want) { if !reflect.DeepEqual(outFPs, want) {
t.Errorf("want %#v, got %#v", want, outFPs) t.Errorf("want %#v, got %#v", want, outFPs)
} }
@ -647,7 +647,7 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
want = clientmodel.Fingerprints{2} want = model.Fingerprints{2}
if !reflect.DeepEqual(outFPs, want) { if !reflect.DeepEqual(outFPs, want) {
t.Errorf("want %#v, got %#v", want, outFPs) t.Errorf("want %#v, got %#v", want, outFPs)
} }
@ -678,21 +678,21 @@ func testIndexing(t *testing.T, encoding chunkEncoding) {
{ {
fpToMetric: index.FingerprintMetricMapping{ fpToMetric: index.FingerprintMetricMapping{
0: { 0: {
clientmodel.MetricNameLabel: "metric_0", model.MetricNameLabel: "metric_0",
"label_1": "value_1", "label_1": "value_1",
}, },
1: { 1: {
clientmodel.MetricNameLabel: "metric_0", model.MetricNameLabel: "metric_0",
"label_2": "value_2", "label_2": "value_2",
"label_3": "value_3", "label_3": "value_3",
}, },
2: { 2: {
clientmodel.MetricNameLabel: "metric_1", model.MetricNameLabel: "metric_1",
"label_1": "value_2", "label_1": "value_2",
}, },
}, },
expectedLnToLvs: index.LabelNameLabelValuesMapping{ expectedLnToLvs: index.LabelNameLabelValuesMapping{
clientmodel.MetricNameLabel: codable.LabelValueSet{ model.MetricNameLabel: codable.LabelValueSet{
"metric_0": struct{}{}, "metric_0": struct{}{},
"metric_1": struct{}{}, "metric_1": struct{}{},
}, },
@ -709,11 +709,11 @@ func testIndexing(t *testing.T, encoding chunkEncoding) {
}, },
expectedLpToFps: index.LabelPairFingerprintsMapping{ expectedLpToFps: index.LabelPairFingerprintsMapping{
metric.LabelPair{ metric.LabelPair{
Name: clientmodel.MetricNameLabel, Name: model.MetricNameLabel,
Value: "metric_0", Value: "metric_0",
}: codable.FingerprintSet{0: struct{}{}, 1: struct{}{}}, }: codable.FingerprintSet{0: struct{}{}, 1: struct{}{}},
metric.LabelPair{ metric.LabelPair{
Name: clientmodel.MetricNameLabel, Name: model.MetricNameLabel,
Value: "metric_1", Value: "metric_1",
}: codable.FingerprintSet{2: struct{}{}}, }: codable.FingerprintSet{2: struct{}{}},
metric.LabelPair{ metric.LabelPair{
@ -736,21 +736,21 @@ func testIndexing(t *testing.T, encoding chunkEncoding) {
}, { }, {
fpToMetric: index.FingerprintMetricMapping{ fpToMetric: index.FingerprintMetricMapping{
3: { 3: {
clientmodel.MetricNameLabel: "metric_0", model.MetricNameLabel: "metric_0",
"label_1": "value_3", "label_1": "value_3",
}, },
4: { 4: {
clientmodel.MetricNameLabel: "metric_2", model.MetricNameLabel: "metric_2",
"label_2": "value_2", "label_2": "value_2",
"label_3": "value_1", "label_3": "value_1",
}, },
5: { 5: {
clientmodel.MetricNameLabel: "metric_1", model.MetricNameLabel: "metric_1",
"label_1": "value_3", "label_1": "value_3",
}, },
}, },
expectedLnToLvs: index.LabelNameLabelValuesMapping{ expectedLnToLvs: index.LabelNameLabelValuesMapping{
clientmodel.MetricNameLabel: codable.LabelValueSet{ model.MetricNameLabel: codable.LabelValueSet{
"metric_0": struct{}{}, "metric_0": struct{}{},
"metric_1": struct{}{}, "metric_1": struct{}{},
"metric_2": struct{}{}, "metric_2": struct{}{},
@ -770,15 +770,15 @@ func testIndexing(t *testing.T, encoding chunkEncoding) {
}, },
expectedLpToFps: index.LabelPairFingerprintsMapping{ expectedLpToFps: index.LabelPairFingerprintsMapping{
metric.LabelPair{ metric.LabelPair{
Name: clientmodel.MetricNameLabel, Name: model.MetricNameLabel,
Value: "metric_0", Value: "metric_0",
}: codable.FingerprintSet{0: struct{}{}, 1: struct{}{}, 3: struct{}{}}, }: codable.FingerprintSet{0: struct{}{}, 1: struct{}{}, 3: struct{}{}},
metric.LabelPair{ metric.LabelPair{
Name: clientmodel.MetricNameLabel, Name: model.MetricNameLabel,
Value: "metric_1", Value: "metric_1",
}: codable.FingerprintSet{2: struct{}{}, 5: struct{}{}}, }: codable.FingerprintSet{2: struct{}{}, 5: struct{}{}},
metric.LabelPair{ metric.LabelPair{
Name: clientmodel.MetricNameLabel, Name: model.MetricNameLabel,
Value: "metric_2", Value: "metric_2",
}: codable.FingerprintSet{4: struct{}{}}, }: codable.FingerprintSet{4: struct{}{}},
metric.LabelPair{ metric.LabelPair{
@ -928,10 +928,10 @@ func BenchmarkLoadChunksSequentially(b *testing.B) {
sequentialIndexes[i] = i sequentialIndexes[i] = i
} }
var fp clientmodel.Fingerprint var fp model.Fingerprint
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
for _, s := range fpStrings { for _, s := range fpStrings {
fp.LoadFromString(s) fp, _ = model.FingerprintFromString(s)
cds, err := p.loadChunks(fp, sequentialIndexes, 0) cds, err := p.loadChunks(fp, sequentialIndexes, 0)
if err != nil { if err != nil {
b.Error(err) b.Error(err)
@ -950,10 +950,10 @@ func BenchmarkLoadChunksRandomly(b *testing.B) {
} }
randomIndexes := []int{1, 5, 6, 8, 11, 14, 18, 23, 29, 33, 42, 46} randomIndexes := []int{1, 5, 6, 8, 11, 14, 18, 23, 29, 33, 42, 46}
var fp clientmodel.Fingerprint var fp model.Fingerprint
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
for _, s := range fpStrings { for _, s := range fpStrings {
fp.LoadFromString(s) fp, _ = model.FingerprintFromString(s)
cds, err := p.loadChunks(fp, randomIndexes, 0) cds, err := p.loadChunks(fp, randomIndexes, 0)
if err != nil { if err != nil {
b.Error(err) b.Error(err)
@ -970,10 +970,10 @@ func BenchmarkLoadChunkDescs(b *testing.B) {
basePath: "fixtures", basePath: "fixtures",
} }
var fp clientmodel.Fingerprint var fp model.Fingerprint
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
for _, s := range fpStrings { for _, s := range fpStrings {
fp.LoadFromString(s) fp, _ = model.FingerprintFromString(s)
cds, err := p.loadChunkDescs(fp, 0) cds, err := p.loadChunkDescs(fp, 0)
if err != nil { if err != nil {
b.Error(err) b.Error(err)

View file

@ -16,7 +16,7 @@ package local
import ( import (
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
// memorySeriesPreloader is a Preloader for the memorySeriesStorage. // memorySeriesPreloader is a Preloader for the memorySeriesStorage.
@ -27,8 +27,8 @@ type memorySeriesPreloader struct {
// PreloadRange implements Preloader. // PreloadRange implements Preloader.
func (p *memorySeriesPreloader) PreloadRange( func (p *memorySeriesPreloader) PreloadRange(
fp clientmodel.Fingerprint, fp model.Fingerprint,
from clientmodel.Timestamp, through clientmodel.Timestamp, from model.Time, through model.Time,
stalenessDelta time.Duration, stalenessDelta time.Duration,
) error { ) error {
cds, err := p.storage.preloadChunksForRange(fp, from, through, stalenessDelta) cds, err := p.storage.preloadChunksForRange(fp, from, through, stalenessDelta)
@ -41,7 +41,7 @@ func (p *memorySeriesPreloader) PreloadRange(
/* /*
// MetricAtTime implements Preloader. // MetricAtTime implements Preloader.
func (p *memorySeriesPreloader) MetricAtTime(fp clientmodel.Fingerprint, t clientmodel.Timestamp) error { func (p *memorySeriesPreloader) MetricAtTime(fp model.Fingerprint, t model.Time) error {
cds, err := p.storage.preloadChunks(fp, &timeSelector{ cds, err := p.storage.preloadChunks(fp, &timeSelector{
from: t, from: t,
through: t, through: t,
@ -54,7 +54,7 @@ func (p *memorySeriesPreloader) MetricAtTime(fp clientmodel.Fingerprint, t clien
} }
// MetricAtInterval implements Preloader. // MetricAtInterval implements Preloader.
func (p *memorySeriesPreloader) MetricAtInterval(fp clientmodel.Fingerprint, from, through clientmodel.Timestamp, interval time.Duration) error { func (p *memorySeriesPreloader) MetricAtInterval(fp model.Fingerprint, from, through model.Time, interval time.Duration) error {
cds, err := p.storage.preloadChunks(fp, &timeSelector{ cds, err := p.storage.preloadChunks(fp, &timeSelector{
from: from, from: from,
through: through, through: through,
@ -68,7 +68,7 @@ func (p *memorySeriesPreloader) MetricAtInterval(fp clientmodel.Fingerprint, fro
} }
// MetricRange implements Preloader. // MetricRange implements Preloader.
func (p *memorySeriesPreloader) MetricRange(fp clientmodel.Fingerprint, t clientmodel.Timestamp, rangeDuration time.Duration) error { func (p *memorySeriesPreloader) MetricRange(fp model.Fingerprint, t model.Time, rangeDuration time.Duration) error {
cds, err := p.storage.preloadChunks(fp, &timeSelector{ cds, err := p.storage.preloadChunks(fp, &timeSelector{
from: t, from: t,
through: t, through: t,
@ -82,7 +82,7 @@ func (p *memorySeriesPreloader) MetricRange(fp clientmodel.Fingerprint, t client
} }
// MetricRangeAtInterval implements Preloader. // MetricRangeAtInterval implements Preloader.
func (p *memorySeriesPreloader) MetricRangeAtInterval(fp clientmodel.Fingerprint, from, through clientmodel.Timestamp, interval, rangeDuration time.Duration) error { func (p *memorySeriesPreloader) MetricRangeAtInterval(fp model.Fingerprint, from, through model.Time, interval, rangeDuration time.Duration) error {
cds, err := p.storage.preloadChunks(fp, &timeSelector{ cds, err := p.storage.preloadChunks(fp, &timeSelector{
from: from, from: from,
through: through, through: through,

View file

@ -18,7 +18,7 @@ import (
"sync" "sync"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
) )
@ -36,22 +36,22 @@ const (
// fingerprintSeriesPair pairs a fingerprint with a memorySeries pointer. // fingerprintSeriesPair pairs a fingerprint with a memorySeries pointer.
type fingerprintSeriesPair struct { type fingerprintSeriesPair struct {
fp clientmodel.Fingerprint fp model.Fingerprint
series *memorySeries series *memorySeries
} }
// seriesMap maps fingerprints to memory series. All its methods are // seriesMap maps fingerprints to memory series. All its methods are
// goroutine-safe. A seriesMap is effectively a goroutine-safe version of // goroutine-safe. A seriesMap is effectively a goroutine-safe version of
// map[clientmodel.Fingerprint]*memorySeries. // map[model.Fingerprint]*memorySeries.
type seriesMap struct { type seriesMap struct {
mtx sync.RWMutex mtx sync.RWMutex
m map[clientmodel.Fingerprint]*memorySeries m map[model.Fingerprint]*memorySeries
} }
// newSeriesMap returns a newly allocated empty seriesMap. To create a seriesMap // newSeriesMap returns a newly allocated empty seriesMap. To create a seriesMap
// based on a prefilled map, use an explicit initializer. // based on a prefilled map, use an explicit initializer.
func newSeriesMap() *seriesMap { func newSeriesMap() *seriesMap {
return &seriesMap{m: make(map[clientmodel.Fingerprint]*memorySeries)} return &seriesMap{m: make(map[model.Fingerprint]*memorySeries)}
} }
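
A stripped-down sketch of the pattern seriesMap wraps: an RWMutex-guarded map keyed by model.Fingerprint, with a placeholder value type standing in for *memorySeries:

    package main

    import (
        "fmt"
        "sync"

        "github.com/prometheus/common/model"
    )

    // series is a placeholder for *memorySeries.
    type series struct{ lastTime model.Time }

    type safeMap struct {
        mtx sync.RWMutex
        m   map[model.Fingerprint]*series
    }

    func (s *safeMap) get(fp model.Fingerprint) (*series, bool) {
        s.mtx.RLock()
        defer s.mtx.RUnlock()
        sr, ok := s.m[fp]
        return sr, ok
    }

    func (s *safeMap) put(fp model.Fingerprint, sr *series) {
        s.mtx.Lock()
        defer s.mtx.Unlock()
        s.m[fp] = sr
    }

    func main() {
        sm := &safeMap{m: map[model.Fingerprint]*series{}}
        sm.put(42, &series{lastTime: model.Now()})
        if sr, ok := sm.get(42); ok {
            fmt.Println(sr.lastTime)
        }
    }
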
// length returns the number of mappings in the seriesMap. // length returns the number of mappings in the seriesMap.
@ -64,7 +64,7 @@ func (sm *seriesMap) length() int {
// get returns a memorySeries for a fingerprint. Return values have the same // get returns a memorySeries for a fingerprint. Return values have the same
// semantics as the native Go map. // semantics as the native Go map.
func (sm *seriesMap) get(fp clientmodel.Fingerprint) (s *memorySeries, ok bool) { func (sm *seriesMap) get(fp model.Fingerprint) (s *memorySeries, ok bool) {
sm.mtx.RLock() sm.mtx.RLock()
defer sm.mtx.RUnlock() defer sm.mtx.RUnlock()
@ -73,7 +73,7 @@ func (sm *seriesMap) get(fp clientmodel.Fingerprint) (s *memorySeries, ok bool)
} }
// put adds a mapping to the seriesMap. It panics if s == nil. // put adds a mapping to the seriesMap. It panics if s == nil.
func (sm *seriesMap) put(fp clientmodel.Fingerprint, s *memorySeries) { func (sm *seriesMap) put(fp model.Fingerprint, s *memorySeries) {
sm.mtx.Lock() sm.mtx.Lock()
defer sm.mtx.Unlock() defer sm.mtx.Unlock()
@ -84,7 +84,7 @@ func (sm *seriesMap) put(fp clientmodel.Fingerprint, s *memorySeries) {
} }
// del removes a mapping from the seriesMap. // del removes a mapping from the seriesMap.
func (sm *seriesMap) del(fp clientmodel.Fingerprint) { func (sm *seriesMap) del(fp model.Fingerprint) {
sm.mtx.Lock() sm.mtx.Lock()
defer sm.mtx.Unlock() defer sm.mtx.Unlock()
@ -120,8 +120,8 @@ func (sm *seriesMap) iter() <-chan fingerprintSeriesPair {
// for iterating over a map with a 'range' clause. However, if the next element // for iterating over a map with a 'range' clause. However, if the next element
// in iteration order is removed after the current element has been received // in iteration order is removed after the current element has been received
// from the channel, it will still be produced by the channel. // from the channel, it will still be produced by the channel.
func (sm *seriesMap) fpIter() <-chan clientmodel.Fingerprint { func (sm *seriesMap) fpIter() <-chan model.Fingerprint {
ch := make(chan clientmodel.Fingerprint) ch := make(chan model.Fingerprint)
go func() { go func() {
sm.mtx.RLock() sm.mtx.RLock()
for fp := range sm.m { for fp := range sm.m {
@ -136,7 +136,7 @@ func (sm *seriesMap) fpIter() <-chan clientmodel.Fingerprint {
} }
type memorySeries struct { type memorySeries struct {
metric clientmodel.Metric metric model.Metric
// Sorted by start time, overlapping chunk ranges are forbidden. // Sorted by start time, overlapping chunk ranges are forbidden.
chunkDescs []*chunkDesc chunkDescs []*chunkDesc
// The index (within chunkDescs above) of the first chunkDesc that // The index (within chunkDescs above) of the first chunkDesc that
@ -161,10 +161,10 @@ type memorySeries struct {
// chunkDescsOffset is not 0. It can be used to save the firstTime of the // chunkDescsOffset is not 0. It can be used to save the firstTime of the
// first chunk before its chunk desc is evicted. In doubt, this field is // first chunk before its chunk desc is evicted. In doubt, this field is
// just set to the oldest possible timestamp. // just set to the oldest possible timestamp.
savedFirstTime clientmodel.Timestamp savedFirstTime model.Time
// The timestamp of the last sample in this series. Needed for fast access to // The timestamp of the last sample in this series. Needed for fast access to
// ensure timestamp monotonicity during ingestion. // ensure timestamp monotonicity during ingestion.
lastTime clientmodel.Timestamp lastTime model.Time
// Whether the current head chunk has already been finished. If true, // Whether the current head chunk has already been finished. If true,
// the current head chunk must not be modified anymore. // the current head chunk must not be modified anymore.
headChunkClosed bool headChunkClosed bool
@ -182,12 +182,12 @@ type memorySeries struct {
// the provided parameters. chunkDescs can be nil or empty if this is a // the provided parameters. chunkDescs can be nil or empty if this is a
// genuinely new time series (i.e. not one that is being unarchived). In that // genuinely new time series (i.e. not one that is being unarchived). In that
// case, headChunkClosed is set to false, and firstTime and lastTime are both // case, headChunkClosed is set to false, and firstTime and lastTime are both
// set to clientmodel.Earliest. The zero value for modTime can be used if the // set to model.Earliest. The zero value for modTime can be used if the
// modification time of the series file is unknown (e.g. if this is a genuinely // modification time of the series file is unknown (e.g. if this is a genuinely
// new series). // new series).
func newMemorySeries(m clientmodel.Metric, chunkDescs []*chunkDesc, modTime time.Time) *memorySeries { func newMemorySeries(m model.Metric, chunkDescs []*chunkDesc, modTime time.Time) *memorySeries {
firstTime := clientmodel.Earliest firstTime := model.Earliest
lastTime := clientmodel.Earliest lastTime := model.Earliest
if len(chunkDescs) > 0 { if len(chunkDescs) > 0 {
firstTime = chunkDescs[0].firstTime() firstTime = chunkDescs[0].firstTime()
lastTime = chunkDescs[len(chunkDescs)-1].lastTime() lastTime = chunkDescs[len(chunkDescs)-1].lastTime()
@ -281,7 +281,7 @@ func (s *memorySeries) evictChunkDescs(iOldestNotEvicted int) {
// dropChunks removes chunkDescs older than t. The caller must have locked the // dropChunks removes chunkDescs older than t. The caller must have locked the
// fingerprint of the series. // fingerprint of the series.
func (s *memorySeries) dropChunks(t clientmodel.Timestamp) { func (s *memorySeries) dropChunks(t model.Time) {
keepIdx := len(s.chunkDescs) keepIdx := len(s.chunkDescs)
for i, cd := range s.chunkDescs { for i, cd := range s.chunkDescs {
if !cd.lastTime().Before(t) { if !cd.lastTime().Before(t) {
@ -308,7 +308,7 @@ func (s *memorySeries) dropChunks(t clientmodel.Timestamp) {
// preloadChunks is an internal helper method. // preloadChunks is an internal helper method.
func (s *memorySeries) preloadChunks( func (s *memorySeries) preloadChunks(
indexes []int, fp clientmodel.Fingerprint, mss *memorySeriesStorage, indexes []int, fp model.Fingerprint, mss *memorySeriesStorage,
) ([]*chunkDesc, error) { ) ([]*chunkDesc, error) {
loadIndexes := []int{} loadIndexes := []int{}
pinnedChunkDescs := make([]*chunkDesc, 0, len(indexes)) pinnedChunkDescs := make([]*chunkDesc, 0, len(indexes))
@ -343,7 +343,7 @@ func (s *memorySeries) preloadChunks(
} }
/* /*
func (s *memorySeries) preloadChunksAtTime(t clientmodel.Timestamp, p *persistence) (chunkDescs, error) { func (s *memorySeries) preloadChunksAtTime(t model.Time, p *persistence) (chunkDescs, error) {
s.mtx.Lock() s.mtx.Lock()
defer s.mtx.Unlock() defer s.mtx.Unlock()
@ -376,10 +376,10 @@ func (s *memorySeries) preloadChunksAtTime(t clientmodel.Timestamp, p *persisten
// preloadChunksForRange loads chunks for the given range from the persistence. // preloadChunksForRange loads chunks for the given range from the persistence.
// The caller must have locked the fingerprint of the series. // The caller must have locked the fingerprint of the series.
func (s *memorySeries) preloadChunksForRange( func (s *memorySeries) preloadChunksForRange(
from clientmodel.Timestamp, through clientmodel.Timestamp, from model.Time, through model.Time,
fp clientmodel.Fingerprint, mss *memorySeriesStorage, fp model.Fingerprint, mss *memorySeriesStorage,
) ([]*chunkDesc, error) { ) ([]*chunkDesc, error) {
firstChunkDescTime := clientmodel.Latest firstChunkDescTime := model.Latest
if len(s.chunkDescs) > 0 { if len(s.chunkDescs) > 0 {
firstChunkDescTime = s.chunkDescs[0].firstTime() firstChunkDescTime = s.chunkDescs[0].firstTime()
} }
@ -447,7 +447,7 @@ func (s *memorySeries) head() *chunkDesc {
// firstTime returns the timestamp of the first sample in the series. The caller // firstTime returns the timestamp of the first sample in the series. The caller
// must have locked the fingerprint of the memorySeries. // must have locked the fingerprint of the memorySeries.
func (s *memorySeries) firstTime() clientmodel.Timestamp { func (s *memorySeries) firstTime() model.Time {
if s.chunkDescsOffset == 0 && len(s.chunkDescs) > 0 { if s.chunkDescsOffset == 0 && len(s.chunkDescs) > 0 {
return s.chunkDescs[0].firstTime() return s.chunkDescs[0].firstTime()
} }
@ -482,7 +482,7 @@ type memorySeriesIterator struct {
} }
// ValueAtTime implements SeriesIterator. // ValueAtTime implements SeriesIterator.
func (it *memorySeriesIterator) ValueAtTime(t clientmodel.Timestamp) metric.Values { func (it *memorySeriesIterator) ValueAtTime(t model.Time) metric.Values {
// The most common case. We are iterating through a chunk. // The most common case. We are iterating through a chunk.
if it.chunkIt != nil && it.chunkIt.contains(t) { if it.chunkIt != nil && it.chunkIt.contains(t) {
return it.chunkIt.valueAtTime(t) return it.chunkIt.valueAtTime(t)
@ -638,7 +638,7 @@ func (it *memorySeriesIterator) chunkIterator(i int) chunkIterator {
type nopSeriesIterator struct{} type nopSeriesIterator struct{}
// ValueAtTime implements SeriesIterator. // ValueAtTime implements SeriesIterator.
func (_ nopSeriesIterator) ValueAtTime(t clientmodel.Timestamp) metric.Values { func (_ nopSeriesIterator) ValueAtTime(t model.Time) metric.Values {
return metric.Values{} return metric.Values{}
} }

View file

@ -23,7 +23,7 @@ import (
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
) )
@ -309,7 +309,7 @@ func (s *memorySeriesStorage) WaitForIndexing() {
} }
// NewIterator implements Storage. // NewIterator implements Storage.
func (s *memorySeriesStorage) NewIterator(fp clientmodel.Fingerprint) SeriesIterator { func (s *memorySeriesStorage) NewIterator(fp model.Fingerprint) SeriesIterator {
s.fpLocker.Lock(fp) s.fpLocker.Lock(fp)
defer s.fpLocker.Unlock(fp) defer s.fpLocker.Unlock(fp)
@ -324,12 +324,12 @@ func (s *memorySeriesStorage) NewIterator(fp clientmodel.Fingerprint) SeriesIter
} }
return &boundedIterator{ return &boundedIterator{
it: series.newIterator(), it: series.newIterator(),
start: clientmodel.Now().Add(-s.dropAfter), start: model.Now().Add(-s.dropAfter),
} }
} }
// LastSampleForFingerprint implements Storage. // LastSampleForFingerprint implements Storage.
func (s *memorySeriesStorage) LastSamplePairForFingerprint(fp clientmodel.Fingerprint) *metric.SamplePair { func (s *memorySeriesStorage) LastSamplePairForFingerprint(fp model.Fingerprint) *metric.SamplePair {
s.fpLocker.Lock(fp) s.fpLocker.Lock(fp)
defer s.fpLocker.Unlock(fp) defer s.fpLocker.Unlock(fp)
@ -344,11 +344,11 @@ func (s *memorySeriesStorage) LastSamplePairForFingerprint(fp clientmodel.Finger
// data from earlier than the configured start time. // data from earlier than the configured start time.
type boundedIterator struct { type boundedIterator struct {
it SeriesIterator it SeriesIterator
start clientmodel.Timestamp start model.Time
} }
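
boundedIterator's start is simply model.Now() pushed back by the retention period; anything older is dropped. A minimal sketch with an illustrative retention value:

    package main

    import (
        "fmt"
        "time"

        "github.com/prometheus/common/model"
    )

    func main() {
        dropAfter := 15 * 24 * time.Hour // illustrative retention period
        start := model.Now().Add(-dropAfter)

        ts := model.Now().Add(-30 * 24 * time.Hour) // a sample older than retention
        if ts.Before(start) {
            fmt.Println("sample lies outside the retention window; skip it")
        }
    }
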
// ValueAtTime implements the SeriesIterator interface. // ValueAtTime implements the SeriesIterator interface.
func (bit *boundedIterator) ValueAtTime(ts clientmodel.Timestamp) metric.Values { func (bit *boundedIterator) ValueAtTime(ts model.Time) metric.Values {
if ts < bit.start { if ts < bit.start {
return metric.Values{} return metric.Values{}
} }
@ -386,10 +386,10 @@ func (s *memorySeriesStorage) NewPreloader() Preloader {
// fingerprintsForLabelPairs returns the set of fingerprints that have the given labels. // fingerprintsForLabelPairs returns the set of fingerprints that have the given labels.
// This does not work with empty label values. // This does not work with empty label values.
func (s *memorySeriesStorage) fingerprintsForLabelPairs(pairs ...metric.LabelPair) map[clientmodel.Fingerprint]struct{} { func (s *memorySeriesStorage) fingerprintsForLabelPairs(pairs ...metric.LabelPair) map[model.Fingerprint]struct{} {
var result map[clientmodel.Fingerprint]struct{} var result map[model.Fingerprint]struct{}
for _, pair := range pairs { for _, pair := range pairs {
intersection := map[clientmodel.Fingerprint]struct{}{} intersection := map[model.Fingerprint]struct{}{}
fps, err := s.persistence.fingerprintsForLabelPair(pair) fps, err := s.persistence.fingerprintsForLabelPair(pair)
if err != nil { if err != nil {
log.Error("Error getting fingerprints for label pair: ", err) log.Error("Error getting fingerprints for label pair: ", err)
@ -411,7 +411,7 @@ func (s *memorySeriesStorage) fingerprintsForLabelPairs(pairs ...metric.LabelPai
} }
// MetricsForLabelMatchers implements Storage. // MetricsForLabelMatchers implements Storage.
func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelMatcher) map[clientmodel.Fingerprint]clientmodel.COWMetric { func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelMatcher) map[model.Fingerprint]model.COWMetric {
var ( var (
equals []metric.LabelPair equals []metric.LabelPair
filters []*metric.LabelMatcher filters []*metric.LabelMatcher
@ -427,7 +427,7 @@ func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelM
} }
} }
var resFPs map[clientmodel.Fingerprint]struct{} var resFPs map[model.Fingerprint]struct{}
if len(equals) > 0 { if len(equals) > 0 {
resFPs = s.fingerprintsForLabelPairs(equals...) resFPs = s.fingerprintsForLabelPairs(equals...)
} else { } else {
@ -440,7 +440,7 @@ func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelM
remaining = append(remaining, matcher) remaining = append(remaining, matcher)
continue continue
} }
intersection := map[clientmodel.Fingerprint]struct{}{} intersection := map[model.Fingerprint]struct{}{}
matches := matcher.Filter(s.LabelValuesForLabelName(matcher.Name)) matches := matcher.Filter(s.LabelValuesForLabelName(matcher.Name))
if len(matches) == 0 { if len(matches) == 0 {
@ -463,7 +463,7 @@ func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelM
filters = remaining filters = remaining
} }
result := make(map[clientmodel.Fingerprint]clientmodel.COWMetric, len(resFPs)) result := make(map[model.Fingerprint]model.COWMetric, len(resFPs))
for fp := range resFPs { for fp := range resFPs {
result[fp] = s.MetricForFingerprint(fp) result[fp] = s.MetricForFingerprint(fp)
} }
@ -478,7 +478,7 @@ func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelM
} }
// LabelValuesForLabelName implements Storage. // LabelValuesForLabelName implements Storage.
func (s *memorySeriesStorage) LabelValuesForLabelName(labelName clientmodel.LabelName) clientmodel.LabelValues { func (s *memorySeriesStorage) LabelValuesForLabelName(labelName model.LabelName) model.LabelValues {
lvs, err := s.persistence.labelValuesForLabelName(labelName) lvs, err := s.persistence.labelValuesForLabelName(labelName)
if err != nil { if err != nil {
log.Errorf("Error getting label values for label name %q: %v", labelName, err) log.Errorf("Error getting label values for label name %q: %v", labelName, err)
@ -487,7 +487,7 @@ func (s *memorySeriesStorage) LabelValuesForLabelName(labelName clientmodel.Labe
} }
// MetricForFingerprint implements Storage. // MetricForFingerprint implements Storage.
func (s *memorySeriesStorage) MetricForFingerprint(fp clientmodel.Fingerprint) clientmodel.COWMetric { func (s *memorySeriesStorage) MetricForFingerprint(fp model.Fingerprint) model.COWMetric {
s.fpLocker.Lock(fp) s.fpLocker.Lock(fp)
defer s.fpLocker.Unlock(fp) defer s.fpLocker.Unlock(fp)
@ -495,7 +495,7 @@ func (s *memorySeriesStorage) MetricForFingerprint(fp clientmodel.Fingerprint) c
if ok { if ok {
// Wrap the returned metric in a copy-on-write (COW) metric here because // Wrap the returned metric in a copy-on-write (COW) metric here because
// the caller might mutate it. // the caller might mutate it.
return clientmodel.COWMetric{ return model.COWMetric{
Metric: series.metric, Metric: series.metric,
} }
} }
@ -503,13 +503,13 @@ func (s *memorySeriesStorage) MetricForFingerprint(fp clientmodel.Fingerprint) c
if err != nil { if err != nil {
log.Errorf("Error retrieving archived metric for fingerprint %v: %v", fp, err) log.Errorf("Error retrieving archived metric for fingerprint %v: %v", fp, err)
} }
return clientmodel.COWMetric{ return model.COWMetric{
Metric: metric, Metric: metric,
} }
} }
// DropMetric implements Storage. // DropMetric implements Storage.
func (s *memorySeriesStorage) DropMetricsForFingerprints(fps ...clientmodel.Fingerprint) { func (s *memorySeriesStorage) DropMetricsForFingerprints(fps ...model.Fingerprint) {
for _, fp := range fps { for _, fp := range fps {
s.fpLocker.Lock(fp) s.fpLocker.Lock(fp)
@ -529,7 +529,7 @@ func (s *memorySeriesStorage) DropMetricsForFingerprints(fps ...clientmodel.Fing
} }
// Append implements Storage. // Append implements Storage.
func (s *memorySeriesStorage) Append(sample *clientmodel.Sample) { func (s *memorySeriesStorage) Append(sample *model.Sample) {
for ln, lv := range sample.Metric { for ln, lv := range sample.Metric {
if len(lv) == 0 { if len(lv) == 0 {
delete(sample.Metric, ln) delete(sample.Metric, ln)
@ -580,7 +580,7 @@ func (s *memorySeriesStorage) Append(sample *clientmodel.Sample) {
s.incNumChunksToPersist(completedChunksCount) s.incNumChunksToPersist(completedChunksCount)
} }
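
Append first drops labels with empty values from the incoming sample's metric; since model.Metric is an ordinary map, that is a plain delete, e.g.:

    package main

    import (
        "fmt"

        "github.com/prometheus/common/model"
    )

    func main() {
        m := model.Metric{
            model.MetricNameLabel: "http_requests_total",
            "instance":            "", // empty value: gets dropped
            "job":                 "api-server",
        }
        for ln, lv := range m {
            if len(lv) == 0 {
                delete(m, ln)
            }
        }
        fmt.Println(m) // the empty-valued label is gone
    }
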
func (s *memorySeriesStorage) getOrCreateSeries(fp clientmodel.Fingerprint, m clientmodel.Metric) *memorySeries { func (s *memorySeriesStorage) getOrCreateSeries(fp model.Fingerprint, m model.Metric) *memorySeries {
series, ok := s.fpToSeries.get(fp) series, ok := s.fpToSeries.get(fp)
if !ok { if !ok {
var cds []*chunkDesc var cds []*chunkDesc
@ -614,8 +614,8 @@ func (s *memorySeriesStorage) getOrCreateSeries(fp clientmodel.Fingerprint, m cl
} }
func (s *memorySeriesStorage) preloadChunksForRange( func (s *memorySeriesStorage) preloadChunksForRange(
fp clientmodel.Fingerprint, fp model.Fingerprint,
from clientmodel.Timestamp, through clientmodel.Timestamp, from model.Time, through model.Time,
stalenessDelta time.Duration, stalenessDelta time.Duration,
) ([]*chunkDesc, error) { ) ([]*chunkDesc, error) {
s.fpLocker.Lock(fp) s.fpLocker.Lock(fp)
@ -768,10 +768,10 @@ func (s *memorySeriesStorage) waitForNextFP(numberOfFPs int, maxWaitDurationFact
// cycleThroughMemoryFingerprints returns a channel that emits fingerprints for // cycleThroughMemoryFingerprints returns a channel that emits fingerprints for
// series in memory in a throttled fashion. It continues to cycle through all // series in memory in a throttled fashion. It continues to cycle through all
// fingerprints in memory until s.loopStopping is closed. // fingerprints in memory until s.loopStopping is closed.
func (s *memorySeriesStorage) cycleThroughMemoryFingerprints() chan clientmodel.Fingerprint { func (s *memorySeriesStorage) cycleThroughMemoryFingerprints() chan model.Fingerprint {
memoryFingerprints := make(chan clientmodel.Fingerprint) memoryFingerprints := make(chan model.Fingerprint)
go func() { go func() {
var fpIter <-chan clientmodel.Fingerprint var fpIter <-chan model.Fingerprint
defer func() { defer func() {
if fpIter != nil { if fpIter != nil {
@ -815,14 +815,14 @@ func (s *memorySeriesStorage) cycleThroughMemoryFingerprints() chan clientmodel.
// cycleThroughArchivedFingerprints returns a channel that emits fingerprints // cycleThroughArchivedFingerprints returns a channel that emits fingerprints
// for archived series in a throttled fashion. It continues to cycle through all // for archived series in a throttled fashion. It continues to cycle through all
// archived fingerprints until s.loopStopping is closed. // archived fingerprints until s.loopStopping is closed.
func (s *memorySeriesStorage) cycleThroughArchivedFingerprints() chan clientmodel.Fingerprint { func (s *memorySeriesStorage) cycleThroughArchivedFingerprints() chan model.Fingerprint {
archivedFingerprints := make(chan clientmodel.Fingerprint) archivedFingerprints := make(chan model.Fingerprint)
go func() { go func() {
defer close(archivedFingerprints) defer close(archivedFingerprints)
for { for {
archivedFPs, err := s.persistence.fingerprintsModifiedBefore( archivedFPs, err := s.persistence.fingerprintsModifiedBefore(
clientmodel.Now().Add(-s.dropAfter), model.Now().Add(-s.dropAfter),
) )
if err != nil { if err != nil {
log.Error("Failed to lookup archived fingerprint ranges: ", err) log.Error("Failed to lookup archived fingerprint ranges: ", err)
@ -878,7 +878,7 @@ loop:
dirtySeriesCount = 0 dirtySeriesCount = 0
checkpointTimer.Reset(s.checkpointInterval) checkpointTimer.Reset(s.checkpointInterval)
case fp := <-memoryFingerprints: case fp := <-memoryFingerprints:
if s.maintainMemorySeries(fp, clientmodel.Now().Add(-s.dropAfter)) { if s.maintainMemorySeries(fp, model.Now().Add(-s.dropAfter)) {
dirtySeriesCount++ dirtySeriesCount++
// Check if we have enough "dirty" series so that we need an early checkpoint. // Check if we have enough "dirty" series so that we need an early checkpoint.
// However, if we are already behind persisting chunks, creating a checkpoint // However, if we are already behind persisting chunks, creating a checkpoint
@ -892,7 +892,7 @@ loop:
} }
} }
case fp := <-archivedFingerprints: case fp := <-archivedFingerprints:
s.maintainArchivedSeries(fp, clientmodel.Now().Add(-s.dropAfter)) s.maintainArchivedSeries(fp, model.Now().Add(-s.dropAfter))
} }
} }
// Wait until both channels are closed. // Wait until both channels are closed.
@ -934,7 +934,7 @@ loop:
// //
// Finally, it evicts chunkDescs if there are too many. // Finally, it evicts chunkDescs if there are too many.
func (s *memorySeriesStorage) maintainMemorySeries( func (s *memorySeriesStorage) maintainMemorySeries(
fp clientmodel.Fingerprint, beforeTime clientmodel.Timestamp, fp model.Fingerprint, beforeTime model.Time,
) (becameDirty bool) { ) (becameDirty bool) {
defer func(begin time.Time) { defer func(begin time.Time) {
s.maintainSeriesDuration.WithLabelValues(maintainInMemory).Observe( s.maintainSeriesDuration.WithLabelValues(maintainInMemory).Observe(
@ -1002,7 +1002,7 @@ func (s *memorySeriesStorage) maintainMemorySeries(
// //
// The caller must have locked the fp. // The caller must have locked the fp.
func (s *memorySeriesStorage) writeMemorySeries( func (s *memorySeriesStorage) writeMemorySeries(
fp clientmodel.Fingerprint, series *memorySeries, beforeTime clientmodel.Timestamp, fp model.Fingerprint, series *memorySeries, beforeTime model.Time,
) bool { ) bool {
cds := series.chunksToPersist() cds := series.chunksToPersist()
defer func() { defer func() {
@ -1071,7 +1071,7 @@ func (s *memorySeriesStorage) writeMemorySeries(
// maintainArchivedSeries drops chunks older than beforeTime from an archived // maintainArchivedSeries drops chunks older than beforeTime from an archived
// series. If the series contains no chunks after that, it is purged entirely. // series. If the series contains no chunks after that, it is purged entirely.
func (s *memorySeriesStorage) maintainArchivedSeries(fp clientmodel.Fingerprint, beforeTime clientmodel.Timestamp) { func (s *memorySeriesStorage) maintainArchivedSeries(fp model.Fingerprint, beforeTime model.Time) {
defer func(begin time.Time) { defer func(begin time.Time) {
s.maintainSeriesDuration.WithLabelValues(maintainArchived).Observe( s.maintainSeriesDuration.WithLabelValues(maintainArchived).Observe(
float64(time.Since(begin)) / float64(time.Millisecond), float64(time.Since(begin)) / float64(time.Millisecond),
@ -1109,12 +1109,12 @@ func (s *memorySeriesStorage) maintainArchivedSeries(fp clientmodel.Fingerprint,
} }
// See persistence.loadChunks for detailed explanation. // See persistence.loadChunks for detailed explanation.
func (s *memorySeriesStorage) loadChunks(fp clientmodel.Fingerprint, indexes []int, indexOffset int) ([]chunk, error) { func (s *memorySeriesStorage) loadChunks(fp model.Fingerprint, indexes []int, indexOffset int) ([]chunk, error) {
return s.persistence.loadChunks(fp, indexes, indexOffset) return s.persistence.loadChunks(fp, indexes, indexOffset)
} }
// See persistence.loadChunkDescs for detailed explanation. // See persistence.loadChunkDescs for detailed explanation.
func (s *memorySeriesStorage) loadChunkDescs(fp clientmodel.Fingerprint, offsetFromEnd int) ([]*chunkDesc, error) { func (s *memorySeriesStorage) loadChunkDescs(fp model.Fingerprint, offsetFromEnd int) ([]*chunkDesc, error) {
return s.persistence.loadChunkDescs(fp, offsetFromEnd) return s.persistence.loadChunkDescs(fp, offsetFromEnd)
} }

View file

@ -24,7 +24,7 @@ import (
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/util/testutil" "github.com/prometheus/prometheus/util/testutil"
@ -34,20 +34,20 @@ func TestMatches(t *testing.T) {
storage, closer := NewTestStorage(t, 1) storage, closer := NewTestStorage(t, 1)
defer closer.Close() defer closer.Close()
samples := make([]*clientmodel.Sample, 100) samples := make([]*model.Sample, 100)
fingerprints := make(clientmodel.Fingerprints, 100) fingerprints := make(model.Fingerprints, 100)
for i := range samples { for i := range samples {
metric := clientmodel.Metric{ metric := model.Metric{
clientmodel.MetricNameLabel: clientmodel.LabelValue(fmt.Sprintf("test_metric_%d", i)), model.MetricNameLabel: model.LabelValue(fmt.Sprintf("test_metric_%d", i)),
"label1": clientmodel.LabelValue(fmt.Sprintf("test_%d", i/10)), "label1": model.LabelValue(fmt.Sprintf("test_%d", i/10)),
"label2": clientmodel.LabelValue(fmt.Sprintf("test_%d", (i+5)/10)), "label2": model.LabelValue(fmt.Sprintf("test_%d", (i+5)/10)),
"all": "const", "all": "const",
} }
samples[i] = &clientmodel.Sample{ samples[i] = &model.Sample{
Metric: metric, Metric: metric,
Timestamp: clientmodel.Timestamp(i), Timestamp: model.Time(i),
Value: clientmodel.SampleValue(i), Value: model.SampleValue(i),
} }
fingerprints[i] = metric.FastFingerprint() fingerprints[i] = metric.FastFingerprint()
} }
@ -56,7 +56,7 @@ func TestMatches(t *testing.T) {
} }
storage.WaitForIndexing() storage.WaitForIndexing()
newMatcher := func(matchType metric.MatchType, name clientmodel.LabelName, value clientmodel.LabelValue) *metric.LabelMatcher { newMatcher := func(matchType metric.MatchType, name model.LabelName, value model.LabelValue) *metric.LabelMatcher {
lm, err := metric.NewLabelMatcher(matchType, name, value) lm, err := metric.NewLabelMatcher(matchType, name, value)
if err != nil { if err != nil {
t.Fatalf("error creating label matcher: %s", err) t.Fatalf("error creating label matcher: %s", err)
@ -66,11 +66,11 @@ func TestMatches(t *testing.T) {
var matcherTests = []struct { var matcherTests = []struct {
matchers metric.LabelMatchers matchers metric.LabelMatchers
expected clientmodel.Fingerprints expected model.Fingerprints
}{ }{
{ {
matchers: metric.LabelMatchers{newMatcher(metric.Equal, "label1", "x")}, matchers: metric.LabelMatchers{newMatcher(metric.Equal, "label1", "x")},
expected: clientmodel.Fingerprints{}, expected: model.Fingerprints{},
}, },
{ {
matchers: metric.LabelMatchers{newMatcher(metric.Equal, "label1", "test_0")}, matchers: metric.LabelMatchers{newMatcher(metric.Equal, "label1", "test_0")},
@ -145,7 +145,7 @@ func TestMatches(t *testing.T) {
newMatcher(metric.Equal, "all", "const"), newMatcher(metric.Equal, "all", "const"),
newMatcher(metric.RegexNoMatch, "label1", `test_[3-5]`), newMatcher(metric.RegexNoMatch, "label1", `test_[3-5]`),
}, },
expected: append(append(clientmodel.Fingerprints{}, fingerprints[:30]...), fingerprints[60:]...), expected: append(append(model.Fingerprints{}, fingerprints[:30]...), fingerprints[60:]...),
}, },
{ {
matchers: metric.LabelMatchers{ matchers: metric.LabelMatchers{
@ -159,21 +159,21 @@ func TestMatches(t *testing.T) {
newMatcher(metric.RegexMatch, "label1", `test_[3-5]`), newMatcher(metric.RegexMatch, "label1", `test_[3-5]`),
newMatcher(metric.NotEqual, "label2", `test_4`), newMatcher(metric.NotEqual, "label2", `test_4`),
}, },
expected: append(append(clientmodel.Fingerprints{}, fingerprints[30:35]...), fingerprints[45:60]...), expected: append(append(model.Fingerprints{}, fingerprints[30:35]...), fingerprints[45:60]...),
}, },
{ {
matchers: metric.LabelMatchers{ matchers: metric.LabelMatchers{
newMatcher(metric.Equal, "label1", `nonexistent`), newMatcher(metric.Equal, "label1", `nonexistent`),
newMatcher(metric.RegexMatch, "label2", `test`), newMatcher(metric.RegexMatch, "label2", `test`),
}, },
expected: clientmodel.Fingerprints{}, expected: model.Fingerprints{},
}, },
{ {
matchers: metric.LabelMatchers{ matchers: metric.LabelMatchers{
newMatcher(metric.Equal, "label1", `test_0`), newMatcher(metric.Equal, "label1", `test_0`),
newMatcher(metric.RegexMatch, "label2", `nonexistent`), newMatcher(metric.RegexMatch, "label2", `nonexistent`),
}, },
expected: clientmodel.Fingerprints{}, expected: model.Fingerprints{},
}, },
} }
@ -201,19 +201,19 @@ func TestFingerprintsForLabels(t *testing.T) {
storage, closer := NewTestStorage(t, 1) storage, closer := NewTestStorage(t, 1)
defer closer.Close() defer closer.Close()
samples := make([]*clientmodel.Sample, 100) samples := make([]*model.Sample, 100)
fingerprints := make(clientmodel.Fingerprints, 100) fingerprints := make(model.Fingerprints, 100)
for i := range samples { for i := range samples {
metric := clientmodel.Metric{ metric := model.Metric{
clientmodel.MetricNameLabel: clientmodel.LabelValue(fmt.Sprintf("test_metric_%d", i)), model.MetricNameLabel: model.LabelValue(fmt.Sprintf("test_metric_%d", i)),
"label1": clientmodel.LabelValue(fmt.Sprintf("test_%d", i/10)), "label1": model.LabelValue(fmt.Sprintf("test_%d", i/10)),
"label2": clientmodel.LabelValue(fmt.Sprintf("test_%d", (i+5)/10)), "label2": model.LabelValue(fmt.Sprintf("test_%d", (i+5)/10)),
} }
samples[i] = &clientmodel.Sample{ samples[i] = &model.Sample{
Metric: metric, Metric: metric,
Timestamp: clientmodel.Timestamp(i), Timestamp: model.Time(i),
Value: clientmodel.SampleValue(i), Value: model.SampleValue(i),
} }
fingerprints[i] = metric.FastFingerprint() fingerprints[i] = metric.FastFingerprint()
} }
@ -224,7 +224,7 @@ func TestFingerprintsForLabels(t *testing.T) {
var matcherTests = []struct { var matcherTests = []struct {
pairs []metric.LabelPair pairs []metric.LabelPair
expected clientmodel.Fingerprints expected model.Fingerprints
}{ }{
{ {
pairs: []metric.LabelPair{{"label1", "x"}}, pairs: []metric.LabelPair{{"label1", "x"}},
@ -277,21 +277,21 @@ func TestFingerprintsForLabels(t *testing.T) {
} }
} }
var benchLabelMatchingRes map[clientmodel.Fingerprint]clientmodel.COWMetric var benchLabelMatchingRes map[model.Fingerprint]model.COWMetric
func BenchmarkLabelMatching(b *testing.B) { func BenchmarkLabelMatching(b *testing.B) {
s, closer := NewTestStorage(b, 1) s, closer := NewTestStorage(b, 1)
defer closer.Close() defer closer.Close()
h := fnv.New64a() h := fnv.New64a()
lbl := func(x int) clientmodel.LabelValue { lbl := func(x int) model.LabelValue {
h.Reset() h.Reset()
h.Write([]byte(fmt.Sprintf("%d", x))) h.Write([]byte(fmt.Sprintf("%d", x)))
return clientmodel.LabelValue(fmt.Sprintf("%d", h.Sum64())) return model.LabelValue(fmt.Sprintf("%d", h.Sum64()))
} }
M := 32 M := 32
met := clientmodel.Metric{} met := model.Metric{}
for i := 0; i < M; i++ { for i := 0; i < M; i++ {
met["label_a"] = lbl(i) met["label_a"] = lbl(i)
for j := 0; j < M; j++ { for j := 0; j < M; j++ {
@ -300,7 +300,7 @@ func BenchmarkLabelMatching(b *testing.B) {
met["label_c"] = lbl(k) met["label_c"] = lbl(k)
for l := 0; l < M; l++ { for l := 0; l < M; l++ {
met["label_d"] = lbl(l) met["label_d"] = lbl(l)
s.Append(&clientmodel.Sample{ s.Append(&model.Sample{
Metric: met.Clone(), Metric: met.Clone(),
Timestamp: 0, Timestamp: 0,
Value: 1, Value: 1,
@ -311,7 +311,7 @@ func BenchmarkLabelMatching(b *testing.B) {
} }
s.WaitForIndexing() s.WaitForIndexing()
newMatcher := func(matchType metric.MatchType, name clientmodel.LabelName, value clientmodel.LabelValue) *metric.LabelMatcher { newMatcher := func(matchType metric.MatchType, name model.LabelName, value model.LabelValue) *metric.LabelMatcher {
lm, err := metric.NewLabelMatcher(matchType, name, value) lm, err := metric.NewLabelMatcher(matchType, name, value)
if err != nil { if err != nil {
b.Fatalf("error creating label matcher: %s", err) b.Fatalf("error creating label matcher: %s", err)
@ -360,7 +360,7 @@ func BenchmarkLabelMatching(b *testing.B) {
b.ResetTimer() b.ResetTimer()
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
benchLabelMatchingRes = map[clientmodel.Fingerprint]clientmodel.COWMetric{} benchLabelMatchingRes = map[model.Fingerprint]model.COWMetric{}
for _, mt := range matcherTests { for _, mt := range matcherTests {
benchLabelMatchingRes = s.MetricsForLabelMatchers(mt...) benchLabelMatchingRes = s.MetricsForLabelMatchers(mt...)
} }
@ -370,7 +370,7 @@ func BenchmarkLabelMatching(b *testing.B) {
} }
func TestRetentionCutoff(t *testing.T) { func TestRetentionCutoff(t *testing.T) {
now := clientmodel.Now() now := model.Now()
insertStart := now.Add(-2 * time.Hour) insertStart := now.Add(-2 * time.Hour)
s, closer := NewTestStorage(t, 1) s, closer := NewTestStorage(t, 1)
@ -382,8 +382,8 @@ func TestRetentionCutoff(t *testing.T) {
s.dropAfter = 1 * time.Hour s.dropAfter = 1 * time.Hour
for i := 0; i < 120; i++ { for i := 0; i < 120; i++ {
smpl := &clientmodel.Sample{ smpl := &model.Sample{
Metric: clientmodel.Metric{"job": "test"}, Metric: model.Metric{"job": "test"},
Timestamp: insertStart.Add(time.Duration(i) * time.Minute), // 1 minute intervals. Timestamp: insertStart.Add(time.Duration(i) * time.Minute), // 1 minute intervals.
Value: 1, Value: 1,
} }
@ -391,7 +391,7 @@ func TestRetentionCutoff(t *testing.T) {
} }
s.WaitForIndexing() s.WaitForIndexing()
var fp clientmodel.Fingerprint var fp model.Fingerprint
for f := range s.fingerprintsForLabelPairs(metric.LabelPair{Name: "job", Value: "test"}) { for f := range s.fingerprintsForLabelPairs(metric.LabelPair{Name: "job", Value: "test"}) {
fp = f fp = f
break break
@ -414,7 +414,7 @@ func TestRetentionCutoff(t *testing.T) {
} }
vals = it.RangeValues(metric.Interval{OldestInclusive: insertStart, NewestInclusive: now}) vals = it.RangeValues(metric.Interval{OldestInclusive: insertStart, NewestInclusive: now})
// We get 59 values here because the clientmodel.Now() is slightly later // We get 59 values here because the model.Now() is slightly later
// than our now. // than our now.
if len(vals) != 59 { if len(vals) != 59 {
t.Errorf("expected 59 values but got %d", len(vals)) t.Errorf("expected 59 values but got %d", len(vals))
@ -433,35 +433,35 @@ func TestRetentionCutoff(t *testing.T) {
} }
func TestDropMetrics(t *testing.T) { func TestDropMetrics(t *testing.T) {
now := clientmodel.Now() now := model.Now()
insertStart := now.Add(-2 * time.Hour) insertStart := now.Add(-2 * time.Hour)
s, closer := NewTestStorage(t, 1) s, closer := NewTestStorage(t, 1)
defer closer.Close() defer closer.Close()
m1 := clientmodel.Metric{clientmodel.MetricNameLabel: "test", "n1": "v1"} m1 := model.Metric{model.MetricNameLabel: "test", "n1": "v1"}
m2 := clientmodel.Metric{clientmodel.MetricNameLabel: "test", "n1": "v2"} m2 := model.Metric{model.MetricNameLabel: "test", "n1": "v2"}
N := 120000 N := 120000
for j, m := range []clientmodel.Metric{m1, m2} { for j, m := range []model.Metric{m1, m2} {
for i := 0; i < N; i++ { for i := 0; i < N; i++ {
smpl := &clientmodel.Sample{ smpl := &model.Sample{
Metric: m, Metric: m,
Timestamp: insertStart.Add(time.Duration(i) * time.Millisecond), // 1 millisecond intervals. Timestamp: insertStart.Add(time.Duration(i) * time.Millisecond), // 1 millisecond intervals.
Value: clientmodel.SampleValue(j), Value: model.SampleValue(j),
} }
s.Append(smpl) s.Append(smpl)
} }
} }
s.WaitForIndexing() s.WaitForIndexing()
fps := s.fingerprintsForLabelPairs(metric.LabelPair{Name: clientmodel.MetricNameLabel, Value: "test"}) fps := s.fingerprintsForLabelPairs(metric.LabelPair{Name: model.MetricNameLabel, Value: "test"})
if len(fps) != 2 { if len(fps) != 2 {
t.Fatalf("unexpected number of fingerprints: %d", len(fps)) t.Fatalf("unexpected number of fingerprints: %d", len(fps))
} }
var fpList clientmodel.Fingerprints var fpList model.Fingerprints
for fp := range fps { for fp := range fps {
it := s.NewIterator(fp) it := s.NewIterator(fp)
if vals := it.RangeValues(metric.Interval{OldestInclusive: insertStart, NewestInclusive: now}); len(vals) != N { if vals := it.RangeValues(metric.Interval{OldestInclusive: insertStart, NewestInclusive: now}); len(vals) != N {
@ -474,7 +474,7 @@ func TestDropMetrics(t *testing.T) {
s.WaitForIndexing() s.WaitForIndexing()
fps2 := s.fingerprintsForLabelPairs(metric.LabelPair{ fps2 := s.fingerprintsForLabelPairs(metric.LabelPair{
Name: clientmodel.MetricNameLabel, Value: "test", Name: model.MetricNameLabel, Value: "test",
}) })
if len(fps2) != 1 { if len(fps2) != 1 {
t.Fatalf("unexpected number of fingerprints: %d", len(fps2)) t.Fatalf("unexpected number of fingerprints: %d", len(fps2))
@ -493,7 +493,7 @@ func TestDropMetrics(t *testing.T) {
s.WaitForIndexing() s.WaitForIndexing()
fps3 := s.fingerprintsForLabelPairs(metric.LabelPair{ fps3 := s.fingerprintsForLabelPairs(metric.LabelPair{
Name: clientmodel.MetricNameLabel, Value: "test", Name: model.MetricNameLabel, Value: "test",
}) })
if len(fps3) != 0 { if len(fps3) != 0 {
t.Fatalf("unexpected number of fingerprints: %d", len(fps3)) t.Fatalf("unexpected number of fingerprints: %d", len(fps3))
@ -515,11 +515,11 @@ func TestLoop(t *testing.T) {
if testing.Short() { if testing.Short() {
t.Skip("Skipping test in short mode.") t.Skip("Skipping test in short mode.")
} }
samples := make(clientmodel.Samples, 1000) samples := make(model.Samples, 1000)
for i := range samples { for i := range samples {
samples[i] = &clientmodel.Sample{ samples[i] = &model.Sample{
Timestamp: clientmodel.Timestamp(2 * i), Timestamp: model.Time(2 * i),
Value: clientmodel.SampleValue(float64(i) * 0.2), Value: model.SampleValue(float64(i) * 0.2),
} }
} }
directory := testutil.NewTemporaryDirectory("test_storage", t) directory := testutil.NewTemporaryDirectory("test_storage", t)
@ -540,7 +540,7 @@ func TestLoop(t *testing.T) {
storage.Append(s) storage.Append(s)
} }
storage.WaitForIndexing() storage.WaitForIndexing()
series, _ := storage.(*memorySeriesStorage).fpToSeries.get(clientmodel.Metric{}.FastFingerprint()) series, _ := storage.(*memorySeriesStorage).fpToSeries.get(model.Metric{}.FastFingerprint())
cdsBefore := len(series.chunkDescs) cdsBefore := len(series.chunkDescs)
time.Sleep(fpMaxWaitDuration + time.Second) // TODO(beorn7): Ugh, need to wait for maintenance to kick in. time.Sleep(fpMaxWaitDuration + time.Second) // TODO(beorn7): Ugh, need to wait for maintenance to kick in.
cdsAfter := len(series.chunkDescs) cdsAfter := len(series.chunkDescs)
@ -554,11 +554,11 @@ func TestLoop(t *testing.T) {
} }
func testChunk(t *testing.T, encoding chunkEncoding) { func testChunk(t *testing.T, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 500000) samples := make(model.Samples, 500000)
for i := range samples { for i := range samples {
samples[i] = &clientmodel.Sample{ samples[i] = &model.Sample{
Timestamp: clientmodel.Timestamp(i), Timestamp: model.Time(i),
Value: clientmodel.SampleValue(float64(i) * 0.2), Value: model.SampleValue(float64(i) * 0.2),
} }
} }
s, closer := NewTestStorage(t, encoding) s, closer := NewTestStorage(t, encoding)
@ -604,11 +604,11 @@ func TestChunkType1(t *testing.T) {
} }
func testValueAtTime(t *testing.T, encoding chunkEncoding) { func testValueAtTime(t *testing.T, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000) samples := make(model.Samples, 10000)
for i := range samples { for i := range samples {
samples[i] = &clientmodel.Sample{ samples[i] = &model.Sample{
Timestamp: clientmodel.Timestamp(2 * i), Timestamp: model.Time(2 * i),
Value: clientmodel.SampleValue(float64(i) * 0.2), Value: model.SampleValue(float64(i) * 0.2),
} }
} }
s, closer := NewTestStorage(t, encoding) s, closer := NewTestStorage(t, encoding)
@ -619,7 +619,7 @@ func testValueAtTime(t *testing.T, encoding chunkEncoding) {
} }
s.WaitForIndexing() s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint() fp := model.Metric{}.FastFingerprint()
it := s.NewIterator(fp) it := s.NewIterator(fp)
@ -697,11 +697,11 @@ func TestValueAtTimeChunkType1(t *testing.T) {
} }
func benchmarkValueAtTime(b *testing.B, encoding chunkEncoding) { func benchmarkValueAtTime(b *testing.B, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000) samples := make(model.Samples, 10000)
for i := range samples { for i := range samples {
samples[i] = &clientmodel.Sample{ samples[i] = &model.Sample{
Timestamp: clientmodel.Timestamp(2 * i), Timestamp: model.Time(2 * i),
Value: clientmodel.SampleValue(float64(i) * 0.2), Value: model.SampleValue(float64(i) * 0.2),
} }
} }
s, closer := NewTestStorage(b, encoding) s, closer := NewTestStorage(b, encoding)
@ -712,7 +712,7 @@ func benchmarkValueAtTime(b *testing.B, encoding chunkEncoding) {
} }
s.WaitForIndexing() s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint() fp := model.Metric{}.FastFingerprint()
b.ResetTimer() b.ResetTimer()
@ -770,11 +770,11 @@ func BenchmarkValueAtTimeChunkType1(b *testing.B) {
} }
func testRangeValues(t *testing.T, encoding chunkEncoding) { func testRangeValues(t *testing.T, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000) samples := make(model.Samples, 10000)
for i := range samples { for i := range samples {
samples[i] = &clientmodel.Sample{ samples[i] = &model.Sample{
Timestamp: clientmodel.Timestamp(2 * i), Timestamp: model.Time(2 * i),
Value: clientmodel.SampleValue(float64(i) * 0.2), Value: model.SampleValue(float64(i) * 0.2),
} }
} }
s, closer := NewTestStorage(t, encoding) s, closer := NewTestStorage(t, encoding)
@ -785,7 +785,7 @@ func testRangeValues(t *testing.T, encoding chunkEncoding) {
} }
s.WaitForIndexing() s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint() fp := model.Metric{}.FastFingerprint()
it := s.NewIterator(fp) it := s.NewIterator(fp)
@ -922,11 +922,11 @@ func TestRangeValuesChunkType1(t *testing.T) {
} }
func benchmarkRangeValues(b *testing.B, encoding chunkEncoding) { func benchmarkRangeValues(b *testing.B, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000) samples := make(model.Samples, 10000)
for i := range samples { for i := range samples {
samples[i] = &clientmodel.Sample{ samples[i] = &model.Sample{
Timestamp: clientmodel.Timestamp(2 * i), Timestamp: model.Time(2 * i),
Value: clientmodel.SampleValue(float64(i) * 0.2), Value: model.SampleValue(float64(i) * 0.2),
} }
} }
s, closer := NewTestStorage(b, encoding) s, closer := NewTestStorage(b, encoding)
@ -937,7 +937,7 @@ func benchmarkRangeValues(b *testing.B, encoding chunkEncoding) {
} }
s.WaitForIndexing() s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint() fp := model.Metric{}.FastFingerprint()
b.ResetTimer() b.ResetTimer()
@ -967,11 +967,11 @@ func BenchmarkRangeValuesChunkType1(b *testing.B) {
} }
func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) { func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000) samples := make(model.Samples, 10000)
for i := range samples { for i := range samples {
samples[i] = &clientmodel.Sample{ samples[i] = &model.Sample{
Timestamp: clientmodel.Timestamp(2 * i), Timestamp: model.Time(2 * i),
Value: clientmodel.SampleValue(float64(i * i)), Value: model.SampleValue(float64(i * i)),
} }
} }
s, closer := NewTestStorage(t, encoding) s, closer := NewTestStorage(t, encoding)
@ -982,7 +982,7 @@ func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
} }
s.WaitForIndexing() s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint() fp := model.Metric{}.FastFingerprint()
// Drop ~half of the chunks. // Drop ~half of the chunks.
s.maintainMemorySeries(fp, 10000) s.maintainMemorySeries(fp, 10000)
@ -997,7 +997,7 @@ func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
if actual[0].Timestamp < 6000 || actual[0].Timestamp > 10000 { if actual[0].Timestamp < 6000 || actual[0].Timestamp > 10000 {
t.Errorf("1st timestamp out of expected range: %v", actual[0].Timestamp) t.Errorf("1st timestamp out of expected range: %v", actual[0].Timestamp)
} }
want := clientmodel.Timestamp(19998) want := model.Time(19998)
if actual[1].Timestamp != want { if actual[1].Timestamp != want {
t.Errorf("2nd timestamp: want %v, got %v", want, actual[1].Timestamp) t.Errorf("2nd timestamp: want %v, got %v", want, actual[1].Timestamp)
} }
@ -1026,7 +1026,7 @@ func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
// Persist head chunk so we can safely archive. // Persist head chunk so we can safely archive.
series.headChunkClosed = true series.headChunkClosed = true
s.maintainMemorySeries(fp, clientmodel.Earliest) s.maintainMemorySeries(fp, model.Earliest)
// Archive metrics. // Archive metrics.
s.fpToSeries.del(fp) s.fpToSeries.del(fp)
@ -1077,7 +1077,7 @@ func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
// Persist head chunk so we can safely archive. // Persist head chunk so we can safely archive.
series.headChunkClosed = true series.headChunkClosed = true
s.maintainMemorySeries(fp, clientmodel.Earliest) s.maintainMemorySeries(fp, model.Earliest)
// Archive metrics. // Archive metrics.
s.fpToSeries.del(fp) s.fpToSeries.del(fp)
@ -1096,7 +1096,7 @@ func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
} }
// Unarchive metrics. // Unarchive metrics.
s.getOrCreateSeries(fp, clientmodel.Metric{}) s.getOrCreateSeries(fp, model.Metric{})
series, ok = s.fpToSeries.get(fp) series, ok = s.fpToSeries.get(fp)
if !ok { if !ok {
@ -1131,19 +1131,19 @@ func TestEvictAndPurgeSeriesChunkType1(t *testing.T) {
} }
func testEvictAndLoadChunkDescs(t *testing.T, encoding chunkEncoding) { func testEvictAndLoadChunkDescs(t *testing.T, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000) samples := make(model.Samples, 10000)
for i := range samples { for i := range samples {
samples[i] = &clientmodel.Sample{ samples[i] = &model.Sample{
Timestamp: clientmodel.Timestamp(2 * i), Timestamp: model.Time(2 * i),
Value: clientmodel.SampleValue(float64(i * i)), Value: model.SampleValue(float64(i * i)),
} }
} }
// Give last sample a timestamp of now so that the head chunk will not // Give last sample a timestamp of now so that the head chunk will not
// be closed (which would then archive the time series later as // be closed (which would then archive the time series later as
// everything will get evicted). // everything will get evicted).
samples[len(samples)-1] = &clientmodel.Sample{ samples[len(samples)-1] = &model.Sample{
Timestamp: clientmodel.Now(), Timestamp: model.Now(),
Value: clientmodel.SampleValue(3.14), Value: model.SampleValue(3.14),
} }
s, closer := NewTestStorage(t, encoding) s, closer := NewTestStorage(t, encoding)
@ -1157,7 +1157,7 @@ func testEvictAndLoadChunkDescs(t *testing.T, encoding chunkEncoding) {
} }
s.WaitForIndexing() s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint() fp := model.Metric{}.FastFingerprint()
series, ok := s.fpToSeries.get(fp) series, ok := s.fpToSeries.get(fp)
if !ok { if !ok {
@ -1203,16 +1203,16 @@ func TestEvictAndLoadChunkDescsType1(t *testing.T) {
} }
func benchmarkAppend(b *testing.B, encoding chunkEncoding) { func benchmarkAppend(b *testing.B, encoding chunkEncoding) {
samples := make(clientmodel.Samples, b.N) samples := make(model.Samples, b.N)
for i := range samples { for i := range samples {
samples[i] = &clientmodel.Sample{ samples[i] = &model.Sample{
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: clientmodel.LabelValue(fmt.Sprintf("test_metric_%d", i%10)), model.MetricNameLabel: model.LabelValue(fmt.Sprintf("test_metric_%d", i%10)),
"label1": clientmodel.LabelValue(fmt.Sprintf("test_metric_%d", i%10)), "label1": model.LabelValue(fmt.Sprintf("test_metric_%d", i%10)),
"label2": clientmodel.LabelValue(fmt.Sprintf("test_metric_%d", i%10)), "label2": model.LabelValue(fmt.Sprintf("test_metric_%d", i%10)),
}, },
Timestamp: clientmodel.Timestamp(i), Timestamp: model.Time(i),
Value: clientmodel.SampleValue(i), Value: model.SampleValue(i),
} }
} }
b.ResetTimer() b.ResetTimer()
@ -1323,56 +1323,56 @@ func BenchmarkFuzzChunkType1(b *testing.B) {
benchmarkFuzz(b, 1) benchmarkFuzz(b, 1)
} }
func createRandomSamples(metricName string, minLen int) clientmodel.Samples { func createRandomSamples(metricName string, minLen int) model.Samples {
type valueCreator func() clientmodel.SampleValue type valueCreator func() model.SampleValue
type deltaApplier func(clientmodel.SampleValue) clientmodel.SampleValue type deltaApplier func(model.SampleValue) model.SampleValue
var ( var (
maxMetrics = 5 maxMetrics = 5
maxStreakLength = 500 maxStreakLength = 500
maxTimeDelta = 10000 maxTimeDelta = 10000
maxTimeDeltaFactor = 10 maxTimeDeltaFactor = 10
timestamp = clientmodel.Now() - clientmodel.Timestamp(maxTimeDelta*maxTimeDeltaFactor*minLen/4) // So that some timestamps are in the future. timestamp = model.Now() - model.Time(maxTimeDelta*maxTimeDeltaFactor*minLen/4) // So that some timestamps are in the future.
generators = []struct { generators = []struct {
createValue valueCreator createValue valueCreator
applyDelta []deltaApplier applyDelta []deltaApplier
}{ }{
{ // "Boolean". { // "Boolean".
createValue: func() clientmodel.SampleValue { createValue: func() model.SampleValue {
return clientmodel.SampleValue(rand.Intn(2)) return model.SampleValue(rand.Intn(2))
}, },
applyDelta: []deltaApplier{ applyDelta: []deltaApplier{
func(_ clientmodel.SampleValue) clientmodel.SampleValue { func(_ model.SampleValue) model.SampleValue {
return clientmodel.SampleValue(rand.Intn(2)) return model.SampleValue(rand.Intn(2))
}, },
}, },
}, },
{ // Integer with int deltas of various byte length. { // Integer with int deltas of various byte length.
createValue: func() clientmodel.SampleValue { createValue: func() model.SampleValue {
return clientmodel.SampleValue(rand.Int63() - 1<<62) return model.SampleValue(rand.Int63() - 1<<62)
}, },
applyDelta: []deltaApplier{ applyDelta: []deltaApplier{
func(v clientmodel.SampleValue) clientmodel.SampleValue { func(v model.SampleValue) model.SampleValue {
return clientmodel.SampleValue(rand.Intn(1<<8) - 1<<7 + int(v)) return model.SampleValue(rand.Intn(1<<8) - 1<<7 + int(v))
}, },
func(v clientmodel.SampleValue) clientmodel.SampleValue { func(v model.SampleValue) model.SampleValue {
return clientmodel.SampleValue(rand.Intn(1<<16) - 1<<15 + int(v)) return model.SampleValue(rand.Intn(1<<16) - 1<<15 + int(v))
}, },
func(v clientmodel.SampleValue) clientmodel.SampleValue { func(v model.SampleValue) model.SampleValue {
return clientmodel.SampleValue(rand.Int63n(1<<32) - 1<<31 + int64(v)) return model.SampleValue(rand.Int63n(1<<32) - 1<<31 + int64(v))
}, },
}, },
}, },
{ // Float with float32 and float64 deltas. { // Float with float32 and float64 deltas.
createValue: func() clientmodel.SampleValue { createValue: func() model.SampleValue {
return clientmodel.SampleValue(rand.NormFloat64()) return model.SampleValue(rand.NormFloat64())
}, },
applyDelta: []deltaApplier{ applyDelta: []deltaApplier{
func(v clientmodel.SampleValue) clientmodel.SampleValue { func(v model.SampleValue) model.SampleValue {
return v + clientmodel.SampleValue(float32(rand.NormFloat64())) return v + model.SampleValue(float32(rand.NormFloat64()))
}, },
func(v clientmodel.SampleValue) clientmodel.SampleValue { func(v model.SampleValue) model.SampleValue {
return v + clientmodel.SampleValue(rand.NormFloat64()) return v + model.SampleValue(rand.NormFloat64())
}, },
}, },
}, },
@ -1380,17 +1380,17 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
) )
// Prefill result with two samples with colliding metrics (to test fingerprint mapping). // Prefill result with two samples with colliding metrics (to test fingerprint mapping).
result := clientmodel.Samples{ result := model.Samples{
&clientmodel.Sample{ &model.Sample{
Metric: clientmodel.Metric{ Metric: model.Metric{
"instance": "ip-10-33-84-73.l05.ams5.s-cloud.net:24483", "instance": "ip-10-33-84-73.l05.ams5.s-cloud.net:24483",
"status": "503", "status": "503",
}, },
Value: 42, Value: 42,
Timestamp: timestamp, Timestamp: timestamp,
}, },
&clientmodel.Sample{ &model.Sample{
Metric: clientmodel.Metric{ Metric: model.Metric{
"instance": "ip-10-33-84-73.l05.ams5.s-cloud.net:24480", "instance": "ip-10-33-84-73.l05.ams5.s-cloud.net:24480",
"status": "500", "status": "500",
}, },
@ -1399,11 +1399,11 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
}, },
} }
metrics := []clientmodel.Metric{} metrics := []model.Metric{}
for n := rand.Intn(maxMetrics); n >= 0; n-- { for n := rand.Intn(maxMetrics); n >= 0; n-- {
metrics = append(metrics, clientmodel.Metric{ metrics = append(metrics, model.Metric{
clientmodel.MetricNameLabel: clientmodel.LabelValue(metricName), model.MetricNameLabel: model.LabelValue(metricName),
clientmodel.LabelName(fmt.Sprintf("labelname_%d", n+1)): clientmodel.LabelValue(fmt.Sprintf("labelvalue_%d", rand.Int())), model.LabelName(fmt.Sprintf("labelname_%d", n+1)): model.LabelValue(fmt.Sprintf("labelvalue_%d", rand.Int())),
}) })
} }
@ -1414,10 +1414,10 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
generator := generators[rand.Intn(len(generators))] generator := generators[rand.Intn(len(generators))]
createValue := generator.createValue createValue := generator.createValue
applyDelta := generator.applyDelta[rand.Intn(len(generator.applyDelta))] applyDelta := generator.applyDelta[rand.Intn(len(generator.applyDelta))]
incTimestamp := func() { timestamp += clientmodel.Timestamp(timeDelta * (rand.Intn(maxTimeDeltaFactor) + 1)) } incTimestamp := func() { timestamp += model.Time(timeDelta * (rand.Intn(maxTimeDeltaFactor) + 1)) }
switch rand.Intn(4) { switch rand.Intn(4) {
case 0: // A single sample. case 0: // A single sample.
result = append(result, &clientmodel.Sample{ result = append(result, &model.Sample{
Metric: metric, Metric: metric,
Value: createValue(), Value: createValue(),
Timestamp: timestamp, Timestamp: timestamp,
@ -1425,7 +1425,7 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
incTimestamp() incTimestamp()
case 1: // A streak of random sample values. case 1: // A streak of random sample values.
for n := rand.Intn(maxStreakLength); n >= 0; n-- { for n := rand.Intn(maxStreakLength); n >= 0; n-- {
result = append(result, &clientmodel.Sample{ result = append(result, &model.Sample{
Metric: metric, Metric: metric,
Value: createValue(), Value: createValue(),
Timestamp: timestamp, Timestamp: timestamp,
@ -1435,7 +1435,7 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
case 2: // A streak of sample values with incremental changes. case 2: // A streak of sample values with incremental changes.
value := createValue() value := createValue()
for n := rand.Intn(maxStreakLength); n >= 0; n-- { for n := rand.Intn(maxStreakLength); n >= 0; n-- {
result = append(result, &clientmodel.Sample{ result = append(result, &model.Sample{
Metric: metric, Metric: metric,
Value: value, Value: value,
Timestamp: timestamp, Timestamp: timestamp,
@ -1446,7 +1446,7 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
case 3: // A streak of constant sample values. case 3: // A streak of constant sample values.
value := createValue() value := createValue()
for n := rand.Intn(maxStreakLength); n >= 0; n-- { for n := rand.Intn(maxStreakLength); n >= 0; n-- {
result = append(result, &clientmodel.Sample{ result = append(result, &model.Sample{
Metric: metric, Metric: metric,
Value: value, Value: value,
Timestamp: timestamp, Timestamp: timestamp,
@ -1459,12 +1459,12 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
return result return result
} }
func verifyStorage(t testing.TB, s *memorySeriesStorage, samples clientmodel.Samples, maxAge time.Duration) bool { func verifyStorage(t testing.TB, s *memorySeriesStorage, samples model.Samples, maxAge time.Duration) bool {
s.WaitForIndexing() s.WaitForIndexing()
result := true result := true
for _, i := range rand.Perm(len(samples)) { for _, i := range rand.Perm(len(samples)) {
sample := samples[i] sample := samples[i]
if sample.Timestamp.Before(clientmodel.TimestampFromTime(time.Now().Add(-maxAge))) { if sample.Timestamp.Before(model.TimeFromUnixNano(time.Now().Add(-maxAge).UnixNano())) {
continue continue
// TODO: Once we have a guaranteed cutoff at the // TODO: Once we have a guaranteed cutoff at the
// retention period, we can verify here that no results // retention period, we can verify here that no results
@ -1501,15 +1501,15 @@ func TestAppendOutOfOrder(t *testing.T) {
s, closer := NewTestStorage(t, 1) s, closer := NewTestStorage(t, 1)
defer closer.Close() defer closer.Close()
m := clientmodel.Metric{ m := model.Metric{
clientmodel.MetricNameLabel: "out_of_order", model.MetricNameLabel: "out_of_order",
} }
for i, t := range []int{0, 2, 2, 1} { for i, t := range []int{0, 2, 2, 1} {
s.Append(&clientmodel.Sample{ s.Append(&model.Sample{
Metric: m, Metric: m,
Timestamp: clientmodel.Timestamp(t), Timestamp: model.Time(t),
Value: clientmodel.SampleValue(i), Value: model.SampleValue(i),
}) })
} }
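
For orientation, the type-level mapping these test changes rely on can be restated as a small, self-contained sketch; the package layout and metric names below are illustrative, not taken from the repository:

package main

import (
    "fmt"
    "time"

    "github.com/prometheus/common/model"
)

func main() {
    // What used to be clientmodel.Sample/Metric/Timestamp/SampleValue is now
    // model.Sample/Metric/Time/SampleValue from common/model; only the
    // timestamp type changes its name.
    s := &model.Sample{
        Metric: model.Metric{
            model.MetricNameLabel: "example_metric", // illustrative metric
            "label1":              "value1",
        },
        Timestamp: model.TimeFromUnixNano(time.Now().UnixNano()),
        Value:     model.SampleValue(42),
    }
    fmt.Println(s.Metric.FastFingerprint(), s.Timestamp, s.Value)
}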


@ -14,13 +14,13 @@
package metric package metric
import ( import (
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
// LabelPair pairs a name with a value. // LabelPair pairs a name with a value.
type LabelPair struct { type LabelPair struct {
Name clientmodel.LabelName Name model.LabelName
Value clientmodel.LabelValue Value model.LabelValue
} }
// Equal returns true iff both the Name and the Value of this LabelPair and o // Equal returns true iff both the Name and the Value of this LabelPair and o


@ -17,7 +17,7 @@ import (
"fmt" "fmt"
"regexp" "regexp"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
// MatchType is an enum for label matching types. // MatchType is an enum for label matching types.
@ -50,13 +50,13 @@ type LabelMatchers []*LabelMatcher
// LabelMatcher models the matching of a label. // LabelMatcher models the matching of a label.
type LabelMatcher struct { type LabelMatcher struct {
Type MatchType Type MatchType
Name clientmodel.LabelName Name model.LabelName
Value clientmodel.LabelValue Value model.LabelValue
re *regexp.Regexp re *regexp.Regexp
} }
// NewLabelMatcher returns a LabelMatcher object ready to use. // NewLabelMatcher returns a LabelMatcher object ready to use.
func NewLabelMatcher(matchType MatchType, name clientmodel.LabelName, value clientmodel.LabelValue) (*LabelMatcher, error) { func NewLabelMatcher(matchType MatchType, name model.LabelName, value model.LabelValue) (*LabelMatcher, error) {
m := &LabelMatcher{ m := &LabelMatcher{
Type: matchType, Type: matchType,
Name: name, Name: name,
@ -77,7 +77,7 @@ func (m *LabelMatcher) String() string {
} }
// Match returns true if the label matcher matches the supplied label value. // Match returns true if the label matcher matches the supplied label value.
func (m *LabelMatcher) Match(v clientmodel.LabelValue) bool { func (m *LabelMatcher) Match(v model.LabelValue) bool {
switch m.Type { switch m.Type {
case Equal: case Equal:
return m.Value == v return m.Value == v
@ -94,8 +94,8 @@ func (m *LabelMatcher) Match(v clientmodel.LabelValue) bool {
// Filter takes a list of label values and returns all label values which match // Filter takes a list of label values and returns all label values which match
// the label matcher. // the label matcher.
func (m *LabelMatcher) Filter(in clientmodel.LabelValues) clientmodel.LabelValues { func (m *LabelMatcher) Filter(in model.LabelValues) model.LabelValues {
out := clientmodel.LabelValues{} out := model.LabelValues{}
for _, v := range in { for _, v := range in {
if m.Match(v) { if m.Match(v) {
out = append(out, v) out = append(out, v)
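
The only change to the matcher API is the label name/value types; a minimal usage sketch under that assumption (standalone package, label names and values invented for illustration):

package main

import (
    "fmt"

    "github.com/prometheus/common/model"
    "github.com/prometheus/prometheus/storage/metric"
)

func main() {
    // NewLabelMatcher now takes model.LabelName and model.LabelValue instead
    // of the clientmodel equivalents.
    lm, err := metric.NewLabelMatcher(metric.RegexMatch, model.LabelName("label1"), model.LabelValue("test_[3-5]"))
    if err != nil {
        panic(err) // only reached if the regular expression is invalid
    }
    fmt.Println(lm.Match("test_4"), lm.Match("other")) // regexp match against label values
}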


@ -17,7 +17,7 @@ import (
"fmt" "fmt"
"strconv" "strconv"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
// MarshalJSON implements json.Marshaler. // MarshalJSON implements json.Marshaler.
@ -27,8 +27,8 @@ func (s SamplePair) MarshalJSON() ([]byte, error) {
// SamplePair pairs a SampleValue with a Timestamp. // SamplePair pairs a SampleValue with a Timestamp.
type SamplePair struct { type SamplePair struct {
Timestamp clientmodel.Timestamp Timestamp model.Time
Value clientmodel.SampleValue Value model.SampleValue
} }
// Equal returns true if this SamplePair and o have equal Values and equal // Equal returns true if this SamplePair and o have equal Values and equal
@ -50,6 +50,6 @@ type Values []SamplePair
// Interval describes the inclusive interval between two Timestamps. // Interval describes the inclusive interval between two Timestamps.
type Interval struct { type Interval struct {
OldestInclusive clientmodel.Timestamp OldestInclusive model.Time
NewestInclusive clientmodel.Timestamp NewestInclusive model.Time
} }
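
Interval now carries model.Time bounds; since model.Time supports Add with a time.Duration (as the retention tests above already use), a relative interval can be written directly. A small sketch:

package main

import (
    "fmt"
    "time"

    "github.com/prometheus/common/model"
    "github.com/prometheus/prometheus/storage/metric"
)

func main() {
    // Both bounds are model.Time now; Add takes a time.Duration, so "the last
    // hour" reads the same way the tests above write it.
    now := model.Now()
    iv := metric.Interval{
        OldestInclusive: now.Add(-1 * time.Hour),
        NewestInclusive: now,
    }
    fmt.Println(iv.OldestInclusive.Time().UTC(), iv.NewestInclusive.Time().UTC())
}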


@ -23,10 +23,9 @@ import (
"net/url" "net/url"
"time" "time"
"github.com/prometheus/common/model"
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/util/httputil" "github.com/prometheus/prometheus/util/httputil"
) )
@ -63,23 +62,23 @@ type StoreSamplesRequest struct {
// point represents a single InfluxDB measurement. // point represents a single InfluxDB measurement.
type point struct { type point struct {
Timestamp int64 `json:"timestamp"` Timestamp int64 `json:"timestamp"`
Precision string `json:"precision"` Precision string `json:"precision"`
Name clientmodel.LabelValue `json:"name"` Name model.LabelValue `json:"name"`
Tags clientmodel.LabelSet `json:"tags"` Tags model.LabelSet `json:"tags"`
Fields fields `json:"fields"` Fields fields `json:"fields"`
} }
// fields represents the fields/columns sent to InfluxDB for a given measurement. // fields represents the fields/columns sent to InfluxDB for a given measurement.
type fields struct { type fields struct {
Value clientmodel.SampleValue `json:"value"` Value model.SampleValue `json:"value"`
} }
// tagsFromMetric extracts InfluxDB tags from a Prometheus metric. // tagsFromMetric extracts InfluxDB tags from a Prometheus metric.
func tagsFromMetric(m clientmodel.Metric) clientmodel.LabelSet { func tagsFromMetric(m model.Metric) model.LabelSet {
tags := make(clientmodel.LabelSet, len(m)-1) tags := make(model.LabelSet, len(m)-1)
for l, v := range m { for l, v := range m {
if l == clientmodel.MetricNameLabel { if l == model.MetricNameLabel {
continue continue
} }
tags[l] = v tags[l] = v
@ -88,7 +87,7 @@ func tagsFromMetric(m clientmodel.Metric) clientmodel.LabelSet {
} }
// Store sends a batch of samples to InfluxDB via its HTTP API. // Store sends a batch of samples to InfluxDB via its HTTP API.
func (c *Client) Store(samples clientmodel.Samples) error { func (c *Client) Store(samples model.Samples) error {
points := make([]point, 0, len(samples)) points := make([]point, 0, len(samples))
for _, s := range samples { for _, s := range samples {
v := float64(s.Value) v := float64(s.Value)
@ -98,7 +97,7 @@ func (c *Client) Store(samples clientmodel.Samples) error {
log.Warnf("cannot send value %f to InfluxDB, skipping sample %#v", v, s) log.Warnf("cannot send value %f to InfluxDB, skipping sample %#v", v, s)
continue continue
} }
metric := s.Metric[clientmodel.MetricNameLabel] metric := s.Metric[model.MetricNameLabel]
points = append(points, point{ points = append(points, point{
Timestamp: s.Timestamp.UnixNano(), Timestamp: s.Timestamp.UnixNano(),
Precision: "n", Precision: "n",
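
The tag-extraction logic above is short enough to restate standalone with the new types; everything except the example metric in main is taken from the hunk above:

package main

import (
    "fmt"

    "github.com/prometheus/common/model"
)

// tagsFromMetric mirrors the InfluxDB client helper above: every label except
// the metric name (__name__) becomes a tag.
func tagsFromMetric(m model.Metric) model.LabelSet {
    tags := make(model.LabelSet, len(m)-1)
    for l, v := range m {
        if l == model.MetricNameLabel {
            continue
        }
        tags[l] = v
    }
    return tags
}

func main() {
    m := model.Metric{
        model.MetricNameLabel: "http_requests_total", // illustrative metric
        "job":                 "api",
        "status":              "200",
    }
    fmt.Println(tagsFromMetric(m)) // prints the label set minus __name__
}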


@ -21,33 +21,33 @@ import (
"testing" "testing"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
func TestClient(t *testing.T) { func TestClient(t *testing.T) {
samples := clientmodel.Samples{ samples := model.Samples{
{ {
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: "testmetric", model.MetricNameLabel: "testmetric",
"test_label": "test_label_value1", "test_label": "test_label_value1",
}, },
Timestamp: clientmodel.Timestamp(123456789123), Timestamp: model.Time(123456789123),
Value: 1.23, Value: 1.23,
}, },
{ {
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: "testmetric", model.MetricNameLabel: "testmetric",
"test_label": "test_label_value2", "test_label": "test_label_value2",
}, },
Timestamp: clientmodel.Timestamp(123456789123), Timestamp: model.Time(123456789123),
Value: 5.1234, Value: 5.1234,
}, },
{ {
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: "special_float_value", model.MetricNameLabel: "special_float_value",
}, },
Timestamp: clientmodel.Timestamp(123456789123), Timestamp: model.Time(123456789123),
Value: clientmodel.SampleValue(math.NaN()), Value: model.SampleValue(math.NaN()),
}, },
} }


@ -26,7 +26,7 @@ import (
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/util/httputil" "github.com/prometheus/prometheus/util/httputil"
) )
@ -64,10 +64,10 @@ type StoreSamplesRequest struct {
} }
// tagsFromMetric translates Prometheus metric into OpenTSDB tags. // tagsFromMetric translates Prometheus metric into OpenTSDB tags.
func tagsFromMetric(m clientmodel.Metric) map[string]TagValue { func tagsFromMetric(m model.Metric) map[string]TagValue {
tags := make(map[string]TagValue, len(m)-1) tags := make(map[string]TagValue, len(m)-1)
for l, v := range m { for l, v := range m {
if l == clientmodel.MetricNameLabel { if l == model.MetricNameLabel {
continue continue
} }
tags[string(l)] = TagValue(v) tags[string(l)] = TagValue(v)
@ -76,7 +76,7 @@ func tagsFromMetric(m clientmodel.Metric) map[string]TagValue {
} }
// Store sends a batch of samples to OpenTSDB via its HTTP API. // Store sends a batch of samples to OpenTSDB via its HTTP API.
func (c *Client) Store(samples clientmodel.Samples) error { func (c *Client) Store(samples model.Samples) error {
reqs := make([]StoreSamplesRequest, 0, len(samples)) reqs := make([]StoreSamplesRequest, 0, len(samples))
for _, s := range samples { for _, s := range samples {
v := float64(s.Value) v := float64(s.Value)
@ -84,7 +84,7 @@ func (c *Client) Store(samples clientmodel.Samples) error {
log.Warnf("cannot send value %f to OpenTSDB, skipping sample %#v", v, s) log.Warnf("cannot send value %f to OpenTSDB, skipping sample %#v", v, s)
continue continue
} }
metric := TagValue(s.Metric[clientmodel.MetricNameLabel]) metric := TagValue(s.Metric[model.MetricNameLabel])
reqs = append(reqs, StoreSamplesRequest{ reqs = append(reqs, StoreSamplesRequest{
Metric: metric, Metric: metric,
Timestamp: s.Timestamp.Unix(), Timestamp: s.Timestamp.Unix(),


@ -19,14 +19,14 @@ import (
"reflect" "reflect"
"testing" "testing"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
var ( var (
metric = clientmodel.Metric{ metric = model.Metric{
clientmodel.MetricNameLabel: "test:metric", model.MetricNameLabel: "test:metric",
"testlabel": "test:value", "testlabel": "test:value",
"many_chars": "abc!ABC:012-3!45ö67~89./", "many_chars": "abc!ABC:012-3!45ö67~89./",
} }
) )


@ -4,14 +4,14 @@ import (
"bytes" "bytes"
"fmt" "fmt"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
// TagValue is a clientmodel.LabelValue that implements json.Marshaler and // TagValue is a model.LabelValue that implements json.Marshaler and
// json.Unmarshaler. These implementations avoid characters illegal in // json.Unmarshaler. These implementations avoid characters illegal in
// OpenTSDB. See the MarshalJSON for details. TagValue is used for the values of // OpenTSDB. See the MarshalJSON for details. TagValue is used for the values of
// OpenTSDB tags as well as for OpenTSDB metric names. // OpenTSDB tags as well as for OpenTSDB metric names.
type TagValue clientmodel.LabelValue type TagValue model.LabelValue
// MarshalJSON marshals this TagValue into JSON that only contains runes allowed // MarshalJSON marshals this TagValue into JSON that only contains runes allowed
// in OpenTSDB. It implements json.Marshaler. The runes allowed in OpenTSDB are // in OpenTSDB. It implements json.Marshaler. The runes allowed in OpenTSDB are


@ -19,7 +19,7 @@ import (
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
const ( const (
@ -47,7 +47,7 @@ const (
// external timeseries database. // external timeseries database.
type StorageClient interface { type StorageClient interface {
// Store stores the given samples in the remote storage. // Store stores the given samples in the remote storage.
Store(clientmodel.Samples) error Store(model.Samples) error
// Name identifies the remote storage implementation. // Name identifies the remote storage implementation.
Name() string Name() string
} }
@ -56,8 +56,8 @@ type StorageClient interface {
// indicated by the provided StorageClient. // indicated by the provided StorageClient.
type StorageQueueManager struct { type StorageQueueManager struct {
tsdb StorageClient tsdb StorageClient
queue chan *clientmodel.Sample queue chan *model.Sample
pendingSamples clientmodel.Samples pendingSamples model.Samples
sendSemaphore chan bool sendSemaphore chan bool
drained chan bool drained chan bool
@ -76,7 +76,7 @@ func NewStorageQueueManager(tsdb StorageClient, queueCapacity int) *StorageQueue
return &StorageQueueManager{ return &StorageQueueManager{
tsdb: tsdb, tsdb: tsdb,
queue: make(chan *clientmodel.Sample, queueCapacity), queue: make(chan *model.Sample, queueCapacity),
sendSemaphore: make(chan bool, maxConcurrentSends), sendSemaphore: make(chan bool, maxConcurrentSends),
drained: make(chan bool), drained: make(chan bool),
@ -127,7 +127,7 @@ func NewStorageQueueManager(tsdb StorageClient, queueCapacity int) *StorageQueue
// Append queues a sample to be sent to the remote storage. It drops the // Append queues a sample to be sent to the remote storage. It drops the
// sample on the floor if the queue is full. It implements // sample on the floor if the queue is full. It implements
// storage.SampleAppender. // storage.SampleAppender.
func (t *StorageQueueManager) Append(s *clientmodel.Sample) { func (t *StorageQueueManager) Append(s *model.Sample) {
select { select {
case t.queue <- s: case t.queue <- s:
default: default:
@ -165,7 +165,7 @@ func (t *StorageQueueManager) Collect(ch chan<- prometheus.Metric) {
ch <- t.queueCapacity ch <- t.queueCapacity
} }
func (t *StorageQueueManager) sendSamples(s clientmodel.Samples) { func (t *StorageQueueManager) sendSamples(s model.Samples) {
t.sendSemaphore <- true t.sendSemaphore <- true
defer func() { defer func() {
<-t.sendSemaphore <-t.sendSemaphore
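
For downstream implementers, the StorageClient contract after the rename boils down to two methods; the interface below is copied from the hunk above, while the print-only client is purely illustrative:

package main

import (
    "fmt"

    "github.com/prometheus/common/model"
)

// StorageClient restates the remote-storage interface above with the new
// model types.
type StorageClient interface {
    Store(model.Samples) error
    Name() string
}

// printClient is an illustrative client that only reports batch sizes.
type printClient struct{}

func (printClient) Store(s model.Samples) error {
    fmt.Printf("would store %d samples\n", len(s))
    return nil
}

func (printClient) Name() string { return "print" }

func main() {
    var c StorageClient = printClient{}
    _ = c.Store(model.Samples{
        {Metric: model.Metric{model.MetricNameLabel: "test_metric"}, Value: 1, Timestamp: model.Now()},
    })
}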


@ -17,16 +17,16 @@ import (
"sync" "sync"
"testing" "testing"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
type TestStorageClient struct { type TestStorageClient struct {
receivedSamples clientmodel.Samples receivedSamples model.Samples
expectedSamples clientmodel.Samples expectedSamples model.Samples
wg sync.WaitGroup wg sync.WaitGroup
} }
func (c *TestStorageClient) expectSamples(s clientmodel.Samples) { func (c *TestStorageClient) expectSamples(s model.Samples) {
c.expectedSamples = append(c.expectedSamples, s...) c.expectedSamples = append(c.expectedSamples, s...)
c.wg.Add(len(s)) c.wg.Add(len(s))
} }
@ -40,7 +40,7 @@ func (c *TestStorageClient) waitForExpectedSamples(t *testing.T) {
} }
} }
func (c *TestStorageClient) Store(s clientmodel.Samples) error { func (c *TestStorageClient) Store(s model.Samples) error {
c.receivedSamples = append(c.receivedSamples, s...) c.receivedSamples = append(c.receivedSamples, s...)
c.wg.Add(-len(s)) c.wg.Add(-len(s))
return nil return nil
@ -55,13 +55,13 @@ func TestSampleDelivery(t *testing.T) {
// batch timeout case. // batch timeout case.
n := maxSamplesPerSend * 2 n := maxSamplesPerSend * 2
samples := make(clientmodel.Samples, 0, n) samples := make(model.Samples, 0, n)
for i := 0; i < n; i++ { for i := 0; i < n; i++ {
samples = append(samples, &clientmodel.Sample{ samples = append(samples, &model.Sample{
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: "test_metric", model.MetricNameLabel: "test_metric",
}, },
Value: clientmodel.SampleValue(i), Value: model.SampleValue(i),
}) })
} }


@ -21,7 +21,7 @@ import (
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
// Storage collects multiple remote storage queues. // Storage collects multiple remote storage queues.
@ -70,7 +70,7 @@ func (s *Storage) Stop() {
} }
// Append implements storage.SampleAppender. // Append implements storage.SampleAppender.
func (s *Storage) Append(smpl *clientmodel.Sample) { func (s *Storage) Append(smpl *model.Sample) {
for _, q := range s.queues { for _, q := range s.queues {
q.Append(smpl) q.Append(smpl)
} }


@ -14,13 +14,13 @@
package storage package storage
import ( import (
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
// SampleAppender is the interface to append samples to both, local and remote // SampleAppender is the interface to append samples to both, local and remote
// storage. // storage.
type SampleAppender interface { type SampleAppender interface {
Append(*clientmodel.Sample) Append(*model.Sample)
} }
// Fanout is a SampleAppender that appends every sample to a list of other // Fanout is a SampleAppender that appends every sample to a list of other
@ -30,7 +30,7 @@ type Fanout []SampleAppender
// Append implements SampleAppender. It appends the provided sample to all // Append implements SampleAppender. It appends the provided sample to all
// SampleAppenders in the Fanout slice and waits for each append to complete // SampleAppenders in the Fanout slice and waits for each append to complete
// before proceeding with the next. // before proceeding with the next.
func (f Fanout) Append(s *clientmodel.Sample) { func (f Fanout) Append(s *model.Sample) {
for _, a := range f { for _, a := range f {
a.Append(s) a.Append(s)
} }
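
The SampleAppender/Fanout pair is small enough to reproduce in full with the new types; the counting appender at the end exists only to make the sketch runnable:

package main

import (
    "fmt"

    "github.com/prometheus/common/model"
)

// SampleAppender and Fanout restate the storage package types above.
type SampleAppender interface {
    Append(*model.Sample)
}

type Fanout []SampleAppender

// Append forwards the sample to every wrapped appender, one after the other.
func (f Fanout) Append(s *model.Sample) {
    for _, a := range f {
        a.Append(s)
    }
}

// countingAppender is an illustrative appender that just counts calls.
type countingAppender struct{ n int }

func (c *countingAppender) Append(*model.Sample) { c.n++ }

func main() {
    a, b := &countingAppender{}, &countingAppender{}
    Fanout{a, b}.Append(&model.Sample{
        Metric:    model.Metric{model.MetricNameLabel: "up"},
        Value:     1,
        Timestamp: model.Now(),
    })
    fmt.Println(a.n, b.n) // both appenders saw the sample once
}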


@ -25,7 +25,7 @@ import (
html_template "html/template" html_template "html/template"
text_template "text/template" text_template "text/template"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/util/strutil" "github.com/prometheus/prometheus/util/strutil"
@ -55,7 +55,7 @@ func (q queryResultByLabelSorter) Swap(i, j int) {
q.results[i], q.results[j] = q.results[j], q.results[i] q.results[i], q.results[j] = q.results[j], q.results[i]
} }
func query(q string, timestamp clientmodel.Timestamp, queryEngine *promql.Engine) (queryResult, error) { func query(q string, timestamp model.Time, queryEngine *promql.Engine) (queryResult, error) {
query, err := queryEngine.NewInstantQuery(q, timestamp) query, err := queryEngine.NewInstantQuery(q, timestamp)
if err != nil { if err != nil {
return nil, err return nil, err
@ -78,8 +78,8 @@ func query(q string, timestamp clientmodel.Timestamp, queryEngine *promql.Engine
}} }}
case *promql.String: case *promql.String:
vector = promql.Vector{&promql.Sample{ vector = promql.Vector{&promql.Sample{
Metric: clientmodel.COWMetric{ Metric: model.COWMetric{
Metric: clientmodel.Metric{"__value__": clientmodel.LabelValue(v.Value)}, Metric: model.Metric{"__value__": model.LabelValue(v.Value)},
Copied: true, Copied: true,
}, },
Timestamp: v.Timestamp, Timestamp: v.Timestamp,
@ -112,7 +112,7 @@ type templateExpander struct {
} }
// NewTemplateExpander returns a template expander ready to use. // NewTemplateExpander returns a template expander ready to use.
func NewTemplateExpander(text string, name string, data interface{}, timestamp clientmodel.Timestamp, queryEngine *promql.Engine, pathPrefix string) *templateExpander { func NewTemplateExpander(text string, name string, data interface{}, timestamp model.Time, queryEngine *promql.Engine, pathPrefix string) *templateExpander {
return &templateExpander{ return &templateExpander{
text: text, text: text,
name: name, name: name,
@ -242,7 +242,7 @@ func NewTemplateExpander(text string, name string, data interface{}, timestamp c
if math.IsNaN(v) || math.IsInf(v, 0) { if math.IsNaN(v) || math.IsInf(v, 0) {
return fmt.Sprintf("%.4g", v) return fmt.Sprintf("%.4g", v)
} }
t := clientmodel.TimestampFromUnixNano(int64(v * 1e9)).Time().UTC() t := model.TimeFromUnixNano(int64(v * 1e9)).Time().UTC()
return fmt.Sprint(t) return fmt.Sprint(t)
}, },
"pathPrefix": func() string { "pathPrefix": func() string {


@ -17,7 +17,7 @@ import (
"math" "math"
"testing" "testing"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/local" "github.com/prometheus/prometheus/storage/local"
@ -140,7 +140,7 @@ func TestTemplateExpansion(t *testing.T) {
output: "+Inf:+Inf:+Inf:+Inf:-Inf:-Inf:-Inf:-Inf:NaN:NaN:NaN:NaN:", output: "+Inf:+Inf:+Inf:+Inf:-Inf:-Inf:-Inf:-Inf:NaN:NaN:NaN:NaN:",
}, },
{ {
// HumanizeTimestamp - clientmodel.SampleValue input. // HumanizeTimestamp - model.SampleValue input.
text: "{{ 1435065584.128 | humanizeTimestamp }}", text: "{{ 1435065584.128 | humanizeTimestamp }}",
output: "2015-06-23 13:19:44.128 +0000 UTC", output: "2015-06-23 13:19:44.128 +0000 UTC",
}, },
@ -172,20 +172,20 @@ func TestTemplateExpansion(t *testing.T) {
}, },
} }
time := clientmodel.Timestamp(0) time := model.Time(0)
storage, closer := local.NewTestStorage(t, 1) storage, closer := local.NewTestStorage(t, 1)
defer closer.Close() defer closer.Close()
storage.Append(&clientmodel.Sample{ storage.Append(&model.Sample{
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: "metric", model.MetricNameLabel: "metric",
"instance": "a"}, "instance": "a"},
Value: 11, Value: 11,
}) })
storage.Append(&clientmodel.Sample{ storage.Append(&model.Sample{
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: "metric", model.MetricNameLabel: "metric",
"instance": "b"}, "instance": "b"},
Value: 21, Value: 21,
}) })
storage.WaitForIndexing() storage.WaitForIndexing()


@ -18,7 +18,7 @@ import (
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/local" "github.com/prometheus/prometheus/storage/local"
@ -28,7 +28,7 @@ import (
// API manages the /api HTTP endpoint. // API manages the /api HTTP endpoint.
type API struct { type API struct {
Now func() clientmodel.Timestamp Now func() model.Time
Storage local.Storage Storage local.Storage
QueryEngine *promql.Engine QueryEngine *promql.Engine
} }


@ -21,7 +21,7 @@ import (
"testing" "testing"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/local" "github.com/prometheus/prometheus/storage/local"
@ -35,9 +35,9 @@ import (
// query layer precisely without any change. Thus we round to seconds and then // query layer precisely without any change. Thus we round to seconds and then
// add known-good digits after the decimal point which behave well in // add known-good digits after the decimal point which behave well in
// parsing/re-formatting. // parsing/re-formatting.
var testTimestamp = clientmodel.TimestampFromTime(time.Now().Round(time.Second)).Add(124 * time.Millisecond) var testTimestamp = model.TimeFromUnix(time.Now().Round(time.Second).Unix()).Add(124 * time.Millisecond)
func testNow() clientmodel.Timestamp { func testNow() model.Time {
return testTimestamp return testTimestamp
} }
@ -89,9 +89,9 @@ func TestQuery(t *testing.T) {
storage, closer := local.NewTestStorage(t, 1) storage, closer := local.NewTestStorage(t, 1)
defer closer.Close() defer closer.Close()
storage.Append(&clientmodel.Sample{ storage.Append(&model.Sample{
Metric: clientmodel.Metric{ Metric: model.Metric{
clientmodel.MetricNameLabel: "testmetric", model.MetricNameLabel: "testmetric",
}, },
Timestamp: testTimestamp, Timestamp: testTimestamp,
Value: 0, Value: 0,


@ -26,7 +26,7 @@ import (
"github.com/prometheus/log" "github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
) )
@ -44,7 +44,7 @@ func httpJSONError(w http.ResponseWriter, err error, code int) {
errorJSON(w, err) errorJSON(w, err)
} }
func parseTimestampOrNow(t string, now clientmodel.Timestamp) (clientmodel.Timestamp, error) { func parseTimestampOrNow(t string, now model.Time) (model.Time, error) {
if t == "" { if t == "" {
return now, nil return now, nil
} }
@ -53,7 +53,7 @@ func parseTimestampOrNow(t string, now clientmodel.Timestamp) (clientmodel.Times
if err != nil { if err != nil {
return 0, err return 0, err
} }
return clientmodel.TimestampFromUnixNano(int64(tFloat * float64(time.Second/time.Nanosecond))), nil return model.TimeFromUnixNano(int64(tFloat * float64(time.Second/time.Nanosecond))), nil
} }
func parseDuration(d string) (time.Duration, error) { func parseDuration(d string) (time.Duration, error) {
@ -223,7 +223,7 @@ func (api *API) Metrics(w http.ResponseWriter, r *http.Request) {
setAccessControlHeaders(w) setAccessControlHeaders(w)
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
metricNames := api.Storage.LabelValuesForLabelName(clientmodel.MetricNameLabel) metricNames := api.Storage.LabelValuesForLabelName(model.MetricNameLabel)
sort.Sort(metricNames) sort.Sort(metricNames)
resultBytes, err := json.Marshal(metricNames) resultBytes, err := json.Marshal(metricNames)
if err != nil { if err != nil {
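
The timestamp handling in parseTimestampOrNow above reduces to a "seconds as float" to model.Time conversion; a minimal restatement (the function name here is invented for the sketch):

package main

import (
    "fmt"
    "strconv"
    "time"

    "github.com/prometheus/common/model"
)

// parseUnixSeconds converts a decimal "seconds since epoch" string into a
// model.Time, the same way parseTimestampOrNow above does.
func parseUnixSeconds(s string) (model.Time, error) {
    f, err := strconv.ParseFloat(s, 64)
    if err != nil {
        return 0, err
    }
    return model.TimeFromUnixNano(int64(f * float64(time.Second/time.Nanosecond))), nil
}

func main() {
    ts, err := parseUnixSeconds("1426956073.123")
    if err != nil {
        panic(err)
    }
    fmt.Println(ts, ts.Time().UTC()) // model.Time keeps millisecond resolution
}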


@ -17,7 +17,7 @@ import (
"testing" "testing"
"time" "time"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
) )
func TestParseTimestampOrNow(t *testing.T) { func TestParseTimestampOrNow(t *testing.T) {
@ -33,7 +33,7 @@ func TestParseTimestampOrNow(t *testing.T) {
if err != nil { if err != nil {
t.Fatalf("err = %s; want nil", err) t.Fatalf("err = %s; want nil", err)
} }
expTS := clientmodel.TimestampFromUnixNano(1426956073123000000) expTS := model.TimeFromUnixNano(1426956073123000000)
if !ts.Equal(expTS) { if !ts.Equal(expTS) {
t.Fatalf("ts = %v; want %v", ts, expTS) t.Fatalf("ts = %v; want %v", ts, expTS)
} }


@ -12,7 +12,7 @@ import (
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"golang.org/x/net/context" "golang.org/x/net/context"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/local" "github.com/prometheus/prometheus/storage/local"
@ -174,10 +174,10 @@ func (api *API) queryRange(r *http.Request) (interface{}, *apiError) {
func (api *API) labelValues(r *http.Request) (interface{}, *apiError) { func (api *API) labelValues(r *http.Request) (interface{}, *apiError) {
name := route.Param(api.context(r), "name") name := route.Param(api.context(r), "name")
if !clientmodel.LabelNameRE.MatchString(name) { if !model.LabelNameRE.MatchString(name) {
return nil, &apiError{errorBadData, fmt.Errorf("invalid label name: %q", name)} return nil, &apiError{errorBadData, fmt.Errorf("invalid label name: %q", name)}
} }
vals := api.Storage.LabelValuesForLabelName(clientmodel.LabelName(name)) vals := api.Storage.LabelValuesForLabelName(model.LabelName(name))
sort.Sort(vals) sort.Sort(vals)
return vals, nil return vals, nil
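
The label-name check in the hunk above now comes from common/model as well; a short sketch of how it is used (the sample names are illustrative):

package main

import (
    "fmt"

    "github.com/prometheus/common/model"
)

func main() {
    // model.LabelNameRE replaces clientmodel.LabelNameRE for validating
    // label names before querying label values.
    for _, name := range []string{"__name__", "job", "0starts_with_digit", "has-dash"} {
        fmt.Printf("%q valid: %v\n", name, model.LabelNameRE.MatchString(name))
    }
}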
@ -188,7 +188,7 @@ func (api *API) series(r *http.Request) (interface{}, *apiError) {
if len(r.Form["match[]"]) == 0 { if len(r.Form["match[]"]) == 0 {
return nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")} return nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")}
} }
res := map[clientmodel.Fingerprint]clientmodel.COWMetric{} res := map[model.Fingerprint]model.COWMetric{}
for _, lm := range r.Form["match[]"] { for _, lm := range r.Form["match[]"] {
matchers, err := promql.ParseMetricSelector(lm) matchers, err := promql.ParseMetricSelector(lm)
@ -200,7 +200,7 @@ func (api *API) series(r *http.Request) (interface{}, *apiError) {
} }
} }
metrics := make([]clientmodel.Metric, 0, len(res)) metrics := make([]model.Metric, 0, len(res))
for _, met := range res { for _, met := range res {
metrics = append(metrics, met.Metric) metrics = append(metrics, met.Metric)
} }
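
A hedged sketch of the deduplication pattern in the series handler above: keying a map by model.Fingerprint collapses series matched by more than one selector, and the unique metrics are then collected into a slice. This standalone version uses plain model.Metric values rather than the handler's COWMetric wrapper.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	series := []model.Metric{
		{model.MetricNameLabel: "test_metric1", "foo": "bar"},
		{model.MetricNameLabel: "test_metric1", "foo": "bar"}, // matched twice
		{model.MetricNameLabel: "test_metric2", "foo": "boo"},
	}
	res := map[model.Fingerprint]model.Metric{}
	for _, m := range series {
		res[m.Fingerprint()] = m
	}
	metrics := make([]model.Metric, 0, len(res))
	for _, m := range res {
		metrics = append(metrics, m)
	}
	fmt.Println(len(metrics)) // 2 unique series
}
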
@ -212,7 +212,7 @@ func (api *API) dropSeries(r *http.Request) (interface{}, *apiError) {
if len(r.Form["match[]"]) == 0 { if len(r.Form["match[]"]) == 0 {
return nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")} return nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")}
} }
fps := map[clientmodel.Fingerprint]struct{}{} fps := map[model.Fingerprint]struct{}{}
for _, lm := range r.Form["match[]"] { for _, lm := range r.Form["match[]"] {
matchers, err := promql.ParseMetricSelector(lm) matchers, err := promql.ParseMetricSelector(lm)
@ -265,13 +265,13 @@ func respondError(w http.ResponseWriter, apiErr *apiError, data interface{}) {
w.Write(b) w.Write(b)
} }
func parseTime(s string) (clientmodel.Timestamp, error) { func parseTime(s string) (model.Time, error) {
if t, err := strconv.ParseFloat(s, 64); err == nil { if t, err := strconv.ParseFloat(s, 64); err == nil {
ts := int64(t * float64(time.Second)) ts := int64(t * float64(time.Second))
return clientmodel.TimestampFromUnixNano(ts), nil return model.TimeFromUnixNano(ts), nil
} }
if t, err := time.Parse(time.RFC3339Nano, s); err == nil { if t, err := time.Parse(time.RFC3339Nano, s); err == nil {
return clientmodel.TimestampFromTime(t), nil return model.TimeFromUnixNano(t.UnixNano()), nil
} }
return 0, fmt.Errorf("cannot parse %q to a valid timestamp", s) return 0, fmt.Errorf("cannot parse %q to a valid timestamp", s)
} }

View file

@ -14,7 +14,7 @@ import (
"golang.org/x/net/context" "golang.org/x/net/context"
clientmodel "github.com/prometheus/client_golang/model" "github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/storage/metric"
@ -42,7 +42,7 @@ func TestEndpoints(t *testing.T) {
QueryEngine: suite.QueryEngine(), QueryEngine: suite.QueryEngine(),
} }
start := clientmodel.Timestamp(0) start := model.Time(0)
var tests = []struct { var tests = []struct {
endpoint apiFunc endpoint apiFunc
params map[string]string params map[string]string
@ -173,7 +173,7 @@ func TestEndpoints(t *testing.T) {
params: map[string]string{ params: map[string]string{
"name": "__name__", "name": "__name__",
}, },
response: clientmodel.LabelValues{ response: model.LabelValues{
"test_metric1", "test_metric1",
"test_metric2", "test_metric2",
}, },
@ -183,7 +183,7 @@ func TestEndpoints(t *testing.T) {
params: map[string]string{ params: map[string]string{
"name": "foo", "name": "foo",
}, },
response: clientmodel.LabelValues{ response: model.LabelValues{
"bar", "bar",
"boo", "boo",
}, },
@ -201,7 +201,7 @@ func TestEndpoints(t *testing.T) {
query: url.Values{ query: url.Values{
"match[]": []string{`test_metric2`}, "match[]": []string{`test_metric2`},
}, },
response: []clientmodel.Metric{ response: []model.Metric{
{ {
"__name__": "test_metric2", "__name__": "test_metric2",
"foo": "boo", "foo": "boo",
@ -213,7 +213,7 @@ func TestEndpoints(t *testing.T) {
query: url.Values{ query: url.Values{
"match[]": []string{`test_metric1{foo=~"o$"}`}, "match[]": []string{`test_metric1{foo=~"o$"}`},
}, },
response: []clientmodel.Metric{ response: []model.Metric{
{ {
"__name__": "test_metric1", "__name__": "test_metric1",
"foo": "boo", "foo": "boo",
@ -225,7 +225,7 @@ func TestEndpoints(t *testing.T) {
query: url.Values{ query: url.Values{
"match[]": []string{`test_metric1{foo=~"o$"}`, `test_metric1{foo=~"o$"}`}, "match[]": []string{`test_metric1{foo=~"o$"}`, `test_metric1{foo=~"o$"}`},
}, },
response: []clientmodel.Metric{ response: []model.Metric{
{ {
"__name__": "test_metric1", "__name__": "test_metric1",
"foo": "boo", "foo": "boo",
@ -237,7 +237,7 @@ func TestEndpoints(t *testing.T) {
query: url.Values{ query: url.Values{
"match[]": []string{`test_metric1{foo=~"o$"}`, `none`}, "match[]": []string{`test_metric1{foo=~"o$"}`, `none`},
}, },
response: []clientmodel.Metric{ response: []model.Metric{
{ {
"__name__": "test_metric1", "__name__": "test_metric1",
"foo": "boo", "foo": "boo",
@ -269,7 +269,7 @@ func TestEndpoints(t *testing.T) {
query: url.Values{ query: url.Values{
"match[]": []string{`test_metric1`}, "match[]": []string{`test_metric1`},
}, },
response: []clientmodel.Metric{ response: []model.Metric{
{ {
"__name__": "test_metric1", "__name__": "test_metric1",
"foo": "bar", "foo": "bar",
@ -445,7 +445,7 @@ func TestParseTime(t *testing.T) {
t.Errorf("Expected error for %q but got none", test.input) t.Errorf("Expected error for %q but got none", test.input)
continue continue
} }
res := clientmodel.TimestampFromTime(test.result) res := model.TimeFromUnixNano(test.result.UnixNano())
if !test.fail && ts != res { if !test.fail && ts != res {
t.Errorf("Expected time %v for input %q but got %v", res, test.input, ts) t.Errorf("Expected time %v for input %q but got %v", res, test.input, ts)
} }
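
A minimal sketch of why the expected label-value responses above are listed in sorted order: model.LabelValues implements sort.Interface, so the handler's sort.Sort(vals) call orders them lexically.

package main

import (
	"fmt"
	"sort"

	"github.com/prometheus/common/model"
)

func main() {
	vals := model.LabelValues{"test_metric2", "test_metric1"}
	sort.Sort(vals)
	fmt.Println(vals) // [test_metric1 test_metric2]
}
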

View file

@ -12,8 +12,8 @@ import (
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/local" "github.com/prometheus/prometheus/storage/local"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/client_model/go" dto "github.com/prometheus/client_model/go"
"github.com/prometheus/common/model"
) )
type Federation struct { type Federation struct {
@ -23,7 +23,7 @@ type Federation struct {
func (fed *Federation) ServeHTTP(w http.ResponseWriter, req *http.Request) { func (fed *Federation) ServeHTTP(w http.ResponseWriter, req *http.Request) {
req.ParseForm() req.ParseForm()
metrics := map[clientmodel.Fingerprint]clientmodel.COWMetric{} metrics := map[model.Fingerprint]model.COWMetric{}
for _, s := range req.Form["match[]"] { for _, s := range req.Form["match[]"] {
matchers, err := promql.ParseMetricSelector(s) matchers, err := promql.ParseMetricSelector(s)
@ -58,7 +58,7 @@ func (fed *Federation) ServeHTTP(w http.ResponseWriter, req *http.Request) {
protMetric.Label = protMetric.Label[:0] protMetric.Label = protMetric.Label[:0]
for ln, lv := range met.Metric { for ln, lv := range met.Metric {
if ln == clientmodel.MetricNameLabel { if ln == model.MetricNameLabel {
protMetricFam.Name = proto.String(string(lv)) protMetricFam.Name = proto.String(string(lv))
continue continue
} }
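
A standalone sketch of the label loop in the federation handler above: the __name__ label (model.MetricNameLabel) becomes the metric family name, and every other label is copied through. The protobuf plumbing is replaced here by a plain map for illustration.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	met := model.Metric{
		model.MetricNameLabel: "http_requests_total",
		"job":                 "api-server",
		"instance":            "localhost:9090",
	}
	var familyName string
	labels := map[string]string{}
	for ln, lv := range met {
		if ln == model.MetricNameLabel {
			familyName = string(lv)
			continue
		}
		labels[string(ln)] = string(lv)
	}
	fmt.Println(familyName, labels)
}
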

View file

@ -31,8 +31,8 @@ import (
pprof_runtime "runtime/pprof" pprof_runtime "runtime/pprof"
template_text "text/template" template_text "text/template"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/model"
"github.com/prometheus/log" "github.com/prometheus/log"
"github.com/prometheus/prometheus/config" "github.com/prometheus/prometheus/config"
@ -128,7 +128,7 @@ func New(st local.Storage, qe *promql.Engine, rm *rules.Manager, status *Prometh
apiLegacy: &legacy.API{ apiLegacy: &legacy.API{
QueryEngine: qe, QueryEngine: qe,
Storage: st, Storage: st,
Now: clientmodel.Now, Now: model.Now,
}, },
federation: &Federation{ federation: &Federation{
Storage: st, Storage: st,
@ -257,7 +257,7 @@ func (h *Handler) consoles(w http.ResponseWriter, r *http.Request) {
Path: strings.TrimLeft(name, "/"), Path: strings.TrimLeft(name, "/"),
} }
tmpl := template.NewTemplateExpander(string(text), "__console_"+name, data, clientmodel.Now(), h.queryEngine, h.options.ExternalURL.Path) tmpl := template.NewTemplateExpander(string(text), "__console_"+name, data, model.Now(), h.queryEngine, h.options.ExternalURL.Path)
filenames, err := filepath.Glob(h.options.ConsoleLibrariesPath + "/*.lib") filenames, err := filepath.Glob(h.options.ConsoleLibrariesPath + "/*.lib")
if err != nil { if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError) http.Error(w, err.Error(), http.StatusInternalServerError)
@ -351,7 +351,7 @@ func tmplFuncs(consolesPath string, opts *Options) template_text.FuncMap {
"since": time.Since, "since": time.Since,
"consolesPath": func() string { return consolesPath }, "consolesPath": func() string { return consolesPath },
"pathPrefix": func() string { return opts.ExternalURL.Path }, "pathPrefix": func() string { return opts.ExternalURL.Path },
"stripLabels": func(lset clientmodel.LabelSet, labels ...clientmodel.LabelName) clientmodel.LabelSet { "stripLabels": func(lset model.LabelSet, labels ...model.LabelName) model.LabelSet {
for _, ln := range labels { for _, ln := range labels {
delete(lset, ln) delete(lset, ln)
} }
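
A standalone version of the stripLabels template helper from the hunk above: it deletes the given label names from a model.LabelSet in place. Returning the modified set is assumed, since the hunk is truncated before the function's end.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func stripLabels(lset model.LabelSet, labels ...model.LabelName) model.LabelSet {
	for _, ln := range labels {
		delete(lset, ln)
	}
	return lset
}

func main() {
	lset := model.LabelSet{"job": "node", "instance": "localhost:9100", "env": "prod"}
	fmt.Println(stripLabels(lset, "instance")) // instance label removed
}
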
@ -426,7 +426,7 @@ func (h *Handler) executeTemplate(w http.ResponseWriter, name string, data inter
http.Error(w, err.Error(), http.StatusInternalServerError) http.Error(w, err.Error(), http.StatusInternalServerError)
} }
tmpl := template.NewTemplateExpander(text, name, data, clientmodel.Now(), h.queryEngine, h.options.ExternalURL.Path) tmpl := template.NewTemplateExpander(text, name, data, model.Now(), h.queryEngine, h.options.ExternalURL.Path)
tmpl.Funcs(tmplFuncs(h.consolesPath(), h.options)) tmpl.Funcs(tmplFuncs(h.consolesPath(), h.options))
result, err := tmpl.ExpandHTML(nil) result, err := tmpl.ExpandHTML(nil)