Switch from client_golang/model to common/model

Fabian Reinartz 2015-08-20 17:18:46 +02:00
parent 7a6d12a44c
commit 306e8468a0
72 changed files with 1417 additions and 1417 deletions
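
In practical terms, the change is a mechanical rename of the model types and helpers across all 72 files. A minimal Go sketch of the mapping, assuming the common/model API as used in the hunks below; labelsExample is a hypothetical helper, not part of the commit:

package example

import (
	// Removed by this commit:
	//   clientmodel "github.com/prometheus/client_golang/model"
	"github.com/prometheus/common/model"
)

// labelsExample illustrates the renames applied throughout the diff:
//   clientmodel.LabelSet       -> model.LabelSet
//   clientmodel.Timestamp      -> model.Time
//   COWMetric.Delete(...)      -> COWMetric.Del(...)
//   SignatureForLabels(m, lns) -> SignatureForLabels(m, lns...)  (variadic)
func labelsExample() model.LabelSet {
	ls := model.LabelSet{
		model.AddressLabel: model.LabelValue("localhost:9090"),
	}
	_ = model.Now() // model.Time replaces clientmodel.Timestamp
	return ls
}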


@ -25,7 +25,7 @@ import (
"gopkg.in/yaml.v2"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/util/strutil"
)
@ -270,7 +270,7 @@ type GlobalConfig struct {
// How frequently to evaluate rules by default.
EvaluationInterval Duration `yaml:"evaluation_interval,omitempty"`
// The labels to add to any timeseries that this Prometheus instance scrapes.
Labels clientmodel.LabelSet `yaml:"labels,omitempty"`
Labels model.LabelSet `yaml:"labels,omitempty"`
// Catches all undefined fields and must be empty after parsing.
XXX map[string]interface{} `yaml:",inline"`
@ -399,9 +399,9 @@ func (a *BasicAuth) UnmarshalYAML(unmarshal func(interface{}) error) error {
type TargetGroup struct {
// Targets is a list of targets identified by a label set. Each target is
// uniquely identifiable in the group by its address label.
Targets []clientmodel.LabelSet
Targets []model.LabelSet
// Labels is a set of labels that is common across all targets in the group.
Labels clientmodel.LabelSet
Labels model.LabelSet
// Source is an identifier that describes a group of targets.
Source string
@ -415,19 +415,19 @@ func (tg TargetGroup) String() string {
func (tg *TargetGroup) UnmarshalYAML(unmarshal func(interface{}) error) error {
g := struct {
Targets []string `yaml:"targets"`
Labels clientmodel.LabelSet `yaml:"labels"`
Labels model.LabelSet `yaml:"labels"`
XXX map[string]interface{} `yaml:",inline"`
}{}
if err := unmarshal(&g); err != nil {
return err
}
tg.Targets = make([]clientmodel.LabelSet, 0, len(g.Targets))
tg.Targets = make([]model.LabelSet, 0, len(g.Targets))
for _, t := range g.Targets {
if strings.Contains(t, "/") {
return fmt.Errorf("%q is not a valid hostname", t)
}
tg.Targets = append(tg.Targets, clientmodel.LabelSet{
clientmodel.AddressLabel: clientmodel.LabelValue(t),
tg.Targets = append(tg.Targets, model.LabelSet{
model.AddressLabel: model.LabelValue(t),
})
}
tg.Labels = g.Labels
@ -437,14 +437,14 @@ func (tg *TargetGroup) UnmarshalYAML(unmarshal func(interface{}) error) error {
// MarshalYAML implements the yaml.Marshaler interface.
func (tg TargetGroup) MarshalYAML() (interface{}, error) {
g := &struct {
Targets []string `yaml:"targets"`
Labels clientmodel.LabelSet `yaml:"labels,omitempty"`
Targets []string `yaml:"targets"`
Labels model.LabelSet `yaml:"labels,omitempty"`
}{
Targets: make([]string, 0, len(tg.Targets)),
Labels: tg.Labels,
}
for _, t := range tg.Targets {
g.Targets = append(g.Targets, string(t[clientmodel.AddressLabel]))
g.Targets = append(g.Targets, string(t[model.AddressLabel]))
}
return g, nil
}
@ -452,19 +452,19 @@ func (tg TargetGroup) MarshalYAML() (interface{}, error) {
// UnmarshalJSON implements the json.Unmarshaler interface.
func (tg *TargetGroup) UnmarshalJSON(b []byte) error {
g := struct {
Targets []string `json:"targets"`
Labels clientmodel.LabelSet `json:"labels"`
Targets []string `json:"targets"`
Labels model.LabelSet `json:"labels"`
}{}
if err := json.Unmarshal(b, &g); err != nil {
return err
}
tg.Targets = make([]clientmodel.LabelSet, 0, len(g.Targets))
tg.Targets = make([]model.LabelSet, 0, len(g.Targets))
for _, t := range g.Targets {
if strings.Contains(t, "/") {
return fmt.Errorf("%q is not a valid hostname", t)
}
tg.Targets = append(tg.Targets, clientmodel.LabelSet{
clientmodel.AddressLabel: clientmodel.LabelValue(t),
tg.Targets = append(tg.Targets, model.LabelSet{
model.AddressLabel: model.LabelValue(t),
})
}
tg.Labels = g.Labels
@ -686,7 +686,7 @@ func (a *RelabelAction) UnmarshalYAML(unmarshal func(interface{}) error) error {
type RelabelConfig struct {
// A list of labels from which values are taken and concatenated
// with the configured separator in order.
SourceLabels clientmodel.LabelNames `yaml:"source_labels,flow"`
SourceLabels model.LabelNames `yaml:"source_labels,flow"`
// Separator is the string between concatenated values from the source labels.
Separator string `yaml:"separator,omitempty"`
// Regex against which the concatenation is matched.
@ -694,7 +694,7 @@ type RelabelConfig struct {
// Modulus to take of the hash of concatenated values from the source labels.
Modulus uint64 `yaml:"modulus,omitempty"`
// The label to which the resulting string is written in a replacement.
TargetLabel clientmodel.LabelName `yaml:"target_label,omitempty"`
TargetLabel model.LabelName `yaml:"target_label,omitempty"`
// Replacement is the regex replacement pattern to be used.
Replacement string `yaml:"replacement,omitempty"`
// Action is the action to be performed for the relabeling.

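For reference, a TargetGroup built directly with the new common/model types, mirroring the struct fields above and the expectedConf test fixture below. This is a sketch only; it assumes TargetGroup still lives in the config package as shown in these hunks, and the Source value is a hypothetical identifier:

package example

import (
	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/config"
)

// tg mirrors the test fixture: targets are model.LabelSets keyed by
// model.AddressLabel, and common labels are a plain model.LabelSet.
var tg = config.TargetGroup{
	Targets: []model.LabelSet{
		{model.AddressLabel: "localhost:9090"},
		{model.AddressLabel: "localhost:9191"},
	},
	Labels: model.LabelSet{"my": "label", "your": "label"},
	Source: "example", // hypothetical source identifier
}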

@ -24,7 +24,7 @@ import (
"gopkg.in/yaml.v2"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
var expectedConf = &Config{
@ -33,7 +33,7 @@ var expectedConf = &Config{
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
EvaluationInterval: Duration(30 * time.Second),
Labels: clientmodel.LabelSet{
Labels: model.LabelSet{
"monitor": "codelab",
"foo": "bar",
},
@ -60,11 +60,11 @@ var expectedConf = &Config{
TargetGroups: []*TargetGroup{
{
Targets: []clientmodel.LabelSet{
{clientmodel.AddressLabel: "localhost:9090"},
{clientmodel.AddressLabel: "localhost:9191"},
Targets: []model.LabelSet{
{model.AddressLabel: "localhost:9090"},
{model.AddressLabel: "localhost:9191"},
},
Labels: clientmodel.LabelSet{
Labels: model.LabelSet{
"my": "label",
"your": "label",
},
@ -84,7 +84,7 @@ var expectedConf = &Config{
RelabelConfigs: []*RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"job", "__meta_dns_srv_name"},
SourceLabels: model.LabelNames{"job", "__meta_dns_srv_name"},
TargetLabel: "job",
Separator: ";",
Regex: &Regexp{*regexp.MustCompile("(.*)some-[regex]$")},
@ -126,20 +126,20 @@ var expectedConf = &Config{
RelabelConfigs: []*RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"job"},
SourceLabels: model.LabelNames{"job"},
Regex: &Regexp{*regexp.MustCompile("(.*)some-[regex]$")},
Separator: ";",
Action: RelabelDrop,
},
{
SourceLabels: clientmodel.LabelNames{"__address__"},
SourceLabels: model.LabelNames{"__address__"},
TargetLabel: "__tmp_hash",
Modulus: 8,
Separator: ";",
Action: RelabelHashMod,
},
{
SourceLabels: clientmodel.LabelNames{"__tmp_hash"},
SourceLabels: model.LabelNames{"__tmp_hash"},
Regex: &Regexp{*regexp.MustCompile("^1$")},
Separator: ";",
Action: RelabelKeep,
@ -147,7 +147,7 @@ var expectedConf = &Config{
},
MetricRelabelConfigs: []*RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"__name__"},
SourceLabels: model.LabelNames{"__name__"},
Regex: &Regexp{*regexp.MustCompile("expensive_metric.*$")},
Separator: ";",
Action: RelabelDrop,


@ -25,7 +25,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/util/httputil"
)
@ -51,9 +51,9 @@ type NotificationReq struct {
// A reference to the runbook for the alert.
Runbook string
// Labels associated with this alert notification, including alert name.
Labels clientmodel.LabelSet
Labels model.LabelSet
// Current value of alert
Value clientmodel.SampleValue
Value model.SampleValue
// Since when this alert has been active (pending or firing).
ActiveSince time.Time
// A textual representation of the rule that triggered the alert.


@ -21,7 +21,7 @@ import (
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
type testHTTPPoster struct {
@ -65,10 +65,10 @@ func (s *testNotificationScenario) test(i int, t *testing.T) {
Summary: s.summary,
Description: s.description,
Runbook: s.runbook,
Labels: clientmodel.LabelSet{
clientmodel.LabelName("instance"): clientmodel.LabelValue("testinstance"),
Labels: model.LabelSet{
model.LabelName("instance"): model.LabelValue("testinstance"),
},
Value: clientmodel.SampleValue(1.0 / 3.0),
Value: model.SampleValue(1.0 / 3.0),
ActiveSince: time.Time{},
RuleString: "Test rule string",
GeneratorURL: "prometheus_url",


@ -19,7 +19,7 @@ import (
"golang.org/x/net/context"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/local"
)
@ -32,7 +32,7 @@ type Analyzer struct {
// The expression being analyzed.
Expr Expr
// The time range for evaluation of Expr.
Start, End clientmodel.Timestamp
Start, End model.Time
// The preload times for different query time offsets.
offsetPreloadTimes map[time.Duration]preloadTimes
@ -45,11 +45,11 @@ type preloadTimes struct {
// Instants require single samples to be loaded along the entire query
// range, with intervals between the samples corresponding to the query
// resolution.
instants map[clientmodel.Fingerprint]struct{}
instants map[model.Fingerprint]struct{}
// Ranges require loading a range of samples at each resolution step,
// stretching backwards from the current evaluation timestamp. The length of
// the range into the past is given by the duration, as in "foo[5m]".
ranges map[clientmodel.Fingerprint]time.Duration
ranges map[model.Fingerprint]time.Duration
}
// Analyze the provided expression and attach metrics and fingerprints to data-selecting
@ -60,8 +60,8 @@ func (a *Analyzer) Analyze(ctx context.Context) error {
getPreloadTimes := func(offset time.Duration) preloadTimes {
if _, ok := a.offsetPreloadTimes[offset]; !ok {
a.offsetPreloadTimes[offset] = preloadTimes{
instants: map[clientmodel.Fingerprint]struct{}{},
ranges: map[clientmodel.Fingerprint]time.Duration{},
instants: map[model.Fingerprint]struct{}{},
ranges: map[model.Fingerprint]time.Duration{},
}
}
return a.offsetPreloadTimes[offset]
@ -73,7 +73,7 @@ func (a *Analyzer) Analyze(ctx context.Context) error {
switch n := node.(type) {
case *VectorSelector:
n.metrics = a.Storage.MetricsForLabelMatchers(n.LabelMatchers...)
n.iterators = make(map[clientmodel.Fingerprint]local.SeriesIterator, len(n.metrics))
n.iterators = make(map[model.Fingerprint]local.SeriesIterator, len(n.metrics))
pt := getPreloadTimes(n.Offset)
for fp := range n.metrics {
@ -86,7 +86,7 @@ func (a *Analyzer) Analyze(ctx context.Context) error {
}
case *MatrixSelector:
n.metrics = a.Storage.MetricsForLabelMatchers(n.LabelMatchers...)
n.iterators = make(map[clientmodel.Fingerprint]local.SeriesIterator, len(n.metrics))
n.iterators = make(map[model.Fingerprint]local.SeriesIterator, len(n.metrics))
pt := getPreloadTimes(n.Offset)
for fp := range n.metrics {


@ -18,7 +18,7 @@ import (
"fmt"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/local"
"github.com/prometheus/prometheus/storage/metric"
@ -59,7 +59,7 @@ type AlertStmt struct {
Name string
Expr Expr
Duration time.Duration
Labels clientmodel.LabelSet
Labels model.LabelSet
Summary string
Description string
Runbook string
@ -72,7 +72,7 @@ type EvalStmt struct {
// The time boundaries for the evaluation. If Start equals End an instant
// is evaluated.
Start, End clientmodel.Timestamp
Start, End model.Time
// Time between two evaluated instants for the range [Start:End].
Interval time.Duration
}
@ -81,7 +81,7 @@ type EvalStmt struct {
type RecordStmt struct {
Name string
Expr Expr
Labels clientmodel.LabelSet
Labels model.LabelSet
}
func (*AlertStmt) stmt() {}
@ -136,10 +136,10 @@ type Expressions []Expr
// AggregateExpr represents an aggregation operation on a vector.
type AggregateExpr struct {
Op itemType // The used aggregation operation.
Expr Expr // The vector expression over which is aggregated.
Grouping clientmodel.LabelNames // The labels by which to group the vector.
KeepExtraLabels bool // Whether to keep extra labels common among result elements.
Op itemType // The used aggregation operation.
Expr Expr // The vector expression over which is aggregated.
Grouping model.LabelNames // The labels by which to group the vector.
KeepExtraLabels bool // Whether to keep extra labels common among result elements.
}
// BinaryExpr represents a binary expression between two child expressions.
@ -166,13 +166,13 @@ type MatrixSelector struct {
LabelMatchers metric.LabelMatchers
// The series iterators are populated at query analysis time.
iterators map[clientmodel.Fingerprint]local.SeriesIterator
metrics map[clientmodel.Fingerprint]clientmodel.COWMetric
iterators map[model.Fingerprint]local.SeriesIterator
metrics map[model.Fingerprint]model.COWMetric
}
// NumberLiteral represents a number.
type NumberLiteral struct {
Val clientmodel.SampleValue
Val model.SampleValue
}
// ParenExpr wraps an expression so it cannot be disassembled as a consequence
@ -200,8 +200,8 @@ type VectorSelector struct {
LabelMatchers metric.LabelMatchers
// The series iterators are populated at query analysis time.
iterators map[clientmodel.Fingerprint]local.SeriesIterator
metrics map[clientmodel.Fingerprint]clientmodel.COWMetric
iterators map[model.Fingerprint]local.SeriesIterator
metrics map[model.Fingerprint]model.COWMetric
}
func (e *AggregateExpr) Type() ExprType { return ExprVector }
@ -262,10 +262,10 @@ type VectorMatching struct {
Card VectorMatchCardinality
// On contains the labels which define equality of a pair
// of elements from the vectors.
On clientmodel.LabelNames
On model.LabelNames
// Include contains additional labels that should be included in
// the result from the side with the higher cardinality.
Include clientmodel.LabelNames
Include model.LabelNames
}
// A Visitor's Visit method is invoked for each node encountered by Walk.


@ -25,7 +25,7 @@ import (
"github.com/prometheus/log"
"golang.org/x/net/context"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/local"
"github.com/prometheus/prometheus/storage/metric"
@ -34,22 +34,22 @@ import (
// SampleStream is a stream of Values belonging to an attached COWMetric.
type SampleStream struct {
Metric clientmodel.COWMetric `json:"metric"`
Values metric.Values `json:"values"`
Metric model.COWMetric `json:"metric"`
Values metric.Values `json:"values"`
}
// Sample is a single sample belonging to a COWMetric.
type Sample struct {
Metric clientmodel.COWMetric `json:"metric"`
Value clientmodel.SampleValue `json:"value"`
Timestamp clientmodel.Timestamp `json:"timestamp"`
Metric model.COWMetric `json:"metric"`
Value model.SampleValue `json:"value"`
Timestamp model.Time `json:"timestamp"`
}
// MarshalJSON implements json.Marshaler.
func (s *Sample) MarshalJSON() ([]byte, error) {
v := struct {
Metric clientmodel.COWMetric `json:"metric"`
Value metric.SamplePair `json:"value"`
Metric model.COWMetric `json:"metric"`
Value metric.SamplePair `json:"value"`
}{
Metric: s.Metric,
Value: metric.SamplePair{
@ -63,8 +63,8 @@ func (s *Sample) MarshalJSON() ([]byte, error) {
// Scalar is a scalar value evaluated at the set timestamp.
type Scalar struct {
Value clientmodel.SampleValue `json:"value"`
Timestamp clientmodel.Timestamp `json:"timestamp"`
Value model.SampleValue `json:"value"`
Timestamp model.Time `json:"timestamp"`
}
func (s *Scalar) String() string {
@ -79,8 +79,8 @@ func (s *Scalar) MarshalJSON() ([]byte, error) {
// String is a string value evaluated at the set timestamp.
type String struct {
Value string `json:"value"`
Timestamp clientmodel.Timestamp `json:"timestamp"`
Value string `json:"value"`
Timestamp model.Time `json:"timestamp"`
}
// MarshalJSON implements json.Marshaler.
@ -92,7 +92,7 @@ func (s *String) String() string {
return s.Value
}
// Vector is basically only an alias for clientmodel.Samples, but the
// Vector is basically only an alias for model.Samples, but the
// contract is that in a Vector, all Samples have the same timestamp.
type Vector []*Sample
@ -309,7 +309,7 @@ func (ng *Engine) Stop() {
}
// NewInstantQuery returns an evaluation query for the given expression at the given time.
func (ng *Engine) NewInstantQuery(qs string, ts clientmodel.Timestamp) (Query, error) {
func (ng *Engine) NewInstantQuery(qs string, ts model.Time) (Query, error) {
expr, err := ParseExpr(qs)
if err != nil {
return nil, err
@ -322,7 +322,7 @@ func (ng *Engine) NewInstantQuery(qs string, ts clientmodel.Timestamp) (Query, e
// NewRangeQuery returns an evaluation query for the given time range and with
// the resolution set by the interval.
func (ng *Engine) NewRangeQuery(qs string, start, end clientmodel.Timestamp, interval time.Duration) (Query, error) {
func (ng *Engine) NewRangeQuery(qs string, start, end model.Time, interval time.Duration) (Query, error) {
expr, err := ParseExpr(qs)
if err != nil {
return nil, err
@ -336,7 +336,7 @@ func (ng *Engine) NewRangeQuery(qs string, start, end clientmodel.Timestamp, int
return qry, nil
}
func (ng *Engine) newQuery(expr Expr, start, end clientmodel.Timestamp, interval time.Duration) *query {
func (ng *Engine) newQuery(expr Expr, start, end model.Time, interval time.Duration) *query {
es := &EvalStmt{
Expr: expr,
Start: start,
@ -459,7 +459,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
numSteps := int(s.End.Sub(s.Start) / s.Interval)
// Range evaluation.
sampleStreams := map[clientmodel.Fingerprint]*SampleStream{}
sampleStreams := map[model.Fingerprint]*SampleStream{}
for ts := s.Start; !ts.After(s.End); ts = ts.Add(s.Interval) {
if err := contextDone(ctx, "range evaluation"); err != nil {
@ -538,7 +538,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
type evaluator struct {
ctx context.Context
Timestamp clientmodel.Timestamp
Timestamp model.Time
}
// fatalf causes a panic with the input formatted into an error.
@ -902,7 +902,7 @@ func (ev *evaluator) vectorBinop(op itemType, lhs, rhs Vector, matching *VectorM
// In many-to-one matching the grouping labels have to ensure a unique metric
// for the result vector. Check whether those labels have already been added for
// the same matching labels.
insertSig := clientmodel.SignatureForLabels(metric.Metric, matching.Include)
insertSig := model.SignatureForLabels(metric.Metric, matching.Include...)
if !exists {
insertedSigs = map[uint64]struct{}{}
matchedSigs[sig] = insertedSigs
@ -923,36 +923,36 @@ func (ev *evaluator) vectorBinop(op itemType, lhs, rhs Vector, matching *VectorM
// signatureFunc returns a function that calculates the signature for a metric
// based on the provided labels.
func signatureFunc(labels ...clientmodel.LabelName) func(m clientmodel.COWMetric) uint64 {
func signatureFunc(labels ...model.LabelName) func(m model.COWMetric) uint64 {
if len(labels) == 0 {
return func(m clientmodel.COWMetric) uint64 {
m.Delete(clientmodel.MetricNameLabel)
return func(m model.COWMetric) uint64 {
m.Del(model.MetricNameLabel)
return uint64(m.Metric.Fingerprint())
}
}
return func(m clientmodel.COWMetric) uint64 {
return clientmodel.SignatureForLabels(m.Metric, labels)
return func(m model.COWMetric) uint64 {
return model.SignatureForLabels(m.Metric, labels...)
}
}
// resultMetric returns the metric for the given sample(s) based on the vector
// binary operation and the matching options.
func resultMetric(met clientmodel.COWMetric, op itemType, labels ...clientmodel.LabelName) clientmodel.COWMetric {
func resultMetric(met model.COWMetric, op itemType, labels ...model.LabelName) model.COWMetric {
if len(labels) == 0 {
if shouldDropMetricName(op) {
met.Delete(clientmodel.MetricNameLabel)
met.Del(model.MetricNameLabel)
}
return met
}
// As we definitely write, creating a new metric is the easiest solution.
m := clientmodel.Metric{}
m := model.Metric{}
for _, ln := range labels {
// Included labels from the `group_x` modifier are taken from the "many"-side.
if v, ok := met.Metric[ln]; ok {
m[ln] = v
}
}
return clientmodel.COWMetric{Metric: m, Copied: false}
return model.COWMetric{Metric: m, Copied: false}
}
// vectorScalarBinop evaluates a binary operation between a vector and a scalar.
@ -970,7 +970,7 @@ func (ev *evaluator) vectorScalarBinop(op itemType, lhs Vector, rhs *Scalar, swa
if keep {
lhsSample.Value = value
if shouldDropMetricName(op) {
lhsSample.Metric.Delete(clientmodel.MetricNameLabel)
lhsSample.Metric.Del(model.MetricNameLabel)
}
vector = append(vector, lhsSample)
}
@ -979,7 +979,7 @@ func (ev *evaluator) vectorScalarBinop(op itemType, lhs Vector, rhs *Scalar, swa
}
// scalarBinop evaluates a binary operation between two scalars.
func scalarBinop(op itemType, lhs, rhs clientmodel.SampleValue) clientmodel.SampleValue {
func scalarBinop(op itemType, lhs, rhs model.SampleValue) model.SampleValue {
switch op {
case itemADD:
return lhs + rhs
@ -991,9 +991,9 @@ func scalarBinop(op itemType, lhs, rhs clientmodel.SampleValue) clientmodel.Samp
return lhs / rhs
case itemMOD:
if rhs != 0 {
return clientmodel.SampleValue(int(lhs) % int(rhs))
return model.SampleValue(int(lhs) % int(rhs))
}
return clientmodel.SampleValue(math.NaN())
return model.SampleValue(math.NaN())
case itemEQL:
return btos(lhs == rhs)
case itemNEQ:
@ -1011,7 +1011,7 @@ func scalarBinop(op itemType, lhs, rhs clientmodel.SampleValue) clientmodel.Samp
}
// vectorElemBinop evaluates a binary operation between two vector elements.
func vectorElemBinop(op itemType, lhs, rhs clientmodel.SampleValue) (clientmodel.SampleValue, bool) {
func vectorElemBinop(op itemType, lhs, rhs model.SampleValue) (model.SampleValue, bool) {
switch op {
case itemADD:
return lhs + rhs, true
@ -1023,9 +1023,9 @@ func vectorElemBinop(op itemType, lhs, rhs clientmodel.SampleValue) (clientmodel
return lhs / rhs, true
case itemMOD:
if rhs != 0 {
return clientmodel.SampleValue(int(lhs) % int(rhs)), true
return model.SampleValue(int(lhs) % int(rhs)), true
}
return clientmodel.SampleValue(math.NaN()), true
return model.SampleValue(math.NaN()), true
case itemEQL:
return lhs, lhs == rhs
case itemNEQ:
@ -1043,40 +1043,40 @@ func vectorElemBinop(op itemType, lhs, rhs clientmodel.SampleValue) (clientmodel
}
// labelIntersection returns the metric of common label/value pairs of two input metrics.
func labelIntersection(metric1, metric2 clientmodel.COWMetric) clientmodel.COWMetric {
func labelIntersection(metric1, metric2 model.COWMetric) model.COWMetric {
for label, value := range metric1.Metric {
if metric2.Metric[label] != value {
metric1.Delete(label)
metric1.Del(label)
}
}
return metric1
}
type groupedAggregation struct {
labels clientmodel.COWMetric
value clientmodel.SampleValue
valuesSquaredSum clientmodel.SampleValue
labels model.COWMetric
value model.SampleValue
valuesSquaredSum model.SampleValue
groupCount int
}
// aggregation evaluates an aggregation operation on a vector.
func (ev *evaluator) aggregation(op itemType, grouping clientmodel.LabelNames, keepExtra bool, vector Vector) Vector {
func (ev *evaluator) aggregation(op itemType, grouping model.LabelNames, keepExtra bool, vector Vector) Vector {
result := map[uint64]*groupedAggregation{}
for _, sample := range vector {
groupingKey := clientmodel.SignatureForLabels(sample.Metric.Metric, grouping)
groupingKey := model.SignatureForLabels(sample.Metric.Metric, grouping...)
groupedResult, ok := result[groupingKey]
// Add a new group if it doesn't exist.
if !ok {
var m clientmodel.COWMetric
var m model.COWMetric
if keepExtra {
m = sample.Metric
m.Delete(clientmodel.MetricNameLabel)
m.Del(model.MetricNameLabel)
} else {
m = clientmodel.COWMetric{
Metric: clientmodel.Metric{},
m = model.COWMetric{
Metric: model.Metric{},
Copied: true,
}
for _, l := range grouping {
@ -1129,15 +1129,15 @@ func (ev *evaluator) aggregation(op itemType, grouping clientmodel.LabelNames, k
for _, aggr := range result {
switch op {
case itemAvg:
aggr.value = aggr.value / clientmodel.SampleValue(aggr.groupCount)
aggr.value = aggr.value / model.SampleValue(aggr.groupCount)
case itemCount:
aggr.value = clientmodel.SampleValue(aggr.groupCount)
aggr.value = model.SampleValue(aggr.groupCount)
case itemStdvar:
avg := float64(aggr.value) / float64(aggr.groupCount)
aggr.value = clientmodel.SampleValue(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg)
aggr.value = model.SampleValue(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg)
case itemStddev:
avg := float64(aggr.value) / float64(aggr.groupCount)
aggr.value = clientmodel.SampleValue(math.Sqrt(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg))
aggr.value = model.SampleValue(math.Sqrt(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg))
default:
// For other aggregations, we already have the right value.
}
@ -1152,7 +1152,7 @@ func (ev *evaluator) aggregation(op itemType, grouping clientmodel.LabelNames, k
}
// btos returns 1 if b is true, 0 otherwise.
func btos(b bool) clientmodel.SampleValue {
func btos(b bool) model.SampleValue {
if b {
return 1
}
@ -1178,7 +1178,7 @@ var StalenessDelta = 5 * time.Minute
// surrounding a given target time. If samples are found both before and after
// the target time, the sample value is interpolated between these. Otherwise,
// the single closest sample is returned verbatim.
func chooseClosestSample(samples metric.Values, timestamp clientmodel.Timestamp) *metric.SamplePair {
func chooseClosestSample(samples metric.Values, timestamp model.Time) *metric.SamplePair {
var closestBefore *metric.SamplePair
var closestAfter *metric.SamplePair
for _, candidate := range samples {
@ -1224,12 +1224,12 @@ func chooseClosestSample(samples metric.Values, timestamp clientmodel.Timestamp)
// interpolateSamples interpolates a value at a target time between two
// provided sample pairs.
func interpolateSamples(first, second *metric.SamplePair, timestamp clientmodel.Timestamp) *metric.SamplePair {
func interpolateSamples(first, second *metric.SamplePair, timestamp model.Time) *metric.SamplePair {
dv := second.Value - first.Value
dt := second.Timestamp.Sub(first.Timestamp)
dDt := dv / clientmodel.SampleValue(dt)
offset := clientmodel.SampleValue(timestamp.Sub(first.Timestamp))
dDt := dv / model.SampleValue(dt)
offset := model.SampleValue(timestamp.Sub(first.Timestamp))
return &metric.SamplePair{
Value: first.Value + (offset * dDt),

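The signature-related hunks above boil down to two API differences: COWMetric.Delete becomes Del, and SignatureForLabels takes a variadic label list. A sketch restating the signatureFunc pattern under those assumptions; signatureSketch is a hypothetical name, and COWMetric is used as it exists in common/model at the time of this commit:

package example

import "github.com/prometheus/common/model"

// signatureSketch restates the signatureFunc hunk: with no labels given,
// drop the metric name and fingerprint the remaining labels; otherwise hash
// only the requested labels via the now-variadic SignatureForLabels.
func signatureSketch(m model.COWMetric, names ...model.LabelName) uint64 {
	if len(names) == 0 {
		m.Del(model.MetricNameLabel)
		return uint64(m.Metric.Fingerprint())
	}
	return model.SignatureForLabels(m.Metric, names...)
}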

@ -21,7 +21,7 @@ import (
"strconv"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
)
@ -36,10 +36,10 @@ type Function struct {
Call func(ev *evaluator, args Expressions) Value
}
// === time() clientmodel.SampleValue ===
// === time() model.SampleValue ===
func funcTime(ev *evaluator, args Expressions) Value {
return &Scalar{
Value: clientmodel.SampleValue(ev.Timestamp.Unix()),
Value: model.SampleValue(ev.Timestamp.Unix()),
Timestamp: ev.Timestamp,
}
}
@ -65,8 +65,8 @@ func funcDelta(ev *evaluator, args Expressions) Value {
continue
}
counterCorrection := clientmodel.SampleValue(0)
lastValue := clientmodel.SampleValue(0)
counterCorrection := model.SampleValue(0)
lastValue := model.SampleValue(0)
for _, sample := range samples.Values {
currentValue := sample.Value
if isCounter && currentValue < lastValue {
@ -90,7 +90,7 @@ func funcDelta(ev *evaluator, args Expressions) Value {
// them. Depending on how many samples are found under a target interval,
// the delta results are distorted and temporal aliasing occurs (ugly
// bumps). This effect is corrected for below.
intervalCorrection := clientmodel.SampleValue(targetInterval) / clientmodel.SampleValue(sampledInterval)
intervalCorrection := model.SampleValue(targetInterval) / model.SampleValue(sampledInterval)
resultValue *= intervalCorrection
resultSample := &Sample{
@ -98,7 +98,7 @@ func funcDelta(ev *evaluator, args Expressions) Value {
Value: resultValue,
Timestamp: ev.Timestamp,
}
resultSample.Metric.Delete(clientmodel.MetricNameLabel)
resultSample.Metric.Del(model.MetricNameLabel)
resultVector = append(resultVector, resultSample)
}
return resultVector
@ -114,7 +114,7 @@ func funcRate(ev *evaluator, args Expressions) Value {
// matrix, such as looking at the samples themselves.
interval := args[0].(*MatrixSelector).Range
for i := range vector {
vector[i].Value /= clientmodel.SampleValue(interval / time.Second)
vector[i].Value /= model.SampleValue(interval / time.Second)
}
return vector
}
@ -191,10 +191,10 @@ func funcDropCommonLabels(ev *evaluator, args Expressions) Value {
if len(vector) < 1 {
return Vector{}
}
common := clientmodel.LabelSet{}
common := model.LabelSet{}
for k, v := range vector[0].Metric.Metric {
// TODO(julius): Should we also drop common metric names?
if k == clientmodel.MetricNameLabel {
if k == model.MetricNameLabel {
continue
}
common[k] = v
@ -215,7 +215,7 @@ func funcDropCommonLabels(ev *evaluator, args Expressions) Value {
for _, el := range vector {
for k := range el.Metric.Metric {
if _, ok := common[k]; ok {
el.Metric.Delete(k)
el.Metric.Del(k)
}
}
}
@ -235,8 +235,8 @@ func funcRound(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0])
for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Floor(float64(el.Value)*toNearestInverse+0.5) / toNearestInverse)
el.Metric.Del(model.MetricNameLabel)
el.Value = model.SampleValue(math.Floor(float64(el.Value)*toNearestInverse+0.5) / toNearestInverse)
}
return vector
}
@ -245,20 +245,20 @@ func funcRound(ev *evaluator, args Expressions) Value {
func funcScalar(ev *evaluator, args Expressions) Value {
v := ev.evalVector(args[0])
if len(v) != 1 {
return &Scalar{clientmodel.SampleValue(math.NaN()), ev.Timestamp}
return &Scalar{model.SampleValue(math.NaN()), ev.Timestamp}
}
return &Scalar{clientmodel.SampleValue(v[0].Value), ev.Timestamp}
return &Scalar{model.SampleValue(v[0].Value), ev.Timestamp}
}
// === count_scalar(vector ExprVector) model.SampleValue ===
func funcCountScalar(ev *evaluator, args Expressions) Value {
return &Scalar{
Value: clientmodel.SampleValue(len(ev.evalVector(args[0]))),
Value: model.SampleValue(len(ev.evalVector(args[0]))),
Timestamp: ev.Timestamp,
}
}
func aggrOverTime(ev *evaluator, args Expressions, aggrFn func(metric.Values) clientmodel.SampleValue) Value {
func aggrOverTime(ev *evaluator, args Expressions, aggrFn func(metric.Values) model.SampleValue) Value {
matrix := ev.evalMatrix(args[0])
resultVector := Vector{}
@ -267,7 +267,7 @@ func aggrOverTime(ev *evaluator, args Expressions, aggrFn func(metric.Values) cl
continue
}
el.Metric.Delete(clientmodel.MetricNameLabel)
el.Metric.Del(model.MetricNameLabel)
resultVector = append(resultVector, &Sample{
Metric: el.Metric,
Value: aggrFn(el.Values),
@ -279,19 +279,19 @@ func aggrOverTime(ev *evaluator, args Expressions, aggrFn func(metric.Values) cl
// === avg_over_time(matrix ExprMatrix) Vector ===
func funcAvgOverTime(ev *evaluator, args Expressions) Value {
return aggrOverTime(ev, args, func(values metric.Values) clientmodel.SampleValue {
var sum clientmodel.SampleValue
return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue {
var sum model.SampleValue
for _, v := range values {
sum += v.Value
}
return sum / clientmodel.SampleValue(len(values))
return sum / model.SampleValue(len(values))
})
}
// === count_over_time(matrix ExprMatrix) Vector ===
func funcCountOverTime(ev *evaluator, args Expressions) Value {
return aggrOverTime(ev, args, func(values metric.Values) clientmodel.SampleValue {
return clientmodel.SampleValue(len(values))
return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue {
return model.SampleValue(len(values))
})
}
@ -299,38 +299,38 @@ func funcCountOverTime(ev *evaluator, args Expressions) Value {
func funcFloor(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0])
for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Floor(float64(el.Value)))
el.Metric.Del(model.MetricNameLabel)
el.Value = model.SampleValue(math.Floor(float64(el.Value)))
}
return vector
}
// === max_over_time(matrix ExprMatrix) Vector ===
func funcMaxOverTime(ev *evaluator, args Expressions) Value {
return aggrOverTime(ev, args, func(values metric.Values) clientmodel.SampleValue {
return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue {
max := math.Inf(-1)
for _, v := range values {
max = math.Max(max, float64(v.Value))
}
return clientmodel.SampleValue(max)
return model.SampleValue(max)
})
}
// === min_over_time(matrix ExprMatrix) Vector ===
func funcMinOverTime(ev *evaluator, args Expressions) Value {
return aggrOverTime(ev, args, func(values metric.Values) clientmodel.SampleValue {
return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue {
min := math.Inf(1)
for _, v := range values {
min = math.Min(min, float64(v.Value))
}
return clientmodel.SampleValue(min)
return model.SampleValue(min)
})
}
// === sum_over_time(matrix ExprMatrix) Vector ===
func funcSumOverTime(ev *evaluator, args Expressions) Value {
return aggrOverTime(ev, args, func(values metric.Values) clientmodel.SampleValue {
var sum clientmodel.SampleValue
return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue {
var sum model.SampleValue
for _, v := range values {
sum += v.Value
}
@ -342,8 +342,8 @@ func funcSumOverTime(ev *evaluator, args Expressions) Value {
func funcAbs(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0])
for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Abs(float64(el.Value)))
el.Metric.Del(model.MetricNameLabel)
el.Value = model.SampleValue(math.Abs(float64(el.Value)))
}
return vector
}
@ -353,17 +353,17 @@ func funcAbsent(ev *evaluator, args Expressions) Value {
if len(ev.evalVector(args[0])) > 0 {
return Vector{}
}
m := clientmodel.Metric{}
m := model.Metric{}
if vs, ok := args[0].(*VectorSelector); ok {
for _, matcher := range vs.LabelMatchers {
if matcher.Type == metric.Equal && matcher.Name != clientmodel.MetricNameLabel {
if matcher.Type == metric.Equal && matcher.Name != model.MetricNameLabel {
m[matcher.Name] = matcher.Value
}
}
}
return Vector{
&Sample{
Metric: clientmodel.COWMetric{
Metric: model.COWMetric{
Metric: m,
Copied: true,
},
@ -377,8 +377,8 @@ func funcAbsent(ev *evaluator, args Expressions) Value {
func funcCeil(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0])
for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Ceil(float64(el.Value)))
el.Metric.Del(model.MetricNameLabel)
el.Value = model.SampleValue(math.Ceil(float64(el.Value)))
}
return vector
}
@ -387,8 +387,8 @@ func funcCeil(ev *evaluator, args Expressions) Value {
func funcExp(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0])
for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Exp(float64(el.Value)))
el.Metric.Del(model.MetricNameLabel)
el.Value = model.SampleValue(math.Exp(float64(el.Value)))
}
return vector
}
@ -397,8 +397,8 @@ func funcExp(ev *evaluator, args Expressions) Value {
func funcSqrt(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0])
for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Sqrt(float64(el.Value)))
el.Metric.Del(model.MetricNameLabel)
el.Value = model.SampleValue(math.Sqrt(float64(el.Value)))
}
return vector
}
@ -407,8 +407,8 @@ func funcSqrt(ev *evaluator, args Expressions) Value {
func funcLn(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0])
for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Log(float64(el.Value)))
el.Metric.Del(model.MetricNameLabel)
el.Value = model.SampleValue(math.Log(float64(el.Value)))
}
return vector
}
@ -417,8 +417,8 @@ func funcLn(ev *evaluator, args Expressions) Value {
func funcLog2(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0])
for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Log2(float64(el.Value)))
el.Metric.Del(model.MetricNameLabel)
el.Value = model.SampleValue(math.Log2(float64(el.Value)))
}
return vector
}
@ -427,8 +427,8 @@ func funcLog2(ev *evaluator, args Expressions) Value {
func funcLog10(ev *evaluator, args Expressions) Value {
vector := ev.evalVector(args[0])
for _, el := range vector {
el.Metric.Delete(clientmodel.MetricNameLabel)
el.Value = clientmodel.SampleValue(math.Log10(float64(el.Value)))
el.Metric.Del(model.MetricNameLabel)
el.Value = model.SampleValue(math.Log10(float64(el.Value)))
}
return vector
}
@ -446,13 +446,13 @@ func funcDeriv(ev *evaluator, args Expressions) Value {
}
// Least squares.
n := clientmodel.SampleValue(0)
sumY := clientmodel.SampleValue(0)
sumX := clientmodel.SampleValue(0)
sumXY := clientmodel.SampleValue(0)
sumX2 := clientmodel.SampleValue(0)
var (
n model.SampleValue
sumX, sumY model.SampleValue
sumXY, sumX2 model.SampleValue
)
for _, sample := range samples.Values {
x := clientmodel.SampleValue(sample.Timestamp.UnixNano() / 1e9)
x := model.SampleValue(sample.Timestamp.UnixNano() / 1e9)
n += 1.0
sumY += sample.Value
sumX += x
@ -469,7 +469,7 @@ func funcDeriv(ev *evaluator, args Expressions) Value {
Value: resultValue,
Timestamp: ev.Timestamp,
}
resultSample.Metric.Delete(clientmodel.MetricNameLabel)
resultSample.Metric.Del(model.MetricNameLabel)
resultVector = append(resultVector, resultSample)
}
return resultVector
@ -478,16 +478,16 @@ func funcDeriv(ev *evaluator, args Expressions) Value {
// === predict_linear(node ExprMatrix, k ExprScalar) Vector ===
func funcPredictLinear(ev *evaluator, args Expressions) Value {
vector := funcDeriv(ev, args[0:1]).(Vector)
duration := clientmodel.SampleValue(clientmodel.SampleValue(ev.evalFloat(args[1])))
duration := model.SampleValue(model.SampleValue(ev.evalFloat(args[1])))
excludedLabels := map[clientmodel.LabelName]struct{}{
clientmodel.MetricNameLabel: {},
excludedLabels := map[model.LabelName]struct{}{
model.MetricNameLabel: {},
}
// Calculate predicted delta over the duration.
signatureToDelta := map[uint64]clientmodel.SampleValue{}
signatureToDelta := map[uint64]model.SampleValue{}
for _, el := range vector {
signature := clientmodel.SignatureWithoutLabels(el.Metric.Metric, excludedLabels)
signature := model.SignatureWithoutLabels(el.Metric.Metric, excludedLabels)
signatureToDelta[signature] = el.Value * duration
}
@ -498,10 +498,10 @@ func funcPredictLinear(ev *evaluator, args Expressions) Value {
if len(samples.Values) < 2 {
continue
}
signature := clientmodel.SignatureWithoutLabels(samples.Metric.Metric, excludedLabels)
signature := model.SignatureWithoutLabels(samples.Metric.Metric, excludedLabels)
delta, ok := signatureToDelta[signature]
if ok {
samples.Metric.Delete(clientmodel.MetricNameLabel)
samples.Metric.Del(model.MetricNameLabel)
outVec = append(outVec, &Sample{
Metric: samples.Metric,
Value: delta + samples.Values[1].Value,
@ -514,25 +514,25 @@ func funcPredictLinear(ev *evaluator, args Expressions) Value {
// === histogram_quantile(k ExprScalar, vector ExprVector) Vector ===
func funcHistogramQuantile(ev *evaluator, args Expressions) Value {
q := clientmodel.SampleValue(ev.evalFloat(args[0]))
q := model.SampleValue(ev.evalFloat(args[0]))
inVec := ev.evalVector(args[1])
outVec := Vector{}
signatureToMetricWithBuckets := map[uint64]*metricWithBuckets{}
for _, el := range inVec {
upperBound, err := strconv.ParseFloat(
string(el.Metric.Metric[clientmodel.BucketLabel]), 64,
string(el.Metric.Metric[model.BucketLabel]), 64,
)
if err != nil {
// Oops, no bucket label or malformed label value. Skip.
// TODO(beorn7): Issue a warning somehow.
continue
}
signature := clientmodel.SignatureWithoutLabels(el.Metric.Metric, excludedLabels)
signature := model.SignatureWithoutLabels(el.Metric.Metric, excludedLabels)
mb, ok := signatureToMetricWithBuckets[signature]
if !ok {
el.Metric.Delete(clientmodel.BucketLabel)
el.Metric.Delete(clientmodel.MetricNameLabel)
el.Metric.Del(model.BucketLabel)
el.Metric.Del(model.MetricNameLabel)
mb = &metricWithBuckets{el.Metric, nil}
signatureToMetricWithBuckets[signature] = mb
}
@ -542,7 +542,7 @@ func funcHistogramQuantile(ev *evaluator, args Expressions) Value {
for _, mb := range signatureToMetricWithBuckets {
outVec = append(outVec, &Sample{
Metric: mb.metric,
Value: clientmodel.SampleValue(quantile(q, mb.buckets)),
Value: model.SampleValue(quantile(q, mb.buckets)),
Timestamp: ev.Timestamp,
})
}
@ -557,7 +557,7 @@ func funcResets(ev *evaluator, args Expressions) Value {
for _, samples := range in {
resets := 0
prev := clientmodel.SampleValue(samples.Values[0].Value)
prev := model.SampleValue(samples.Values[0].Value)
for _, sample := range samples.Values[1:] {
current := sample.Value
if current < prev {
@ -568,10 +568,10 @@ func funcResets(ev *evaluator, args Expressions) Value {
rs := &Sample{
Metric: samples.Metric,
Value: clientmodel.SampleValue(resets),
Value: model.SampleValue(resets),
Timestamp: ev.Timestamp,
}
rs.Metric.Delete(clientmodel.MetricNameLabel)
rs.Metric.Del(model.MetricNameLabel)
out = append(out, rs)
}
return out
@ -584,7 +584,7 @@ func funcChanges(ev *evaluator, args Expressions) Value {
for _, samples := range in {
changes := 0
prev := clientmodel.SampleValue(samples.Values[0].Value)
prev := model.SampleValue(samples.Values[0].Value)
for _, sample := range samples.Values[1:] {
current := sample.Value
if current != prev {
@ -595,10 +595,10 @@ func funcChanges(ev *evaluator, args Expressions) Value {
rs := &Sample{
Metric: samples.Metric,
Value: clientmodel.SampleValue(changes),
Value: model.SampleValue(changes),
Timestamp: ev.Timestamp,
}
rs.Metric.Delete(clientmodel.MetricNameLabel)
rs.Metric.Del(model.MetricNameLabel)
out = append(out, rs)
}
return out
@ -608,9 +608,9 @@ func funcChanges(ev *evaluator, args Expressions) Value {
func funcLabelReplace(ev *evaluator, args Expressions) Value {
var (
vector = ev.evalVector(args[0])
dst = clientmodel.LabelName(ev.evalString(args[1]).Value)
dst = model.LabelName(ev.evalString(args[1]).Value)
repl = ev.evalString(args[2]).Value
src = clientmodel.LabelName(ev.evalString(args[3]).Value)
src = model.LabelName(ev.evalString(args[3]).Value)
regexStr = ev.evalString(args[4]).Value
)
@ -618,11 +618,11 @@ func funcLabelReplace(ev *evaluator, args Expressions) Value {
if err != nil {
ev.errorf("invalid regular expression in label_replace(): %s", regexStr)
}
if !clientmodel.LabelNameRE.MatchString(string(dst)) {
if !model.LabelNameRE.MatchString(string(dst)) {
ev.errorf("invalid destination label name in label_replace(): %s", dst)
}
outSet := make(map[clientmodel.Fingerprint]struct{}, len(vector))
outSet := make(map[model.Fingerprint]struct{}, len(vector))
for _, el := range vector {
srcVal := string(el.Metric.Metric[src])
indexes := regex.FindStringSubmatchIndex(srcVal)
@ -632,9 +632,9 @@ func funcLabelReplace(ev *evaluator, args Expressions) Value {
}
res := regex.ExpandString([]byte{}, repl, srcVal, indexes)
if len(res) == 0 {
el.Metric.Delete(dst)
el.Metric.Del(dst)
} else {
el.Metric.Set(dst, clientmodel.LabelValue(res))
el.Metric.Set(dst, model.LabelValue(res))
}
fp := el.Metric.Metric.Fingerprint()


@ -22,7 +22,7 @@ import (
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/util/strutil"
@ -74,7 +74,7 @@ func ParseExpr(input string) (Expr, error) {
}
// ParseMetric parses the input into a metric
func ParseMetric(input string) (m clientmodel.Metric, err error) {
func ParseMetric(input string) (m model.Metric, err error) {
p := newParser(input)
defer p.recover(&err)
@ -103,7 +103,7 @@ func ParseMetricSelector(input string) (m metric.LabelMatchers, err error) {
}
// parseSeriesDesc parses the description of a time series.
func parseSeriesDesc(input string) (clientmodel.Metric, []sequenceValue, error) {
func parseSeriesDesc(input string) (model.Metric, []sequenceValue, error) {
p := newParser(input)
p.lex.seriesDesc = true
@ -154,7 +154,7 @@ func (p *parser) parseExpr() (expr Expr, err error) {
// sequenceValue is an omittable value in a sequence of time series values.
type sequenceValue struct {
value clientmodel.SampleValue
value model.SampleValue
omitted bool
}
@ -166,7 +166,7 @@ func (v sequenceValue) String() string {
}
// parseSeriesDesc parses a description of a time series into its metric and value sequence.
func (p *parser) parseSeriesDesc() (m clientmodel.Metric, vals []sequenceValue, err error) {
func (p *parser) parseSeriesDesc() (m model.Metric, vals []sequenceValue, err error) {
defer p.recover(&err)
m = p.metric()
@ -203,7 +203,7 @@ func (p *parser) parseSeriesDesc() (m clientmodel.Metric, vals []sequenceValue,
}
k := sign * p.number(p.expect(itemNumber, ctx).val)
vals = append(vals, sequenceValue{
value: clientmodel.SampleValue(k),
value: model.SampleValue(k),
})
// If there are no offset repetitions specified, proceed with the next value.
@ -231,7 +231,7 @@ func (p *parser) parseSeriesDesc() (m clientmodel.Metric, vals []sequenceValue,
for i := uint64(0); i < times; i++ {
k += offset
vals = append(vals, sequenceValue{
value: clientmodel.SampleValue(k),
value: model.SampleValue(k),
})
}
}
@ -384,7 +384,7 @@ func (p *parser) alertStmt() *AlertStmt {
}
}
lset := clientmodel.LabelSet{}
lset := model.LabelSet{}
if p.peek().typ == itemWith {
p.expect(itemWith, ctx)
lset = p.labelSet()
@ -447,7 +447,7 @@ func (p *parser) recordStmt() *RecordStmt {
name := p.expectOneOf(itemIdentifier, itemMetricIdentifier, ctx).val
var lset clientmodel.LabelSet
var lset model.LabelSet
if p.peek().typ == itemLeftBrace {
lset = p.labelSet()
}
@ -638,7 +638,7 @@ func (p *parser) primaryExpr() Expr {
switch t := p.next(); {
case t.typ == itemNumber:
f := p.number(t.val)
return &NumberLiteral{clientmodel.SampleValue(f)}
return &NumberLiteral{model.SampleValue(f)}
case t.typ == itemString:
s := t.val[1 : len(t.val)-1]
@ -673,15 +673,15 @@ func (p *parser) primaryExpr() Expr {
//
// '(' <label_name>, ... ')'
//
func (p *parser) labels() clientmodel.LabelNames {
func (p *parser) labels() model.LabelNames {
const ctx = "grouping opts"
p.expect(itemLeftParen, ctx)
labels := clientmodel.LabelNames{}
labels := model.LabelNames{}
for {
id := p.expect(itemIdentifier, ctx)
labels = append(labels, clientmodel.LabelName(id.val))
labels = append(labels, model.LabelName(id.val))
if p.peek().typ != itemComma {
break
@ -705,7 +705,7 @@ func (p *parser) aggrExpr() *AggregateExpr {
if !agop.typ.isAggregator() {
p.errorf("expected aggregation operator but got %s", agop)
}
var grouping clientmodel.LabelNames
var grouping model.LabelNames
var keepExtra bool
modifiersFirst := false
@ -788,8 +788,8 @@ func (p *parser) call(name string) *Call {
//
// '{' [ <labelname> '=' <match_string>, ... ] '}'
//
func (p *parser) labelSet() clientmodel.LabelSet {
set := clientmodel.LabelSet{}
func (p *parser) labelSet() model.LabelSet {
set := model.LabelSet{}
for _, lm := range p.labelMatchers(itemEQL) {
set[lm.Name] = lm.Value
}
@ -849,8 +849,8 @@ func (p *parser) labelMatchers(operators ...itemType) metric.LabelMatchers {
m, err := metric.NewLabelMatcher(
matchType,
clientmodel.LabelName(label.val),
clientmodel.LabelValue(val),
model.LabelName(label.val),
model.LabelValue(val),
)
if err != nil {
p.error(err)
@ -875,9 +875,9 @@ func (p *parser) labelMatchers(operators ...itemType) metric.LabelMatchers {
// <label_set>
// <metric_identifier> [<label_set>]
//
func (p *parser) metric() clientmodel.Metric {
func (p *parser) metric() model.Metric {
name := ""
m := clientmodel.Metric{}
m := model.Metric{}
t := p.peek().typ
if t == itemIdentifier || t == itemMetricIdentifier {
@ -888,10 +888,10 @@ func (p *parser) metric() clientmodel.Metric {
p.errorf("missing metric name or metric selector")
}
if t == itemLeftBrace {
m = clientmodel.Metric(p.labelSet())
m = model.Metric(p.labelSet())
}
if name != "" {
m[clientmodel.MetricNameLabel] = clientmodel.LabelValue(name)
m[model.MetricNameLabel] = model.LabelValue(name)
}
return m
}
@ -912,15 +912,15 @@ func (p *parser) vectorSelector(name string) *VectorSelector {
// Metric name must not be set in the label matchers and before at the same time.
if name != "" {
for _, m := range matchers {
if m.Name == clientmodel.MetricNameLabel {
if m.Name == model.MetricNameLabel {
p.errorf("metric name must not be set twice: %q or %q", name, m.Value)
}
}
// Set name label matching.
matchers = append(matchers, &metric.LabelMatcher{
Type: metric.Equal,
Name: clientmodel.MetricNameLabel,
Value: clientmodel.LabelValue(name),
Name: model.MetricNameLabel,
Value: model.LabelValue(name),
})
}


@ -21,7 +21,7 @@ import (
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
)
@ -37,10 +37,10 @@ var testExpr = []struct {
expected: &NumberLiteral{1},
}, {
input: "+Inf",
expected: &NumberLiteral{clientmodel.SampleValue(math.Inf(1))},
expected: &NumberLiteral{model.SampleValue(math.Inf(1))},
}, {
input: "-Inf",
expected: &NumberLiteral{clientmodel.SampleValue(math.Inf(-1))},
expected: &NumberLiteral{model.SampleValue(math.Inf(-1))},
}, {
input: ".5",
expected: &NumberLiteral{0.5},
@ -129,7 +129,7 @@ var testExpr = []struct {
Expr: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
},
@ -139,7 +139,7 @@ var testExpr = []struct {
Expr: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
},
@ -232,13 +232,13 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "foo",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
RHS: &VectorSelector{
Name: "bar",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
VectorMatching: &VectorMatching{Card: CardOneToOne},
@ -250,7 +250,7 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "foo",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
RHS: &NumberLiteral{1},
@ -263,7 +263,7 @@ var testExpr = []struct {
RHS: &VectorSelector{
Name: "bar",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
},
@ -274,13 +274,13 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "foo",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
RHS: &VectorSelector{
Name: "bar",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
VectorMatching: &VectorMatching{Card: CardManyToMany},
@ -292,13 +292,13 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "foo",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
RHS: &VectorSelector{
Name: "bar",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
VectorMatching: &VectorMatching{Card: CardManyToMany},
@ -313,13 +313,13 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "foo",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
RHS: &VectorSelector{
Name: "bar",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
VectorMatching: &VectorMatching{Card: CardOneToOne},
@ -329,13 +329,13 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "bla",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bla"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bla"},
},
},
RHS: &VectorSelector{
Name: "blub",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "blub"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "blub"},
},
},
VectorMatching: &VectorMatching{Card: CardManyToMany},
@ -350,7 +350,7 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "bar",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
RHS: &BinaryExpr{
@ -358,24 +358,24 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "bla",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bla"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bla"},
},
},
RHS: &VectorSelector{
Name: "blub",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "blub"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "blub"},
},
},
VectorMatching: &VectorMatching{
Card: CardOneToMany,
On: clientmodel.LabelNames{"baz", "buz"},
Include: clientmodel.LabelNames{"test"},
On: model.LabelNames{"baz", "buz"},
Include: model.LabelNames{"test"},
},
},
VectorMatching: &VectorMatching{
Card: CardOneToOne,
On: clientmodel.LabelNames{"foo"},
On: model.LabelNames{"foo"},
},
},
}, {
@ -385,18 +385,18 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "foo",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
RHS: &VectorSelector{
Name: "bar",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
VectorMatching: &VectorMatching{
Card: CardOneToOne,
On: clientmodel.LabelNames{"test", "blub"},
On: model.LabelNames{"test", "blub"},
},
},
}, {
@ -406,18 +406,18 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "foo",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
RHS: &VectorSelector{
Name: "bar",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
VectorMatching: &VectorMatching{
Card: CardManyToMany,
On: clientmodel.LabelNames{"test", "blub"},
On: model.LabelNames{"test", "blub"},
},
},
}, {
@ -427,19 +427,19 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "foo",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
RHS: &VectorSelector{
Name: "bar",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
VectorMatching: &VectorMatching{
Card: CardManyToOne,
On: clientmodel.LabelNames{"test", "blub"},
Include: clientmodel.LabelNames{"bar"},
On: model.LabelNames{"test", "blub"},
Include: model.LabelNames{"bar"},
},
},
}, {
@ -449,19 +449,19 @@ var testExpr = []struct {
LHS: &VectorSelector{
Name: "foo",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
RHS: &VectorSelector{
Name: "bar",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
VectorMatching: &VectorMatching{
Card: CardOneToMany,
On: clientmodel.LabelNames{"test", "blub"},
Include: clientmodel.LabelNames{"bar", "foo"},
On: model.LabelNames{"test", "blub"},
Include: model.LabelNames{"bar", "foo"},
},
},
}, {
@ -520,7 +520,7 @@ var testExpr = []struct {
Name: "foo",
Offset: 0,
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
}, {
@ -529,7 +529,7 @@ var testExpr = []struct {
Name: "foo",
Offset: 5 * time.Minute,
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
}, {
@ -539,7 +539,7 @@ var testExpr = []struct {
Offset: 0,
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: "a", Value: "bc"},
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo:bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo:bar"},
},
},
}, {
@ -549,7 +549,7 @@ var testExpr = []struct {
Offset: 0,
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: "NaN", Value: "bc"},
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
}, {
@ -562,7 +562,7 @@ var testExpr = []struct {
{Type: metric.NotEqual, Name: "foo", Value: "bar"},
mustLabelMatcher(metric.RegexMatch, "test", "test"),
mustLabelMatcher(metric.RegexNoMatch, "bar", "baz"),
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
}, {
@ -644,7 +644,7 @@ var testExpr = []struct {
Offset: 0,
Range: 5 * time.Second,
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
},
},
}, {
@ -654,7 +654,7 @@ var testExpr = []struct {
Offset: 0,
Range: 5 * time.Minute,
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
},
},
}, {
@ -664,7 +664,7 @@ var testExpr = []struct {
Offset: 5 * time.Minute,
Range: 5 * time.Hour,
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
},
},
}, {
@ -674,7 +674,7 @@ var testExpr = []struct {
Offset: 10 * time.Second,
Range: 5 * 24 * time.Hour,
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
},
},
}, {
@ -684,7 +684,7 @@ var testExpr = []struct {
Offset: 14 * 24 * time.Hour,
Range: 5 * 7 * 24 * time.Hour,
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
},
},
}, {
@ -695,7 +695,7 @@ var testExpr = []struct {
Range: 5 * 365 * 24 * time.Hour,
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: "a", Value: "b"},
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "test"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "test"},
},
},
}, {
@ -750,10 +750,10 @@ var testExpr = []struct {
Expr: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
Grouping: clientmodel.LabelNames{"foo"},
Grouping: model.LabelNames{"foo"},
},
}, {
input: "sum by (foo) keep_common (some_metric)",
@ -763,10 +763,10 @@ var testExpr = []struct {
Expr: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
Grouping: clientmodel.LabelNames{"foo"},
Grouping: model.LabelNames{"foo"},
},
}, {
input: "sum (some_metric) by (foo,bar) keep_common",
@ -776,10 +776,10 @@ var testExpr = []struct {
Expr: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
Grouping: clientmodel.LabelNames{"foo", "bar"},
Grouping: model.LabelNames{"foo", "bar"},
},
}, {
input: "avg by (foo)(some_metric)",
@ -788,10 +788,10 @@ var testExpr = []struct {
Expr: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
Grouping: clientmodel.LabelNames{"foo"},
Grouping: model.LabelNames{"foo"},
},
}, {
input: "COUNT by (foo) keep_common (some_metric)",
@ -800,10 +800,10 @@ var testExpr = []struct {
Expr: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
Grouping: clientmodel.LabelNames{"foo"},
Grouping: model.LabelNames{"foo"},
KeepExtraLabels: true,
},
}, {
@ -813,10 +813,10 @@ var testExpr = []struct {
Expr: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
Grouping: clientmodel.LabelNames{"foo"},
Grouping: model.LabelNames{"foo"},
KeepExtraLabels: true,
},
}, {
@ -826,10 +826,10 @@ var testExpr = []struct {
Expr: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
Grouping: clientmodel.LabelNames{"foo"},
Grouping: model.LabelNames{"foo"},
},
}, {
input: "stddev(some_metric)",
@ -838,7 +838,7 @@ var testExpr = []struct {
Expr: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
},
@ -849,10 +849,10 @@ var testExpr = []struct {
Expr: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
Grouping: clientmodel.LabelNames{"foo"},
Grouping: model.LabelNames{"foo"},
},
}, {
input: `sum some_metric by (test)`,
@ -902,7 +902,7 @@ var testExpr = []struct {
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.NotEqual, Name: "foo", Value: "bar"},
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
},
@ -915,7 +915,7 @@ var testExpr = []struct {
&MatrixSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
Range: 5 * time.Minute,
},
@ -929,7 +929,7 @@ var testExpr = []struct {
&VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
},
@ -942,7 +942,7 @@ var testExpr = []struct {
&VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
&NumberLiteral{5},
@ -1090,14 +1090,14 @@ var testStatement = []struct {
Name: "dc:http_request:rate5m",
Expr: &AggregateExpr{
Op: itemSum,
Grouping: clientmodel.LabelNames{"dc"},
Grouping: model.LabelNames{"dc"},
Expr: &Call{
Func: mustGetFunction("rate"),
Args: Expressions{
&MatrixSelector{
Name: "http_request_count",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "http_request_count"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "http_request_count"},
},
Range: 5 * time.Minute,
},
@ -1113,12 +1113,12 @@ var testStatement = []struct {
LHS: &VectorSelector{
Name: "dc:http_request:rate5m",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "dc:http_request:rate5m"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "dc:http_request:rate5m"},
},
},
RHS: &NumberLiteral{10000},
}},
Labels: clientmodel.LabelSet{"service": "testservice"},
Labels: model.LabelSet{"service": "testservice"},
Duration: 5 * time.Minute,
Summary: "Global request rate low",
Description: "The global request rate is low",
@ -1129,7 +1129,7 @@ var testStatement = []struct {
Name: "bar",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: "label1", Value: "value1"},
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
Labels: nil,
@ -1141,12 +1141,12 @@ var testStatement = []struct {
LHS: &VectorSelector{
Name: "foo",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "foo"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "foo"},
},
},
RHS: &NumberLiteral{10},
},
Labels: clientmodel.LabelSet{},
Labels: model.LabelSet{},
Summary: "Baz",
Description: "BazAlert",
Runbook: "http://my.url",
@ -1162,10 +1162,10 @@ var testStatement = []struct {
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: "a", Value: "b"},
mustLabelMatcher(metric.RegexMatch, "x", "y"),
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "bar"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "bar"},
},
},
Labels: clientmodel.LabelSet{"x": "", "a": "z"},
Labels: model.LabelSet{"x": "", "a": "z"},
},
},
}, {
@ -1181,12 +1181,12 @@ var testStatement = []struct {
LHS: &VectorSelector{
Name: "some_metric",
LabelMatchers: metric.LabelMatchers{
{Type: metric.Equal, Name: clientmodel.MetricNameLabel, Value: "some_metric"},
{Type: metric.Equal, Name: model.MetricNameLabel, Value: "some_metric"},
},
},
RHS: &NumberLiteral{1},
},
Labels: clientmodel.LabelSet{},
Labels: model.LabelSet{},
Summary: "Global request rate low",
Description: "The global request rate is low",
},
@ -1311,7 +1311,7 @@ func TestParseStatements(t *testing.T) {
}
}
func mustLabelMatcher(mt metric.MatchType, name clientmodel.LabelName, val clientmodel.LabelValue) *metric.LabelMatcher {
func mustLabelMatcher(mt metric.MatchType, name model.LabelName, val model.LabelValue) *metric.LabelMatcher {
m, err := metric.NewLabelMatcher(mt, name, val)
if err != nil {
panic(err)
@ -1329,57 +1329,57 @@ func mustGetFunction(name string) *Function {
var testSeries = []struct {
input string
expectedMetric clientmodel.Metric
expectedMetric model.Metric
expectedValues []sequenceValue
fail bool
}{
{
input: `{} 1 2 3`,
expectedMetric: clientmodel.Metric{},
expectedMetric: model.Metric{},
expectedValues: newSeq(1, 2, 3),
}, {
input: `{a="b"} -1 2 3`,
expectedMetric: clientmodel.Metric{
expectedMetric: model.Metric{
"a": "b",
},
expectedValues: newSeq(-1, 2, 3),
}, {
input: `my_metric 1 2 3`,
expectedMetric: clientmodel.Metric{
clientmodel.MetricNameLabel: "my_metric",
expectedMetric: model.Metric{
model.MetricNameLabel: "my_metric",
},
expectedValues: newSeq(1, 2, 3),
}, {
input: `my_metric{} 1 2 3`,
expectedMetric: clientmodel.Metric{
clientmodel.MetricNameLabel: "my_metric",
expectedMetric: model.Metric{
model.MetricNameLabel: "my_metric",
},
expectedValues: newSeq(1, 2, 3),
}, {
input: `my_metric{a="b"} 1 2 3`,
expectedMetric: clientmodel.Metric{
clientmodel.MetricNameLabel: "my_metric",
expectedMetric: model.Metric{
model.MetricNameLabel: "my_metric",
"a": "b",
},
expectedValues: newSeq(1, 2, 3),
}, {
input: `my_metric{a="b"} 1 2 3-10x4`,
expectedMetric: clientmodel.Metric{
clientmodel.MetricNameLabel: "my_metric",
expectedMetric: model.Metric{
model.MetricNameLabel: "my_metric",
"a": "b",
},
expectedValues: newSeq(1, 2, 3, -7, -17, -27, -37),
}, {
input: `my_metric{a="b"} 1 2 3-0x4`,
expectedMetric: clientmodel.Metric{
clientmodel.MetricNameLabel: "my_metric",
expectedMetric: model.Metric{
model.MetricNameLabel: "my_metric",
"a": "b",
},
expectedValues: newSeq(1, 2, 3, 3, 3, 3, 3),
}, {
input: `my_metric{a="b"} 1 3 _ 5 _x4`,
expectedMetric: clientmodel.Metric{
clientmodel.MetricNameLabel: "my_metric",
expectedMetric: model.Metric{
model.MetricNameLabel: "my_metric",
"a": "b",
},
expectedValues: newSeq(1, 3, none, 5, none, none, none, none),
@ -1397,7 +1397,7 @@ func newSeq(vals ...float64) (res []sequenceValue) {
if v == none {
res = append(res, sequenceValue{omitted: true})
} else {
res = append(res, sequenceValue{value: clientmodel.SampleValue(v)})
res = append(res, sequenceValue{value: model.SampleValue(v)})
}
}
return res
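
For orientation, the "K-SxN" shorthand used in these series descriptions (for example "3-10x4") expands to the start value K followed by N further values, each offset by the signed step S. A minimal sketch of that expansion with the migrated model.SampleValue type; the helper name expandSeq is invented and is not part of the parser:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// expandSeq mirrors the shorthand: the start value, then n more values each
// offset by step (negative steps give the decreasing series above).
func expandSeq(start, step float64, n int) []model.SampleValue {
	vals := []model.SampleValue{model.SampleValue(start)}
	for i := 1; i <= n; i++ {
		vals = append(vals, model.SampleValue(start+float64(i)*step))
	}
	return vals
}

func main() {
	fmt.Println(expandSeq(3, -10, 4)) // [3 -7 -17 -27 -37]
}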

View file

@ -18,7 +18,7 @@ import (
"sort"
"strings"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/util/strutil"
@ -27,14 +27,14 @@ import (
func (matrix Matrix) String() string {
metricStrings := make([]string, 0, len(matrix))
for _, sampleStream := range matrix {
metricName, hasName := sampleStream.Metric.Metric[clientmodel.MetricNameLabel]
metricName, hasName := sampleStream.Metric.Metric[model.MetricNameLabel]
numLabels := len(sampleStream.Metric.Metric)
if hasName {
numLabels--
}
labelStrings := make([]string, 0, numLabels)
for label, value := range sampleStream.Metric.Metric {
if label != clientmodel.MetricNameLabel {
if label != model.MetricNameLabel {
labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value))
}
}
@ -233,7 +233,7 @@ func (node *VectorSelector) String() string {
labelStrings := make([]string, 0, len(node.LabelMatchers)-1)
for _, matcher := range node.LabelMatchers {
// Only include the __name__ label if it is not an equality match.
if matcher.Name == clientmodel.MetricNameLabel && matcher.Type == metric.Equal {
if matcher.Name == model.MetricNameLabel && matcher.Type == metric.Equal {
continue
}
labelStrings = append(labelStrings, matcher.String())
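
A standalone sketch of the formatting rule above, restricted to equality matchers for brevity (Matcher is a simplified stand-in for metric.LabelMatcher, so the exact output of the real String method may differ): an equality matcher on __name__ becomes the bare metric name and everything else is rendered inside braces.

package main

import (
	"fmt"
	"strings"

	"github.com/prometheus/common/model"
)

// Matcher is a simplified stand-in for metric.LabelMatcher (equality only).
type Matcher struct {
	Name  model.LabelName
	Value model.LabelValue
}

// formatSelector folds an equality match on __name__ into the metric name
// and prints the remaining matchers inside braces.
func formatSelector(matchers []Matcher) string {
	name := ""
	parts := make([]string, 0, len(matchers))
	for _, m := range matchers {
		if m.Name == model.MetricNameLabel {
			name = string(m.Value)
			continue
		}
		parts = append(parts, fmt.Sprintf("%s=%q", m.Name, m.Value))
	}
	if len(parts) == 0 {
		return name
	}
	return name + "{" + strings.Join(parts, ",") + "}"
}

func main() {
	fmt.Println(formatSelector([]Matcher{
		{Name: model.MetricNameLabel, Value: "foo"},
		{Name: "job", Value: "prometheus"},
	})) // foo{job="prometheus"}
}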

View file

@ -17,21 +17,21 @@ import (
"math"
"sort"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
// Helpers to calculate quantiles.
// excludedLabels are the labels to exclude from signature calculation for
// quantiles.
var excludedLabels = map[clientmodel.LabelName]struct{}{
clientmodel.MetricNameLabel: {},
clientmodel.BucketLabel: {},
var excludedLabels = map[model.LabelName]struct{}{
model.MetricNameLabel: {},
model.BucketLabel: {},
}
type bucket struct {
upperBound float64
count clientmodel.SampleValue
count model.SampleValue
}
// buckets implements sort.Interface.
@ -42,7 +42,7 @@ func (b buckets) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
func (b buckets) Less(i, j int) bool { return b[i].upperBound < b[j].upperBound }
type metricWithBuckets struct {
metric clientmodel.COWMetric
metric model.COWMetric
buckets buckets
}
@ -68,7 +68,7 @@ type metricWithBuckets struct {
// If q<0, -Inf is returned.
//
// If q>1, +Inf is returned.
func quantile(q clientmodel.SampleValue, buckets buckets) float64 {
func quantile(q model.SampleValue, buckets buckets) float64 {
if q < 0 {
return math.Inf(-1)
}
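
To make the guard clauses above concrete, here is a sketch of the bucket-quantile computation using the bucket and buckets types declared above (math and sort are already imported in this file). Only the q<0 and q>1 behaviour is taken from this hunk; the linear interpolation inside the target bucket follows the usual histogram_quantile approach and is an assumption, as is the helper name.

func sketchQuantile(q model.SampleValue, b buckets) float64 {
	if q < 0 {
		return math.Inf(-1)
	}
	if q > 1 {
		return math.Inf(+1)
	}
	sort.Sort(b)
	total := b[len(b)-1].count // the highest (+Inf) bucket holds the total count
	rank := q * total
	// First bucket whose cumulative count reaches the target rank.
	i := sort.Search(len(b)-1, func(i int) bool { return b[i].count >= rank })
	if i == 0 {
		// Assume a lower bound of 0 for the first bucket.
		return b[0].upperBound * float64(rank/b[0].count)
	}
	lo, hi := b[i-1], b[i]
	return lo.upperBound + (hi.upperBound-lo.upperBound)*float64((rank-lo.count)/(hi.count-lo.count))
}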

View file

@ -22,7 +22,7 @@ import (
"strings"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/local"
@ -40,7 +40,7 @@ var (
)
const (
testStartTime = clientmodel.Timestamp(0)
testStartTime = model.Time(0)
epsilon = 0.000001 // Relative error allowed for sample values.
maxErrorCount = 10
)
@ -165,7 +165,7 @@ func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
break
}
if f, err := parseNumber(defLine); err == nil {
cmd.expect(0, nil, sequenceValue{value: clientmodel.SampleValue(f)})
cmd.expect(0, nil, sequenceValue{value: model.SampleValue(f)})
break
}
metric, vals, err := parseSeriesDesc(defLine)
@ -238,15 +238,15 @@ func (*evalCmd) testCmd() {}
// metrics into the storage.
type loadCmd struct {
gap time.Duration
metrics map[clientmodel.Fingerprint]clientmodel.Metric
defs map[clientmodel.Fingerprint]metric.Values
metrics map[model.Fingerprint]model.Metric
defs map[model.Fingerprint]metric.Values
}
func newLoadCmd(gap time.Duration) *loadCmd {
return &loadCmd{
gap: gap,
metrics: map[clientmodel.Fingerprint]clientmodel.Metric{},
defs: map[clientmodel.Fingerprint]metric.Values{},
metrics: map[model.Fingerprint]model.Metric{},
defs: map[model.Fingerprint]metric.Values{},
}
}
@ -255,7 +255,7 @@ func (cmd loadCmd) String() string {
}
// set a sequence of sample values for the given metric.
func (cmd *loadCmd) set(m clientmodel.Metric, vals ...sequenceValue) {
func (cmd *loadCmd) set(m model.Metric, vals ...sequenceValue) {
fp := m.Fingerprint()
samples := make(metric.Values, 0, len(vals))
@ -278,7 +278,7 @@ func (cmd *loadCmd) append(a storage.SampleAppender) {
for fp, samples := range cmd.defs {
met := cmd.metrics[fp]
for _, smpl := range samples {
s := &clientmodel.Sample{
s := &model.Sample{
Metric: met,
Value: smpl.Value,
Timestamp: smpl.Timestamp,
@ -292,14 +292,14 @@ func (cmd *loadCmd) append(a storage.SampleAppender) {
// and expects a specific result.
type evalCmd struct {
expr Expr
start, end clientmodel.Timestamp
start, end model.Time
interval time.Duration
instant bool
fail, ordered bool
metrics map[clientmodel.Fingerprint]clientmodel.Metric
expected map[clientmodel.Fingerprint]entry
metrics map[model.Fingerprint]model.Metric
expected map[model.Fingerprint]entry
}
type entry struct {
@ -311,7 +311,7 @@ func (e entry) String() string {
return fmt.Sprintf("%d: %s", e.pos, e.vals)
}
func newEvalCmd(expr Expr, start, end clientmodel.Timestamp, interval time.Duration) *evalCmd {
func newEvalCmd(expr Expr, start, end model.Time, interval time.Duration) *evalCmd {
return &evalCmd{
expr: expr,
start: start,
@ -319,8 +319,8 @@ func newEvalCmd(expr Expr, start, end clientmodel.Timestamp, interval time.Durat
interval: interval,
instant: start == end && interval == 0,
metrics: map[clientmodel.Fingerprint]clientmodel.Metric{},
expected: map[clientmodel.Fingerprint]entry{},
metrics: map[model.Fingerprint]model.Metric{},
expected: map[model.Fingerprint]entry{},
}
}
@ -330,7 +330,7 @@ func (ev *evalCmd) String() string {
// expect adds a new metric with a sequence of values to the set of expected
// results for the query.
func (ev *evalCmd) expect(pos int, m clientmodel.Metric, vals ...sequenceValue) {
func (ev *evalCmd) expect(pos int, m model.Metric, vals ...sequenceValue) {
if m == nil {
ev.expected[0] = entry{pos: pos, vals: vals}
return
@ -347,7 +347,7 @@ func (ev *evalCmd) compareResult(result Value) error {
if ev.instant {
return fmt.Errorf("received range result on instant evaluation")
}
seen := map[clientmodel.Fingerprint]bool{}
seen := map[model.Fingerprint]bool{}
for pos, v := range val {
fp := v.Metric.Metric.Fingerprint()
if _, ok := ev.metrics[fp]; !ok {
@ -374,7 +374,7 @@ func (ev *evalCmd) compareResult(result Value) error {
if !ev.instant {
fmt.Errorf("received instant result on range evaluation")
}
seen := map[clientmodel.Fingerprint]bool{}
seen := map[model.Fingerprint]bool{}
for pos, v := range val {
fp := v.Metric.Metric.Fingerprint()
if _, ok := ev.metrics[fp]; !ok {
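
The load and eval bookkeeping above keys metrics and expected values by model.Fingerprint; a minimal, self-contained sketch of that pattern with the migrated package (the metric contents are invented):

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	// Index metrics by fingerprint, as loadCmd.metrics and evalCmd.metrics do.
	metrics := map[model.Fingerprint]model.Metric{}

	m := model.Metric{
		model.MetricNameLabel: "http_requests_total",
		"job":                 "api",
	}
	fp := m.Fingerprint()
	metrics[fp] = m

	fmt.Printf("%v -> %s\n", fp, metrics[fp])
}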

View file

@ -24,7 +24,7 @@ import (
"github.com/prometheus/log"
consul "github.com/hashicorp/consul/api"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
)
@ -34,21 +34,21 @@ const (
consulRetryInterval = 15 * time.Second
// ConsulAddressLabel is the name for the label containing a target's address.
ConsulAddressLabel = clientmodel.MetaLabelPrefix + "consul_address"
ConsulAddressLabel = model.MetaLabelPrefix + "consul_address"
// ConsulNodeLabel is the name for the label containing a target's node name.
ConsulNodeLabel = clientmodel.MetaLabelPrefix + "consul_node"
ConsulNodeLabel = model.MetaLabelPrefix + "consul_node"
// ConsulTagsLabel is the name of the label containing the tags assigned to the target.
ConsulTagsLabel = clientmodel.MetaLabelPrefix + "consul_tags"
ConsulTagsLabel = model.MetaLabelPrefix + "consul_tags"
// ConsulServiceLabel is the name of the label containing the service name.
ConsulServiceLabel = clientmodel.MetaLabelPrefix + "consul_service"
ConsulServiceLabel = model.MetaLabelPrefix + "consul_service"
// ConsulServiceAddressLabel is the name of the label containing the (optional) service address.
ConsulServiceAddressLabel = clientmodel.MetaLabelPrefix + "consul_service_address"
ConsulServiceAddressLabel = model.MetaLabelPrefix + "consul_service_address"
// ConsulServicePortLabel is the name of the label containing the service port.
ConsulServicePortLabel = clientmodel.MetaLabelPrefix + "consul_service_port"
ConsulServicePortLabel = model.MetaLabelPrefix + "consul_service_port"
// ConsulDCLabel is the name of the label containing the datacenter ID.
ConsulDCLabel = clientmodel.MetaLabelPrefix + "consul_dc"
ConsulDCLabel = model.MetaLabelPrefix + "consul_dc"
// ConsulServiceIDLabel is the name of the label containing the service ID.
ConsulServiceIDLabel = clientmodel.MetaLabelPrefix + "consul_service_id"
ConsulServiceIDLabel = model.MetaLabelPrefix + "consul_service_id"
)
// ConsulDiscovery retrieves target information from a Consul server
@ -226,9 +226,9 @@ func (cd *ConsulDiscovery) watchServices(update chan<- *consulService, done <-ch
srv.tgroup.Source = name
cd.services[name] = srv
}
srv.tgroup.Labels = clientmodel.LabelSet{
ConsulServiceLabel: clientmodel.LabelValue(name),
ConsulDCLabel: clientmodel.LabelValue(cd.clientDatacenter),
srv.tgroup.Labels = model.LabelSet{
ConsulServiceLabel: model.LabelValue(name),
ConsulDCLabel: model.LabelValue(cd.clientDatacenter),
}
update <- srv
}
@ -263,7 +263,7 @@ func (cd *ConsulDiscovery) watchService(srv *consulService, ch chan<- *config.Ta
continue
}
srv.lastIndex = meta.LastIndex
srv.tgroup.Targets = make([]clientmodel.LabelSet, 0, len(nodes))
srv.tgroup.Targets = make([]model.LabelSet, 0, len(nodes))
for _, node := range nodes {
addr := fmt.Sprintf("%s:%d", node.Address, node.ServicePort)
@ -271,14 +271,14 @@ func (cd *ConsulDiscovery) watchService(srv *consulService, ch chan<- *config.Ta
// in relabeling rules don't have to consider tag positions.
tags := cd.tagSeparator + strings.Join(node.ServiceTags, cd.tagSeparator) + cd.tagSeparator
srv.tgroup.Targets = append(srv.tgroup.Targets, clientmodel.LabelSet{
clientmodel.AddressLabel: clientmodel.LabelValue(addr),
ConsulAddressLabel: clientmodel.LabelValue(node.Address),
ConsulNodeLabel: clientmodel.LabelValue(node.Node),
ConsulTagsLabel: clientmodel.LabelValue(tags),
ConsulServiceAddressLabel: clientmodel.LabelValue(node.ServiceAddress),
ConsulServicePortLabel: clientmodel.LabelValue(strconv.Itoa(node.ServicePort)),
ConsulServiceIDLabel: clientmodel.LabelValue(node.ServiceID),
srv.tgroup.Targets = append(srv.tgroup.Targets, model.LabelSet{
model.AddressLabel: model.LabelValue(addr),
ConsulAddressLabel: model.LabelValue(node.Address),
ConsulNodeLabel: model.LabelValue(node.Node),
ConsulTagsLabel: model.LabelValue(tags),
ConsulServiceAddressLabel: model.LabelValue(node.ServiceAddress),
ConsulServicePortLabel: model.LabelValue(strconv.Itoa(node.ServicePort)),
ConsulServiceIDLabel: model.LabelValue(node.ServiceID),
})
}
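
For a concrete picture, a single discovered Consul node ends up as a label set of the following shape; every value below is invented:

target := model.LabelSet{
	model.AddressLabel:        "10.0.0.5:8080",
	ConsulAddressLabel:        "10.0.0.5",
	ConsulNodeLabel:           "node-1",
	ConsulTagsLabel:           ",primary,metrics,", // tags wrapped in the separator
	ConsulServiceAddressLabel: "10.0.0.5",
	ConsulServicePortLabel:    "8080",
	ConsulServiceIDLabel:      "web-1",
}
// Appended to srv.tgroup.Targets; relabeling rules then typically map the
// __meta_consul_* labels onto job-level labels.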

View file

@ -24,7 +24,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
)
@ -32,7 +32,7 @@ import (
const (
resolvConf = "/etc/resolv.conf"
DNSNameLabel = clientmodel.MetaLabelPrefix + "dns_name"
DNSNameLabel = model.MetaLabelPrefix + "dns_name"
// Constants for instrumentation.
namespace = "prometheus"
@ -144,25 +144,25 @@ func (dd *DNSDiscovery) refresh(name string, ch chan<- *config.TargetGroup) erro
tg := &config.TargetGroup{}
for _, record := range response.Answer {
target := clientmodel.LabelValue("")
target := model.LabelValue("")
switch addr := record.(type) {
case *dns.SRV:
// Remove the final dot from rooted DNS names to make them look more usual.
addr.Target = strings.TrimRight(addr.Target, ".")
target = clientmodel.LabelValue(fmt.Sprintf("%s:%d", addr.Target, addr.Port))
target = model.LabelValue(fmt.Sprintf("%s:%d", addr.Target, addr.Port))
case *dns.A:
target = clientmodel.LabelValue(fmt.Sprintf("%s:%d", addr.A, dd.port))
target = model.LabelValue(fmt.Sprintf("%s:%d", addr.A, dd.port))
case *dns.AAAA:
target = clientmodel.LabelValue(fmt.Sprintf("%s:%d", addr.AAAA, dd.port))
target = model.LabelValue(fmt.Sprintf("%s:%d", addr.AAAA, dd.port))
default:
log.Warnf("%q is not a valid SRV record", record)
continue
}
tg.Targets = append(tg.Targets, clientmodel.LabelSet{
clientmodel.AddressLabel: target,
DNSNameLabel: clientmodel.LabelValue(name),
tg.Targets = append(tg.Targets, model.LabelSet{
model.AddressLabel: target,
DNSNameLabel: model.LabelValue(name),
})
}
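
As a usage sketch, this is roughly how one resolved answer turns into a target outside the DNSDiscovery plumbing; the host, port and query name are invented and the helper itself is not part of this file:

func targetFromSRV(host string, port uint16, qname string) model.LabelSet {
	return model.LabelSet{
		model.AddressLabel: model.LabelValue(fmt.Sprintf("%s:%d", host, port)),
		DNSNameLabel:       model.LabelValue(qname),
	}
}

// targetFromSRV("web1.example.org", 9100, "_prometheus._tcp.example.org") yields
// {__address__="web1.example.org:9100", __meta_dns_name="_prometheus._tcp.example.org"}.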

View file

@ -25,12 +25,12 @@ import (
"gopkg.in/fsnotify.v1"
"gopkg.in/yaml.v2"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
)
const FileSDFilepathLabel = clientmodel.MetaLabelPrefix + "filepath"
const FileSDFilepathLabel = model.MetaLabelPrefix + "filepath"
// FileDiscovery provides service discovery functionality based
// on files that contain target groups in JSON or YAML format. Refreshing
@ -244,9 +244,9 @@ func readFile(filename string) ([]*config.TargetGroup, error) {
for i, tg := range targetGroups {
tg.Source = fileSource(filename, i)
if tg.Labels == nil {
tg.Labels = clientmodel.LabelSet{}
tg.Labels = model.LabelSet{}
}
tg.Labels[FileSDFilepathLabel] = clientmodel.LabelValue(filename)
tg.Labels[FileSDFilepathLabel] = model.LabelValue(filename)
}
return targetGroups, nil
}
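
For illustration, a hypothetical source file targets/web.json containing [{"targets": ["10.0.0.1:9100"], "labels": {"env": "prod"}}] is read into a group roughly equivalent to the literal below, with the originating path attached via FileSDFilepathLabel (the group's Source field, built by fileSource, is omitted since its exact format is not shown in this hunk):

tg := &config.TargetGroup{
	Targets: []model.LabelSet{
		{model.AddressLabel: "10.0.0.1:9100"},
	},
	Labels: model.LabelSet{
		"env":               "prod",
		FileSDFilepathLabel: "targets/web.json",
	},
}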

View file

@ -25,7 +25,7 @@ import (
"sync"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/log"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/util/httputil"
@ -37,7 +37,7 @@ const (
// metaLabelPrefix is the meta prefix used for all meta labels
// in this discovery.
metaLabelPrefix = clientmodel.MetaLabelPrefix + "kubernetes_"
metaLabelPrefix = model.MetaLabelPrefix + "kubernetes_"
// nodeLabel is the name for the label containing a target's node name.
nodeLabel = metaLabelPrefix + "node"
// serviceNamespaceLabel is the name for the label containing a target's service namespace.
@ -224,13 +224,13 @@ func (kd *KubernetesDiscovery) updateNodesTargetGroup() *config.TargetGroup {
for nodeName, node := range kd.nodes {
address := fmt.Sprintf("%s:%d", node.Status.Addresses[0].Address, kd.Conf.KubeletPort)
t := clientmodel.LabelSet{
clientmodel.AddressLabel: clientmodel.LabelValue(address),
nodeLabel: clientmodel.LabelValue(nodeName),
t := model.LabelSet{
model.AddressLabel: model.LabelValue(address),
nodeLabel: model.LabelValue(nodeName),
}
for k, v := range node.ObjectMeta.Labels {
labelName := strutil.SanitizeLabelName(nodeLabelPrefix + k)
t[clientmodel.LabelName(labelName)] = clientmodel.LabelValue(v)
t[model.LabelName(labelName)] = model.LabelValue(v)
}
tg.Targets = append(tg.Targets, t)
}
@ -397,20 +397,20 @@ func (kd *KubernetesDiscovery) addService(service *Service) *config.TargetGroup
func (kd *KubernetesDiscovery) updateServiceTargetGroup(service *Service, endpoints *Endpoints) *config.TargetGroup {
tg := &config.TargetGroup{
Source: serviceSource(service),
Labels: clientmodel.LabelSet{
serviceNamespaceLabel: clientmodel.LabelValue(service.ObjectMeta.Namespace),
serviceNameLabel: clientmodel.LabelValue(service.ObjectMeta.Name),
Labels: model.LabelSet{
serviceNamespaceLabel: model.LabelValue(service.ObjectMeta.Namespace),
serviceNameLabel: model.LabelValue(service.ObjectMeta.Name),
},
}
for k, v := range service.ObjectMeta.Labels {
labelName := strutil.SanitizeLabelName(serviceLabelPrefix + k)
tg.Labels[clientmodel.LabelName(labelName)] = clientmodel.LabelValue(v)
tg.Labels[model.LabelName(labelName)] = model.LabelValue(v)
}
for k, v := range service.ObjectMeta.Annotations {
labelName := strutil.SanitizeLabelName(serviceAnnotationPrefix + k)
tg.Labels[clientmodel.LabelName(labelName)] = clientmodel.LabelValue(v)
tg.Labels[model.LabelName(labelName)] = model.LabelValue(v)
}
// Now let's loop through the endpoints & add them to the target group with appropriate labels.
@ -424,7 +424,7 @@ func (kd *KubernetesDiscovery) updateServiceTargetGroup(service *Service, endpoi
}
address := fmt.Sprintf("%s:%d", ipAddr, epPort)
t := clientmodel.LabelSet{clientmodel.AddressLabel: clientmodel.LabelValue(address)}
t := model.LabelSet{model.AddressLabel: model.LabelValue(address)}
tg.Targets = append(tg.Targets, t)
}
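
To make the sanitizing step concrete: assuming nodeLabelPrefix is a "__meta_kubernetes_node_label_"-style prefix (its definition is outside this hunk) and strutil.SanitizeLabelName replaces characters that are invalid in label names with underscores, a node label is carried onto the target t built above roughly like this; the key and value are invented:

k, v := "kubernetes.io/hostname", "worker-1"
name := strutil.SanitizeLabelName(nodeLabelPrefix + k)
// name is now something like "__meta_kubernetes_node_label_kubernetes_io_hostname".
t[model.LabelName(name)] = model.LabelValue(v)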

View file

@ -14,19 +14,19 @@
package marathon
import (
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
const (
// metaLabelPrefix is the meta prefix used for all meta labels in this discovery.
metaLabelPrefix = clientmodel.MetaLabelPrefix + "marathon_"
metaLabelPrefix = model.MetaLabelPrefix + "marathon_"
// appLabelPrefix is the prefix for the application labels.
appLabelPrefix = metaLabelPrefix + "app_label_"
// appLabel is used for the name of the app in Marathon.
appLabel clientmodel.LabelName = metaLabelPrefix + "app"
appLabel model.LabelName = metaLabelPrefix + "app"
// imageLabel is the label that is used for the docker image running the service.
imageLabel clientmodel.LabelName = metaLabelPrefix + "image"
imageLabel model.LabelName = metaLabelPrefix + "image"
// taskLabel contains the mesos task name of the app instance.
taskLabel clientmodel.LabelName = metaLabelPrefix + "task"
taskLabel model.LabelName = metaLabelPrefix + "task"
)

View file

@ -16,7 +16,7 @@ package marathon
import (
"fmt"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
)
@ -34,12 +34,12 @@ func AppsToTargetGroups(apps *AppList) map[string]*config.TargetGroup {
func createTargetGroup(app *App) *config.TargetGroup {
var (
targets = targetsForApp(app)
appName = clientmodel.LabelValue(app.ID)
image = clientmodel.LabelValue(app.Container.Docker.Image)
appName = model.LabelValue(app.ID)
image = model.LabelValue(app.Container.Docker.Image)
)
tg := &config.TargetGroup{
Targets: targets,
Labels: clientmodel.LabelSet{
Labels: model.LabelSet{
appLabel: appName,
imageLabel: image,
},
@ -48,19 +48,19 @@ func createTargetGroup(app *App) *config.TargetGroup {
for ln, lv := range app.Labels {
ln = appLabelPrefix + ln
tg.Labels[clientmodel.LabelName(ln)] = clientmodel.LabelValue(lv)
tg.Labels[model.LabelName(ln)] = model.LabelValue(lv)
}
return tg
}
func targetsForApp(app *App) []clientmodel.LabelSet {
targets := make([]clientmodel.LabelSet, 0, len(app.Tasks))
func targetsForApp(app *App) []model.LabelSet {
targets := make([]model.LabelSet, 0, len(app.Tasks))
for _, t := range app.Tasks {
target := targetForTask(&t)
targets = append(targets, clientmodel.LabelSet{
clientmodel.AddressLabel: clientmodel.LabelValue(target),
taskLabel: clientmodel.LabelValue(t.ID),
targets = append(targets, model.LabelSet{
model.AddressLabel: model.LabelValue(target),
taskLabel: model.LabelValue(t.ID),
})
}
return targets
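
As a concrete example, an app with two running tasks would yield target label sets of this shape (hosts, ports and task IDs are invented; targetForTask, which joins host and port, is outside this hunk):

targets := []model.LabelSet{
	{model.AddressLabel: "mesos-slave1:31000", taskLabel: "app.task-id-1"},
	{model.AddressLabel: "mesos-slave2:31001", taskLabel: "app.task-id-2"},
}
// This slice is what targetsForApp returns and what ends up as the group's Targets.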

View file

@ -18,7 +18,7 @@ import (
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/retrieval/discovery/marathon"
@ -104,8 +104,8 @@ func TestMarathonSDSendGroup(t *testing.T) {
t.Fatalf("Wrong number of targets: %v", tg.Targets)
}
tgt := tg.Targets[0]
if tgt[clientmodel.AddressLabel] != "mesos-slave1:31000" {
t.Fatalf("Wrong target address: %s", tgt[clientmodel.AddressLabel])
if tgt[model.AddressLabel] != "mesos-slave1:31000" {
t.Fatalf("Wrong target address: %s", tgt[model.AddressLabel])
}
default:
t.Fatal("Did not get a target group.")

View file

@ -24,7 +24,7 @@ import (
"github.com/prometheus/log"
"github.com/samuel/go-zookeeper/zk"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/util/strutil"
@ -33,7 +33,7 @@ import (
const (
serversetNodePrefix = "member_"
serversetLabelPrefix = clientmodel.MetaLabelPrefix + "serverset_"
serversetLabelPrefix = model.MetaLabelPrefix + "serverset_"
serversetStatusLabel = serversetLabelPrefix + "status"
serversetPathLabel = serversetLabelPrefix + "path"
serversetEndpointLabelPrefix = serversetLabelPrefix + "endpoint"
@ -110,7 +110,7 @@ func (sd *ServersetDiscovery) processUpdates() {
if event.Data != nil {
labelSet, err := parseServersetMember(*event.Data, event.Path)
if err == nil {
tg.Targets = []clientmodel.LabelSet{*labelSet}
tg.Targets = []model.LabelSet{*labelSet}
sd.sources[event.Path] = tg
} else {
delete(sd.sources, event.Path)
@ -144,31 +144,31 @@ func (sd *ServersetDiscovery) Run(ch chan<- *config.TargetGroup, done <-chan str
sd.treeCache.Stop()
}
func parseServersetMember(data []byte, path string) (*clientmodel.LabelSet, error) {
func parseServersetMember(data []byte, path string) (*model.LabelSet, error) {
member := serversetMember{}
err := json.Unmarshal(data, &member)
if err != nil {
return nil, fmt.Errorf("error unmarshaling serverset member %q: %s", path, err)
}
labels := clientmodel.LabelSet{}
labels[serversetPathLabel] = clientmodel.LabelValue(path)
labels[clientmodel.AddressLabel] = clientmodel.LabelValue(
labels := model.LabelSet{}
labels[serversetPathLabel] = model.LabelValue(path)
labels[model.AddressLabel] = model.LabelValue(
fmt.Sprintf("%s:%d", member.ServiceEndpoint.Host, member.ServiceEndpoint.Port))
labels[serversetEndpointLabelPrefix+"_host"] = clientmodel.LabelValue(member.ServiceEndpoint.Host)
labels[serversetEndpointLabelPrefix+"_port"] = clientmodel.LabelValue(fmt.Sprintf("%d", member.ServiceEndpoint.Port))
labels[serversetEndpointLabelPrefix+"_host"] = model.LabelValue(member.ServiceEndpoint.Host)
labels[serversetEndpointLabelPrefix+"_port"] = model.LabelValue(fmt.Sprintf("%d", member.ServiceEndpoint.Port))
for name, endpoint := range member.AdditionalEndpoints {
cleanName := clientmodel.LabelName(strutil.SanitizeLabelName(name))
labels[serversetEndpointLabelPrefix+"_host_"+cleanName] = clientmodel.LabelValue(
cleanName := model.LabelName(strutil.SanitizeLabelName(name))
labels[serversetEndpointLabelPrefix+"_host_"+cleanName] = model.LabelValue(
endpoint.Host)
labels[serversetEndpointLabelPrefix+"_port_"+cleanName] = clientmodel.LabelValue(
labels[serversetEndpointLabelPrefix+"_port_"+cleanName] = model.LabelValue(
fmt.Sprintf("%d", endpoint.Port))
}
labels[serversetStatusLabel] = clientmodel.LabelValue(member.Status)
labels[serversetStatusLabel] = model.LabelValue(member.Status)
return &labels, nil
}
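
For orientation, a member znode at the invented path /services/web/member_0000000001 whose data is roughly {"serviceEndpoint": {"host": "10.0.0.7", "port": 8080}, "additionalEndpoints": {"http-admin": {"host": "10.0.0.7", "port": 8081}}, "status": "ALIVE"} would be parsed into a label set of this shape. The JSON field names are an assumption (only the Go struct fields appear in this hunk), and the sanitized endpoint name assumes SanitizeLabelName turns "http-admin" into "http_admin":

labels := model.LabelSet{
	model.AddressLabel:   "10.0.0.7:8080",
	serversetPathLabel:   "/services/web/member_0000000001",
	serversetStatusLabel: "ALIVE",
	serversetEndpointLabelPrefix + "_host":            "10.0.0.7",
	serversetEndpointLabelPrefix + "_port":            "8080",
	serversetEndpointLabelPrefix + "_host_http_admin": "10.0.0.7",
	serversetEndpointLabelPrefix + "_port_http_admin": "8081",
}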

View file

@ -16,28 +16,28 @@ package retrieval
import (
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
)
type nopAppender struct{}
func (a nopAppender) Append(*clientmodel.Sample) {
func (a nopAppender) Append(*model.Sample) {
}
type slowAppender struct{}
func (a slowAppender) Append(*clientmodel.Sample) {
func (a slowAppender) Append(*model.Sample) {
time.Sleep(time.Millisecond)
return
}
type collectResultAppender struct {
result clientmodel.Samples
result model.Samples
}
func (a *collectResultAppender) Append(s *clientmodel.Sample) {
func (a *collectResultAppender) Append(s *model.Sample) {
for ln, lv := range s.Metric {
if len(lv) == 0 {
delete(s.Metric, ln)

View file

@ -5,7 +5,7 @@ import (
"fmt"
"strings"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
)
@ -13,8 +13,8 @@ import (
// Relabel returns a relabeled copy of the given label set. The relabel configurations
// are applied in order of input.
// If a label set is dropped, nil is returned.
func Relabel(labels clientmodel.LabelSet, cfgs ...*config.RelabelConfig) (clientmodel.LabelSet, error) {
out := clientmodel.LabelSet{}
func Relabel(labels model.LabelSet, cfgs ...*config.RelabelConfig) (model.LabelSet, error) {
out := model.LabelSet{}
for ln, lv := range labels {
out[ln] = lv
}
@ -30,7 +30,7 @@ func Relabel(labels clientmodel.LabelSet, cfgs ...*config.RelabelConfig) (client
return out, nil
}
func relabel(labels clientmodel.LabelSet, cfg *config.RelabelConfig) (clientmodel.LabelSet, error) {
func relabel(labels model.LabelSet, cfg *config.RelabelConfig) (model.LabelSet, error) {
values := make([]string, 0, len(cfg.SourceLabels))
for _, ln := range cfg.SourceLabels {
values = append(values, string(labels[ln]))
@ -56,13 +56,13 @@ func relabel(labels clientmodel.LabelSet, cfg *config.RelabelConfig) (clientmode
if len(res) == 0 {
delete(labels, cfg.TargetLabel)
} else {
labels[cfg.TargetLabel] = clientmodel.LabelValue(res)
labels[cfg.TargetLabel] = model.LabelValue(res)
}
case config.RelabelHashMod:
mod := sum64(md5.Sum([]byte(val))) % cfg.Modulus
labels[cfg.TargetLabel] = clientmodel.LabelValue(fmt.Sprintf("%d", mod))
labels[cfg.TargetLabel] = model.LabelValue(fmt.Sprintf("%d", mod))
case config.RelabelLabelMap:
out := make(clientmodel.LabelSet, len(labels))
out := make(model.LabelSet, len(labels))
// Take a copy to avoid infinite loops.
for ln, lv := range labels {
out[ln] = lv
@ -70,7 +70,7 @@ func relabel(labels clientmodel.LabelSet, cfg *config.RelabelConfig) (clientmode
for ln, lv := range labels {
if cfg.Regex.MatchString(string(ln)) {
res := cfg.Regex.ReplaceAllString(string(ln), cfg.Replacement)
out[clientmodel.LabelName(res)] = lv
out[model.LabelName(res)] = lv
}
}
labels = out
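
A minimal usage sketch for Relabel with the migrated types; the input labels, regex and target label are invented, error handling is elided, and the regexp package would need to be imported here:

labels := model.LabelSet{model.AddressLabel: "10.0.0.1:9100"}

out, _ := Relabel(labels, &config.RelabelConfig{
	SourceLabels: model.LabelNames{model.AddressLabel},
	Regex:        &config.Regexp{*regexp.MustCompile(`([^:]+):\d+`)},
	TargetLabel:  model.LabelName("host"),
	Separator:    ";",
	Replacement:  "$1",
	Action:       config.RelabelReplace,
})
fmt.Println(out) // now carries host="10.0.0.1" in addition to the original __address__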

View file

@ -5,34 +5,34 @@ import (
"regexp"
"testing"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
)
func TestRelabel(t *testing.T) {
tests := []struct {
input clientmodel.LabelSet
input model.LabelSet
relabel []*config.RelabelConfig
output clientmodel.LabelSet
output model.LabelSet
}{
{
input: clientmodel.LabelSet{
input: model.LabelSet{
"a": "foo",
"b": "bar",
"c": "baz",
},
relabel: []*config.RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"a"},
SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("f(.*)")},
TargetLabel: clientmodel.LabelName("d"),
TargetLabel: model.LabelName("d"),
Separator: ";",
Replacement: "ch${1}-ch${1}",
Action: config.RelabelReplace,
},
},
output: clientmodel.LabelSet{
output: model.LabelSet{
"a": "foo",
"b": "bar",
"c": "baz",
@ -40,30 +40,30 @@ func TestRelabel(t *testing.T) {
},
},
{
input: clientmodel.LabelSet{
input: model.LabelSet{
"a": "foo",
"b": "bar",
"c": "baz",
},
relabel: []*config.RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"a", "b"},
SourceLabels: model.LabelNames{"a", "b"},
Regex: &config.Regexp{*regexp.MustCompile("^f(.*);(.*)r$")},
TargetLabel: clientmodel.LabelName("a"),
TargetLabel: model.LabelName("a"),
Separator: ";",
Replacement: "b${1}${2}m", // boobam
Action: config.RelabelReplace,
},
{
SourceLabels: clientmodel.LabelNames{"c", "a"},
SourceLabels: model.LabelNames{"c", "a"},
Regex: &config.Regexp{*regexp.MustCompile("(b).*b(.*)ba(.*)")},
TargetLabel: clientmodel.LabelName("d"),
TargetLabel: model.LabelName("d"),
Separator: ";",
Replacement: "$1$2$2$3",
Action: config.RelabelReplace,
},
},
output: clientmodel.LabelSet{
output: model.LabelSet{
"a": "boobam",
"b": "bar",
"c": "baz",
@ -71,18 +71,18 @@ func TestRelabel(t *testing.T) {
},
},
{
input: clientmodel.LabelSet{
input: model.LabelSet{
"a": "foo",
},
relabel: []*config.RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"a"},
SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("o$")},
Action: config.RelabelDrop,
}, {
SourceLabels: clientmodel.LabelNames{"a"},
SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("f(.*)")},
TargetLabel: clientmodel.LabelName("d"),
TargetLabel: model.LabelName("d"),
Separator: ";",
Replacement: "ch$1-ch$1",
Action: config.RelabelReplace,
@ -91,46 +91,46 @@ func TestRelabel(t *testing.T) {
output: nil,
},
{
input: clientmodel.LabelSet{
input: model.LabelSet{
"a": "abc",
},
relabel: []*config.RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"a"},
SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("(b)")},
TargetLabel: clientmodel.LabelName("d"),
TargetLabel: model.LabelName("d"),
Separator: ";",
Replacement: "$1",
Action: config.RelabelReplace,
},
},
output: clientmodel.LabelSet{
output: model.LabelSet{
"a": "abc",
"d": "b",
},
},
{
input: clientmodel.LabelSet{
input: model.LabelSet{
"a": "foo",
},
relabel: []*config.RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"a"},
SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("no-match")},
Action: config.RelabelDrop,
},
},
output: clientmodel.LabelSet{
output: model.LabelSet{
"a": "foo",
},
},
{
input: clientmodel.LabelSet{
input: model.LabelSet{
"a": "foo",
},
relabel: []*config.RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"a"},
SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("no-match")},
Action: config.RelabelKeep,
},
@ -138,54 +138,54 @@ func TestRelabel(t *testing.T) {
output: nil,
},
{
input: clientmodel.LabelSet{
input: model.LabelSet{
"a": "foo",
},
relabel: []*config.RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"a"},
SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("^f")},
Action: config.RelabelKeep,
},
},
output: clientmodel.LabelSet{
output: model.LabelSet{
"a": "foo",
},
},
{
// No replacement must be applied if there is no match.
input: clientmodel.LabelSet{
input: model.LabelSet{
"a": "boo",
},
relabel: []*config.RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"a"},
SourceLabels: model.LabelNames{"a"},
Regex: &config.Regexp{*regexp.MustCompile("^f")},
TargetLabel: clientmodel.LabelName("b"),
TargetLabel: model.LabelName("b"),
Replacement: "bar",
Action: config.RelabelReplace,
},
},
output: clientmodel.LabelSet{
output: model.LabelSet{
"a": "boo",
},
},
{
input: clientmodel.LabelSet{
input: model.LabelSet{
"a": "foo",
"b": "bar",
"c": "baz",
},
relabel: []*config.RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"c"},
TargetLabel: clientmodel.LabelName("d"),
SourceLabels: model.LabelNames{"c"},
TargetLabel: model.LabelName("d"),
Separator: ";",
Action: config.RelabelHashMod,
Modulus: 1000,
},
},
output: clientmodel.LabelSet{
output: model.LabelSet{
"a": "foo",
"b": "bar",
"c": "baz",
@ -193,7 +193,7 @@ func TestRelabel(t *testing.T) {
},
},
{
input: clientmodel.LabelSet{
input: model.LabelSet{
"a": "foo",
"b1": "bar",
"b2": "baz",
@ -205,7 +205,7 @@ func TestRelabel(t *testing.T) {
Action: config.RelabelLabelMap,
},
},
output: clientmodel.LabelSet{
output: model.LabelSet{
"a": "foo",
"b1": "bar",
"b2": "baz",
@ -214,7 +214,7 @@ func TestRelabel(t *testing.T) {
},
},
{
input: clientmodel.LabelSet{
input: model.LabelSet{
"a": "foo",
"__meta_my_bar": "aaa",
"__meta_my_baz": "bbb",
@ -227,7 +227,7 @@ func TestRelabel(t *testing.T) {
Action: config.RelabelLabelMap,
},
},
output: clientmodel.LabelSet{
output: model.LabelSet{
"a": "foo",
"__meta_my_bar": "aaa",
"__meta_my_baz": "bbb",

View file

@ -30,7 +30,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/storage"
@ -40,10 +40,10 @@ import (
const (
// scrapeHealthMetricName is the metric name for the synthetic health
// variable.
scrapeHealthMetricName clientmodel.LabelValue = "up"
scrapeHealthMetricName model.LabelValue = "up"
// scrapeDurationMetricName is the metric name for the synthetic scrape
// duration variable.
scrapeDurationMetricName clientmodel.LabelValue = "scrape_duration_seconds"
scrapeDurationMetricName model.LabelValue = "scrape_duration_seconds"
// Capacity of the channel to buffer samples during ingestion.
ingestedSamplesCap = 256
@ -150,7 +150,7 @@ type Target struct {
// Closing scraperStopped signals that scraping has been stopped.
scraperStopped chan struct{}
// Channel to buffer ingested samples.
ingestedSamples chan clientmodel.Samples
ingestedSamples chan model.Samples
// Mutex protects the members below.
sync.RWMutex
@ -159,9 +159,9 @@ type Target struct {
// url is the URL to be scraped. Its host is immutable.
url *url.URL
// Labels before any processing.
metaLabels clientmodel.LabelSet
metaLabels model.LabelSet
// Any base labels that are added to this target and its metrics.
baseLabels clientmodel.LabelSet
baseLabels model.LabelSet
// What is the deadline for the HTTP or HTTPS against this endpoint.
deadline time.Duration
// The time between two scrapes.
@ -174,11 +174,11 @@ type Target struct {
}
// NewTarget creates a reasonably configured target for querying.
func NewTarget(cfg *config.ScrapeConfig, baseLabels, metaLabels clientmodel.LabelSet) *Target {
func NewTarget(cfg *config.ScrapeConfig, baseLabels, metaLabels model.LabelSet) *Target {
t := &Target{
url: &url.URL{
Scheme: string(baseLabels[clientmodel.SchemeLabel]),
Host: string(baseLabels[clientmodel.AddressLabel]),
Scheme: string(baseLabels[model.SchemeLabel]),
Host: string(baseLabels[model.AddressLabel]),
},
status: &TargetStatus{},
scraperStopping: make(chan struct{}),
@ -195,7 +195,7 @@ func (t *Target) Status() *TargetStatus {
// Update overwrites settings in the target that are derived from the job config
// it belongs to.
func (t *Target) Update(cfg *config.ScrapeConfig, baseLabels, metaLabels clientmodel.LabelSet) {
func (t *Target) Update(cfg *config.ScrapeConfig, baseLabels, metaLabels model.LabelSet) {
t.Lock()
defer t.Unlock()
@ -206,19 +206,19 @@ func (t *Target) Update(cfg *config.ScrapeConfig, baseLabels, metaLabels clientm
}
t.httpClient = httpClient
t.url.Scheme = string(baseLabels[clientmodel.SchemeLabel])
t.url.Path = string(baseLabels[clientmodel.MetricsPathLabel])
t.url.Scheme = string(baseLabels[model.SchemeLabel])
t.url.Path = string(baseLabels[model.MetricsPathLabel])
params := url.Values{}
for k, v := range cfg.Params {
params[k] = make([]string, len(v))
copy(params[k], v)
}
for k, v := range baseLabels {
if strings.HasPrefix(string(k), clientmodel.ParamLabelPrefix) {
if len(params[string(k[len(clientmodel.ParamLabelPrefix):])]) > 0 {
params[string(k[len(clientmodel.ParamLabelPrefix):])][0] = string(v)
if strings.HasPrefix(string(k), model.ParamLabelPrefix) {
if len(params[string(k[len(model.ParamLabelPrefix):])]) > 0 {
params[string(k[len(model.ParamLabelPrefix):])][0] = string(v)
} else {
params[string(k[len(clientmodel.ParamLabelPrefix):])] = []string{string(v)}
params[string(k[len(model.ParamLabelPrefix):])] = []string{string(v)}
}
}
}
@ -229,15 +229,15 @@ func (t *Target) Update(cfg *config.ScrapeConfig, baseLabels, metaLabels clientm
t.honorLabels = cfg.HonorLabels
t.metaLabels = metaLabels
t.baseLabels = clientmodel.LabelSet{}
t.baseLabels = model.LabelSet{}
// All remaining internal labels will not be part of the label set.
for name, val := range baseLabels {
if !strings.HasPrefix(string(name), clientmodel.ReservedLabelPrefix) {
if !strings.HasPrefix(string(name), model.ReservedLabelPrefix) {
t.baseLabels[name] = val
}
}
if _, ok := t.baseLabels[clientmodel.InstanceLabel]; !ok {
t.baseLabels[clientmodel.InstanceLabel] = clientmodel.LabelValue(t.InstanceIdentifier())
if _, ok := t.baseLabels[model.InstanceLabel]; !ok {
t.baseLabels[model.InstanceLabel] = model.LabelValue(t.InstanceIdentifier())
}
t.metricRelabelConfigs = cfg.MetricRelabelConfigs
}
@ -302,7 +302,7 @@ func (t *Target) String() string {
}
// Ingest implements an extraction.Ingester.
func (t *Target) Ingest(s clientmodel.Samples) error {
func (t *Target) Ingest(s model.Samples) error {
t.RLock()
deadline := t.deadline
t.RUnlock()
@ -416,7 +416,7 @@ func (t *Target) scrape(sampleAppender storage.SampleAppender) (err error) {
defer func() {
t.status.setLastError(err)
recordScrapeHealth(sampleAppender, clientmodel.TimestampFromTime(start), baseLabels, t.status.Health(), time.Since(start))
recordScrapeHealth(sampleAppender, model.TimeFromTime(start), baseLabels, t.status.Health(), time.Since(start))
}()
req, err := http.NewRequest("GET", t.URL().String(), nil)
@ -439,10 +439,10 @@ func (t *Target) scrape(sampleAppender storage.SampleAppender) (err error) {
return err
}
t.ingestedSamples = make(chan clientmodel.Samples, ingestedSamplesCap)
t.ingestedSamples = make(chan model.Samples, ingestedSamplesCap)
processOptions := &extraction.ProcessOptions{
Timestamp: clientmodel.TimestampFromTime(start),
Timestamp: model.TimeFromTime(start),
}
go func() {
err = processor.ProcessSingle(resp.Body, t, processOptions)
@ -464,14 +464,14 @@ func (t *Target) scrape(sampleAppender storage.SampleAppender) (err error) {
// value of the label is stored in a label prefixed with the exported prefix.
for ln, lv := range baseLabels {
if v, ok := s.Metric[ln]; ok && v != "" {
s.Metric[clientmodel.ExportedLabelPrefix+ln] = v
s.Metric[model.ExportedLabelPrefix+ln] = v
}
s.Metric[ln] = lv
}
}
// Avoid the copy in Relabel if there are no configs.
if len(metricRelabelConfigs) > 0 {
labels, err := Relabel(clientmodel.LabelSet(s.Metric), metricRelabelConfigs...)
labels, err := Relabel(model.LabelSet(s.Metric), metricRelabelConfigs...)
if err != nil {
log.Errorf("Error while relabeling metric %s of instance %s: %s", s.Metric, req.URL, err)
continue
@ -480,7 +480,7 @@ func (t *Target) scrape(sampleAppender storage.SampleAppender) (err error) {
if labels == nil {
continue
}
s.Metric = clientmodel.Metric(labels)
s.Metric = model.Metric(labels)
}
sampleAppender.Append(s)
}
@ -503,24 +503,24 @@ func (t *Target) InstanceIdentifier() string {
}
// fullLabels returns the base labels plus internal labels defining the target.
func (t *Target) fullLabels() clientmodel.LabelSet {
func (t *Target) fullLabels() model.LabelSet {
t.RLock()
defer t.RUnlock()
lset := make(clientmodel.LabelSet, len(t.baseLabels)+2)
lset := make(model.LabelSet, len(t.baseLabels)+2)
for ln, lv := range t.baseLabels {
lset[ln] = lv
}
lset[clientmodel.MetricsPathLabel] = clientmodel.LabelValue(t.url.Path)
lset[clientmodel.AddressLabel] = clientmodel.LabelValue(t.url.Host)
lset[clientmodel.SchemeLabel] = clientmodel.LabelValue(t.url.Scheme)
lset[model.MetricsPathLabel] = model.LabelValue(t.url.Path)
lset[model.AddressLabel] = model.LabelValue(t.url.Host)
lset[model.SchemeLabel] = model.LabelValue(t.url.Scheme)
return lset
}
// BaseLabels returns a copy of the target's base labels.
func (t *Target) BaseLabels() clientmodel.LabelSet {
func (t *Target) BaseLabels() model.LabelSet {
t.RLock()
defer t.RUnlock()
lset := make(clientmodel.LabelSet, len(t.baseLabels))
lset := make(model.LabelSet, len(t.baseLabels))
for ln, lv := range t.baseLabels {
lset[ln] = lv
}
@ -528,10 +528,10 @@ func (t *Target) BaseLabels() clientmodel.LabelSet {
}
// MetaLabels returns a copy of the target's labels before any processing.
func (t *Target) MetaLabels() clientmodel.LabelSet {
func (t *Target) MetaLabels() model.LabelSet {
t.RLock()
defer t.RUnlock()
lset := make(clientmodel.LabelSet, len(t.metaLabels))
lset := make(model.LabelSet, len(t.metaLabels))
for ln, lv := range t.metaLabels {
lset[ln] = lv
}
@ -540,36 +540,36 @@ func (t *Target) MetaLabels() clientmodel.LabelSet {
func recordScrapeHealth(
sampleAppender storage.SampleAppender,
timestamp clientmodel.Timestamp,
baseLabels clientmodel.LabelSet,
timestamp model.Time,
baseLabels model.LabelSet,
health TargetHealth,
scrapeDuration time.Duration,
) {
healthMetric := make(clientmodel.Metric, len(baseLabels)+1)
durationMetric := make(clientmodel.Metric, len(baseLabels)+1)
healthMetric := make(model.Metric, len(baseLabels)+1)
durationMetric := make(model.Metric, len(baseLabels)+1)
healthMetric[clientmodel.MetricNameLabel] = clientmodel.LabelValue(scrapeHealthMetricName)
durationMetric[clientmodel.MetricNameLabel] = clientmodel.LabelValue(scrapeDurationMetricName)
healthMetric[model.MetricNameLabel] = model.LabelValue(scrapeHealthMetricName)
durationMetric[model.MetricNameLabel] = model.LabelValue(scrapeDurationMetricName)
for label, value := range baseLabels {
healthMetric[label] = value
durationMetric[label] = value
}
healthValue := clientmodel.SampleValue(0)
healthValue := model.SampleValue(0)
if health == HealthGood {
healthValue = clientmodel.SampleValue(1)
healthValue = model.SampleValue(1)
}
healthSample := &clientmodel.Sample{
healthSample := &model.Sample{
Metric: healthMetric,
Timestamp: timestamp,
Value: healthValue,
}
durationSample := &clientmodel.Sample{
durationSample := &model.Sample{
Metric: durationMetric,
Timestamp: timestamp,
Value: clientmodel.SampleValue(float64(scrapeDuration) / float64(time.Second)),
Value: model.SampleValue(float64(scrapeDuration) / float64(time.Second)),
}
sampleAppender.Append(healthSample)
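
A condensed sketch of the two synthetic series built above for a healthy target; the base labels and the duration value are invented:

base := model.LabelSet{model.JobLabel: "api", model.InstanceLabel: "10.0.0.1:9100"}

up := model.Metric{model.MetricNameLabel: scrapeHealthMetricName}
dur := model.Metric{model.MetricNameLabel: scrapeDurationMetricName}
for ln, lv := range base {
	up[ln] = lv
	dur[ln] = lv
}

// Appended as up{job="api",instance="10.0.0.1:9100"} 1 and
// scrape_duration_seconds{job="api",instance="10.0.0.1:9100"} 0.034.
healthSample := &model.Sample{Metric: up, Timestamp: model.Now(), Value: 1}
durationSample := &model.Sample{Metric: dur, Timestamp: model.Now(), Value: 0.034}
_, _ = healthSample, durationSample // each is handed to the SampleAppender via Append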

View file

@ -17,7 +17,7 @@ import (
"crypto/tls"
"crypto/x509"
"errors"
"fmt"
// "fmt"
"io/ioutil"
"net/http"
"net/http/httptest"
@ -28,17 +28,17 @@ import (
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
)
func TestBaseLabels(t *testing.T) {
target := newTestTarget("example.com:80", 0, clientmodel.LabelSet{"job": "some_job", "foo": "bar"})
want := clientmodel.LabelSet{
clientmodel.JobLabel: "some_job",
clientmodel.InstanceLabel: "example.com:80",
"foo": "bar",
target := newTestTarget("example.com:80", 0, model.LabelSet{"job": "some_job", "foo": "bar"})
want := model.LabelSet{
model.JobLabel: "some_job",
model.InstanceLabel: "example.com:80",
"foo": "bar",
}
got := target.BaseLabels()
if !reflect.DeepEqual(want, got) {
@ -49,8 +49,8 @@ func TestBaseLabels(t *testing.T) {
func TestOverwriteLabels(t *testing.T) {
type test struct {
metric string
resultNormal clientmodel.Metric
resultHonor clientmodel.Metric
resultNormal model.Metric
resultHonor model.Metric
}
var tests []test
@ -66,40 +66,40 @@ func TestOverwriteLabels(t *testing.T) {
),
)
defer server.Close()
addr := clientmodel.LabelValue(strings.Split(server.URL, "://")[1])
addr := model.LabelValue(strings.Split(server.URL, "://")[1])
tests = []test{
{
metric: `foo{}`,
resultNormal: clientmodel.Metric{
clientmodel.MetricNameLabel: "foo",
clientmodel.InstanceLabel: addr,
resultNormal: model.Metric{
model.MetricNameLabel: "foo",
model.InstanceLabel: addr,
},
resultHonor: clientmodel.Metric{
clientmodel.MetricNameLabel: "foo",
clientmodel.InstanceLabel: addr,
resultHonor: model.Metric{
model.MetricNameLabel: "foo",
model.InstanceLabel: addr,
},
},
{
metric: `foo{instance=""}`,
resultNormal: clientmodel.Metric{
clientmodel.MetricNameLabel: "foo",
clientmodel.InstanceLabel: addr,
resultNormal: model.Metric{
model.MetricNameLabel: "foo",
model.InstanceLabel: addr,
},
resultHonor: clientmodel.Metric{
clientmodel.MetricNameLabel: "foo",
resultHonor: model.Metric{
model.MetricNameLabel: "foo",
},
},
{
metric: `foo{instance="other_instance"}`,
resultNormal: clientmodel.Metric{
clientmodel.MetricNameLabel: "foo",
clientmodel.InstanceLabel: addr,
clientmodel.ExportedLabelPrefix + clientmodel.InstanceLabel: "other_instance",
resultNormal: model.Metric{
model.MetricNameLabel: "foo",
model.InstanceLabel: addr,
model.ExportedLabelPrefix + model.InstanceLabel: "other_instance",
},
resultHonor: clientmodel.Metric{
clientmodel.MetricNameLabel: "foo",
clientmodel.InstanceLabel: "other_instance",
resultHonor: model.Metric{
model.MetricNameLabel: "foo",
model.InstanceLabel: "other_instance",
},
},
}
@ -140,31 +140,31 @@ func TestTargetScrapeUpdatesState(t *testing.T) {
}
}
func TestTargetScrapeWithFullChannel(t *testing.T) {
server := httptest.NewServer(
http.HandlerFunc(
func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", `text/plain; version=0.0.4`)
for i := 0; i < 2*ingestedSamplesCap; i++ {
w.Write([]byte(
fmt.Sprintf("test_metric_%d{foo=\"bar\"} 123.456\n", i),
))
}
},
),
)
defer server.Close()
// func TestTargetScrapeWithFullChannel(t *testing.T) {
// server := httptest.NewServer(
// http.HandlerFunc(
// func(w http.ResponseWriter, r *http.Request) {
// w.Header().Set("Content-Type", `text/plain; version=0.0.4`)
// for i := 0; i < 2*ingestedSamplesCap; i++ {
// w.Write([]byte(
// fmt.Sprintf("test_metric_%d{foo=\"bar\"} 123.456\n", i),
// ))
// }
// },
// ),
// )
// defer server.Close()
testTarget := newTestTarget(server.URL, 10*time.Millisecond, clientmodel.LabelSet{"dings": "bums"})
// testTarget := newTestTarget(server.URL, 10*time.Millisecond, model.LabelSet{"dings": "bums"})
testTarget.scrape(slowAppender{})
if testTarget.status.Health() != HealthBad {
t.Errorf("Expected target state %v, actual: %v", HealthBad, testTarget.status.Health())
}
if testTarget.status.LastError() != errIngestChannelFull {
t.Errorf("Expected target error %q, actual: %q", errIngestChannelFull, testTarget.status.LastError())
}
}
// testTarget.scrape(slowAppender{})
// if testTarget.status.Health() != HealthBad {
// t.Errorf("Expected target state %v, actual: %v", HealthBad, testTarget.status.Health())
// }
// if testTarget.status.LastError() != errIngestChannelFull {
// t.Errorf("Expected target error %q, actual: %q", errIngestChannelFull, testTarget.status.LastError())
// }
// }
func TestTargetScrapeMetricRelabelConfigs(t *testing.T) {
server := httptest.NewServer(
@ -177,15 +177,15 @@ func TestTargetScrapeMetricRelabelConfigs(t *testing.T) {
),
)
defer server.Close()
testTarget := newTestTarget(server.URL, 10*time.Millisecond, clientmodel.LabelSet{})
testTarget := newTestTarget(server.URL, 10*time.Millisecond, model.LabelSet{})
testTarget.metricRelabelConfigs = []*config.RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{"__name__"},
SourceLabels: model.LabelNames{"__name__"},
Regex: &config.Regexp{*regexp.MustCompile(".*drop.*")},
Action: config.RelabelDrop,
},
{
SourceLabels: clientmodel.LabelNames{"__name__"},
SourceLabels: model.LabelNames{"__name__"},
Regex: &config.Regexp{*regexp.MustCompile(".*(relabel|up).*")},
TargetLabel: "foo",
Replacement: "bar",
@ -202,29 +202,29 @@ func TestTargetScrapeMetricRelabelConfigs(t *testing.T) {
sample.Value = 0
}
expected := []*clientmodel.Sample{
expected := []*model.Sample{
{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "test_metric_relabel",
"foo": "bar",
clientmodel.InstanceLabel: clientmodel.LabelValue(testTarget.url.Host),
Metric: model.Metric{
model.MetricNameLabel: "test_metric_relabel",
"foo": "bar",
model.InstanceLabel: model.LabelValue(testTarget.url.Host),
},
Timestamp: 0,
Value: 0,
},
// The metrics about the scrape are not affected.
{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: scrapeHealthMetricName,
clientmodel.InstanceLabel: clientmodel.LabelValue(testTarget.url.Host),
Metric: model.Metric{
model.MetricNameLabel: scrapeHealthMetricName,
model.InstanceLabel: model.LabelValue(testTarget.url.Host),
},
Timestamp: 0,
Value: 0,
},
{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: scrapeDurationMetricName,
clientmodel.InstanceLabel: clientmodel.LabelValue(testTarget.url.Host),
Metric: model.Metric{
model.MetricNameLabel: scrapeDurationMetricName,
model.InstanceLabel: model.LabelValue(testTarget.url.Host),
},
Timestamp: 0,
Value: 0,
@ -238,12 +238,12 @@ func TestTargetScrapeMetricRelabelConfigs(t *testing.T) {
}
func TestTargetRecordScrapeHealth(t *testing.T) {
testTarget := newTestTarget("example.url:80", 0, clientmodel.LabelSet{clientmodel.JobLabel: "testjob"})
testTarget := newTestTarget("example.url:80", 0, model.LabelSet{model.JobLabel: "testjob"})
now := clientmodel.Now()
now := model.Now()
appender := &collectResultAppender{}
testTarget.status.setLastError(nil)
recordScrapeHealth(appender, now, testTarget.BaseLabels(), testTarget.status.Health(), 2*time.Second)
recordScrapeHealth(appender, now.Time(), testTarget.BaseLabels(), testTarget.status.Health(), 2*time.Second)
result := appender.result
@ -252,11 +252,11 @@ func TestTargetRecordScrapeHealth(t *testing.T) {
}
actual := result[0]
expected := &clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: scrapeHealthMetricName,
clientmodel.InstanceLabel: "example.url:80",
clientmodel.JobLabel: "testjob",
expected := &model.Sample{
Metric: model.Metric{
model.MetricNameLabel: scrapeHealthMetricName,
model.InstanceLabel: "example.url:80",
model.JobLabel: "testjob",
},
Timestamp: now,
Value: 1,
@ -267,11 +267,11 @@ func TestTargetRecordScrapeHealth(t *testing.T) {
}
actual = result[1]
expected = &clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: scrapeDurationMetricName,
clientmodel.InstanceLabel: "example.url:80",
clientmodel.JobLabel: "testjob",
expected = &model.Sample{
Metric: model.Metric{
model.MetricNameLabel: scrapeDurationMetricName,
model.InstanceLabel: "example.url:80",
model.JobLabel: "testjob",
},
Timestamp: now,
Value: 2.0,
@ -295,7 +295,7 @@ func TestTargetScrapeTimeout(t *testing.T) {
)
defer server.Close()
testTarget := newTestTarget(server.URL, 50*time.Millisecond, clientmodel.LabelSet{})
testTarget := newTestTarget(server.URL, 50*time.Millisecond, model.LabelSet{})
appender := nopAppender{}
@ -338,7 +338,7 @@ func TestTargetScrape404(t *testing.T) {
)
defer server.Close()
testTarget := newTestTarget(server.URL, 10*time.Millisecond, clientmodel.LabelSet{})
testTarget := newTestTarget(server.URL, 10*time.Millisecond, model.LabelSet{})
appender := nopAppender{}
want := errors.New("server returned HTTP status 404 Not Found")
@ -381,7 +381,7 @@ func BenchmarkScrape(b *testing.B) {
)
defer server.Close()
testTarget := newTestTarget(server.URL, 100*time.Millisecond, clientmodel.LabelSet{"dings": "bums"})
testTarget := newTestTarget(server.URL, 100*time.Millisecond, model.LabelSet{"dings": "bums"})
appender := nopAppender{}
b.ResetTimer()
@ -424,10 +424,10 @@ func TestURLParams(t *testing.T) {
"foo": []string{"bar", "baz"},
},
},
clientmodel.LabelSet{
clientmodel.SchemeLabel: clientmodel.LabelValue(serverURL.Scheme),
clientmodel.AddressLabel: clientmodel.LabelValue(serverURL.Host),
"__param_foo": "bar",
model.LabelSet{
model.SchemeLabel: model.LabelValue(serverURL.Scheme),
model.AddressLabel: model.LabelValue(serverURL.Host),
"__param_foo": "bar",
},
nil)
app := &collectResultAppender{}
@ -436,7 +436,7 @@ func TestURLParams(t *testing.T) {
}
}
func newTestTarget(targetURL string, deadline time.Duration, baseLabels clientmodel.LabelSet) *Target {
func newTestTarget(targetURL string, deadline time.Duration, baseLabels model.LabelSet) *Target {
cfg := &config.ScrapeConfig{
ScrapeTimeout: config.Duration(deadline),
}
@ -454,8 +454,8 @@ func newTestTarget(targetURL string, deadline time.Duration, baseLabels clientmo
scraperStopping: make(chan struct{}),
scraperStopped: make(chan struct{}),
}
t.baseLabels = clientmodel.LabelSet{
clientmodel.InstanceLabel: clientmodel.LabelValue(t.InstanceIdentifier()),
t.baseLabels = model.LabelSet{
model.InstanceLabel: model.LabelValue(t.InstanceIdentifier()),
}
for baseLabel, baseValue := range baseLabels {
t.baseLabels[baseLabel] = baseValue
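A small self-contained sketch of the clientmodel.Timestamp to model.Time change exercised by this test: model.Now(), offsetting with Add, and converting to the standard library's time.Time via Time(); the five-minute offset is arbitrary:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	// model.Time replaces clientmodel.Timestamp: millisecond precision,
	// convertible to and from the standard library's time.Time.
	now := model.Now()
	evalTime := model.Time(0).Add(5 * time.Minute)

	fmt.Println(now.Time().UTC())      // as a time.Time
	fmt.Println(model.TimeFromUnix(0)) // from a Unix timestamp
	fmt.Println(evalTime.Before(now))  // comparisons stay on model.Time
}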


@ -20,7 +20,7 @@ import (
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/retrieval/discovery"
@ -52,7 +52,7 @@ type TargetProvider interface {
// target providers.
type TargetManager struct {
mtx sync.RWMutex
globalLabels clientmodel.LabelSet
globalLabels model.LabelSet
sampleAppender storage.SampleAppender
running bool
done chan struct{}
@ -325,7 +325,7 @@ func (tm *TargetManager) Pools() map[string][]*Target {
for _, ts := range tm.targets {
for _, t := range ts {
job := string(t.BaseLabels()[clientmodel.JobLabel])
job := string(t.BaseLabels()[model.JobLabel])
pools[job] = append(pools[job], t)
}
}
@ -452,7 +452,7 @@ func (tm *TargetManager) targetsFromGroup(tg *config.TargetGroup, cfg *config.Sc
targets := make([]*Target, 0, len(tg.Targets))
for i, labels := range tg.Targets {
addr := string(labels[clientmodel.AddressLabel])
addr := string(labels[model.AddressLabel])
// If no port was provided, infer it based on the used scheme.
if !strings.Contains(addr, ":") {
switch cfg.Scheme {
@ -463,21 +463,21 @@ func (tm *TargetManager) targetsFromGroup(tg *config.TargetGroup, cfg *config.Sc
default:
panic(fmt.Errorf("targetsFromGroup: invalid scheme %q", cfg.Scheme))
}
labels[clientmodel.AddressLabel] = clientmodel.LabelValue(addr)
labels[model.AddressLabel] = model.LabelValue(addr)
}
for k, v := range cfg.Params {
if len(v) > 0 {
labels[clientmodel.LabelName(clientmodel.ParamLabelPrefix+k)] = clientmodel.LabelValue(v[0])
labels[model.LabelName(model.ParamLabelPrefix+k)] = model.LabelValue(v[0])
}
}
// Copy labels into the labelset for the target if they are not
// set already. Apply the labelsets in order of decreasing precedence.
labelsets := []clientmodel.LabelSet{
labelsets := []model.LabelSet{
tg.Labels,
{
clientmodel.SchemeLabel: clientmodel.LabelValue(cfg.Scheme),
clientmodel.MetricsPathLabel: clientmodel.LabelValue(cfg.MetricsPath),
clientmodel.JobLabel: clientmodel.LabelValue(cfg.JobName),
model.SchemeLabel: model.LabelValue(cfg.Scheme),
model.MetricsPathLabel: model.LabelValue(cfg.MetricsPath),
model.JobLabel: model.LabelValue(cfg.JobName),
},
tm.globalLabels,
}
@ -489,7 +489,7 @@ func (tm *TargetManager) targetsFromGroup(tg *config.TargetGroup, cfg *config.Sc
}
}
if _, ok := labels[clientmodel.AddressLabel]; !ok {
if _, ok := labels[model.AddressLabel]; !ok {
return nil, fmt.Errorf("instance %d in target group %s has no address", i, tg)
}
@ -507,7 +507,7 @@ func (tm *TargetManager) targetsFromGroup(tg *config.TargetGroup, cfg *config.Sc
for ln := range labels {
// Meta labels are deleted after relabelling. Other internal labels propagate to
// the target which decides whether they will be part of their label set.
if strings.HasPrefix(string(ln), clientmodel.MetaLabelPrefix) {
if strings.HasPrefix(string(ln), model.MetaLabelPrefix) {
delete(labels, ln)
}
}
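The precedence handling above can be tried in isolation. A minimal sketch, with made-up label sets, that applies label sets in decreasing precedence and then drops __meta_ labels, mirroring targetsFromGroup:

package main

import (
	"fmt"
	"strings"

	"github.com/prometheus/common/model"
)

func main() {
	// Labels discovered for a single (invented) target.
	labels := model.LabelSet{model.AddressLabel: "example.org:80"}

	// Higher-precedence sets come first: group labels, scrape-config
	// defaults, then global labels (all invented here).
	labelsets := []model.LabelSet{
		{"group": "canary"},
		{model.SchemeLabel: "http", model.JobLabel: "node"},
		{"region": "eu", "__meta_dc": "dc1"},
	}

	// Copy labels only if they are not set already.
	for _, lset := range labelsets {
		for ln, lv := range lset {
			if _, ok := labels[ln]; !ok {
				labels[ln] = lv
			}
		}
	}

	// Meta labels are dropped before the target is built.
	for ln := range labels {
		if strings.HasPrefix(string(ln), model.MetaLabelPrefix) {
			delete(labels, ln)
		}
	}
	fmt.Println(labels)
}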


@ -20,7 +20,7 @@ import (
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
)
@ -28,12 +28,12 @@ import (
func TestPrefixedTargetProvider(t *testing.T) {
targetGroups := []*config.TargetGroup{
{
Targets: []clientmodel.LabelSet{
{clientmodel.AddressLabel: "test-1:1234"},
Targets: []model.LabelSet{
{model.AddressLabel: "test-1:1234"},
},
}, {
Targets: []clientmodel.LabelSet{
{clientmodel.AddressLabel: "test-1:1235"},
Targets: []model.LabelSet{
{model.AddressLabel: "test-1:1235"},
},
},
}
@ -78,9 +78,9 @@ func TestTargetManagerChan(t *testing.T) {
JobName: "test_job1",
ScrapeInterval: config.Duration(1 * time.Minute),
TargetGroups: []*config.TargetGroup{{
Targets: []clientmodel.LabelSet{
{clientmodel.AddressLabel: "example.org:80"},
{clientmodel.AddressLabel: "example.com:80"},
Targets: []model.LabelSet{
{model.AddressLabel: "example.org:80"},
{model.AddressLabel: "example.com:80"},
},
}},
}
@ -101,72 +101,72 @@ func TestTargetManagerChan(t *testing.T) {
sequence := []struct {
tgroup *config.TargetGroup
expected map[string][]clientmodel.LabelSet
expected map[string][]model.LabelSet
}{
{
tgroup: &config.TargetGroup{
Source: "src1",
Targets: []clientmodel.LabelSet{
{clientmodel.AddressLabel: "test-1:1234"},
{clientmodel.AddressLabel: "test-2:1234", "label": "set"},
{clientmodel.AddressLabel: "test-3:1234"},
Targets: []model.LabelSet{
{model.AddressLabel: "test-1:1234"},
{model.AddressLabel: "test-2:1234", "label": "set"},
{model.AddressLabel: "test-3:1234"},
},
},
expected: map[string][]clientmodel.LabelSet{
expected: map[string][]model.LabelSet{
"src1": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-1:1234"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-2:1234", "label": "set"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-3:1234"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1234", "label": "set"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
},
},
}, {
tgroup: &config.TargetGroup{
Source: "src2",
Targets: []clientmodel.LabelSet{
{clientmodel.AddressLabel: "test-1:1235"},
{clientmodel.AddressLabel: "test-2:1235"},
{clientmodel.AddressLabel: "test-3:1235"},
Targets: []model.LabelSet{
{model.AddressLabel: "test-1:1235"},
{model.AddressLabel: "test-2:1235"},
{model.AddressLabel: "test-3:1235"},
},
Labels: clientmodel.LabelSet{"group": "label"},
Labels: model.LabelSet{"group": "label"},
},
expected: map[string][]clientmodel.LabelSet{
expected: map[string][]model.LabelSet{
"src1": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-1:1234"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-2:1234", "label": "set"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-3:1234"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1234", "label": "set"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
},
"src2": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-1:1235", "group": "label"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-2:1235", "group": "label"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-3:1235", "group": "label"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1235", "group": "label"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1235", "group": "label"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1235", "group": "label"},
},
},
}, {
tgroup: &config.TargetGroup{
Source: "src2",
Targets: []clientmodel.LabelSet{},
Targets: []model.LabelSet{},
},
expected: map[string][]clientmodel.LabelSet{
expected: map[string][]model.LabelSet{
"src1": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-1:1234"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-2:1234", "label": "set"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-3:1234"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1234", "label": "set"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
},
},
}, {
tgroup: &config.TargetGroup{
Source: "src1",
Targets: []clientmodel.LabelSet{
{clientmodel.AddressLabel: "test-1:1234", "added": "label"},
{clientmodel.AddressLabel: "test-3:1234"},
{clientmodel.AddressLabel: "test-4:1234", "fancy": "label"},
Targets: []model.LabelSet{
{model.AddressLabel: "test-1:1234", "added": "label"},
{model.AddressLabel: "test-3:1234"},
{model.AddressLabel: "test-4:1234", "fancy": "label"},
},
},
expected: map[string][]clientmodel.LabelSet{
expected: map[string][]model.LabelSet{
"src1": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-1:1234", "added": "label"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-3:1234"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "test-4:1234", "fancy": "label"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234", "added": "label"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
{model.JobLabel: "test_job1", model.InstanceLabel: "test-4:1234", "fancy": "label"},
},
},
},
@ -210,15 +210,15 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
"testParam": []string{"paramValue", "secondValue"},
},
TargetGroups: []*config.TargetGroup{{
Targets: []clientmodel.LabelSet{
{clientmodel.AddressLabel: "example.org:80"},
{clientmodel.AddressLabel: "example.com:80"},
Targets: []model.LabelSet{
{model.AddressLabel: "example.org:80"},
{model.AddressLabel: "example.com:80"},
},
}},
RelabelConfigs: []*config.RelabelConfig{
{
// Copy out the URL parameter.
SourceLabels: clientmodel.LabelNames{"__param_testParam"},
SourceLabels: model.LabelNames{"__param_testParam"},
Regex: &config.Regexp{*regexp.MustCompile("^(.*)$")},
TargetLabel: "testParam",
Replacement: "$1",
@ -231,38 +231,38 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
ScrapeInterval: config.Duration(1 * time.Minute),
TargetGroups: []*config.TargetGroup{
{
Targets: []clientmodel.LabelSet{
{clientmodel.AddressLabel: "example.org:8080"},
{clientmodel.AddressLabel: "example.com:8081"},
Targets: []model.LabelSet{
{model.AddressLabel: "example.org:8080"},
{model.AddressLabel: "example.com:8081"},
},
Labels: clientmodel.LabelSet{
Labels: model.LabelSet{
"foo": "bar",
"boom": "box",
},
},
{
Targets: []clientmodel.LabelSet{
{clientmodel.AddressLabel: "test.com:1234"},
Targets: []model.LabelSet{
{model.AddressLabel: "test.com:1234"},
},
},
{
Targets: []clientmodel.LabelSet{
{clientmodel.AddressLabel: "test.com:1235"},
Targets: []model.LabelSet{
{model.AddressLabel: "test.com:1235"},
},
Labels: clientmodel.LabelSet{"instance": "fixed"},
Labels: model.LabelSet{"instance": "fixed"},
},
},
RelabelConfigs: []*config.RelabelConfig{
{
SourceLabels: clientmodel.LabelNames{clientmodel.AddressLabel},
SourceLabels: model.LabelNames{model.AddressLabel},
Regex: &config.Regexp{*regexp.MustCompile(`^test\.(.*?):(.*)`)},
Replacement: "foo.${1}:${2}",
TargetLabel: clientmodel.AddressLabel,
TargetLabel: model.AddressLabel,
Action: config.RelabelReplace,
},
{
// Add a new label for example.* targets.
SourceLabels: clientmodel.LabelNames{clientmodel.AddressLabel, "boom", "foo"},
SourceLabels: model.LabelNames{model.AddressLabel, "boom", "foo"},
Regex: &config.Regexp{*regexp.MustCompile("^example.*?-b([a-z-]+)r$")},
TargetLabel: "new",
Replacement: "$1",
@ -271,7 +271,7 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
},
{
// Drop an existing label.
SourceLabels: clientmodel.LabelNames{"boom"},
SourceLabels: model.LabelNames{"boom"},
Regex: &config.Regexp{*regexp.MustCompile(".*")},
TargetLabel: "boom",
Replacement: "",
@ -282,57 +282,57 @@ func TestTargetManagerConfigUpdate(t *testing.T) {
sequence := []struct {
scrapeConfigs []*config.ScrapeConfig
expected map[string][]clientmodel.LabelSet
expected map[string][]model.LabelSet
}{
{
scrapeConfigs: []*config.ScrapeConfig{testJob1},
expected: map[string][]clientmodel.LabelSet{
expected: map[string][]model.LabelSet{
"test_job1:static:0:0": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.org:80", "testParam": "paramValue"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.com:80", "testParam": "paramValue"},
{model.JobLabel: "test_job1", model.InstanceLabel: "example.org:80", "testParam": "paramValue"},
{model.JobLabel: "test_job1", model.InstanceLabel: "example.com:80", "testParam": "paramValue"},
},
},
}, {
scrapeConfigs: []*config.ScrapeConfig{testJob1},
expected: map[string][]clientmodel.LabelSet{
expected: map[string][]model.LabelSet{
"test_job1:static:0:0": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.org:80", "testParam": "paramValue"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.com:80", "testParam": "paramValue"},
{model.JobLabel: "test_job1", model.InstanceLabel: "example.org:80", "testParam": "paramValue"},
{model.JobLabel: "test_job1", model.InstanceLabel: "example.com:80", "testParam": "paramValue"},
},
},
}, {
scrapeConfigs: []*config.ScrapeConfig{testJob1, testJob2},
expected: map[string][]clientmodel.LabelSet{
expected: map[string][]model.LabelSet{
"test_job1:static:0:0": {
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.org:80", "testParam": "paramValue"},
{clientmodel.JobLabel: "test_job1", clientmodel.InstanceLabel: "example.com:80", "testParam": "paramValue"},
{model.JobLabel: "test_job1", model.InstanceLabel: "example.org:80", "testParam": "paramValue"},
{model.JobLabel: "test_job1", model.InstanceLabel: "example.com:80", "testParam": "paramValue"},
},
"test_job2:static:0:0": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "example.org:8080", "foo": "bar", "new": "ox-ba"},
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "example.com:8081", "foo": "bar", "new": "ox-ba"},
{model.JobLabel: "test_job2", model.InstanceLabel: "example.org:8080", "foo": "bar", "new": "ox-ba"},
{model.JobLabel: "test_job2", model.InstanceLabel: "example.com:8081", "foo": "bar", "new": "ox-ba"},
},
"test_job2:static:0:1": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "foo.com:1234"},
{model.JobLabel: "test_job2", model.InstanceLabel: "foo.com:1234"},
},
"test_job2:static:0:2": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "fixed"},
{model.JobLabel: "test_job2", model.InstanceLabel: "fixed"},
},
},
}, {
scrapeConfigs: []*config.ScrapeConfig{},
expected: map[string][]clientmodel.LabelSet{},
expected: map[string][]model.LabelSet{},
}, {
scrapeConfigs: []*config.ScrapeConfig{testJob2},
expected: map[string][]clientmodel.LabelSet{
expected: map[string][]model.LabelSet{
"test_job2:static:0:0": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "example.org:8080", "foo": "bar", "new": "ox-ba"},
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "example.com:8081", "foo": "bar", "new": "ox-ba"},
{model.JobLabel: "test_job2", model.InstanceLabel: "example.org:8080", "foo": "bar", "new": "ox-ba"},
{model.JobLabel: "test_job2", model.InstanceLabel: "example.com:8081", "foo": "bar", "new": "ox-ba"},
},
"test_job2:static:0:1": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "foo.com:1234"},
{model.JobLabel: "test_job2", model.InstanceLabel: "foo.com:1234"},
},
"test_job2:static:0:2": {
{clientmodel.JobLabel: "test_job2", clientmodel.InstanceLabel: "fixed"},
{model.JobLabel: "test_job2", model.InstanceLabel: "fixed"},
},
},
},


@ -19,7 +19,7 @@ import (
"sync"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/util/strutil"
@ -27,12 +27,12 @@ import (
const (
// AlertMetricName is the metric name for synthetic alert timeseries.
alertMetricName clientmodel.LabelValue = "ALERTS"
alertMetricName model.LabelValue = "ALERTS"
// AlertNameLabel is the label name indicating the name of an alert.
alertNameLabel clientmodel.LabelName = "alertname"
alertNameLabel model.LabelName = "alertname"
// AlertStateLabel is the label name indicating the state of an alert.
alertStateLabel clientmodel.LabelName = "alertstate"
alertStateLabel model.LabelName = "alertstate"
)
// AlertState denotes the state of an active alert.
@ -67,28 +67,28 @@ type Alert struct {
// The name of the alert.
Name string
// The vector element labelset triggering this alert.
Labels clientmodel.LabelSet
Labels model.LabelSet
// The state of the alert (Pending or Firing).
State AlertState
// The time when the alert first transitioned into Pending state.
ActiveSince clientmodel.Timestamp
ActiveSince model.Time
// The value of the alert expression for this vector element.
Value clientmodel.SampleValue
Value model.SampleValue
}
// sample returns a Sample suitable for recording the alert.
func (a Alert) sample(timestamp clientmodel.Timestamp, value clientmodel.SampleValue) *promql.Sample {
recordedMetric := clientmodel.Metric{}
func (a Alert) sample(timestamp model.Time, value model.SampleValue) *promql.Sample {
recordedMetric := model.Metric{}
for label, value := range a.Labels {
recordedMetric[label] = value
}
recordedMetric[clientmodel.MetricNameLabel] = alertMetricName
recordedMetric[alertNameLabel] = clientmodel.LabelValue(a.Name)
recordedMetric[alertStateLabel] = clientmodel.LabelValue(a.State.String())
recordedMetric[model.MetricNameLabel] = alertMetricName
recordedMetric[alertNameLabel] = model.LabelValue(a.Name)
recordedMetric[alertStateLabel] = model.LabelValue(a.State.String())
return &promql.Sample{
Metric: clientmodel.COWMetric{
Metric: model.COWMetric{
Metric: recordedMetric,
Copied: true,
},
@ -107,7 +107,7 @@ type AlertingRule struct {
// output vector before an alert transitions from Pending to Firing state.
holdDuration time.Duration
// Extra labels to attach to the resulting alert sample vectors.
labels clientmodel.LabelSet
labels model.LabelSet
// Short alert summary, suitable for email subjects.
summary string
// More detailed alert description.
@ -119,7 +119,7 @@ type AlertingRule struct {
mutex sync.Mutex
// A map of alerts which are currently active (Pending or Firing), keyed by
// the fingerprint of the labelset they correspond to.
activeAlerts map[clientmodel.Fingerprint]*Alert
activeAlerts map[model.Fingerprint]*Alert
}
// NewAlertingRule constructs a new AlertingRule.
@ -127,7 +127,7 @@ func NewAlertingRule(
name string,
vector promql.Expr,
holdDuration time.Duration,
labels clientmodel.LabelSet,
labels model.LabelSet,
summary string,
description string,
runbook string,
@ -141,7 +141,7 @@ func NewAlertingRule(
description: description,
runbook: runbook,
activeAlerts: map[clientmodel.Fingerprint]*Alert{},
activeAlerts: map[model.Fingerprint]*Alert{},
}
}
@ -152,7 +152,7 @@ func (rule *AlertingRule) Name() string {
// eval evaluates the rule expression and then creates pending alerts and fires
// or removes previously pending alerts accordingly.
func (rule *AlertingRule) eval(timestamp clientmodel.Timestamp, engine *promql.Engine) (promql.Vector, error) {
func (rule *AlertingRule) eval(timestamp model.Time, engine *promql.Engine) (promql.Vector, error) {
query, err := engine.NewInstantQuery(rule.vector.String(), timestamp)
if err != nil {
return nil, err
@ -167,17 +167,16 @@ func (rule *AlertingRule) eval(timestamp clientmodel.Timestamp, engine *promql.E
// Create pending alerts for any new vector elements in the alert expression
// or update the expression value for existing elements.
resultFPs := map[clientmodel.Fingerprint]struct{}{}
resultFPs := map[model.Fingerprint]struct{}{}
for _, sample := range exprResult {
fp := sample.Metric.Metric.Fingerprint()
resultFPs[fp] = struct{}{}
if alert, ok := rule.activeAlerts[fp]; !ok {
labels := clientmodel.LabelSet{}
labels.MergeFromMetric(sample.Metric.Metric)
labels := model.LabelSet(sample.Metric.Metric.Clone())
labels = labels.Merge(rule.labels)
if _, ok := labels[clientmodel.MetricNameLabel]; ok {
delete(labels, clientmodel.MetricNameLabel)
if _, ok := labels[model.MetricNameLabel]; ok {
delete(labels, model.MetricNameLabel)
}
rule.activeAlerts[fp] = &Alert{
Name: rule.name,
@ -231,9 +230,9 @@ func (rule *AlertingRule) String() string {
// resulting snippet is expected to be presented in a <pre> element, so that
// line breaks and other returned whitespace is respected.
func (rule *AlertingRule) HTMLSnippet(pathPrefix string) template.HTML {
alertMetric := clientmodel.Metric{
clientmodel.MetricNameLabel: alertMetricName,
alertNameLabel: clientmodel.LabelValue(rule.name),
alertMetric := model.Metric{
model.MetricNameLabel: alertMetricName,
alertNameLabel: model.LabelValue(rule.name),
}
s := fmt.Sprintf("ALERT <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(alertMetric.String()), rule.name)
s += fmt.Sprintf("\n IF <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(rule.vector.String()), rule.vector)
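A standalone sketch of the synthetic ALERTS series construction shown above, using only common/model types rather than the promql sample wrapper; the alert name, state, and labels are invented:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	alertLabels := model.LabelSet{"severity": "critical", "instance": "example.org:80"}

	// Copy the alert's labels into a fresh metric and add the synthetic ones.
	m := model.Metric{}
	for ln, lv := range alertLabels {
		m[ln] = lv
	}
	m[model.MetricNameLabel] = "ALERTS"
	m["alertname"] = "HighErrorRate"
	m["alertstate"] = "firing"

	sample := &model.Sample{
		Metric:    m,
		Timestamp: model.Now(),
		Value:     1, // active alerts are recorded with value 1
	}
	fmt.Println(sample)
}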


@ -26,7 +26,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/notification"
@ -81,7 +81,7 @@ type Rule interface {
// Name returns the name of the rule.
Name() string
// Eval evaluates the rule, including any associated recording or alerting actions.
eval(clientmodel.Timestamp, *promql.Engine) (promql.Vector, error)
eval(model.Time, *promql.Engine) (promql.Vector, error)
// String returns a human-readable string representation of the rule.
String() string
// HTMLSnippet returns a human-readable string representation of the rule,
@ -179,7 +179,7 @@ func (m *Manager) Stop() {
m.done <- true
}
func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp clientmodel.Timestamp) {
func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp model.Time) {
activeAlerts := rule.ActiveAlerts()
if len(activeAlerts) == 0 {
return
@ -199,7 +199,7 @@ func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp clientmo
}
tmplData := struct {
Labels map[string]string
Value clientmodel.SampleValue
Value model.SampleValue
}{
Labels: l,
Value: aa.Value,
@ -222,8 +222,8 @@ func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp clientmo
Summary: expand(rule.summary),
Description: expand(rule.description),
Runbook: rule.runbook,
Labels: aa.Labels.Merge(clientmodel.LabelSet{
alertNameLabel: clientmodel.LabelValue(rule.Name()),
Labels: aa.Labels.Merge(model.LabelSet{
alertNameLabel: model.LabelValue(rule.Name()),
}),
Value: aa.Value,
ActiveSince: aa.ActiveSince.Time(),
@ -235,7 +235,7 @@ func (m *Manager) queueAlertNotifications(rule *AlertingRule, timestamp clientmo
}
func (m *Manager) runIteration() {
now := clientmodel.Now()
now := model.Now()
wg := sync.WaitGroup{}
m.Lock()
@ -274,7 +274,7 @@ func (m *Manager) runIteration() {
}
for _, s := range vector {
m.sampleAppender.Append(&clientmodel.Sample{
m.sampleAppender.Append(&model.Sample{
Metric: s.Metric.Metric,
Value: s.Value,
Timestamp: s.Timestamp,
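queueAlertNotifications relies on model.LabelSet.Merge to attach the alert name. A quick illustration of its semantics with invented labels: the merge is non-destructive and the argument's values win on conflicting names:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	alertLabels := model.LabelSet{"severity": "page", "job": "api"}

	// As above: layer the alert name on top of the alert's own labels.
	merged := alertLabels.Merge(model.LabelSet{"alertname": "HighErrorRate"})

	fmt.Println(alertLabels) // unchanged
	fmt.Println(merged)      // includes alertname
}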


@ -20,7 +20,7 @@ import (
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql"
)
@ -55,7 +55,7 @@ func TestAlertingRule(t *testing.T) {
"HTTPRequestRateLow",
expr,
time.Minute,
clientmodel.LabelSet{"severity": "critical"},
model.LabelSet{"severity": "critical"},
"summary", "description", "runbook",
)
@ -95,7 +95,7 @@ func TestAlertingRule(t *testing.T) {
}
for i, test := range tests {
evalTime := clientmodel.Timestamp(0).Add(test.time)
evalTime := model.Time(0).Add(test.time)
res, err := rule.eval(evalTime, suite.QueryEngine())
if err != nil {
@ -131,7 +131,7 @@ func TestAlertingRule(t *testing.T) {
}
}
func annotateWithTime(lines []string, timestamp clientmodel.Timestamp) []string {
func annotateWithTime(lines []string, timestamp model.Time) []string {
annotatedLines := []string{}
for _, line := range lines {
annotatedLines = append(annotatedLines, fmt.Sprintf(line, timestamp))
@ -149,7 +149,7 @@ func TestTransferAlertState(t *testing.T) {
arule := AlertingRule{
name: "test",
activeAlerts: map[clientmodel.Fingerprint]*Alert{},
activeAlerts: map[model.Fingerprint]*Alert{},
}
aruleCopy := arule
@ -166,7 +166,7 @@ func TestTransferAlertState(t *testing.T) {
m.rules = []Rule{
&AlertingRule{
name: "test_other",
activeAlerts: map[clientmodel.Fingerprint]*Alert{},
activeAlerts: map[model.Fingerprint]*Alert{},
},
&aruleCopy,
}


@ -17,7 +17,7 @@ import (
"fmt"
"html/template"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/util/strutil"
@ -27,11 +27,11 @@ import (
type RecordingRule struct {
name string
vector promql.Expr
labels clientmodel.LabelSet
labels model.LabelSet
}
// NewRecordingRule returns a new recording rule.
func NewRecordingRule(name string, vector promql.Expr, labels clientmodel.LabelSet) *RecordingRule {
func NewRecordingRule(name string, vector promql.Expr, labels model.LabelSet) *RecordingRule {
return &RecordingRule{
name: name,
vector: vector,
@ -43,7 +43,7 @@ func NewRecordingRule(name string, vector promql.Expr, labels clientmodel.LabelS
func (rule RecordingRule) Name() string { return rule.name }
// eval evaluates the rule and then overrides the metric names and labels accordingly.
func (rule RecordingRule) eval(timestamp clientmodel.Timestamp, engine *promql.Engine) (promql.Vector, error) {
func (rule RecordingRule) eval(timestamp model.Time, engine *promql.Engine) (promql.Vector, error) {
query, err := engine.NewInstantQuery(rule.vector.String(), timestamp)
if err != nil {
return nil, err
@ -69,10 +69,10 @@ func (rule RecordingRule) eval(timestamp clientmodel.Timestamp, engine *promql.E
// Override the metric name and labels.
for _, sample := range vector {
sample.Metric.Set(clientmodel.MetricNameLabel, clientmodel.LabelValue(rule.name))
sample.Metric.Set(model.MetricNameLabel, model.LabelValue(rule.name))
for label, value := range rule.labels {
if value == "" {
sample.Metric.Delete(label)
sample.Metric.Del(label)
} else {
sample.Metric.Set(label, value)
}
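Because model.Metric is a plain map type, the renaming and relabelling done above through the copy-on-write wrapper's Set and Del amount to ordinary map operations. A sketch with invented rule and label names:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	// Result sample of a rule expression, before renaming.
	m := model.Metric{
		model.MetricNameLabel: "http_requests_total",
		"instance":            "example.org:80",
		"tmp":                 "drop-me",
	}

	ruleName := "job:http_requests:rate5m"
	ruleLabels := model.LabelSet{"tmp": "", "team": "frontend"}

	// Override the metric name, then apply the rule's labels:
	// an empty value deletes the label, anything else overwrites it.
	m[model.MetricNameLabel] = model.LabelValue(ruleName)
	for ln, lv := range ruleLabels {
		if lv == "" {
			delete(m, ln)
		} else {
			m[ln] = lv
		}
	}
	fmt.Println(m)
}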


@ -20,7 +20,7 @@ import (
"sync"
"sync/atomic"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
)
@ -59,8 +59,8 @@ type chunkDesc struct {
sync.Mutex
c chunk // nil if chunk is evicted.
rCnt int
chunkFirstTime clientmodel.Timestamp // Used if chunk is evicted.
chunkLastTime clientmodel.Timestamp // Used if chunk is evicted.
chunkFirstTime model.Time // Used if chunk is evicted.
chunkLastTime model.Time // Used if chunk is evicted.
// evictListElement is nil if the chunk is not in the evict list.
// evictListElement is _not_ protected by the chunkDesc mutex.
@ -123,7 +123,7 @@ func (cd *chunkDesc) refCount() int {
return cd.rCnt
}
func (cd *chunkDesc) firstTime() clientmodel.Timestamp {
func (cd *chunkDesc) firstTime() model.Time {
cd.Lock()
defer cd.Unlock()
@ -133,7 +133,7 @@ func (cd *chunkDesc) firstTime() clientmodel.Timestamp {
return cd.c.firstTime()
}
func (cd *chunkDesc) lastTime() clientmodel.Timestamp {
func (cd *chunkDesc) lastTime() model.Time {
cd.Lock()
defer cd.Unlock()
@ -164,7 +164,7 @@ func (cd *chunkDesc) isEvicted() bool {
return cd.c == nil
}
func (cd *chunkDesc) contains(t clientmodel.Timestamp) bool {
func (cd *chunkDesc) contains(t model.Time) bool {
return !t.Before(cd.firstTime()) && !t.After(cd.lastTime())
}
@ -217,7 +217,7 @@ type chunk interface {
// the relevant one and discard the original chunk.
add(sample *metric.SamplePair) []chunk
clone() chunk
firstTime() clientmodel.Timestamp
firstTime() model.Time
newIterator() chunkIterator
marshal(io.Writer) error
unmarshal(io.Reader) error
@ -232,24 +232,24 @@ type chunkIterator interface {
// length returns the number of samples in the chunk.
length() int
// Gets the timestamp of the n-th sample in the chunk.
timestampAtIndex(int) clientmodel.Timestamp
timestampAtIndex(int) model.Time
// Gets the last timestamp in the chunk.
lastTimestamp() clientmodel.Timestamp
lastTimestamp() model.Time
// Gets the sample value of the n-th sample in the chunk.
sampleValueAtIndex(int) clientmodel.SampleValue
sampleValueAtIndex(int) model.SampleValue
// Gets the last sample value in the chunk.
lastSampleValue() clientmodel.SampleValue
lastSampleValue() model.SampleValue
// Gets the two values that are immediately adjacent to a given time. In
// case a value exists at precisely the given time, only that single
// value is returned. Only the first or last value is returned (as a
// single value), if the given time is before or after the first or last
// value, respectively.
valueAtTime(clientmodel.Timestamp) metric.Values
valueAtTime(model.Time) metric.Values
// Gets all values contained within a given interval.
rangeValues(metric.Interval) metric.Values
// Whether a given timestamp is contained between first and last value
// in the chunk.
contains(clientmodel.Timestamp) bool
contains(model.Time) bool
// values returns a channel, from which all sample values in the chunk
// can be received in order. The channel is closed after the last
// one. It is generally not safe to mutate the chunk while the channel


@ -37,7 +37,7 @@ import (
"io"
"sync"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
)
@ -150,9 +150,9 @@ func decodeString(b byteReader) (string, error) {
return string(buf), nil
}
// A Metric is a clientmodel.Metric that implements
// A Metric is a model.Metric that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler.
type Metric clientmodel.Metric
type Metric model.Metric
// MarshalBinary implements encoding.BinaryMarshaler.
func (m Metric) MarshalBinary() ([]byte, error) {
@ -196,16 +196,16 @@ func (m *Metric) UnmarshalFromReader(r byteReader) error {
if err != nil {
return err
}
(*m)[clientmodel.LabelName(ln)] = clientmodel.LabelValue(lv)
(*m)[model.LabelName(ln)] = model.LabelValue(lv)
}
return nil
}
// A Fingerprint is a clientmodel.Fingerprint that implements
// A Fingerprint is a model.Fingerprint that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. The implementation
// depends on clientmodel.Fingerprint to be convertible to uint64. It encodes
// depends on model.Fingerprint to be convertible to uint64. It encodes
// the fingerprint as a big-endian uint64.
type Fingerprint clientmodel.Fingerprint
type Fingerprint model.Fingerprint
// MarshalBinary implements encoding.BinaryMarshaler.
func (fp Fingerprint) MarshalBinary() ([]byte, error) {
@ -220,10 +220,10 @@ func (fp *Fingerprint) UnmarshalBinary(buf []byte) error {
return nil
}
// FingerprintSet is a map[clientmodel.Fingerprint]struct{} that
// FingerprintSet is a map[model.Fingerprint]struct{} that
// implements encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its
// binary form is identical to that of Fingerprints.
type FingerprintSet map[clientmodel.Fingerprint]struct{}
type FingerprintSet map[model.Fingerprint]struct{}
// MarshalBinary implements encoding.BinaryMarshaler.
func (fps FingerprintSet) MarshalBinary() ([]byte, error) {
@ -247,15 +247,15 @@ func (fps *FingerprintSet) UnmarshalBinary(buf []byte) error {
*fps = make(FingerprintSet, numFPs)
for i := 0; i < int(numFPs); i++ {
(*fps)[clientmodel.Fingerprint(binary.BigEndian.Uint64(buf[offset+i*8:]))] = struct{}{}
(*fps)[model.Fingerprint(binary.BigEndian.Uint64(buf[offset+i*8:]))] = struct{}{}
}
return nil
}
// Fingerprints is a clientmodel.Fingerprints that implements
// Fingerprints is a model.Fingerprints that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its binary form is
// identical to that of FingerprintSet.
type Fingerprints clientmodel.Fingerprints
type Fingerprints model.Fingerprints
// MarshalBinary implements encoding.BinaryMarshaler.
func (fps Fingerprints) MarshalBinary() ([]byte, error) {
@ -277,7 +277,7 @@ func (fps *Fingerprints) UnmarshalBinary(buf []byte) error {
*fps = make(Fingerprints, numFPs)
for i := range *fps {
(*fps)[i] = clientmodel.Fingerprint(binary.BigEndian.Uint64(buf[offset+i*8:]))
(*fps)[i] = model.Fingerprint(binary.BigEndian.Uint64(buf[offset+i*8:]))
}
return nil
}
@ -309,14 +309,14 @@ func (lp *LabelPair) UnmarshalBinary(buf []byte) error {
if err != nil {
return err
}
lp.Name = clientmodel.LabelName(n)
lp.Value = clientmodel.LabelValue(v)
lp.Name = model.LabelName(n)
lp.Value = model.LabelValue(v)
return nil
}
// LabelName is a clientmodel.LabelName that implements
// LabelName is a model.LabelName that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler.
type LabelName clientmodel.LabelName
type LabelName model.LabelName
// MarshalBinary implements encoding.BinaryMarshaler.
func (l LabelName) MarshalBinary() ([]byte, error) {
@ -338,10 +338,10 @@ func (l *LabelName) UnmarshalBinary(buf []byte) error {
return nil
}
// LabelValueSet is a map[clientmodel.LabelValue]struct{} that implements
// LabelValueSet is a map[model.LabelValue]struct{} that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its binary form is
// identical to that of LabelValues.
type LabelValueSet map[clientmodel.LabelValue]struct{}
type LabelValueSet map[model.LabelValue]struct{}
// MarshalBinary implements encoding.BinaryMarshaler.
func (vs LabelValueSet) MarshalBinary() ([]byte, error) {
@ -371,15 +371,15 @@ func (vs *LabelValueSet) UnmarshalBinary(buf []byte) error {
if err != nil {
return err
}
(*vs)[clientmodel.LabelValue(v)] = struct{}{}
(*vs)[model.LabelValue(v)] = struct{}{}
}
return nil
}
// LabelValues is a clientmodel.LabelValues that implements
// LabelValues is a model.LabelValues that implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. Its binary form is
// identical to that of LabelValueSet.
type LabelValues clientmodel.LabelValues
type LabelValues model.LabelValues
// MarshalBinary implements encoding.BinaryMarshaler.
func (vs LabelValues) MarshalBinary() ([]byte, error) {
@ -409,7 +409,7 @@ func (vs *LabelValues) UnmarshalBinary(buf []byte) error {
if err != nil {
return err
}
(*vs)[i] = clientmodel.LabelValue(v)
(*vs)[i] = model.LabelValue(v)
}
return nil
}
@ -417,7 +417,7 @@ func (vs *LabelValues) UnmarshalBinary(buf []byte) error {
// TimeRange is used to define a time range and implements
// encoding.BinaryMarshaler and encoding.BinaryUnmarshaler.
type TimeRange struct {
First, Last clientmodel.Timestamp
First, Last model.Time
}
// MarshalBinary implements encoding.BinaryMarshaler.
@ -443,7 +443,7 @@ func (tr *TimeRange) UnmarshalBinary(buf []byte) error {
if err != nil {
return err
}
tr.First = clientmodel.Timestamp(first)
tr.Last = clientmodel.Timestamp(last)
tr.First = model.Time(first)
tr.Last = model.Time(last)
return nil
}
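The Fingerprint codec described above is just a big-endian uint64. A minimal sketch of the round trip outside the codable package, using an arbitrary metric to obtain a fingerprint:

package main

import (
	"encoding/binary"
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	fp := model.Metric{model.MetricNameLabel: "up", "job": "node"}.Fingerprint()

	// Encode as a big-endian uint64, as codable.Fingerprint does.
	buf := make([]byte, 8)
	binary.BigEndian.PutUint64(buf, uint64(fp))

	// Decode back into a model.Fingerprint.
	decoded := model.Fingerprint(binary.BigEndian.Uint64(buf))
	fmt.Println(fp == decoded, decoded) // true <16-char hex fingerprint>
}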


@ -23,7 +23,7 @@ import (
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/local/codable"
"github.com/prometheus/prometheus/storage/local/index"
@ -34,12 +34,12 @@ import (
// an error or because the persistence was dirty from the start). Not goroutine
// safe. Only call before anything else is running (except index processing
// queue as started by newPersistence).
func (p *persistence) recoverFromCrash(fingerprintToSeries map[clientmodel.Fingerprint]*memorySeries) error {
func (p *persistence) recoverFromCrash(fingerprintToSeries map[model.Fingerprint]*memorySeries) error {
// TODO(beorn): We need proper tests for the crash recovery.
log.Warn("Starting crash recovery. Prometheus is inoperational until complete.")
log.Warn("To avoid crash recovery in the future, shut down Prometheus with SIGTERM or a HTTP POST to /-/quit.")
fpsSeen := map[clientmodel.Fingerprint]struct{}{}
fpsSeen := map[model.Fingerprint]struct{}{}
count := 0
seriesDirNameFmt := fmt.Sprintf("%%0%dx", seriesDirNameLen)
@ -171,9 +171,9 @@ func (p *persistence) recoverFromCrash(fingerprintToSeries map[clientmodel.Finge
// be found there, it is moved into the orphaned directory.
func (p *persistence) sanitizeSeries(
dirname string, fi os.FileInfo,
fingerprintToSeries map[clientmodel.Fingerprint]*memorySeries,
fingerprintToSeries map[model.Fingerprint]*memorySeries,
fpm fpMappings,
) (clientmodel.Fingerprint, bool) {
) (model.Fingerprint, bool) {
filename := path.Join(dirname, fi.Name())
purge := func() {
var err error
@ -194,14 +194,16 @@ func (p *persistence) sanitizeSeries(
}
}
var fp clientmodel.Fingerprint
var fp model.Fingerprint
var err error
if len(fi.Name()) != fpLen-seriesDirNameLen+len(seriesFileSuffix) ||
!strings.HasSuffix(fi.Name(), seriesFileSuffix) {
log.Warnf("Unexpected series file name %s.", filename)
purge()
return fp, false
}
if err := fp.LoadFromString(path.Base(dirname) + fi.Name()[:fpLen-seriesDirNameLen]); err != nil {
if fp, err = model.FingerprintFromString(path.Base(dirname) + fi.Name()[:fpLen-seriesDirNameLen]); err != nil {
log.Warnf("Error parsing file name %s: %s", filename, err)
purge()
return fp, false
@ -353,8 +355,8 @@ func (p *persistence) sanitizeSeries(
}
func (p *persistence) cleanUpArchiveIndexes(
fpToSeries map[clientmodel.Fingerprint]*memorySeries,
fpsSeen map[clientmodel.Fingerprint]struct{},
fpToSeries map[model.Fingerprint]*memorySeries,
fpsSeen map[model.Fingerprint]struct{},
fpm fpMappings,
) error {
log.Info("Cleaning up archive indexes.")
@ -369,17 +371,17 @@ func (p *persistence) cleanUpArchiveIndexes(
if err := kv.Key(&fp); err != nil {
return err
}
_, fpSeen := fpsSeen[clientmodel.Fingerprint(fp)]
_, fpSeen := fpsSeen[model.Fingerprint(fp)]
inMemory := false
if fpSeen {
_, inMemory = fpToSeries[clientmodel.Fingerprint(fp)]
_, inMemory = fpToSeries[model.Fingerprint(fp)]
}
if !fpSeen || inMemory {
if inMemory {
log.Warnf("Archive clean-up: Fingerprint %v is not archived. Purging from archive indexes.", clientmodel.Fingerprint(fp))
log.Warnf("Archive clean-up: Fingerprint %v is not archived. Purging from archive indexes.", model.Fingerprint(fp))
}
if !fpSeen {
log.Warnf("Archive clean-up: Fingerprint %v is unknown. Purging from archive indexes.", clientmodel.Fingerprint(fp))
log.Warnf("Archive clean-up: Fingerprint %v is unknown. Purging from archive indexes.", model.Fingerprint(fp))
}
// It's fine if the fp is not in the archive indexes.
if _, err := p.archivedFingerprintToMetrics.Delete(fp); err != nil {
@ -393,7 +395,7 @@ func (p *persistence) cleanUpArchiveIndexes(
if err := kv.Value(&m); err != nil {
return err
}
maybeAddMapping(clientmodel.Fingerprint(fp), clientmodel.Metric(m), fpm)
maybeAddMapping(model.Fingerprint(fp), model.Metric(m), fpm)
// Make sure it is in timerange index, too.
has, err := p.archivedFingerprintToTimeRange.Has(fp)
if err != nil {
@ -407,12 +409,12 @@ func (p *persistence) cleanUpArchiveIndexes(
if _, err := p.archivedFingerprintToMetrics.Delete(fp); err != nil {
return err
}
cds, err := p.loadChunkDescs(clientmodel.Fingerprint(fp), 0)
cds, err := p.loadChunkDescs(model.Fingerprint(fp), 0)
if err != nil {
return err
}
series := newMemorySeries(clientmodel.Metric(m), cds, p.seriesFileModTime(clientmodel.Fingerprint(fp)))
fpToSeries[clientmodel.Fingerprint(fp)] = series
series := newMemorySeries(model.Metric(m), cds, p.seriesFileModTime(model.Fingerprint(fp)))
fpToSeries[model.Fingerprint(fp)] = series
return nil
}); err != nil {
return err
@ -450,7 +452,7 @@ func (p *persistence) cleanUpArchiveIndexes(
}
func (p *persistence) rebuildLabelIndexes(
fpToSeries map[clientmodel.Fingerprint]*memorySeries,
fpToSeries map[model.Fingerprint]*memorySeries,
) error {
count := 0
log.Info("Rebuilding label indexes.")
@ -472,7 +474,7 @@ func (p *persistence) rebuildLabelIndexes(
if err := kv.Value(&m); err != nil {
return err
}
p.indexMetric(clientmodel.Fingerprint(fp), clientmodel.Metric(m))
p.indexMetric(model.Fingerprint(fp), model.Metric(m))
count++
if count%10000 == 0 {
log.Infof("%d metrics queued for indexing.", count)
@ -486,7 +488,7 @@ func (p *persistence) rebuildLabelIndexes(
}
// maybeAddMapping adds a fingerprint mapping to fpm if the FastFingerprint of m is different from fp.
func maybeAddMapping(fp clientmodel.Fingerprint, m clientmodel.Metric, fpm fpMappings) {
func maybeAddMapping(fp model.Fingerprint, m model.Metric, fpm fpMappings) {
if rawFP := m.FastFingerprint(); rawFP != fp {
log.Warnf(
"Metric %v with fingerprint %v is mapped from raw fingerprint %v.",
@ -495,7 +497,7 @@ func maybeAddMapping(fp clientmodel.Fingerprint, m clientmodel.Metric, fpm fpMap
if mappedFPs, ok := fpm[rawFP]; ok {
mappedFPs[metricToUniqueString(m)] = fp
} else {
fpm[rawFP] = map[string]clientmodel.Fingerprint{
fpm[rawFP] = map[string]model.Fingerprint{
metricToUniqueString(m): fp,
}
}
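The LoadFromString to FingerprintFromString change above boils down to parsing the hex form of a fingerprint. A sketch with a made-up 16-character hex value shaped like the series directory name plus file name:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/common/model"
)

func main() {
	// Invented values: the first two hex characters play the role of the
	// series directory name, the remaining fourteen the file name stem.
	const dirName, fileStem = "a1", "b2c3d4e5f6a7b8"

	fp, err := model.FingerprintFromString(dirName + fileStem)
	if err != nil {
		log.Fatalf("parsing fingerprint: %s", err)
	}
	fmt.Println(fp)                              // 16-character hex form
	fmt.Println(fp.String() == dirName+fileStem) // round-trips: true
}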


@ -20,7 +20,7 @@ import (
"math"
"sort"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
)
@ -112,7 +112,7 @@ func (c deltaEncodedChunk) add(s *metric.SamplePair) []chunk {
// int->float.
nvb = d4
nInt = false
} else if !isInt && vb == d4 && baseValue+clientmodel.SampleValue(float32(dv)) != s.Value {
} else if !isInt && vb == d4 && baseValue+model.SampleValue(float32(dv)) != s.Value {
// float32->float64.
nvb = d8
} else {
@ -189,7 +189,7 @@ func (c deltaEncodedChunk) clone() chunk {
}
// firstTime implements chunk.
func (c deltaEncodedChunk) firstTime() clientmodel.Timestamp {
func (c deltaEncodedChunk) firstTime() model.Time {
return c.baseTime()
}
@ -255,12 +255,12 @@ func (c deltaEncodedChunk) isInt() bool {
return c[deltaHeaderIsIntOffset] == 1
}
func (c deltaEncodedChunk) baseTime() clientmodel.Timestamp {
return clientmodel.Timestamp(binary.LittleEndian.Uint64(c[deltaHeaderBaseTimeOffset:]))
func (c deltaEncodedChunk) baseTime() model.Time {
return model.Time(binary.LittleEndian.Uint64(c[deltaHeaderBaseTimeOffset:]))
}
func (c deltaEncodedChunk) baseValue() clientmodel.SampleValue {
return clientmodel.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(c[deltaHeaderBaseValueOffset:])))
func (c deltaEncodedChunk) baseValue() model.SampleValue {
return model.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(c[deltaHeaderBaseValueOffset:])))
}
func (c deltaEncodedChunk) sampleSize() int {
@ -278,8 +278,8 @@ func (c deltaEncodedChunk) len() int {
type deltaEncodedChunkIterator struct {
c deltaEncodedChunk
len int
baseT clientmodel.Timestamp
baseV clientmodel.SampleValue
baseT model.Time
baseV model.SampleValue
tBytes, vBytes deltaBytes
isInt bool
}
@ -288,7 +288,7 @@ type deltaEncodedChunkIterator struct {
func (it *deltaEncodedChunkIterator) length() int { return it.len }
// valueAtTime implements chunkIterator.
func (it *deltaEncodedChunkIterator) valueAtTime(t clientmodel.Timestamp) metric.Values {
func (it *deltaEncodedChunkIterator) valueAtTime(t model.Time) metric.Values {
i := sort.Search(it.len, func(i int) bool {
return !it.timestampAtIndex(i).Before(t)
})
@ -350,7 +350,7 @@ func (it *deltaEncodedChunkIterator) rangeValues(in metric.Interval) metric.Valu
}
// contains implements chunkIterator.
func (it *deltaEncodedChunkIterator) contains(t clientmodel.Timestamp) bool {
func (it *deltaEncodedChunkIterator) contains(t model.Time) bool {
return !t.Before(it.baseT) && !t.After(it.timestampAtIndex(it.len-1))
}
@ -370,31 +370,31 @@ func (it *deltaEncodedChunkIterator) values() <-chan *metric.SamplePair {
}
// timestampAtIndex implements chunkIterator.
func (it *deltaEncodedChunkIterator) timestampAtIndex(idx int) clientmodel.Timestamp {
func (it *deltaEncodedChunkIterator) timestampAtIndex(idx int) model.Time {
offset := deltaHeaderBytes + idx*int(it.tBytes+it.vBytes)
switch it.tBytes {
case d1:
return it.baseT + clientmodel.Timestamp(uint8(it.c[offset]))
return it.baseT + model.Time(uint8(it.c[offset]))
case d2:
return it.baseT + clientmodel.Timestamp(binary.LittleEndian.Uint16(it.c[offset:]))
return it.baseT + model.Time(binary.LittleEndian.Uint16(it.c[offset:]))
case d4:
return it.baseT + clientmodel.Timestamp(binary.LittleEndian.Uint32(it.c[offset:]))
return it.baseT + model.Time(binary.LittleEndian.Uint32(it.c[offset:]))
case d8:
// Take absolute value for d8.
return clientmodel.Timestamp(binary.LittleEndian.Uint64(it.c[offset:]))
return model.Time(binary.LittleEndian.Uint64(it.c[offset:]))
default:
panic("invalid number of bytes for time delta")
}
}
// lastTimestamp implements chunkIterator.
func (it *deltaEncodedChunkIterator) lastTimestamp() clientmodel.Timestamp {
func (it *deltaEncodedChunkIterator) lastTimestamp() model.Time {
return it.timestampAtIndex(it.len - 1)
}
// sampleValueAtIndex implements chunkIterator.
func (it *deltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmodel.SampleValue {
func (it *deltaEncodedChunkIterator) sampleValueAtIndex(idx int) model.SampleValue {
offset := deltaHeaderBytes + idx*int(it.tBytes+it.vBytes) + int(it.tBytes)
if it.isInt {
@ -402,11 +402,11 @@ func (it *deltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmodel.Sam
case d0:
return it.baseV
case d1:
return it.baseV + clientmodel.SampleValue(int8(it.c[offset]))
return it.baseV + model.SampleValue(int8(it.c[offset]))
case d2:
return it.baseV + clientmodel.SampleValue(int16(binary.LittleEndian.Uint16(it.c[offset:])))
return it.baseV + model.SampleValue(int16(binary.LittleEndian.Uint16(it.c[offset:])))
case d4:
return it.baseV + clientmodel.SampleValue(int32(binary.LittleEndian.Uint32(it.c[offset:])))
return it.baseV + model.SampleValue(int32(binary.LittleEndian.Uint32(it.c[offset:])))
// No d8 for ints.
default:
panic("invalid number of bytes for integer delta")
@ -414,10 +414,10 @@ func (it *deltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmodel.Sam
} else {
switch it.vBytes {
case d4:
return it.baseV + clientmodel.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(it.c[offset:])))
return it.baseV + model.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(it.c[offset:])))
case d8:
// Take absolute value for d8.
return clientmodel.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(it.c[offset:])))
return model.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(it.c[offset:])))
default:
panic("invalid number of bytes for floating point delta")
}
@ -425,6 +425,6 @@ func (it *deltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmodel.Sam
}
// lastSampleValue implements chunkIterator.
func (it *deltaEncodedChunkIterator) lastSampleValue() clientmodel.SampleValue {
func (it *deltaEncodedChunkIterator) lastSampleValue() model.SampleValue {
return it.sampleValueAtIndex(it.len - 1)
}
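The float32 to float64 promotion check in add can be exercised on its own. A sketch, with arbitrary values, of the precision test performed before widening the value encoding to eight bytes:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	base := model.SampleValue(1)
	sample := model.SampleValue(1.1)
	delta := sample - base

	// If re-applying the delta at float32 precision does not reproduce the
	// sample exactly, the chunk must switch the value encoding to d8.
	needsFloat64 := base+model.SampleValue(float32(delta)) != sample
	fmt.Println(needsFloat64) // true for these values
}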


@ -16,7 +16,7 @@ package local
import (
"math"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
type deltaBytes byte
@ -29,7 +29,7 @@ const (
d8 deltaBytes = 8
)
func bytesNeededForUnsignedTimestampDelta(deltaT clientmodel.Timestamp) deltaBytes {
func bytesNeededForUnsignedTimestampDelta(deltaT model.Time) deltaBytes {
switch {
case deltaT > math.MaxUint32:
return d8
@ -42,7 +42,7 @@ func bytesNeededForUnsignedTimestampDelta(deltaT clientmodel.Timestamp) deltaByt
}
}
func bytesNeededForSignedTimestampDelta(deltaT clientmodel.Timestamp) deltaBytes {
func bytesNeededForSignedTimestampDelta(deltaT model.Time) deltaBytes {
switch {
case deltaT > math.MaxInt32 || deltaT < math.MinInt32:
return d8
@ -55,7 +55,7 @@ func bytesNeededForSignedTimestampDelta(deltaT clientmodel.Timestamp) deltaBytes
}
}
func bytesNeededForIntegerSampleValueDelta(deltaV clientmodel.SampleValue) deltaBytes {
func bytesNeededForIntegerSampleValueDelta(deltaV model.SampleValue) deltaBytes {
switch {
case deltaV < math.MinInt32 || deltaV > math.MaxInt32:
return d8
@ -78,7 +78,7 @@ func max(a, b deltaBytes) deltaBytes {
}
// isInt64 returns true if v can be represented as an int64.
func isInt64(v clientmodel.SampleValue) bool {
func isInt64(v model.SampleValue) bool {
// Note: Using math.Modf is slower than the conversion approach below.
return clientmodel.SampleValue(int64(v)) == v
return model.SampleValue(int64(v)) == v
}
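The isInt64 helper above relies on a round-trip conversion. A tiny self-contained check that mirrors it with arbitrary sample values:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// isInt64 mirrors the helper above: true if v survives an int64 round trip.
func isInt64(v model.SampleValue) bool {
	return model.SampleValue(int64(v)) == v
}

func main() {
	fmt.Println(isInt64(3), isInt64(3.5)) // true false
}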


@ -20,7 +20,7 @@ import (
"math"
"sort"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
)
@ -105,10 +105,10 @@ func (c doubleDeltaEncodedChunk) add(s *metric.SamplePair) []chunk {
return []chunk{&c, overflowChunks[0]}
}
projectedTime := c.baseTime() + clientmodel.Timestamp(c.len())*c.baseTimeDelta()
projectedTime := c.baseTime() + model.Time(c.len())*c.baseTimeDelta()
ddt := s.Timestamp - projectedTime
projectedValue := c.baseValue() + clientmodel.SampleValue(c.len())*c.baseValueDelta()
projectedValue := c.baseValue() + model.SampleValue(c.len())*c.baseValueDelta()
ddv := s.Value - projectedValue
ntb, nvb, nInt := tb, vb, c.isInt()
@ -118,7 +118,7 @@ func (c doubleDeltaEncodedChunk) add(s *metric.SamplePair) []chunk {
// int->float.
nvb = d4
nInt = false
} else if !c.isInt() && vb == d4 && projectedValue+clientmodel.SampleValue(float32(ddv)) != s.Value {
} else if !c.isInt() && vb == d4 && projectedValue+model.SampleValue(float32(ddv)) != s.Value {
// float32->float64.
nvb = d8
} else {
@ -195,7 +195,7 @@ func (c doubleDeltaEncodedChunk) clone() chunk {
}
// firstTime implements chunk.
func (c doubleDeltaEncodedChunk) firstTime() clientmodel.Timestamp {
func (c doubleDeltaEncodedChunk) firstTime() model.Time {
return c.baseTime()
}
@ -251,16 +251,16 @@ func (c *doubleDeltaEncodedChunk) unmarshalFromBuf(buf []byte) {
// encoding implements chunk.
func (c doubleDeltaEncodedChunk) encoding() chunkEncoding { return doubleDelta }
func (c doubleDeltaEncodedChunk) baseTime() clientmodel.Timestamp {
return clientmodel.Timestamp(
func (c doubleDeltaEncodedChunk) baseTime() model.Time {
return model.Time(
binary.LittleEndian.Uint64(
c[doubleDeltaHeaderBaseTimeOffset:],
),
)
}
func (c doubleDeltaEncodedChunk) baseValue() clientmodel.SampleValue {
return clientmodel.SampleValue(
func (c doubleDeltaEncodedChunk) baseValue() model.SampleValue {
return model.SampleValue(
math.Float64frombits(
binary.LittleEndian.Uint64(
c[doubleDeltaHeaderBaseValueOffset:],
@ -269,22 +269,22 @@ func (c doubleDeltaEncodedChunk) baseValue() clientmodel.SampleValue {
)
}
func (c doubleDeltaEncodedChunk) baseTimeDelta() clientmodel.Timestamp {
func (c doubleDeltaEncodedChunk) baseTimeDelta() model.Time {
if len(c) < doubleDeltaHeaderBaseTimeDeltaOffset+8 {
return 0
}
return clientmodel.Timestamp(
return model.Time(
binary.LittleEndian.Uint64(
c[doubleDeltaHeaderBaseTimeDeltaOffset:],
),
)
}
func (c doubleDeltaEncodedChunk) baseValueDelta() clientmodel.SampleValue {
func (c doubleDeltaEncodedChunk) baseValueDelta() model.SampleValue {
if len(c) < doubleDeltaHeaderBaseValueDeltaOffset+8 {
return 0
}
return clientmodel.SampleValue(
return model.SampleValue(
math.Float64frombits(
binary.LittleEndian.Uint64(
c[doubleDeltaHeaderBaseValueDeltaOffset:],
@ -384,8 +384,8 @@ func (c doubleDeltaEncodedChunk) addSecondSample(s *metric.SamplePair, tb, vb de
type doubleDeltaEncodedChunkIterator struct {
c doubleDeltaEncodedChunk
len int
baseT, baseΔT clientmodel.Timestamp
baseV, baseΔV clientmodel.SampleValue
baseT, baseΔT model.Time
baseV, baseΔV model.SampleValue
tBytes, vBytes deltaBytes
isInt bool
}
@ -394,7 +394,7 @@ type doubleDeltaEncodedChunkIterator struct {
func (it *doubleDeltaEncodedChunkIterator) length() int { return it.len }
// valueAtTime implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) valueAtTime(t clientmodel.Timestamp) metric.Values {
func (it *doubleDeltaEncodedChunkIterator) valueAtTime(t model.Time) metric.Values {
i := sort.Search(it.len, func(i int) bool {
return !it.timestampAtIndex(i).Before(t)
})
@ -456,7 +456,7 @@ func (it *doubleDeltaEncodedChunkIterator) rangeValues(in metric.Interval) metri
}
// contains implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) contains(t clientmodel.Timestamp) bool {
func (it *doubleDeltaEncodedChunkIterator) contains(t model.Time) bool {
return !t.Before(it.baseT) && !t.After(it.timestampAtIndex(it.len-1))
}
@ -476,7 +476,7 @@ func (it *doubleDeltaEncodedChunkIterator) values() <-chan *metric.SamplePair {
}
// timestampAtIndex implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) timestampAtIndex(idx int) clientmodel.Timestamp {
func (it *doubleDeltaEncodedChunkIterator) timestampAtIndex(idx int) model.Time {
if idx == 0 {
return it.baseT
}
@ -494,31 +494,31 @@ func (it *doubleDeltaEncodedChunkIterator) timestampAtIndex(idx int) clientmodel
switch it.tBytes {
case d1:
return it.baseT +
clientmodel.Timestamp(idx)*it.baseΔT +
clientmodel.Timestamp(int8(it.c[offset]))
model.Time(idx)*it.baseΔT +
model.Time(int8(it.c[offset]))
case d2:
return it.baseT +
clientmodel.Timestamp(idx)*it.baseΔT +
clientmodel.Timestamp(int16(binary.LittleEndian.Uint16(it.c[offset:])))
model.Time(idx)*it.baseΔT +
model.Time(int16(binary.LittleEndian.Uint16(it.c[offset:])))
case d4:
return it.baseT +
clientmodel.Timestamp(idx)*it.baseΔT +
clientmodel.Timestamp(int32(binary.LittleEndian.Uint32(it.c[offset:])))
model.Time(idx)*it.baseΔT +
model.Time(int32(binary.LittleEndian.Uint32(it.c[offset:])))
case d8:
// Take absolute value for d8.
return clientmodel.Timestamp(binary.LittleEndian.Uint64(it.c[offset:]))
return model.Time(binary.LittleEndian.Uint64(it.c[offset:]))
default:
panic("invalid number of bytes for time delta")
}
}
// lastTimestamp implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) lastTimestamp() clientmodel.Timestamp {
func (it *doubleDeltaEncodedChunkIterator) lastTimestamp() model.Time {
return it.timestampAtIndex(it.len - 1)
}
// sampleValueAtIndex implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmodel.SampleValue {
func (it *doubleDeltaEncodedChunkIterator) sampleValueAtIndex(idx int) model.SampleValue {
if idx == 0 {
return it.baseV
}
@ -537,19 +537,19 @@ func (it *doubleDeltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmod
switch it.vBytes {
case d0:
return it.baseV +
clientmodel.SampleValue(idx)*it.baseΔV
model.SampleValue(idx)*it.baseΔV
case d1:
return it.baseV +
clientmodel.SampleValue(idx)*it.baseΔV +
clientmodel.SampleValue(int8(it.c[offset]))
model.SampleValue(idx)*it.baseΔV +
model.SampleValue(int8(it.c[offset]))
case d2:
return it.baseV +
clientmodel.SampleValue(idx)*it.baseΔV +
clientmodel.SampleValue(int16(binary.LittleEndian.Uint16(it.c[offset:])))
model.SampleValue(idx)*it.baseΔV +
model.SampleValue(int16(binary.LittleEndian.Uint16(it.c[offset:])))
case d4:
return it.baseV +
clientmodel.SampleValue(idx)*it.baseΔV +
clientmodel.SampleValue(int32(binary.LittleEndian.Uint32(it.c[offset:])))
model.SampleValue(idx)*it.baseΔV +
model.SampleValue(int32(binary.LittleEndian.Uint32(it.c[offset:])))
// No d8 for ints.
default:
panic("invalid number of bytes for integer delta")
@ -558,11 +558,11 @@ func (it *doubleDeltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmod
switch it.vBytes {
case d4:
return it.baseV +
clientmodel.SampleValue(idx)*it.baseΔV +
clientmodel.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(it.c[offset:])))
model.SampleValue(idx)*it.baseΔV +
model.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(it.c[offset:])))
case d8:
// Take absolute value for d8.
return clientmodel.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(it.c[offset:])))
return model.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(it.c[offset:])))
default:
panic("invalid number of bytes for floating point delta")
}
@ -570,6 +570,6 @@ func (it *doubleDeltaEncodedChunkIterator) sampleValueAtIndex(idx int) clientmod
}
// lastSampleValue implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) lastSampleValue() clientmodel.SampleValue {
func (it *doubleDeltaEncodedChunkIterator) lastSampleValue() model.SampleValue {
return it.sampleValueAtIndex(it.len - 1)
}
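
The double-delta chunk stores a base time/value, a base delta, and a small per-sample correction; timestampAtIndex and sampleValueAtIndex above reconstruct sample i as base + i*baseDelta + correction. A tiny standalone sketch of that reconstruction, glossing over how the first two samples and the byte widths are actually encoded:

package main

import "fmt"

// reconstruct mirrors timestampAtIndex/sampleValueAtIndex above:
// sample i = base + i*baseDelta + ddeltas[i], where ddeltas holds the
// per-sample "delta of the delta" the chunk actually stores.
func reconstruct(base, baseDelta int64, ddeltas []int64) []int64 {
	out := make([]int64, len(ddeltas))
	for i, dd := range ddeltas {
		out[i] = base + int64(i)*baseDelta + dd
	}
	return out
}

func main() {
	// Samples at t = 1000, 1010, 1021, 1029 give baseDelta 10 and
	// double deltas 0, 0, +1, -1.
	fmt.Println(reconstruct(1000, 10, []int64{0, 0, 1, -1}))
	// Output: [1000 1010 1021 1029]
}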

View file

@ -20,7 +20,7 @@ import (
"os"
"path"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/local/codable"
"github.com/prometheus/prometheus/storage/metric"
@ -42,7 +42,7 @@ var (
)
// FingerprintMetricMapping is an in-memory map of fingerprints to metrics.
type FingerprintMetricMapping map[clientmodel.Fingerprint]clientmodel.Metric
type FingerprintMetricMapping map[model.Fingerprint]model.Metric
// FingerprintMetricIndex models a database mapping fingerprints to metrics.
type FingerprintMetricIndex struct {
@ -83,7 +83,7 @@ func (i *FingerprintMetricIndex) UnindexBatch(mapping FingerprintMetricMapping)
// fingerprint is not an error. In that case, (nil, false, nil) is returned.
//
// This method is goroutine-safe.
func (i *FingerprintMetricIndex) Lookup(fp clientmodel.Fingerprint) (metric clientmodel.Metric, ok bool, err error) {
func (i *FingerprintMetricIndex) Lookup(fp model.Fingerprint) (metric model.Metric, ok bool, err error) {
ok, err = i.Get(codable.Fingerprint(fp), (*codable.Metric)(&metric))
return
}
@ -105,7 +105,7 @@ func NewFingerprintMetricIndex(basePath string) (*FingerprintMetricIndex, error)
// LabelNameLabelValuesMapping is an in-memory map of label names to
// label values.
type LabelNameLabelValuesMapping map[clientmodel.LabelName]codable.LabelValueSet
type LabelNameLabelValuesMapping map[model.LabelName]codable.LabelValueSet
// LabelNameLabelValuesIndex is a KeyValueStore that maps existing label names
// to all label values stored for that label name.
@ -138,11 +138,11 @@ func (i *LabelNameLabelValuesIndex) IndexBatch(b LabelNameLabelValuesMapping) er
}
// Lookup looks up all label values for a given label name and returns them as
// clientmodel.LabelValues (which is a slice). Looking up a non-existing label
// model.LabelValues (which is a slice). Looking up a non-existing label
// name is not an error. In that case, (nil, false, nil) is returned.
//
// This method is goroutine-safe.
func (i *LabelNameLabelValuesIndex) Lookup(l clientmodel.LabelName) (values clientmodel.LabelValues, ok bool, err error) {
func (i *LabelNameLabelValuesIndex) Lookup(l model.LabelName) (values model.LabelValues, ok bool, err error) {
ok, err = i.Get(codable.LabelName(l), (*codable.LabelValues)(&values))
return
}
@ -152,10 +152,10 @@ func (i *LabelNameLabelValuesIndex) Lookup(l clientmodel.LabelName) (values clie
// (nil, false, nil) is returned.
//
// This method is goroutine-safe.
func (i *LabelNameLabelValuesIndex) LookupSet(l clientmodel.LabelName) (values map[clientmodel.LabelValue]struct{}, ok bool, err error) {
func (i *LabelNameLabelValuesIndex) LookupSet(l model.LabelName) (values map[model.LabelValue]struct{}, ok bool, err error) {
ok, err = i.Get(codable.LabelName(l), (*codable.LabelValueSet)(&values))
if values == nil {
values = map[clientmodel.LabelValue]struct{}{}
values = map[model.LabelValue]struct{}{}
}
return
}
@ -216,7 +216,7 @@ func (i *LabelPairFingerprintIndex) IndexBatch(m LabelPairFingerprintsMapping) e
// returned.
//
// This method is goroutine-safe.
func (i *LabelPairFingerprintIndex) Lookup(p metric.LabelPair) (fps clientmodel.Fingerprints, ok bool, err error) {
func (i *LabelPairFingerprintIndex) Lookup(p metric.LabelPair) (fps model.Fingerprints, ok bool, err error) {
ok, err = i.Get((codable.LabelPair)(p), (*codable.Fingerprints)(&fps))
return
}
@ -226,10 +226,10 @@ func (i *LabelPairFingerprintIndex) Lookup(p metric.LabelPair) (fps clientmodel.
// returned.
//
// This method is goroutine-safe.
func (i *LabelPairFingerprintIndex) LookupSet(p metric.LabelPair) (fps map[clientmodel.Fingerprint]struct{}, ok bool, err error) {
func (i *LabelPairFingerprintIndex) LookupSet(p metric.LabelPair) (fps map[model.Fingerprint]struct{}, ok bool, err error) {
ok, err = i.Get((codable.LabelPair)(p), (*codable.FingerprintSet)(&fps))
if fps == nil {
fps = map[clientmodel.Fingerprint]struct{}{}
fps = map[model.Fingerprint]struct{}{}
}
return
}
@ -266,7 +266,7 @@ type FingerprintTimeRangeIndex struct {
// returned.
//
// This method is goroutine-safe.
func (i *FingerprintTimeRangeIndex) Lookup(fp clientmodel.Fingerprint) (firstTime, lastTime clientmodel.Timestamp, ok bool, err error) {
func (i *FingerprintTimeRangeIndex) Lookup(fp model.Fingerprint) (firstTime, lastTime model.Time, ok bool, err error) {
var tr codable.TimeRange
ok, err = i.Get(codable.Fingerprint(fp), &tr)
return tr.First, tr.Last, ok, err
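
The index types above now key everything by model.Fingerprint and model.Metric. As a hedged illustration of what an IndexBatch payload for FingerprintMetricIndex might look like with the new types (hypothetical metrics, and a plain map standing in for index.FingerprintMetricMapping):

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	// Hypothetical metrics to index; map each metric's fast fingerprint
	// to the metric itself, using the new common/model types throughout.
	metrics := []model.Metric{
		{model.MetricNameLabel: "http_requests_total", "job": "api"},
		{model.MetricNameLabel: "http_requests_total", "job": "web"},
	}
	mapping := map[model.Fingerprint]model.Metric{}
	for _, m := range metrics {
		mapping[m.FastFingerprint()] = m
	}
	for fp, m := range mapping {
		fmt.Printf("%v -> %v\n", fp, m)
	}
}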

View file

@ -18,7 +18,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
)
@ -34,29 +34,29 @@ type Storage interface {
// processing.) The implementation might remove labels with empty value
// from the provided Sample as those labels are considered equivalent to
// a label not present at all.
Append(*clientmodel.Sample)
Append(*model.Sample)
// NewPreloader returns a new Preloader which allows preloading and pinning
// series data into memory for use within a query.
NewPreloader() Preloader
// MetricsForLabelMatchers returns the metrics from storage that satisfy the given
// label matchers. At least one label matcher must be specified that does not
// match the empty string.
MetricsForLabelMatchers(...*metric.LabelMatcher) map[clientmodel.Fingerprint]clientmodel.COWMetric
MetricsForLabelMatchers(...*metric.LabelMatcher) map[model.Fingerprint]model.COWMetric
// LastSamplePairForFingerprint returns the last sample pair for the
// provided fingerprint. If the respective time series does not exist or
// has an evicted head chunk, nil is returned.
LastSamplePairForFingerprint(clientmodel.Fingerprint) *metric.SamplePair
LastSamplePairForFingerprint(model.Fingerprint) *metric.SamplePair
// Get all of the label values that are associated with a given label name.
LabelValuesForLabelName(clientmodel.LabelName) clientmodel.LabelValues
LabelValuesForLabelName(model.LabelName) model.LabelValues
// Get the metric associated with the provided fingerprint.
MetricForFingerprint(clientmodel.Fingerprint) clientmodel.COWMetric
MetricForFingerprint(model.Fingerprint) model.COWMetric
// Construct an iterator for a given fingerprint.
// The iterator will never return samples older than retention time,
// relative to the time NewIterator was called.
NewIterator(clientmodel.Fingerprint) SeriesIterator
NewIterator(model.Fingerprint) SeriesIterator
// Drop all time series associated with the given fingerprints. This operation
// will not show up in the series operations metrics.
DropMetricsForFingerprints(...clientmodel.Fingerprint)
DropMetricsForFingerprints(...model.Fingerprint)
// Run the various maintenance loops in goroutines. Returns when the
// storage is ready to use. Keeps everything running in the background
// until Stop is called.
@ -81,7 +81,7 @@ type SeriesIterator interface {
// value is returned. Only the first or last value is returned (as a
// single value), if the given time is before or after the first or last
// value, respectively.
ValueAtTime(clientmodel.Timestamp) metric.Values
ValueAtTime(model.Time) metric.Values
// Gets the boundary values of an interval: the first and last value
// within a given interval.
BoundaryValues(metric.Interval) metric.Values
@ -94,8 +94,8 @@ type SeriesIterator interface {
// goroutine-safe.
type Preloader interface {
PreloadRange(
fp clientmodel.Fingerprint,
from clientmodel.Timestamp, through clientmodel.Timestamp,
fp model.Fingerprint,
from model.Time, through model.Time,
stalenessDelta time.Duration,
) error
// Close unpins any previously requested series data from memory.
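
Append now takes *model.Sample. A small sketch of what a caller of the new signature looks like; appendFn here is a stand-in for the Storage method, and the Sample literal assumes the common/model struct keeps the familiar Metric/Value/Timestamp fields:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// appendFn stands in for the Storage.Append signature after the switch:
// it now takes *model.Sample instead of *clientmodel.Sample.
type appendFn func(*model.Sample)

func main() {
	var received []*model.Sample
	var app appendFn = func(s *model.Sample) { received = append(received, s) }

	app(&model.Sample{
		Metric:    model.Metric{model.MetricNameLabel: "up", "job": "api"},
		Value:     1,
		Timestamp: model.Now(),
	})
	fmt.Println(len(received), received[0].Metric)
}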

View file

@ -3,7 +3,7 @@ package local
import (
"sync"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
// fingerprintLocker allows locking individual fingerprints. To limit the number
@ -33,11 +33,11 @@ func newFingerprintLocker(preallocatedMutexes int) *fingerprintLocker {
}
// Lock locks the given fingerprint.
func (l *fingerprintLocker) Lock(fp clientmodel.Fingerprint) {
func (l *fingerprintLocker) Lock(fp model.Fingerprint) {
l.fpMtxs[uint(fp)%l.numFpMtxs].Lock()
}
// Unlock unlocks the given fingerprint.
func (l *fingerprintLocker) Unlock(fp clientmodel.Fingerprint) {
func (l *fingerprintLocker) Unlock(fp model.Fingerprint) {
l.fpMtxs[uint(fp)%l.numFpMtxs].Unlock()
}
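
The locker hashes each fingerprint onto a fixed pool of mutexes by modulo, so memory use stays bounded no matter how many fingerprints are locked over time. A standalone sketch of the same idea (uint64 in place of model.Fingerprint), with a concurrent usage example:

package main

import (
	"fmt"
	"sync"
)

// fpLocker is a standalone sketch of the fingerprintLocker above: a fixed
// pool of mutexes, with every fingerprint hashed onto one of them by modulo.
type fpLocker struct {
	mtxs []sync.Mutex
}

func newFPLocker(n int) *fpLocker { return &fpLocker{mtxs: make([]sync.Mutex, n)} }

func (l *fpLocker) Lock(fp uint64)   { l.mtxs[fp%uint64(len(l.mtxs))].Lock() }
func (l *fpLocker) Unlock(fp uint64) { l.mtxs[fp%uint64(len(l.mtxs))].Unlock() }

func main() {
	locker := newFPLocker(16)
	var wg sync.WaitGroup
	for fp := uint64(0); fp < 64; fp++ {
		wg.Add(1)
		go func(fp uint64) {
			defer wg.Done()
			locker.Lock(fp) // distinct fingerprints may share a mutex slot
			// ... mutate the in-memory series for fp here ...
			locker.Unlock(fp)
		}(fp)
	}
	wg.Wait()
	fmt.Println("all fingerprints processed")
}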

View file

@ -4,7 +4,7 @@ import (
"sync"
"testing"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
func BenchmarkFingerprintLockerParallel(b *testing.B) {
@ -19,8 +19,8 @@ func BenchmarkFingerprintLockerParallel(b *testing.B) {
wg.Add(1)
go func(i int) {
for j := 0; j < numLockOps; j++ {
fp1 := clientmodel.Fingerprint(j % numFingerprints)
fp2 := clientmodel.Fingerprint(j%numFingerprints + numFingerprints)
fp1 := model.Fingerprint(j % numFingerprints)
fp2 := model.Fingerprint(j%numFingerprints + numFingerprints)
locker.Lock(fp1)
locker.Lock(fp2)
locker.Unlock(fp2)
@ -38,7 +38,7 @@ func BenchmarkFingerprintLockerSerial(b *testing.B) {
b.ResetTimer()
for i := 0; i < b.N; i++ {
fp := clientmodel.Fingerprint(i % numFingerprints)
fp := model.Fingerprint(i % numFingerprints)
locker.Lock(fp)
locker.Unlock(fp)
}
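
One detail worth keeping in mind when reading the paired Lock calls above: the mutexes are not reentrant, so a single goroutine must never lock two fingerprints that hash to the same slot. A small, purely illustrative helper to check that:

package main

import "fmt"

// sharesMutex reports whether two fingerprints would land on the same
// mutex in a pool of n mutexes under the modulo scheme sketched above.
func sharesMutex(fp1, fp2, n uint64) bool {
	return fp1%n == fp2%n
}

func main() {
	fmt.Println(sharesMutex(3, 3+16, 16)) // true: same slot, locking both would self-deadlock
	fmt.Println(sharesMutex(3, 3+10, 16)) // false: safe to hold both
}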

View file

@ -10,22 +10,22 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
const maxMappedFP = 1 << 20 // About 1M fingerprints reserved for mapping.
var separatorString = string([]byte{clientmodel.SeparatorByte})
var separatorString = string([]byte{model.SeparatorByte})
// fpMappings maps original fingerprints to a map of string representations of
// metrics to the truly unique fingerprint.
type fpMappings map[clientmodel.Fingerprint]map[string]clientmodel.Fingerprint
type fpMappings map[model.Fingerprint]map[string]model.Fingerprint
// fpMapper is used to map fingerprints in order to work around fingerprint
// collisions.
type fpMapper struct {
// highestMappedFP has to be aligned for atomic operations.
highestMappedFP clientmodel.Fingerprint
highestMappedFP model.Fingerprint
mtx sync.RWMutex // Protects mappings.
mappings fpMappings
@ -65,7 +65,7 @@ func newFPMapper(fpToSeries *seriesMap, p *persistence) (*fpMapper, error) {
//
// If an error is encountered, it is returned together with the unchanged raw
// fingerprint.
func (m *fpMapper) mapFP(fp clientmodel.Fingerprint, metric clientmodel.Metric) (clientmodel.Fingerprint, error) {
func (m *fpMapper) mapFP(fp model.Fingerprint, metric model.Metric) (model.Fingerprint, error) {
// First check if we are in the reserved FP space, in which case this is
// automatically a collision that has to be mapped.
if fp <= maxMappedFP {
@ -125,9 +125,9 @@ func (m *fpMapper) mapFP(fp clientmodel.Fingerprint, metric clientmodel.Metric)
// adds it to the collisions map if not yet there. In any case, it returns the
// truly unique fingerprint for the colliding metric.
func (m *fpMapper) maybeAddMapping(
fp clientmodel.Fingerprint,
collidingMetric clientmodel.Metric,
) (clientmodel.Fingerprint, error) {
fp model.Fingerprint,
collidingMetric model.Metric,
) (model.Fingerprint, error) {
ms := metricToUniqueString(collidingMetric)
m.mtx.RLock()
mappedFPs, ok := m.mappings[fp]
@ -153,7 +153,7 @@ func (m *fpMapper) maybeAddMapping(
}
// This is the first collision for fp.
mappedFP := m.nextMappedFP()
mappedFPs = map[string]clientmodel.Fingerprint{ms: mappedFP}
mappedFPs = map[string]model.Fingerprint{ms: mappedFP}
m.mtx.Lock()
m.mappings[fp] = mappedFPs
m.mappingsCounter.Inc()
@ -167,8 +167,8 @@ func (m *fpMapper) maybeAddMapping(
return mappedFP, err
}
func (m *fpMapper) nextMappedFP() clientmodel.Fingerprint {
mappedFP := clientmodel.Fingerprint(atomic.AddUint64((*uint64)(&m.highestMappedFP), 1))
func (m *fpMapper) nextMappedFP() model.Fingerprint {
mappedFP := model.Fingerprint(atomic.AddUint64((*uint64)(&m.highestMappedFP), 1))
if mappedFP > maxMappedFP {
panic(fmt.Errorf("more than %v fingerprints mapped in collision detection", maxMappedFP))
}
@ -192,7 +192,7 @@ func (m *fpMapper) Collect(ch chan<- prometheus.Metric) {
// FastFingerprint function, and its result is not suitable as a key for maps
// and indexes as it might become really large, causing a lot of hashing effort
// in maps and a lot of storage overhead in indexes.
func metricToUniqueString(m clientmodel.Metric) string {
func metricToUniqueString(m model.Metric) string {
parts := make([]string, 0, len(m))
for ln, lv := range m {
parts = append(parts, string(ln)+separatorString+string(lv))
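
metricToUniqueString builds a stable string key for a metric so that the mapper can tell genuinely different metrics apart when their fingerprints collide. A standalone sketch of that idea; the sorting step is an assumption made here for stability, and 0xff merely stands in for model.SeparatorByte:

package main

import (
	"fmt"
	"sort"
	"strings"
)

// uniqueString joins sorted "name<sep>value" pairs so that equal metrics
// always produce the same string, which can serve as a collision-map key.
func uniqueString(m map[string]string, sep string) string {
	parts := make([]string, 0, len(m))
	for ln, lv := range m {
		parts = append(parts, ln+sep+lv)
	}
	sort.Strings(parts) // map iteration order is random; sort for stability
	return strings.Join(parts, sep)
}

func main() {
	sep := string([]byte{0xff}) // stand-in for model.SeparatorByte
	a := map[string]string{"foo": "bar", "dings": "bumms"}
	b := map[string]string{"dings": "bumms", "foo": "bar"}
	fmt.Println(uniqueString(a, sep) == uniqueString(b, sep)) // true
}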

View file

@ -3,7 +3,7 @@ package local
import (
"testing"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
var (
@ -13,31 +13,31 @@ var (
// Note that fingerprints are set and not actually calculated.
// The collision detection is independent from the actually used
// fingerprinting algorithm.
fp1 = clientmodel.Fingerprint(maxMappedFP + 1)
fp2 = clientmodel.Fingerprint(maxMappedFP + 2)
fp3 = clientmodel.Fingerprint(1)
cm11 = clientmodel.Metric{
fp1 = model.Fingerprint(maxMappedFP + 1)
fp2 = model.Fingerprint(maxMappedFP + 2)
fp3 = model.Fingerprint(1)
cm11 = model.Metric{
"foo": "bar",
"dings": "bumms",
}
cm12 = clientmodel.Metric{
cm12 = model.Metric{
"bar": "foo",
}
cm13 = clientmodel.Metric{
cm13 = model.Metric{
"foo": "bar",
}
cm21 = clientmodel.Metric{
cm21 = model.Metric{
"foo": "bumms",
"dings": "bar",
}
cm22 = clientmodel.Metric{
cm22 = model.Metric{
"dings": "foo",
"bar": "bumms",
}
cm31 = clientmodel.Metric{
cm31 = model.Metric{
"bumms": "dings",
}
cm32 = clientmodel.Metric{
cm32 = model.Metric{
"bumms": "dings",
"bar": "foo",
}
@ -84,12 +84,12 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP {
if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
// The mapped cm12 is added to sm, too. That should not change the outcome.
sm.put(clientmodel.Fingerprint(1), &memorySeries{metric: cm12})
sm.put(model.Fingerprint(1), &memorySeries{metric: cm12})
gotFP, err = mapper.mapFP(fp1, cm11)
if err != nil {
t.Fatal(err)
@ -101,7 +101,7 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP {
if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
@ -110,19 +110,19 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP {
if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp1, cm13)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP {
if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
// Add cm13 to sm. Should not change anything.
sm.put(clientmodel.Fingerprint(2), &memorySeries{metric: cm13})
sm.put(model.Fingerprint(2), &memorySeries{metric: cm13})
gotFP, err = mapper.mapFP(fp1, cm11)
if err != nil {
t.Fatal(err)
@ -134,14 +134,14 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP {
if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp1, cm13)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP {
if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
@ -165,10 +165,10 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP {
if wantFP := model.Fingerprint(3); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
sm.put(clientmodel.Fingerprint(3), &memorySeries{metric: cm22})
sm.put(model.Fingerprint(3), &memorySeries{metric: cm22})
gotFP, err = mapper.mapFP(fp2, cm21)
if err != nil {
t.Fatal(err)
@ -180,7 +180,7 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP {
if wantFP := model.Fingerprint(3); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
@ -189,20 +189,20 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(4); gotFP != wantFP {
if wantFP := model.Fingerprint(4); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
sm.put(clientmodel.Fingerprint(4), &memorySeries{metric: cm31})
sm.put(model.Fingerprint(4), &memorySeries{metric: cm31})
// Map cm32, which is now mapped for two reasons...
gotFP, err = mapper.mapFP(fp3, cm32)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(5); gotFP != wantFP {
if wantFP := model.Fingerprint(5); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
sm.put(clientmodel.Fingerprint(5), &memorySeries{metric: cm32})
sm.put(model.Fingerprint(5), &memorySeries{metric: cm32})
// Now check ALL the mappings, just to be sure.
gotFP, err = mapper.mapFP(fp1, cm11)
@ -216,14 +216,14 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP {
if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp1, cm13)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP {
if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp2, cm21)
@ -237,21 +237,21 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP {
if wantFP := model.Fingerprint(3); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp3, cm31)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(4); gotFP != wantFP {
if wantFP := model.Fingerprint(4); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp3, cm32)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(5); gotFP != wantFP {
if wantFP := model.Fingerprint(5); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
@ -271,14 +271,14 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP {
if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp1, cm13)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP {
if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp2, cm21)
@ -292,21 +292,21 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP {
if wantFP := model.Fingerprint(3); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp3, cm31)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(4); gotFP != wantFP {
if wantFP := model.Fingerprint(4); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp3, cm32)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(5); gotFP != wantFP {
if wantFP := model.Fingerprint(5); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
@ -327,14 +327,14 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(1); gotFP != wantFP {
if wantFP := model.Fingerprint(1); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp1, cm13)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(2); gotFP != wantFP {
if wantFP := model.Fingerprint(2); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp2, cm21)
@ -348,21 +348,21 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP {
if wantFP := model.Fingerprint(3); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp3, cm31)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(4); gotFP != wantFP {
if wantFP := model.Fingerprint(4); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
gotFP, err = mapper.mapFP(fp3, cm32)
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(5); gotFP != wantFP {
if wantFP := model.Fingerprint(5); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
@ -384,7 +384,7 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(3); gotFP != wantFP { // Old mapping still applied.
if wantFP := model.Fingerprint(3); gotFP != wantFP { // Old mapping still applied.
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}
@ -396,7 +396,7 @@ func TestFPMapper(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if wantFP := clientmodel.Fingerprint(6); gotFP != wantFP {
if wantFP := model.Fingerprint(6); gotFP != wantFP {
t.Errorf("got fingerprint %v, want fingerprint %v", gotFP, wantFP)
}

View file

@ -31,7 +31,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/local/codable"
"github.com/prometheus/prometheus/storage/local/index"
@ -76,7 +76,7 @@ const (
indexingQueueCapacity = 1024 * 16
)
var fpLen = len(clientmodel.Fingerprint(0).String()) // Length of a fingerprint as string.
var fpLen = len(model.Fingerprint(0).String()) // Length of a fingerprint as string.
const (
flagHeadChunkPersisted byte = 1 << iota
@ -93,8 +93,8 @@ const (
)
type indexingOp struct {
fingerprint clientmodel.Fingerprint
metric clientmodel.Metric
fingerprint model.Fingerprint
metric model.Metric
opType indexingOpType
}
@ -335,7 +335,7 @@ func (p *persistence) setDirty(dirty bool) {
// pair. This method is goroutine-safe but take into account that metrics queued
// for indexing with IndexMetric might not have made it into the index
// yet. (Same applies correspondingly to UnindexMetric.)
func (p *persistence) fingerprintsForLabelPair(lp metric.LabelPair) (clientmodel.Fingerprints, error) {
func (p *persistence) fingerprintsForLabelPair(lp metric.LabelPair) (model.Fingerprints, error) {
fps, _, err := p.labelPairToFingerprints.Lookup(lp)
if err != nil {
return nil, err
@ -347,7 +347,7 @@ func (p *persistence) fingerprintsForLabelPair(lp metric.LabelPair) (clientmodel
// name. This method is goroutine-safe but take into account that metrics queued
// for indexing with IndexMetric might not have made it into the index
// yet. (Same applies correspondingly to UnindexMetric.)
func (p *persistence) labelValuesForLabelName(ln clientmodel.LabelName) (clientmodel.LabelValues, error) {
func (p *persistence) labelValuesForLabelName(ln model.LabelName) (model.LabelValues, error) {
lvs, _, err := p.labelNameToLabelValues.Lookup(ln)
if err != nil {
return nil, err
@ -361,7 +361,7 @@ func (p *persistence) labelValuesForLabelName(ln clientmodel.LabelName) (clientm
// the (zero-based) index of the first persisted chunk within the series
// file. In case of an error, the returned index is -1 (to avoid the
// misconception that the chunk was written at position 0).
func (p *persistence) persistChunks(fp clientmodel.Fingerprint, chunks []chunk) (index int, err error) {
func (p *persistence) persistChunks(fp model.Fingerprint, chunks []chunk) (index int, err error) {
defer func() {
if err != nil {
log.Error("Error persisting chunks: ", err)
@ -397,7 +397,7 @@ func (p *persistence) persistChunks(fp clientmodel.Fingerprint, chunks []chunk)
// incrementally larger indexes. The indexOffset denotes the offset to be added to
// each index in indexes. It is the caller's responsibility to not persist or
// drop anything for the same fingerprint concurrently.
func (p *persistence) loadChunks(fp clientmodel.Fingerprint, indexes []int, indexOffset int) ([]chunk, error) {
func (p *persistence) loadChunks(fp model.Fingerprint, indexes []int, indexOffset int) ([]chunk, error) {
f, err := p.openChunkFileForReading(fp)
if err != nil {
return nil, err
@ -448,7 +448,7 @@ func (p *persistence) loadChunks(fp clientmodel.Fingerprint, indexes []int, inde
// the number of chunkDescs to skip from the end of the series file. It is the
// caller's responsibility to not persist or drop anything for the same
// fingerprint concurrently.
func (p *persistence) loadChunkDescs(fp clientmodel.Fingerprint, offsetFromEnd int) ([]*chunkDesc, error) {
func (p *persistence) loadChunkDescs(fp model.Fingerprint, offsetFromEnd int) ([]*chunkDesc, error) {
f, err := p.openChunkFileForReading(fp)
if os.IsNotExist(err) {
return nil, nil
@ -484,8 +484,8 @@ func (p *persistence) loadChunkDescs(fp clientmodel.Fingerprint, offsetFromEnd i
return nil, err
}
cds[i] = &chunkDesc{
chunkFirstTime: clientmodel.Timestamp(binary.LittleEndian.Uint64(chunkTimesBuf)),
chunkLastTime: clientmodel.Timestamp(binary.LittleEndian.Uint64(chunkTimesBuf[8:])),
chunkFirstTime: model.Time(binary.LittleEndian.Uint64(chunkTimesBuf)),
chunkLastTime: model.Time(binary.LittleEndian.Uint64(chunkTimesBuf[8:])),
}
}
chunkDescOps.WithLabelValues(load).Add(float64(len(cds)))
@ -681,7 +681,7 @@ func (p *persistence) checkpointSeriesMapAndHeads(fingerprintToSeries *seriesMap
// utterly goroutine-unsafe.
func (p *persistence) loadSeriesMapAndHeads() (sm *seriesMap, chunksToPersist int64, err error) {
var chunkDescsTotal int64
fingerprintToSeries := make(map[clientmodel.Fingerprint]*memorySeries)
fingerprintToSeries := make(map[model.Fingerprint]*memorySeries)
sm = &seriesMap{m: fingerprintToSeries}
defer func() {
@ -819,8 +819,8 @@ func (p *persistence) loadSeriesMapAndHeads() (sm *seriesMap, chunksToPersist in
return sm, chunksToPersist, nil
}
chunkDescs[i] = &chunkDesc{
chunkFirstTime: clientmodel.Timestamp(firstTime),
chunkLastTime: clientmodel.Timestamp(lastTime),
chunkFirstTime: model.Time(firstTime),
chunkLastTime: model.Time(lastTime),
}
chunkDescsTotal++
} else {
@ -842,13 +842,13 @@ func (p *persistence) loadSeriesMapAndHeads() (sm *seriesMap, chunksToPersist in
}
}
fingerprintToSeries[clientmodel.Fingerprint(fp)] = &memorySeries{
metric: clientmodel.Metric(metric),
fingerprintToSeries[model.Fingerprint(fp)] = &memorySeries{
metric: model.Metric(metric),
chunkDescs: chunkDescs,
persistWatermark: int(persistWatermark),
modTime: modTime,
chunkDescsOffset: int(chunkDescsOffset),
savedFirstTime: clientmodel.Timestamp(savedFirstTime),
savedFirstTime: model.Time(savedFirstTime),
lastTime: chunkDescs[len(chunkDescs)-1].lastTime(),
headChunkClosed: persistWatermark >= numChunkDescs,
}
@ -866,9 +866,9 @@ func (p *persistence) loadSeriesMapAndHeads() (sm *seriesMap, chunksToPersist in
// It is the caller's responsibility to make sure nothing is persisted or loaded
// for the same fingerprint concurrently.
func (p *persistence) dropAndPersistChunks(
fp clientmodel.Fingerprint, beforeTime clientmodel.Timestamp, chunks []chunk,
fp model.Fingerprint, beforeTime model.Time, chunks []chunk,
) (
firstTimeNotDropped clientmodel.Timestamp,
firstTimeNotDropped model.Time,
offset int,
numDropped int,
allDropped bool,
@ -952,11 +952,11 @@ func (p *persistence) dropAndPersistChunks(
if err != nil {
return
}
lastTime := clientmodel.Timestamp(
lastTime := model.Time(
binary.LittleEndian.Uint64(headerBuf[chunkHeaderLastTimeOffset:]),
)
if !lastTime.Before(beforeTime) {
firstTimeNotDropped = clientmodel.Timestamp(
firstTimeNotDropped = model.Time(
binary.LittleEndian.Uint64(headerBuf[chunkHeaderFirstTimeOffset:]),
)
chunkOps.WithLabelValues(drop).Add(float64(numDropped))
@ -1008,7 +1008,7 @@ func (p *persistence) dropAndPersistChunks(
// deleteSeriesFile deletes a series file belonging to the provided
// fingerprint. It returns the number of chunks that were contained in the
// deleted file.
func (p *persistence) deleteSeriesFile(fp clientmodel.Fingerprint) (int, error) {
func (p *persistence) deleteSeriesFile(fp model.Fingerprint) (int, error) {
fname := p.fileNameForFingerprint(fp)
fi, err := os.Stat(fname)
if os.IsNotExist(err) {
@ -1029,7 +1029,7 @@ func (p *persistence) deleteSeriesFile(fp clientmodel.Fingerprint) (int, error)
// seriesFileModTime returns the modification time of the series file belonging
// to the provided fingerprint. In case of an error, the zero value of time.Time
// is returned.
func (p *persistence) seriesFileModTime(fp clientmodel.Fingerprint) time.Time {
func (p *persistence) seriesFileModTime(fp model.Fingerprint) time.Time {
var modTime time.Time
if fi, err := os.Stat(p.fileNameForFingerprint(fp)); err == nil {
return fi.ModTime()
@ -1041,7 +1041,7 @@ func (p *persistence) seriesFileModTime(fp clientmodel.Fingerprint) time.Time {
// fingerprintsForLabelPair, labelValuesForLabelName, and
// fingerprintsModifiedBefore. If the queue is full, this method blocks until
// the metric can be queued. This method is goroutine-safe.
func (p *persistence) indexMetric(fp clientmodel.Fingerprint, m clientmodel.Metric) {
func (p *persistence) indexMetric(fp model.Fingerprint, m model.Metric) {
p.indexingQueue <- indexingOp{fp, m, add}
}
@ -1052,7 +1052,7 @@ func (p *persistence) indexMetric(fp clientmodel.Fingerprint, m clientmodel.Metr
// archived metric. To purge an archived metric, call purgeArchivedFingerprint.)
// If the queue is full, this method blocks until the metric can be queued. This
// method is goroutine-safe.
func (p *persistence) unindexMetric(fp clientmodel.Fingerprint, m clientmodel.Metric) {
func (p *persistence) unindexMetric(fp model.Fingerprint, m model.Metric) {
p.indexingQueue <- indexingOp{fp, m, remove}
}
@ -1074,7 +1074,7 @@ func (p *persistence) waitForIndexing() {
// metric, together with the first and last timestamp of the series belonging to
// the metric. The caller must have locked the fingerprint.
func (p *persistence) archiveMetric(
fp clientmodel.Fingerprint, m clientmodel.Metric, first, last clientmodel.Timestamp,
fp model.Fingerprint, m model.Metric, first, last model.Time,
) error {
if err := p.archivedFingerprintToMetrics.Put(codable.Fingerprint(fp), codable.Metric(m)); err != nil {
p.setDirty(true)
@ -1090,8 +1090,8 @@ func (p *persistence) archiveMetric(
// hasArchivedMetric returns whether the archived metric for the given
// fingerprint exists and if yes, what the first and last timestamp in the
// corresponding series is. This method is goroutine-safe.
func (p *persistence) hasArchivedMetric(fp clientmodel.Fingerprint) (
hasMetric bool, firstTime, lastTime clientmodel.Timestamp, err error,
func (p *persistence) hasArchivedMetric(fp model.Fingerprint) (
hasMetric bool, firstTime, lastTime model.Time, err error,
) {
firstTime, lastTime, hasMetric, err = p.archivedFingerprintToTimeRange.Lookup(fp)
return
@ -1101,7 +1101,7 @@ func (p *persistence) hasArchivedMetric(fp clientmodel.Fingerprint) (
// sure that the fingerprint is currently archived (the time range will
// otherwise be added without the corresponding metric in the archive).
func (p *persistence) updateArchivedTimeRange(
fp clientmodel.Fingerprint, first, last clientmodel.Timestamp,
fp model.Fingerprint, first, last model.Time,
) error {
return p.archivedFingerprintToTimeRange.Put(codable.Fingerprint(fp), codable.TimeRange{First: first, Last: last})
}
@ -1109,10 +1109,10 @@ func (p *persistence) updateArchivedTimeRange(
// fingerprintsModifiedBefore returns the fingerprints of archived timeseries
// that have live samples before the provided timestamp. This method is
// goroutine-safe.
func (p *persistence) fingerprintsModifiedBefore(beforeTime clientmodel.Timestamp) ([]clientmodel.Fingerprint, error) {
func (p *persistence) fingerprintsModifiedBefore(beforeTime model.Time) ([]model.Fingerprint, error) {
var fp codable.Fingerprint
var tr codable.TimeRange
fps := []clientmodel.Fingerprint{}
fps := []model.Fingerprint{}
p.archivedFingerprintToTimeRange.ForEach(func(kv index.KeyValueAccessor) error {
if err := kv.Value(&tr); err != nil {
return err
@ -1121,7 +1121,7 @@ func (p *persistence) fingerprintsModifiedBefore(beforeTime clientmodel.Timestam
if err := kv.Key(&fp); err != nil {
return err
}
fps = append(fps, clientmodel.Fingerprint(fp))
fps = append(fps, model.Fingerprint(fp))
}
return nil
})
@ -1130,7 +1130,7 @@ func (p *persistence) fingerprintsModifiedBefore(beforeTime clientmodel.Timestam
// archivedMetric retrieves the archived metric with the given fingerprint. This
// method is goroutine-safe.
func (p *persistence) archivedMetric(fp clientmodel.Fingerprint) (clientmodel.Metric, error) {
func (p *persistence) archivedMetric(fp model.Fingerprint) (model.Metric, error) {
metric, _, err := p.archivedFingerprintToMetrics.Lookup(fp)
return metric, err
}
@ -1139,7 +1139,7 @@ func (p *persistence) archivedMetric(fp clientmodel.Fingerprint) (clientmodel.Me
// metric entirely. It also queues the metric for un-indexing (no need to call
// unindexMetric for the deleted metric.) It does not touch the series file,
// though. The caller must have locked the fingerprint.
func (p *persistence) purgeArchivedMetric(fp clientmodel.Fingerprint) (err error) {
func (p *persistence) purgeArchivedMetric(fp model.Fingerprint) (err error) {
defer func() {
if err != nil {
p.setDirty(true)
@ -1172,7 +1172,7 @@ func (p *persistence) purgeArchivedMetric(fp clientmodel.Fingerprint) (err error
// contrast to purgeArchivedMetric) does not un-index the metric. If a metric
// was actually deleted, the method returns true and the first time and last
// time of the deleted metric. The caller must have locked the fingerprint.
func (p *persistence) unarchiveMetric(fp clientmodel.Fingerprint) (deletedAnything bool, err error) {
func (p *persistence) unarchiveMetric(fp model.Fingerprint) (deletedAnything bool, err error) {
defer func() {
if err != nil {
p.setDirty(true)
@ -1232,22 +1232,22 @@ func (p *persistence) close() error {
return lastError
}
func (p *persistence) dirNameForFingerprint(fp clientmodel.Fingerprint) string {
func (p *persistence) dirNameForFingerprint(fp model.Fingerprint) string {
fpStr := fp.String()
return path.Join(p.basePath, fpStr[0:seriesDirNameLen])
}
func (p *persistence) fileNameForFingerprint(fp clientmodel.Fingerprint) string {
func (p *persistence) fileNameForFingerprint(fp model.Fingerprint) string {
fpStr := fp.String()
return path.Join(p.basePath, fpStr[0:seriesDirNameLen], fpStr[seriesDirNameLen:]+seriesFileSuffix)
}
func (p *persistence) tempFileNameForFingerprint(fp clientmodel.Fingerprint) string {
func (p *persistence) tempFileNameForFingerprint(fp model.Fingerprint) string {
fpStr := fp.String()
return path.Join(p.basePath, fpStr[0:seriesDirNameLen], fpStr[seriesDirNameLen:]+seriesTempFileSuffix)
}
func (p *persistence) openChunkFileForWriting(fp clientmodel.Fingerprint) (*os.File, error) {
func (p *persistence) openChunkFileForWriting(fp model.Fingerprint) (*os.File, error) {
if err := os.MkdirAll(p.dirNameForFingerprint(fp), 0700); err != nil {
return nil, err
}
@ -1272,7 +1272,7 @@ func (p *persistence) closeChunkFile(f *os.File) {
}
}
func (p *persistence) openChunkFileForReading(fp clientmodel.Fingerprint) (*os.File, error) {
func (p *persistence) openChunkFileForReading(fp model.Fingerprint) (*os.File, error) {
return os.Open(p.fileNameForFingerprint(fp))
}
@ -1481,9 +1481,9 @@ func (p *persistence) checkpointFPMappings(fpm fpMappings) (err error) {
// mapped fingerprint and any error encountered. If p.mappingsFileName is not
// found, the method returns (fpMappings{}, 0, nil). Do not call concurrently
// with checkpointFPMappings.
func (p *persistence) loadFPMappings() (fpMappings, clientmodel.Fingerprint, error) {
func (p *persistence) loadFPMappings() (fpMappings, model.Fingerprint, error) {
fpm := fpMappings{}
var highestMappedFP clientmodel.Fingerprint
var highestMappedFP model.Fingerprint
f, err := os.Open(p.mappingsFileName())
if os.IsNotExist(err) {
@ -1523,7 +1523,7 @@ func (p *persistence) loadFPMappings() (fpMappings, clientmodel.Fingerprint, err
if err != nil {
return nil, 0, err
}
mappings := make(map[string]clientmodel.Fingerprint, numMappings)
mappings := make(map[string]model.Fingerprint, numMappings)
for ; numMappings > 0; numMappings-- {
lenMS, err := binary.ReadUvarint(r)
if err != nil {
@ -1537,13 +1537,13 @@ func (p *persistence) loadFPMappings() (fpMappings, clientmodel.Fingerprint, err
if err != nil {
return nil, 0, err
}
mappedFP := clientmodel.Fingerprint(fp)
mappedFP := model.Fingerprint(fp)
if mappedFP > highestMappedFP {
highestMappedFP = mappedFP
}
mappings[string(buf)] = mappedFP
}
fpm[clientmodel.Fingerprint(rawFP)] = mappings
fpm[model.Fingerprint(rawFP)] = mappings
}
return fpm, highestMappedFP, nil
}
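
fileNameForFingerprint above splits the fingerprint's hex string into a short directory prefix and the remaining file name. A sketch of that layout; dirNameLen and fileSuffix are assumed illustration values, not necessarily the package's seriesDirNameLen/seriesFileSuffix constants:

package main

import (
	"fmt"
	"path"

	"github.com/prometheus/common/model"
)

const (
	dirNameLen = 2     // assumed prefix length for illustration
	fileSuffix = ".db" // assumed series-file suffix for illustration
)

// fileNameFor mirrors the scheme above: directory prefix from the first
// characters of the fingerprint's hex string, file name from the rest.
func fileNameFor(base string, fp model.Fingerprint) string {
	s := fp.String() // 16 hex characters
	return path.Join(base, s[:dirNameLen], s[dirNameLen:]+fileSuffix)
}

func main() {
	fp, _ := model.FingerprintFromString("85d92d7a28ef7d1c")
	fmt.Println(fileNameFor("/data", fp))
	// e.g. /data/85/d92d7a28ef7d1c.db
}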

View file

@ -19,7 +19,7 @@ import (
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/local/codable"
"github.com/prometheus/prometheus/storage/local/index"
@ -28,11 +28,11 @@ import (
)
var (
m1 = clientmodel.Metric{"label": "value1"}
m2 = clientmodel.Metric{"label": "value2"}
m3 = clientmodel.Metric{"label": "value3"}
m4 = clientmodel.Metric{"label": "value4"}
m5 = clientmodel.Metric{"label": "value5"}
m1 = model.Metric{"label": "value1"}
m2 = model.Metric{"label": "value2"}
m3 = model.Metric{"label": "value3"}
m4 = model.Metric{"label": "value4"}
m5 = model.Metric{"label": "value5"}
)
func newTestPersistence(t *testing.T, encoding chunkEncoding) (*persistence, testutil.Closer) {
@ -50,20 +50,20 @@ func newTestPersistence(t *testing.T, encoding chunkEncoding) (*persistence, tes
})
}
func buildTestChunks(encoding chunkEncoding) map[clientmodel.Fingerprint][]chunk {
fps := clientmodel.Fingerprints{
func buildTestChunks(encoding chunkEncoding) map[model.Fingerprint][]chunk {
fps := model.Fingerprints{
m1.FastFingerprint(),
m2.FastFingerprint(),
m3.FastFingerprint(),
}
fpToChunks := map[clientmodel.Fingerprint][]chunk{}
fpToChunks := map[model.Fingerprint][]chunk{}
for _, fp := range fps {
fpToChunks[fp] = make([]chunk, 0, 10)
for i := 0; i < 10; i++ {
fpToChunks[fp] = append(fpToChunks[fp], newChunkForEncoding(encoding).add(&metric.SamplePair{
Timestamp: clientmodel.Timestamp(i),
Value: clientmodel.SampleValue(fp),
Timestamp: model.Time(i),
Value: model.SampleValue(fp),
})[0])
}
}
@ -89,11 +89,11 @@ func testPersistLoadDropChunks(t *testing.T, encoding chunkEncoding) {
for fp, chunks := range fpToChunks {
firstTimeNotDropped, offset, numDropped, allDropped, err :=
p.dropAndPersistChunks(fp, clientmodel.Earliest, chunks)
p.dropAndPersistChunks(fp, model.Earliest, chunks)
if err != nil {
t.Fatal(err)
}
if got, want := firstTimeNotDropped, clientmodel.Timestamp(0); got != want {
if got, want := firstTimeNotDropped, model.Time(0); got != want {
t.Errorf("Want firstTimeNotDropped %v, got %v.", got, want)
}
if got, want := offset, 0; got != want {
@ -127,7 +127,7 @@ func testPersistLoadDropChunks(t *testing.T, encoding chunkEncoding) {
t.Errorf("Got %d chunkDescs, want %d.", len(actualChunkDescs), 10)
}
for i, cd := range actualChunkDescs {
if cd.firstTime() != clientmodel.Timestamp(i) || cd.lastTime() != clientmodel.Timestamp(i) {
if cd.firstTime() != model.Time(i) || cd.lastTime() != model.Time(i) {
t.Errorf(
"Want ts=%v, got firstTime=%v, lastTime=%v.",
i, cd.firstTime(), cd.lastTime(),
@ -141,7 +141,7 @@ func testPersistLoadDropChunks(t *testing.T, encoding chunkEncoding) {
t.Errorf("Got %d chunkDescs, want %d.", len(actualChunkDescs), 5)
}
for i, cd := range actualChunkDescs {
if cd.firstTime() != clientmodel.Timestamp(i) || cd.lastTime() != clientmodel.Timestamp(i) {
if cd.firstTime() != model.Time(i) || cd.lastTime() != model.Time(i) {
t.Errorf(
"Want ts=%v, got firstTime=%v, lastTime=%v.",
i, cd.firstTime(), cd.lastTime(),
@ -204,11 +204,11 @@ func testPersistLoadDropChunks(t *testing.T, encoding chunkEncoding) {
// Re-add first two of the chunks.
for fp, chunks := range fpToChunks {
firstTimeNotDropped, offset, numDropped, allDropped, err :=
p.dropAndPersistChunks(fp, clientmodel.Earliest, chunks[:2])
p.dropAndPersistChunks(fp, model.Earliest, chunks[:2])
if err != nil {
t.Fatal(err)
}
if got, want := firstTimeNotDropped, clientmodel.Timestamp(0); got != want {
if got, want := firstTimeNotDropped, model.Time(0); got != want {
t.Errorf("Want firstTimeNotDropped %v, got %v.", got, want)
}
if got, want := offset, 0; got != want {
@ -366,12 +366,12 @@ func testCheckpointAndLoadSeriesMapAndHeads(t *testing.T, encoding chunkEncoding
s3.persistWatermark = 1
for i := 0; i < 10000; i++ {
s4.add(&metric.SamplePair{
Timestamp: clientmodel.Timestamp(i),
Value: clientmodel.SampleValue(i) / 2,
Timestamp: model.Time(i),
Value: model.SampleValue(i) / 2,
})
s5.add(&metric.SamplePair{
Timestamp: clientmodel.Timestamp(i),
Value: clientmodel.SampleValue(i * i),
Timestamp: model.Time(i),
Value: model.SampleValue(i * i),
})
}
s5.persistWatermark = 3
@ -491,11 +491,11 @@ func TestCheckpointAndLoadFPMappings(t *testing.T) {
defer closer.Close()
in := fpMappings{
1: map[string]clientmodel.Fingerprint{
1: map[string]model.Fingerprint{
"foo": 1,
"bar": 2,
},
3: map[string]clientmodel.Fingerprint{
3: map[string]model.Fingerprint{
"baz": 4,
},
}
@ -508,7 +508,7 @@ func TestCheckpointAndLoadFPMappings(t *testing.T) {
if err != nil {
t.Fatal(err)
}
if got, want := fp, clientmodel.Fingerprint(4); got != want {
if got, want := fp, model.Fingerprint(4); got != want {
t.Errorf("got highest FP %v, want %v", got, want)
}
if !reflect.DeepEqual(in, out) {
@ -520,14 +520,14 @@ func testFingerprintsModifiedBefore(t *testing.T, encoding chunkEncoding) {
p, closer := newTestPersistence(t, encoding)
defer closer.Close()
m1 := clientmodel.Metric{"n1": "v1"}
m2 := clientmodel.Metric{"n2": "v2"}
m3 := clientmodel.Metric{"n1": "v2"}
m1 := model.Metric{"n1": "v1"}
m2 := model.Metric{"n2": "v2"}
m3 := model.Metric{"n1": "v2"}
p.archiveMetric(1, m1, 2, 4)
p.archiveMetric(2, m2, 1, 6)
p.archiveMetric(3, m3, 5, 5)
expectedFPs := map[clientmodel.Timestamp][]clientmodel.Fingerprint{
expectedFPs := map[model.Time][]model.Fingerprint{
0: {},
1: {},
2: {2},
@ -562,7 +562,7 @@ func testFingerprintsModifiedBefore(t *testing.T, encoding chunkEncoding) {
t.Error("expected no unarchival")
}
expectedFPs = map[clientmodel.Timestamp][]clientmodel.Fingerprint{
expectedFPs = map[model.Time][]model.Fingerprint{
0: {},
1: {},
2: {2},
@ -595,8 +595,8 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) {
p, closer := newTestPersistence(t, encoding)
defer closer.Close()
m1 := clientmodel.Metric{"n1": "v1"}
m2 := clientmodel.Metric{"n2": "v2"}
m1 := model.Metric{"n1": "v1"}
m2 := model.Metric{"n2": "v2"}
p.archiveMetric(1, m1, 2, 4)
p.archiveMetric(2, m2, 1, 6)
p.indexMetric(1, m1)
@ -607,7 +607,7 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) {
if err != nil {
t.Fatal(err)
}
want := clientmodel.Fingerprints{1}
want := model.Fingerprints{1}
if !reflect.DeepEqual(outFPs, want) {
t.Errorf("want %#v, got %#v", want, outFPs)
}
@ -615,7 +615,7 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) {
if err != nil {
t.Fatal(err)
}
want = clientmodel.Fingerprints{2}
want = model.Fingerprints{2}
if !reflect.DeepEqual(outFPs, want) {
t.Errorf("want %#v, got %#v", want, outFPs)
}
@ -647,7 +647,7 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) {
if err != nil {
t.Fatal(err)
}
want = clientmodel.Fingerprints{2}
want = model.Fingerprints{2}
if !reflect.DeepEqual(outFPs, want) {
t.Errorf("want %#v, got %#v", want, outFPs)
}
@ -678,21 +678,21 @@ func testIndexing(t *testing.T, encoding chunkEncoding) {
{
fpToMetric: index.FingerprintMetricMapping{
0: {
clientmodel.MetricNameLabel: "metric_0",
"label_1": "value_1",
model.MetricNameLabel: "metric_0",
"label_1": "value_1",
},
1: {
clientmodel.MetricNameLabel: "metric_0",
"label_2": "value_2",
"label_3": "value_3",
model.MetricNameLabel: "metric_0",
"label_2": "value_2",
"label_3": "value_3",
},
2: {
clientmodel.MetricNameLabel: "metric_1",
"label_1": "value_2",
model.MetricNameLabel: "metric_1",
"label_1": "value_2",
},
},
expectedLnToLvs: index.LabelNameLabelValuesMapping{
clientmodel.MetricNameLabel: codable.LabelValueSet{
model.MetricNameLabel: codable.LabelValueSet{
"metric_0": struct{}{},
"metric_1": struct{}{},
},
@ -709,11 +709,11 @@ func testIndexing(t *testing.T, encoding chunkEncoding) {
},
expectedLpToFps: index.LabelPairFingerprintsMapping{
metric.LabelPair{
Name: clientmodel.MetricNameLabel,
Name: model.MetricNameLabel,
Value: "metric_0",
}: codable.FingerprintSet{0: struct{}{}, 1: struct{}{}},
metric.LabelPair{
Name: clientmodel.MetricNameLabel,
Name: model.MetricNameLabel,
Value: "metric_1",
}: codable.FingerprintSet{2: struct{}{}},
metric.LabelPair{
@ -736,21 +736,21 @@ func testIndexing(t *testing.T, encoding chunkEncoding) {
}, {
fpToMetric: index.FingerprintMetricMapping{
3: {
clientmodel.MetricNameLabel: "metric_0",
"label_1": "value_3",
model.MetricNameLabel: "metric_0",
"label_1": "value_3",
},
4: {
clientmodel.MetricNameLabel: "metric_2",
"label_2": "value_2",
"label_3": "value_1",
model.MetricNameLabel: "metric_2",
"label_2": "value_2",
"label_3": "value_1",
},
5: {
clientmodel.MetricNameLabel: "metric_1",
"label_1": "value_3",
model.MetricNameLabel: "metric_1",
"label_1": "value_3",
},
},
expectedLnToLvs: index.LabelNameLabelValuesMapping{
clientmodel.MetricNameLabel: codable.LabelValueSet{
model.MetricNameLabel: codable.LabelValueSet{
"metric_0": struct{}{},
"metric_1": struct{}{},
"metric_2": struct{}{},
@ -770,15 +770,15 @@ func testIndexing(t *testing.T, encoding chunkEncoding) {
},
expectedLpToFps: index.LabelPairFingerprintsMapping{
metric.LabelPair{
Name: clientmodel.MetricNameLabel,
Name: model.MetricNameLabel,
Value: "metric_0",
}: codable.FingerprintSet{0: struct{}{}, 1: struct{}{}, 3: struct{}{}},
metric.LabelPair{
Name: clientmodel.MetricNameLabel,
Name: model.MetricNameLabel,
Value: "metric_1",
}: codable.FingerprintSet{2: struct{}{}, 5: struct{}{}},
metric.LabelPair{
Name: clientmodel.MetricNameLabel,
Name: model.MetricNameLabel,
Value: "metric_2",
}: codable.FingerprintSet{4: struct{}{}},
metric.LabelPair{
@ -928,10 +928,10 @@ func BenchmarkLoadChunksSequentially(b *testing.B) {
sequentialIndexes[i] = i
}
var fp clientmodel.Fingerprint
var fp model.Fingerprint
for i := 0; i < b.N; i++ {
for _, s := range fpStrings {
fp.LoadFromString(s)
fp, _ = model.FingerprintFromString(s)
cds, err := p.loadChunks(fp, sequentialIndexes, 0)
if err != nil {
b.Error(err)
@ -950,10 +950,10 @@ func BenchmarkLoadChunksRandomly(b *testing.B) {
}
randomIndexes := []int{1, 5, 6, 8, 11, 14, 18, 23, 29, 33, 42, 46}
var fp clientmodel.Fingerprint
var fp model.Fingerprint
for i := 0; i < b.N; i++ {
for _, s := range fpStrings {
fp.LoadFromString(s)
fp, _ = model.FingerprintFromString(s)
cds, err := p.loadChunks(fp, randomIndexes, 0)
if err != nil {
b.Error(err)
@ -970,10 +970,10 @@ func BenchmarkLoadChunkDescs(b *testing.B) {
basePath: "fixtures",
}
var fp clientmodel.Fingerprint
var fp model.Fingerprint
for i := 0; i < b.N; i++ {
for _, s := range fpStrings {
fp.LoadFromString(s)
fp, _ = model.FingerprintFromString(s)
cds, err := p.loadChunkDescs(fp, 0)
if err != nil {
b.Error(err)
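
The benchmarks switch from the old mutating fp.LoadFromString(s) to model.FingerprintFromString, which returns the fingerprint along with a parse error. Minimal usage of the new call:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/common/model"
)

func main() {
	// Constructor-style parsing replaces the old in-place LoadFromString;
	// the parse error is now surfaced instead of silently ignored.
	fp, err := model.FingerprintFromString("0123456789abcdef")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(fp.String()) // prints the same 16-character hex string
}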

View file

@ -16,7 +16,7 @@ package local
import (
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
// memorySeriesPreloader is a Preloader for the memorySeriesStorage.
@ -27,8 +27,8 @@ type memorySeriesPreloader struct {
// PreloadRange implements Preloader.
func (p *memorySeriesPreloader) PreloadRange(
fp clientmodel.Fingerprint,
from clientmodel.Timestamp, through clientmodel.Timestamp,
fp model.Fingerprint,
from model.Time, through model.Time,
stalenessDelta time.Duration,
) error {
cds, err := p.storage.preloadChunksForRange(fp, from, through, stalenessDelta)
@ -41,7 +41,7 @@ func (p *memorySeriesPreloader) PreloadRange(
/*
// MetricAtTime implements Preloader.
func (p *memorySeriesPreloader) MetricAtTime(fp clientmodel.Fingerprint, t clientmodel.Timestamp) error {
func (p *memorySeriesPreloader) MetricAtTime(fp model.Fingerprint, t model.Time) error {
cds, err := p.storage.preloadChunks(fp, &timeSelector{
from: t,
through: t,
@ -54,7 +54,7 @@ func (p *memorySeriesPreloader) MetricAtTime(fp clientmodel.Fingerprint, t clien
}
// MetricAtInterval implements Preloader.
func (p *memorySeriesPreloader) MetricAtInterval(fp clientmodel.Fingerprint, from, through clientmodel.Timestamp, interval time.Duration) error {
func (p *memorySeriesPreloader) MetricAtInterval(fp model.Fingerprint, from, through model.Time, interval time.Duration) error {
cds, err := p.storage.preloadChunks(fp, &timeSelector{
from: from,
through: through,
@ -68,7 +68,7 @@ func (p *memorySeriesPreloader) MetricAtInterval(fp clientmodel.Fingerprint, fro
}
// MetricRange implements Preloader.
func (p *memorySeriesPreloader) MetricRange(fp clientmodel.Fingerprint, t clientmodel.Timestamp, rangeDuration time.Duration) error {
func (p *memorySeriesPreloader) MetricRange(fp model.Fingerprint, t model.Time, rangeDuration time.Duration) error {
cds, err := p.storage.preloadChunks(fp, &timeSelector{
from: t,
through: t,
@ -82,7 +82,7 @@ func (p *memorySeriesPreloader) MetricRange(fp clientmodel.Fingerprint, t client
}
// MetricRangeAtInterval implements Preloader.
func (p *memorySeriesPreloader) MetricRangeAtInterval(fp clientmodel.Fingerprint, from, through clientmodel.Timestamp, interval, rangeDuration time.Duration) error {
func (p *memorySeriesPreloader) MetricRangeAtInterval(fp model.Fingerprint, from, through model.Time, interval, rangeDuration time.Duration) error {
cds, err := p.storage.preloadChunks(fp, &timeSelector{
from: from,
through: through,
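
PreloadRange now takes model.Fingerprint and a model.Time range. A sketch of how a caller might drive it; the preloader interface and noop implementation below are stand-ins for local.Preloader, and Time.Add is assumed to behave like the old clientmodel Timestamp.Add:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

// preloader is a stand-in for the Preloader interface above, reduced to
// the one method touched by this change.
type preloader interface {
	PreloadRange(fp model.Fingerprint, from, through model.Time, stalenessDelta time.Duration) error
}

type noopPreloader struct{}

func (noopPreloader) PreloadRange(fp model.Fingerprint, from, through model.Time, _ time.Duration) error {
	fmt.Printf("preload %v from %v through %v\n", fp, from, through)
	return nil
}

func main() {
	var p preloader = noopPreloader{}
	through := model.Now()
	from := through.Add(-5 * time.Minute) // last five minutes of the series
	_ = p.PreloadRange(model.Fingerprint(0x1234), from, through, 5*time.Minute)
}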

View file

@ -18,7 +18,7 @@ import (
"sync"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
)
@ -36,22 +36,22 @@ const (
// fingerprintSeriesPair pairs a fingerprint with a memorySeries pointer.
type fingerprintSeriesPair struct {
fp clientmodel.Fingerprint
fp model.Fingerprint
series *memorySeries
}
// seriesMap maps fingerprints to memory series. All its methods are
// goroutine-safe. A SeriesMap is effectively a goroutine-safe version of
// map[clientmodel.Fingerprint]*memorySeries.
// map[model.Fingerprint]*memorySeries.
type seriesMap struct {
mtx sync.RWMutex
m map[clientmodel.Fingerprint]*memorySeries
m map[model.Fingerprint]*memorySeries
}
// newSeriesMap returns a newly allocated empty seriesMap. To create a seriesMap
// based on a prefilled map, use an explicit initializer.
func newSeriesMap() *seriesMap {
return &seriesMap{m: make(map[clientmodel.Fingerprint]*memorySeries)}
return &seriesMap{m: make(map[model.Fingerprint]*memorySeries)}
}
// length returns the number of mappings in the seriesMap.
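For readers who have not seen the type before: seriesMap is nothing more than an RWMutex-guarded map keyed by fingerprints, which is why the migration here is almost entirely a key-type change. The sketch below shows the same pattern in isolation with the new model.Fingerprint key; fpMap and placeholderSeries are invented stand-ins, since memorySeries itself is internal to this package.

package main

import (
	"fmt"
	"sync"

	"github.com/prometheus/common/model"
)

// placeholderSeries stands in for the package-internal *memorySeries.
type placeholderSeries struct{ name string }

// fpMap mirrors the seriesMap pattern: a mutex-guarded map keyed by fingerprint.
type fpMap struct {
	mtx sync.RWMutex
	m   map[model.Fingerprint]*placeholderSeries
}

func (fm *fpMap) get(fp model.Fingerprint) (*placeholderSeries, bool) {
	fm.mtx.RLock()
	defer fm.mtx.RUnlock()
	s, ok := fm.m[fp]
	return s, ok
}

func (fm *fpMap) put(fp model.Fingerprint, s *placeholderSeries) {
	fm.mtx.Lock()
	defer fm.mtx.Unlock()
	fm.m[fp] = s
}

func main() {
	fm := &fpMap{m: map[model.Fingerprint]*placeholderSeries{}}
	fp := model.Metric{model.MetricNameLabel: "up"}.FastFingerprint()
	fm.put(fp, &placeholderSeries{name: "up"})
	if s, ok := fm.get(fp); ok {
		fmt.Println(s.name)
	}
}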
@ -64,7 +64,7 @@ func (sm *seriesMap) length() int {
// get returns a memorySeries for a fingerprint. Return values have the same
// semantics as the native Go map.
func (sm *seriesMap) get(fp clientmodel.Fingerprint) (s *memorySeries, ok bool) {
func (sm *seriesMap) get(fp model.Fingerprint) (s *memorySeries, ok bool) {
sm.mtx.RLock()
defer sm.mtx.RUnlock()
@ -73,7 +73,7 @@ func (sm *seriesMap) get(fp clientmodel.Fingerprint) (s *memorySeries, ok bool)
}
// put adds a mapping to the seriesMap. It panics if s == nil.
func (sm *seriesMap) put(fp clientmodel.Fingerprint, s *memorySeries) {
func (sm *seriesMap) put(fp model.Fingerprint, s *memorySeries) {
sm.mtx.Lock()
defer sm.mtx.Unlock()
@ -84,7 +84,7 @@ func (sm *seriesMap) put(fp clientmodel.Fingerprint, s *memorySeries) {
}
// del removes a mapping from the seriesMap.
func (sm *seriesMap) del(fp clientmodel.Fingerprint) {
func (sm *seriesMap) del(fp model.Fingerprint) {
sm.mtx.Lock()
defer sm.mtx.Unlock()
@ -120,8 +120,8 @@ func (sm *seriesMap) iter() <-chan fingerprintSeriesPair {
// for iterating over a map with a 'range' clause. However, if the next element
// in iteration order is removed after the current element has been received
// from the channel, it will still be produced by the channel.
func (sm *seriesMap) fpIter() <-chan clientmodel.Fingerprint {
ch := make(chan clientmodel.Fingerprint)
func (sm *seriesMap) fpIter() <-chan model.Fingerprint {
ch := make(chan model.Fingerprint)
go func() {
sm.mtx.RLock()
for fp := range sm.m {
@ -136,7 +136,7 @@ func (sm *seriesMap) fpIter() <-chan clientmodel.Fingerprint {
}
type memorySeries struct {
metric clientmodel.Metric
metric model.Metric
// Sorted by start time, overlapping chunk ranges are forbidden.
chunkDescs []*chunkDesc
// The index (within chunkDescs above) of the first chunkDesc that
@ -161,10 +161,10 @@ type memorySeries struct {
// chunkDescsOffset is not 0. It can be used to save the firstTime of the
// first chunk before its chunk desc is evicted. When in doubt, this field is
// just set to the oldest possible timestamp.
savedFirstTime clientmodel.Timestamp
savedFirstTime model.Time
// The timestamp of the last sample in this series. Needed for fast access to
// ensure timestamp monotonicity during ingestion.
lastTime clientmodel.Timestamp
lastTime model.Time
// Whether the current head chunk has already been finished. If true,
// the current head chunk must not be modified anymore.
headChunkClosed bool
@ -182,12 +182,12 @@ type memorySeries struct {
// the provided parameters. chunkDescs can be nil or empty if this is a
// genuinely new time series (i.e. not one that is being unarchived). In that
// case, headChunkClosed is set to false, and firstTime and lastTime are both
// set to clientmodel.Earliest. The zero value for modTime can be used if the
// set to model.Earliest. The zero value for modTime can be used if the
// modification time of the series file is unknown (e.g. if this is a genuinely
// new series).
func newMemorySeries(m clientmodel.Metric, chunkDescs []*chunkDesc, modTime time.Time) *memorySeries {
firstTime := clientmodel.Earliest
lastTime := clientmodel.Earliest
func newMemorySeries(m model.Metric, chunkDescs []*chunkDesc, modTime time.Time) *memorySeries {
firstTime := model.Earliest
lastTime := model.Earliest
if len(chunkDescs) > 0 {
firstTime = chunkDescs[0].firstTime()
lastTime = chunkDescs[len(chunkDescs)-1].lastTime()
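model.Earliest and model.Latest take over the sentinel roles of their clientmodel counterparts: a genuinely new series starts with both bounds at model.Earliest, and preloadChunksForRange further down seeds its search with model.Latest. A small sketch of how the sentinels compare against real timestamps, assuming the common/model Time API used in this commit:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	firstTime := model.Earliest // placeholder until the first sample arrives
	now := model.Now()

	// Every real timestamp sorts after Earliest and before Latest.
	fmt.Println(firstTime.Before(now))    // true
	fmt.Println(now.Before(model.Latest)) // true

	// Once a sample is ingested, the sentinel is replaced by its timestamp.
	firstTime = now
	fmt.Println(firstTime == model.Earliest) // false
}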
@ -281,7 +281,7 @@ func (s *memorySeries) evictChunkDescs(iOldestNotEvicted int) {
// dropChunks removes chunkDescs older than t. The caller must have locked the
// fingerprint of the series.
func (s *memorySeries) dropChunks(t clientmodel.Timestamp) {
func (s *memorySeries) dropChunks(t model.Time) {
keepIdx := len(s.chunkDescs)
for i, cd := range s.chunkDescs {
if !cd.lastTime().Before(t) {
@ -308,7 +308,7 @@ func (s *memorySeries) dropChunks(t clientmodel.Timestamp) {
// preloadChunks is an internal helper method.
func (s *memorySeries) preloadChunks(
indexes []int, fp clientmodel.Fingerprint, mss *memorySeriesStorage,
indexes []int, fp model.Fingerprint, mss *memorySeriesStorage,
) ([]*chunkDesc, error) {
loadIndexes := []int{}
pinnedChunkDescs := make([]*chunkDesc, 0, len(indexes))
@ -343,7 +343,7 @@ func (s *memorySeries) preloadChunks(
}
/*
func (s *memorySeries) preloadChunksAtTime(t clientmodel.Timestamp, p *persistence) (chunkDescs, error) {
func (s *memorySeries) preloadChunksAtTime(t model.Time, p *persistence) (chunkDescs, error) {
s.mtx.Lock()
defer s.mtx.Unlock()
@ -376,10 +376,10 @@ func (s *memorySeries) preloadChunksAtTime(t clientmodel.Timestamp, p *persisten
// preloadChunksForRange loads chunks for the given range from the persistence.
// The caller must have locked the fingerprint of the series.
func (s *memorySeries) preloadChunksForRange(
from clientmodel.Timestamp, through clientmodel.Timestamp,
fp clientmodel.Fingerprint, mss *memorySeriesStorage,
from model.Time, through model.Time,
fp model.Fingerprint, mss *memorySeriesStorage,
) ([]*chunkDesc, error) {
firstChunkDescTime := clientmodel.Latest
firstChunkDescTime := model.Latest
if len(s.chunkDescs) > 0 {
firstChunkDescTime = s.chunkDescs[0].firstTime()
}
@ -447,7 +447,7 @@ func (s *memorySeries) head() *chunkDesc {
// firstTime returns the timestamp of the first sample in the series. The caller
// must have locked the fingerprint of the memorySeries.
func (s *memorySeries) firstTime() clientmodel.Timestamp {
func (s *memorySeries) firstTime() model.Time {
if s.chunkDescsOffset == 0 && len(s.chunkDescs) > 0 {
return s.chunkDescs[0].firstTime()
}
@ -482,7 +482,7 @@ type memorySeriesIterator struct {
}
// ValueAtTime implements SeriesIterator.
func (it *memorySeriesIterator) ValueAtTime(t clientmodel.Timestamp) metric.Values {
func (it *memorySeriesIterator) ValueAtTime(t model.Time) metric.Values {
// The most common case. We are iterating through a chunk.
if it.chunkIt != nil && it.chunkIt.contains(t) {
return it.chunkIt.valueAtTime(t)
@ -638,7 +638,7 @@ func (it *memorySeriesIterator) chunkIterator(i int) chunkIterator {
type nopSeriesIterator struct{}
// ValueAtTime implements SeriesIterator.
func (_ nopSeriesIterator) ValueAtTime(t clientmodel.Timestamp) metric.Values {
func (_ nopSeriesIterator) ValueAtTime(t model.Time) metric.Values {
return metric.Values{}
}

View file

@ -23,7 +23,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
)
@ -309,7 +309,7 @@ func (s *memorySeriesStorage) WaitForIndexing() {
}
// NewIterator implements Storage.
func (s *memorySeriesStorage) NewIterator(fp clientmodel.Fingerprint) SeriesIterator {
func (s *memorySeriesStorage) NewIterator(fp model.Fingerprint) SeriesIterator {
s.fpLocker.Lock(fp)
defer s.fpLocker.Unlock(fp)
@ -324,12 +324,12 @@ func (s *memorySeriesStorage) NewIterator(fp clientmodel.Fingerprint) SeriesIter
}
return &boundedIterator{
it: series.newIterator(),
start: clientmodel.Now().Add(-s.dropAfter),
start: model.Now().Add(-s.dropAfter),
}
}
// LastSampleForFingerprint implements Storage.
func (s *memorySeriesStorage) LastSamplePairForFingerprint(fp clientmodel.Fingerprint) *metric.SamplePair {
func (s *memorySeriesStorage) LastSamplePairForFingerprint(fp model.Fingerprint) *metric.SamplePair {
s.fpLocker.Lock(fp)
defer s.fpLocker.Unlock(fp)
@ -344,11 +344,11 @@ func (s *memorySeriesStorage) LastSamplePairForFingerprint(fp clientmodel.Finger
// data from earlier than the configured start time.
type boundedIterator struct {
it SeriesIterator
start clientmodel.Timestamp
start model.Time
}
// ValueAtTime implements the SeriesIterator interface.
func (bit *boundedIterator) ValueAtTime(ts clientmodel.Timestamp) metric.Values {
func (bit *boundedIterator) ValueAtTime(ts model.Time) metric.Values {
if ts < bit.start {
return metric.Values{}
}
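boundedIterator shows the most common timestamp idiom in this migration: derive a cutoff with model.Now().Add(-retention) and compare model.Time values directly; since the type is an integer number of milliseconds, the < operator used above and the Before method are interchangeable. A minimal sketch of the cutoff check on its own (the retention value is arbitrary):

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	retention := 15 * 24 * time.Hour
	cutoff := model.Now().Add(-retention)

	// A sample timestamp older than the cutoff would be dropped by the iterator.
	sampleTime := model.Now().Add(-30 * 24 * time.Hour)
	if sampleTime.Before(cutoff) {
		fmt.Println("sample falls outside the retention window")
	}
}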
@ -386,10 +386,10 @@ func (s *memorySeriesStorage) NewPreloader() Preloader {
// fingerprintsForLabelPairs returns the set of fingerprints that have the given labels.
// This does not work with empty label values.
func (s *memorySeriesStorage) fingerprintsForLabelPairs(pairs ...metric.LabelPair) map[clientmodel.Fingerprint]struct{} {
var result map[clientmodel.Fingerprint]struct{}
func (s *memorySeriesStorage) fingerprintsForLabelPairs(pairs ...metric.LabelPair) map[model.Fingerprint]struct{} {
var result map[model.Fingerprint]struct{}
for _, pair := range pairs {
intersection := map[clientmodel.Fingerprint]struct{}{}
intersection := map[model.Fingerprint]struct{}{}
fps, err := s.persistence.fingerprintsForLabelPair(pair)
if err != nil {
log.Error("Error getting fingerprints for label pair: ", err)
@ -411,7 +411,7 @@ func (s *memorySeriesStorage) fingerprintsForLabelPairs(pairs ...metric.LabelPai
}
// MetricsForLabelMatchers implements Storage.
func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelMatcher) map[clientmodel.Fingerprint]clientmodel.COWMetric {
func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelMatcher) map[model.Fingerprint]model.COWMetric {
var (
equals []metric.LabelPair
filters []*metric.LabelMatcher
@ -427,7 +427,7 @@ func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelM
}
}
var resFPs map[clientmodel.Fingerprint]struct{}
var resFPs map[model.Fingerprint]struct{}
if len(equals) > 0 {
resFPs = s.fingerprintsForLabelPairs(equals...)
} else {
@ -440,7 +440,7 @@ func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelM
remaining = append(remaining, matcher)
continue
}
intersection := map[clientmodel.Fingerprint]struct{}{}
intersection := map[model.Fingerprint]struct{}{}
matches := matcher.Filter(s.LabelValuesForLabelName(matcher.Name))
if len(matches) == 0 {
@ -463,7 +463,7 @@ func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelM
filters = remaining
}
result := make(map[clientmodel.Fingerprint]clientmodel.COWMetric, len(resFPs))
result := make(map[model.Fingerprint]model.COWMetric, len(resFPs))
for fp := range resFPs {
result[fp] = s.MetricForFingerprint(fp)
}
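MetricsForLabelMatchers narrows its candidates by intersecting fingerprint sets, now typed map[model.Fingerprint]struct{}. A stripped-down sketch of that intersection step; the intersect helper and the throwaway metrics are invented for illustration rather than taken from the index code:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// intersect keeps only the fingerprints present in both sets.
func intersect(a, b map[model.Fingerprint]struct{}) map[model.Fingerprint]struct{} {
	out := map[model.Fingerprint]struct{}{}
	for fp := range a {
		if _, ok := b[fp]; ok {
			out[fp] = struct{}{}
		}
	}
	return out
}

func main() {
	fp1 := model.Metric{"job": "api", "instance": "a"}.FastFingerprint()
	fp2 := model.Metric{"job": "api", "instance": "b"}.FastFingerprint()

	byJob := map[model.Fingerprint]struct{}{fp1: {}, fp2: {}}
	byInstance := map[model.Fingerprint]struct{}{fp1: {}}

	fmt.Println(len(intersect(byJob, byInstance))) // 1
}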
@ -478,7 +478,7 @@ func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelM
}
// LabelValuesForLabelName implements Storage.
func (s *memorySeriesStorage) LabelValuesForLabelName(labelName clientmodel.LabelName) clientmodel.LabelValues {
func (s *memorySeriesStorage) LabelValuesForLabelName(labelName model.LabelName) model.LabelValues {
lvs, err := s.persistence.labelValuesForLabelName(labelName)
if err != nil {
log.Errorf("Error getting label values for label name %q: %v", labelName, err)
@ -487,7 +487,7 @@ func (s *memorySeriesStorage) LabelValuesForLabelName(labelName clientmodel.Labe
}
// MetricForFingerprint implements Storage.
func (s *memorySeriesStorage) MetricForFingerprint(fp clientmodel.Fingerprint) clientmodel.COWMetric {
func (s *memorySeriesStorage) MetricForFingerprint(fp model.Fingerprint) model.COWMetric {
s.fpLocker.Lock(fp)
defer s.fpLocker.Unlock(fp)
@ -495,7 +495,7 @@ func (s *memorySeriesStorage) MetricForFingerprint(fp clientmodel.Fingerprint) c
if ok {
// Wrap the returned metric in a copy-on-write (COW) metric here because
// the caller might mutate it.
return clientmodel.COWMetric{
return model.COWMetric{
Metric: series.metric,
}
}
@ -503,13 +503,13 @@ func (s *memorySeriesStorage) MetricForFingerprint(fp clientmodel.Fingerprint) c
if err != nil {
log.Errorf("Error retrieving archived metric for fingerprint %v: %v", fp, err)
}
return clientmodel.COWMetric{
return model.COWMetric{
Metric: metric,
}
}
// DropMetric implements Storage.
func (s *memorySeriesStorage) DropMetricsForFingerprints(fps ...clientmodel.Fingerprint) {
func (s *memorySeriesStorage) DropMetricsForFingerprints(fps ...model.Fingerprint) {
for _, fp := range fps {
s.fpLocker.Lock(fp)
@ -529,7 +529,7 @@ func (s *memorySeriesStorage) DropMetricsForFingerprints(fps ...clientmodel.Fing
}
// Append implements Storage.
func (s *memorySeriesStorage) Append(sample *clientmodel.Sample) {
func (s *memorySeriesStorage) Append(sample *model.Sample) {
for ln, lv := range sample.Metric {
if len(lv) == 0 {
delete(sample.Metric, ln)
@ -580,7 +580,7 @@ func (s *memorySeriesStorage) Append(sample *clientmodel.Sample) {
s.incNumChunksToPersist(completedChunksCount)
}
func (s *memorySeriesStorage) getOrCreateSeries(fp clientmodel.Fingerprint, m clientmodel.Metric) *memorySeries {
func (s *memorySeriesStorage) getOrCreateSeries(fp model.Fingerprint, m model.Metric) *memorySeries {
series, ok := s.fpToSeries.get(fp)
if !ok {
var cds []*chunkDesc
@ -614,8 +614,8 @@ func (s *memorySeriesStorage) getOrCreateSeries(fp clientmodel.Fingerprint, m cl
}
func (s *memorySeriesStorage) preloadChunksForRange(
fp clientmodel.Fingerprint,
from clientmodel.Timestamp, through clientmodel.Timestamp,
fp model.Fingerprint,
from model.Time, through model.Time,
stalenessDelta time.Duration,
) ([]*chunkDesc, error) {
s.fpLocker.Lock(fp)
@ -768,10 +768,10 @@ func (s *memorySeriesStorage) waitForNextFP(numberOfFPs int, maxWaitDurationFact
// cycleThroughMemoryFingerprints returns a channel that emits fingerprints for
// series in memory in a throttled fashion. It continues to cycle through all
// fingerprints in memory until s.loopStopping is closed.
func (s *memorySeriesStorage) cycleThroughMemoryFingerprints() chan clientmodel.Fingerprint {
memoryFingerprints := make(chan clientmodel.Fingerprint)
func (s *memorySeriesStorage) cycleThroughMemoryFingerprints() chan model.Fingerprint {
memoryFingerprints := make(chan model.Fingerprint)
go func() {
var fpIter <-chan clientmodel.Fingerprint
var fpIter <-chan model.Fingerprint
defer func() {
if fpIter != nil {
@ -815,14 +815,14 @@ func (s *memorySeriesStorage) cycleThroughMemoryFingerprints() chan clientmodel.
// cycleThroughArchivedFingerprints returns a channel that emits fingerprints
// for archived series in a throttled fashion. It continues to cycle through all
// archived fingerprints until s.loopStopping is closed.
func (s *memorySeriesStorage) cycleThroughArchivedFingerprints() chan clientmodel.Fingerprint {
archivedFingerprints := make(chan clientmodel.Fingerprint)
func (s *memorySeriesStorage) cycleThroughArchivedFingerprints() chan model.Fingerprint {
archivedFingerprints := make(chan model.Fingerprint)
go func() {
defer close(archivedFingerprints)
for {
archivedFPs, err := s.persistence.fingerprintsModifiedBefore(
clientmodel.Now().Add(-s.dropAfter),
model.Now().Add(-s.dropAfter),
)
if err != nil {
log.Error("Failed to lookup archived fingerprint ranges: ", err)
@ -878,7 +878,7 @@ loop:
dirtySeriesCount = 0
checkpointTimer.Reset(s.checkpointInterval)
case fp := <-memoryFingerprints:
if s.maintainMemorySeries(fp, clientmodel.Now().Add(-s.dropAfter)) {
if s.maintainMemorySeries(fp, model.Now().Add(-s.dropAfter)) {
dirtySeriesCount++
// Check if we have enough "dirty" series so that we need an early checkpoint.
// However, if we are already behind persisting chunks, creating a checkpoint
@ -892,7 +892,7 @@ loop:
}
}
case fp := <-archivedFingerprints:
s.maintainArchivedSeries(fp, clientmodel.Now().Add(-s.dropAfter))
s.maintainArchivedSeries(fp, model.Now().Add(-s.dropAfter))
}
}
// Wait until both channels are closed.
@ -934,7 +934,7 @@ loop:
//
// Finally, it evicts chunkDescs if there are too many.
func (s *memorySeriesStorage) maintainMemorySeries(
fp clientmodel.Fingerprint, beforeTime clientmodel.Timestamp,
fp model.Fingerprint, beforeTime model.Time,
) (becameDirty bool) {
defer func(begin time.Time) {
s.maintainSeriesDuration.WithLabelValues(maintainInMemory).Observe(
@ -1002,7 +1002,7 @@ func (s *memorySeriesStorage) maintainMemorySeries(
//
// The caller must have locked the fp.
func (s *memorySeriesStorage) writeMemorySeries(
fp clientmodel.Fingerprint, series *memorySeries, beforeTime clientmodel.Timestamp,
fp model.Fingerprint, series *memorySeries, beforeTime model.Time,
) bool {
cds := series.chunksToPersist()
defer func() {
@ -1071,7 +1071,7 @@ func (s *memorySeriesStorage) writeMemorySeries(
// maintainArchivedSeries drops chunks older than beforeTime from an archived
// series. If the series contains no chunks after that, it is purged entirely.
func (s *memorySeriesStorage) maintainArchivedSeries(fp clientmodel.Fingerprint, beforeTime clientmodel.Timestamp) {
func (s *memorySeriesStorage) maintainArchivedSeries(fp model.Fingerprint, beforeTime model.Time) {
defer func(begin time.Time) {
s.maintainSeriesDuration.WithLabelValues(maintainArchived).Observe(
float64(time.Since(begin)) / float64(time.Millisecond),
@ -1109,12 +1109,12 @@ func (s *memorySeriesStorage) maintainArchivedSeries(fp clientmodel.Fingerprint,
}
// See persistence.loadChunks for detailed explanation.
func (s *memorySeriesStorage) loadChunks(fp clientmodel.Fingerprint, indexes []int, indexOffset int) ([]chunk, error) {
func (s *memorySeriesStorage) loadChunks(fp model.Fingerprint, indexes []int, indexOffset int) ([]chunk, error) {
return s.persistence.loadChunks(fp, indexes, indexOffset)
}
// See persistence.loadChunkDescs for detailed explanation.
func (s *memorySeriesStorage) loadChunkDescs(fp clientmodel.Fingerprint, offsetFromEnd int) ([]*chunkDesc, error) {
func (s *memorySeriesStorage) loadChunkDescs(fp model.Fingerprint, offsetFromEnd int) ([]*chunkDesc, error) {
return s.persistence.loadChunkDescs(fp, offsetFromEnd)
}

View file

@ -24,7 +24,7 @@ import (
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/util/testutil"
@ -34,20 +34,20 @@ func TestMatches(t *testing.T) {
storage, closer := NewTestStorage(t, 1)
defer closer.Close()
samples := make([]*clientmodel.Sample, 100)
fingerprints := make(clientmodel.Fingerprints, 100)
samples := make([]*model.Sample, 100)
fingerprints := make(model.Fingerprints, 100)
for i := range samples {
metric := clientmodel.Metric{
clientmodel.MetricNameLabel: clientmodel.LabelValue(fmt.Sprintf("test_metric_%d", i)),
"label1": clientmodel.LabelValue(fmt.Sprintf("test_%d", i/10)),
"label2": clientmodel.LabelValue(fmt.Sprintf("test_%d", (i+5)/10)),
"all": "const",
metric := model.Metric{
model.MetricNameLabel: model.LabelValue(fmt.Sprintf("test_metric_%d", i)),
"label1": model.LabelValue(fmt.Sprintf("test_%d", i/10)),
"label2": model.LabelValue(fmt.Sprintf("test_%d", (i+5)/10)),
"all": "const",
}
samples[i] = &clientmodel.Sample{
samples[i] = &model.Sample{
Metric: metric,
Timestamp: clientmodel.Timestamp(i),
Value: clientmodel.SampleValue(i),
Timestamp: model.Time(i),
Value: model.SampleValue(i),
}
fingerprints[i] = metric.FastFingerprint()
}
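The fixture loop above shows the full shape of a sample after the switch: model.Metric for the label set, model.Time for the timestamp (constructible from a plain integer), and model.SampleValue for the value, with FastFingerprint predicting the series identity. A condensed, standalone version of one fixture:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	m := model.Metric{
		model.MetricNameLabel: "test_metric_0",
		"label1":              "test_0",
	}
	s := &model.Sample{
		Metric:    m,
		Timestamp: model.Time(0),
		Value:     model.SampleValue(0),
	}
	fmt.Println(s.Metric.FastFingerprint(), s.Timestamp, s.Value)
}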
@ -56,7 +56,7 @@ func TestMatches(t *testing.T) {
}
storage.WaitForIndexing()
newMatcher := func(matchType metric.MatchType, name clientmodel.LabelName, value clientmodel.LabelValue) *metric.LabelMatcher {
newMatcher := func(matchType metric.MatchType, name model.LabelName, value model.LabelValue) *metric.LabelMatcher {
lm, err := metric.NewLabelMatcher(matchType, name, value)
if err != nil {
t.Fatalf("error creating label matcher: %s", err)
@ -66,11 +66,11 @@ func TestMatches(t *testing.T) {
var matcherTests = []struct {
matchers metric.LabelMatchers
expected clientmodel.Fingerprints
expected model.Fingerprints
}{
{
matchers: metric.LabelMatchers{newMatcher(metric.Equal, "label1", "x")},
expected: clientmodel.Fingerprints{},
expected: model.Fingerprints{},
},
{
matchers: metric.LabelMatchers{newMatcher(metric.Equal, "label1", "test_0")},
@ -145,7 +145,7 @@ func TestMatches(t *testing.T) {
newMatcher(metric.Equal, "all", "const"),
newMatcher(metric.RegexNoMatch, "label1", `test_[3-5]`),
},
expected: append(append(clientmodel.Fingerprints{}, fingerprints[:30]...), fingerprints[60:]...),
expected: append(append(model.Fingerprints{}, fingerprints[:30]...), fingerprints[60:]...),
},
{
matchers: metric.LabelMatchers{
@ -159,21 +159,21 @@ func TestMatches(t *testing.T) {
newMatcher(metric.RegexMatch, "label1", `test_[3-5]`),
newMatcher(metric.NotEqual, "label2", `test_4`),
},
expected: append(append(clientmodel.Fingerprints{}, fingerprints[30:35]...), fingerprints[45:60]...),
expected: append(append(model.Fingerprints{}, fingerprints[30:35]...), fingerprints[45:60]...),
},
{
matchers: metric.LabelMatchers{
newMatcher(metric.Equal, "label1", `nonexistent`),
newMatcher(metric.RegexMatch, "label2", `test`),
},
expected: clientmodel.Fingerprints{},
expected: model.Fingerprints{},
},
{
matchers: metric.LabelMatchers{
newMatcher(metric.Equal, "label1", `test_0`),
newMatcher(metric.RegexMatch, "label2", `nonexistent`),
},
expected: clientmodel.Fingerprints{},
expected: model.Fingerprints{},
},
}
@ -201,19 +201,19 @@ func TestFingerprintsForLabels(t *testing.T) {
storage, closer := NewTestStorage(t, 1)
defer closer.Close()
samples := make([]*clientmodel.Sample, 100)
fingerprints := make(clientmodel.Fingerprints, 100)
samples := make([]*model.Sample, 100)
fingerprints := make(model.Fingerprints, 100)
for i := range samples {
metric := clientmodel.Metric{
clientmodel.MetricNameLabel: clientmodel.LabelValue(fmt.Sprintf("test_metric_%d", i)),
"label1": clientmodel.LabelValue(fmt.Sprintf("test_%d", i/10)),
"label2": clientmodel.LabelValue(fmt.Sprintf("test_%d", (i+5)/10)),
metric := model.Metric{
model.MetricNameLabel: model.LabelValue(fmt.Sprintf("test_metric_%d", i)),
"label1": model.LabelValue(fmt.Sprintf("test_%d", i/10)),
"label2": model.LabelValue(fmt.Sprintf("test_%d", (i+5)/10)),
}
samples[i] = &clientmodel.Sample{
samples[i] = &model.Sample{
Metric: metric,
Timestamp: clientmodel.Timestamp(i),
Value: clientmodel.SampleValue(i),
Timestamp: model.Time(i),
Value: model.SampleValue(i),
}
fingerprints[i] = metric.FastFingerprint()
}
@ -224,7 +224,7 @@ func TestFingerprintsForLabels(t *testing.T) {
var matcherTests = []struct {
pairs []metric.LabelPair
expected clientmodel.Fingerprints
expected model.Fingerprints
}{
{
pairs: []metric.LabelPair{{"label1", "x"}},
@ -277,21 +277,21 @@ func TestFingerprintsForLabels(t *testing.T) {
}
}
var benchLabelMatchingRes map[clientmodel.Fingerprint]clientmodel.COWMetric
var benchLabelMatchingRes map[model.Fingerprint]model.COWMetric
func BenchmarkLabelMatching(b *testing.B) {
s, closer := NewTestStorage(b, 1)
defer closer.Close()
h := fnv.New64a()
lbl := func(x int) clientmodel.LabelValue {
lbl := func(x int) model.LabelValue {
h.Reset()
h.Write([]byte(fmt.Sprintf("%d", x)))
return clientmodel.LabelValue(fmt.Sprintf("%d", h.Sum64()))
return model.LabelValue(fmt.Sprintf("%d", h.Sum64()))
}
M := 32
met := clientmodel.Metric{}
met := model.Metric{}
for i := 0; i < M; i++ {
met["label_a"] = lbl(i)
for j := 0; j < M; j++ {
@ -300,7 +300,7 @@ func BenchmarkLabelMatching(b *testing.B) {
met["label_c"] = lbl(k)
for l := 0; l < M; l++ {
met["label_d"] = lbl(l)
s.Append(&clientmodel.Sample{
s.Append(&model.Sample{
Metric: met.Clone(),
Timestamp: 0,
Value: 1,
@ -311,7 +311,7 @@ func BenchmarkLabelMatching(b *testing.B) {
}
s.WaitForIndexing()
newMatcher := func(matchType metric.MatchType, name clientmodel.LabelName, value clientmodel.LabelValue) *metric.LabelMatcher {
newMatcher := func(matchType metric.MatchType, name model.LabelName, value model.LabelValue) *metric.LabelMatcher {
lm, err := metric.NewLabelMatcher(matchType, name, value)
if err != nil {
b.Fatalf("error creating label matcher: %s", err)
@ -360,7 +360,7 @@ func BenchmarkLabelMatching(b *testing.B) {
b.ResetTimer()
for i := 0; i < b.N; i++ {
benchLabelMatchingRes = map[clientmodel.Fingerprint]clientmodel.COWMetric{}
benchLabelMatchingRes = map[model.Fingerprint]model.COWMetric{}
for _, mt := range matcherTests {
benchLabelMatchingRes = s.MetricsForLabelMatchers(mt...)
}
@ -370,7 +370,7 @@ func BenchmarkLabelMatching(b *testing.B) {
}
func TestRetentionCutoff(t *testing.T) {
now := clientmodel.Now()
now := model.Now()
insertStart := now.Add(-2 * time.Hour)
s, closer := NewTestStorage(t, 1)
@ -382,8 +382,8 @@ func TestRetentionCutoff(t *testing.T) {
s.dropAfter = 1 * time.Hour
for i := 0; i < 120; i++ {
smpl := &clientmodel.Sample{
Metric: clientmodel.Metric{"job": "test"},
smpl := &model.Sample{
Metric: model.Metric{"job": "test"},
Timestamp: insertStart.Add(time.Duration(i) * time.Minute), // 1 minute intervals.
Value: 1,
}
@ -391,7 +391,7 @@ func TestRetentionCutoff(t *testing.T) {
}
s.WaitForIndexing()
var fp clientmodel.Fingerprint
var fp model.Fingerprint
for f := range s.fingerprintsForLabelPairs(metric.LabelPair{Name: "job", Value: "test"}) {
fp = f
break
@ -414,7 +414,7 @@ func TestRetentionCutoff(t *testing.T) {
}
vals = it.RangeValues(metric.Interval{OldestInclusive: insertStart, NewestInclusive: now})
// We get 59 values here because the clientmodel.Now() is slightly later
// We get 59 values here because the model.Now() is slightly later
// than our now.
if len(vals) != 59 {
t.Errorf("expected 59 values but got %d", len(vals))
@ -433,35 +433,35 @@ func TestRetentionCutoff(t *testing.T) {
}
func TestDropMetrics(t *testing.T) {
now := clientmodel.Now()
now := model.Now()
insertStart := now.Add(-2 * time.Hour)
s, closer := NewTestStorage(t, 1)
defer closer.Close()
m1 := clientmodel.Metric{clientmodel.MetricNameLabel: "test", "n1": "v1"}
m2 := clientmodel.Metric{clientmodel.MetricNameLabel: "test", "n1": "v2"}
m1 := model.Metric{model.MetricNameLabel: "test", "n1": "v1"}
m2 := model.Metric{model.MetricNameLabel: "test", "n1": "v2"}
N := 120000
for j, m := range []clientmodel.Metric{m1, m2} {
for j, m := range []model.Metric{m1, m2} {
for i := 0; i < N; i++ {
smpl := &clientmodel.Sample{
smpl := &model.Sample{
Metric: m,
Timestamp: insertStart.Add(time.Duration(i) * time.Millisecond), // 1 millisecond intervals.
Value: clientmodel.SampleValue(j),
Value: model.SampleValue(j),
}
s.Append(smpl)
}
}
s.WaitForIndexing()
fps := s.fingerprintsForLabelPairs(metric.LabelPair{Name: clientmodel.MetricNameLabel, Value: "test"})
fps := s.fingerprintsForLabelPairs(metric.LabelPair{Name: model.MetricNameLabel, Value: "test"})
if len(fps) != 2 {
t.Fatalf("unexpected number of fingerprints: %d", len(fps))
}
var fpList clientmodel.Fingerprints
var fpList model.Fingerprints
for fp := range fps {
it := s.NewIterator(fp)
if vals := it.RangeValues(metric.Interval{OldestInclusive: insertStart, NewestInclusive: now}); len(vals) != N {
@ -474,7 +474,7 @@ func TestDropMetrics(t *testing.T) {
s.WaitForIndexing()
fps2 := s.fingerprintsForLabelPairs(metric.LabelPair{
Name: clientmodel.MetricNameLabel, Value: "test",
Name: model.MetricNameLabel, Value: "test",
})
if len(fps2) != 1 {
t.Fatalf("unexpected number of fingerprints: %d", len(fps2))
@ -493,7 +493,7 @@ func TestDropMetrics(t *testing.T) {
s.WaitForIndexing()
fps3 := s.fingerprintsForLabelPairs(metric.LabelPair{
Name: clientmodel.MetricNameLabel, Value: "test",
Name: model.MetricNameLabel, Value: "test",
})
if len(fps3) != 0 {
t.Fatalf("unexpected number of fingerprints: %d", len(fps3))
@ -515,11 +515,11 @@ func TestLoop(t *testing.T) {
if testing.Short() {
t.Skip("Skipping test in short mode.")
}
samples := make(clientmodel.Samples, 1000)
samples := make(model.Samples, 1000)
for i := range samples {
samples[i] = &clientmodel.Sample{
Timestamp: clientmodel.Timestamp(2 * i),
Value: clientmodel.SampleValue(float64(i) * 0.2),
samples[i] = &model.Sample{
Timestamp: model.Time(2 * i),
Value: model.SampleValue(float64(i) * 0.2),
}
}
directory := testutil.NewTemporaryDirectory("test_storage", t)
@ -540,7 +540,7 @@ func TestLoop(t *testing.T) {
storage.Append(s)
}
storage.WaitForIndexing()
series, _ := storage.(*memorySeriesStorage).fpToSeries.get(clientmodel.Metric{}.FastFingerprint())
series, _ := storage.(*memorySeriesStorage).fpToSeries.get(model.Metric{}.FastFingerprint())
cdsBefore := len(series.chunkDescs)
time.Sleep(fpMaxWaitDuration + time.Second) // TODO(beorn7): Ugh, need to wait for maintenance to kick in.
cdsAfter := len(series.chunkDescs)
@ -554,11 +554,11 @@ func TestLoop(t *testing.T) {
}
func testChunk(t *testing.T, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 500000)
samples := make(model.Samples, 500000)
for i := range samples {
samples[i] = &clientmodel.Sample{
Timestamp: clientmodel.Timestamp(i),
Value: clientmodel.SampleValue(float64(i) * 0.2),
samples[i] = &model.Sample{
Timestamp: model.Time(i),
Value: model.SampleValue(float64(i) * 0.2),
}
}
s, closer := NewTestStorage(t, encoding)
@ -604,11 +604,11 @@ func TestChunkType1(t *testing.T) {
}
func testValueAtTime(t *testing.T, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000)
samples := make(model.Samples, 10000)
for i := range samples {
samples[i] = &clientmodel.Sample{
Timestamp: clientmodel.Timestamp(2 * i),
Value: clientmodel.SampleValue(float64(i) * 0.2),
samples[i] = &model.Sample{
Timestamp: model.Time(2 * i),
Value: model.SampleValue(float64(i) * 0.2),
}
}
s, closer := NewTestStorage(t, encoding)
@ -619,7 +619,7 @@ func testValueAtTime(t *testing.T, encoding chunkEncoding) {
}
s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint()
fp := model.Metric{}.FastFingerprint()
it := s.NewIterator(fp)
@ -697,11 +697,11 @@ func TestValueAtTimeChunkType1(t *testing.T) {
}
func benchmarkValueAtTime(b *testing.B, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000)
samples := make(model.Samples, 10000)
for i := range samples {
samples[i] = &clientmodel.Sample{
Timestamp: clientmodel.Timestamp(2 * i),
Value: clientmodel.SampleValue(float64(i) * 0.2),
samples[i] = &model.Sample{
Timestamp: model.Time(2 * i),
Value: model.SampleValue(float64(i) * 0.2),
}
}
s, closer := NewTestStorage(b, encoding)
@ -712,7 +712,7 @@ func benchmarkValueAtTime(b *testing.B, encoding chunkEncoding) {
}
s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint()
fp := model.Metric{}.FastFingerprint()
b.ResetTimer()
@ -770,11 +770,11 @@ func BenchmarkValueAtTimeChunkType1(b *testing.B) {
}
func testRangeValues(t *testing.T, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000)
samples := make(model.Samples, 10000)
for i := range samples {
samples[i] = &clientmodel.Sample{
Timestamp: clientmodel.Timestamp(2 * i),
Value: clientmodel.SampleValue(float64(i) * 0.2),
samples[i] = &model.Sample{
Timestamp: model.Time(2 * i),
Value: model.SampleValue(float64(i) * 0.2),
}
}
s, closer := NewTestStorage(t, encoding)
@ -785,7 +785,7 @@ func testRangeValues(t *testing.T, encoding chunkEncoding) {
}
s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint()
fp := model.Metric{}.FastFingerprint()
it := s.NewIterator(fp)
@ -922,11 +922,11 @@ func TestRangeValuesChunkType1(t *testing.T) {
}
func benchmarkRangeValues(b *testing.B, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000)
samples := make(model.Samples, 10000)
for i := range samples {
samples[i] = &clientmodel.Sample{
Timestamp: clientmodel.Timestamp(2 * i),
Value: clientmodel.SampleValue(float64(i) * 0.2),
samples[i] = &model.Sample{
Timestamp: model.Time(2 * i),
Value: model.SampleValue(float64(i) * 0.2),
}
}
s, closer := NewTestStorage(b, encoding)
@ -937,7 +937,7 @@ func benchmarkRangeValues(b *testing.B, encoding chunkEncoding) {
}
s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint()
fp := model.Metric{}.FastFingerprint()
b.ResetTimer()
@ -967,11 +967,11 @@ func BenchmarkRangeValuesChunkType1(b *testing.B) {
}
func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000)
samples := make(model.Samples, 10000)
for i := range samples {
samples[i] = &clientmodel.Sample{
Timestamp: clientmodel.Timestamp(2 * i),
Value: clientmodel.SampleValue(float64(i * i)),
samples[i] = &model.Sample{
Timestamp: model.Time(2 * i),
Value: model.SampleValue(float64(i * i)),
}
}
s, closer := NewTestStorage(t, encoding)
@ -982,7 +982,7 @@ func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
}
s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint()
fp := model.Metric{}.FastFingerprint()
// Drop ~half of the chunks.
s.maintainMemorySeries(fp, 10000)
@ -997,7 +997,7 @@ func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
if actual[0].Timestamp < 6000 || actual[0].Timestamp > 10000 {
t.Errorf("1st timestamp out of expected range: %v", actual[0].Timestamp)
}
want := clientmodel.Timestamp(19998)
want := model.Time(19998)
if actual[1].Timestamp != want {
t.Errorf("2nd timestamp: want %v, got %v", want, actual[1].Timestamp)
}
@ -1026,7 +1026,7 @@ func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
// Persist head chunk so we can safely archive.
series.headChunkClosed = true
s.maintainMemorySeries(fp, clientmodel.Earliest)
s.maintainMemorySeries(fp, model.Earliest)
// Archive metrics.
s.fpToSeries.del(fp)
@ -1077,7 +1077,7 @@ func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
// Persist head chunk so we can safely archive.
series.headChunkClosed = true
s.maintainMemorySeries(fp, clientmodel.Earliest)
s.maintainMemorySeries(fp, model.Earliest)
// Archive metrics.
s.fpToSeries.del(fp)
@ -1096,7 +1096,7 @@ func testEvictAndPurgeSeries(t *testing.T, encoding chunkEncoding) {
}
// Unarchive metrics.
s.getOrCreateSeries(fp, clientmodel.Metric{})
s.getOrCreateSeries(fp, model.Metric{})
series, ok = s.fpToSeries.get(fp)
if !ok {
@ -1131,19 +1131,19 @@ func TestEvictAndPurgeSeriesChunkType1(t *testing.T) {
}
func testEvictAndLoadChunkDescs(t *testing.T, encoding chunkEncoding) {
samples := make(clientmodel.Samples, 10000)
samples := make(model.Samples, 10000)
for i := range samples {
samples[i] = &clientmodel.Sample{
Timestamp: clientmodel.Timestamp(2 * i),
Value: clientmodel.SampleValue(float64(i * i)),
samples[i] = &model.Sample{
Timestamp: model.Time(2 * i),
Value: model.SampleValue(float64(i * i)),
}
}
// Give last sample a timestamp of now so that the head chunk will not
// be closed (which would then archive the time series later as
// everything will get evicted).
samples[len(samples)-1] = &clientmodel.Sample{
Timestamp: clientmodel.Now(),
Value: clientmodel.SampleValue(3.14),
samples[len(samples)-1] = &model.Sample{
Timestamp: model.Now(),
Value: model.SampleValue(3.14),
}
s, closer := NewTestStorage(t, encoding)
@ -1157,7 +1157,7 @@ func testEvictAndLoadChunkDescs(t *testing.T, encoding chunkEncoding) {
}
s.WaitForIndexing()
fp := clientmodel.Metric{}.FastFingerprint()
fp := model.Metric{}.FastFingerprint()
series, ok := s.fpToSeries.get(fp)
if !ok {
@ -1203,16 +1203,16 @@ func TestEvictAndLoadChunkDescsType1(t *testing.T) {
}
func benchmarkAppend(b *testing.B, encoding chunkEncoding) {
samples := make(clientmodel.Samples, b.N)
samples := make(model.Samples, b.N)
for i := range samples {
samples[i] = &clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: clientmodel.LabelValue(fmt.Sprintf("test_metric_%d", i%10)),
"label1": clientmodel.LabelValue(fmt.Sprintf("test_metric_%d", i%10)),
"label2": clientmodel.LabelValue(fmt.Sprintf("test_metric_%d", i%10)),
samples[i] = &model.Sample{
Metric: model.Metric{
model.MetricNameLabel: model.LabelValue(fmt.Sprintf("test_metric_%d", i%10)),
"label1": model.LabelValue(fmt.Sprintf("test_metric_%d", i%10)),
"label2": model.LabelValue(fmt.Sprintf("test_metric_%d", i%10)),
},
Timestamp: clientmodel.Timestamp(i),
Value: clientmodel.SampleValue(i),
Timestamp: model.Time(i),
Value: model.SampleValue(i),
}
}
b.ResetTimer()
@ -1323,56 +1323,56 @@ func BenchmarkFuzzChunkType1(b *testing.B) {
benchmarkFuzz(b, 1)
}
func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
type valueCreator func() clientmodel.SampleValue
type deltaApplier func(clientmodel.SampleValue) clientmodel.SampleValue
func createRandomSamples(metricName string, minLen int) model.Samples {
type valueCreator func() model.SampleValue
type deltaApplier func(model.SampleValue) model.SampleValue
var (
maxMetrics = 5
maxStreakLength = 500
maxTimeDelta = 10000
maxTimeDeltaFactor = 10
timestamp = clientmodel.Now() - clientmodel.Timestamp(maxTimeDelta*maxTimeDeltaFactor*minLen/4) // So that some timestamps are in the future.
timestamp = model.Now() - model.Time(maxTimeDelta*maxTimeDeltaFactor*minLen/4) // So that some timestamps are in the future.
generators = []struct {
createValue valueCreator
applyDelta []deltaApplier
}{
{ // "Boolean".
createValue: func() clientmodel.SampleValue {
return clientmodel.SampleValue(rand.Intn(2))
createValue: func() model.SampleValue {
return model.SampleValue(rand.Intn(2))
},
applyDelta: []deltaApplier{
func(_ clientmodel.SampleValue) clientmodel.SampleValue {
return clientmodel.SampleValue(rand.Intn(2))
func(_ model.SampleValue) model.SampleValue {
return model.SampleValue(rand.Intn(2))
},
},
},
{ // Integer with int deltas of various byte length.
createValue: func() clientmodel.SampleValue {
return clientmodel.SampleValue(rand.Int63() - 1<<62)
createValue: func() model.SampleValue {
return model.SampleValue(rand.Int63() - 1<<62)
},
applyDelta: []deltaApplier{
func(v clientmodel.SampleValue) clientmodel.SampleValue {
return clientmodel.SampleValue(rand.Intn(1<<8) - 1<<7 + int(v))
func(v model.SampleValue) model.SampleValue {
return model.SampleValue(rand.Intn(1<<8) - 1<<7 + int(v))
},
func(v clientmodel.SampleValue) clientmodel.SampleValue {
return clientmodel.SampleValue(rand.Intn(1<<16) - 1<<15 + int(v))
func(v model.SampleValue) model.SampleValue {
return model.SampleValue(rand.Intn(1<<16) - 1<<15 + int(v))
},
func(v clientmodel.SampleValue) clientmodel.SampleValue {
return clientmodel.SampleValue(rand.Int63n(1<<32) - 1<<31 + int64(v))
func(v model.SampleValue) model.SampleValue {
return model.SampleValue(rand.Int63n(1<<32) - 1<<31 + int64(v))
},
},
},
{ // Float with float32 and float64 deltas.
createValue: func() clientmodel.SampleValue {
return clientmodel.SampleValue(rand.NormFloat64())
createValue: func() model.SampleValue {
return model.SampleValue(rand.NormFloat64())
},
applyDelta: []deltaApplier{
func(v clientmodel.SampleValue) clientmodel.SampleValue {
return v + clientmodel.SampleValue(float32(rand.NormFloat64()))
func(v model.SampleValue) model.SampleValue {
return v + model.SampleValue(float32(rand.NormFloat64()))
},
func(v clientmodel.SampleValue) clientmodel.SampleValue {
return v + clientmodel.SampleValue(rand.NormFloat64())
func(v model.SampleValue) model.SampleValue {
return v + model.SampleValue(rand.NormFloat64())
},
},
},
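createRandomSamples drives its fuzzing with two small function types: a valueCreator that seeds a model.SampleValue and a deltaApplier that perturbs it. A toy generator pair in the same style, using math/rand as the tests do; the three-step loop is only for demonstration:

package main

import (
	"fmt"
	"math/rand"

	"github.com/prometheus/common/model"
)

type valueCreator func() model.SampleValue
type deltaApplier func(model.SampleValue) model.SampleValue

func main() {
	create := valueCreator(func() model.SampleValue {
		return model.SampleValue(rand.NormFloat64())
	})
	applyDelta := deltaApplier(func(v model.SampleValue) model.SampleValue {
		return v + model.SampleValue(rand.NormFloat64())
	})

	v := create()
	for i := 0; i < 3; i++ {
		v = applyDelta(v)
		fmt.Println(v)
	}
}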
@ -1380,17 +1380,17 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
)
// Prefill result with two samples with colliding metrics (to test fingerprint mapping).
result := clientmodel.Samples{
&clientmodel.Sample{
Metric: clientmodel.Metric{
result := model.Samples{
&model.Sample{
Metric: model.Metric{
"instance": "ip-10-33-84-73.l05.ams5.s-cloud.net:24483",
"status": "503",
},
Value: 42,
Timestamp: timestamp,
},
&clientmodel.Sample{
Metric: clientmodel.Metric{
&model.Sample{
Metric: model.Metric{
"instance": "ip-10-33-84-73.l05.ams5.s-cloud.net:24480",
"status": "500",
},
@ -1399,11 +1399,11 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
},
}
metrics := []clientmodel.Metric{}
metrics := []model.Metric{}
for n := rand.Intn(maxMetrics); n >= 0; n-- {
metrics = append(metrics, clientmodel.Metric{
clientmodel.MetricNameLabel: clientmodel.LabelValue(metricName),
clientmodel.LabelName(fmt.Sprintf("labelname_%d", n+1)): clientmodel.LabelValue(fmt.Sprintf("labelvalue_%d", rand.Int())),
metrics = append(metrics, model.Metric{
model.MetricNameLabel: model.LabelValue(metricName),
model.LabelName(fmt.Sprintf("labelname_%d", n+1)): model.LabelValue(fmt.Sprintf("labelvalue_%d", rand.Int())),
})
}
@ -1414,10 +1414,10 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
generator := generators[rand.Intn(len(generators))]
createValue := generator.createValue
applyDelta := generator.applyDelta[rand.Intn(len(generator.applyDelta))]
incTimestamp := func() { timestamp += clientmodel.Timestamp(timeDelta * (rand.Intn(maxTimeDeltaFactor) + 1)) }
incTimestamp := func() { timestamp += model.Time(timeDelta * (rand.Intn(maxTimeDeltaFactor) + 1)) }
switch rand.Intn(4) {
case 0: // A single sample.
result = append(result, &clientmodel.Sample{
result = append(result, &model.Sample{
Metric: metric,
Value: createValue(),
Timestamp: timestamp,
@ -1425,7 +1425,7 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
incTimestamp()
case 1: // A streak of random sample values.
for n := rand.Intn(maxStreakLength); n >= 0; n-- {
result = append(result, &clientmodel.Sample{
result = append(result, &model.Sample{
Metric: metric,
Value: createValue(),
Timestamp: timestamp,
@ -1435,7 +1435,7 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
case 2: // A streak of sample values with incremental changes.
value := createValue()
for n := rand.Intn(maxStreakLength); n >= 0; n-- {
result = append(result, &clientmodel.Sample{
result = append(result, &model.Sample{
Metric: metric,
Value: value,
Timestamp: timestamp,
@ -1446,7 +1446,7 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
case 3: // A streak of constant sample values.
value := createValue()
for n := rand.Intn(maxStreakLength); n >= 0; n-- {
result = append(result, &clientmodel.Sample{
result = append(result, &model.Sample{
Metric: metric,
Value: value,
Timestamp: timestamp,
@ -1459,12 +1459,12 @@ func createRandomSamples(metricName string, minLen int) clientmodel.Samples {
return result
}
func verifyStorage(t testing.TB, s *memorySeriesStorage, samples clientmodel.Samples, maxAge time.Duration) bool {
func verifyStorage(t testing.TB, s *memorySeriesStorage, samples model.Samples, maxAge time.Duration) bool {
s.WaitForIndexing()
result := true
for _, i := range rand.Perm(len(samples)) {
sample := samples[i]
if sample.Timestamp.Before(clientmodel.TimestampFromTime(time.Now().Add(-maxAge))) {
if sample.Timestamp.Before(model.TimeFromUnixNano(time.Now().Add(-maxAge).UnixNano())) {
continue
// TODO: Once we have a guaranteed cutoff at the
// retention period, we can verify here that no results
@ -1501,15 +1501,15 @@ func TestAppendOutOfOrder(t *testing.T) {
s, closer := NewTestStorage(t, 1)
defer closer.Close()
m := clientmodel.Metric{
clientmodel.MetricNameLabel: "out_of_order",
m := model.Metric{
model.MetricNameLabel: "out_of_order",
}
for i, t := range []int{0, 2, 2, 1} {
s.Append(&clientmodel.Sample{
s.Append(&model.Sample{
Metric: m,
Timestamp: clientmodel.Timestamp(t),
Value: clientmodel.SampleValue(i),
Timestamp: model.Time(t),
Value: model.SampleValue(i),
})
}

View file

@ -14,13 +14,13 @@
package metric
import (
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
// LabelPair pairs a name with a value.
type LabelPair struct {
Name clientmodel.LabelName
Value clientmodel.LabelValue
Name model.LabelName
Value model.LabelValue
}
// Equal returns true iff both the Name and the Value of this LabelPair and o

View file

@ -17,7 +17,7 @@ import (
"fmt"
"regexp"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
// MatchType is an enum for label matching types.
@ -50,13 +50,13 @@ type LabelMatchers []*LabelMatcher
// LabelMatcher models the matching of a label.
type LabelMatcher struct {
Type MatchType
Name clientmodel.LabelName
Value clientmodel.LabelValue
Name model.LabelName
Value model.LabelValue
re *regexp.Regexp
}
// NewLabelMatcher returns a LabelMatcher object ready to use.
func NewLabelMatcher(matchType MatchType, name clientmodel.LabelName, value clientmodel.LabelValue) (*LabelMatcher, error) {
func NewLabelMatcher(matchType MatchType, name model.LabelName, value model.LabelValue) (*LabelMatcher, error) {
m := &LabelMatcher{
Type: matchType,
Name: name,
@ -77,7 +77,7 @@ func (m *LabelMatcher) String() string {
}
// Match returns true if the label matcher matches the supplied label value.
func (m *LabelMatcher) Match(v clientmodel.LabelValue) bool {
func (m *LabelMatcher) Match(v model.LabelValue) bool {
switch m.Type {
case Equal:
return m.Value == v
@ -94,8 +94,8 @@ func (m *LabelMatcher) Match(v clientmodel.LabelValue) bool {
// Filter takes a list of label values and returns all label values which match
// the label matcher.
func (m *LabelMatcher) Filter(in clientmodel.LabelValues) clientmodel.LabelValues {
out := clientmodel.LabelValues{}
func (m *LabelMatcher) Filter(in model.LabelValues) model.LabelValues {
out := model.LabelValues{}
for _, v := range in {
if m.Match(v) {
out = append(out, v)
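Filter is what the storage layer uses to turn a non-equality matcher into a concrete list of label values. A short usage sketch against the new model types, with the import path taken from this repository; the matcher arguments and candidate values are made up:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/common/model"

	"github.com/prometheus/prometheus/storage/metric"
)

func main() {
	lm, err := metric.NewLabelMatcher(metric.RegexMatch, "label1", "test_[0-2]")
	if err != nil {
		log.Fatal(err)
	}
	in := model.LabelValues{"test_0", "test_2", "test_5"}
	// Only the values matching the regular expression survive.
	fmt.Println(lm.Filter(in)) // [test_0 test_2]
}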

View file

@ -17,7 +17,7 @@ import (
"fmt"
"strconv"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
// MarshalJSON implements json.Marshaler.
@ -27,8 +27,8 @@ func (s SamplePair) MarshalJSON() ([]byte, error) {
// SamplePair pairs a SampleValue with a Timestamp.
type SamplePair struct {
Timestamp clientmodel.Timestamp
Value clientmodel.SampleValue
Timestamp model.Time
Value model.SampleValue
}
// Equal returns true if this SamplePair and o have equal Values and equal
@ -50,6 +50,6 @@ type Values []SamplePair
// Interval describes the inclusive interval between two Timestamps.
type Interval struct {
OldestInclusive clientmodel.Timestamp
NewestInclusive clientmodel.Timestamp
OldestInclusive model.Time
NewestInclusive model.Time
}
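SamplePair and Interval are the two small value types the query layer passes around; after the switch they are built from model.Time and model.SampleValue. A trivial construction sketch, using the same storage/metric import as the rest of this commit:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"

	"github.com/prometheus/prometheus/storage/metric"
)

func main() {
	now := model.Now()
	sp := metric.SamplePair{Timestamp: now, Value: 0.2}
	iv := metric.Interval{
		OldestInclusive: now.Add(-time.Hour),
		NewestInclusive: now,
	}
	fmt.Println(sp, iv.OldestInclusive.Before(iv.NewestInclusive))
}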

View file

@ -23,10 +23,9 @@ import (
"net/url"
"time"
"github.com/prometheus/common/model"
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/util/httputil"
)
@ -63,23 +62,23 @@ type StoreSamplesRequest struct {
// point represents a single InfluxDB measurement.
type point struct {
Timestamp int64 `json:"timestamp"`
Precision string `json:"precision"`
Name clientmodel.LabelValue `json:"name"`
Tags clientmodel.LabelSet `json:"tags"`
Fields fields `json:"fields"`
Timestamp int64 `json:"timestamp"`
Precision string `json:"precision"`
Name model.LabelValue `json:"name"`
Tags model.LabelSet `json:"tags"`
Fields fields `json:"fields"`
}
// fields represents the fields/columns sent to InfluxDB for a given measurement.
type fields struct {
Value clientmodel.SampleValue `json:"value"`
Value model.SampleValue `json:"value"`
}
// tagsFromMetric extracts InfluxDB tags from a Prometheus metric.
func tagsFromMetric(m clientmodel.Metric) clientmodel.LabelSet {
tags := make(clientmodel.LabelSet, len(m)-1)
func tagsFromMetric(m model.Metric) model.LabelSet {
tags := make(model.LabelSet, len(m)-1)
for l, v := range m {
if l == clientmodel.MetricNameLabel {
if l == model.MetricNameLabel {
continue
}
tags[l] = v
@ -88,7 +87,7 @@ func tagsFromMetric(m clientmodel.Metric) clientmodel.LabelSet {
}
// Store sends a batch of samples to InfluxDB via its HTTP API.
func (c *Client) Store(samples clientmodel.Samples) error {
func (c *Client) Store(samples model.Samples) error {
points := make([]point, 0, len(samples))
for _, s := range samples {
v := float64(s.Value)
@ -98,7 +97,7 @@ func (c *Client) Store(samples clientmodel.Samples) error {
log.Warnf("cannot send value %f to InfluxDB, skipping sample %#v", v, s)
continue
}
metric := s.Metric[clientmodel.MetricNameLabel]
metric := s.Metric[model.MetricNameLabel]
points = append(points, point{
Timestamp: s.Timestamp.UnixNano(),
Precision: "n",

View file

@ -21,33 +21,33 @@ import (
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
func TestClient(t *testing.T) {
samples := clientmodel.Samples{
samples := model.Samples{
{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "testmetric",
"test_label": "test_label_value1",
Metric: model.Metric{
model.MetricNameLabel: "testmetric",
"test_label": "test_label_value1",
},
Timestamp: clientmodel.Timestamp(123456789123),
Timestamp: model.Time(123456789123),
Value: 1.23,
},
{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "testmetric",
"test_label": "test_label_value2",
Metric: model.Metric{
model.MetricNameLabel: "testmetric",
"test_label": "test_label_value2",
},
Timestamp: clientmodel.Timestamp(123456789123),
Timestamp: model.Time(123456789123),
Value: 5.1234,
},
{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "special_float_value",
Metric: model.Metric{
model.MetricNameLabel: "special_float_value",
},
Timestamp: clientmodel.Timestamp(123456789123),
Value: clientmodel.SampleValue(math.NaN()),
Timestamp: model.Time(123456789123),
Value: model.SampleValue(math.NaN()),
},
}

View file

@ -26,7 +26,7 @@ import (
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/util/httputil"
)
@ -64,10 +64,10 @@ type StoreSamplesRequest struct {
}
// tagsFromMetric translates a Prometheus metric into OpenTSDB tags.
func tagsFromMetric(m clientmodel.Metric) map[string]TagValue {
func tagsFromMetric(m model.Metric) map[string]TagValue {
tags := make(map[string]TagValue, len(m)-1)
for l, v := range m {
if l == clientmodel.MetricNameLabel {
if l == model.MetricNameLabel {
continue
}
tags[string(l)] = TagValue(v)
@ -76,7 +76,7 @@ func tagsFromMetric(m clientmodel.Metric) map[string]TagValue {
}
// Store sends a batch of samples to OpenTSDB via its HTTP API.
func (c *Client) Store(samples clientmodel.Samples) error {
func (c *Client) Store(samples model.Samples) error {
reqs := make([]StoreSamplesRequest, 0, len(samples))
for _, s := range samples {
v := float64(s.Value)
@ -84,7 +84,7 @@ func (c *Client) Store(samples clientmodel.Samples) error {
log.Warnf("cannot send value %f to OpenTSDB, skipping sample %#v", v, s)
continue
}
metric := TagValue(s.Metric[clientmodel.MetricNameLabel])
metric := TagValue(s.Metric[model.MetricNameLabel])
reqs = append(reqs, StoreSamplesRequest{
Metric: metric,
Timestamp: s.Timestamp.Unix(),

View file

@ -19,14 +19,14 @@ import (
"reflect"
"testing"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
var (
metric = clientmodel.Metric{
clientmodel.MetricNameLabel: "test:metric",
"testlabel": "test:value",
"many_chars": "abc!ABC:012-3!45ö67~89./",
metric = model.Metric{
model.MetricNameLabel: "test:metric",
"testlabel": "test:value",
"many_chars": "abc!ABC:012-3!45ö67~89./",
}
)

View file

@ -4,14 +4,14 @@ import (
"bytes"
"fmt"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
// TagValue is a clientmodel.LabelValue that implements json.Marshaler and
// TagValue is a model.LabelValue that implements json.Marshaler and
// json.Unmarshaler. These implementations avoid characters illegal in
// OpenTSDB. See the MarshalJSON for details. TagValue is used for the values of
// OpenTSDB tags as well as for OpenTSDB metric names.
type TagValue clientmodel.LabelValue
type TagValue model.LabelValue
// MarshalJSON marshals this TagValue into JSON that only contains runes allowed
// in OpenTSDB. It implements json.Marshaler. The runes allowed in OpenTSDB are

View file

@ -19,7 +19,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
const (
@ -47,7 +47,7 @@ const (
// external timeseries database.
type StorageClient interface {
// Store stores the given samples in the remote storage.
Store(clientmodel.Samples) error
Store(model.Samples) error
// Name identifies the remote storage implementation.
Name() string
}
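StorageClient is all a remote-write backend has to satisfy, and the only signature that changes here is Store, which now takes model.Samples. A hedged sketch of a trivial implementation; logClient is invented purely to show the shape:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// logClient satisfies the StorageClient shape shown above:
// Store(model.Samples) error and Name() string.
type logClient struct{}

func (logClient) Store(s model.Samples) error {
	for _, smpl := range s {
		fmt.Println(smpl.Metric, smpl.Timestamp, smpl.Value)
	}
	return nil
}

func (logClient) Name() string { return "log" }

func main() {
	c := logClient{}
	_ = c.Store(model.Samples{
		{Metric: model.Metric{model.MetricNameLabel: "up"}, Timestamp: model.Now(), Value: 1},
	})
}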
@ -56,8 +56,8 @@ type StorageClient interface {
// indicated by the provided StorageClient.
type StorageQueueManager struct {
tsdb StorageClient
queue chan *clientmodel.Sample
pendingSamples clientmodel.Samples
queue chan *model.Sample
pendingSamples model.Samples
sendSemaphore chan bool
drained chan bool
@ -76,7 +76,7 @@ func NewStorageQueueManager(tsdb StorageClient, queueCapacity int) *StorageQueue
return &StorageQueueManager{
tsdb: tsdb,
queue: make(chan *clientmodel.Sample, queueCapacity),
queue: make(chan *model.Sample, queueCapacity),
sendSemaphore: make(chan bool, maxConcurrentSends),
drained: make(chan bool),
@ -127,7 +127,7 @@ func NewStorageQueueManager(tsdb StorageClient, queueCapacity int) *StorageQueue
// Append queues a sample to be sent to the remote storage. It drops the
// sample on the floor if the queue is full. It implements
// storage.SampleAppender.
func (t *StorageQueueManager) Append(s *clientmodel.Sample) {
func (t *StorageQueueManager) Append(s *model.Sample) {
select {
case t.queue <- s:
default:
@ -165,7 +165,7 @@ func (t *StorageQueueManager) Collect(ch chan<- prometheus.Metric) {
ch <- t.queueCapacity
}
func (t *StorageQueueManager) sendSamples(s clientmodel.Samples) {
func (t *StorageQueueManager) sendSamples(s model.Samples) {
t.sendSemaphore <- true
defer func() {
<-t.sendSemaphore

View file

@ -17,16 +17,16 @@ import (
"sync"
"testing"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
type TestStorageClient struct {
receivedSamples clientmodel.Samples
expectedSamples clientmodel.Samples
receivedSamples model.Samples
expectedSamples model.Samples
wg sync.WaitGroup
}
func (c *TestStorageClient) expectSamples(s clientmodel.Samples) {
func (c *TestStorageClient) expectSamples(s model.Samples) {
c.expectedSamples = append(c.expectedSamples, s...)
c.wg.Add(len(s))
}
@ -40,7 +40,7 @@ func (c *TestStorageClient) waitForExpectedSamples(t *testing.T) {
}
}
func (c *TestStorageClient) Store(s clientmodel.Samples) error {
func (c *TestStorageClient) Store(s model.Samples) error {
c.receivedSamples = append(c.receivedSamples, s...)
c.wg.Add(-len(s))
return nil
@ -55,13 +55,13 @@ func TestSampleDelivery(t *testing.T) {
// batch timeout case.
n := maxSamplesPerSend * 2
samples := make(clientmodel.Samples, 0, n)
samples := make(model.Samples, 0, n)
for i := 0; i < n; i++ {
samples = append(samples, &clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "test_metric",
samples = append(samples, &model.Sample{
Metric: model.Metric{
model.MetricNameLabel: "test_metric",
},
Value: clientmodel.SampleValue(i),
Value: model.SampleValue(i),
})
}

View file

@ -21,7 +21,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
// Storage collects multiple remote storage queues.
@ -70,7 +70,7 @@ func (s *Storage) Stop() {
}
// Append implements storage.SampleAppender.
func (s *Storage) Append(smpl *clientmodel.Sample) {
func (s *Storage) Append(smpl *model.Sample) {
for _, q := range s.queues {
q.Append(smpl)
}

View file

@ -14,13 +14,13 @@
package storage
import (
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
// SampleAppender is the interface to append samples to both local and remote
// storage.
type SampleAppender interface {
Append(*clientmodel.Sample)
Append(*model.Sample)
}
// Fanout is a SampleAppender that appends every sample to a list of other
@ -30,7 +30,7 @@ type Fanout []SampleAppender
// Append implements SampleAppender. It appends the provided sample to all
// SampleAppenders in the Fanout slice and waits for each append to complete
// before proceeding with the next.
func (f Fanout) Append(s *clientmodel.Sample) {
func (f Fanout) Append(s *model.Sample) {
for _, a := range f {
a.Append(s)
}
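SampleAppender stays a one-method interface, now over *model.Sample, and Fanout remains a plain slice that forwards each sample in order. A compact sketch of the same wiring; sampleAppender and countingAppender are local stand-ins invented for the example:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// sampleAppender mirrors the one-method SampleAppender interface above.
type sampleAppender interface {
	Append(*model.Sample)
}

// countingAppender just counts how many samples it receives.
type countingAppender struct{ n int }

func (a *countingAppender) Append(s *model.Sample) { a.n++ }

func main() {
	local := &countingAppender{}
	remote := &countingAppender{}
	fanout := []sampleAppender{local, remote}

	s := &model.Sample{
		Metric:    model.Metric{model.MetricNameLabel: "up"},
		Timestamp: model.Now(),
		Value:     1,
	}
	// Fanout.Append does exactly this loop over its appenders.
	for _, a := range fanout {
		a.Append(s)
	}
	fmt.Println(local.n, remote.n) // 1 1
}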

View file

@ -25,7 +25,7 @@ import (
html_template "html/template"
text_template "text/template"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/util/strutil"
@ -55,7 +55,7 @@ func (q queryResultByLabelSorter) Swap(i, j int) {
q.results[i], q.results[j] = q.results[j], q.results[i]
}
func query(q string, timestamp clientmodel.Timestamp, queryEngine *promql.Engine) (queryResult, error) {
func query(q string, timestamp model.Time, queryEngine *promql.Engine) (queryResult, error) {
query, err := queryEngine.NewInstantQuery(q, timestamp)
if err != nil {
return nil, err
@ -78,8 +78,8 @@ func query(q string, timestamp clientmodel.Timestamp, queryEngine *promql.Engine
}}
case *promql.String:
vector = promql.Vector{&promql.Sample{
Metric: clientmodel.COWMetric{
Metric: clientmodel.Metric{"__value__": clientmodel.LabelValue(v.Value)},
Metric: model.COWMetric{
Metric: model.Metric{"__value__": model.LabelValue(v.Value)},
Copied: true,
},
Timestamp: v.Timestamp,
@ -112,7 +112,7 @@ type templateExpander struct {
}
// NewTemplateExpander returns a template expander ready to use.
func NewTemplateExpander(text string, name string, data interface{}, timestamp clientmodel.Timestamp, queryEngine *promql.Engine, pathPrefix string) *templateExpander {
func NewTemplateExpander(text string, name string, data interface{}, timestamp model.Time, queryEngine *promql.Engine, pathPrefix string) *templateExpander {
return &templateExpander{
text: text,
name: name,
@ -242,7 +242,7 @@ func NewTemplateExpander(text string, name string, data interface{}, timestamp c
if math.IsNaN(v) || math.IsInf(v, 0) {
return fmt.Sprintf("%.4g", v)
}
t := clientmodel.TimestampFromUnixNano(int64(v * 1e9)).Time().UTC()
t := model.TimeFromUnixNano(int64(v * 1e9)).Time().UTC()
return fmt.Sprint(t)
},
"pathPrefix": func() string {

View file

@ -17,7 +17,7 @@ import (
"math"
"testing"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/local"
@ -140,7 +140,7 @@ func TestTemplateExpansion(t *testing.T) {
output: "+Inf:+Inf:+Inf:+Inf:-Inf:-Inf:-Inf:-Inf:NaN:NaN:NaN:NaN:",
},
{
// HumanizeTimestamp - clientmodel.SampleValue input.
// HumanizeTimestamp - model.SampleValue input.
text: "{{ 1435065584.128 | humanizeTimestamp }}",
output: "2015-06-23 13:19:44.128 +0000 UTC",
},
@ -172,20 +172,20 @@ func TestTemplateExpansion(t *testing.T) {
},
}
time := clientmodel.Timestamp(0)
time := model.Time(0)
storage, closer := local.NewTestStorage(t, 1)
defer closer.Close()
storage.Append(&clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "metric",
"instance": "a"},
storage.Append(&model.Sample{
Metric: model.Metric{
model.MetricNameLabel: "metric",
"instance": "a"},
Value: 11,
})
storage.Append(&clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "metric",
"instance": "b"},
storage.Append(&model.Sample{
Metric: model.Metric{
model.MetricNameLabel: "metric",
"instance": "b"},
Value: 21,
})
storage.WaitForIndexing()
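A side note on the fixture above: model.Time counts milliseconds since the Unix epoch, so model.Time(0) pins the test to 1970-01-01 UTC, just as clientmodel.Timestamp(0) did. A one-line standalone check:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	fmt.Println(model.Time(0).Time().UTC()) // 1970-01-01 00:00:00 +0000 UTC
}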

View file

@ -18,7 +18,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/local"
@ -28,7 +28,7 @@ import (
// API manages the /api HTTP endpoint.
type API struct {
Now func() clientmodel.Timestamp
Now func() model.Time
Storage local.Storage
QueryEngine *promql.Engine
}
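model.Now() is the drop-in replacement for clientmodel.Now(), so wiring the Now field stays a one-liner (the actual wiring appears in web.go further below); a minimal sketch:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	// The legacy API's Now field keeps the same shape, just with the new
	// type: a func() model.Time. model.Now satisfies it directly.
	var now func() model.Time = model.Now
	fmt.Println(now().Time().UTC())
}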

View file

@ -21,7 +21,7 @@ import (
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/local"
@ -35,9 +35,9 @@ import (
// query layer precisely without any change. Thus we round to seconds and then
// add known-good digits after the decimal point which behave well in
// parsing/re-formatting.
var testTimestamp = clientmodel.TimestampFromTime(time.Now().Round(time.Second)).Add(124 * time.Millisecond)
var testTimestamp = model.TimeFromUnix(time.Now().Round(time.Second).Unix()).Add(124 * time.Millisecond)
func testNow() clientmodel.Timestamp {
func testNow() model.Time {
return testTimestamp
}
@ -89,9 +89,9 @@ func TestQuery(t *testing.T) {
storage, closer := local.NewTestStorage(t, 1)
defer closer.Close()
storage.Append(&clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "testmetric",
storage.Append(&model.Sample{
Metric: model.Metric{
model.MetricNameLabel: "testmetric",
},
Timestamp: testTimestamp,
Value: 0,
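Rather than a direct from-time.Time constructor, the patch converts through Unix seconds or nanoseconds; a standalone sketch of both conversions (the date is an arbitrary example):

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	t := time.Date(2015, time.August, 20, 17, 18, 46, 500000000, time.UTC)

	// Second resolution plus an explicit offset, as testTimestamp does above …
	a := model.TimeFromUnix(t.Round(time.Second).Unix()).Add(124 * time.Millisecond)
	// … or full conversion via nanoseconds; model.Time keeps milliseconds,
	// so anything finer is truncated.
	b := model.TimeFromUnixNano(t.UnixNano())

	fmt.Println(a.Time().UTC(), b.Time().UTC())
}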

View file

@ -26,7 +26,7 @@ import (
"github.com/prometheus/log"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql"
)
@ -44,7 +44,7 @@ func httpJSONError(w http.ResponseWriter, err error, code int) {
errorJSON(w, err)
}
func parseTimestampOrNow(t string, now clientmodel.Timestamp) (clientmodel.Timestamp, error) {
func parseTimestampOrNow(t string, now model.Time) (model.Time, error) {
if t == "" {
return now, nil
}
@ -53,7 +53,7 @@ func parseTimestampOrNow(t string, now clientmodel.Timestamp) (clientmodel.Times
if err != nil {
return 0, err
}
return clientmodel.TimestampFromUnixNano(int64(tFloat * float64(time.Second/time.Nanosecond))), nil
return model.TimeFromUnixNano(int64(tFloat * float64(time.Second/time.Nanosecond))), nil
}
func parseDuration(d string) (time.Duration, error) {
@ -223,7 +223,7 @@ func (api *API) Metrics(w http.ResponseWriter, r *http.Request) {
setAccessControlHeaders(w)
w.Header().Set("Content-Type", "application/json")
metricNames := api.Storage.LabelValuesForLabelName(clientmodel.MetricNameLabel)
metricNames := api.Storage.LabelValuesForLabelName(model.MetricNameLabel)
sort.Sort(metricNames)
resultBytes, err := json.Marshal(metricNames)
if err != nil {
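The converted parseTimestampOrNow keeps its float-seconds contract; a standalone sketch of the same conversion with the new type (parseTS is a made-up name for illustration):

package main

import (
	"fmt"
	"strconv"
	"time"

	"github.com/prometheus/common/model"
)

// parseTS mirrors parseTimestampOrNow above: an empty string falls back to
// now, anything else is parsed as float Unix seconds and converted to the
// millisecond-resolution model.Time.
func parseTS(s string, now model.Time) (model.Time, error) {
	if s == "" {
		return now, nil
	}
	f, err := strconv.ParseFloat(s, 64)
	if err != nil {
		return 0, err
	}
	return model.TimeFromUnixNano(int64(f * float64(time.Second/time.Nanosecond))), nil
}

func main() {
	ts, err := parseTS("1426956073.123", model.Now())
	fmt.Println(ts.Time().UTC(), err)
}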

View file

@ -17,7 +17,7 @@ import (
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
)
func TestParseTimestampOrNow(t *testing.T) {
@ -33,7 +33,7 @@ func TestParseTimestampOrNow(t *testing.T) {
if err != nil {
t.Fatalf("err = %s; want nil", err)
}
expTS := clientmodel.TimestampFromUnixNano(1426956073123000000)
expTS := model.TimeFromUnixNano(1426956073123000000)
if !ts.Equal(expTS) {
t.Fatalf("ts = %v; want %v", ts, expTS)
}
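The expected value in this test works out because model.Time stores milliseconds, so the nanosecond literal divides down exactly; a quick standalone check:

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	expTS := model.TimeFromUnixNano(1426956073123000000)
	// 1426956073123000000 ns == 1426956073123 ms, so the comparison in the
	// test is an exact equality, not an approximation.
	fmt.Println(expTS.Equal(model.Time(1426956073123)), expTS.Time().UTC())
}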

View file

@ -12,7 +12,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"golang.org/x/net/context"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/local"
@ -174,10 +174,10 @@ func (api *API) queryRange(r *http.Request) (interface{}, *apiError) {
func (api *API) labelValues(r *http.Request) (interface{}, *apiError) {
name := route.Param(api.context(r), "name")
if !clientmodel.LabelNameRE.MatchString(name) {
if !model.LabelNameRE.MatchString(name) {
return nil, &apiError{errorBadData, fmt.Errorf("invalid label name: %q", name)}
}
vals := api.Storage.LabelValuesForLabelName(clientmodel.LabelName(name))
vals := api.Storage.LabelValuesForLabelName(model.LabelName(name))
sort.Sort(vals)
return vals, nil
@ -188,7 +188,7 @@ func (api *API) series(r *http.Request) (interface{}, *apiError) {
if len(r.Form["match[]"]) == 0 {
return nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")}
}
res := map[clientmodel.Fingerprint]clientmodel.COWMetric{}
res := map[model.Fingerprint]model.COWMetric{}
for _, lm := range r.Form["match[]"] {
matchers, err := promql.ParseMetricSelector(lm)
@ -200,7 +200,7 @@ func (api *API) series(r *http.Request) (interface{}, *apiError) {
}
}
metrics := make([]clientmodel.Metric, 0, len(res))
metrics := make([]model.Metric, 0, len(res))
for _, met := range res {
metrics = append(metrics, met.Metric)
}
@ -212,7 +212,7 @@ func (api *API) dropSeries(r *http.Request) (interface{}, *apiError) {
if len(r.Form["match[]"]) == 0 {
return nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")}
}
fps := map[clientmodel.Fingerprint]struct{}{}
fps := map[model.Fingerprint]struct{}{}
for _, lm := range r.Form["match[]"] {
matchers, err := promql.ParseMetricSelector(lm)
@ -265,13 +265,13 @@ func respondError(w http.ResponseWriter, apiErr *apiError, data interface{}) {
w.Write(b)
}
func parseTime(s string) (clientmodel.Timestamp, error) {
func parseTime(s string) (model.Time, error) {
if t, err := strconv.ParseFloat(s, 64); err == nil {
ts := int64(t * float64(time.Second))
return clientmodel.TimestampFromUnixNano(ts), nil
return model.TimeFromUnixNano(ts), nil
}
if t, err := time.Parse(time.RFC3339Nano, s); err == nil {
return clientmodel.TimestampFromTime(t), nil
return model.TimeFromUnixNano(t.UnixNano()), nil
}
return 0, fmt.Errorf("cannot parse %q to a valid timestamp", s)
}
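Label handling moves over one-to-one as well: the validation regexp and the sortable value list now live in the common model package. A standalone sketch using only identifiers from the hunks above:

package main

import (
	"fmt"
	"sort"

	"github.com/prometheus/common/model"
)

func main() {
	// clientmodel.LabelNameRE is now model.LabelNameRE.
	fmt.Println(model.LabelNameRE.MatchString("__name__")) // true
	fmt.Println(model.LabelNameRE.MatchString("0bad"))     // false

	// model.LabelValues still implements sort.Interface, so the
	// sort.Sort(vals) call in labelValues keeps working unchanged.
	vals := model.LabelValues{"test_metric2", "test_metric1"}
	sort.Sort(vals)
	fmt.Println(vals)
}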

View file

@ -14,7 +14,7 @@ import (
"golang.org/x/net/context"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/metric"
@ -42,7 +42,7 @@ func TestEndpoints(t *testing.T) {
QueryEngine: suite.QueryEngine(),
}
start := clientmodel.Timestamp(0)
start := model.Time(0)
var tests = []struct {
endpoint apiFunc
params map[string]string
@ -173,7 +173,7 @@ func TestEndpoints(t *testing.T) {
params: map[string]string{
"name": "__name__",
},
response: clientmodel.LabelValues{
response: model.LabelValues{
"test_metric1",
"test_metric2",
},
@ -183,7 +183,7 @@ func TestEndpoints(t *testing.T) {
params: map[string]string{
"name": "foo",
},
response: clientmodel.LabelValues{
response: model.LabelValues{
"bar",
"boo",
},
@ -201,7 +201,7 @@ func TestEndpoints(t *testing.T) {
query: url.Values{
"match[]": []string{`test_metric2`},
},
response: []clientmodel.Metric{
response: []model.Metric{
{
"__name__": "test_metric2",
"foo": "boo",
@ -213,7 +213,7 @@ func TestEndpoints(t *testing.T) {
query: url.Values{
"match[]": []string{`test_metric1{foo=~"o$"}`},
},
response: []clientmodel.Metric{
response: []model.Metric{
{
"__name__": "test_metric1",
"foo": "boo",
@ -225,7 +225,7 @@ func TestEndpoints(t *testing.T) {
query: url.Values{
"match[]": []string{`test_metric1{foo=~"o$"}`, `test_metric1{foo=~"o$"}`},
},
response: []clientmodel.Metric{
response: []model.Metric{
{
"__name__": "test_metric1",
"foo": "boo",
@ -237,7 +237,7 @@ func TestEndpoints(t *testing.T) {
query: url.Values{
"match[]": []string{`test_metric1{foo=~"o$"}`, `none`},
},
response: []clientmodel.Metric{
response: []model.Metric{
{
"__name__": "test_metric1",
"foo": "boo",
@ -269,7 +269,7 @@ func TestEndpoints(t *testing.T) {
query: url.Values{
"match[]": []string{`test_metric1`},
},
response: []clientmodel.Metric{
response: []model.Metric{
{
"__name__": "test_metric1",
"foo": "bar",
@ -445,7 +445,7 @@ func TestParseTime(t *testing.T) {
t.Errorf("Expected error for %q but got none", test.input)
continue
}
res := clientmodel.TimestampFromTime(test.result)
res := model.TimeFromUnixNano(test.result.UnixNano())
if !test.fail && ts != res {
t.Errorf("Expected time %v for input %q but got %v", res, test.input, ts)
}

View file

@ -12,8 +12,8 @@ import (
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage/local"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/client_model/go"
"github.com/prometheus/common/model"
)
type Federation struct {
@ -23,7 +23,7 @@ type Federation struct {
func (fed *Federation) ServeHTTP(w http.ResponseWriter, req *http.Request) {
req.ParseForm()
metrics := map[clientmodel.Fingerprint]clientmodel.COWMetric{}
metrics := map[model.Fingerprint]model.COWMetric{}
for _, s := range req.Form["match[]"] {
matchers, err := promql.ParseMetricSelector(s)
@ -58,7 +58,7 @@ func (fed *Federation) ServeHTTP(w http.ResponseWriter, req *http.Request) {
protMetric.Label = protMetric.Label[:0]
for ln, lv := range met.Metric {
if ln == clientmodel.MetricNameLabel {
if ln == model.MetricNameLabel {
protMetricFam.Name = proto.String(string(lv))
continue
}
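The federation loop above only needs the map semantics of model.Metric; a standalone sketch of the same pattern of splitting off the __name__ label (the sample label values are made up):

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	met := model.Metric{
		model.MetricNameLabel: "http_requests_total",
		"job":                 "api",
		"instance":            "localhost:9090",
	}
	// As in ServeHTTP above: the metric name becomes the family name,
	// every other label pair is emitted as-is.
	for ln, lv := range met {
		if ln == model.MetricNameLabel {
			fmt.Println("family name:", lv)
			continue
		}
		fmt.Printf("%s=%q\n", ln, lv)
	}
}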

View file

@ -31,8 +31,8 @@ import (
pprof_runtime "runtime/pprof"
template_text "text/template"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/model"
"github.com/prometheus/log"
"github.com/prometheus/prometheus/config"
@ -128,7 +128,7 @@ func New(st local.Storage, qe *promql.Engine, rm *rules.Manager, status *Prometh
apiLegacy: &legacy.API{
QueryEngine: qe,
Storage: st,
Now: clientmodel.Now,
Now: model.Now,
},
federation: &Federation{
Storage: st,
@ -257,7 +257,7 @@ func (h *Handler) consoles(w http.ResponseWriter, r *http.Request) {
Path: strings.TrimLeft(name, "/"),
}
tmpl := template.NewTemplateExpander(string(text), "__console_"+name, data, clientmodel.Now(), h.queryEngine, h.options.ExternalURL.Path)
tmpl := template.NewTemplateExpander(string(text), "__console_"+name, data, model.Now(), h.queryEngine, h.options.ExternalURL.Path)
filenames, err := filepath.Glob(h.options.ConsoleLibrariesPath + "/*.lib")
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
@ -351,7 +351,7 @@ func tmplFuncs(consolesPath string, opts *Options) template_text.FuncMap {
"since": time.Since,
"consolesPath": func() string { return consolesPath },
"pathPrefix": func() string { return opts.ExternalURL.Path },
"stripLabels": func(lset clientmodel.LabelSet, labels ...clientmodel.LabelName) clientmodel.LabelSet {
"stripLabels": func(lset model.LabelSet, labels ...model.LabelName) model.LabelSet {
for _, ln := range labels {
delete(lset, ln)
}
@ -426,7 +426,7 @@ func (h *Handler) executeTemplate(w http.ResponseWriter, name string, data inter
http.Error(w, err.Error(), http.StatusInternalServerError)
}
tmpl := template.NewTemplateExpander(text, name, data, clientmodel.Now(), h.queryEngine, h.options.ExternalURL.Path)
tmpl := template.NewTemplateExpander(text, name, data, model.Now(), h.queryEngine, h.options.ExternalURL.Path)
tmpl.Funcs(tmplFuncs(h.consolesPath(), h.options))
result, err := tmpl.ExpandHTML(nil)
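Finally, the stripLabels template helper translates mechanically; a standalone sketch with the new types (assuming, as the truncated hunk suggests, that the helper returns the modified set):

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// stripLabels mirrors the template helper above, with model.LabelSet and
// model.LabelName in place of the clientmodel variants.
func stripLabels(lset model.LabelSet, labels ...model.LabelName) model.LabelSet {
	for _, ln := range labels {
		delete(lset, ln)
	}
	return lset
}

func main() {
	ls := model.LabelSet{"job": "node", "instance": "localhost:9100"}
	fmt.Println(stripLabels(ls, "instance"))
}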