WIP - Snapshot of Moving to Client Model.

This commit is contained in:
Matt T. Proud 2013-06-25 14:02:27 +02:00
parent 42198c1f1c
commit 30b1cf80b5
94 changed files with 1973 additions and 3762 deletions

View file

@ -17,8 +17,8 @@ include Makefile.INCLUDE
all: binary test
$(GOCC): build/cache/$(GOPKG)
tar -C build/root -xzf $<
$(GOCC): $(BUILD_PATH)/cache/$(GOPKG) source_path
tar -C $(BUILD_PATH)/root -xzf $<
touch $@
advice:
@ -28,14 +28,14 @@ binary: build
build: config dependencies model preparation tools web
$(GO) build -o prometheus $(BUILDFLAGS) .
cp prometheus build/package/prometheus
rsync -av build/root/lib/ build/package/lib/
cp prometheus $(BUILD_PATH)/package/prometheus
rsync -av --delete $(BUILD_PATH)/root/lib/ $(BUILD_PATH)/package/lib/
build/cache/$(GOPKG):
$(BUILD_PATH)/cache/$(GOPKG):
curl -o $@ http://go.googlecode.com/files/$(GOPKG)
clean:
$(MAKE) -C build clean
$(MAKE) -C $(BUILD_PATH) clean
$(MAKE) -C tools clean
$(MAKE) -C web clean
rm -rf $(TEST_ARTIFACTS)
@ -53,16 +53,16 @@ documentation: search_index
godoc -http=:6060 -index -index_files='search_index'
format:
find . -iname '*.go' | egrep -v "^./build/|generated|\.(l|y)\.go" | xargs -n1 $(GOFMT) -w -s=true
find . -iname '*.go' | egrep -v "^\./\.build|./generated|\.(l|y)\.go" | xargs -n1 $(GOFMT) -w -s=true
model: dependencies preparation
$(MAKE) -C model
preparation: $(GOCC) source_path
$(MAKE) -C build
$(MAKE) -C $(BUILD_PATH)
race_condition_binary: build
CGO_CFLAGS="-I$(PWD)/build/root/include" CGO_LDFLAGS="-L$(PWD)/build/root/lib" $(GO) build -race -o prometheus.race $(BUILDFLAGS) .
CGO_CFLAGS="-I$(BUILD_PATH)/root/include" CGO_LDFLAGS="-L$(BUILD_PATH)/root/lib" $(GO) build -race -o prometheus.race $(BUILDFLAGS) .
race_condition_run: race_condition_binary
./prometheus.race $(ARGUMENTS)
@ -83,13 +83,16 @@ source_path:
[ -d "$(FULL_GOPATH)" ]
test: build
$(GOENV) find . -maxdepth 1 -mindepth 1 -type d -and -not -path ./build -exec $(GOCC) test {}/... $(GO_TEST_FLAGS) \;
$(GOENV) find . -maxdepth 1 -mindepth 1 -type d -and -not -path $(BUILD_PATH) -exec $(GOCC) test {}/... $(GO_TEST_FLAGS) \;
$(GO) test $(GO_TEST_FLAGS)
tools: dependencies preparation
$(MAKE) -C tools
update:
$(GO) get -d
web: config dependencies model preparation
$(MAKE) -C web
.PHONY: advice binary build clean config dependencies documentation format model package preparation race_condition_binary race_condition_run run search_index source_path test tools
.PHONY: advice binary build clean config dependencies documentation format model package preparation race_condition_binary race_condition_run run search_index source_path test tools update

View file

@ -28,17 +28,19 @@ endif
OS=$(shell uname)
ARCH=$(shell uname -m)
BUILD_PATH = $(PWD)/.build
GO_VERSION := 1.1
GOOS = $(subst Darwin,darwin,$(subst Linux,linux,$(OS)))
GOARCH = $(subst x86_64,amd64,$(ARCH))
GOPKG = go$(GO_VERSION).$(GOOS)-$(GOARCH).tar.gz
GOROOT = $(PWD)/build/root/go
GOPATH = $(PWD)/build/root/gopath
GOCC = $(GOROOT)/bin/go
TMPDIR = /tmp
GOENV = TMPDIR=$(TMPDIR) GOROOT=$(GOROOT) GOPATH=$(GOPATH)
GO = $(GOENV) $(GOCC)
GOFMT = $(GOROOT)/bin/gofmt
GOOS = $(subst Darwin,darwin,$(subst Linux,linux,$(OS)))
GOARCH = $(subst x86_64,amd64,$(ARCH))
GOPKG = go$(GO_VERSION).$(GOOS)-$(GOARCH).tar.gz
GOROOT = $(BUILD_PATH)/root/go
GOPATH = $(BUILD_PATH)/root/gopath
GOCC = $(GOROOT)/bin/go
TMPDIR = /tmp
GOENV = TMPDIR=$(TMPDIR) GOROOT=$(GOROOT) GOPATH=$(GOPATH)
GO = $(GOENV) $(GOCC)
GOFMT = $(GOROOT)/bin/gofmt
LEVELDB_VERSION := 1.12.0
PROTOCOL_BUFFERS_VERSION := 2.5.0
@ -48,7 +50,7 @@ UNAME := $(shell uname)
FULL_GOPATH := $(GOPATH)/src/github.com/prometheus/prometheus
FULL_GOPATH_BASE := $(GOPATH)/src/github.com/prometheus
export PREFIX=$(PWD)/build/root
export PREFIX=$(BUILD_PATH)/root
export LOCAL_BINARIES=$(PREFIX)/bin

54
main.go
View file

@ -15,32 +15,31 @@ package main
import (
"flag"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/retrieval"
"github.com/prometheus/prometheus/retrieval/format"
"github.com/prometheus/prometheus/rules"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/storage/raw/leveldb"
"github.com/prometheus/prometheus/web"
"github.com/prometheus/prometheus/web/api"
"log"
"os"
"os/signal"
"sync"
"time"
"github.com/prometheus/client_golang/extraction"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/retrieval"
"github.com/prometheus/prometheus/rules"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/storage/raw/leveldb"
"github.com/prometheus/prometheus/web"
"github.com/prometheus/prometheus/web/api"
)
const (
deletionBatchSize = 100
)
const deletionBatchSize = 100
// Commandline flags.
var (
printVersion = flag.Bool("version", false, "print version information")
configFile = flag.String("configFile", "prometheus.conf", "Prometheus configuration file name.")
metricsStoragePath = flag.String("metricsStoragePath", "/tmp/metrics", "Base path for metrics storage.")
scrapeResultsQueueCapacity = flag.Int("scrapeResultsQueueCapacity", 4096, "The size of the scrape results queue.")
ruleResultsQueueCapacity = flag.Int("ruleResultsQueueCapacity", 4096, "The size of the rule results queue.")
samplesQueueCapacity = flag.Int("samplesQueueCapacity", 4096, "The size of the unwritten samples queue.")
concurrentRetrievalAllowance = flag.Int("concurrentRetrievalAllowance", 15, "The number of concurrent metrics retrieval requests allowed.")
diskAppendQueueCapacity = flag.Int("queue.diskAppendCapacity", 1000000, "The size of the queue for items that are pending writing to disk.")
memoryAppendQueueCapacity = flag.Int("queue.memoryAppendCapacity", 10000, "The size of the queue for items that are pending writing to memory.")
@ -76,8 +75,7 @@ type prometheus struct {
databaseStates chan []leveldb.DatabaseState
stopBackgroundOperations chan bool
ruleResults chan *rules.Result
scrapeResults chan format.Result
unwrittenSamples chan *extraction.Result
storage *metric.TieredStorage
}
@ -198,8 +196,7 @@ func main() {
log.Fatalln("Nil tiered storage.")
}
scrapeResults := make(chan format.Result, *scrapeResultsQueueCapacity)
ruleResults := make(chan *rules.Result, *ruleResultsQueueCapacity)
unwrittenSamples := make(chan *extraction.Result, *samplesQueueCapacity)
curationState := make(chan metric.CurationState, 1)
databaseStates := make(chan []leveldb.DatabaseState, 1)
// Coprime numbers, fool!
@ -209,11 +206,11 @@ func main() {
deletionTimer := time.NewTicker(*deleteInterval)
// Queue depth will need to be exposed
targetManager := retrieval.NewTargetManager(scrapeResults, *concurrentRetrievalAllowance)
targetManager := retrieval.NewTargetManager(unwrittenSamples, *concurrentRetrievalAllowance)
targetManager.AddTargetsFromConfig(conf)
// Queue depth will need to be exposed
ruleManager := rules.NewRuleManager(ruleResults, conf.EvaluationInterval(), ts)
ruleManager := rules.NewRuleManager(unwrittenSamples, conf.EvaluationInterval(), ts)
err = ruleManager.AddRulesFromConfig(conf)
if err != nil {
log.Fatalf("Error loading rule files: %v", err)
@ -259,7 +256,7 @@ func main() {
AlertsHandler: alertsHandler,
}
prometheus := prometheus{
prometheus := &prometheus{
bodyCompactionTimer: bodyCompactionTimer,
headCompactionTimer: headCompactionTimer,
tailCompactionTimer: tailCompactionTimer,
@ -271,8 +268,7 @@ func main() {
curationState: curationState,
databaseStates: databaseStates,
ruleResults: ruleResults,
scrapeResults: scrapeResults,
unwrittenSamples: unwrittenSamples,
stopBackgroundOperations: make(chan bool, 1),
@ -343,17 +339,9 @@ func main() {
}()
// TODO(all): Migrate this into prometheus.serve().
for {
select {
case scrapeResult := <-scrapeResults:
if scrapeResult.Err == nil {
ts.AppendSamples(scrapeResult.Samples)
}
case ruleResult := <-ruleResults:
if ruleResult.Err == nil {
ts.AppendSamples(ruleResult.Samples)
}
for block := range unwrittenSamples {
if block.Err == nil {
ts.AppendSamples(block.Samples)
}
}
}

View file

@ -1,103 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"bytes"
"code.google.com/p/goprotobuf/proto"
"fmt"
dto "github.com/prometheus/prometheus/model/generated"
"time"
)
// CurationRemark provides a representation of dto.CurationValue with associated
// business logic methods attached to it to enhance code readability.
type CurationRemark struct {
LastCompletionTimestamp time.Time
}
// OlderThan answers whether this CurationRemark is older than the provided
// cutOff time.
func (c CurationRemark) OlderThan(t time.Time) bool {
return c.LastCompletionTimestamp.Before(t)
}
// Equal answers whether the two CurationRemarks are equivalent.
func (c CurationRemark) Equal(o CurationRemark) bool {
return c.LastCompletionTimestamp.Equal(o.LastCompletionTimestamp)
}
func (c CurationRemark) String() string {
return fmt.Sprintf("Last curated at %s", c.LastCompletionTimestamp)
}
// ToDTO generates the dto.CurationValue representation of this.
func (c CurationRemark) ToDTO() *dto.CurationValue {
return &dto.CurationValue{
LastCompletionTimestamp: proto.Int64(c.LastCompletionTimestamp.Unix()),
}
}
// NewCurationRemarkFromDTO builds CurationRemark from the provided
// dto.CurationValue object.
func NewCurationRemarkFromDTO(d *dto.CurationValue) CurationRemark {
return CurationRemark{
LastCompletionTimestamp: time.Unix(*d.LastCompletionTimestamp, 0).UTC(),
}
}
// CurationKey provides a representation of dto.CurationKey with associated
// business logic methods attached to it to enhance code readability.
type CurationKey struct {
Fingerprint *Fingerprint
ProcessorMessageRaw []byte
ProcessorMessageTypeName string
IgnoreYoungerThan time.Duration
}
// Equal answers whether the two CurationKeys are equivalent.
func (c CurationKey) Equal(o CurationKey) bool {
switch {
case !c.Fingerprint.Equal(o.Fingerprint):
return false
case bytes.Compare(c.ProcessorMessageRaw, o.ProcessorMessageRaw) != 0:
return false
case c.ProcessorMessageTypeName != o.ProcessorMessageTypeName:
return false
case c.IgnoreYoungerThan != o.IgnoreYoungerThan:
return false
}
return true
}
// ToDTO generates a dto.CurationKey representation of this.
func (c CurationKey) ToDTO() *dto.CurationKey {
return &dto.CurationKey{
Fingerprint: c.Fingerprint.ToDTO(),
ProcessorMessageRaw: c.ProcessorMessageRaw,
ProcessorMessageTypeName: proto.String(c.ProcessorMessageTypeName),
IgnoreYoungerThan: proto.Int64(int64(c.IgnoreYoungerThan)),
}
}
// NewCurationKeyFromDTO builds CurationKey from the provided dto.CurationKey.
func NewCurationKeyFromDTO(d *dto.CurationKey) CurationKey {
return CurationKey{
Fingerprint: NewFingerprintFromDTO(d.Fingerprint),
ProcessorMessageRaw: d.ProcessorMessageRaw,
ProcessorMessageTypeName: *d.ProcessorMessageTypeName,
IgnoreYoungerThan: time.Duration(*d.IgnoreYoungerThan),
}
}

View file

@ -1,131 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"code.google.com/p/goprotobuf/proto"
dto "github.com/prometheus/prometheus/model/generated"
"sort"
"time"
)
func SampleToMetricDTO(s *Sample) *dto.Metric {
labelLength := len(s.Metric)
labelNames := make([]string, 0, labelLength)
for labelName := range s.Metric {
labelNames = append(labelNames, string(labelName))
}
sort.Strings(labelNames)
labelSets := make([]*dto.LabelPair, 0, labelLength)
for _, labelName := range labelNames {
labelValue := s.Metric[LabelName(labelName)]
labelPair := &dto.LabelPair{
Name: proto.String(string(labelName)),
Value: proto.String(string(labelValue)),
}
labelSets = append(labelSets, labelPair)
}
return &dto.Metric{
LabelPair: labelSets,
}
}
func MetricToDTO(m Metric) *dto.Metric {
metricLength := len(m)
labelNames := make([]string, 0, metricLength)
for labelName := range m {
labelNames = append(labelNames, string(labelName))
}
sort.Strings(labelNames)
labelSets := make([]*dto.LabelPair, 0, metricLength)
for _, labelName := range labelNames {
l := LabelName(labelName)
labelValue := m[l]
labelPair := &dto.LabelPair{
Name: proto.String(string(labelName)),
Value: proto.String(string(labelValue)),
}
labelSets = append(labelSets, labelPair)
}
return &dto.Metric{
LabelPair: labelSets,
}
}
func LabelSetToDTOs(s *LabelSet) []*dto.LabelPair {
metricLength := len(*s)
labelNames := make([]string, 0, metricLength)
for labelName := range *s {
labelNames = append(labelNames, string(labelName))
}
sort.Strings(labelNames)
labelSets := make([]*dto.LabelPair, 0, metricLength)
for _, labelName := range labelNames {
l := LabelName(labelName)
labelValue := (*s)[l]
labelPair := &dto.LabelPair{
Name: proto.String(string(labelName)),
Value: proto.String(string(labelValue)),
}
labelSets = append(labelSets, labelPair)
}
return labelSets
}
func LabelSetToDTO(s *LabelSet) *dto.LabelSet {
return &dto.LabelSet{
Member: LabelSetToDTOs(s),
}
}
func LabelNameToDTO(l *LabelName) *dto.LabelName {
return &dto.LabelName{
Name: proto.String(string(*l)),
}
}
func FingerprintToDTO(f *Fingerprint) *dto.Fingerprint {
return &dto.Fingerprint{
Signature: proto.String(f.ToRowKey()),
}
}
func SampleFromDTO(m *Metric, t *time.Time, v *dto.SampleValueSeries) *Sample {
s := &Sample{
Value: SampleValue(*v.Value[0].Value),
Timestamp: *t,
}
s.Metric = *m
return s
}

View file

@ -1,207 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"code.google.com/p/goprotobuf/proto"
"encoding/binary"
"fmt"
dto "github.com/prometheus/prometheus/model/generated"
"hash/fnv"
"sort"
"strconv"
"strings"
)
const (
// rowKeyDelimiter is used to separate formatted versions of a metric's row
// key.
rowKeyDelimiter = "-"
)
// Builds a Fingerprint from a row key.
func NewFingerprintFromRowKey(rowKey string) *Fingerprint {
components := strings.Split(rowKey, rowKeyDelimiter)
hash, err := strconv.ParseUint(components[0], 10, 64)
if err != nil {
panic(err)
}
labelMatterLength, err := strconv.ParseUint(components[2], 10, 0)
if err != nil {
panic(err)
}
return &Fingerprint{
hash: hash,
firstCharacterOfFirstLabelName: components[1],
labelMatterLength: uint(labelMatterLength),
lastCharacterOfLastLabelValue: components[3],
}
}
// Builds a Fingerprint from a datastore entry.
func NewFingerprintFromDTO(f *dto.Fingerprint) *Fingerprint {
return NewFingerprintFromRowKey(*f.Signature)
}
// Decomposes a Metric into a Fingerprint.
func NewFingerprintFromMetric(metric Metric) *Fingerprint {
labelLength := len(metric)
labelNames := make([]string, 0, labelLength)
for labelName := range metric {
labelNames = append(labelNames, string(labelName))
}
sort.Strings(labelNames)
summer := fnv.New64a()
firstCharacterOfFirstLabelName := ""
lastCharacterOfLastLabelValue := ""
labelMatterLength := 0
for i, labelName := range labelNames {
labelValue := metric[LabelName(labelName)]
labelNameLength := len(labelName)
labelValueLength := len(labelValue)
labelMatterLength += labelNameLength + labelValueLength
if i == 0 {
firstCharacterOfFirstLabelName = labelName[0:1]
}
if i == labelLength-1 {
lastCharacterOfLastLabelValue = string(labelValue[labelValueLength-1 : labelValueLength])
}
summer.Write([]byte(labelName))
summer.Write([]byte(reservedDelimiter))
summer.Write([]byte(labelValue))
}
return &Fingerprint{
firstCharacterOfFirstLabelName: firstCharacterOfFirstLabelName,
hash: binary.LittleEndian.Uint64(summer.Sum(nil)),
labelMatterLength: uint(labelMatterLength % 10),
lastCharacterOfLastLabelValue: lastCharacterOfLastLabelValue,
}
}
// A simplified representation of an entity.
type Fingerprint struct {
// A hashed representation of the underyling entity. For our purposes, FNV-1A
// 64-bit is used.
hash uint64
firstCharacterOfFirstLabelName string
labelMatterLength uint
lastCharacterOfLastLabelValue string
}
func (f *Fingerprint) String() string {
return f.ToRowKey()
}
// Transforms the Fingerprint into a database row key.
func (f *Fingerprint) ToRowKey() string {
return strings.Join([]string{fmt.Sprintf("%020d", f.hash), f.firstCharacterOfFirstLabelName, fmt.Sprint(f.labelMatterLength), f.lastCharacterOfLastLabelValue}, rowKeyDelimiter)
}
func (f *Fingerprint) ToDTO() *dto.Fingerprint {
return &dto.Fingerprint{
Signature: proto.String(f.ToRowKey()),
}
}
func (f *Fingerprint) Hash() uint64 {
return f.hash
}
func (f *Fingerprint) FirstCharacterOfFirstLabelName() string {
return f.firstCharacterOfFirstLabelName
}
func (f *Fingerprint) LabelMatterLength() uint {
return f.labelMatterLength
}
func (f *Fingerprint) LastCharacterOfLastLabelValue() string {
return f.lastCharacterOfLastLabelValue
}
func (f *Fingerprint) Less(o *Fingerprint) bool {
if f.hash < o.hash {
return true
}
if f.hash > o.hash {
return false
}
if f.firstCharacterOfFirstLabelName < o.firstCharacterOfFirstLabelName {
return true
}
if f.firstCharacterOfFirstLabelName > o.firstCharacterOfFirstLabelName {
return false
}
if f.labelMatterLength < o.labelMatterLength {
return true
}
if f.labelMatterLength > o.labelMatterLength {
return false
}
if f.lastCharacterOfLastLabelValue < o.lastCharacterOfLastLabelValue {
return true
}
if f.lastCharacterOfLastLabelValue > o.lastCharacterOfLastLabelValue {
return false
}
return false
}
func (f *Fingerprint) Equal(o *Fingerprint) (equal bool) {
equal = f.Hash() == o.Hash()
if !equal {
return
}
equal = f.FirstCharacterOfFirstLabelName() == o.FirstCharacterOfFirstLabelName()
if !equal {
return
}
equal = f.LabelMatterLength() == o.LabelMatterLength()
if !equal {
return
}
equal = f.LastCharacterOfLastLabelValue() == o.LastCharacterOfLastLabelValue()
return
}
// Represents a collection of Fingerprint subject to a given natural sorting
// scheme.
type Fingerprints []*Fingerprint
func (f Fingerprints) Len() int {
return len(f)
}
func (f Fingerprints) Less(i, j int) bool {
return f[i].Less(f[j])
}
func (f Fingerprints) Swap(i, j int) {
f[i], f[j] = f[j], f[i]
}

View file

@ -1,104 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"runtime"
"testing"
)
func TestFingerprintComparison(t *testing.T) {
fingerprints := []*Fingerprint{
{
hash: 0,
firstCharacterOfFirstLabelName: "b",
labelMatterLength: 1,
lastCharacterOfLastLabelValue: "b",
},
{
hash: 1,
firstCharacterOfFirstLabelName: "a",
labelMatterLength: 0,
lastCharacterOfLastLabelValue: "a",
},
{
hash: 1,
firstCharacterOfFirstLabelName: "a",
labelMatterLength: 1000,
lastCharacterOfLastLabelValue: "b",
},
{
hash: 1,
firstCharacterOfFirstLabelName: "b",
labelMatterLength: 0,
lastCharacterOfLastLabelValue: "a",
},
{
hash: 1,
firstCharacterOfFirstLabelName: "b",
labelMatterLength: 1,
lastCharacterOfLastLabelValue: "a",
},
{
hash: 1,
firstCharacterOfFirstLabelName: "b",
labelMatterLength: 1,
lastCharacterOfLastLabelValue: "b",
},
}
for i := range fingerprints {
if i == 0 {
continue
}
if !fingerprints[i-1].Less(fingerprints[i]) {
t.Errorf("%d expected %s < %s", i, fingerprints[i-1], fingerprints[i])
}
}
}
func BenchmarkFingerprinting(b *testing.B) {
b.StopTimer()
fps := []*Fingerprint{
{
hash: 0,
firstCharacterOfFirstLabelName: "a",
labelMatterLength: 2,
lastCharacterOfLastLabelValue: "z",
},
{
hash: 0,
firstCharacterOfFirstLabelName: "a",
labelMatterLength: 2,
lastCharacterOfLastLabelValue: "z",
},
}
for i := 0; i < 10; i++ {
fps[0].Less(fps[1])
}
b.Logf("N: %v", b.N)
b.StartTimer()
var pre runtime.MemStats
runtime.ReadMemStats(&pre)
for i := 0; i < b.N; i++ {
fps[0].Less(fps[1])
}
var post runtime.MemStats
runtime.ReadMemStats(&post)
b.Logf("allocs: %d items: ", post.TotalAlloc-pre.TotalAlloc)
}

View file

@ -1,64 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"strings"
)
const (
// The label name indicating the metric name of a timeseries.
MetricNameLabel = LabelName("name")
// The label name indicating the job from which a timeseries was scraped.
JobLabel = LabelName("job")
// The label name indicating the instance from which a timeseries was scraped.
InstanceLabel = LabelName("instance")
// The label name prefix to prepend if a synthetic label is already present
// in the exported metrics.
ExporterLabelPrefix = LabelName("exporter_")
// The metric name for the synthetic health variable.
ScrapeHealthMetricName = LabelValue("up")
// The metric name for synthetic alert timeseries.
AlertMetricName = LabelValue("ALERTS")
// The label name indicating the name of an alert.
AlertNameLabel = LabelName("alertname")
// The label name indicating the state of an alert.
AlertStateLabel = LabelName("alertstate")
)
// A LabelName is a key for a LabelSet or Metric. It has a value associated
// therewith.
type LabelName string
type LabelNames []LabelName
func (l LabelNames) Len() int {
return len(l)
}
func (l LabelNames) Less(i, j int) bool {
return l[i] < l[j]
}
func (l LabelNames) Swap(i, j int) {
l[i], l[j] = l[j], l[i]
}
func (l LabelNames) String() string {
labelStrings := make([]string, 0, len(l))
for _, label := range l {
labelStrings = append(labelStrings, string(label))
}
return strings.Join(labelStrings, ", ")
}

View file

@ -1,56 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"github.com/prometheus/prometheus/utility/test"
"sort"
"testing"
)
func testLabelNames(t test.Tester) {
var scenarios = []struct {
in LabelNames
out LabelNames
}{
{
in: LabelNames{"ZZZ", "zzz"},
out: LabelNames{"ZZZ", "zzz"},
},
{
in: LabelNames{"aaa", "AAA"},
out: LabelNames{"AAA", "aaa"},
},
}
for i, scenario := range scenarios {
sort.Sort(scenario.in)
for j, expected := range scenario.out {
if expected != scenario.in[j] {
t.Errorf("%d.%d expected %s, got %s", i, j, expected, scenario.in[j])
}
}
}
}
func TestLabelNames(t *testing.T) {
testLabelNames(t)
}
func BenchmarkLabelNames(b *testing.B) {
for i := 0; i < b.N; i++ {
testLabelNames(b)
}
}

View file

@ -1,35 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"sort"
)
// A LabelValue is an associated value for a LabelName.
type LabelValue string
type LabelValues []LabelValue
func (l LabelValues) Len() int {
return len(l)
}
func (l LabelValues) Less(i, j int) bool {
return sort.StringsAreSorted([]string{string(l[i]), string(l[j])})
}
func (l LabelValues) Swap(i, j int) {
l[i], l[j] = l[j], l[i]
}

View file

@ -1,56 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"github.com/prometheus/prometheus/utility/test"
"sort"
"testing"
)
func testLabelValues(t test.Tester) {
var scenarios = []struct {
in LabelValues
out LabelValues
}{
{
in: LabelValues{"ZZZ", "zzz"},
out: LabelValues{"ZZZ", "zzz"},
},
{
in: LabelValues{"aaa", "AAA"},
out: LabelValues{"AAA", "aaa"},
},
}
for i, scenario := range scenarios {
sort.Sort(scenario.in)
for j, expected := range scenario.out {
if expected != scenario.in[j] {
t.Errorf("%d.%d expected %s, got %s", i, j, expected, scenario.in[j])
}
}
}
}
func TestLabelValues(t *testing.T) {
testLabelValues(t)
}
func BenchmarkLabelValues(b *testing.B) {
for i := 0; i < b.N; i++ {
testLabelValues(b)
}
}

View file

@ -1,262 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"bytes"
"code.google.com/p/goprotobuf/proto"
"fmt"
dto "github.com/prometheus/prometheus/model/generated"
"sort"
"strings"
"time"
)
const (
// XXX: Re-evaluate down the road.
reservedDelimiter = `"`
)
// A LabelSet is a collection of LabelName and LabelValue pairs. The LabelSet
// may be fully-qualified down to the point where it may resolve to a single
// Metric in the data store or not. All operations that occur within the realm
// of a LabelSet can emit a vector of Metric entities to which the LabelSet may
// match.
type LabelSet map[LabelName]LabelValue
// Helper function to non-destructively merge two label sets.
func (l LabelSet) Merge(other LabelSet) LabelSet {
result := make(LabelSet, len(l))
for k, v := range l {
result[k] = v
}
for k, v := range other {
result[k] = v
}
return result
}
func (l LabelSet) String() string {
labelStrings := make([]string, 0, len(l))
for label, value := range l {
labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value))
}
switch len(labelStrings) {
case 0:
return ""
default:
sort.Strings(labelStrings)
return fmt.Sprintf("{%s}", strings.Join(labelStrings, ", "))
}
}
func (m Metric) String() string {
metricName, ok := m[MetricNameLabel]
if !ok {
panic("Tried to print metric without name")
}
labelStrings := make([]string, 0, len(m)-1)
for label, value := range m {
if label != MetricNameLabel {
labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value))
}
}
switch len(labelStrings) {
case 0:
return string(metricName)
default:
sort.Strings(labelStrings)
return fmt.Sprintf("%s{%s}", metricName, strings.Join(labelStrings, ", "))
}
}
func (l LabelSet) ToMetric() Metric {
metric := Metric{}
for label, value := range l {
metric[label] = value
}
return metric
}
func (m Metric) ToLabelSet() LabelSet {
labels := LabelSet{}
for label, value := range m {
labels[label] = value
}
return labels
}
// A Metric is similar to a LabelSet, but the key difference is that a Metric is
// a singleton and refers to one and only one stream of samples.
type Metric map[LabelName]LabelValue
// A SampleValue is a representation of a value for a given sample at a given
// time.
type SampleValue float64
func (s SampleValue) Equal(o SampleValue) bool {
return s == o
}
func (s SampleValue) ToDTO() *float64 {
return proto.Float64(float64(s))
}
func (v SampleValue) MarshalJSON() ([]byte, error) {
return []byte(fmt.Sprintf(`"%f"`, v)), nil
}
func (v SampleValue) String() string {
return fmt.Sprint(float64(v))
}
func (s SamplePair) MarshalJSON() ([]byte, error) {
return []byte(fmt.Sprintf("{\"Value\": \"%f\", \"Timestamp\": %d}", s.Value, s.Timestamp.Unix())), nil
}
type SamplePair struct {
Value SampleValue
Timestamp time.Time
}
func (s SamplePair) Equal(o SamplePair) bool {
return s.Value.Equal(o.Value) && s.Timestamp.Equal(o.Timestamp)
}
func (s SamplePair) ToDTO() (out *dto.SampleValueSeries_Value) {
out = &dto.SampleValueSeries_Value{
Timestamp: proto.Int64(s.Timestamp.Unix()),
Value: s.Value.ToDTO(),
}
return
}
func (s SamplePair) String() string {
return fmt.Sprintf("SamplePair at %s of %s", s.Timestamp, s.Value)
}
type Values []SamplePair
func (v Values) Len() int {
return len(v)
}
func (v Values) Less(i, j int) bool {
return v[i].Timestamp.Before(v[j].Timestamp)
}
func (v Values) Swap(i, j int) {
v[i], v[j] = v[j], v[i]
}
// FirstTimeAfter indicates whether the first sample of a set is after a given
// timestamp.
func (v Values) FirstTimeAfter(t time.Time) bool {
return v[0].Timestamp.After(t)
}
// LastTimeBefore indicates whether the last sample of a set is before a given
// timestamp.
func (v Values) LastTimeBefore(t time.Time) bool {
return v[len(v)-1].Timestamp.Before(t)
}
// InsideInterval indicates whether a given range of sorted values could contain
// a value for a given time.
func (v Values) InsideInterval(t time.Time) bool {
switch {
case v.Len() == 0:
return false
case t.Before(v[0].Timestamp):
return false
case !v[v.Len()-1].Timestamp.Before(t):
return false
default:
return true
}
}
// TruncateBefore returns a subslice of the original such that extraneous
// samples in the collection that occur before the provided time are
// dropped. The original slice is not mutated; the result shares its
// backing array.
func (v Values) TruncateBefore(t time.Time) Values {
	// Binary-search for the first sample whose timestamp is >= t.
	firstKept := sort.Search(len(v), func(i int) bool {
		return !v[i].Timestamp.Before(t)
	})

	return v[firstKept:]
}
// ToDTO converts the whole series into its protocol buffer representation.
func (v Values) ToDTO() *dto.SampleValueSeries {
	series := &dto.SampleValueSeries{}
	for _, pair := range v {
		series.Value = append(series.Value, pair.ToDTO())
	}
	return series
}
// ToSampleKey builds the SampleKey that indexes this series for the given
// fingerprint: its time range spans the first through last sample. It panics
// on an empty set.
func (v Values) ToSampleKey(f *Fingerprint) SampleKey {
	first, last := v[0], v[len(v)-1]
	return SampleKey{
		Fingerprint:    f,
		FirstTimestamp: first.Timestamp,
		LastTimestamp:  last.Timestamp,
		SampleCount:    uint32(len(v)),
	}
}
// String renders the series as a bracketed, newline-separated, numbered list
// of its sample pairs.
func (v Values) String() string {
	var buf bytes.Buffer
	buf.WriteString("[")
	for i, pair := range v {
		fmt.Fprintf(&buf, "%d. %s", i, pair)
		// Separate entries with newlines, but not after the final one.
		if i < len(v)-1 {
			buf.WriteString("\n")
		}
	}
	buf.WriteString("]")
	return buf.String()
}
// NewValuesFromDTO reconstructs a Values series from its protocol buffer
// representation; timestamps are interpreted as Unix seconds in UTC.
func NewValuesFromDTO(d *dto.SampleValueSeries) Values {
	var result Values
	for _, value := range d.Value {
		pair := SamplePair{
			Timestamp: time.Unix(*value.Timestamp, 0).UTC(),
			Value:     SampleValue(*value.Value),
		}
		result = append(result, pair)
	}
	return result
}
// SampleSet couples a metric with the series of values observed for it.
type SampleSet struct {
	Metric Metric
	Values Values
}
// Interval describes a closed time range: both endpoints are inclusive.
type Interval struct {
	OldestInclusive time.Time
	NewestInclusive time.Time
}

View file

@ -1,229 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"github.com/prometheus/prometheus/utility/test"
"testing"
"time"
)
// testMetric verifies that fingerprinting a metric yields the expected
// stable row key and hash for a few known label sets.
func testMetric(t test.Tester) {
	scenarios := []struct {
		input  map[string]string
		hash   uint64
		rowkey string
	}{
		{
			input:  map[string]string{},
			rowkey: "02676020557754725067--0-",
			hash:   2676020557754725067,
		},
		{
			input: map[string]string{
				"first_name":   "electro",
				"occupation":   "robot",
				"manufacturer": "westinghouse",
			},
			rowkey: "04776841610193542734-f-6-t",
			hash:   4776841610193542734,
		},
		{
			input: map[string]string{
				"x": "y",
			},
			rowkey: "01306929544689993150-x-2-y",
			hash:   1306929544689993150,
		},
	}

	for i, scenario := range scenarios {
		metric := Metric{}
		for name, value := range scenario.input {
			metric[LabelName(name)] = LabelValue(value)
		}

		fingerprint := NewFingerprintFromMetric(metric)

		if rowKey := fingerprint.ToRowKey(); rowKey != scenario.rowkey {
			t.Errorf("%d. expected %s, got %s", i, scenario.rowkey, rowKey)
		}
		if hash := fingerprint.Hash(); hash != scenario.hash {
			t.Errorf("%d. expected %d, got %d", i, scenario.hash, hash)
		}
	}
}
// TestMetric runs the fingerprinting scenarios under `go test`.
func TestMetric(t *testing.T) {
	testMetric(t)
}
// BenchmarkMetric measures the fingerprinting scenarios; errors reported
// through the shared test.Tester interface still fail the benchmark.
func BenchmarkMetric(b *testing.B) {
	for i := 0; i < b.N; i++ {
		testMetric(b)
	}
}
// testTruncateBefore exercises Values.TruncateBefore against three scenarios:
// a cut-off equal to the first sample (nothing dropped), a cut-off mid-series
// (leading samples dropped), and a cut-off past the last sample (all dropped).
func testTruncateBefore(t test.Tester) {
	type in struct {
		values Values
		time   time.Time
	}
	instant := time.Now()
	var scenarios = []struct {
		in  in
		out Values
	}{
		// Cut-off at the very first timestamp: all five samples survive.
		{
			in: in{
				time: instant,
				values: Values{
					{
						Value:     0,
						Timestamp: instant,
					},
					{
						Value:     1,
						Timestamp: instant.Add(time.Second),
					},
					{
						Value:     2,
						Timestamp: instant.Add(2 * time.Second),
					},
					{
						Value:     3,
						Timestamp: instant.Add(3 * time.Second),
					},
					{
						Value:     4,
						Timestamp: instant.Add(4 * time.Second),
					},
				},
			},
			out: Values{
				{
					Value:     0,
					Timestamp: instant,
				},
				{
					Value:     1,
					Timestamp: instant.Add(time.Second),
				},
				{
					Value:     2,
					Timestamp: instant.Add(2 * time.Second),
				},
				{
					Value:     3,
					Timestamp: instant.Add(3 * time.Second),
				},
				{
					Value:     4,
					Timestamp: instant.Add(4 * time.Second),
				},
			},
		},
		// Cut-off at +2s: the two older samples are truncated away.
		{
			in: in{
				time: instant.Add(2 * time.Second),
				values: Values{
					{
						Value:     0,
						Timestamp: instant,
					},
					{
						Value:     1,
						Timestamp: instant.Add(time.Second),
					},
					{
						Value:     2,
						Timestamp: instant.Add(2 * time.Second),
					},
					{
						Value:     3,
						Timestamp: instant.Add(3 * time.Second),
					},
					{
						Value:     4,
						Timestamp: instant.Add(4 * time.Second),
					},
				},
			},
			out: Values{
				{
					Value:     2,
					Timestamp: instant.Add(2 * time.Second),
				},
				{
					Value:     3,
					Timestamp: instant.Add(3 * time.Second),
				},
				{
					Value:     4,
					Timestamp: instant.Add(4 * time.Second),
				},
			},
		},
		// Cut-off past the newest sample (+5s): everything is truncated.
		{
			in: in{
				time: instant.Add(5 * time.Second),
				values: Values{
					{
						Value:     0,
						Timestamp: instant,
					},
					{
						Value:     1,
						Timestamp: instant.Add(time.Second),
					},
					{
						Value:     2,
						Timestamp: instant.Add(2 * time.Second),
					},
					{
						Value:     3,
						Timestamp: instant.Add(3 * time.Second),
					},
					{
						Value:     4,
						Timestamp: instant.Add(4 * time.Second),
					},
				},
			},
			out: Values{},
		},
	}
	for i, scenario := range scenarios {
		actual := scenario.in.values.TruncateBefore(scenario.in.time)
		// Length mismatch aborts the run — the element-wise check below
		// would otherwise index out of range.
		if len(actual) != len(scenario.out) {
			t.Fatalf("%d. expected length of %d, got %d", i, len(scenario.out), len(actual))
		}
		for j, actualValue := range actual {
			if !actualValue.Equal(scenario.out[j]) {
				t.Fatalf("%d.%d. expected %s, got %s", i, j, scenario.out[j], actualValue)
			}
		}
	}
}
// TestTruncateBefore runs the truncation scenarios under `go test`.
func TestTruncateBefore(t *testing.T) {
	testTruncateBefore(t)
}

View file

@ -1,59 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"time"
)
// Sample is a single observed value of a metric at a point in time.
type Sample struct {
	Metric    Metric
	Value     SampleValue
	Timestamp time.Time
}
// Equal reports whether two samples agree on metric identity (via
// fingerprint), timestamp, and value.
func (s Sample) Equal(sample Sample) bool {
	switch {
	case !NewFingerprintFromMetric(s.Metric).Equal(NewFingerprintFromMetric(sample.Metric)):
		return false
	case !s.Timestamp.Equal(sample.Timestamp):
		return false
	case !s.Value.Equal(sample.Value):
		return false
	}

	return true
}
type Samples []Sample
// Len implements sort.Interface.
func (s Samples) Len() int { return len(s) }
// Less implements sort.Interface: samples are ordered primarily by metric
// fingerprint and, for equal fingerprints, by ascending timestamp.
//
// BUG FIX: the previous version fell through to the timestamp comparison even
// when fp(i) > fp(j), which is not a consistent (strict weak) ordering and
// yields nondeterministic sort results. The timestamp is now consulted only
// when the fingerprints tie; the fingerprints are also computed once each
// instead of being rebuilt per comparison branch.
func (s Samples) Less(i, j int) bool {
	fpi := NewFingerprintFromMetric(s[i].Metric)
	fpj := NewFingerprintFromMetric(s[j].Metric)

	switch {
	case fpi.Less(fpj):
		return true
	case fpj.Less(fpi):
		return false
	default:
		// Equal fingerprints: break the tie on time.
		return s[i].Timestamp.Before(s[j].Timestamp)
	}
}
// Swap implements sort.Interface.
func (s Samples) Swap(i, j int) { s[i], s[j] = s[j], s[i] }

View file

@ -1,49 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"code.google.com/p/goprotobuf/proto"
dto "github.com/prometheus/prometheus/model/generated"
"time"
)
// Watermark provides a representation of dto.MetricHighWatermark with
// associated business logic methods attached to it to enhance code
// readability. It embeds time.Time, so all Time methods are available
// directly on a Watermark.
type Watermark struct {
	time.Time
}
// ToMetricHighWatermarkDTO builds a MetricHighWatermark DTO out of a given
// Watermark, reducing the instant to Unix-second resolution.
func (w Watermark) ToMetricHighWatermarkDTO() *dto.MetricHighWatermark {
	out := &dto.MetricHighWatermark{}
	out.Timestamp = proto.Int64(w.Time.Unix())
	return out
}
// NewWatermarkFromHighWatermarkDTO builds a Watermark from the provided
// dto.MetricHighWatermark object; the timestamp is interpreted as Unix
// seconds in UTC. It panics if d.Timestamp is nil.
func NewWatermarkFromHighWatermarkDTO(d *dto.MetricHighWatermark) Watermark {
	instant := time.Unix(*d.Timestamp, 0).UTC()
	return Watermark{Time: instant}
}
// NewWatermarkFromTime builds a new Watermark for the provided time.
func NewWatermarkFromTime(t time.Time) Watermark {
	return Watermark{Time: t}
}

View file

@ -1,68 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package format
import (
"fmt"
"mime"
"net/http"
)
var (
	// DefaultRegistry is the process-wide Registry used to select a Processor
	// from an incoming request's headers.
	DefaultRegistry Registry = &registry{}
)
// Registry is responsible for applying a determination strategy to the given
// inputs to determine what Processor can handle this type of input.
type Registry interface {
	// ProcessorForRequestHeader interprets a HTTP request header to determine
	// what Processor should be used for the given input. It returns an error
	// if the header is absent, malformed, or names an unsupported version.
	ProcessorForRequestHeader(header http.Header) (Processor, error)
}
// registry is the stateless default implementation of Registry.
type registry struct {
}
// ProcessorForRequestHeader selects the telemetry Processor matching the
// request's advertised API version. The version is read from the
// Content-Type parameters when schema "prometheus/telemetry" is declared,
// otherwise from the X-Prometheus-API-Version header. Only media type
// application/json is accepted.
func (r *registry) ProcessorForRequestHeader(header http.Header) (Processor, error) {
	if header == nil {
		return nil, fmt.Errorf("Received illegal and nil header.")
	}

	contentType := header.Get("Content-Type")
	mediatype, params, err := mime.ParseMediaType(contentType)
	if err != nil {
		return nil, fmt.Errorf("Invalid Content-Type header %q: %s", contentType, err)
	}
	if mediatype != "application/json" {
		return nil, fmt.Errorf("Unsupported media type %q, expected %q", mediatype, "application/json")
	}

	// Prefer the version embedded in the Content-Type parameters; fall back
	// to the dedicated header.
	prometheusApiVersion := header.Get("X-Prometheus-API-Version")
	if params["schema"] == "prometheus/telemetry" && params["version"] != "" {
		prometheusApiVersion = params["version"]
	}

	switch prometheusApiVersion {
	case "0.0.2":
		return Processor002, nil
	case "0.0.1":
		return Processor001, nil
	}

	return nil, fmt.Errorf("Unrecognized API version %s", prometheusApiVersion)
}

View file

@ -1,92 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package format
import (
"fmt"
"github.com/prometheus/prometheus/utility/test"
"net/http"
"testing"
)
// testDiscriminatorHttpHeader exercises DefaultRegistry's processor
// selection for nil headers, unknown API versions, and the two supported
// version-signalling conventions (X-Prometheus-API-Version header and
// Content-Type schema/version parameters).
func testDiscriminatorHttpHeader(t test.Tester) {
	var scenarios = []struct {
		input  map[string]string
		output Processor
		err    error
	}{
		// No input at all produces a nil http.Header below.
		{
			output: nil,
			err:    fmt.Errorf("Received illegal and nil header."),
		},
		{
			input:  map[string]string{"Content-Type": "application/json", "X-Prometheus-API-Version": "0.0.0"},
			output: nil,
			err:    fmt.Errorf("Unrecognized API version 0.0.0"),
		},
		{
			input:  map[string]string{"Content-Type": "application/json", "X-Prometheus-API-Version": "0.0.1"},
			output: Processor001,
			err:    nil,
		},
		{
			input:  map[string]string{"Content-Type": `application/json; schema="prometheus/telemetry"; version=0.0.0`},
			output: nil,
			err:    fmt.Errorf("Unrecognized API version 0.0.0"),
		},
		{
			input:  map[string]string{"Content-Type": `application/json; schema="prometheus/telemetry"; version=0.0.1`},
			output: Processor001,
			err:    nil,
		},
	}

	for i, scenario := range scenarios {
		// An empty input map deliberately leaves the header nil to probe the
		// nil-header error path.
		var header http.Header

		if len(scenario.input) > 0 {
			header = http.Header{}
		}

		for key, value := range scenario.input {
			header.Add(key, value)
		}

		actual, err := DefaultRegistry.ProcessorForRequestHeader(header)

		// Errors are distinct instances, so compare messages when both sides
		// are non-nil; identity/nil-ness mismatches are reported directly.
		if scenario.err != err {
			if scenario.err != nil && err != nil {
				if scenario.err.Error() != err.Error() {
					t.Errorf("%d. expected %s, got %s", i, scenario.err, err)
				}
			} else if scenario.err != nil || err != nil {
				t.Errorf("%d. expected %s, got %s", i, scenario.err, err)
			}
		}

		if scenario.output != actual {
			t.Errorf("%d. expected %s, got %s", i, scenario.output, actual)
		}
	}
}
// TestDiscriminatorHttpHeader runs the header-discrimination scenarios under
// `go test`.
func TestDiscriminatorHttpHeader(t *testing.T) {
	testDiscriminatorHttpHeader(t)
}
// BenchmarkDiscriminatorHttpHeader measures processor selection end-to-end.
func BenchmarkDiscriminatorHttpHeader(b *testing.B) {
	for i := 0; i < b.N; i++ {
		testDiscriminatorHttpHeader(b)
	}
}

View file

@ -1,79 +0,0 @@
[
{
"baseLabels": {
"name": "rpc_calls_total",
"job": "batch_job"
},
"docstring": "RPC calls.",
"metric": {
"type": "counter",
"value": [
{
"labels": {
"service": "zed"
},
"value": 25
},
{
"labels": {
"service": "bar"
},
"value": 25
},
{
"labels": {
"service": "foo"
},
"value": 25
}
]
}
},
{
"baseLabels": {
"name": "rpc_latency_microseconds"
},
"docstring": "RPC latency.",
"metric": {
"type": "histogram",
"value": [
{
"labels": {
"service": "foo"
},
"value": {
"0.010000": 15.890724674774395,
"0.050000": 15.890724674774395,
"0.500000": 84.63044031436561,
"0.900000": 160.21100853053224,
"0.990000": 172.49828748957728
}
},
{
"labels": {
"service": "zed"
},
"value": {
"0.010000": 0.0459814091918713,
"0.050000": 0.0459814091918713,
"0.500000": 0.6120456642749681,
"0.900000": 1.355915069887731,
"0.990000": 1.772733213161236
}
},
{
"labels": {
"service": "bar"
},
"value": {
"0.010000": 78.48563317257356,
"0.050000": 78.48563317257356,
"0.500000": 97.31798360385088,
"0.900000": 109.89202084295582,
"0.990000": 109.99626121011262
}
}
]
}
}
]

View file

@ -1,22 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package format
import (
"testing"
)
// TestInterface statically asserts at compile time that *registry satisfies
// the Registry interface.
func TestInterface(t *testing.T) {
	var _ Registry = &registry{}
}

View file

@ -1,70 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package format
import (
"github.com/prometheus/prometheus/model"
"io"
"time"
)
// Processor is responsible for decoding the actual message responses from
// stream into a format that can be consumed with the end result written
// to the results channel.
type Processor interface {
	// Process performs the work on the input and closes the incoming stream.
	// Decoded samples are stamped with timestamp and merged with baseLabels
	// before being sent on results.
	Process(stream io.ReadCloser, timestamp time.Time, baseLabels model.LabelSet, results chan Result) (err error)
}
// The ProcessorFunc type allows the use of ordinary functions for processors.
type ProcessorFunc func(io.ReadCloser, time.Time, model.LabelSet, chan Result) error

// Process satisfies the Processor interface by calling f itself.
func (f ProcessorFunc) Process(stream io.ReadCloser, timestamp time.Time, baseLabels model.LabelSet, results chan Result) error {
	return f(stream, timestamp, baseLabels, results)
}
// LabelSet converts a plain map[string]string into a model.LabelSet.
//
// NOTE: This should be deleted when support for go 1.0.3 is removed; 1.1 is
// smart enough to unmarshal JSON objects into model.LabelSet directly.
func LabelSet(labels map[string]string) model.LabelSet {
	result := make(model.LabelSet, len(labels))
	for name, value := range labels {
		result[model.LabelName(name)] = model.LabelValue(value)
	}
	return result
}
// mergeTargetLabels merges a target's base labels on top of the labels of an
// exported sample. If a label is already defined in the exported sample, we
// assume that we are scraping an intermediate exporter and attach
// "exporter_"-prefixes to Prometheus' own base labels.
func mergeTargetLabels(entityLabels, targetLabels model.LabelSet) model.LabelSet {
	merged := model.LabelSet{}

	for name, value := range entityLabels {
		merged[name] = value
	}

	for name, value := range targetLabels {
		if _, clash := merged[name]; clash {
			// The entity already claims this label: keep its value and file
			// the target's under an exporter-prefixed name.
			merged[model.ExporterLabelPrefix+name] = value
		} else {
			merged[name] = value
		}
	}

	return merged
}

View file

@ -1,140 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package format
import (
"encoding/json"
"fmt"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility"
"io"
"io/ioutil"
"time"
)
// JSON field names and metric type discriminators of the 0.0.1 telemetry
// schema.
const (
	baseLabels001 = "baseLabels"
	counter001    = "counter"
	docstring001  = "docstring"
	gauge001      = "gauge"
	histogram001  = "histogram"
	labels001     = "labels"
	metric001     = "metric"
	type001       = "type"
	value001      = "value"
	percentile001 = "percentile"
)
var (
	// Processor001 handles telemetry API version 0.0.1 payloads.
	Processor001 Processor = &processor001{}
)
// processor001 is responsible for handling API version 0.0.1.
type processor001 struct {
	// time is a clock abstraction; presumably a test seam — confirm usage
	// elsewhere in the package.
	time utility.Time
}
// entity001 represents the JSON structure that schema version 0.0.1 uses: a
// list of metrics, each carrying base labels, a docstring, and a typed set
// of values. Value is left as interface{} because its shape depends on the
// metric type (float64 for counters/gauges, a percentile map for histograms).
type entity001 []struct {
	BaseLabels map[string]string `json:"baseLabels"`
	Docstring  string            `json:"docstring"`
	Metric     struct {
		MetricType string `json:"type"`
		Value      []struct {
			Labels map[string]string `json:"labels"`
			Value  interface{}       `json:"value"`
		} `json:"value"`
	} `json:"metric"`
}
// Process decodes a version 0.0.1 telemetry payload from stream, converts it
// into model.Samples stamped with timestamp and merged with baseLabels, and
// sends the batch (or per-value conversion errors) on results. The stream is
// always closed before returning; only decode failures return an error.
//
// CHANGE: removed the redundant `break` statements that terminated the
// switch cases — Go switch cases never fall through, so they were dead
// statements (staticcheck S1023). No behavior is affected.
func (p *processor001) Process(stream io.ReadCloser, timestamp time.Time, baseLabels model.LabelSet, results chan Result) error {
	// TODO(matt): Replace with plain-jane JSON unmarshalling.
	defer stream.Close()

	buffer, err := ioutil.ReadAll(stream)
	if err != nil {
		return err
	}

	entities := entity001{}
	if err = json.Unmarshal(buffer, &entities); err != nil {
		return err
	}

	// TODO(matt): This outer loop is a great basis for parallelization.
	pendingSamples := model.Samples{}
	for _, entity := range entities {
		for _, value := range entity.Metric.Value {
			entityLabels := LabelSet(entity.BaseLabels).Merge(LabelSet(value.Labels))
			labels := mergeTargetLabels(entityLabels, baseLabels)

			switch entity.Metric.MetricType {
			case gauge001, counter001:
				// Scalar metrics carry a single float64 value.
				sampleValue, ok := value.Value.(float64)
				if !ok {
					err = fmt.Errorf("Could not convert value from %s %s to float64.", entity, value)
					results <- Result{Err: err}
					continue
				}

				pendingSamples = append(pendingSamples, model.Sample{
					Metric:    model.Metric(labels),
					Timestamp: timestamp,
					Value:     model.SampleValue(sampleValue),
				})

			case histogram001:
				// Histogram values are maps of percentile name to value; each
				// percentile becomes its own sample with a "percentile" label.
				sampleValue, ok := value.Value.(map[string]interface{})
				if !ok {
					err = fmt.Errorf("Could not convert value from %q to a map[string]interface{}.", value.Value)
					results <- Result{Err: err}
					continue
				}

				for percentile, percentileValue := range sampleValue {
					individualValue, ok := percentileValue.(float64)
					if !ok {
						err = fmt.Errorf("Could not convert value from %q to a float64.", percentileValue)
						results <- Result{Err: err}
						continue
					}

					childMetric := make(map[model.LabelName]model.LabelValue, len(labels)+1)
					for k, v := range labels {
						childMetric[k] = v
					}
					childMetric[model.LabelName(percentile001)] = model.LabelValue(percentile)

					pendingSamples = append(pendingSamples, model.Sample{
						Metric:    model.Metric(childMetric),
						Timestamp: timestamp,
						Value:     model.SampleValue(individualValue),
					})
				}
			}
			// NOTE(review): values of unknown metric types are silently
			// skipped here; confirm whether an error Result should be
			// emitted instead, as Processor002 does.
		}
	}

	if len(pendingSamples) > 0 {
		results <- Result{Samples: pendingSamples}
	}

	return nil
}

View file

@ -1,216 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package format
import (
"container/list"
"fmt"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility/test"
"os"
"path"
"testing"
"time"
)
// testProcessor001Process feeds fixture JSON files through Processor001 and
// checks the emitted samples against an expected, order-insensitive set.
// Timestamps are deliberately not compared (see the XXX below).
func testProcessor001Process(t test.Tester) {
	var scenarios = []struct {
		in         string
		baseLabels model.LabelSet
		out        model.Samples
		err        error
	}{
		// An empty payload must fail JSON decoding.
		{
			in:  "empty.json",
			err: fmt.Errorf("unexpected end of JSON input"),
		},
		// A payload with counters and histograms: counters yield one sample
		// per labelled value; histograms yield one sample per percentile.
		{
			in: "test0_0_1-0_0_2.json",
			baseLabels: model.LabelSet{
				model.JobLabel: "batch_exporter",
			},
			out: model.Samples{
				model.Sample{
					Metric: model.Metric{"service": "zed", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
					Value:  25,
				},
				model.Sample{
					Metric: model.Metric{"service": "bar", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
					Value:  25,
				},
				model.Sample{
					Metric: model.Metric{"service": "foo", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
					Value:  25,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
					Value:  0.0459814091918713,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
					Value:  78.48563317257356,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
					Value:  15.890724674774395,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
					Value:  0.0459814091918713,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
					Value:  78.48563317257356,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
					Value:  15.890724674774395,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
					Value:  0.6120456642749681,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
					Value:  97.31798360385088,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
					Value:  84.63044031436561,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
					Value:  1.355915069887731,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
					Value:  109.89202084295582,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
					Value:  160.21100853053224,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
					Value:  1.772733213161236,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
					Value:  109.99626121011262,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
					Value:  172.49828748957728,
				},
			},
		},
	}
	for i, scenario := range scenarios {
		inputChannel := make(chan Result, 1024)
		// NOTE(review): these defers accumulate and only run at function
		// return, not per iteration — acceptable at this scenario count.
		defer func(c chan Result) {
			close(c)
		}(inputChannel)
		reader, err := os.Open(path.Join("fixtures", scenario.in))
		if err != nil {
			t.Fatalf("%d. couldn't open scenario input file %s: %s", i, scenario.in, err)
		}
		err = Processor001.Process(reader, time.Now(), scenario.baseLabels, inputChannel)
		if !test.ErrorEqual(scenario.err, err) {
			t.Errorf("%d. expected err of %s, got %s", i, scenario.err, err)
			continue
		}
		// Drain everything the processor buffered onto the channel.
		delivered := model.Samples{}
		for len(inputChannel) != 0 {
			result := <-inputChannel
			if result.Err != nil {
				t.Fatalf("%d. expected no error, got: %s", i, result.Err)
			}
			delivered = append(delivered, result.Samples...)
		}
		if len(delivered) != len(scenario.out) {
			t.Errorf("%d. expected output length of %d, got %d", i, len(scenario.out), len(delivered))
			continue
		}
		// Match delivered samples against the expected set without assuming
		// any ordering; each expectation may be consumed at most once.
		expectedElements := list.New()
		for _, j := range scenario.out {
			expectedElements.PushBack(j)
		}
		for j := 0; j < len(delivered); j++ {
			actual := delivered[j]
			found := false
			for element := expectedElements.Front(); element != nil && found == false; element = element.Next() {
				candidate := element.Value.(model.Sample)
				if candidate.Value != actual.Value {
					continue
				}
				if len(candidate.Metric) != len(actual.Metric) {
					continue
				}
				// NOTE(review): this accepts a candidate as soon as ANY one
				// label value matches (it breaks true on the first hit), not
				// when all labels match — confirm whether a full label-set
				// comparison was intended.
				labelsMatch := false
				for key, value := range candidate.Metric {
					actualValue, ok := actual.Metric[key]
					if !ok {
						break
					}
					if actualValue == value {
						labelsMatch = true
						break
					}
				}
				if !labelsMatch {
					continue
				}
				// XXX: Test time.
				found = true
				expectedElements.Remove(element)
			}
			if !found {
				t.Errorf("%d.%d. expected to find %s among candidate, absent", i, j, actual)
			}
		}
	}
}
// TestProcessor001Process runs the 0.0.1 processor scenarios under `go test`.
func TestProcessor001Process(t *testing.T) {
	testProcessor001Process(t)
}
// BenchmarkProcessor001Process measures end-to-end decoding of the fixture
// payloads, including fixture file I/O per iteration.
func BenchmarkProcessor001Process(b *testing.B) {
	for i := 0; i < b.N; i++ {
		testProcessor001Process(b)
	}
}

View file

@ -1,114 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package format
import (
"encoding/json"
"fmt"
"github.com/prometheus/prometheus/model"
"io"
"time"
)
// Processor for telemetry schema version 0.0.2. It decodes the JSON entity
// list from stream (closing it afterwards), converts counter/gauge and
// histogram values into model.Samples stamped with timestamp and merged with
// baseLabels, and sends the batch on results. Per-entity decoding problems
// and unknown metric types are reported as error Results rather than
// aborting; only a top-level decode failure returns an error.
var Processor002 ProcessorFunc = func(stream io.ReadCloser, timestamp time.Time, baseLabels model.LabelSet, results chan Result) error {
	// container for telemetry data; the per-type payload is kept as raw JSON
	// and decoded below once the metric type is known.
	var entities []struct {
		BaseLabels map[string]string `json:"baseLabels"`
		Docstring  string            `json:"docstring"`
		Metric     struct {
			Type   string          `json:"type"`
			Values json.RawMessage `json:"value"`
		} `json:"metric"`
	}
	// concrete type for histogram values: one value per percentile
	type histogram struct {
		Labels map[string]string            `json:"labels"`
		Values map[string]model.SampleValue `json:"value"`
	}
	// concrete type for counter and gauge values
	type counter struct {
		Labels map[string]string `json:"labels"`
		Value  model.SampleValue `json:"value"`
	}
	defer stream.Close()
	if err := json.NewDecoder(stream).Decode(&entities); err != nil {
		return err
	}
	pendingSamples := model.Samples{}
	for _, entity := range entities {
		switch entity.Metric.Type {
		case "counter", "gauge":
			var values []counter
			if err := json.Unmarshal(entity.Metric.Values, &values); err != nil {
				results <- Result{
					Err: fmt.Errorf("Could not extract %s value: %s", entity.Metric.Type, err),
				}
				continue
			}
			for _, counter := range values {
				entityLabels := LabelSet(entity.BaseLabels).Merge(LabelSet(counter.Labels))
				labels := mergeTargetLabels(entityLabels, baseLabels)
				pendingSamples = append(pendingSamples, model.Sample{
					Metric:    model.Metric(labels),
					Timestamp: timestamp,
					Value:     counter.Value,
				})
			}
		case "histogram":
			var values []histogram
			if err := json.Unmarshal(entity.Metric.Values, &values); err != nil {
				results <- Result{
					Err: fmt.Errorf("Could not extract %s value: %s", entity.Metric.Type, err),
				}
				continue
			}
			// Each percentile of each histogram becomes its own sample,
			// distinguished by an added "percentile" label.
			for _, histogram := range values {
				for percentile, value := range histogram.Values {
					entityLabels := LabelSet(entity.BaseLabels).Merge(LabelSet(histogram.Labels))
					entityLabels[model.LabelName("percentile")] = model.LabelValue(percentile)
					labels := mergeTargetLabels(entityLabels, baseLabels)
					pendingSamples = append(pendingSamples, model.Sample{
						Metric:    model.Metric(labels),
						Timestamp: timestamp,
						Value:     value,
					})
				}
			}
		default:
			results <- Result{
				Err: fmt.Errorf("Unknown metric type %q", entity.Metric.Type),
			}
		}
	}
	if len(pendingSamples) > 0 {
		results <- Result{Samples: pendingSamples}
	}
	return nil
}

View file

@ -1,216 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package format
import (
"container/list"
"fmt"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility/test"
"os"
"path"
"testing"
"time"
)
// testProcessor002Process exercises Processor002.Process against on-disk
// fixture files, verifying both the error it returns and the samples it
// emits on the result channel.
//
// Each scenario supplies a fixture filename (in), base labels to attach,
// the expected samples (out), and the expected processing error (err).
// Sample delivery order is not guaranteed, so each delivered sample is
// matched against (and removed from) the remaining expected set.
func testProcessor002Process(t test.Tester) {
	var scenarios = []struct {
		in         string
		baseLabels model.LabelSet
		out        model.Samples
		err        error
	}{
		{
			in:  "empty.json",
			err: fmt.Errorf("EOF"),
		},
		{
			in: "test0_0_1-0_0_2.json",
			baseLabels: model.LabelSet{
				model.JobLabel: "batch_exporter",
			},
			out: model.Samples{
				model.Sample{
					Metric: model.Metric{"service": "zed", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
					Value:  25,
				},
				model.Sample{
					Metric: model.Metric{"service": "bar", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
					Value:  25,
				},
				model.Sample{
					Metric: model.Metric{"service": "foo", model.MetricNameLabel: "rpc_calls_total", "job": "batch_job", "exporter_job": "batch_exporter"},
					Value:  25,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
					Value:  0.0459814091918713,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
					Value:  78.48563317257356,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.010000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
					Value:  15.890724674774395,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
					Value:  0.0459814091918713,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
					Value:  78.48563317257356,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.050000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
					Value:  15.890724674774395,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
					Value:  0.6120456642749681,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
					Value:  97.31798360385088,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.500000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
					Value:  84.63044031436561,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
					Value:  1.355915069887731,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
					Value:  109.89202084295582,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.900000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
					Value:  160.21100853053224,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "zed", "job": "batch_exporter"},
					Value:  1.772733213161236,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "bar", "job": "batch_exporter"},
					Value:  109.99626121011262,
				},
				model.Sample{
					Metric: model.Metric{"percentile": "0.990000", model.MetricNameLabel: "rpc_latency_microseconds", "service": "foo", "job": "batch_exporter"},
					Value:  172.49828748957728,
				},
			},
		},
	}

	for i, scenario := range scenarios {
		inputChannel := make(chan Result, 1024)
		defer func(c chan Result) {
			close(c)
		}(inputChannel)

		reader, err := os.Open(path.Join("fixtures", scenario.in))
		if err != nil {
			t.Fatalf("%d. couldn't open scenario input file %s: %s", i, scenario.in, err)
		}

		err = Processor002.Process(reader, time.Now(), scenario.baseLabels, inputChannel)
		// Close the fixture explicitly: a defer here would only fire once the
		// whole test function returns, leaking one descriptor per scenario.
		reader.Close()
		if !test.ErrorEqual(scenario.err, err) {
			t.Errorf("%d. expected err of %s, got %s", i, scenario.err, err)
			continue
		}

		delivered := model.Samples{}
		for len(inputChannel) != 0 {
			result := <-inputChannel
			if result.Err != nil {
				t.Fatalf("%d. expected no error, got: %s", i, result.Err)
			}
			delivered = append(delivered, result.Samples...)
		}

		if len(delivered) != len(scenario.out) {
			t.Errorf("%d. expected output length of %d, got %d", i, len(scenario.out), len(delivered))
			continue
		}

		expectedElements := list.New()
		for _, j := range scenario.out {
			expectedElements.PushBack(j)
		}

		for j := 0; j < len(delivered); j++ {
			actual := delivered[j]
			found := false
			for element := expectedElements.Front(); element != nil && !found; element = element.Next() {
				candidate := element.Value.(model.Sample)
				if candidate.Value != actual.Value {
					continue
				}
				if len(candidate.Metric) != len(actual.Metric) {
					continue
				}

				// BUG FIX: the previous version declared a match as soon as a
				// single label pair agreed, so two metrics sharing any one
				// label (e.g. the job label) compared as equal. Require every
				// label pair to match instead.
				labelsMatch := true
				for key, value := range candidate.Metric {
					if actualValue, ok := actual.Metric[key]; !ok || actualValue != value {
						labelsMatch = false
						break
					}
				}

				if !labelsMatch {
					continue
				}

				// XXX: Test time.
				found = true
				expectedElements.Remove(element)
			}

			if !found {
				t.Errorf("%d.%d. expected to find %s among candidate, absent", i, j, actual)
			}
		}
	}
}
// TestProcessor002Process runs the Processor002 scenario suite under the
// standard testing harness.
func TestProcessor002Process(t *testing.T) {
	testProcessor002Process(t)
}
// BenchmarkProcessor002Process measures the cost of one full pass over the
// Processor002 scenario suite, repeated b.N times.
func BenchmarkProcessor002Process(b *testing.B) {
	for run := 0; run < b.N; run++ {
		testProcessor002Process(b)
	}
}

View file

@ -1,24 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package format
import (
"github.com/prometheus/prometheus/model"
)
// Result encapsulates the outcome from processing samples from a source.
type Result struct {
	// Err records any failure encountered while extracting samples.
	Err error
	// Samples holds the samples extracted from the source.
	Samples model.Samples
}

View file

@ -14,19 +14,26 @@ package retrieval
import (
"fmt"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/retrieval/format"
"log"
"net/http"
"os"
"strings"
"time"
"github.com/prometheus/client_golang/extraction"
clientmodel "github.com/prometheus/client_golang/model"
)
var (
localhostRepresentations = []string{"http://127.0.0.1", "http://localhost"}
const (
InstanceLabel clientmodel.LabelName = "instance"
// The metric name for the synthetic health variable.
ScrapeHealthMetricName clientmodel.LabelValue = "up"
)
var localhostRepresentations = []string{"http://127.0.0.1", "http://localhost"}
// The state of the given Target.
type TargetState int
@ -81,7 +88,7 @@ type Target interface {
// alluded to in the scheduledFor function, to use this as it wants to. The
// current use case is to create a common batching time for scraping multiple
// Targets in the future through the TargetPool.
Scrape(earliest time.Time, results chan format.Result) error
Scrape(earliest time.Time, results chan<- *extraction.Result) error
// Fulfill the healthReporter interface.
State() TargetState
// Report the soonest time at which this Target may be scheduled for
@ -100,7 +107,7 @@ type Target interface {
// to the address of the prometheus server.
GlobalAddress() string
// Return the target's base labels.
BaseLabels() model.LabelSet
BaseLabels() clientmodel.LabelSet
// Merge a new externally supplied target definition (e.g. with changed base
// labels) into an old target definition for the same endpoint. Preserve
// remaining information - like health state - from the old target.
@ -121,13 +128,13 @@ type target struct {
// What is the deadline for the HTTP or HTTPS against this endpoint.
Deadline time.Duration
// Any base labels that are added to this target and its metrics.
baseLabels model.LabelSet
baseLabels clientmodel.LabelSet
// The HTTP client used to scrape the target's endpoint.
client http.Client
}
// Furnish a reasonably configured target for querying.
func NewTarget(address string, deadline time.Duration, baseLabels model.LabelSet) Target {
func NewTarget(address string, deadline time.Duration, baseLabels clientmodel.LabelSet) Target {
target := &target{
address: address,
Deadline: deadline,
@ -143,32 +150,32 @@ func NewTarget(address string, deadline time.Duration, baseLabels model.LabelSet
return target
}
func (t *target) recordScrapeHealth(results chan format.Result, timestamp time.Time, healthy bool) {
metric := model.Metric{}
func (t *target) recordScrapeHealth(results chan<- *extraction.Result, timestamp time.Time, healthy bool) {
metric := clientmodel.Metric{}
for label, value := range t.baseLabels {
metric[label] = value
}
metric[model.MetricNameLabel] = model.ScrapeHealthMetricName
metric[model.InstanceLabel] = model.LabelValue(t.Address())
metric[clientmodel.MetricNameLabel] = clientmodel.LabelValue(ScrapeHealthMetricName)
metric[InstanceLabel] = clientmodel.LabelValue(t.Address())
healthValue := model.SampleValue(0)
healthValue := clientmodel.SampleValue(0)
if healthy {
healthValue = model.SampleValue(1)
healthValue = clientmodel.SampleValue(1)
}
sample := model.Sample{
sample := &clientmodel.Sample{
Metric: metric,
Timestamp: timestamp,
Value: healthValue,
}
results <- format.Result{
results <- &extraction.Result{
Err: nil,
Samples: model.Samples{sample},
Samples: clientmodel.Samples{sample},
}
}
func (t *target) Scrape(earliest time.Time, results chan format.Result) (err error) {
func (t *target) Scrape(earliest time.Time, results chan<- *extraction.Result) (err error) {
now := time.Now()
futureState := t.state
@ -187,7 +194,7 @@ func (t *target) Scrape(earliest time.Time, results chan format.Result) (err err
return err
}
func (t *target) scrape(timestamp time.Time, results chan format.Result) (err error) {
func (t *target) scrape(timestamp time.Time, results chan<- *extraction.Result) (err error) {
defer func(start time.Time) {
ms := float64(time.Since(start)) / float64(time.Millisecond)
labels := map[string]string{address: t.Address(), outcome: success}
@ -205,19 +212,24 @@ func (t *target) scrape(timestamp time.Time, results chan format.Result) (err er
}
defer resp.Body.Close()
processor, err := format.DefaultRegistry.ProcessorForRequestHeader(resp.Header)
processor, err := extraction.ProcessorForRequestHeader(resp.Header)
if err != nil {
return err
}
// XXX: This is a wart; we need to handle this more gracefully down the
// road, especially once we have service discovery support.
baseLabels := model.LabelSet{model.InstanceLabel: model.LabelValue(t.Address())}
baseLabels := clientmodel.LabelSet{InstanceLabel: clientmodel.LabelValue(t.Address())}
for baseLabel, baseValue := range t.baseLabels {
baseLabels[baseLabel] = baseValue
}
return processor.Process(resp.Body, timestamp, baseLabels, results)
processOptions := &extraction.ProcessOptions{
Timestamp: timestamp,
BaseLabels: baseLabels,
}
return processor.ProcessSingle(resp.Body, results, processOptions)
}
func (t target) State() TargetState {
@ -249,7 +261,7 @@ func (t target) GlobalAddress() string {
return address
}
func (t target) BaseLabels() model.LabelSet {
func (t target) BaseLabels() clientmodel.LabelSet {
return t.baseLabels
}

View file

@ -19,8 +19,9 @@ import (
"net/url"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility"
)
@ -61,8 +62,8 @@ func (p *sdTargetProvider) Targets() ([]Target, error) {
return nil, err
}
baseLabels := model.LabelSet{
model.JobLabel: model.LabelValue(p.job.GetName()),
baseLabels := clientmodel.LabelSet{
clientmodel.JobLabel: clientmodel.LabelValue(p.job.GetName()),
}
targets := make([]Target, 0, len(addrs))

View file

@ -14,12 +14,14 @@
package retrieval
import (
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/retrieval/format"
"net/http"
"net/http/httptest"
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/client_golang/extraction"
)
func TestTargetScrapeUpdatesState(t *testing.T) {
@ -28,7 +30,7 @@ func TestTargetScrapeUpdatesState(t *testing.T) {
state: UNKNOWN,
address: "bad schema",
}
testTarget.Scrape(time.Time{}, make(chan format.Result, 2))
testTarget.Scrape(time.Time{}, make(chan *extraction.Result, 2))
if testTarget.state != UNREACHABLE {
t.Errorf("Expected target state %v, actual: %v", UNREACHABLE, testTarget.state)
}
@ -38,11 +40,11 @@ func TestTargetRecordScrapeHealth(t *testing.T) {
testTarget := target{
scheduler: literalScheduler{},
address: "http://example.url",
baseLabels: model.LabelSet{model.JobLabel: "testjob"},
baseLabels: clientmodel.LabelSet{clientmodel.JobLabel: "testjob"},
}
now := time.Now()
results := make(chan format.Result)
results := make(chan *extraction.Result)
go testTarget.recordScrapeHealth(results, now, true)
result := <-results
@ -52,11 +54,11 @@ func TestTargetRecordScrapeHealth(t *testing.T) {
}
actual := result.Samples[0]
expected := model.Sample{
Metric: model.Metric{
model.MetricNameLabel: model.ScrapeHealthMetricName,
model.InstanceLabel: "http://example.url",
model.JobLabel: "testjob",
expected := &clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: ScrapeHealthMetricName,
InstanceLabel: "http://example.url",
clientmodel.JobLabel: "testjob",
},
Timestamp: now,
Value: 1,
@ -81,8 +83,8 @@ func TestTargetScrapeTimeout(t *testing.T) {
defer server.Close()
testTarget := NewTarget(server.URL, 10*time.Millisecond, model.LabelSet{})
results := make(chan format.Result, 1024)
testTarget := NewTarget(server.URL, 10*time.Millisecond, clientmodel.LabelSet{})
results := make(chan *extraction.Result, 1024)
// scrape once without timeout
signal <- true

View file

@ -14,11 +14,13 @@
package retrieval
import (
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/retrieval/format"
"log"
"time"
"github.com/prometheus/client_golang/extraction"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/config"
)
type TargetManager interface {
@ -34,10 +36,10 @@ type TargetManager interface {
type targetManager struct {
requestAllowance chan bool
poolsByJob map[string]*TargetPool
results chan format.Result
results chan<- *extraction.Result
}
func NewTargetManager(results chan format.Result, requestAllowance int) TargetManager {
func NewTargetManager(results chan<- *extraction.Result, requestAllowance int) TargetManager {
return &targetManager{
requestAllowance: make(chan bool, requestAllowance),
results: results,
@ -97,12 +99,12 @@ func (m *targetManager) AddTargetsFromConfig(config config.Config) {
}
for _, targetGroup := range job.TargetGroup {
baseLabels := model.LabelSet{
model.JobLabel: model.LabelValue(job.GetName()),
baseLabels := clientmodel.LabelSet{
clientmodel.JobLabel: clientmodel.LabelValue(job.GetName()),
}
if targetGroup.Labels != nil {
for _, label := range targetGroup.Labels.Label {
baseLabels[model.LabelName(label.GetName())] = model.LabelValue(label.GetValue())
baseLabels[clientmodel.LabelName(label.GetName())] = clientmodel.LabelValue(label.GetValue())
}
}

View file

@ -14,14 +14,19 @@
package retrieval
import (
"code.google.com/p/goprotobuf/proto"
"github.com/prometheus/prometheus/config"
pb "github.com/prometheus/prometheus/config/generated"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/retrieval/format"
"github.com/prometheus/prometheus/utility/test"
"testing"
"time"
"code.google.com/p/goprotobuf/proto"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/client_golang/extraction"
pb "github.com/prometheus/prometheus/config/generated"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/utility/test"
)
type fakeTarget struct {
@ -43,15 +48,15 @@ func (t fakeTarget) GlobalAddress() string {
return t.Address()
}
func (t fakeTarget) BaseLabels() model.LabelSet {
return model.LabelSet{}
func (t fakeTarget) BaseLabels() clientmodel.LabelSet {
return clientmodel.LabelSet{}
}
func (t fakeTarget) Interval() time.Duration {
return t.interval
}
func (t *fakeTarget) Scrape(e time.Time, r chan format.Result) error {
func (t *fakeTarget) Scrape(e time.Time, r chan<- *extraction.Result) error {
t.scrapeCount++
return nil
@ -71,7 +76,7 @@ func (t *fakeTarget) scheduledFor() (time time.Time) {
func (t *fakeTarget) Merge(newTarget Target) {}
func testTargetManager(t test.Tester) {
results := make(chan format.Result, 5)
results := make(chan *extraction.Result, 5)
targetManager := NewTargetManager(results, 3)
testJob1 := config.JobConfig{
JobConfig: pb.JobConfig{

View file

@ -19,7 +19,7 @@ import (
"sync"
"time"
"github.com/prometheus/prometheus/retrieval/format"
"github.com/prometheus/client_golang/extraction"
)
const (
@ -50,7 +50,7 @@ func NewTargetPool(m TargetManager, p TargetProvider) *TargetPool {
}
}
func (p *TargetPool) Run(results chan format.Result, interval time.Duration) {
func (p *TargetPool) Run(results chan<- *extraction.Result, interval time.Duration) {
ticker := time.NewTicker(interval)
defer ticker.Stop()
@ -116,14 +116,14 @@ func (p *TargetPool) replaceTargets(newTargets []Target) {
p.targets = newTargets
}
func (p *TargetPool) runSingle(earliest time.Time, results chan format.Result, t Target) {
func (p *TargetPool) runSingle(earliest time.Time, results chan<- *extraction.Result, t Target) {
p.manager.acquire()
defer p.manager.release()
t.Scrape(earliest, results)
}
func (p *TargetPool) runIteration(results chan format.Result, interval time.Duration) {
func (p *TargetPool) runIteration(results chan<- *extraction.Result, interval time.Duration) {
if p.targetProvider != nil {
targets, err := p.targetProvider.Targets()
if err != nil {

View file

@ -18,7 +18,8 @@ import (
"testing"
"time"
"github.com/prometheus/prometheus/retrieval/format"
"github.com/prometheus/client_golang/extraction"
"github.com/prometheus/prometheus/utility/test"
)
@ -149,7 +150,7 @@ func TestTargetPoolIterationWithUnhealthyTargetsFinishes(t *testing.T) {
done := make(chan bool)
go func() {
pool.runIteration(make(chan format.Result), time.Duration(0))
pool.runIteration(make(chan *extraction.Result), time.Duration(0))
done <- true
}()

View file

@ -19,13 +19,24 @@ import (
"sync"
"time"
"github.com/prometheus/prometheus/model"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/rules/ast"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/utility"
)
const (
// The metric name for synthetic alert timeseries.
AlertMetricName clientmodel.LabelValue = "ALERTS"
// The label name indicating the name of an alert.
AlertNameLabel clientmodel.LabelName = "alertname"
// The label name indicating the state of an alert.
AlertStateLabel clientmodel.LabelName = "alertstate"
)
// States that active alerts can be in.
type AlertState int
@ -53,27 +64,27 @@ type Alert struct {
// The name of the alert.
Name string
// The vector element labelset triggering this alert.
Labels model.LabelSet
Labels clientmodel.LabelSet
// The state of the alert (PENDING or FIRING).
State AlertState
// The time when the alert first transitioned into PENDING state.
ActiveSince time.Time
// The value of the alert expression for this vector element.
Value model.SampleValue
Value clientmodel.SampleValue
}
// sample returns a Sample suitable for recording the alert.
func (a Alert) sample(timestamp time.Time, value model.SampleValue) model.Sample {
recordedMetric := model.Metric{}
func (a Alert) sample(timestamp time.Time, value clientmodel.SampleValue) *clientmodel.Sample {
recordedMetric := clientmodel.Metric{}
for label, value := range a.Labels {
recordedMetric[label] = value
}
recordedMetric[model.MetricNameLabel] = model.AlertMetricName
recordedMetric[model.AlertNameLabel] = model.LabelValue(a.Name)
recordedMetric[model.AlertStateLabel] = model.LabelValue(a.State.String())
recordedMetric[clientmodel.MetricNameLabel] = AlertMetricName
recordedMetric[AlertNameLabel] = clientmodel.LabelValue(a.Name)
recordedMetric[AlertStateLabel] = clientmodel.LabelValue(a.State.String())
return model.Sample{
return &clientmodel.Sample{
Metric: recordedMetric,
Value: value,
Timestamp: timestamp,
@ -90,13 +101,13 @@ type AlertingRule struct {
// output vector before an alert transitions from PENDING to FIRING state.
holdDuration time.Duration
// Extra labels to attach to the resulting alert sample vectors.
labels model.LabelSet
labels clientmodel.LabelSet
// Protects the below.
mutex sync.Mutex
// A map of alerts which are currently active (PENDING or FIRING), keyed by
// the fingerprint of the labelset they correspond to.
activeAlerts map[model.Fingerprint]*Alert
activeAlerts map[clientmodel.Fingerprint]*Alert
}
func (rule *AlertingRule) Name() string { return rule.name }
@ -119,16 +130,17 @@ func (rule *AlertingRule) Eval(timestamp time.Time, storage *metric.TieredStorag
// or update the expression value for existing elements.
resultFingerprints := utility.Set{}
for _, sample := range exprResult {
fp := *model.NewFingerprintFromMetric(sample.Metric)
resultFingerprints.Add(fp)
fp := new(clientmodel.Fingerprint)
fp.LoadFromMetric(sample.Metric)
resultFingerprints.Add(*fp)
alert, ok := rule.activeAlerts[fp]
if !ok {
labels := sample.Metric.ToLabelSet()
if _, ok := labels[model.MetricNameLabel]; ok {
delete(labels, model.MetricNameLabel)
if alert, ok := rule.activeAlerts[*fp]; !ok {
labels := clientmodel.LabelSet{}
labels.MergeFromMetric(sample.Metric)
if _, ok := labels[clientmodel.MetricNameLabel]; ok {
delete(labels, clientmodel.MetricNameLabel)
}
rule.activeAlerts[fp] = &Alert{
rule.activeAlerts[*fp] = &Alert{
Name: rule.name,
Labels: labels,
State: PENDING,
@ -175,9 +187,9 @@ func (rule *AlertingRule) String() string {
}
func (rule *AlertingRule) HTMLSnippet() template.HTML {
alertMetric := model.Metric{
model.MetricNameLabel: model.AlertMetricName,
model.AlertNameLabel: model.LabelValue(rule.name),
alertMetric := clientmodel.Metric{
clientmodel.MetricNameLabel: AlertMetricName,
AlertNameLabel: clientmodel.LabelValue(rule.name),
}
return template.HTML(fmt.Sprintf(
`ALERT <a href="%s">%s</a> IF <a href="%s">%s</a> FOR %s WITH %s`,
@ -214,12 +226,12 @@ func (rule *AlertingRule) ActiveAlerts() []Alert {
}
// Construct a new AlertingRule.
func NewAlertingRule(name string, vector ast.VectorNode, holdDuration time.Duration, labels model.LabelSet) *AlertingRule {
func NewAlertingRule(name string, vector ast.VectorNode, holdDuration time.Duration, labels clientmodel.LabelSet) *AlertingRule {
return &AlertingRule{
name: name,
vector: vector,
holdDuration: holdDuration,
labels: labels,
activeAlerts: map[model.Fingerprint]*Alert{},
activeAlerts: map[clientmodel.Fingerprint]*Alert{},
}
}

View file

@ -16,25 +16,29 @@ package ast
import (
"errors"
"fmt"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/metric"
"hash/fnv"
"log"
"math"
"sort"
"strings"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/metric"
)
// ----------------------------------------------------------------------------
// Raw data value types.
type Vector model.Samples
type Matrix []model.SampleSet
type Vector clientmodel.Samples
// BUG(julius): Pointerize this.
type Matrix []metric.SampleSet
type groupedAggregation struct {
labels model.Metric
value model.SampleValue
labels clientmodel.Metric
value clientmodel.SampleValue
groupCount int
}
@ -98,7 +102,7 @@ type Node interface {
// interface represents the type returned to the parent node.
type ScalarNode interface {
Node
Eval(timestamp time.Time, view *viewAdapter) model.SampleValue
Eval(timestamp time.Time, view *viewAdapter) clientmodel.SampleValue
}
type VectorNode interface {
@ -123,7 +127,7 @@ type StringNode interface {
type (
// A numeric literal.
ScalarLiteral struct {
value model.SampleValue
value clientmodel.SampleValue
}
// A function of numeric return type.
@ -146,9 +150,9 @@ type (
type (
// Vector literal, i.e. metric name plus labelset.
VectorLiteral struct {
labels model.LabelSet
labels clientmodel.LabelSet
// Fingerprints are populated from labels at query analysis time.
fingerprints model.Fingerprints
fingerprints clientmodel.Fingerprints
}
// A function of vector return type.
@ -160,7 +164,7 @@ type (
// A vector aggregation with vector return type.
VectorAggregation struct {
aggrType AggrType
groupBy model.LabelNames
groupBy clientmodel.LabelNames
vector VectorNode
}
@ -178,9 +182,9 @@ type (
type (
// Matrix literal, i.e. metric name plus labelset and timerange.
MatrixLiteral struct {
labels model.LabelSet
labels clientmodel.LabelSet
// Fingerprints are populated from labels at query analysis time.
fingerprints model.Fingerprints
fingerprints clientmodel.Fingerprints
interval time.Duration
}
)
@ -228,35 +232,48 @@ func (node MatrixLiteral) Children() Nodes { return Nodes{} }
func (node StringLiteral) Children() Nodes { return Nodes{} }
func (node StringFunctionCall) Children() Nodes { return node.args }
func (node *ScalarLiteral) Eval(timestamp time.Time, view *viewAdapter) model.SampleValue {
func (node *ScalarLiteral) Eval(timestamp time.Time, view *viewAdapter) clientmodel.SampleValue {
return node.value
}
func (node *ScalarArithExpr) Eval(timestamp time.Time, view *viewAdapter) model.SampleValue {
func (node *ScalarArithExpr) Eval(timestamp time.Time, view *viewAdapter) clientmodel.SampleValue {
lhs := node.lhs.Eval(timestamp, view)
rhs := node.rhs.Eval(timestamp, view)
return evalScalarBinop(node.opType, lhs, rhs)
}
func (node *ScalarFunctionCall) Eval(timestamp time.Time, view *viewAdapter) model.SampleValue {
return node.function.callFn(timestamp, view, node.args).(model.SampleValue)
func (node *ScalarFunctionCall) Eval(timestamp time.Time, view *viewAdapter) clientmodel.SampleValue {
return node.function.callFn(timestamp, view, node.args).(clientmodel.SampleValue)
}
func (node *VectorAggregation) labelsToGroupingKey(labels model.Metric) string {
keyParts := []string{}
for _, keyLabel := range node.groupBy {
keyParts = append(keyParts, string(labels[keyLabel]))
func (node *VectorAggregation) labelsToGroupingKey(labels clientmodel.Metric) uint64 {
summer := fnv.New64a()
for _, label := range node.groupBy {
fmt.Fprint(summer, labels[label])
}
return strings.Join(keyParts, ",") // TODO not safe when label value contains comma.
return summer.Sum64()
}
func labelsToKey(labels model.Metric) string {
keyParts := []string{}
func labelsToKey(labels clientmodel.Metric) uint64 {
pairs := metric.LabelPairs{}
for label, value := range labels {
keyParts = append(keyParts, fmt.Sprintf("%v='%v'", label, value))
pairs = append(pairs, &metric.LabelPair{
Name: label,
Value: value,
})
}
sort.Strings(keyParts)
return strings.Join(keyParts, ",") // TODO not safe when label value contains comma.
sort.Sort(pairs)
summer := fnv.New64a()
for _, pair := range pairs {
fmt.Fprint(summer, pair.Name, pair.Value)
}
return summer.Sum64()
}
func EvalVectorInstant(node VectorNode, timestamp time.Time, storage *metric.TieredStorage, queryStats *stats.TimerGroup) (vector Vector, err error) {
@ -282,19 +299,19 @@ func EvalVectorRange(node VectorNode, start time.Time, end time.Time, interval t
// TODO implement watchdog timer for long-running queries.
evalTimer := queryStats.GetTimer(stats.InnerEvalTime).Start()
sampleSets := map[string]*model.SampleSet{}
sampleSets := map[uint64]*metric.SampleSet{}
for t := start; t.Before(end); t = t.Add(interval) {
vector := node.Eval(t, viewAdapter)
for _, sample := range vector {
samplePair := model.SamplePair{
samplePair := &metric.SamplePair{
Value: sample.Value,
Timestamp: sample.Timestamp,
}
groupingKey := labelsToKey(sample.Metric)
if sampleSets[groupingKey] == nil {
sampleSets[groupingKey] = &model.SampleSet{
sampleSets[groupingKey] = &metric.SampleSet{
Metric: sample.Metric,
Values: model.Values{samplePair},
Values: metric.Values{samplePair},
}
} else {
sampleSets[groupingKey].Values = append(sampleSets[groupingKey].Values, samplePair)
@ -312,8 +329,8 @@ func EvalVectorRange(node VectorNode, start time.Time, end time.Time, interval t
return matrix, nil
}
func labelIntersection(metric1, metric2 model.Metric) model.Metric {
intersection := model.Metric{}
func labelIntersection(metric1, metric2 clientmodel.Metric) clientmodel.Metric {
intersection := clientmodel.Metric{}
for label, value := range metric1 {
if metric2[label] == value {
intersection[label] = value
@ -322,18 +339,18 @@ func labelIntersection(metric1, metric2 model.Metric) model.Metric {
return intersection
}
func (node *VectorAggregation) groupedAggregationsToVector(aggregations map[string]*groupedAggregation, timestamp time.Time) Vector {
func (node *VectorAggregation) groupedAggregationsToVector(aggregations map[uint64]*groupedAggregation, timestamp time.Time) Vector {
vector := Vector{}
for _, aggregation := range aggregations {
switch node.aggrType {
case AVG:
aggregation.value = aggregation.value / model.SampleValue(aggregation.groupCount)
aggregation.value = aggregation.value / clientmodel.SampleValue(aggregation.groupCount)
case COUNT:
aggregation.value = model.SampleValue(aggregation.groupCount)
aggregation.value = clientmodel.SampleValue(aggregation.groupCount)
default:
// For other aggregations, we already have the right value.
}
sample := model.Sample{
sample := &clientmodel.Sample{
Metric: aggregation.labels,
Value: aggregation.value,
Timestamp: timestamp,
@ -345,7 +362,7 @@ func (node *VectorAggregation) groupedAggregationsToVector(aggregations map[stri
func (node *VectorAggregation) Eval(timestamp time.Time, view *viewAdapter) Vector {
vector := node.vector.Eval(timestamp, view)
result := map[string]*groupedAggregation{}
result := map[uint64]*groupedAggregation{}
for _, sample := range vector {
groupingKey := node.labelsToGroupingKey(sample.Metric)
if groupedResult, ok := result[groupingKey]; ok {
@ -377,6 +394,7 @@ func (node *VectorAggregation) Eval(timestamp time.Time, view *viewAdapter) Vect
}
}
}
return node.groupedAggregationsToVector(result, timestamp)
}
@ -394,8 +412,8 @@ func (node *VectorFunctionCall) Eval(timestamp time.Time, view *viewAdapter) Vec
}
func evalScalarBinop(opType BinOpType,
lhs model.SampleValue,
rhs model.SampleValue) model.SampleValue {
lhs clientmodel.SampleValue,
rhs clientmodel.SampleValue) clientmodel.SampleValue {
switch opType {
case ADD:
return lhs + rhs
@ -407,13 +425,13 @@ func evalScalarBinop(opType BinOpType,
if rhs != 0 {
return lhs / rhs
} else {
return model.SampleValue(math.Inf(int(rhs)))
return clientmodel.SampleValue(math.Inf(int(rhs)))
}
case MOD:
if rhs != 0 {
return model.SampleValue(int(lhs) % int(rhs))
return clientmodel.SampleValue(int(lhs) % int(rhs))
} else {
return model.SampleValue(math.Inf(int(rhs)))
return clientmodel.SampleValue(math.Inf(int(rhs)))
}
case EQ:
if lhs == rhs {
@ -456,8 +474,8 @@ func evalScalarBinop(opType BinOpType,
}
func evalVectorBinop(opType BinOpType,
lhs model.SampleValue,
rhs model.SampleValue) (model.SampleValue, bool) {
lhs clientmodel.SampleValue,
rhs clientmodel.SampleValue) (clientmodel.SampleValue, bool) {
switch opType {
case ADD:
return lhs + rhs, true
@ -469,13 +487,13 @@ func evalVectorBinop(opType BinOpType,
if rhs != 0 {
return lhs / rhs, true
} else {
return model.SampleValue(math.Inf(int(rhs))), true
return clientmodel.SampleValue(math.Inf(int(rhs))), true
}
case MOD:
if rhs != 0 {
return model.SampleValue(int(lhs) % int(rhs)), true
return clientmodel.SampleValue(int(lhs) % int(rhs)), true
} else {
return model.SampleValue(math.Inf(int(rhs))), true
return clientmodel.SampleValue(math.Inf(int(rhs))), true
}
case EQ:
if lhs == rhs {
@ -521,12 +539,12 @@ func evalVectorBinop(opType BinOpType,
panic("Not all enum values enumerated in switch")
}
func labelsEqual(labels1, labels2 model.Metric) bool {
func labelsEqual(labels1, labels2 clientmodel.Metric) bool {
if len(labels1) != len(labels2) {
return false
}
for label, value := range labels1 {
if labels2[label] != value && label != model.MetricNameLabel {
if labels2[label] != value && label != clientmodel.MetricNameLabel {
return false
}
}
@ -565,7 +583,7 @@ func (node *VectorArithExpr) Eval(timestamp time.Time, view *viewAdapter) Vector
}
func (node *MatrixLiteral) Eval(timestamp time.Time, view *viewAdapter) Matrix {
interval := &model.Interval{
interval := &metric.Interval{
OldestInclusive: timestamp.Add(-node.interval),
NewestInclusive: timestamp,
}
@ -578,7 +596,7 @@ func (node *MatrixLiteral) Eval(timestamp time.Time, view *viewAdapter) Matrix {
}
func (node *MatrixLiteral) EvalBoundaries(timestamp time.Time, view *viewAdapter) Matrix {
interval := &model.Interval{
interval := &metric.Interval{
OldestInclusive: timestamp.Add(-node.interval),
NewestInclusive: timestamp,
}
@ -595,7 +613,7 @@ func (matrix Matrix) Len() int {
}
func (matrix Matrix) Less(i, j int) bool {
return labelsToKey(matrix[i].Metric) < labelsToKey(matrix[j].Metric)
return matrix[i].Metric.String() < matrix[j].Metric.String()
}
func (matrix Matrix) Swap(i, j int) {
@ -613,19 +631,19 @@ func (node *StringFunctionCall) Eval(timestamp time.Time, view *viewAdapter) str
// ----------------------------------------------------------------------------
// Constructors.
func NewScalarLiteral(value model.SampleValue) *ScalarLiteral {
func NewScalarLiteral(value clientmodel.SampleValue) *ScalarLiteral {
return &ScalarLiteral{
value: value,
}
}
func NewVectorLiteral(labels model.LabelSet) *VectorLiteral {
func NewVectorLiteral(labels clientmodel.LabelSet) *VectorLiteral {
return &VectorLiteral{
labels: labels,
}
}
func NewVectorAggregation(aggrType AggrType, vector VectorNode, groupBy model.LabelNames) *VectorAggregation {
func NewVectorAggregation(aggrType AggrType, vector VectorNode, groupBy clientmodel.LabelNames) *VectorAggregation {
return &VectorAggregation{
aggrType: aggrType,
groupBy: groupBy,

View file

@ -16,10 +16,12 @@ package ast
import (
"errors"
"fmt"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility"
"sort"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/utility"
)
type Function struct {
@ -64,9 +66,9 @@ func (function *Function) CheckArgTypes(args []Node) error {
return nil
}
// === time() model.SampleValue ===
// === time() clientmodel.SampleValue ===
func timeImpl(timestamp time.Time, view *viewAdapter, args []Node) interface{} {
return model.SampleValue(time.Now().Unix())
return clientmodel.SampleValue(time.Now().Unix())
}
// === delta(matrix MatrixNode, isCounter ScalarNode) Vector ===
@ -91,8 +93,8 @@ func deltaImpl(timestamp time.Time, view *viewAdapter, args []Node) interface{}
continue
}
counterCorrection := model.SampleValue(0)
lastValue := model.SampleValue(0)
counterCorrection := clientmodel.SampleValue(0)
lastValue := clientmodel.SampleValue(0)
for _, sample := range samples.Values {
currentValue := sample.Value
if isCounter && currentValue < lastValue {
@ -116,10 +118,10 @@ func deltaImpl(timestamp time.Time, view *viewAdapter, args []Node) interface{}
// them. Depending on how many samples are found under a target interval,
// the delta results are distorted and temporal aliasing occurs (ugly
// bumps). This effect is corrected for below.
intervalCorrection := model.SampleValue(targetInterval) / model.SampleValue(sampledInterval)
intervalCorrection := clientmodel.SampleValue(targetInterval) / clientmodel.SampleValue(sampledInterval)
resultValue *= intervalCorrection
resultSample := model.Sample{
resultSample := &clientmodel.Sample{
Metric: samples.Metric,
Value: resultValue,
Timestamp: timestamp,
@ -139,7 +141,7 @@ func rateImpl(timestamp time.Time, view *viewAdapter, args []Node) interface{} {
// matrix, such as looking at the samples themselves.
interval := args[0].(*MatrixLiteral).interval
for i := range vector {
vector[i].Value /= model.SampleValue(interval / time.Second)
vector[i].Value /= clientmodel.SampleValue(interval / time.Second)
}
return vector
}
@ -183,69 +185,69 @@ func sortDescImpl(timestamp time.Time, view *viewAdapter, args []Node) interface
// === sampleVectorImpl() Vector ===
func sampleVectorImpl(timestamp time.Time, view *viewAdapter, args []Node) interface{} {
return Vector{
model.Sample{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "api-server",
"instance": "0",
&clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "api-server",
"instance": "0",
},
Value: 10,
Timestamp: timestamp,
},
model.Sample{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "api-server",
"instance": "1",
&clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "api-server",
"instance": "1",
},
Value: 20,
Timestamp: timestamp,
},
model.Sample{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "api-server",
"instance": "2",
&clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "api-server",
"instance": "2",
},
Value: 30,
Timestamp: timestamp,
},
model.Sample{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "api-server",
"instance": "3",
"group": "canary",
&clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "api-server",
"instance": "3",
"group": "canary",
},
Value: 40,
Timestamp: timestamp,
},
model.Sample{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "api-server",
"instance": "2",
"group": "canary",
&clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "api-server",
"instance": "2",
"group": "canary",
},
Value: 40,
Timestamp: timestamp,
},
model.Sample{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "api-server",
"instance": "3",
"group": "mytest",
&clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "api-server",
"instance": "3",
"group": "mytest",
},
Value: 40,
Timestamp: timestamp,
},
model.Sample{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "api-server",
"instance": "3",
"group": "mytest",
&clientmodel.Sample{
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "api-server",
"instance": "3",
"group": "mytest",
},
Value: 40,
Timestamp: timestamp,

View file

@ -14,9 +14,12 @@
package ast
import (
"github.com/prometheus/prometheus/model"
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/storage/metric"
)
type emptyRangeNode struct{}
@ -28,18 +31,18 @@ func (node emptyRangeNode) Children() Nodes { return Nodes{} }
func (node emptyRangeNode) Eval(timestamp time.Time, view *viewAdapter) Matrix {
return Matrix{
model.SampleSet{
Metric: model.Metric{model.MetricNameLabel: "empty_metric"},
Values: model.Values{},
metric.SampleSet{
Metric: clientmodel.Metric{clientmodel.MetricNameLabel: "empty_metric"},
Values: metric.Values{},
},
}
}
func (node emptyRangeNode) EvalBoundaries(timestamp time.Time, view *viewAdapter) Matrix {
return Matrix{
model.SampleSet{
Metric: model.Metric{model.MetricNameLabel: "empty_metric"},
Values: model.Values{},
metric.SampleSet{
Metric: clientmodel.Metric{clientmodel.MetricNameLabel: "empty_metric"},
Values: metric.Values{},
},
}
}

View file

@ -15,10 +15,12 @@ package ast
import (
"flag"
"github.com/prometheus/prometheus/model"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/metric"
"time"
)
var defaultStalenessDelta = flag.Int("defaultStalenessDelta", 300, "Default staleness delta allowance in seconds during expression evaluations.")
@ -46,14 +48,14 @@ type viewAdapter struct {
// interpolateSamples interpolates a value at a target time between two
// provided sample pairs.
func interpolateSamples(first, second *model.SamplePair, timestamp time.Time) *model.SamplePair {
func interpolateSamples(first, second *metric.SamplePair, timestamp time.Time) *metric.SamplePair {
dv := second.Value - first.Value
dt := second.Timestamp.Sub(first.Timestamp)
dDt := dv / model.SampleValue(dt)
offset := model.SampleValue(timestamp.Sub(first.Timestamp))
dDt := dv / clientmodel.SampleValue(dt)
offset := clientmodel.SampleValue(timestamp.Sub(first.Timestamp))
return &model.SamplePair{
return &metric.SamplePair{
Value: first.Value + (offset * dDt),
Timestamp: timestamp,
}
@ -63,9 +65,9 @@ func interpolateSamples(first, second *model.SamplePair, timestamp time.Time) *m
// surrounding a given target time. If samples are found both before and after
// the target time, the sample value is interpolated between these. Otherwise,
// the single closest sample is returned verbatim.
func (v *viewAdapter) chooseClosestSample(samples model.Values, timestamp time.Time) *model.SamplePair {
var closestBefore *model.SamplePair
var closestAfter *model.SamplePair
func (v *viewAdapter) chooseClosestSample(samples metric.Values, timestamp time.Time) *metric.SamplePair {
var closestBefore *metric.SamplePair
var closestAfter *metric.SamplePair
for _, candidate := range samples {
delta := candidate.Timestamp.Sub(timestamp)
// Samples before target time.
@ -79,7 +81,7 @@ func (v *viewAdapter) chooseClosestSample(samples model.Values, timestamp time.T
continue
}
sample := candidate
closestBefore = &sample
closestBefore = sample
}
// Samples after target time.
@ -93,7 +95,7 @@ func (v *viewAdapter) chooseClosestSample(samples model.Values, timestamp time.T
continue
}
sample := candidate
closestAfter = &sample
closestAfter = sample
}
}
@ -107,7 +109,7 @@ func (v *viewAdapter) chooseClosestSample(samples model.Values, timestamp time.T
}
}
func (v *viewAdapter) GetValueAtTime(fingerprints model.Fingerprints, timestamp time.Time) (samples Vector, err error) {
func (v *viewAdapter) GetValueAtTime(fingerprints clientmodel.Fingerprints, timestamp time.Time) (samples Vector, err error) {
timer := v.stats.GetTimer(stats.GetValueAtTimeTime).Start()
for _, fingerprint := range fingerprints {
sampleCandidates := v.view.GetValueAtTime(fingerprint, timestamp)
@ -117,7 +119,7 @@ func (v *viewAdapter) GetValueAtTime(fingerprints model.Fingerprints, timestamp
continue
}
if samplePair != nil {
samples = append(samples, model.Sample{
samples = append(samples, &clientmodel.Sample{
Metric: m,
Value: samplePair.Value,
Timestamp: timestamp,
@ -128,7 +130,7 @@ func (v *viewAdapter) GetValueAtTime(fingerprints model.Fingerprints, timestamp
return samples, err
}
func (v *viewAdapter) GetBoundaryValues(fingerprints model.Fingerprints, interval *model.Interval) (sampleSets []model.SampleSet, err error) {
func (v *viewAdapter) GetBoundaryValues(fingerprints clientmodel.Fingerprints, interval *metric.Interval) (sampleSets []metric.SampleSet, err error) {
timer := v.stats.GetTimer(stats.GetBoundaryValuesTime).Start()
for _, fingerprint := range fingerprints {
samplePairs := v.view.GetBoundaryValues(fingerprint, *interval)
@ -142,7 +144,7 @@ func (v *viewAdapter) GetBoundaryValues(fingerprints model.Fingerprints, interva
continue
}
sampleSet := model.SampleSet{
sampleSet := metric.SampleSet{
Metric: m,
Values: samplePairs,
}
@ -152,7 +154,7 @@ func (v *viewAdapter) GetBoundaryValues(fingerprints model.Fingerprints, interva
return sampleSets, nil
}
func (v *viewAdapter) GetRangeValues(fingerprints model.Fingerprints, interval *model.Interval) (sampleSets []model.SampleSet, err error) {
func (v *viewAdapter) GetRangeValues(fingerprints clientmodel.Fingerprints, interval *metric.Interval) (sampleSets []metric.SampleSet, err error) {
timer := v.stats.GetTimer(stats.GetRangeValuesTime).Start()
for _, fingerprint := range fingerprints {
samplePairs := v.view.GetRangeValues(fingerprint, *interval)
@ -166,7 +168,7 @@ func (v *viewAdapter) GetRangeValues(fingerprints model.Fingerprints, interval *
continue
}
sampleSet := model.SampleSet{
sampleSet := metric.SampleSet{
Metric: m,
Values: samplePairs,
}

View file

@ -16,13 +16,15 @@ package ast
import (
"encoding/json"
"fmt"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/utility"
"sort"
"strings"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/utility"
)
type OutputFormat int
@ -86,13 +88,13 @@ func (vector Vector) String() string {
func (matrix Matrix) String() string {
metricStrings := make([]string, 0, len(matrix))
for _, sampleSet := range matrix {
metricName, ok := sampleSet.Metric[model.MetricNameLabel]
metricName, ok := sampleSet.Metric[clientmodel.MetricNameLabel]
if !ok {
panic("Tried to print matrix without metric name")
}
labelStrings := make([]string, 0, len(sampleSet.Metric)-1)
for label, value := range sampleSet.Metric {
if label != model.MetricNameLabel {
if label != clientmodel.MetricNameLabel {
labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value))
}
}
@ -296,13 +298,13 @@ func (node *ScalarArithExpr) String() string {
}
func (node *VectorLiteral) String() string {
metricName, ok := node.labels[model.MetricNameLabel]
metricName, ok := node.labels[clientmodel.MetricNameLabel]
if !ok {
panic("Tried to print vector without metric name")
}
labelStrings := make([]string, 0, len(node.labels)-1)
for label, value := range node.labels {
if label != model.MetricNameLabel {
if label != clientmodel.MetricNameLabel {
labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value))
}
}

View file

@ -14,15 +14,17 @@
package ast
import (
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/metric"
"log"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/metric"
)
type FullRangeMap map[model.Fingerprint]time.Duration
type IntervalRangeMap map[model.Fingerprint]bool
type FullRangeMap map[clientmodel.Fingerprint]time.Duration
type IntervalRangeMap map[clientmodel.Fingerprint]bool
type QueryAnalyzer struct {
// Values collected by query analysis.
@ -105,10 +107,10 @@ func viewAdapterForInstantQuery(node Node, timestamp time.Time, storage *metric.
requestBuildTimer := queryStats.GetTimer(stats.ViewRequestBuildTime).Start()
viewBuilder := metric.NewViewRequestBuilder()
for fingerprint, rangeDuration := range analyzer.FullRanges {
viewBuilder.GetMetricRange(fingerprint, timestamp.Add(-rangeDuration), timestamp)
viewBuilder.GetMetricRange(&fingerprint, timestamp.Add(-rangeDuration), timestamp)
}
for fingerprint := range analyzer.IntervalRanges {
viewBuilder.GetMetricAtTime(fingerprint, timestamp)
viewBuilder.GetMetricAtTime(&fingerprint, timestamp)
}
requestBuildTimer.Stop()
@ -132,11 +134,11 @@ func viewAdapterForRangeQuery(node Node, start time.Time, end time.Time, interva
for fingerprint, rangeDuration := range analyzer.FullRanges {
// TODO: we should support GetMetricRangeAtInterval() or similar ops in the view builder.
for t := start; t.Before(end); t = t.Add(interval) {
viewBuilder.GetMetricRange(fingerprint, t.Add(-rangeDuration), t)
viewBuilder.GetMetricRange(&fingerprint, t.Add(-rangeDuration), t)
}
}
for fingerprint := range analyzer.IntervalRanges {
viewBuilder.GetMetricAtInterval(fingerprint, start, end, interval)
viewBuilder.GetMetricAtInterval(&fingerprint, start, end, interval)
}
requestBuildTimer.Stop()

View file

@ -17,19 +17,20 @@ import (
"fmt"
"html"
"github.com/prometheus/prometheus/model"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/rules/ast"
"github.com/prometheus/prometheus/utility"
)
func CreateRecordingRule(name string, labels model.LabelSet, expr ast.Node, permanent bool) (*RecordingRule, error) {
func CreateRecordingRule(name string, labels clientmodel.LabelSet, expr ast.Node, permanent bool) (*RecordingRule, error) {
if _, ok := expr.(ast.VectorNode); !ok {
return nil, fmt.Errorf("Recording rule expression %v does not evaluate to vector type", expr)
}
return NewRecordingRule(name, labels, expr.(ast.VectorNode), permanent), nil
}
func CreateAlertingRule(name string, expr ast.Node, holdDurationStr string, labels model.LabelSet) (*AlertingRule, error) {
func CreateAlertingRule(name string, expr ast.Node, holdDurationStr string, labels clientmodel.LabelSet) (*AlertingRule, error) {
if _, ok := expr.(ast.VectorNode); !ok {
return nil, fmt.Errorf("Alert rule expression %v does not evaluate to vector type", expr)
}
@ -52,7 +53,7 @@ func NewFunctionCall(name string, args []ast.Node) (ast.Node, error) {
return functionCall, nil
}
func NewVectorAggregation(aggrTypeStr string, vector ast.Node, groupBy model.LabelNames) (*ast.VectorAggregation, error) {
func NewVectorAggregation(aggrTypeStr string, vector ast.Node, groupBy clientmodel.LabelNames) (*ast.VectorAggregation, error) {
if _, ok := vector.(ast.VectorNode); !ok {
return nil, fmt.Errorf("Operand of %v aggregation must be of vector type", aggrTypeStr)
}

View file

@ -14,19 +14,21 @@
package rules
import (
"github.com/prometheus/prometheus/model"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/rules/ast"
"github.com/prometheus/prometheus/storage/metric"
"time"
)
var testSampleInterval = time.Duration(5) * time.Minute
var testStartTime = time.Time{}
func getTestValueStream(startVal model.SampleValue, endVal model.SampleValue, stepVal model.SampleValue, startTime time.Time) (resultValues model.Values) {
func getTestValueStream(startVal clientmodel.SampleValue, endVal clientmodel.SampleValue, stepVal clientmodel.SampleValue, startTime time.Time) (resultValues metric.Values) {
currentTime := startTime
for currentVal := startVal; currentVal <= endVal; currentVal += stepVal {
sample := model.SamplePair{
sample := &metric.SamplePair{
Value: currentVal,
Timestamp: currentTime,
}
@ -40,7 +42,7 @@ func getTestVectorFromTestMatrix(matrix ast.Matrix) ast.Vector {
vector := ast.Vector{}
for _, sampleSet := range matrix {
lastSample := sampleSet.Values[len(sampleSet.Values)-1]
vector = append(vector, model.Sample{
vector = append(vector, &clientmodel.Sample{
Metric: sampleSet.Metric,
Value: lastSample.Value,
Timestamp: lastSample.Timestamp,
@ -50,10 +52,10 @@ func getTestVectorFromTestMatrix(matrix ast.Matrix) ast.Vector {
}
func storeMatrix(storage metric.TieredStorage, matrix ast.Matrix) (err error) {
pendingSamples := model.Samples{}
pendingSamples := clientmodel.Samples{}
for _, sampleSet := range matrix {
for _, sample := range sampleSet.Values {
pendingSamples = append(pendingSamples, model.Sample{
pendingSamples = append(pendingSamples, &clientmodel.Sample{
Metric: sampleSet.Metric,
Value: sample.Value,
Timestamp: sample.Timestamp,
@ -66,96 +68,96 @@ func storeMatrix(storage metric.TieredStorage, matrix ast.Matrix) (err error) {
var testMatrix = ast.Matrix{
{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "api-server",
"instance": "0",
"group": "production",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "api-server",
"instance": "0",
"group": "production",
},
Values: getTestValueStream(0, 100, 10, testStartTime),
},
{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "api-server",
"instance": "1",
"group": "production",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "api-server",
"instance": "1",
"group": "production",
},
Values: getTestValueStream(0, 200, 20, testStartTime),
},
{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "api-server",
"instance": "0",
"group": "canary",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "api-server",
"instance": "0",
"group": "canary",
},
Values: getTestValueStream(0, 300, 30, testStartTime),
},
{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "api-server",
"instance": "1",
"group": "canary",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "api-server",
"instance": "1",
"group": "canary",
},
Values: getTestValueStream(0, 400, 40, testStartTime),
},
{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "app-server",
"instance": "0",
"group": "production",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "app-server",
"instance": "0",
"group": "production",
},
Values: getTestValueStream(0, 500, 50, testStartTime),
},
{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "app-server",
"instance": "1",
"group": "production",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "app-server",
"instance": "1",
"group": "production",
},
Values: getTestValueStream(0, 600, 60, testStartTime),
},
{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "app-server",
"instance": "0",
"group": "canary",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "app-server",
"instance": "0",
"group": "canary",
},
Values: getTestValueStream(0, 700, 70, testStartTime),
},
{
Metric: model.Metric{
model.MetricNameLabel: "http_requests",
model.JobLabel: "app-server",
"instance": "1",
"group": "canary",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "http_requests",
clientmodel.JobLabel: "app-server",
"instance": "1",
"group": "canary",
},
Values: getTestValueStream(0, 800, 80, testStartTime),
},
// Single-letter metric and label names.
{
Metric: model.Metric{
model.MetricNameLabel: "x",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "x",
"y": "testvalue",
},
Values: getTestValueStream(0, 100, 10, testStartTime),
},
// Counter reset in the middle of range.
{
Metric: model.Metric{
model.MetricNameLabel: "testcounter_reset_middle",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "testcounter_reset_middle",
},
Values: append(getTestValueStream(0, 40, 10, testStartTime), getTestValueStream(0, 50, 10, testStartTime.Add(testSampleInterval*5))...),
},
// Counter reset at the end of range.
{
Metric: model.Metric{
model.MetricNameLabel: "testcounter_reset_end",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "testcounter_reset_end",
},
Values: append(getTestValueStream(0, 90, 10, testStartTime), getTestValueStream(0, 0, 10, testStartTime.Add(testSampleInterval*10))...),
},

View file

@ -15,9 +15,10 @@
package rules
import (
"github.com/prometheus/prometheus/model"
"strconv"
"strings"
clientmodel "github.com/prometheus/client_golang/model"
)
%}
@ -58,7 +59,7 @@ avg|sum|max|min|count { yylval.str = strings.ToUpper(yytext); return AGGR_OP
if (err != nil && err.(*strconv.NumError).Err == strconv.ErrSyntax) {
panic("Invalid float")
}
yylval.num = model.SampleValue(num)
yylval.num = clientmodel.SampleValue(num)
return NUMBER }
\"(\\.|[^\\"])*\" { yylval.str = yytext[1:len(yytext) - 1]; return STRING }

View file

@ -1,7 +1,6 @@
// Generated by golex
package rules
import (
"bufio"
"io"
@ -10,9 +9,10 @@ import (
"sort"
)
import (
"github.com/prometheus/prometheus/model"
"strconv"
"strings"
"strconv"
"strings"
clientmodel "github.com/prometheus/client_golang/model"
)
var yyin io.Reader = os.Stdin
@ -32,6 +32,7 @@ type yyactionreturn struct {
}
type yyactionreturntype int
const (
yyRT_FALLTHROUGH yyactionreturntype = iota
yyRT_USER_RETURN
@ -44,6 +45,7 @@ var yyorigidx int
var yytext string = ""
var yytextrepl bool = true
func yymore() {
yytextrepl = false
}
@ -61,6 +63,7 @@ func yyREJECT() {
}
var yylessed int
func yyless(n int) {
yylessed = len(yytext) - n
}
@ -81,13 +84,14 @@ func input() int {
}
var EOF int = -1
type yystartcondition int
var INITIAL yystartcondition = 0
var YY_START yystartcondition = INITIAL
type yylexMatch struct {
index int
index int
matchFunc func() yyactionreturn
sortLen int
advLen int
@ -221,8 +225,9 @@ func yylex() int {
return 0
}
var S_COMMENTS yystartcondition = 1024
var yystartconditionexclmap = map[yystartcondition]bool{S_COMMENTS: true, }
var yystartconditionexclmap = map[yystartcondition]bool{S_COMMENTS: true}
var yyrules []yyrule = []yyrule{{regexp.MustCompile("[^\\n]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
@ -265,7 +270,7 @@ var yyrules []yyrule = []yyrule{{regexp.MustCompile("[^\\n]"), nil, []yystartcon
yyBEGIN(S_COMMENTS)
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\*/"), nil, []yystartcondition{S_COMMENTS, }, false, func() (yyar yyactionreturn) {
}}, {regexp.MustCompile("\\*/"), nil, []yystartcondition{S_COMMENTS}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
@ -278,7 +283,7 @@ var yyrules []yyrule = []yyrule{{regexp.MustCompile("[^\\n]"), nil, []yystartcon
yyBEGIN(0)
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[^\\n]"), nil, []yystartcondition{S_COMMENTS, }, false, func() (yyar yyactionreturn) {
}}, {regexp.MustCompile("[^\\n]"), nil, []yystartcondition{S_COMMENTS}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
@ -506,7 +511,7 @@ var yyrules []yyrule = []yyrule{{regexp.MustCompile("[^\\n]"), nil, []yystartcon
if err != nil && err.(*strconv.NumError).Err == strconv.ErrSyntax {
panic("Invalid float")
}
yylval.num = model.SampleValue(num)
yylval.num = clientmodel.SampleValue(num)
return yyactionreturn{NUMBER, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
@ -575,5 +580,6 @@ var yyrules []yyrule = []yyrule{{regexp.MustCompile("[^\\n]"), nil, []yystartcon
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, }
}}}
func yyactioninline(BEGIN func(yystartcondition)) {}

View file

@ -14,18 +14,16 @@
package rules
import (
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/storage/metric"
"log"
"sync"
"time"
)
type Result struct {
Err error // TODO propagate errors from rule evaluation.
Samples model.Samples
}
"github.com/prometheus/client_golang/extraction"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/storage/metric"
)
type RuleManager interface {
// Load and add rules from rule files specified in the configuration.
@ -45,13 +43,13 @@ type ruleManager struct {
sync.Mutex
rules []Rule
results chan *Result
results chan<- *extraction.Result
done chan bool
interval time.Duration
storage *metric.TieredStorage
}
func NewRuleManager(results chan *Result, interval time.Duration, storage *metric.TieredStorage) RuleManager {
func NewRuleManager(results chan<- *extraction.Result, interval time.Duration, storage *metric.TieredStorage) RuleManager {
manager := &ruleManager{
results: results,
rules: []Rule{},
@ -86,7 +84,7 @@ func (m *ruleManager) Stop() {
}
}
func (m *ruleManager) runIteration(results chan *Result) {
func (m *ruleManager) runIteration(results chan<- *extraction.Result) {
now := time.Now()
wg := sync.WaitGroup{}
@ -101,9 +99,9 @@ func (m *ruleManager) runIteration(results chan *Result) {
go func(rule Rule) {
defer wg.Done()
vector, err := rule.Eval(now, m.storage)
samples := make(model.Samples, len(vector))
samples := make(clientmodel.Samples, len(vector))
copy(samples, vector)
m.results <- &Result{
m.results <- &extraction.Result{
Samples: samples,
Err: err,
}

View file

@ -14,18 +14,21 @@
%{
package rules
import "github.com/prometheus/prometheus/model"
import (
clientmodel "github.com/prometheus/client_golang/model"
import "github.com/prometheus/prometheus/rules/ast"
)
%}
%union {
num model.SampleValue
num clientmodel.SampleValue
str string
ruleNode ast.Node
ruleNodeSlice []ast.Node
boolean bool
labelNameSlice model.LabelNames
labelSet model.LabelSet
labelNameSlice clientmodel.LabelNames
labelSet clientmodel.LabelSet
}
/* We simulate multiple start symbols for closely-related grammars via dummy tokens. See
@ -93,11 +96,11 @@ qualifier : /* empty */
;
rule_labels : /* empty */
{ $$ = model.LabelSet{} }
{ $$ = clientmodel.LabelSet{} }
| '{' label_assign_list '}'
{ $$ = $2 }
| '{' '}'
{ $$ = model.LabelSet{} }
{ $$ = clientmodel.LabelSet{} }
label_assign_list : label_assign
{ $$ = $1 }
@ -106,14 +109,14 @@ label_assign_list : label_assign
;
label_assign : IDENTIFIER '=' STRING
{ $$ = model.LabelSet{ model.LabelName($1): model.LabelValue($3) } }
{ $$ = clientmodel.LabelSet{ clientmodel.LabelName($1): clientmodel.LabelValue($3) } }
;
rule_expr : '(' rule_expr ')'
{ $$ = $2 }
| IDENTIFIER rule_labels
{ $2[model.MetricNameLabel] = model.LabelValue($1); $$ = ast.NewVectorLiteral($2) }
{ $2[clientmodel.MetricNameLabel] = clientmodel.LabelValue($1); $$ = ast.NewVectorLiteral($2) }
| IDENTIFIER '(' func_arg_list ')'
{
var err error
@ -163,15 +166,15 @@ rule_expr : '(' rule_expr ')'
;
grouping_opts :
{ $$ = model.LabelNames{} }
{ $$ = clientmodel.LabelNames{} }
| GROUP_OP '(' label_list ')'
{ $$ = $3 }
;
label_list : IDENTIFIER
{ $$ = model.LabelNames{model.LabelName($1)} }
{ $$ = clientmodel.LabelNames{clientmodel.LabelName($1)} }
| label_list ',' IDENTIFIER
{ $$ = append($$, model.LabelName($3)) }
{ $$ = append($$, clientmodel.LabelName($3)) }
;
func_arg_list : func_arg

View file

@ -1,22 +1,22 @@
//line parser.y:15
package rules
import __yyfmt__ "fmt"
//line parser.y:15
package rules
import __yyfmt__ "fmt"
//line parser.y:15
import "github.com/prometheus/prometheus/model"
import "github.com/prometheus/prometheus/rules/ast"
import clientmodel "github.com/prometheus/client_golang/model"
import "github.com/prometheus/prometheus/rules/ast"
//line parser.y:21
type yySymType struct {
yys int
num model.SampleValue
str string
ruleNode ast.Node
ruleNodeSlice []ast.Node
boolean bool
labelNameSlice model.LabelNames
labelSet model.LabelSet
yys int
num clientmodel.SampleValue
str string
ruleNode ast.Node
ruleNodeSlice []ast.Node
boolean bool
labelNameSlice clientmodel.LabelNames
labelSet clientmodel.LabelSet
}
const START_RULES = 57346
@ -63,7 +63,6 @@ const yyMaxDepth = 200
//line parser.y:188
//line yacctab:1
var yyExca = []int{
-1, 1,
@ -396,133 +395,207 @@ yydefault:
case 5:
//line parser.y:66
{ yylex.(*RulesLexer).parsedExpr = yyS[yypt-0].ruleNode }
{
yylex.(*RulesLexer).parsedExpr = yyS[yypt-0].ruleNode
}
case 6:
//line parser.y:70
{
rule, err := CreateRecordingRule(yyS[yypt-3].str, yyS[yypt-2].labelSet, yyS[yypt-0].ruleNode, yyS[yypt-4].boolean)
if err != nil { yylex.Error(err.Error()); return 1 }
yylex.(*RulesLexer).parsedRules = append(yylex.(*RulesLexer).parsedRules, rule)
}
rule, err := CreateRecordingRule(yyS[yypt-3].str, yyS[yypt-2].labelSet, yyS[yypt-0].ruleNode, yyS[yypt-4].boolean)
if err != nil {
yylex.Error(err.Error())
return 1
}
yylex.(*RulesLexer).parsedRules = append(yylex.(*RulesLexer).parsedRules, rule)
}
case 7:
//line parser.y:76
{
rule, err := CreateAlertingRule(yyS[yypt-5].str, yyS[yypt-3].ruleNode, yyS[yypt-2].str, yyS[yypt-0].labelSet)
if err != nil { yylex.Error(err.Error()); return 1 }
yylex.(*RulesLexer).parsedRules = append(yylex.(*RulesLexer).parsedRules, rule)
}
rule, err := CreateAlertingRule(yyS[yypt-5].str, yyS[yypt-3].ruleNode, yyS[yypt-2].str, yyS[yypt-0].labelSet)
if err != nil {
yylex.Error(err.Error())
return 1
}
yylex.(*RulesLexer).parsedRules = append(yylex.(*RulesLexer).parsedRules, rule)
}
case 8:
//line parser.y:84
{ yyVAL.str = "0s" }
{
yyVAL.str = "0s"
}
case 9:
//line parser.y:86
{ yyVAL.str = yyS[yypt-0].str }
{
yyVAL.str = yyS[yypt-0].str
}
case 10:
//line parser.y:90
{ yyVAL.boolean = false }
{
yyVAL.boolean = false
}
case 11:
//line parser.y:92
{ yyVAL.boolean = true }
{
yyVAL.boolean = true
}
case 12:
//line parser.y:96
{ yyVAL.labelSet = model.LabelSet{} }
{
yyVAL.labelSet = clientmodel.LabelSet{}
}
case 13:
//line parser.y:98
{ yyVAL.labelSet = yyS[yypt-1].labelSet }
{
yyVAL.labelSet = yyS[yypt-1].labelSet
}
case 14:
//line parser.y:100
{ yyVAL.labelSet = model.LabelSet{} }
{
yyVAL.labelSet = clientmodel.LabelSet{}
}
case 15:
//line parser.y:103
{ yyVAL.labelSet = yyS[yypt-0].labelSet }
{
yyVAL.labelSet = yyS[yypt-0].labelSet
}
case 16:
//line parser.y:105
{ for k, v := range yyS[yypt-0].labelSet { yyVAL.labelSet[k] = v } }
{
for k, v := range yyS[yypt-0].labelSet {
yyVAL.labelSet[k] = v
}
}
case 17:
//line parser.y:109
{ yyVAL.labelSet = model.LabelSet{ model.LabelName(yyS[yypt-2].str): model.LabelValue(yyS[yypt-0].str) } }
{
yyVAL.labelSet = clientmodel.LabelSet{clientmodel.LabelName(yyS[yypt-2].str): clientmodel.LabelValue(yyS[yypt-0].str)}
}
case 18:
//line parser.y:114
{ yyVAL.ruleNode = yyS[yypt-1].ruleNode }
{
yyVAL.ruleNode = yyS[yypt-1].ruleNode
}
case 19:
//line parser.y:116
{ yyS[yypt-0].labelSet[model.MetricNameLabel] = model.LabelValue(yyS[yypt-1].str); yyVAL.ruleNode = ast.NewVectorLiteral(yyS[yypt-0].labelSet) }
{
yyS[yypt-0].labelSet[clientmodel.MetricNameLabel] = clientmodel.LabelValue(yyS[yypt-1].str)
yyVAL.ruleNode = ast.NewVectorLiteral(yyS[yypt-0].labelSet)
}
case 20:
//line parser.y:118
{
var err error
yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-3].str, yyS[yypt-1].ruleNodeSlice)
if err != nil { yylex.Error(err.Error()); return 1 }
}
var err error
yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-3].str, yyS[yypt-1].ruleNodeSlice)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 21:
//line parser.y:124
{
var err error
yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-2].str, []ast.Node{})
if err != nil { yylex.Error(err.Error()); return 1 }
}
var err error
yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-2].str, []ast.Node{})
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 22:
//line parser.y:130
{
var err error
yyVAL.ruleNode, err = NewMatrix(yyS[yypt-3].ruleNode, yyS[yypt-1].str)
if err != nil { yylex.Error(err.Error()); return 1 }
}
var err error
yyVAL.ruleNode, err = NewMatrix(yyS[yypt-3].ruleNode, yyS[yypt-1].str)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 23:
//line parser.y:136
{
var err error
yyVAL.ruleNode, err = NewVectorAggregation(yyS[yypt-4].str, yyS[yypt-2].ruleNode, yyS[yypt-0].labelNameSlice)
if err != nil { yylex.Error(err.Error()); return 1 }
}
var err error
yyVAL.ruleNode, err = NewVectorAggregation(yyS[yypt-4].str, yyS[yypt-2].ruleNode, yyS[yypt-0].labelNameSlice)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 24:
//line parser.y:144
{
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil { yylex.Error(err.Error()); return 1 }
}
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 25:
//line parser.y:150
{
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil { yylex.Error(err.Error()); return 1 }
}
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 26:
//line parser.y:156
{
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil { yylex.Error(err.Error()); return 1 }
}
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil {
yylex.Error(err.Error())
return 1
}
}
case 27:
//line parser.y:162
{ yyVAL.ruleNode = ast.NewScalarLiteral(yyS[yypt-0].num)}
{
yyVAL.ruleNode = ast.NewScalarLiteral(yyS[yypt-0].num)
}
case 28:
//line parser.y:166
{ yyVAL.labelNameSlice = model.LabelNames{} }
{
yyVAL.labelNameSlice = clientmodel.LabelNames{}
}
case 29:
//line parser.y:168
{ yyVAL.labelNameSlice = yyS[yypt-1].labelNameSlice }
{
yyVAL.labelNameSlice = yyS[yypt-1].labelNameSlice
}
case 30:
//line parser.y:172
{ yyVAL.labelNameSlice = model.LabelNames{model.LabelName(yyS[yypt-0].str)} }
{
yyVAL.labelNameSlice = clientmodel.LabelNames{clientmodel.LabelName(yyS[yypt-0].str)}
}
case 31:
//line parser.y:174
{ yyVAL.labelNameSlice = append(yyVAL.labelNameSlice, model.LabelName(yyS[yypt-0].str)) }
{
yyVAL.labelNameSlice = append(yyVAL.labelNameSlice, clientmodel.LabelName(yyS[yypt-0].str))
}
case 32:
//line parser.y:178
{ yyVAL.ruleNodeSlice = []ast.Node{yyS[yypt-0].ruleNode} }
{
yyVAL.ruleNodeSlice = []ast.Node{yyS[yypt-0].ruleNode}
}
case 33:
//line parser.y:180
{ yyVAL.ruleNodeSlice = append(yyVAL.ruleNodeSlice, yyS[yypt-0].ruleNode) }
{
yyVAL.ruleNodeSlice = append(yyVAL.ruleNodeSlice, yyS[yypt-0].ruleNode)
}
case 34:
//line parser.y:184
{ yyVAL.ruleNode = yyS[yypt-0].ruleNode }
{
yyVAL.ruleNode = yyS[yypt-0].ruleNode
}
case 35:
//line parser.y:186
{ yyVAL.ruleNode = ast.NewStringLiteral(yyS[yypt-0].str) }
{
yyVAL.ruleNode = ast.NewStringLiteral(yyS[yypt-0].str)
}
}
goto yystack /* stack new state and value */
}

View file

@ -18,7 +18,8 @@ import (
"html/template"
"time"
"github.com/prometheus/prometheus/model"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/rules/ast"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/metric"
@ -28,7 +29,7 @@ import (
type RecordingRule struct {
name string
vector ast.VectorNode
labels model.LabelSet
labels clientmodel.LabelSet
permanent bool
}
@ -47,7 +48,7 @@ func (rule RecordingRule) Eval(timestamp time.Time, storage *metric.TieredStorag
// Override the metric name and labels.
for _, sample := range vector {
sample.Metric[model.MetricNameLabel] = model.LabelValue(rule.name)
sample.Metric[clientmodel.MetricNameLabel] = clientmodel.LabelValue(rule.name)
for label, value := range rule.labels {
if value == "" {
delete(sample.Metric, label)
@ -85,7 +86,7 @@ func (rule RecordingRule) HTMLSnippet() template.HTML {
}
// Construct a new RecordingRule.
func NewRecordingRule(name string, labels model.LabelSet, vector ast.VectorNode, permanent bool) *RecordingRule {
func NewRecordingRule(name string, labels clientmodel.LabelSet, vector ast.VectorNode, permanent bool) *RecordingRule {
return &RecordingRule{
name: name,
labels: labels,

View file

@ -15,15 +15,17 @@ package rules
import (
"fmt"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/rules/ast"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/utility/test"
"path"
"strings"
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/rules/ast"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/metric"
"github.com/prometheus/prometheus/utility/test"
)
var (
@ -528,7 +530,7 @@ func TestAlertingRule(t *testing.T) {
t.Fatalf("Unable to parse alert expression: %s", err)
}
alertName := "HttpRequestRateLow"
alertLabels := model.LabelSet{
alertLabels := clientmodel.LabelSet{
"summary": "HTTP request rate is low",
}
rule := NewAlertingRule(alertName, alertExpr.(ast.VectorNode), time.Minute, alertLabels)

View file

@ -14,16 +14,21 @@
package metric
import (
"code.google.com/p/goprotobuf/proto"
"bytes"
"fmt"
"github.com/prometheus/prometheus/coding"
"github.com/prometheus/prometheus/model"
"strings"
"time"
"code.google.com/p/goprotobuf/proto"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/prometheus/model/generated"
"github.com/prometheus/prometheus/coding"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/raw"
"github.com/prometheus/prometheus/storage/raw/leveldb"
"strings"
"time"
)
// CurationState contains high-level curation state information for the
@ -32,28 +37,7 @@ type CurationState struct {
Active bool
Name string
Limit time.Duration
Fingerprint *model.Fingerprint
}
// watermarkFilter determines whether to include or exclude candidate
// values from the curation process by virtue of how old the high watermark is.
type watermarkFilter struct {
// curationState is the data store for curation remarks.
curationState raw.Persistence
// ignoreYoungerThan conveys this filter's policy of not working on elements
// younger than a given relative time duration. This is persisted to the
// curation remark database (curationState) to indicate how far a given
// policy of this type has progressed.
ignoreYoungerThan time.Duration
// processor is the post-processor that performs whatever action is desired on
// the data that is deemed valid to be worked on.
processor Processor
// stop functions as the global stop channel for all future operations.
stop chan bool
// stopAt is used to determine the elegibility of series for compaction.
stopAt time.Time
// status is the outbound channel for notifying the status page of its state.
status chan CurationState
Fingerprint *clientmodel.Fingerprint
}
// curator is responsible for effectuating a given curation policy across the
@ -66,17 +50,19 @@ type Curator struct {
Stop chan bool
}
// watermarkDecoder converts (dto.Fingerprint, dto.MetricHighWatermark) doubles
// watermarkScanner converts (dto.Fingerprint, dto.MetricHighWatermark) doubles
// into (model.Fingerprint, model.Watermark) doubles.
type watermarkDecoder struct{}
// watermarkOperator scans over the curator.samples table for metrics whose
//
// watermarkScanner determines whether to include or exclude candidate
// values from the curation process by virtue of how old the high watermark is.
//
// watermarkScanner scans over the curator.samples table for metrics whose
// high watermark has been determined to be allowable for curation. This type
// is individually responsible for compaction.
//
// The scanning starts from CurationRemark.LastCompletionTimestamp and goes
// forward until the stop point or end of the series is reached.
type watermarkOperator struct {
type watermarkScanner struct {
// curationState is the data store for curation remarks.
curationState raw.Persistence
// diskFrontier models the available seekable ranges for the provided
@ -93,6 +79,11 @@ type watermarkOperator struct {
samples raw.Persistence
// stopAt is a cue for when to stop mutating a given series.
stopAt time.Time
// stop functions as the global stop channel for all future operations.
stop chan bool
// status is the outbound channel for notifying the status page of its state.
status chan CurationState
}
// run facilitates the curation lifecycle.
@ -101,7 +92,7 @@ type watermarkOperator struct {
// curated.
// curationState is the on-disk store where the curation remarks are made for
// how much progress has been made.
func (c Curator) Run(ignoreYoungerThan time.Duration, instant time.Time, processor Processor, curationState, samples, watermarks *leveldb.LevelDBPersistence, status chan CurationState) (err error) {
func (c *Curator) Run(ignoreYoungerThan time.Duration, instant time.Time, processor Processor, curationState, samples, watermarks *leveldb.LevelDBPersistence, status chan CurationState) (err error) {
defer func(t time.Time) {
duration := float64(time.Since(t) / time.Millisecond)
@ -137,104 +128,89 @@ func (c Curator) Run(ignoreYoungerThan time.Duration, instant time.Time, process
return
}
decoder := watermarkDecoder{}
filter := watermarkFilter{
scanner := &watermarkScanner{
curationState: curationState,
ignoreYoungerThan: ignoreYoungerThan,
processor: processor,
status: status,
stop: c.Stop,
stopAt: instant.Add(-1 * ignoreYoungerThan),
diskFrontier: diskFrontier,
sampleIterator: iterator,
samples: samples,
}
// Right now, the ability to stop a curation is limited to the beginning of
// each fingerprint cycle. It is impractical to cease the work once it has
// begun for a given series.
operator := watermarkOperator{
curationState: curationState,
diskFrontier: diskFrontier,
processor: processor,
ignoreYoungerThan: ignoreYoungerThan,
sampleIterator: iterator,
samples: samples,
stopAt: instant.Add(-1 * ignoreYoungerThan),
}
_, err = watermarks.ForEach(decoder, filter, operator)
_, err = watermarks.ForEach(scanner, scanner, scanner)
return
}
// drain instructs the curator to stop at the next convenient moment as to not
// introduce data inconsistencies.
func (c Curator) Drain() {
func (c *Curator) Drain() {
if len(c.Stop) == 0 {
c.Stop <- true
}
}
func (w watermarkDecoder) DecodeKey(in interface{}) (out interface{}, err error) {
key := &dto.Fingerprint{}
func (w *watermarkScanner) DecodeKey(in interface{}) (interface{}, error) {
key := new(dto.Fingerprint)
bytes := in.([]byte)
err = proto.Unmarshal(bytes, key)
if err != nil {
return
if err := proto.Unmarshal(bytes, key); err != nil {
return nil, err
}
out = model.NewFingerprintFromDTO(key)
fingerprint := new(clientmodel.Fingerprint)
loadFingerprint(fingerprint, key)
return
return fingerprint, nil
}
func (w watermarkDecoder) DecodeValue(in interface{}) (out interface{}, err error) {
dto := &dto.MetricHighWatermark{}
func (w *watermarkScanner) DecodeValue(in interface{}) (interface{}, error) {
value := new(dto.MetricHighWatermark)
bytes := in.([]byte)
err = proto.Unmarshal(bytes, dto)
if err != nil {
return
if err := proto.Unmarshal(bytes, value); err != nil {
return nil, err
}
out = model.NewWatermarkFromHighWatermarkDTO(dto)
watermark := new(watermarks)
watermark.load(value)
return
return watermark, nil
}
func (w watermarkFilter) shouldStop() bool {
func (w *watermarkScanner) shouldStop() bool {
return len(w.stop) != 0
}
func getCurationRemark(states raw.Persistence, processor Processor, ignoreYoungerThan time.Duration, fingerprint *model.Fingerprint) (*model.CurationRemark, error) {
rawSignature, err := processor.Signature()
if err != nil {
return nil, err
}
func (w *watermarkScanner) getCurationRemark(k *curationKey) (r *curationRemark, found bool, err error) {
curationKey := new(dto.CurationKey)
curationValue := new(dto.CurationValue)
curationKey := model.CurationKey{
Fingerprint: fingerprint,
ProcessorMessageRaw: rawSignature,
ProcessorMessageTypeName: processor.Name(),
IgnoreYoungerThan: ignoreYoungerThan,
}.ToDTO()
curationValue := &dto.CurationValue{}
k.dump(curationKey)
present, err := states.Get(curationKey, curationValue)
present, err := w.curationState.Get(curationKey, curationValue)
if err != nil {
return nil, err
return nil, false, err
}
if !present {
return nil, nil
return nil, false, nil
}
remark := model.NewCurationRemarkFromDTO(curationValue)
remark := new(curationRemark)
remark.load(curationValue)
return &remark, nil
return remark, true, nil
}
func (w watermarkFilter) Filter(key, value interface{}) (r storage.FilterResult) {
fingerprint := key.(*model.Fingerprint)
func (w *watermarkScanner) Filter(key, value interface{}) (r storage.FilterResult) {
fingerprint := key.(*clientmodel.Fingerprint)
defer func() {
labels := map[string]string{
@ -244,9 +220,7 @@ func (w watermarkFilter) Filter(key, value interface{}) (r storage.FilterResult)
}
curationFilterOperations.Increment(labels)
}()
defer func() {
select {
case w.status <- CurationState{
Active: true,
@ -263,19 +237,25 @@ func (w watermarkFilter) Filter(key, value interface{}) (r storage.FilterResult)
return storage.STOP
}
curationRemark, err := getCurationRemark(w.curationState, w.processor, w.ignoreYoungerThan, fingerprint)
k := &curationKey{
Fingerprint: fingerprint,
ProcessorMessageRaw: w.processor.Signature(),
ProcessorMessageTypeName: w.processor.Name(),
IgnoreYoungerThan: w.ignoreYoungerThan,
}
curationRemark, present, err := w.getCurationRemark(k)
if err != nil {
return
}
if curationRemark == nil {
r = storage.ACCEPT
return
if !present {
return storage.ACCEPT
}
if !curationRemark.OlderThan(w.stopAt) {
return storage.SKIP
}
watermark := value.(model.Watermark)
if !curationRemark.OlderThan(watermark.Time) {
watermark := value.(*watermarks)
if !curationRemark.OlderThan(watermark.High) {
return storage.SKIP
}
curationConsistent, err := w.curationConsistent(fingerprint, watermark)
@ -291,20 +271,29 @@ func (w watermarkFilter) Filter(key, value interface{}) (r storage.FilterResult)
// curationConsistent determines whether the given metric is in a dirty state
// and needs curation.
func (w watermarkFilter) curationConsistent(f *model.Fingerprint, watermark model.Watermark) (consistent bool, err error) {
curationRemark, err := getCurationRemark(w.curationState, w.processor, w.ignoreYoungerThan, f)
if err != nil {
return
func (w *watermarkScanner) curationConsistent(f *clientmodel.Fingerprint, watermark *watermarks) (bool, error) {
k := &curationKey{
Fingerprint: f,
ProcessorMessageRaw: w.processor.Signature(),
ProcessorMessageTypeName: w.processor.Name(),
IgnoreYoungerThan: w.ignoreYoungerThan,
}
if !curationRemark.OlderThan(watermark.Time) {
consistent = true
curationRemark, present, err := w.getCurationRemark(k)
if err != nil {
return false, err
}
if !present {
return false, nil
}
if !curationRemark.OlderThan(watermark.High) {
return true, nil
}
return
return false, nil
}
func (w watermarkOperator) Operate(key, _ interface{}) (oErr *storage.OperatorError) {
fingerprint := key.(*model.Fingerprint)
func (w *watermarkScanner) Operate(key, _ interface{}) (oErr *storage.OperatorError) {
fingerprint := key.(*clientmodel.Fingerprint)
seriesFrontier, present, err := newSeriesFrontier(fingerprint, w.diskFrontier, w.sampleIterator)
if err != nil || !present {
@ -314,7 +303,14 @@ func (w watermarkOperator) Operate(key, _ interface{}) (oErr *storage.OperatorEr
return &storage.OperatorError{error: err, Continuable: false}
}
curationState, err := getCurationRemark(w.curationState, w.processor, w.ignoreYoungerThan, fingerprint)
k := &curationKey{
Fingerprint: fingerprint,
ProcessorMessageRaw: w.processor.Signature(),
ProcessorMessageTypeName: w.processor.Name(),
IgnoreYoungerThan: w.ignoreYoungerThan,
}
curationState, _, err := w.getCurationRemark(k)
if err != nil {
// An anomaly with the curation remark is likely not fatal in the sense that
// there was a decoding error with the entity and shouldn't be cause to stop
@ -323,12 +319,14 @@ func (w watermarkOperator) Operate(key, _ interface{}) (oErr *storage.OperatorEr
return &storage.OperatorError{error: err, Continuable: true}
}
startKey := model.SampleKey{
startKey := &SampleKey{
Fingerprint: fingerprint,
FirstTimestamp: seriesFrontier.optimalStartTime(curationState),
}
dto := new(dto.SampleKey)
prospectiveKey := coding.NewPBEncoder(startKey.ToDTO()).MustEncode()
startKey.Dump(dto)
prospectiveKey := coding.NewPBEncoder(dto).MustEncode()
if !w.sampleIterator.Seek(prospectiveKey) {
// LevelDB is picky about the seek ranges. If an iterator was invalidated,
// no work may occur, and the iterator cannot be recovered.
@ -358,22 +356,101 @@ func (w watermarkOperator) Operate(key, _ interface{}) (oErr *storage.OperatorEr
return
}
func (w watermarkOperator) refreshCurationRemark(f *model.Fingerprint, finished time.Time) (err error) {
signature, err := w.processor.Signature()
if err != nil {
return
}
curationKey := model.CurationKey{
func (w *watermarkScanner) refreshCurationRemark(f *clientmodel.Fingerprint, finished time.Time) error {
curationKey := curationKey{
Fingerprint: f,
ProcessorMessageRaw: signature,
ProcessorMessageRaw: w.processor.Signature(),
ProcessorMessageTypeName: w.processor.Name(),
IgnoreYoungerThan: w.ignoreYoungerThan,
}.ToDTO()
curationValue := model.CurationRemark{
}
k := new(dto.CurationKey)
curationKey.dump(k)
curationValue := curationRemark{
LastCompletionTimestamp: finished,
}.ToDTO()
}
v := new(dto.CurationValue)
curationValue.dump(v)
err = w.curationState.Put(curationKey, curationValue)
return
return w.curationState.Put(k, v)
}
// curationRemark provides a representation of dto.CurationValue with associated
// business logic methods attached to it to enhance code readability.
type curationRemark struct {
LastCompletionTimestamp time.Time
}
// OlderThan answers whether this curationRemark is older than the provided
// cutOff time.
func (c *curationRemark) OlderThan(t time.Time) bool {
return c.LastCompletionTimestamp.Before(t)
}
// Equal answers whether the two curationRemarks are equivalent.
func (c *curationRemark) Equal(o curationRemark) bool {
return c.LastCompletionTimestamp.Equal(o.LastCompletionTimestamp)
}
func (c *curationRemark) String() string {
return fmt.Sprintf("Last curated at %s", c.LastCompletionTimestamp)
}
func (c *curationRemark) load(d *dto.CurationValue) {
c.LastCompletionTimestamp = time.Unix(d.GetLastCompletionTimestamp(), 0).UTC()
}
func (c *curationRemark) dump(d *dto.CurationValue) {
d.Reset()
d.LastCompletionTimestamp = proto.Int64(c.LastCompletionTimestamp.Unix())
}
// curationKey provides a representation of dto.CurationKey with associated
// business logic methods attached to it to enhance code readability.
type curationKey struct {
Fingerprint *clientmodel.Fingerprint
ProcessorMessageRaw []byte
ProcessorMessageTypeName string
IgnoreYoungerThan time.Duration
}
// Equal answers whether the two curationKeys are equivalent.
func (c *curationKey) Equal(o *curationKey) bool {
switch {
case !c.Fingerprint.Equal(o.Fingerprint):
return false
case bytes.Compare(c.ProcessorMessageRaw, o.ProcessorMessageRaw) != 0:
return false
case c.ProcessorMessageTypeName != o.ProcessorMessageTypeName:
return false
case c.IgnoreYoungerThan != o.IgnoreYoungerThan:
return false
}
return true
}
func (c *curationKey) dump(d *dto.CurationKey) {
d.Reset()
// BUG(matt): Avenue for simplification.
fingerprintDTO := &dto.Fingerprint{}
dumpFingerprint(fingerprintDTO, c.Fingerprint)
d.Fingerprint = fingerprintDTO
d.ProcessorMessageRaw = c.ProcessorMessageRaw
d.ProcessorMessageTypeName = proto.String(c.ProcessorMessageTypeName)
d.IgnoreYoungerThan = proto.Int64(int64(c.IgnoreYoungerThan))
}
func (c *curationKey) load(d *dto.CurationKey) {
// BUG(matt): Avenue for simplification.
c.Fingerprint = &clientmodel.Fingerprint{}
loadFingerprint(c.Fingerprint, d.Fingerprint)
c.ProcessorMessageRaw = d.ProcessorMessageRaw
c.ProcessorMessageTypeName = d.GetProcessorMessageTypeName()
c.IgnoreYoungerThan = time.Duration(d.GetIgnoreYoungerThan())
}

68
storage/metric/dto.go Normal file
View file

@ -0,0 +1,68 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package metric
import (
"sort"
"code.google.com/p/goprotobuf/proto"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/prometheus/model/generated"
)
func dumpFingerprint(d *dto.Fingerprint, f *clientmodel.Fingerprint) {
d.Reset()
d.Signature = proto.String(f.String())
}
func loadFingerprint(f *clientmodel.Fingerprint, d *dto.Fingerprint) {
f.LoadFromString(d.GetSignature())
}
func dumpMetric(d *dto.Metric, m clientmodel.Metric) {
d.Reset()
metricLength := len(m)
labelNames := make([]string, 0, metricLength)
for labelName := range m {
labelNames = append(labelNames, string(labelName))
}
sort.Strings(labelNames)
pairs := make([]*dto.LabelPair, 0, metricLength)
for _, labelName := range labelNames {
l := clientmodel.LabelName(labelName)
labelValue := m[l]
labelPair := &dto.LabelPair{
Name: proto.String(string(labelName)),
Value: proto.String(string(labelValue)),
}
pairs = append(pairs, labelPair)
}
d.LabelPair = pairs
}
func dumpLabelName(d *dto.LabelName, l clientmodel.LabelName) {
d.Reset()
d.Name = proto.String(string(l))
}

View file

@ -14,33 +14,35 @@
package metric
import (
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility/test"
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/utility/test"
)
func GetFingerprintsForLabelSetTests(p MetricPersistence, t test.Tester) {
testAppendSample(p, model.Sample{
testAppendSample(p, &clientmodel.Sample{
Value: 0,
Timestamp: time.Time{},
Metric: model.Metric{
model.MetricNameLabel: "my_metric",
"request_type": "your_mom",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "my_metric",
"request_type": "your_mom",
},
}, t)
testAppendSample(p, model.Sample{
testAppendSample(p, &clientmodel.Sample{
Value: 0,
Timestamp: time.Time{},
Metric: model.Metric{
model.MetricNameLabel: "my_metric",
"request_type": "your_dad",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "my_metric",
"request_type": "your_dad",
},
}, t)
result, err := p.GetFingerprintsForLabelSet(model.LabelSet{
model.MetricNameLabel: model.LabelValue("my_metric"),
result, err := p.GetFingerprintsForLabelSet(clientmodel.LabelSet{
clientmodel.MetricNameLabel: clientmodel.LabelValue("my_metric"),
})
if err != nil {
@ -51,8 +53,8 @@ func GetFingerprintsForLabelSetTests(p MetricPersistence, t test.Tester) {
t.Errorf("Expected two elements.")
}
result, err = p.GetFingerprintsForLabelSet(model.LabelSet{
model.LabelName("request_type"): model.LabelValue("your_mom"),
result, err = p.GetFingerprintsForLabelSet(clientmodel.LabelSet{
clientmodel.LabelName("request_type"): clientmodel.LabelValue("your_mom"),
})
if err != nil {
@ -63,8 +65,8 @@ func GetFingerprintsForLabelSetTests(p MetricPersistence, t test.Tester) {
t.Errorf("Expected one element.")
}
result, err = p.GetFingerprintsForLabelSet(model.LabelSet{
model.LabelName("request_type"): model.LabelValue("your_dad"),
result, err = p.GetFingerprintsForLabelSet(clientmodel.LabelSet{
clientmodel.LabelName("request_type"): clientmodel.LabelValue("your_dad"),
})
if err != nil {
@ -77,27 +79,27 @@ func GetFingerprintsForLabelSetTests(p MetricPersistence, t test.Tester) {
}
func GetFingerprintsForLabelNameTests(p MetricPersistence, t test.Tester) {
testAppendSample(p, model.Sample{
testAppendSample(p, &clientmodel.Sample{
Value: 0,
Timestamp: time.Time{},
Metric: model.Metric{
model.MetricNameLabel: "my_metric",
"request_type": "your_mom",
"language": "english",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "my_metric",
"request_type": "your_mom",
"language": "english",
},
}, t)
testAppendSample(p, model.Sample{
testAppendSample(p, &clientmodel.Sample{
Value: 0,
Timestamp: time.Time{},
Metric: model.Metric{
model.MetricNameLabel: "my_metric",
"request_type": "your_dad",
"sprache": "deutsch",
Metric: clientmodel.Metric{
clientmodel.MetricNameLabel: "my_metric",
"request_type": "your_dad",
"sprache": "deutsch",
},
}, t)
b := model.MetricNameLabel
b := clientmodel.MetricNameLabel
result, err := p.GetFingerprintsForLabelName(b)
if err != nil {
@ -108,7 +110,7 @@ func GetFingerprintsForLabelNameTests(p MetricPersistence, t test.Tester) {
t.Errorf("Expected two elements.")
}
b = model.LabelName("request_type")
b = clientmodel.LabelName("request_type")
result, err = p.GetFingerprintsForLabelName(b)
if err != nil {
@ -119,7 +121,7 @@ func GetFingerprintsForLabelNameTests(p MetricPersistence, t test.Tester) {
t.Errorf("Expected two elements.")
}
b = model.LabelName("language")
b = clientmodel.LabelName("language")
result, err = p.GetFingerprintsForLabelName(b)
if err != nil {
@ -130,7 +132,7 @@ func GetFingerprintsForLabelNameTests(p MetricPersistence, t test.Tester) {
t.Errorf("Expected one element.")
}
b = model.LabelName("sprache")
b = clientmodel.LabelName("sprache")
result, err = p.GetFingerprintsForLabelName(b)
if err != nil {
@ -143,25 +145,25 @@ func GetFingerprintsForLabelNameTests(p MetricPersistence, t test.Tester) {
}
func GetMetricForFingerprintTests(p MetricPersistence, t test.Tester) {
testAppendSample(p, model.Sample{
testAppendSample(p, &clientmodel.Sample{
Value: 0,
Timestamp: time.Time{},
Metric: model.Metric{
Metric: clientmodel.Metric{
"request_type": "your_mom",
},
}, t)
testAppendSample(p, model.Sample{
testAppendSample(p, &clientmodel.Sample{
Value: 0,
Timestamp: time.Time{},
Metric: model.Metric{
Metric: clientmodel.Metric{
"request_type": "your_dad",
"one-off": "value",
},
}, t)
result, err := p.GetFingerprintsForLabelSet(model.LabelSet{
model.LabelName("request_type"): model.LabelValue("your_mom"),
result, err := p.GetFingerprintsForLabelSet(clientmodel.LabelSet{
clientmodel.LabelName("request_type"): clientmodel.LabelValue("your_mom"),
})
if err != nil {
@ -189,8 +191,8 @@ func GetMetricForFingerprintTests(p MetricPersistence, t test.Tester) {
t.Errorf("Expected metric to match.")
}
result, err = p.GetFingerprintsForLabelSet(model.LabelSet{
model.LabelName("request_type"): model.LabelValue("your_dad"),
result, err = p.GetFingerprintsForLabelSet(clientmodel.LabelSet{
clientmodel.LabelName("request_type"): clientmodel.LabelValue("your_dad"),
})
if err != nil {
@ -250,10 +252,10 @@ func GetMetricForFingerprintTests(p MetricPersistence, t test.Tester) {
}
func AppendRepeatingValuesTests(p MetricPersistence, t test.Tester) {
metric := model.Metric{
model.MetricNameLabel: "errors_total",
"controller": "foo",
"operation": "bar",
metric := clientmodel.Metric{
clientmodel.MetricNameLabel: "errors_total",
"controller": "foo",
"operation": "bar",
}
increments := 10
@ -262,8 +264,8 @@ func AppendRepeatingValuesTests(p MetricPersistence, t test.Tester) {
for i := 0; i < increments; i++ {
for j := 0; j < repetitions; j++ {
time := time.Time{}.Add(time.Duration(i) * time.Hour).Add(time.Duration(j) * time.Second)
testAppendSample(p, model.Sample{
Value: model.SampleValue(i),
testAppendSample(p, &clientmodel.Sample{
Value: clientmodel.SampleValue(i),
Timestamp: time,
Metric: metric,
}, t)
@ -275,10 +277,10 @@ func AppendRepeatingValuesTests(p MetricPersistence, t test.Tester) {
return
}
labelSet := model.LabelSet{
model.MetricNameLabel: "errors_total",
"controller": "foo",
"operation": "bar",
labelSet := clientmodel.LabelSet{
clientmodel.MetricNameLabel: "errors_total",
"controller": "foo",
"operation": "bar",
}
for i := 0; i < increments; i++ {
@ -297,7 +299,7 @@ func AppendRepeatingValuesTests(p MetricPersistence, t test.Tester) {
t.Fatal("expected at least one sample.")
}
expected := model.SampleValue(i)
expected := clientmodel.SampleValue(i)
for _, sample := range samples {
if sample.Value != expected {
@ -309,21 +311,21 @@ func AppendRepeatingValuesTests(p MetricPersistence, t test.Tester) {
}
func AppendsRepeatingValuesTests(p MetricPersistence, t test.Tester) {
metric := model.Metric{
model.MetricNameLabel: "errors_total",
"controller": "foo",
"operation": "bar",
metric := clientmodel.Metric{
clientmodel.MetricNameLabel: "errors_total",
"controller": "foo",
"operation": "bar",
}
increments := 10
repetitions := 500
s := model.Samples{}
s := clientmodel.Samples{}
for i := 0; i < increments; i++ {
for j := 0; j < repetitions; j++ {
time := time.Time{}.Add(time.Duration(i) * time.Hour).Add(time.Duration(j) * time.Second)
s = append(s, model.Sample{
Value: model.SampleValue(i),
s = append(s, &clientmodel.Sample{
Value: clientmodel.SampleValue(i),
Timestamp: time,
Metric: metric,
})
@ -337,10 +339,10 @@ func AppendsRepeatingValuesTests(p MetricPersistence, t test.Tester) {
return
}
labelSet := model.LabelSet{
model.MetricNameLabel: "errors_total",
"controller": "foo",
"operation": "bar",
labelSet := clientmodel.LabelSet{
clientmodel.MetricNameLabel: "errors_total",
"controller": "foo",
"operation": "bar",
}
for i := 0; i < increments; i++ {
@ -359,7 +361,7 @@ func AppendsRepeatingValuesTests(p MetricPersistence, t test.Tester) {
t.Fatal("expected at least one sample.")
}
expected := model.SampleValue(i)
expected := clientmodel.SampleValue(i)
for _, sample := range samples {
if sample.Value != expected {

View file

@ -15,12 +15,15 @@ package metric
import (
"fmt"
"time"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/prometheus/model/generated"
"github.com/prometheus/prometheus/coding"
"github.com/prometheus/prometheus/coding/indexable"
"github.com/prometheus/prometheus/model"
dto "github.com/prometheus/prometheus/model/generated"
"github.com/prometheus/prometheus/storage/raw/leveldb"
"time"
)
// diskFrontier describes an on-disk store of series to provide a
@ -29,17 +32,17 @@ import (
// This is used to reduce the burden associated with LevelDB iterator
// management.
type diskFrontier struct {
firstFingerprint *model.Fingerprint
firstFingerprint *clientmodel.Fingerprint
firstSupertime time.Time
lastFingerprint *model.Fingerprint
lastFingerprint *clientmodel.Fingerprint
lastSupertime time.Time
}
func (f diskFrontier) String() string {
return fmt.Sprintf("diskFrontier from %s at %s to %s at %s", f.firstFingerprint.ToRowKey(), f.firstSupertime, f.lastFingerprint.ToRowKey(), f.lastSupertime)
return fmt.Sprintf("diskFrontier from %s at %s to %s at %s", f.firstFingerprint, f.firstSupertime, f.lastFingerprint, f.lastSupertime)
}
func (f diskFrontier) ContainsFingerprint(fingerprint *model.Fingerprint) bool {
func (f *diskFrontier) ContainsFingerprint(fingerprint *clientmodel.Fingerprint) bool {
return !(fingerprint.Less(f.firstFingerprint) || f.lastFingerprint.Less(fingerprint))
}
@ -60,14 +63,12 @@ func newDiskFrontier(i leveldb.Iterator) (d *diskFrontier, present bool, err err
return nil, false, err
}
d = &diskFrontier{}
d.firstFingerprint = firstKey.Fingerprint
d.firstSupertime = firstKey.FirstTimestamp
d.lastFingerprint = lastKey.Fingerprint
d.lastSupertime = lastKey.FirstTimestamp
return d, true, nil
return &diskFrontier{
firstFingerprint: firstKey.Fingerprint,
firstSupertime: firstKey.FirstTimestamp,
lastFingerprint: lastKey.Fingerprint,
lastSupertime: lastKey.FirstTimestamp,
}, true, nil
}
// seriesFrontier represents the valid seek frontier for a given series.
@ -77,13 +78,13 @@ type seriesFrontier struct {
lastTime time.Time
}
func (f seriesFrontier) String() string {
func (f *seriesFrontier) String() string {
return fmt.Sprintf("seriesFrontier from %s to %s at %s", f.firstSupertime, f.lastSupertime, f.lastTime)
}
// newSeriesFrontier furnishes a populated diskFrontier for a given
// fingerprint. If the series is absent, present will be false.
func newSeriesFrontier(f *model.Fingerprint, d *diskFrontier, i leveldb.Iterator) (s *seriesFrontier, present bool, err error) {
func newSeriesFrontier(f *clientmodel.Fingerprint, d *diskFrontier, i leveldb.Iterator) (s *seriesFrontier, present bool, err error) {
lowerSeek := firstSupertime
upperSeek := lastSupertime
@ -104,8 +105,10 @@ func newSeriesFrontier(f *model.Fingerprint, d *diskFrontier, i leveldb.Iterator
}
// TODO: Convert this to SampleKey.ToPartialDTO.
fp := new(dto.Fingerprint)
dumpFingerprint(fp, f)
key := &dto.SampleKey{
Fingerprint: f.ToDTO(),
Fingerprint: fp,
Timestamp: upperSeek,
}
@ -169,14 +172,14 @@ func newSeriesFrontier(f *model.Fingerprint, d *diskFrontier, i leveldb.Iterator
// Contains indicates whether a given time value is within the recorded
// interval.
func (s seriesFrontier) Contains(t time.Time) bool {
func (s *seriesFrontier) Contains(t time.Time) bool {
return !(t.Before(s.firstSupertime) || t.After(s.lastTime))
}
// InSafeSeekRange indicates whether the time is within the recorded time range
// and is safely seekable such that a seek does not result in an iterator point
// after the last value of the series or outside of the entire store.
func (s seriesFrontier) InSafeSeekRange(t time.Time) (safe bool) {
func (s *seriesFrontier) InSafeSeekRange(t time.Time) (safe bool) {
if !s.Contains(t) {
return
}
@ -188,13 +191,13 @@ func (s seriesFrontier) InSafeSeekRange(t time.Time) (safe bool) {
return true
}
func (s seriesFrontier) After(t time.Time) bool {
func (s *seriesFrontier) After(t time.Time) bool {
return s.firstSupertime.After(t)
}
// optimalStartTime indicates what the best start time for a curation operation
// should be given the curation remark.
func (s seriesFrontier) optimalStartTime(remark *model.CurationRemark) (t time.Time) {
func (s *seriesFrontier) optimalStartTime(remark *curationRemark) (t time.Time) {
switch {
case remark == nil:
t = s.firstSupertime

View file

@ -15,9 +15,11 @@ package metric
import (
"fmt"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility/test"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/utility/test"
)
var (
@ -26,7 +28,7 @@ var (
testInstant = time.Date(1972, 7, 18, 19, 5, 45, 0, usEastern).In(time.UTC)
)
func testAppendSample(p MetricPersistence, s model.Sample, t test.Tester) {
func testAppendSample(p MetricPersistence, s *clientmodel.Sample, t test.Tester) {
err := p.AppendSample(s)
if err != nil {
t.Fatal(err)
@ -48,10 +50,12 @@ func buildLevelDBTestPersistencesMaker(name string, t test.Tester) func() (Metri
func buildLevelDBTestPersistence(name string, f func(p MetricPersistence, t test.Tester)) func(t test.Tester) {
return func(t test.Tester) {
temporaryDirectory := test.NewTemporaryDirectory(fmt.Sprintf("test_leveldb_%s", name), t)
defer temporaryDirectory.Close()
p, err := NewLevelDBMetricPersistence(temporaryDirectory.Path())
if err != nil {
t.Errorf("Could not create LevelDB Metric Persistence: %q\n", err)
}
@ -78,12 +82,12 @@ type testTieredStorageCloser struct {
directory test.Closer
}
func (t testTieredStorageCloser) Close() {
func (t *testTieredStorageCloser) Close() {
t.storage.Close()
t.directory.Close()
}
func NewTestTieredStorage(t test.Tester) (storage *TieredStorage, closer test.Closer) {
func NewTestTieredStorage(t test.Tester) (*TieredStorage, test.Closer) {
var directory test.TemporaryDirectory
directory = test.NewTemporaryDirectory("test_tiered_storage", t)
storage, err := NewTieredStorage(2500, 1000, 5*time.Second, 0, directory.Path())
@ -105,9 +109,10 @@ func NewTestTieredStorage(t test.Tester) (storage *TieredStorage, closer test.Cl
go storage.Serve(started)
<-started
closer = &testTieredStorageCloser{
closer := &testTieredStorageCloser{
storage: storage,
directory: directory,
}
return
return storage, closer
}

View file

@ -14,9 +14,11 @@
package metric
import (
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/storage"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/storage"
)
// MetricPersistence is a system for storing metric samples in a persistence
@ -31,31 +33,31 @@ type MetricPersistence interface {
// Flush() error
// Record a new sample in the storage layer.
AppendSample(model.Sample) error
AppendSample(*clientmodel.Sample) error
// Record a group of new samples in the storage layer.
AppendSamples(model.Samples) error
AppendSamples(clientmodel.Samples) error
// Get all of the metric fingerprints that are associated with the provided
// label set.
GetFingerprintsForLabelSet(model.LabelSet) (model.Fingerprints, error)
GetFingerprintsForLabelSet(clientmodel.LabelSet) (clientmodel.Fingerprints, error)
// Get all of the metric fingerprints that are associated for a given label
// name.
GetFingerprintsForLabelName(model.LabelName) (model.Fingerprints, error)
GetFingerprintsForLabelName(clientmodel.LabelName) (clientmodel.Fingerprints, error)
// Get the metric associated with the provided fingerprint.
GetMetricForFingerprint(*model.Fingerprint) (model.Metric, error)
GetMetricForFingerprint(*clientmodel.Fingerprint) (clientmodel.Metric, error)
// Get the two metric values that are immediately adjacent to a given time.
GetValueAtTime(*model.Fingerprint, time.Time) model.Values
GetValueAtTime(*clientmodel.Fingerprint, time.Time) Values
// Get the boundary values of an interval: the first value older than the
// interval start, and the first value younger than the interval end.
GetBoundaryValues(*model.Fingerprint, model.Interval) model.Values
GetBoundaryValues(*clientmodel.Fingerprint, Interval) Values
// Get all values contained within a provided interval.
GetRangeValues(*model.Fingerprint, model.Interval) model.Values
GetRangeValues(*clientmodel.Fingerprint, Interval) Values
// Get all label values that are associated with a given label name.
GetAllValuesForLabel(model.LabelName) (model.LabelValues, error)
GetAllValuesForLabel(clientmodel.LabelName) (clientmodel.LabelValues, error)
// Requests the storage stack to build a materialized View of the values
// contained therein.
@ -65,17 +67,17 @@ type MetricPersistence interface {
// View provides a view of the values in the datastore subject to the request
// of a preloading operation.
type View interface {
GetValueAtTime(*model.Fingerprint, time.Time) model.Values
GetBoundaryValues(*model.Fingerprint, model.Interval) model.Values
GetRangeValues(*model.Fingerprint, model.Interval) model.Values
GetValueAtTime(*clientmodel.Fingerprint, time.Time) Values
GetBoundaryValues(*clientmodel.Fingerprint, Interval) Values
GetRangeValues(*clientmodel.Fingerprint, Interval) Values
// Destroy this view.
Close()
}
type Series interface {
Fingerprint() *model.Fingerprint
Metric() model.Metric
Fingerprint() *clientmodel.Fingerprint
Metric() clientmodel.Metric
}
type IteratorsForFingerprintBuilder interface {

View file

@ -11,29 +11,47 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package model
package metric
import (
clientmodel "github.com/prometheus/client_golang/model"
)
type LabelPair struct {
Name LabelName
Value LabelValue
Name clientmodel.LabelName
Value clientmodel.LabelValue
}
type LabelPairs []LabelPair
func (l *LabelPair) Equal(o *LabelPair) bool {
switch {
case l.Name != o.Name:
return false
case l.Value != o.Value:
return false
default:
return true
}
}
type LabelPairs []*LabelPair
func (l LabelPairs) Len() int {
return len(l)
}
func (l LabelPairs) Less(i, j int) bool {
if l[i].Name < l[j].Name {
switch {
case l[i].Name > l[j].Name:
return false
case l[i].Name < l[j].Name:
return true
}
if l[i].Value < l[j].Value {
case l[i].Value > l[j].Value:
return false
case l[i].Value < l[j].Value:
return true
default:
return false
}
return false
}
func (l LabelPairs) Swap(i, j int) {

View file

@ -11,12 +11,13 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package model
package metric
import (
"github.com/prometheus/prometheus/utility/test"
"sort"
"testing"
"github.com/prometheus/prometheus/utility/test"
)
func testLabelPairs(t test.Tester) {
@ -66,7 +67,7 @@ func testLabelPairs(t test.Tester) {
sort.Sort(scenario.in)
for j, expected := range scenario.out {
if expected != scenario.in[j] {
if !expected.Equal(scenario.in[j]) {
t.Errorf("%d.%d expected %s, got %s", i, j, expected, scenario.in[j])
}
}

View file

@ -23,10 +23,11 @@ import (
"code.google.com/p/goprotobuf/proto"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/prometheus/model/generated"
index "github.com/prometheus/prometheus/storage/raw/index/leveldb"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/raw/leveldb"
"github.com/prometheus/prometheus/utility"
@ -92,7 +93,7 @@ func (l *LevelDBMetricPersistence) Close() {
func NewLevelDBMetricPersistence(baseDirectory string) (*LevelDBMetricPersistence, error) {
workers := utility.NewUncertaintyGroup(7)
emission := &LevelDBMetricPersistence{}
emission := new(LevelDBMetricPersistence)
var subsystemOpeners = []struct {
name string
@ -172,14 +173,14 @@ func NewLevelDBMetricPersistence(baseDirectory string) (*LevelDBMetricPersistenc
return emission, nil
}
func (l *LevelDBMetricPersistence) AppendSample(sample model.Sample) (err error) {
func (l *LevelDBMetricPersistence) AppendSample(sample *clientmodel.Sample) (err error) {
defer func(begin time.Time) {
duration := time.Since(begin)
recordOutcome(duration, err, map[string]string{operation: appendSample, result: success}, map[string]string{operation: appendSample, result: failure})
}(time.Now())
err = l.AppendSamples(model.Samples{sample})
err = l.AppendSamples(clientmodel.Samples{sample})
return
}
@ -187,30 +188,28 @@ func (l *LevelDBMetricPersistence) AppendSample(sample model.Sample) (err error)
// groupByFingerprint collects all of the provided samples, groups them
// together by their respective metric fingerprint, and finally sorts
// them chronologically.
func groupByFingerprint(samples model.Samples) map[model.Fingerprint]model.Samples {
fingerprintToSamples := map[model.Fingerprint]model.Samples{}
func groupByFingerprint(samples clientmodel.Samples) map[clientmodel.Fingerprint]clientmodel.Samples {
fingerprintToSamples := map[clientmodel.Fingerprint]clientmodel.Samples{}
for _, sample := range samples {
fingerprint := *model.NewFingerprintFromMetric(sample.Metric)
samples := fingerprintToSamples[fingerprint]
fingerprint := &clientmodel.Fingerprint{}
fingerprint.LoadFromMetric(sample.Metric)
samples := fingerprintToSamples[*fingerprint]
samples = append(samples, sample)
fingerprintToSamples[fingerprint] = samples
fingerprintToSamples[*fingerprint] = samples
}
sortingSemaphore := make(chan bool, sortConcurrency)
doneSorting := sync.WaitGroup{}
for i := 0; i < sortConcurrency; i++ {
sortingSemaphore <- true
}
for _, samples := range fingerprintToSamples {
doneSorting.Add(1)
<-sortingSemaphore
go func(samples model.Samples) {
sortingSemaphore <- true
go func(samples clientmodel.Samples) {
sort.Sort(samples)
sortingSemaphore <- true
<-sortingSemaphore
doneSorting.Done()
}(samples)
}
@ -223,18 +222,18 @@ func groupByFingerprint(samples model.Samples) map[model.Fingerprint]model.Sampl
// findUnindexedMetrics scours the metric membership index for each given Metric
// in the keyspace and returns a map of Fingerprint-Metric pairs that are
// absent.
func (l *LevelDBMetricPersistence) findUnindexedMetrics(candidates map[model.Fingerprint]model.Metric) (unindexed map[model.Fingerprint]model.Metric, err error) {
func (l *LevelDBMetricPersistence) findUnindexedMetrics(candidates map[clientmodel.Fingerprint]clientmodel.Metric) (unindexed map[clientmodel.Fingerprint]clientmodel.Metric, err error) {
defer func(begin time.Time) {
duration := time.Since(begin)
recordOutcome(duration, err, map[string]string{operation: findUnindexedMetrics, result: success}, map[string]string{operation: findUnindexedMetrics, result: failure})
}(time.Now())
unindexed = make(map[model.Fingerprint]model.Metric)
unindexed = make(map[clientmodel.Fingerprint]clientmodel.Metric)
// Determine which metrics are unknown in the database.
dto := &dto.Metric{}
for fingerprint, metric := range candidates {
dto := model.MetricToDTO(metric)
dumpMetric(dto, metric)
indexHas, err := l.hasIndexMetric(dto)
if err != nil {
return unindexed, err
@ -244,7 +243,7 @@ func (l *LevelDBMetricPersistence) findUnindexedMetrics(candidates map[model.Fin
}
}
return
return unindexed, nil
}
// indexLabelNames accumulates all label name to fingerprint index entries for
@ -252,14 +251,14 @@ func (l *LevelDBMetricPersistence) findUnindexedMetrics(candidates map[model.Fin
// the index to reflect the new state.
//
// This operation is idempotent.
func (l *LevelDBMetricPersistence) indexLabelNames(metrics map[model.Fingerprint]model.Metric) (err error) {
func (l *LevelDBMetricPersistence) indexLabelNames(metrics map[clientmodel.Fingerprint]clientmodel.Metric) (err error) {
defer func(begin time.Time) {
duration := time.Since(begin)
recordOutcome(duration, err, map[string]string{operation: indexLabelNames, result: success}, map[string]string{operation: indexLabelNames, result: failure})
}(time.Now())
labelNameFingerprints := map[model.LabelName]utility.Set{}
labelNameFingerprints := map[clientmodel.LabelName]utility.Set{}
for fingerprint, metric := range metrics {
for labelName := range metric {
@ -286,9 +285,9 @@ func (l *LevelDBMetricPersistence) indexLabelNames(metrics map[model.Fingerprint
defer batch.Close()
for labelName, fingerprintSet := range labelNameFingerprints {
fingerprints := model.Fingerprints{}
fingerprints := clientmodel.Fingerprints{}
for e := range fingerprintSet {
fingerprint := e.(model.Fingerprint)
fingerprint := e.(clientmodel.Fingerprint)
fingerprints = append(fingerprints, &fingerprint)
}
@ -297,9 +296,11 @@ func (l *LevelDBMetricPersistence) indexLabelNames(metrics map[model.Fingerprint
key := &dto.LabelName{
Name: proto.String(string(labelName)),
}
value := &dto.FingerprintCollection{}
value := new(dto.FingerprintCollection)
for _, fingerprint := range fingerprints {
value.Member = append(value.Member, fingerprint.ToDTO())
f := new(dto.Fingerprint)
dumpFingerprint(f, fingerprint)
value.Member = append(value.Member, f)
}
batch.Put(key, value)
@ -318,18 +319,18 @@ func (l *LevelDBMetricPersistence) indexLabelNames(metrics map[model.Fingerprint
// the index to reflect the new state.
//
// This operation is idempotent.
func (l *LevelDBMetricPersistence) indexLabelPairs(metrics map[model.Fingerprint]model.Metric) (err error) {
func (l *LevelDBMetricPersistence) indexLabelPairs(metrics map[clientmodel.Fingerprint]clientmodel.Metric) (err error) {
defer func(begin time.Time) {
duration := time.Since(begin)
recordOutcome(duration, err, map[string]string{operation: indexLabelPairs, result: success}, map[string]string{operation: indexLabelPairs, result: failure})
}(time.Now())
labelPairFingerprints := map[model.LabelPair]utility.Set{}
labelPairFingerprints := map[LabelPair]utility.Set{}
for fingerprint, metric := range metrics {
for labelName, labelValue := range metric {
labelPair := model.LabelPair{
labelPair := LabelPair{
Name: labelName,
Value: labelValue,
}
@ -337,7 +338,7 @@ func (l *LevelDBMetricPersistence) indexLabelPairs(metrics map[model.Fingerprint
if !ok {
fingerprintSet = utility.Set{}
fingerprints, err := l.GetFingerprintsForLabelSet(model.LabelSet{
fingerprints, err := l.GetFingerprintsForLabelSet(clientmodel.LabelSet{
labelName: labelValue,
})
if err != nil {
@ -358,9 +359,9 @@ func (l *LevelDBMetricPersistence) indexLabelPairs(metrics map[model.Fingerprint
defer batch.Close()
for labelPair, fingerprintSet := range labelPairFingerprints {
fingerprints := model.Fingerprints{}
fingerprints := clientmodel.Fingerprints{}
for e := range fingerprintSet {
fingerprint := e.(model.Fingerprint)
fingerprint := e.(clientmodel.Fingerprint)
fingerprints = append(fingerprints, &fingerprint)
}
@ -370,9 +371,11 @@ func (l *LevelDBMetricPersistence) indexLabelPairs(metrics map[model.Fingerprint
Name: proto.String(string(labelPair.Name)),
Value: proto.String(string(labelPair.Value)),
}
value := &dto.FingerprintCollection{}
value := new(dto.FingerprintCollection)
for _, fingerprint := range fingerprints {
value.Member = append(value.Member, fingerprint.ToDTO())
f := new(dto.Fingerprint)
dumpFingerprint(f, fingerprint)
value.Member = append(value.Member, f)
}
batch.Put(key, value)
@ -390,7 +393,7 @@ func (l *LevelDBMetricPersistence) indexLabelPairs(metrics map[model.Fingerprint
// in the index and then bulk updates.
//
// This operation is idempotent.
func (l *LevelDBMetricPersistence) indexFingerprints(metrics map[model.Fingerprint]model.Metric) (err error) {
func (l *LevelDBMetricPersistence) indexFingerprints(metrics map[clientmodel.Fingerprint]clientmodel.Metric) (err error) {
defer func(begin time.Time) {
duration := time.Since(begin)
@ -401,7 +404,11 @@ func (l *LevelDBMetricPersistence) indexFingerprints(metrics map[model.Fingerpri
defer batch.Close()
for fingerprint, metric := range metrics {
batch.Put(fingerprint.ToDTO(), model.MetricToDTO(metric))
f := new(dto.Fingerprint)
dumpFingerprint(f, &fingerprint)
m := &dto.Metric{}
dumpMetric(m, metric)
batch.Put(f, m)
}
err = l.fingerprintToMetrics.Commit(batch)
@ -417,7 +424,7 @@ var existenceIdentity = &dto.MembershipIndexValue{}
// indexMetrics takes groups of samples, determines which ones contain metrics
// that are unknown to the storage stack, and then proceeds to update all
// affected indices.
func (l *LevelDBMetricPersistence) indexMetrics(fingerprints map[model.Fingerprint]model.Metric) (err error) {
func (l *LevelDBMetricPersistence) indexMetrics(fingerprints map[clientmodel.Fingerprint]clientmodel.Metric) (err error) {
defer func(begin time.Time) {
duration := time.Since(begin)
@ -425,7 +432,7 @@ func (l *LevelDBMetricPersistence) indexMetrics(fingerprints map[model.Fingerpri
}(time.Now())
var (
absentMetrics map[model.Fingerprint]model.Metric
absentMetrics map[clientmodel.Fingerprint]clientmodel.Metric
)
absentMetrics, err = l.findUnindexedMetrics(fingerprints)
@ -466,7 +473,9 @@ func (l *LevelDBMetricPersistence) indexMetrics(fingerprints map[model.Fingerpri
defer batch.Close()
for _, metric := range absentMetrics {
batch.Put(model.MetricToDTO(metric), existenceIdentity)
m := &dto.Metric{}
dumpMetric(m, metric)
batch.Put(m, existenceIdentity)
}
err = l.metricMembershipIndex.Commit(batch)
@ -478,7 +487,7 @@ func (l *LevelDBMetricPersistence) indexMetrics(fingerprints map[model.Fingerpri
return
}
func (l *LevelDBMetricPersistence) refreshHighWatermarks(groups map[model.Fingerprint]model.Samples) (err error) {
func (l *LevelDBMetricPersistence) refreshHighWatermarks(groups map[clientmodel.Fingerprint]clientmodel.Samples) (err error) {
defer func(begin time.Time) {
duration := time.Since(begin)
@ -491,7 +500,9 @@ func (l *LevelDBMetricPersistence) refreshHighWatermarks(groups map[model.Finger
value := &dto.MetricHighWatermark{}
for fingerprint, samples := range groups {
value.Reset()
present, err := l.MetricHighWatermarks.Get(fingerprint.ToDTO(), value)
f := new(dto.Fingerprint)
dumpFingerprint(f, &fingerprint)
present, err := l.MetricHighWatermarks.Get(f, value)
if err != nil {
return err
}
@ -500,7 +511,7 @@ func (l *LevelDBMetricPersistence) refreshHighWatermarks(groups map[model.Finger
if !present {
value.Timestamp = proto.Int64(newestSampleTimestamp.Unix())
batch.Put(fingerprint.ToDTO(), value)
batch.Put(f, value)
continue
}
@ -508,7 +519,7 @@ func (l *LevelDBMetricPersistence) refreshHighWatermarks(groups map[model.Finger
// BUG(matt): Repace this with watermark management.
if newestSampleTimestamp.After(time.Unix(value.GetTimestamp(), 0)) {
value.Timestamp = proto.Int64(newestSampleTimestamp.Unix())
batch.Put(fingerprint.ToDTO(), value)
batch.Put(f, value)
}
}
@ -520,7 +531,7 @@ func (l *LevelDBMetricPersistence) refreshHighWatermarks(groups map[model.Finger
return nil
}
func (l *LevelDBMetricPersistence) AppendSamples(samples model.Samples) (err error) {
func (l *LevelDBMetricPersistence) AppendSamples(samples clientmodel.Samples) (err error) {
defer func(begin time.Time) {
duration := time.Since(begin)
@ -531,8 +542,8 @@ func (l *LevelDBMetricPersistence) AppendSamples(samples model.Samples) (err err
indexErrChan := make(chan error, 1)
watermarkErrChan := make(chan error, 1)
go func(groups map[model.Fingerprint]model.Samples) {
metrics := map[model.Fingerprint]model.Metric{}
go func(groups map[clientmodel.Fingerprint]clientmodel.Samples) {
metrics := map[clientmodel.Fingerprint]clientmodel.Metric{}
for fingerprint, samples := range groups {
metrics[fingerprint] = samples[0].Metric
@ -541,7 +552,7 @@ func (l *LevelDBMetricPersistence) AppendSamples(samples model.Samples) (err err
indexErrChan <- l.indexMetrics(metrics)
}(fingerprintToSamples)
go func(groups map[model.Fingerprint]model.Samples) {
go func(groups map[clientmodel.Fingerprint]clientmodel.Samples) {
watermarkErrChan <- l.refreshHighWatermarks(groups)
}(fingerprintToSamples)
@ -564,22 +575,24 @@ func (l *LevelDBMetricPersistence) AppendSamples(samples model.Samples) (err err
chunk := group[0:take]
group = group[take:lengthOfGroup]
key := model.SampleKey{
key := SampleKey{
Fingerprint: &fingerprint,
FirstTimestamp: chunk[0].Timestamp,
LastTimestamp: chunk[take-1].Timestamp,
SampleCount: uint32(take),
}.ToDTO()
}
value := &dto.SampleValueSeries{}
for _, sample := range chunk {
value.Value = append(value.Value, &dto.SampleValueSeries_Value{
Timestamp: proto.Int64(sample.Timestamp.Unix()),
Value: sample.Value.ToDTO(),
Value: proto.Float64(float64(sample.Value)),
})
}
samplesBatch.Put(key, value)
k := &dto.SampleKey{}
key.Dump(k)
samplesBatch.Put(k, value)
}
}
@ -601,48 +614,27 @@ func (l *LevelDBMetricPersistence) AppendSamples(samples model.Samples) (err err
return
}
func extractSampleKey(i leveldb.Iterator) (key model.SampleKey, err error) {
func extractSampleKey(i leveldb.Iterator) (*SampleKey, error) {
k := &dto.SampleKey{}
err = proto.Unmarshal(i.Key(), k)
err := proto.Unmarshal(i.Key(), k)
if err != nil {
return
return nil, err
}
key = model.NewSampleKeyFromDTO(k)
key := &SampleKey{}
key.Load(k)
return
return key, nil
}
func extractSampleValues(i leveldb.Iterator) (values model.Values, err error) {
func extractSampleValues(i leveldb.Iterator) (Values, error) {
v := &dto.SampleValueSeries{}
err = proto.Unmarshal(i.Value(), v)
err := proto.Unmarshal(i.Value(), v)
if err != nil {
return
return nil, err
}
values = model.NewValuesFromDTO(v)
return
}
func fingerprintsEqual(l *dto.Fingerprint, r *dto.Fingerprint) bool {
if l == r {
return true
}
if l == nil && r == nil {
return true
}
if r.Signature == l.Signature {
return true
}
if *r.Signature == *l.Signature {
return true
}
return false
return NewValuesFromDTO(v), nil
}
func (l *LevelDBMetricPersistence) hasIndexMetric(dto *dto.Metric) (value bool, err error) {
@ -681,7 +673,7 @@ func (l *LevelDBMetricPersistence) HasLabelName(dto *dto.LabelName) (value bool,
return
}
func (l *LevelDBMetricPersistence) GetFingerprintsForLabelSet(labelSet model.LabelSet) (fps model.Fingerprints, err error) {
func (l *LevelDBMetricPersistence) GetFingerprintsForLabelSet(labelSet clientmodel.LabelSet) (fps clientmodel.Fingerprints, err error) {
defer func(begin time.Time) {
duration := time.Since(begin)
@ -689,10 +681,17 @@ func (l *LevelDBMetricPersistence) GetFingerprintsForLabelSet(labelSet model.Lab
}(time.Now())
sets := []utility.Set{}
pair := &dto.LabelPair{}
unmarshaled := new(dto.FingerprintCollection)
for _, labelSetDTO := range model.LabelSetToDTOs(&labelSet) {
unmarshaled := &dto.FingerprintCollection{}
present, err := l.labelSetToFingerprints.Get(labelSetDTO, unmarshaled)
for name, value := range labelSet {
pair.Reset()
unmarshaled.Reset()
pair.Name = proto.String(string(name))
pair.Value = proto.String(string(value))
present, err := l.labelSetToFingerprints.Get(pair, unmarshaled)
if err != nil {
return fps, err
}
@ -703,7 +702,8 @@ func (l *LevelDBMetricPersistence) GetFingerprintsForLabelSet(labelSet model.Lab
set := utility.Set{}
for _, m := range unmarshaled.Member {
fp := model.NewFingerprintFromRowKey(*m.Signature)
fp := &clientmodel.Fingerprint{}
loadFingerprint(fp, m)
set.Add(*fp)
}
@ -712,7 +712,7 @@ func (l *LevelDBMetricPersistence) GetFingerprintsForLabelSet(labelSet model.Lab
numberOfSets := len(sets)
if numberOfSets == 0 {
return
return nil, nil
}
base := sets[0]
@ -720,22 +720,24 @@ func (l *LevelDBMetricPersistence) GetFingerprintsForLabelSet(labelSet model.Lab
base = base.Intersection(sets[i])
}
for _, e := range base.Elements() {
fingerprint := e.(model.Fingerprint)
fingerprint := e.(clientmodel.Fingerprint)
fps = append(fps, &fingerprint)
}
return
return fps, nil
}
func (l *LevelDBMetricPersistence) GetFingerprintsForLabelName(labelName model.LabelName) (fps model.Fingerprints, err error) {
func (l *LevelDBMetricPersistence) GetFingerprintsForLabelName(labelName clientmodel.LabelName) (fps clientmodel.Fingerprints, err error) {
defer func(begin time.Time) {
duration := time.Since(begin)
recordOutcome(duration, err, map[string]string{operation: getFingerprintsForLabelName, result: success}, map[string]string{operation: getFingerprintsForLabelName, result: failure})
}(time.Now())
unmarshaled := &dto.FingerprintCollection{}
present, err := l.labelNameToFingerprints.Get(model.LabelNameToDTO(&labelName), unmarshaled)
unmarshaled := new(dto.FingerprintCollection)
d := &dto.LabelName{}
dumpLabelName(d, labelName)
present, err := l.labelNameToFingerprints.Get(d, unmarshaled)
if err != nil {
return nil, err
}
@ -744,14 +746,15 @@ func (l *LevelDBMetricPersistence) GetFingerprintsForLabelName(labelName model.L
}
for _, m := range unmarshaled.Member {
fp := model.NewFingerprintFromRowKey(*m.Signature)
fp := &clientmodel.Fingerprint{}
loadFingerprint(fp, m)
fps = append(fps, fp)
}
return fps, nil
}
func (l *LevelDBMetricPersistence) GetMetricForFingerprint(f *model.Fingerprint) (m model.Metric, err error) {
func (l *LevelDBMetricPersistence) GetMetricForFingerprint(f *clientmodel.Fingerprint) (m clientmodel.Metric, err error) {
defer func(begin time.Time) {
duration := time.Since(begin)
@ -759,7 +762,9 @@ func (l *LevelDBMetricPersistence) GetMetricForFingerprint(f *model.Fingerprint)
}(time.Now())
unmarshaled := &dto.Metric{}
present, err := l.fingerprintToMetrics.Get(model.FingerprintToDTO(f), unmarshaled)
d := new(dto.Fingerprint)
dumpFingerprint(d, f)
present, err := l.fingerprintToMetrics.Get(d, unmarshaled)
if err != nil {
return nil, err
}
@ -767,24 +772,24 @@ func (l *LevelDBMetricPersistence) GetMetricForFingerprint(f *model.Fingerprint)
return nil, nil
}
m = model.Metric{}
m = clientmodel.Metric{}
for _, v := range unmarshaled.LabelPair {
m[model.LabelName(*v.Name)] = model.LabelValue(*v.Value)
m[clientmodel.LabelName(v.GetName())] = clientmodel.LabelValue(v.GetValue())
}
return m, nil
}
func (l LevelDBMetricPersistence) GetValueAtTime(f *model.Fingerprint, t time.Time) model.Values {
func (l *LevelDBMetricPersistence) GetValueAtTime(f *clientmodel.Fingerprint, t time.Time) Values {
panic("Not implemented")
}
func (l LevelDBMetricPersistence) GetBoundaryValues(f *model.Fingerprint, i model.Interval) model.Values {
func (l *LevelDBMetricPersistence) GetBoundaryValues(f *clientmodel.Fingerprint, i Interval) Values {
panic("Not implemented")
}
func (l *LevelDBMetricPersistence) GetRangeValues(f *model.Fingerprint, i model.Interval) model.Values {
func (l *LevelDBMetricPersistence) GetRangeValues(f *clientmodel.Fingerprint, i Interval) Values {
panic("Not implemented")
}
@ -797,9 +802,9 @@ func (d *MetricKeyDecoder) DecodeKey(in interface{}) (out interface{}, err error
return
}
out = model.LabelPair{
Name: model.LabelName(*unmarshaled.Name),
Value: model.LabelValue(*unmarshaled.Value),
out = LabelPair{
Name: clientmodel.LabelName(*unmarshaled.Name),
Value: clientmodel.LabelValue(*unmarshaled.Value),
}
return
@ -810,11 +815,11 @@ func (d *MetricKeyDecoder) DecodeValue(in interface{}) (out interface{}, err err
}
type LabelNameFilter struct {
labelName model.LabelName
labelName clientmodel.LabelName
}
func (f LabelNameFilter) Filter(key, value interface{}) (filterResult storage.FilterResult) {
labelPair, ok := key.(model.LabelPair)
labelPair, ok := key.(LabelPair)
if ok && labelPair.Name == f.labelName {
return storage.ACCEPT
}
@ -822,16 +827,16 @@ func (f LabelNameFilter) Filter(key, value interface{}) (filterResult storage.Fi
}
type CollectLabelValuesOp struct {
labelValues []model.LabelValue
labelValues []clientmodel.LabelValue
}
func (op *CollectLabelValuesOp) Operate(key, value interface{}) (err *storage.OperatorError) {
labelPair := key.(model.LabelPair)
op.labelValues = append(op.labelValues, model.LabelValue(labelPair.Value))
labelPair := key.(LabelPair)
op.labelValues = append(op.labelValues, clientmodel.LabelValue(labelPair.Value))
return
}
func (l *LevelDBMetricPersistence) GetAllValuesForLabel(labelName model.LabelName) (values model.LabelValues, err error) {
func (l *LevelDBMetricPersistence) GetAllValuesForLabel(labelName clientmodel.LabelName) (values clientmodel.LabelValues, err error) {
filter := &LabelNameFilter{
labelName: labelName,
}

View file

@ -18,7 +18,8 @@ import (
"sync"
"time"
"github.com/prometheus/prometheus/model"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/utility"
)
@ -29,49 +30,49 @@ const initialSeriesArenaSize = 4 * 60
// Models a given sample entry stored in the in-memory arena.
type value interface {
// Gets the given value.
get() model.SampleValue
get() clientmodel.SampleValue
}
// Models a single sample value. It presumes that there is either no subsequent
// value seen or that any subsequent values are of a different value.
type singletonValue model.SampleValue
type singletonValue clientmodel.SampleValue
func (v singletonValue) get() model.SampleValue {
return model.SampleValue(v)
func (v singletonValue) get() clientmodel.SampleValue {
return clientmodel.SampleValue(v)
}
type stream struct {
sync.RWMutex
metric model.Metric
values model.Values
metric clientmodel.Metric
values Values
}
func (s *stream) add(timestamp time.Time, value model.SampleValue) {
func (s *stream) add(timestamp time.Time, value clientmodel.SampleValue) {
s.Lock()
defer s.Unlock()
// BUG(all): https://github.com/prometheus/prometheus/pull/265/files#r4336435.
s.values = append(s.values, model.SamplePair{
s.values = append(s.values, &SamplePair{
Timestamp: timestamp,
Value: value,
})
}
func (s *stream) clone() model.Values {
func (s *stream) clone() Values {
s.RLock()
defer s.RUnlock()
// BUG(all): Examine COW technique.
clone := make(model.Values, len(s.values))
clone := make(Values, len(s.values))
copy(clone, s.values)
return clone
}
func (s *stream) getValueAtTime(t time.Time) model.Values {
func (s *stream) getValueAtTime(t time.Time) Values {
s.RLock()
defer s.RUnlock()
@ -79,29 +80,29 @@ func (s *stream) getValueAtTime(t time.Time) model.Values {
l := len(s.values)
switch l {
case 0:
return model.Values{}
return Values{}
case 1:
return model.Values{s.values[0]}
return Values{s.values[0]}
default:
index := sort.Search(l, func(i int) bool {
return !s.values[i].Timestamp.Before(t)
})
if index == 0 {
return model.Values{s.values[0]}
return Values{s.values[0]}
}
if index == l {
return model.Values{s.values[l-1]}
return Values{s.values[l-1]}
}
if s.values[index].Timestamp.Equal(t) {
return model.Values{s.values[index]}
return Values{s.values[index]}
}
return model.Values{s.values[index-1], s.values[index]}
return Values{s.values[index-1], s.values[index]}
}
}
func (s *stream) getBoundaryValues(in model.Interval) model.Values {
func (s *stream) getBoundaryValues(in Interval) Values {
s.RLock()
defer s.RUnlock()
@ -116,15 +117,15 @@ func (s *stream) getBoundaryValues(in model.Interval) model.Values {
resultRange := s.values[oldest:newest]
switch len(resultRange) {
case 0:
return model.Values{}
return Values{}
case 1:
return model.Values{resultRange[0]}
return Values{resultRange[0]}
default:
return model.Values{resultRange[0], resultRange[len(resultRange)-1]}
return Values{resultRange[0], resultRange[len(resultRange)-1]}
}
}
func (s *stream) getRangeValues(in model.Interval) model.Values {
func (s *stream) getRangeValues(in Interval) Values {
s.RLock()
defer s.RUnlock()
@ -136,7 +137,7 @@ func (s *stream) getRangeValues(in model.Interval) model.Values {
return s.values[i].Timestamp.After(in.NewestInclusive)
})
result := make(model.Values, newest-oldest)
result := make(Values, newest-oldest)
copy(result, s.values[oldest:newest])
return result
@ -146,10 +147,10 @@ func (s *stream) empty() bool {
return len(s.values) == 0
}
func newStream(metric model.Metric) *stream {
func newStream(metric clientmodel.Metric) *stream {
return &stream{
metric: metric,
values: make(model.Values, 0, initialSeriesArenaSize),
values: make(Values, 0, initialSeriesArenaSize),
}
}
@ -157,9 +158,9 @@ type memorySeriesStorage struct {
sync.RWMutex
wmCache *WatermarkCache
fingerprintToSeries map[model.Fingerprint]*stream
labelPairToFingerprints map[model.LabelPair]model.Fingerprints
labelNameToFingerprints map[model.LabelName]model.Fingerprints
fingerprintToSeries map[clientmodel.Fingerprint]*stream
labelPairToFingerprints map[LabelPair]clientmodel.Fingerprints
labelNameToFingerprints map[clientmodel.LabelName]clientmodel.Fingerprints
}
type MemorySeriesOptions struct {
@ -168,7 +169,7 @@ type MemorySeriesOptions struct {
WatermarkCache *WatermarkCache
}
func (s *memorySeriesStorage) AppendSamples(samples model.Samples) error {
func (s *memorySeriesStorage) AppendSamples(samples clientmodel.Samples) error {
for _, sample := range samples {
s.AppendSample(sample)
}
@ -176,30 +177,32 @@ func (s *memorySeriesStorage) AppendSamples(samples model.Samples) error {
return nil
}
func (s *memorySeriesStorage) AppendSample(sample model.Sample) error {
func (s *memorySeriesStorage) AppendSample(sample *clientmodel.Sample) error {
s.Lock()
defer s.Unlock()
fingerprint := model.NewFingerprintFromMetric(sample.Metric)
fingerprint := &clientmodel.Fingerprint{}
fingerprint.LoadFromMetric(sample.Metric)
series := s.getOrCreateSeries(sample.Metric, fingerprint)
series.add(sample.Timestamp, sample.Value)
if s.wmCache != nil {
s.wmCache.Set(fingerprint, &Watermarks{High: sample.Timestamp})
s.wmCache.Set(fingerprint, &watermarks{High: sample.Timestamp})
}
return nil
}
func (s *memorySeriesStorage) CreateEmptySeries(metric model.Metric) {
func (s *memorySeriesStorage) CreateEmptySeries(metric clientmodel.Metric) {
s.Lock()
defer s.Unlock()
fingerprint := model.NewFingerprintFromMetric(metric)
fingerprint := &clientmodel.Fingerprint{}
fingerprint.LoadFromMetric(metric)
s.getOrCreateSeries(metric, fingerprint)
}
func (s *memorySeriesStorage) getOrCreateSeries(metric model.Metric, fingerprint *model.Fingerprint) *stream {
func (s *memorySeriesStorage) getOrCreateSeries(metric clientmodel.Metric, fingerprint *clientmodel.Fingerprint) *stream {
series, ok := s.fingerprintToSeries[*fingerprint]
if !ok {
@ -207,7 +210,7 @@ func (s *memorySeriesStorage) getOrCreateSeries(metric model.Metric, fingerprint
s.fingerprintToSeries[*fingerprint] = series
for k, v := range metric {
labelPair := model.LabelPair{
labelPair := LabelPair{
Name: k,
Value: v,
}
@ -223,8 +226,8 @@ func (s *memorySeriesStorage) getOrCreateSeries(metric model.Metric, fingerprint
return series
}
func (s *memorySeriesStorage) Flush(flushOlderThan time.Time, queue chan<- model.Samples) {
emptySeries := []model.Fingerprint{}
func (s *memorySeriesStorage) Flush(flushOlderThan time.Time, queue chan<- clientmodel.Samples) {
emptySeries := clientmodel.Fingerprints{}
s.RLock()
for fingerprint, stream := range s.fingerprintToSeries {
@ -237,10 +240,10 @@ func (s *memorySeriesStorage) Flush(flushOlderThan time.Time, queue chan<- model
i := sort.Search(len(stream.values), finder)
toArchive := stream.values[:i]
toKeep := stream.values[i:]
queued := make(model.Samples, 0, len(toArchive))
queued := make(clientmodel.Samples, 0, len(toArchive))
for _, value := range toArchive {
queued = append(queued, model.Sample{
queued = append(queued, &clientmodel.Sample{
Metric: stream.metric,
Timestamp: value.Timestamp,
Value: value.Value,
@ -255,7 +258,7 @@ func (s *memorySeriesStorage) Flush(flushOlderThan time.Time, queue chan<- model
stream.values = toKeep
if len(toKeep) == 0 {
emptySeries = append(emptySeries, fingerprint)
emptySeries = append(emptySeries, &fingerprint)
}
stream.Unlock()
}
@ -263,21 +266,24 @@ func (s *memorySeriesStorage) Flush(flushOlderThan time.Time, queue chan<- model
s.Lock()
for _, fingerprint := range emptySeries {
if s.fingerprintToSeries[fingerprint].empty() {
s.dropSeries(&fingerprint)
series, _ := s.fingerprintToSeries[*fingerprint]
if series.empty() {
continue
}
s.dropSeries(fingerprint)
}
s.Unlock()
}
// Drop all references to a series, including any samples.
func (s *memorySeriesStorage) dropSeries(fingerprint *model.Fingerprint) {
func (s *memorySeriesStorage) dropSeries(fingerprint *clientmodel.Fingerprint) {
series, ok := s.fingerprintToSeries[*fingerprint]
if !ok {
return
}
for k, v := range series.metric {
labelPair := model.LabelPair{
labelPair := LabelPair{
Name: k,
Value: v,
}
@ -289,14 +295,14 @@ func (s *memorySeriesStorage) dropSeries(fingerprint *model.Fingerprint) {
// Append raw samples, bypassing indexing. Only used to add data to views,
// which don't need to lookup by metric.
func (s *memorySeriesStorage) appendSamplesWithoutIndexing(fingerprint *model.Fingerprint, samples model.Values) {
func (s *memorySeriesStorage) appendSamplesWithoutIndexing(fingerprint *clientmodel.Fingerprint, samples Values) {
s.Lock()
defer s.Unlock()
series, ok := s.fingerprintToSeries[*fingerprint]
if !ok {
series = newStream(model.Metric{})
series = newStream(clientmodel.Metric{})
s.fingerprintToSeries[*fingerprint] = series
}
@ -305,13 +311,13 @@ func (s *memorySeriesStorage) appendSamplesWithoutIndexing(fingerprint *model.Fi
}
}
func (s *memorySeriesStorage) GetFingerprintsForLabelSet(l model.LabelSet) (fingerprints model.Fingerprints, err error) {
func (s *memorySeriesStorage) GetFingerprintsForLabelSet(l clientmodel.LabelSet) (fingerprints clientmodel.Fingerprints, err error) {
s.RLock()
defer s.RUnlock()
sets := []utility.Set{}
for k, v := range l {
values := s.labelPairToFingerprints[model.LabelPair{
values := s.labelPairToFingerprints[LabelPair{
Name: k,
Value: v,
}]
@ -332,14 +338,14 @@ func (s *memorySeriesStorage) GetFingerprintsForLabelSet(l model.LabelSet) (fing
base = base.Intersection(sets[i])
}
for _, e := range base.Elements() {
fingerprint := e.(model.Fingerprint)
fingerprint := e.(clientmodel.Fingerprint)
fingerprints = append(fingerprints, &fingerprint)
}
return fingerprints, nil
}
func (s *memorySeriesStorage) GetFingerprintsForLabelName(l model.LabelName) (model.Fingerprints, error) {
func (s *memorySeriesStorage) GetFingerprintsForLabelName(l clientmodel.LabelName) (clientmodel.Fingerprints, error) {
s.RLock()
defer s.RUnlock()
@ -348,13 +354,13 @@ func (s *memorySeriesStorage) GetFingerprintsForLabelName(l model.LabelName) (mo
return nil, nil
}
fingerprints := make(model.Fingerprints, len(values))
fingerprints := make(clientmodel.Fingerprints, len(values))
copy(fingerprints, values)
return fingerprints, nil
}
func (s *memorySeriesStorage) GetMetricForFingerprint(f *model.Fingerprint) (model.Metric, error) {
func (s *memorySeriesStorage) GetMetricForFingerprint(f *clientmodel.Fingerprint) (clientmodel.Metric, error) {
s.RLock()
defer s.RUnlock()
@ -363,7 +369,7 @@ func (s *memorySeriesStorage) GetMetricForFingerprint(f *model.Fingerprint) (mod
return nil, nil
}
metric := model.Metric{}
metric := clientmodel.Metric{}
for label, value := range series.metric {
metric[label] = value
}
@ -371,7 +377,7 @@ func (s *memorySeriesStorage) GetMetricForFingerprint(f *model.Fingerprint) (mod
return metric, nil
}
func (s *memorySeriesStorage) HasFingerprint(f *model.Fingerprint) bool {
func (s *memorySeriesStorage) HasFingerprint(f *clientmodel.Fingerprint) bool {
s.RLock()
defer s.RUnlock()
@ -380,7 +386,7 @@ func (s *memorySeriesStorage) HasFingerprint(f *model.Fingerprint) bool {
return has
}
func (s *memorySeriesStorage) CloneSamples(f *model.Fingerprint) model.Values {
func (s *memorySeriesStorage) CloneSamples(f *clientmodel.Fingerprint) Values {
s.RLock()
defer s.RUnlock()
@ -392,7 +398,7 @@ func (s *memorySeriesStorage) CloneSamples(f *model.Fingerprint) model.Values {
return series.clone()
}
func (s *memorySeriesStorage) GetValueAtTime(f *model.Fingerprint, t time.Time) model.Values {
func (s *memorySeriesStorage) GetValueAtTime(f *clientmodel.Fingerprint, t time.Time) Values {
s.RLock()
defer s.RUnlock()
@ -404,7 +410,7 @@ func (s *memorySeriesStorage) GetValueAtTime(f *model.Fingerprint, t time.Time)
return series.getValueAtTime(t)
}
func (s *memorySeriesStorage) GetBoundaryValues(f *model.Fingerprint, i model.Interval) model.Values {
func (s *memorySeriesStorage) GetBoundaryValues(f *clientmodel.Fingerprint, i Interval) Values {
s.RLock()
defer s.RUnlock()
@ -416,7 +422,7 @@ func (s *memorySeriesStorage) GetBoundaryValues(f *model.Fingerprint, i model.In
return series.getBoundaryValues(i)
}
func (s *memorySeriesStorage) GetRangeValues(f *model.Fingerprint, i model.Interval) model.Values {
func (s *memorySeriesStorage) GetRangeValues(f *clientmodel.Fingerprint, i Interval) Values {
s.RLock()
defer s.RUnlock()
@ -433,16 +439,16 @@ func (s *memorySeriesStorage) Close() {
s.Lock()
defer s.Unlock()
s.fingerprintToSeries = map[model.Fingerprint]*stream{}
s.labelPairToFingerprints = map[model.LabelPair]model.Fingerprints{}
s.labelNameToFingerprints = map[model.LabelName]model.Fingerprints{}
s.fingerprintToSeries = map[clientmodel.Fingerprint]*stream{}
s.labelPairToFingerprints = map[LabelPair]clientmodel.Fingerprints{}
s.labelNameToFingerprints = map[clientmodel.LabelName]clientmodel.Fingerprints{}
}
func (s *memorySeriesStorage) GetAllValuesForLabel(labelName model.LabelName) (values model.LabelValues, err error) {
func (s *memorySeriesStorage) GetAllValuesForLabel(labelName clientmodel.LabelName) (values clientmodel.LabelValues, err error) {
s.RLock()
defer s.RUnlock()
valueSet := map[model.LabelValue]bool{}
valueSet := map[clientmodel.LabelValue]bool{}
for _, series := range s.fingerprintToSeries {
if value, ok := series.metric[labelName]; ok {
if !valueSet[value] {
@ -457,9 +463,9 @@ func (s *memorySeriesStorage) GetAllValuesForLabel(labelName model.LabelName) (v
func NewMemorySeriesStorage(o MemorySeriesOptions) *memorySeriesStorage {
return &memorySeriesStorage{
fingerprintToSeries: make(map[model.Fingerprint]*stream),
labelPairToFingerprints: make(map[model.LabelPair]model.Fingerprints),
labelNameToFingerprints: make(map[model.LabelName]model.Fingerprints),
fingerprintToSeries: make(map[clientmodel.Fingerprint]*stream),
labelPairToFingerprints: make(map[LabelPair]clientmodel.Fingerprints),
labelNameToFingerprints: make(map[clientmodel.LabelName]clientmodel.Fingerprints),
wmCache: o.WatermarkCache,
}
}

View file

@ -15,20 +15,21 @@ package metric
import (
"fmt"
"github.com/prometheus/prometheus/model"
"runtime"
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
)
func BenchmarkStreamAdd(b *testing.B) {
b.StopTimer()
s := newStream(model.Metric{})
s := newStream(clientmodel.Metric{})
times := make([]time.Time, 0, b.N)
samples := make([]model.SampleValue, 0, b.N)
samples := make([]clientmodel.SampleValue, 0, b.N)
for i := 0; i < b.N; i++ {
times = append(times, time.Date(i, 0, 0, 0, 0, 0, 0, time.UTC))
samples = append(samples, model.SampleValue(i))
samples = append(samples, clientmodel.SampleValue(i))
}
b.StartTimer()
@ -50,16 +51,16 @@ func benchmarkAppendSample(b *testing.B, labels int) {
b.StopTimer()
s := NewMemorySeriesStorage(MemorySeriesOptions{})
metric := model.Metric{}
metric := clientmodel.Metric{}
for i := 0; i < labels; i++ {
metric[model.LabelName(fmt.Sprintf("label_%d", i))] = model.LabelValue(fmt.Sprintf("value_%d", i))
metric[clientmodel.LabelName(fmt.Sprintf("label_%d", i))] = clientmodel.LabelValue(fmt.Sprintf("value_%d", i))
}
samples := make(model.Samples, 0, b.N)
samples := make(clientmodel.Samples, 0, b.N)
for i := 0; i < b.N; i++ {
samples = append(samples, model.Sample{
samples = append(samples, &clientmodel.Sample{
Metric: metric,
Value: model.SampleValue(i),
Value: clientmodel.SampleValue(i),
Timestamp: time.Date(i, 0, 0, 0, 0, 0, 0, time.UTC),
})
}

View file

@ -18,8 +18,6 @@ import (
"math"
"sort"
"time"
"github.com/prometheus/prometheus/model"
)
// Encapsulates a primitive query operation.
@ -27,7 +25,7 @@ type op interface {
// The time at which this operation starts.
StartsAt() time.Time
// Extract samples from stream of values and advance operation time.
ExtractSamples(in model.Values) (out model.Values)
ExtractSamples(in Values) (out Values)
// Get current operation time or nil if no subsequent work associated with
// this operator remains.
CurrentTime() *time.Time
@ -74,7 +72,7 @@ func (g *getValuesAtTimeOp) StartsAt() time.Time {
return g.time
}
func (g *getValuesAtTimeOp) ExtractSamples(in model.Values) (out model.Values) {
func (g *getValuesAtTimeOp) ExtractSamples(in Values) (out Values) {
if len(in) == 0 {
return
}
@ -101,7 +99,7 @@ func (g *getValuesAtTimeOp) GreedierThan(op op) (superior bool) {
// are adjacent to it.
//
// An assumption of this is that the provided samples are already sorted!
func extractValuesAroundTime(t time.Time, in model.Values) (out model.Values) {
func extractValuesAroundTime(t time.Time, in Values) (out Values) {
i := sort.Search(len(in), func(i int) bool {
return !in[i].Timestamp.Before(t)
})
@ -152,7 +150,7 @@ func (g *getValuesAtIntervalOp) Through() time.Time {
return g.through
}
func (g *getValuesAtIntervalOp) ExtractSamples(in model.Values) (out model.Values) {
func (g *getValuesAtIntervalOp) ExtractSamples(in Values) (out Values) {
if len(in) == 0 {
return
}
@ -212,7 +210,7 @@ func (g *getValuesAlongRangeOp) Through() time.Time {
return g.through
}
func (g *getValuesAlongRangeOp) ExtractSamples(in model.Values) (out model.Values) {
func (g *getValuesAlongRangeOp) ExtractSamples(in Values) (out Values) {
if len(in) == 0 {
return
}

View file

@ -18,7 +18,6 @@ import (
"testing"
"time"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility/test"
)
@ -1203,8 +1202,8 @@ func BenchmarkOptimize(b *testing.B) {
func TestGetValuesAtTimeOp(t *testing.T) {
var scenarios = []struct {
op getValuesAtTimeOp
in model.Values
out model.Values
in Values
out Values
}{
// No values.
{
@ -1217,13 +1216,13 @@ func TestGetValuesAtTimeOp(t *testing.T) {
op: getValuesAtTimeOp{
time: testInstant,
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1235,13 +1234,13 @@ func TestGetValuesAtTimeOp(t *testing.T) {
op: getValuesAtTimeOp{
time: testInstant.Add(1 * time.Minute),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1253,13 +1252,13 @@ func TestGetValuesAtTimeOp(t *testing.T) {
op: getValuesAtTimeOp{
time: testInstant.Add(2 * time.Minute),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1271,7 +1270,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
op: getValuesAtTimeOp{
time: testInstant,
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1281,7 +1280,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1293,7 +1292,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
op: getValuesAtTimeOp{
time: testInstant.Add(1 * time.Minute),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1303,7 +1302,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1315,7 +1314,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
op: getValuesAtTimeOp{
time: testInstant.Add(90 * time.Second),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1325,7 +1324,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1341,7 +1340,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
op: getValuesAtTimeOp{
time: testInstant.Add(2 * time.Minute),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1351,7 +1350,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1367,7 +1366,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
op: getValuesAtTimeOp{
time: testInstant.Add(3 * time.Minute),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1377,7 +1376,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(2 * time.Minute),
Value: 1,
@ -1385,6 +1384,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
},
},
}
for i, scenario := range scenarios {
actual := scenario.op.ExtractSamples(scenario.in)
if len(actual) != len(scenario.out) {
@ -1392,7 +1392,7 @@ func TestGetValuesAtTimeOp(t *testing.T) {
t.Fatalf("%d. expected length %d, got %d", i, len(scenario.out), len(actual))
}
for j, out := range scenario.out {
if out != actual[j] {
if !out.Equal(actual[j]) {
t.Fatalf("%d. expected output %v, got %v", i, scenario.out, actual)
}
}
@ -1402,8 +1402,8 @@ func TestGetValuesAtTimeOp(t *testing.T) {
func TestGetValuesAtIntervalOp(t *testing.T) {
var scenarios = []struct {
op getValuesAtIntervalOp
in model.Values
out model.Values
in Values
out Values
}{
// No values.
{
@ -1420,7 +1420,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
through: testInstant.Add(1 * time.Minute),
interval: 30 * time.Second,
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(2 * time.Minute),
Value: 1,
@ -1430,7 +1430,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(2 * time.Minute),
Value: 1,
@ -1444,7 +1444,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
through: testInstant.Add(2 * time.Minute),
interval: 30 * time.Second,
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1454,7 +1454,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1472,7 +1472,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
through: testInstant.Add(2 * time.Minute),
interval: 30 * time.Second,
},
in: model.Values{
in: Values{
{
Timestamp: testInstant,
Value: 1,
@ -1486,7 +1486,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1504,7 +1504,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
through: testInstant.Add(3 * time.Minute),
interval: 30 * time.Second,
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1514,7 +1514,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1532,7 +1532,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
through: testInstant.Add(4 * time.Minute),
interval: 30 * time.Second,
},
in: model.Values{
in: Values{
{
Timestamp: testInstant,
Value: 1,
@ -1550,7 +1550,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(2 * time.Minute),
Value: 1,
@ -1568,7 +1568,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
through: testInstant.Add(3 * time.Minute),
interval: 30 * time.Second,
},
in: model.Values{
in: Values{
{
Timestamp: testInstant,
Value: 1,
@ -1578,7 +1578,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1596,7 +1596,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
through: testInstant.Add(4 * time.Minute),
interval: 3 * time.Minute,
},
in: model.Values{
in: Values{
{
Timestamp: testInstant,
Value: 1,
@ -1614,7 +1614,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant,
Value: 1,
@ -1647,7 +1647,7 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
}
for j, out := range scenario.out {
if out != actual[j] {
if !out.Equal(actual[j]) {
t.Fatalf("%d. expected output %v, got %v", i, scenario.out, actual)
}
}
@ -1657,8 +1657,8 @@ func TestGetValuesAtIntervalOp(t *testing.T) {
func TestGetValuesAlongRangeOp(t *testing.T) {
var scenarios = []struct {
op getValuesAlongRangeOp
in model.Values
out model.Values
in Values
out Values
}{
// No values.
{
@ -1673,7 +1673,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
from: testInstant,
through: testInstant.Add(1 * time.Minute),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(2 * time.Minute),
Value: 1,
@ -1683,7 +1683,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{},
out: Values{},
},
// Operator range starts before first value, ends within available values.
{
@ -1691,7 +1691,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
from: testInstant,
through: testInstant.Add(2 * time.Minute),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1701,7 +1701,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1714,7 +1714,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
from: testInstant.Add(1 * time.Minute),
through: testInstant.Add(2 * time.Minute),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant,
Value: 1,
@ -1728,7 +1728,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1741,7 +1741,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
from: testInstant,
through: testInstant.Add(3 * time.Minute),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1751,7 +1751,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(1 * time.Minute),
Value: 1,
@ -1768,7 +1768,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
from: testInstant.Add(2 * time.Minute),
through: testInstant.Add(4 * time.Minute),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant,
Value: 1,
@ -1786,7 +1786,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{
out: Values{
{
Timestamp: testInstant.Add(2 * time.Minute),
Value: 1,
@ -1803,7 +1803,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
from: testInstant.Add(2 * time.Minute),
through: testInstant.Add(3 * time.Minute),
},
in: model.Values{
in: Values{
{
Timestamp: testInstant,
Value: 1,
@ -1813,7 +1813,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
Value: 1,
},
},
out: model.Values{},
out: Values{},
},
}
for i, scenario := range scenarios {
@ -1823,7 +1823,7 @@ func TestGetValuesAlongRangeOp(t *testing.T) {
t.Fatalf("%d. expected length %d, got %d", i, len(scenario.out), len(actual))
}
for j, out := range scenario.out {
if out != actual[j] {
if !out.Equal(actual[j]) {
t.Fatalf("%d. expected output %v, got %v", i, scenario.out, actual)
}
}

View file

@ -19,9 +19,10 @@ import (
"code.google.com/p/goprotobuf/proto"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/prometheus/model/generated"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/storage/raw"
"github.com/prometheus/prometheus/storage/raw/leveldb"
)
@ -35,7 +36,7 @@ type Processor interface {
Name() string
// Signature emits a byte signature for this process for the purpose of
// remarking how far along it has been applied to the database.
Signature() (signature []byte, err error)
Signature() []byte
// Apply runs this processor against the sample set. sampleIterator expects
// to be pre-seeked to the initial starting position. The processor will
// run until up until stopAt has been reached. It is imperative that the
@ -43,7 +44,7 @@ type Processor interface {
//
// Upon completion or error, the last time at which the processor finished
// shall be emitted in addition to any errors.
Apply(sampleIterator leveldb.Iterator, samplesPersistence raw.Persistence, stopAt time.Time, fingerprint *model.Fingerprint) (lastCurated time.Time, err error)
Apply(sampleIterator leveldb.Iterator, samplesPersistence raw.Persistence, stopAt time.Time, fingerprint *clientmodel.Fingerprint) (lastCurated time.Time, err error)
}
// CompactionProcessor combines sparse values in the database together such
@ -63,29 +64,30 @@ type CompactionProcessor struct {
signature []byte
}
func (p CompactionProcessor) Name() string {
func (p *CompactionProcessor) Name() string {
return "io.prometheus.CompactionProcessorDefinition"
}
func (p *CompactionProcessor) Signature() (out []byte, err error) {
func (p *CompactionProcessor) Signature() []byte {
if len(p.signature) == 0 {
out, err = proto.Marshal(&dto.CompactionProcessorDefinition{
out, err := proto.Marshal(&dto.CompactionProcessorDefinition{
MinimumGroupSize: proto.Uint32(uint32(p.MinimumGroupSize)),
})
if err != nil {
panic(err)
}
p.signature = out
}
out = p.signature
return
return p.signature
}
func (p CompactionProcessor) String() string {
func (p *CompactionProcessor) String() string {
return fmt.Sprintf("compactionProcessor for minimum group size %d", p.MinimumGroupSize)
}
func (p CompactionProcessor) Apply(sampleIterator leveldb.Iterator, samplesPersistence raw.Persistence, stopAt time.Time, fingerprint *model.Fingerprint) (lastCurated time.Time, err error) {
func (p *CompactionProcessor) Apply(sampleIterator leveldb.Iterator, samplesPersistence raw.Persistence, stopAt time.Time, fingerprint *clientmodel.Fingerprint) (lastCurated time.Time, err error) {
var pendingBatch raw.Batch = nil
defer func() {
@ -95,9 +97,9 @@ func (p CompactionProcessor) Apply(sampleIterator leveldb.Iterator, samplesPersi
}()
var pendingMutations = 0
var pendingSamples model.Values
var sampleKey model.SampleKey
var unactedSamples model.Values
var pendingSamples Values
var sampleKey *SampleKey
var unactedSamples Values
var lastTouchedTime time.Time
var keyDropped bool
@ -151,27 +153,36 @@ func (p CompactionProcessor) Apply(sampleIterator leveldb.Iterator, samplesPersi
case len(pendingSamples) == 0 && len(unactedSamples) >= p.MinimumGroupSize:
lastTouchedTime = unactedSamples[len(unactedSamples)-1].Timestamp
unactedSamples = model.Values{}
unactedSamples = Values{}
case len(pendingSamples)+len(unactedSamples) < p.MinimumGroupSize:
if !keyDropped {
pendingBatch.Drop(sampleKey.ToDTO())
k := new(dto.SampleKey)
sampleKey.Dump(k)
pendingBatch.Drop(k)
keyDropped = true
}
pendingSamples = append(pendingSamples, unactedSamples...)
lastTouchedTime = unactedSamples[len(unactedSamples)-1].Timestamp
unactedSamples = model.Values{}
unactedSamples = Values{}
pendingMutations++
// If the number of pending writes equals the target group size
case len(pendingSamples) == p.MinimumGroupSize:
k := new(dto.SampleKey)
newSampleKey := pendingSamples.ToSampleKey(fingerprint)
pendingBatch.Put(newSampleKey.ToDTO(), pendingSamples.ToDTO())
newSampleKey.Dump(k)
b := new(dto.SampleValueSeries)
pendingSamples.dump(b)
pendingBatch.Put(k, b)
pendingMutations++
lastCurated = newSampleKey.FirstTimestamp.In(time.UTC)
if len(unactedSamples) > 0 {
if !keyDropped {
pendingBatch.Drop(sampleKey.ToDTO())
sampleKey.Dump(k)
pendingBatch.Drop(k)
keyDropped = true
}
@ -182,13 +193,15 @@ func (p CompactionProcessor) Apply(sampleIterator leveldb.Iterator, samplesPersi
} else {
pendingSamples = unactedSamples
lastTouchedTime = pendingSamples[len(pendingSamples)-1].Timestamp
unactedSamples = model.Values{}
unactedSamples = Values{}
}
}
case len(pendingSamples)+len(unactedSamples) >= p.MinimumGroupSize:
if !keyDropped {
pendingBatch.Drop(sampleKey.ToDTO())
k := new(dto.SampleKey)
sampleKey.Dump(k)
pendingBatch.Drop(k)
keyDropped = true
}
remainder := p.MinimumGroupSize - len(pendingSamples)
@ -207,9 +220,13 @@ func (p CompactionProcessor) Apply(sampleIterator leveldb.Iterator, samplesPersi
if len(unactedSamples) > 0 || len(pendingSamples) > 0 {
pendingSamples = append(pendingSamples, unactedSamples...)
k := new(dto.SampleKey)
newSampleKey := pendingSamples.ToSampleKey(fingerprint)
pendingBatch.Put(newSampleKey.ToDTO(), pendingSamples.ToDTO())
pendingSamples = model.Values{}
newSampleKey.Dump(k)
b := new(dto.SampleValueSeries)
pendingSamples.dump(b)
pendingBatch.Put(k, b)
pendingSamples = Values{}
pendingMutations++
lastCurated = newSampleKey.FirstTimestamp.In(time.UTC)
}
@ -237,27 +254,29 @@ type DeletionProcessor struct {
signature []byte
}
func (p DeletionProcessor) Name() string {
func (p *DeletionProcessor) Name() string {
return "io.prometheus.DeletionProcessorDefinition"
}
func (p *DeletionProcessor) Signature() (out []byte, err error) {
func (p *DeletionProcessor) Signature() []byte {
if len(p.signature) == 0 {
out, err = proto.Marshal(&dto.DeletionProcessorDefinition{})
out, err := proto.Marshal(&dto.DeletionProcessorDefinition{})
if err != nil {
panic(err)
}
p.signature = out
}
out = p.signature
return
return p.signature
}
func (p DeletionProcessor) String() string {
func (p *DeletionProcessor) String() string {
return "deletionProcessor"
}
func (p DeletionProcessor) Apply(sampleIterator leveldb.Iterator, samplesPersistence raw.Persistence, stopAt time.Time, fingerprint *model.Fingerprint) (lastCurated time.Time, err error) {
func (p *DeletionProcessor) Apply(sampleIterator leveldb.Iterator, samplesPersistence raw.Persistence, stopAt time.Time, fingerprint *clientmodel.Fingerprint) (lastCurated time.Time, err error) {
var pendingBatch raw.Batch = nil
defer func() {
@ -315,20 +334,28 @@ func (p DeletionProcessor) Apply(sampleIterator leveldb.Iterator, samplesPersist
pendingBatch = nil
case !sampleKey.MayContain(stopAt):
pendingBatch.Drop(sampleKey.ToDTO())
k := &dto.SampleKey{}
sampleKey.Dump(k)
pendingBatch.Drop(k)
lastCurated = sampleKey.LastTimestamp
sampleValues = model.Values{}
sampleValues = Values{}
pendingMutations++
case sampleKey.MayContain(stopAt):
pendingBatch.Drop(sampleKey.ToDTO())
k := &dto.SampleKey{}
sampleKey.Dump(k)
pendingBatch.Drop(k)
pendingMutations++
sampleValues = sampleValues.TruncateBefore(stopAt)
if len(sampleValues) > 0 {
k := &dto.SampleKey{}
sampleKey = sampleValues.ToSampleKey(fingerprint)
sampleKey.Dump(k)
v := &dto.SampleValueSeries{}
sampleValues.dump(v)
lastCurated = sampleKey.FirstTimestamp
pendingBatch.Put(sampleKey.ToDTO(), sampleValues.ToDTO())
pendingBatch.Put(k, v)
pendingMutations++
} else {
lastCurated = sampleKey.LastTimestamp

View file

@ -20,10 +20,11 @@ import (
"code.google.com/p/goprotobuf/proto"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/prometheus/model/generated"
fixture "github.com/prometheus/prometheus/storage/raw/leveldb/test"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/storage/raw/leveldb"
)
@ -41,7 +42,7 @@ type watermarkState struct {
type sampleGroup struct {
fingerprint string
values model.Values
values Values
}
type in struct {
@ -59,41 +60,59 @@ type out struct {
}
func (c curationState) Get() (key, value proto.Message) {
signature, err := c.processor.Signature()
if err != nil {
panic(err)
}
key = model.CurationKey{
Fingerprint: model.NewFingerprintFromRowKey(c.fingerprint),
signature := c.processor.Signature()
fingerprint := &clientmodel.Fingerprint{}
fingerprint.LoadFromString(c.fingerprint)
keyRaw := curationKey{
Fingerprint: fingerprint,
ProcessorMessageRaw: signature,
ProcessorMessageTypeName: c.processor.Name(),
IgnoreYoungerThan: c.ignoreYoungerThan,
}.ToDTO()
}
value = model.CurationRemark{
k := &dto.CurationKey{}
keyRaw.dump(k)
key = k
valueRaw := curationRemark{
LastCompletionTimestamp: c.lastCurated,
}.ToDTO()
}
v := &dto.CurationValue{}
valueRaw.dump(v)
return
return k, v
}
func (w watermarkState) Get() (key, value proto.Message) {
key = model.NewFingerprintFromRowKey(w.fingerprint).ToDTO()
value = model.NewWatermarkFromTime(w.lastAppended).ToMetricHighWatermarkDTO()
return
fingerprint := &clientmodel.Fingerprint{}
fingerprint.LoadFromString(w.fingerprint)
k := &dto.Fingerprint{}
dumpFingerprint(k, fingerprint)
v := &dto.MetricHighWatermark{}
rawValue := &watermarks{
High: w.lastAppended,
}
rawValue.dump(v)
return k, v
}
func (s sampleGroup) Get() (key, value proto.Message) {
key = model.SampleKey{
Fingerprint: model.NewFingerprintFromRowKey(s.fingerprint),
fingerprint := &clientmodel.Fingerprint{}
fingerprint.LoadFromString(s.fingerprint)
keyRaw := SampleKey{
Fingerprint: fingerprint,
FirstTimestamp: s.values[0].Timestamp,
LastTimestamp: s.values[len(s.values)-1].Timestamp,
SampleCount: uint32(len(s.values)),
}.ToDTO()
}
k := &dto.SampleKey{}
keyRaw.Dump(k)
value = s.values.ToDTO()
v := &dto.SampleValueSeries{}
s.values.dump(v)
return
return k, v
}
func TestCuratorCompactionProcessor(t *testing.T) {
@ -152,7 +171,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroups: fixture.Pairs{
sampleGroup{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 90 * time.Minute),
Value: 0,
@ -177,7 +196,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 65 * time.Minute),
Value: 0.25,
@ -202,7 +221,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 40 * time.Minute),
Value: 0.50,
@ -219,7 +238,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 25 * time.Minute),
Value: 0.75,
@ -228,7 +247,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 20 * time.Minute),
Value: -2,
@ -237,7 +256,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 15 * time.Minute),
Value: -3,
@ -247,7 +266,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 1
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 90 * time.Minute),
Value: 0,
@ -257,7 +276,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 1
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 89 * time.Minute),
Value: 1,
@ -267,7 +286,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 1
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 88 * time.Minute),
Value: 2,
@ -277,7 +296,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 1
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 87 * time.Minute),
Value: 3,
@ -287,7 +306,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 1
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 86 * time.Minute),
Value: 4,
@ -297,7 +316,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 2
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 85 * time.Minute),
Value: 5,
@ -307,7 +326,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 2
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 84 * time.Minute),
Value: 6,
@ -317,7 +336,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 2
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 83 * time.Minute),
Value: 7,
@ -327,7 +346,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 2
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 82 * time.Minute),
Value: 8,
@ -337,7 +356,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 2
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 81 * time.Minute),
Value: 9,
@ -347,7 +366,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 3
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 80 * time.Minute),
Value: 10,
@ -357,7 +376,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 3
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 79 * time.Minute),
Value: 11,
@ -367,7 +386,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 3
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 78 * time.Minute),
Value: 12,
@ -377,7 +396,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 3
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 77 * time.Minute),
Value: 13,
@ -387,7 +406,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Blocks 3 and 4 and 5
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
// Block 3
Timestamp: testInstant.Add(-1 * 76 * time.Minute),
@ -428,7 +447,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 5
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 69 * time.Minute),
Value: 21,
@ -438,7 +457,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 5
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 68 * time.Minute),
Value: 22,
@ -448,7 +467,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 5
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 67 * time.Minute),
Value: 23,
@ -458,7 +477,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 5
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 66 * time.Minute),
Value: 24,
@ -468,7 +487,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 6
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 65 * time.Minute),
Value: 25,
@ -478,7 +497,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 6
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 64 * time.Minute),
Value: 26,
@ -488,7 +507,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 6
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 63 * time.Minute),
Value: 27,
@ -498,7 +517,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 6
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 62 * time.Minute),
Value: 28,
@ -508,7 +527,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 6
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 61 * time.Minute),
Value: 29,
@ -518,7 +537,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroup{
// Moved into Block 7
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 60 * time.Minute),
Value: 30,
@ -560,7 +579,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
sampleGroups: []sampleGroup{
{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 90 * time.Minute),
Value: 0,
@ -585,7 +604,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 65 * time.Minute),
Value: 0.25,
@ -610,7 +629,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 40 * time.Minute),
Value: 0.50,
@ -627,7 +646,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 25 * time.Minute),
Value: 0.75,
@ -636,7 +655,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 20 * time.Minute),
Value: -2,
@ -645,7 +664,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 15 * time.Minute),
Value: -3,
@ -655,7 +674,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
{
// Block 1
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 90 * time.Minute),
Value: 0,
@ -681,7 +700,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
{
// Block 2
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 85 * time.Minute),
Value: 5,
@ -707,7 +726,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
{
// Block 3
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 80 * time.Minute),
Value: 10,
@ -732,7 +751,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 75 * time.Minute),
Value: 15,
@ -757,7 +776,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 70 * time.Minute),
Value: 20,
@ -782,7 +801,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 65 * time.Minute),
Value: 25,
@ -807,7 +826,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
},
{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 60 * time.Minute),
Value: 30,
@ -888,17 +907,16 @@ func TestCuratorCompactionProcessor(t *testing.T) {
if err != nil {
t.Fatalf("%d.%d. could not unmarshal: %s", i, j, err)
}
actualKey := &curationKey{}
actualKey.load(curationKeyDto)
actualCurationRemark := &curationRemark{}
actualCurationRemark.load(curationValueDto)
signature := expected.processor.Signature()
curationKey := model.NewCurationKeyFromDTO(curationKeyDto)
actualCurationRemark := model.NewCurationRemarkFromDTO(curationValueDto)
signature, err := expected.processor.Signature()
if err != nil {
t.Fatal(err)
}
actualKey := curationKey
expectedKey := model.CurationKey{
Fingerprint: model.NewFingerprintFromRowKey(expected.fingerprint),
expectedFingerprint := &clientmodel.Fingerprint{}
expectedFingerprint.LoadFromString(expected.fingerprint)
expectedKey := &curationKey{
Fingerprint: expectedFingerprint,
IgnoreYoungerThan: expected.ignoreYoungerThan,
ProcessorMessageRaw: signature,
ProcessorMessageTypeName: expected.processor.Name(),
@ -906,7 +924,7 @@ func TestCuratorCompactionProcessor(t *testing.T) {
if !actualKey.Equal(expectedKey) {
t.Fatalf("%d.%d. expected %s, got %s", i, j, expectedKey, actualKey)
}
expectedCurationRemark := model.CurationRemark{
expectedCurationRemark := curationRemark{
LastCompletionTimestamp: expected.lastCurated,
}
if !actualCurationRemark.Equal(expectedCurationRemark) {
@ -938,7 +956,9 @@ func TestCuratorCompactionProcessor(t *testing.T) {
t.Fatalf("%d.%d. error %s", i, j, err)
}
if !model.NewFingerprintFromRowKey(expected.fingerprint).Equal(sampleKey.Fingerprint) {
expectedFingerprint := &clientmodel.Fingerprint{}
expectedFingerprint.LoadFromString(expected.fingerprint)
if !expectedFingerprint.Equal(sampleKey.Fingerprint) {
t.Fatalf("%d.%d. expected fingerprint %s, got %s", i, j, expected.fingerprint, sampleKey.Fingerprint)
}
@ -1014,7 +1034,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
sampleGroups: fixture.Pairs{
sampleGroup{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 90 * time.Minute),
Value: 90,
@ -1027,7 +1047,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 15 * time.Minute),
Value: 15,
@ -1036,7 +1056,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 90 * time.Minute),
Value: 0,
@ -1045,7 +1065,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 89 * time.Minute),
Value: 1,
@ -1054,7 +1074,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 88 * time.Minute),
Value: 2,
@ -1063,7 +1083,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 87 * time.Minute),
Value: 3,
@ -1072,7 +1092,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 86 * time.Minute),
Value: 4,
@ -1081,7 +1101,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 85 * time.Minute),
Value: 5,
@ -1090,7 +1110,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 84 * time.Minute),
Value: 6,
@ -1099,7 +1119,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 83 * time.Minute),
Value: 7,
@ -1108,7 +1128,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 82 * time.Minute),
Value: 8,
@ -1117,7 +1137,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 81 * time.Minute),
Value: 9,
@ -1126,7 +1146,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 80 * time.Minute),
Value: 10,
@ -1135,7 +1155,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 79 * time.Minute),
Value: 11,
@ -1144,7 +1164,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 78 * time.Minute),
Value: 12,
@ -1153,7 +1173,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 77 * time.Minute),
Value: 13,
@ -1162,7 +1182,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 76 * time.Minute),
Value: 14,
@ -1195,7 +1215,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 69 * time.Minute),
Value: 21,
@ -1204,7 +1224,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 68 * time.Minute),
Value: 22,
@ -1213,7 +1233,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 67 * time.Minute),
Value: 23,
@ -1222,7 +1242,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 66 * time.Minute),
Value: 24,
@ -1231,7 +1251,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 65 * time.Minute),
Value: 25,
@ -1240,7 +1260,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 64 * time.Minute),
Value: 26,
@ -1249,7 +1269,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 63 * time.Minute),
Value: 27,
@ -1258,7 +1278,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 62 * time.Minute),
Value: 28,
@ -1267,7 +1287,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 61 * time.Minute),
Value: 29,
@ -1276,7 +1296,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
sampleGroup{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 60 * time.Minute),
Value: 30,
@ -1307,7 +1327,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
sampleGroups: []sampleGroup{
{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 30 * time.Minute),
Value: 30,
@ -1316,7 +1336,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
{
fingerprint: "0001-A-1-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 15 * time.Minute),
Value: 15,
@ -1325,7 +1345,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
},
{
fingerprint: "0002-A-2-Z",
values: model.Values{
values: Values{
{
Timestamp: testInstant.Add(-1 * 60 * time.Minute),
Value: 30,
@ -1407,16 +1427,16 @@ func TestCuratorDeletionProcessor(t *testing.T) {
t.Fatalf("%d.%d. could not unmarshal: %s", i, j, err)
}
curationKey := model.NewCurationKeyFromDTO(curationKeyDto)
actualCurationRemark := model.NewCurationRemarkFromDTO(curationValueDto)
signature, err := expected.processor.Signature()
if err != nil {
t.Fatal(err)
}
actualKey := &curationKey{}
actualKey.load(curationKeyDto)
actualCurationRemark := &curationRemark{}
actualCurationRemark.load(curationValueDto)
signature := expected.processor.Signature()
actualKey := curationKey
expectedKey := model.CurationKey{
Fingerprint: model.NewFingerprintFromRowKey(expected.fingerprint),
expectedFingerprint := &clientmodel.Fingerprint{}
expectedFingerprint.LoadFromString(expected.fingerprint)
expectedKey := &curationKey{
Fingerprint: expectedFingerprint,
IgnoreYoungerThan: expected.ignoreYoungerThan,
ProcessorMessageRaw: signature,
ProcessorMessageTypeName: expected.processor.Name(),
@ -1424,7 +1444,7 @@ func TestCuratorDeletionProcessor(t *testing.T) {
if !actualKey.Equal(expectedKey) {
t.Fatalf("%d.%d. expected %s, got %s", i, j, expectedKey, actualKey)
}
expectedCurationRemark := model.CurationRemark{
expectedCurationRemark := curationRemark{
LastCompletionTimestamp: expected.lastCurated,
}
if !actualCurationRemark.Equal(expectedCurationRemark) {
@ -1456,7 +1476,9 @@ func TestCuratorDeletionProcessor(t *testing.T) {
t.Fatalf("%d.%d. error %s", i, j, err)
}
if !model.NewFingerprintFromRowKey(expected.fingerprint).Equal(sampleKey.Fingerprint) {
expectedFingerprint := &clientmodel.Fingerprint{}
expectedFingerprint.LoadFromString(expected.fingerprint)
if !expectedFingerprint.Equal(sampleKey.Fingerprint) {
t.Fatalf("%d.%d. expected fingerprint %s, got %s", i, j, expected.fingerprint, sampleKey.Fingerprint)
}

View file

@ -14,38 +14,40 @@
package metric
import (
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility/test"
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/utility/test"
)
func GetFingerprintsForLabelSetUsesAndForLabelMatchingTests(p MetricPersistence, t test.Tester) {
metrics := []model.LabelSet{
{model.MetricNameLabel: "request_metrics_latency_equal_tallying_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
{model.MetricNameLabel: "requests_metrics_latency_equal_accumulating_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
{model.MetricNameLabel: "requests_metrics_latency_logarithmic_accumulating_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
{model.MetricNameLabel: "requests_metrics_latency_logarithmic_tallying_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
{model.MetricNameLabel: "targets_healthy_scrape_latency_ms", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
metrics := []clientmodel.LabelSet{
{clientmodel.MetricNameLabel: "request_metrics_latency_equal_tallying_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
{clientmodel.MetricNameLabel: "requests_metrics_latency_equal_accumulating_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
{clientmodel.MetricNameLabel: "requests_metrics_latency_logarithmic_accumulating_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
{clientmodel.MetricNameLabel: "requests_metrics_latency_logarithmic_tallying_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
{clientmodel.MetricNameLabel: "targets_healthy_scrape_latency_ms", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
}
for _, metric := range metrics {
m := model.Metric{}
m := clientmodel.Metric{}
for k, v := range metric {
m[model.LabelName(k)] = model.LabelValue(v)
m[clientmodel.LabelName(k)] = clientmodel.LabelValue(v)
}
testAppendSample(p, model.Sample{
Value: model.SampleValue(0.0),
testAppendSample(p, &clientmodel.Sample{
Value: clientmodel.SampleValue(0.0),
Timestamp: time.Now(),
Metric: m,
}, t)
}
labelSet := model.LabelSet{
model.MetricNameLabel: "targets_healthy_scrape_latency_ms",
"percentile": "0.010000",
labelSet := clientmodel.LabelSet{
clientmodel.MetricNameLabel: "targets_healthy_scrape_latency_ms",
"percentile": "0.010000",
}
fingerprints, err := p.GetFingerprintsForLabelSet(labelSet)

View file

@ -14,10 +14,12 @@
package metric
import (
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility/test"
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/utility/test"
)
func GetValueAtTimeTests(persistenceMaker func() (MetricPersistence, test.Closer), t test.Tester) {
@ -26,7 +28,7 @@ func GetValueAtTimeTests(persistenceMaker func() (MetricPersistence, test.Closer
month time.Month
day int
hour int
value model.SampleValue
value clientmodel.SampleValue
}
type input struct {
@ -36,7 +38,7 @@ func GetValueAtTimeTests(persistenceMaker func() (MetricPersistence, test.Closer
hour int
}
type output []model.SampleValue
type output []clientmodel.SampleValue
type behavior struct {
name string
@ -320,13 +322,13 @@ func GetValueAtTimeTests(persistenceMaker func() (MetricPersistence, test.Closer
defer closer.Close()
defer p.Close()
m := model.Metric{
model.MetricNameLabel: "age_in_years",
m := clientmodel.Metric{
clientmodel.MetricNameLabel: "age_in_years",
}
for _, value := range context.values {
testAppendSample(p, model.Sample{
Value: model.SampleValue(value.value),
testAppendSample(p, &clientmodel.Sample{
Value: clientmodel.SampleValue(value.value),
Timestamp: time.Date(value.year, value.month, value.day, value.hour, 0, 0, 0, time.UTC),
Metric: m,
}, t)
@ -335,8 +337,9 @@ func GetValueAtTimeTests(persistenceMaker func() (MetricPersistence, test.Closer
for j, behavior := range context.behaviors {
input := behavior.input
time := time.Date(input.year, input.month, input.day, input.hour, 0, 0, 0, time.UTC)
actual := p.GetValueAtTime(model.NewFingerprintFromMetric(m), time)
fingerprint := &clientmodel.Fingerprint{}
fingerprint.LoadFromMetric(m)
actual := p.GetValueAtTime(fingerprint, time)
if len(behavior.output) != len(actual) {
t.Fatalf("%d.%d(%s.%s). Expected %d samples but got: %v\n", i, j, context.name, behavior.name, len(behavior.output), actual)
@ -358,7 +361,7 @@ func GetRangeValuesTests(persistenceMaker func() (MetricPersistence, test.Closer
month time.Month
day int
hour int
value model.SampleValue
value clientmodel.SampleValue
}
type input struct {
@ -377,7 +380,7 @@ func GetRangeValuesTests(persistenceMaker func() (MetricPersistence, test.Closer
month time.Month
day int
hour int
value model.SampleValue
value clientmodel.SampleValue
}
type behavior struct {
@ -811,13 +814,13 @@ func GetRangeValuesTests(persistenceMaker func() (MetricPersistence, test.Closer
defer closer.Close()
defer p.Close()
m := model.Metric{
model.MetricNameLabel: "age_in_years",
m := clientmodel.Metric{
clientmodel.MetricNameLabel: "age_in_years",
}
for _, value := range context.values {
testAppendSample(p, model.Sample{
Value: model.SampleValue(value.value),
testAppendSample(p, &clientmodel.Sample{
Value: clientmodel.SampleValue(value.value),
Timestamp: time.Date(value.year, value.month, value.day, value.hour, 0, 0, 0, time.UTC),
Metric: m,
}, t)
@ -827,14 +830,15 @@ func GetRangeValuesTests(persistenceMaker func() (MetricPersistence, test.Closer
input := behavior.input
open := time.Date(input.openYear, input.openMonth, input.openDay, input.openHour, 0, 0, 0, time.UTC)
end := time.Date(input.endYear, input.endMonth, input.endDay, input.endHour, 0, 0, 0, time.UTC)
in := model.Interval{
in := Interval{
OldestInclusive: open,
NewestInclusive: end,
}
actualValues := model.Values{}
actualValues := Values{}
expectedValues := []output{}
fp := model.NewFingerprintFromMetric(m)
fp := &clientmodel.Fingerprint{}
fp.LoadFromMetric(m)
if onlyBoundaries {
actualValues = p.GetBoundaryValues(fp, in)
l := len(behavior.output)
@ -865,7 +869,7 @@ func GetRangeValuesTests(persistenceMaker func() (MetricPersistence, test.Closer
for k, actual := range actualValues {
expected := expectedValues[k]
if actual.Value != model.SampleValue(expected.value) {
if actual.Value != clientmodel.SampleValue(expected.value) {
t.Fatalf("%d.%d.%d(%s). Expected %v but got: %v\n", i, j, k, behavior.name, expected.value, actual.Value)
}

170
storage/metric/sample.go Normal file
View file

@ -0,0 +1,170 @@
package metric
import (
"bytes"
"fmt"
"sort"
"time"
"code.google.com/p/goprotobuf/proto"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/prometheus/model/generated"
)
// MarshalJSON implements json.Marshaler, rendering the pair as a small JSON
// object with the value formatted as a quoted decimal string and the
// timestamp as Unix seconds.
func (s SamplePair) MarshalJSON() ([]byte, error) {
	encoded := fmt.Sprintf(`{"Value": "%f", "Timestamp": %d}`, s.Value, s.Timestamp.Unix())
	return []byte(encoded), nil
}
// SamplePair pairs a sample value with the time at which it was observed.
type SamplePair struct {
	Value     clientmodel.SampleValue
	Timestamp time.Time
}
// Equal reports whether s and o carry the same value and the same timestamp.
// Two identical pointers (including two nils) compare equal; a nil and a
// non-nil pair do not.
func (s *SamplePair) Equal(o *SamplePair) bool {
	if s == o {
		return true
	}
	// Guard against a nil-pointer dereference when exactly one side is nil;
	// the original dereferenced o (and s) unconditionally.
	if s == nil || o == nil {
		return false
	}
	return s.Value.Equal(o.Value) && s.Timestamp.Equal(o.Timestamp)
}
// dump serializes this pair into the provided DTO, clearing any prior state
// first. The timestamp is stored at Unix-second resolution.
func (s *SamplePair) dump(d *dto.SampleValueSeries_Value) {
	d.Reset()

	d.Value = proto.Float64(float64(s.Value))
	d.Timestamp = proto.Int64(s.Timestamp.Unix())
}
// String implements fmt.Stringer for human-readable debugging output.
func (s *SamplePair) String() string {
	rendered := fmt.Sprintf("SamplePair at %s of %s", s.Timestamp, s.Value)
	return rendered
}
// Values is a slice of sample pairs for a single series, expected to be in
// ascending timestamp order (see Less, InsideInterval, TruncateBefore).
type Values []*SamplePair
// Len implements sort.Interface.
func (v Values) Len() int {
	return len(v)
}
// Less implements sort.Interface; ordering is by ascending timestamp.
func (v Values) Less(i, j int) bool {
	return v[i].Timestamp.Before(v[j].Timestamp)
}
// Swap implements sort.Interface.
func (v Values) Swap(i, j int) {
	v[i], v[j] = v[j], v[i]
}
// Equal reports whether v and o have the same length and pairwise-equal
// elements (per SamplePair.Equal).
func (v Values) Equal(o Values) bool {
	if len(v) != len(o) {
		return false
	}
	for i := range v {
		if !v[i].Equal(o[i]) {
			return false
		}
	}
	return true
}
// FirstTimeAfter indicates whether the first sample of a set is after a given
// timestamp.
//
// NOTE(review): panics on an empty set — callers must guarantee len(v) > 0.
func (v Values) FirstTimeAfter(t time.Time) bool {
	first := v[0].Timestamp
	return first.After(t)
}
// LastTimeBefore indicates whether the last sample of a set is before a given
// timestamp.
//
// NOTE(review): panics on an empty set — callers must guarantee len(v) > 0.
func (v Values) LastTimeBefore(t time.Time) bool {
	last := v[len(v)-1].Timestamp
	return last.Before(t)
}
// InsideInterval indicates whether a given range of sorted values could contain
// a value for a given time.
//
// NOTE(review): as written this yields true only when t is no earlier than the
// first sample AND strictly after the last sample; a t lying between the first
// and last timestamps yields false. That appears at odds with the name and the
// sentence above — confirm the intended semantics with callers before touching
// the logic (this rewrite preserves the original behavior exactly).
func (v Values) InsideInterval(t time.Time) bool {
	if v.Len() == 0 {
		return false
	}
	if t.Before(v[0].Timestamp) {
		return false
	}
	if !v[v.Len()-1].Timestamp.Before(t) {
		return false
	}
	return true
}
// TruncateBefore returns a subslice of the original such that extraneous
// samples in the collection that occur before the provided time are dropped.
// The original slice is not mutated; the result aliases its backing array.
func (v Values) TruncateBefore(t time.Time) Values {
	// Binary-search for the first element whose timestamp is >= t; everything
	// before that index is discarded.
	first := sort.Search(len(v), func(i int) bool {
		return !v[i].Timestamp.Before(t)
	})
	return v[first:]
}
// dump serializes the whole series into the provided DTO, clearing any prior
// state first and appending one DTO value per sample pair.
func (v Values) dump(d *dto.SampleValueSeries) {
	d.Reset()

	for _, pair := range v {
		e := &dto.SampleValueSeries_Value{}
		pair.dump(e)
		d.Value = append(d.Value, e)
	}
}
// ToSampleKey summarizes this series chunk for the given fingerprint: its
// first and last timestamps and its sample count.
//
// NOTE(review): panics on an empty set — callers must guarantee len(v) > 0.
func (v Values) ToSampleKey(f *clientmodel.Fingerprint) *SampleKey {
	n := len(v)
	return &SampleKey{
		Fingerprint:    f,
		FirstTimestamp: v[0].Timestamp,
		LastTimestamp:  v[n-1].Timestamp,
		SampleCount:    uint32(n),
	}
}
// String renders the series as a bracketed, newline-separated, index-prefixed
// list, e.g. "[0. <pair>\n1. <pair>]".
func (v Values) String() string {
	var buf bytes.Buffer
	buf.WriteString("[")
	for i, pair := range v {
		// Separator precedes every element but the first — same bytes as the
		// original "newline after every element but the last".
		if i > 0 {
			buf.WriteString("\n")
		}
		fmt.Fprintf(&buf, "%d. %s", i, pair)
	}
	buf.WriteString("]")
	return buf.String()
}
// NewValuesFromDTO materializes a Values series from its DTO form; timestamps
// are interpreted as Unix seconds in UTC.
func NewValuesFromDTO(d *dto.SampleValueSeries) Values {
	// BUG(matt): Incongruent with the other load/dump API types, but much more
	// performant.
	result := make(Values, 0, len(d.Value))
	for _, e := range d.Value {
		result = append(result, &SamplePair{
			Timestamp: time.Unix(e.GetTimestamp(), 0).UTC(),
			Value:     clientmodel.SampleValue(e.GetValue()),
		})
	}

	return result
}
// SampleSet bundles a metric with a series of its sampled values.
type SampleSet struct {
	Metric clientmodel.Metric
	Values Values
}
// Interval describes a closed time interval: both endpoints are inclusive.
type Interval struct {
	OldestInclusive time.Time
	NewestInclusive time.Time
}

View file

@ -11,20 +11,25 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package model
package metric
import (
"code.google.com/p/goprotobuf/proto"
"fmt"
"github.com/prometheus/prometheus/coding/indexable"
dto "github.com/prometheus/prometheus/model/generated"
"time"
"code.google.com/p/goprotobuf/proto"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/prometheus/model/generated"
"github.com/prometheus/prometheus/coding/indexable"
)
// SampleKey models the business logic around the data-transfer object
// SampleKey.
type SampleKey struct {
Fingerprint *Fingerprint
Fingerprint *clientmodel.Fingerprint
FirstTimestamp time.Time
LastTimestamp time.Time
SampleCount uint32
@ -33,7 +38,7 @@ type SampleKey struct {
// MayContain indicates whether the given SampleKey could potentially contain a
// value at the provided time. Even if true is emitted, that does not mean a
// satisfactory value, in fact, exists.
func (s SampleKey) MayContain(t time.Time) bool {
func (s *SampleKey) MayContain(t time.Time) bool {
switch {
case t.Before(s.FirstTimestamp):
return false
@ -45,40 +50,39 @@ func (s SampleKey) MayContain(t time.Time) bool {
}
// ToDTO converts this SampleKey into a DTO for use in serialization purposes.
func (s SampleKey) ToDTO() (out *dto.SampleKey) {
out = &dto.SampleKey{
Fingerprint: s.Fingerprint.ToDTO(),
Timestamp: indexable.EncodeTime(s.FirstTimestamp),
LastTimestamp: proto.Int64(s.LastTimestamp.Unix()),
SampleCount: proto.Uint32(s.SampleCount),
}
func (s *SampleKey) Dump(d *dto.SampleKey) {
d.Reset()
fp := &dto.Fingerprint{}
dumpFingerprint(fp, s.Fingerprint)
return
d.Fingerprint = fp
d.Timestamp = indexable.EncodeTime(s.FirstTimestamp)
d.LastTimestamp = proto.Int64(s.LastTimestamp.Unix())
d.SampleCount = proto.Uint32(s.SampleCount)
}
// ToPartialDTO converts this SampleKey into a DTO that is only suitable for
// database exploration purposes for a given (Fingerprint, First Sample Time)
// tuple.
func (s SampleKey) ToPartialDTO(out *dto.SampleKey) {
out = &dto.SampleKey{
Fingerprint: s.Fingerprint.ToDTO(),
Timestamp: indexable.EncodeTime(s.FirstTimestamp),
}
func (s *SampleKey) FOOdumpPartial(d *dto.SampleKey) {
d.Reset()
return
f := &dto.Fingerprint{}
dumpFingerprint(f, s.Fingerprint)
d.Fingerprint = f
d.Timestamp = indexable.EncodeTime(s.FirstTimestamp)
}
func (s SampleKey) String() string {
func (s *SampleKey) String() string {
return fmt.Sprintf("SampleKey for %s at %s to %s with %d values.", s.Fingerprint, s.FirstTimestamp, s.LastTimestamp, s.SampleCount)
}
// NewSampleKeyFromDTO builds a new SampleKey from a provided data-transfer
// object.
func NewSampleKeyFromDTO(dto *dto.SampleKey) SampleKey {
return SampleKey{
Fingerprint: NewFingerprintFromDTO(dto.Fingerprint),
FirstTimestamp: indexable.DecodeTime(dto.Timestamp),
LastTimestamp: time.Unix(*dto.LastTimestamp, 0).UTC(),
SampleCount: *dto.SampleCount,
}
func (s *SampleKey) Load(d *dto.SampleKey) {
f := &clientmodel.Fingerprint{}
loadFingerprint(f, d.GetFingerprint())
s.Fingerprint = f
s.FirstTimestamp = indexable.DecodeTime(d.Timestamp)
s.LastTimestamp = time.Unix(d.GetLastTimestamp(), 0).UTC()
s.SampleCount = d.GetSampleCount()
}

View file

@ -16,12 +16,13 @@ package metric
import (
"bytes"
"fmt"
"github.com/prometheus/prometheus/model"
clientmodel "github.com/prometheus/client_golang/model"
)
// scanJob models a range of queries.
type scanJob struct {
fingerprint *model.Fingerprint
fingerprint *clientmodel.Fingerprint
operations ops
}

View file

@ -15,22 +15,23 @@ package metric
import (
"fmt"
"github.com/prometheus/prometheus/coding"
"github.com/prometheus/prometheus/coding/indexable"
"github.com/prometheus/prometheus/model"
dto "github.com/prometheus/prometheus/model/generated"
"github.com/prometheus/prometheus/utility/test"
"math"
"math/rand"
"sort"
"testing"
"testing/quick"
"time"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/prometheus/model/generated"
"github.com/prometheus/prometheus/coding"
"github.com/prometheus/prometheus/coding/indexable"
"github.com/prometheus/prometheus/utility/test"
)
const (
stochasticMaximumVariance = 8
)
const stochasticMaximumVariance = 8
func BasicLifecycleTests(p MetricPersistence, t test.Tester) {
if p == nil {
@ -41,10 +42,10 @@ func BasicLifecycleTests(p MetricPersistence, t test.Tester) {
func ReadEmptyTests(p MetricPersistence, t test.Tester) {
hasLabelPair := func(x int) (success bool) {
name := model.LabelName(string(x))
value := model.LabelValue(string(x))
name := clientmodel.LabelName(string(x))
value := clientmodel.LabelValue(string(x))
labelSet := model.LabelSet{
labelSet := clientmodel.LabelSet{
name: value,
}
@ -69,7 +70,7 @@ func ReadEmptyTests(p MetricPersistence, t test.Tester) {
}
hasLabelName := func(x int) (success bool) {
labelName := model.LabelName(string(x))
labelName := clientmodel.LabelName(string(x))
fingerprints, err := p.GetFingerprintsForLabelName(labelName)
if err != nil {
@ -94,13 +95,13 @@ func ReadEmptyTests(p MetricPersistence, t test.Tester) {
func AppendSampleAsPureSparseAppendTests(p MetricPersistence, t test.Tester) {
appendSample := func(x int) (success bool) {
v := model.SampleValue(x)
v := clientmodel.SampleValue(x)
ts := time.Unix(int64(x), int64(x))
labelName := model.LabelName(x)
labelValue := model.LabelValue(x)
l := model.Metric{labelName: labelValue}
labelName := clientmodel.LabelName(x)
labelValue := clientmodel.LabelValue(x)
l := clientmodel.Metric{labelName: labelValue}
sample := model.Sample{
sample := &clientmodel.Sample{
Value: v,
Timestamp: ts,
Metric: l,
@ -123,13 +124,13 @@ func AppendSampleAsPureSparseAppendTests(p MetricPersistence, t test.Tester) {
func AppendSampleAsSparseAppendWithReadsTests(p MetricPersistence, t test.Tester) {
appendSample := func(x int) (success bool) {
v := model.SampleValue(x)
v := clientmodel.SampleValue(x)
ts := time.Unix(int64(x), int64(x))
labelName := model.LabelName(x)
labelValue := model.LabelValue(x)
l := model.Metric{labelName: labelValue}
labelName := clientmodel.LabelName(x)
labelValue := clientmodel.LabelValue(x)
l := clientmodel.Metric{labelName: labelValue}
sample := model.Sample{
sample := &clientmodel.Sample{
Value: v,
Timestamp: ts,
Metric: l,
@ -151,7 +152,7 @@ func AppendSampleAsSparseAppendWithReadsTests(p MetricPersistence, t test.Tester
return
}
fingerprints, err = p.GetFingerprintsForLabelSet(model.LabelSet{
fingerprints, err = p.GetFingerprintsForLabelSet(clientmodel.LabelSet{
labelName: labelValue,
})
if err != nil {
@ -173,10 +174,10 @@ func AppendSampleAsSparseAppendWithReadsTests(p MetricPersistence, t test.Tester
func AppendSampleAsPureSingleEntityAppendTests(p MetricPersistence, t test.Tester) {
appendSample := func(x int) bool {
sample := model.Sample{
Value: model.SampleValue(x),
sample := &clientmodel.Sample{
Value: clientmodel.SampleValue(x),
Timestamp: time.Unix(int64(x), 0),
Metric: model.Metric{model.MetricNameLabel: "my_metric"},
Metric: clientmodel.Metric{clientmodel.MetricNameLabel: "my_metric"},
}
err := p.AppendSample(sample)
@ -189,9 +190,11 @@ func AppendSampleAsPureSingleEntityAppendTests(p MetricPersistence, t test.Teste
}
}
func levelDBGetRangeValues(l *LevelDBMetricPersistence, fp *model.Fingerprint, i model.Interval) (samples model.Values, err error) {
func levelDBGetRangeValues(l *LevelDBMetricPersistence, fp *clientmodel.Fingerprint, i Interval) (samples Values, err error) {
fpDto := &dto.Fingerprint{}
dumpFingerprint(fpDto, fp)
k := &dto.SampleKey{
Fingerprint: fp.ToDTO(),
Fingerprint: fpDto,
Timestamp: indexable.EncodeTime(i.OldestInclusive),
}
@ -258,23 +261,23 @@ func StochasticTests(persistenceMaker func() (MetricPersistence, test.Closer), t
metricNewestSample := map[int]int64{}
for metricIndex := 0; metricIndex < numberOfMetrics; metricIndex++ {
sample := model.Sample{
Metric: model.Metric{},
sample := &clientmodel.Sample{
Metric: clientmodel.Metric{},
}
v := model.LabelValue(fmt.Sprintf("metric_index_%d", metricIndex))
sample.Metric[model.MetricNameLabel] = v
v := clientmodel.LabelValue(fmt.Sprintf("metric_index_%d", metricIndex))
sample.Metric[clientmodel.MetricNameLabel] = v
for sharedLabelIndex := 0; sharedLabelIndex < numberOfSharedLabels; sharedLabelIndex++ {
l := model.LabelName(fmt.Sprintf("shared_label_%d", sharedLabelIndex))
v := model.LabelValue(fmt.Sprintf("label_%d", sharedLabelIndex))
l := clientmodel.LabelName(fmt.Sprintf("shared_label_%d", sharedLabelIndex))
v := clientmodel.LabelValue(fmt.Sprintf("label_%d", sharedLabelIndex))
sample.Metric[l] = v
}
for unsharedLabelIndex := 0; unsharedLabelIndex < numberOfUnsharedLabels; unsharedLabelIndex++ {
l := model.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, unsharedLabelIndex))
v := model.LabelValue(fmt.Sprintf("private_label_%d", unsharedLabelIndex))
l := clientmodel.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, unsharedLabelIndex))
v := clientmodel.LabelValue(fmt.Sprintf("private_label_%d", unsharedLabelIndex))
sample.Metric[l] = v
}
@ -316,7 +319,7 @@ func StochasticTests(persistenceMaker func() (MetricPersistence, test.Closer), t
for sampleIndex := 0; sampleIndex < numberOfSamples; sampleIndex++ {
sample.Timestamp = sortedTimestamps[sampleIndex]
sample.Value = model.SampleValue(sampleIndex)
sample.Value = clientmodel.SampleValue(sampleIndex)
err := p.AppendSample(sample)
@ -330,8 +333,8 @@ func StochasticTests(persistenceMaker func() (MetricPersistence, test.Closer), t
metricNewestSample[metricIndex] = newestSample
for sharedLabelIndex := 0; sharedLabelIndex < numberOfSharedLabels; sharedLabelIndex++ {
labelPair := model.LabelSet{
model.LabelName(fmt.Sprintf("shared_label_%d", sharedLabelIndex)): model.LabelValue(fmt.Sprintf("label_%d", sharedLabelIndex)),
labelPair := clientmodel.LabelSet{
clientmodel.LabelName(fmt.Sprintf("shared_label_%d", sharedLabelIndex)): clientmodel.LabelValue(fmt.Sprintf("label_%d", sharedLabelIndex)),
}
fingerprints, err := p.GetFingerprintsForLabelSet(labelPair)
@ -344,7 +347,7 @@ func StochasticTests(persistenceMaker func() (MetricPersistence, test.Closer), t
return
}
labelName := model.LabelName(fmt.Sprintf("shared_label_%d", sharedLabelIndex))
labelName := clientmodel.LabelName(fmt.Sprintf("shared_label_%d", sharedLabelIndex))
fingerprints, err = p.GetFingerprintsForLabelName(labelName)
if err != nil {
t.Error(err)
@ -358,7 +361,7 @@ func StochasticTests(persistenceMaker func() (MetricPersistence, test.Closer), t
}
for sharedIndex := 0; sharedIndex < numberOfSharedLabels; sharedIndex++ {
labelName := model.LabelName(fmt.Sprintf("shared_label_%d", sharedIndex))
labelName := clientmodel.LabelName(fmt.Sprintf("shared_label_%d", sharedIndex))
fingerprints, err := p.GetFingerprintsForLabelName(labelName)
if err != nil {
t.Error(err)
@ -373,9 +376,9 @@ func StochasticTests(persistenceMaker func() (MetricPersistence, test.Closer), t
for metricIndex := 0; metricIndex < numberOfMetrics; metricIndex++ {
for unsharedLabelIndex := 0; unsharedLabelIndex < numberOfUnsharedLabels; unsharedLabelIndex++ {
labelName := model.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, unsharedLabelIndex))
labelValue := model.LabelValue(fmt.Sprintf("private_label_%d", unsharedLabelIndex))
labelSet := model.LabelSet{
labelName := clientmodel.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, unsharedLabelIndex))
labelValue := clientmodel.LabelValue(fmt.Sprintf("private_label_%d", unsharedLabelIndex))
labelSet := clientmodel.LabelSet{
labelName: labelValue,
}
@ -400,19 +403,19 @@ func StochasticTests(persistenceMaker func() (MetricPersistence, test.Closer), t
}
}
metric := model.Metric{}
metric[model.MetricNameLabel] = model.LabelValue(fmt.Sprintf("metric_index_%d", metricIndex))
metric := clientmodel.Metric{}
metric[clientmodel.MetricNameLabel] = clientmodel.LabelValue(fmt.Sprintf("metric_index_%d", metricIndex))
for i := 0; i < numberOfSharedLabels; i++ {
l := model.LabelName(fmt.Sprintf("shared_label_%d", i))
v := model.LabelValue(fmt.Sprintf("label_%d", i))
l := clientmodel.LabelName(fmt.Sprintf("shared_label_%d", i))
v := clientmodel.LabelValue(fmt.Sprintf("label_%d", i))
metric[l] = v
}
for i := 0; i < numberOfUnsharedLabels; i++ {
l := model.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, i))
v := model.LabelValue(fmt.Sprintf("private_label_%d", i))
l := clientmodel.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, i))
v := clientmodel.LabelValue(fmt.Sprintf("private_label_%d", i))
metric[l] = v
}
@ -461,13 +464,14 @@ func StochasticTests(persistenceMaker func() (MetricPersistence, test.Closer), t
begin, end = second, first
}
interval := model.Interval{
interval := Interval{
OldestInclusive: time.Unix(begin, 0),
NewestInclusive: time.Unix(end, 0),
}
samples := model.Values{}
fp := model.NewFingerprintFromMetric(metric)
samples := Values{}
fp := &clientmodel.Fingerprint{}
fp.LoadFromMetric(metric)
switch persistence := p.(type) {
case *LevelDBMetricPersistence:
var err error

View file

@ -17,19 +17,20 @@ import (
"fmt"
"log"
"sort"
"sync"
"time"
dto "github.com/prometheus/prometheus/model/generated"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/coding"
"github.com/prometheus/prometheus/coding/indexable"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/storage/raw/leveldb"
"sync"
)
type chunk model.Values
type chunk Values
// TruncateBefore returns a subslice of the original such that extraneous
// samples in the collection that occur before the provided time are
@ -78,7 +79,7 @@ type TieredStorage struct {
// BUG(matt): This introduces a Law of Demeter violation. Ugh.
DiskStorage *LevelDBMetricPersistence
appendToDiskQueue chan model.Samples
appendToDiskQueue chan clientmodel.Samples
memoryArena *memorySeriesStorage
memoryTTL time.Duration
@ -120,7 +121,7 @@ func NewTieredStorage(appendToDiskQueueDepth, viewQueueDepth uint, flushMemoryIn
memOptions := MemorySeriesOptions{WatermarkCache: wmCache}
s := &TieredStorage{
appendToDiskQueue: make(chan model.Samples, appendToDiskQueueDepth),
appendToDiskQueue: make(chan clientmodel.Samples, appendToDiskQueueDepth),
DiskStorage: diskStorage,
draining: make(chan chan<- bool),
flushMemoryInterval: flushMemoryInterval,
@ -145,7 +146,7 @@ func NewTieredStorage(appendToDiskQueueDepth, viewQueueDepth uint, flushMemoryIn
}
// Enqueues Samples for storage.
func (t *TieredStorage) AppendSamples(samples model.Samples) (err error) {
func (t *TieredStorage) AppendSamples(samples clientmodel.Samples) (err error) {
t.mu.RLock()
defer t.mu.RUnlock()
if t.state != tieredStorageServing {
@ -170,6 +171,8 @@ func (t *TieredStorage) drain(drained chan<- bool) {
panic("Illegal State: Supplemental drain requested.")
}
t.state = tieredStorageDraining
log.Println("Triggering drain...")
t.draining <- (drained)
}
@ -269,7 +272,7 @@ func (t *TieredStorage) flushMemory(ttl time.Duration) {
queueLength := len(t.appendToDiskQueue)
if queueLength > 0 {
samples := model.Samples{}
samples := clientmodel.Samples{}
for i := 0; i < queueLength; i++ {
chunk := <-t.appendToDiskQueue
samples = append(samples, chunk...)
@ -286,6 +289,10 @@ func (t *TieredStorage) Close() {
t.mu.Lock()
defer t.mu.Unlock()
t.close()
}
func (t *TieredStorage) close() {
if t.state == tieredStorageStopping {
panic("Illegal State: Attempted to restop TieredStorage.")
}
@ -305,7 +312,7 @@ func (t *TieredStorage) Close() {
t.state = tieredStorageStopping
}
func (t *TieredStorage) seriesTooOld(f *model.Fingerprint, i time.Time) (bool, error) {
func (t *TieredStorage) seriesTooOld(f *clientmodel.Fingerprint, i time.Time) (bool, error) {
// BUG(julius): Make this configurable by query layer.
i = i.Add(-stalenessLimit)
@ -315,21 +322,23 @@ func (t *TieredStorage) seriesTooOld(f *model.Fingerprint, i time.Time) (bool, e
samples := t.memoryArena.CloneSamples(f)
if len(samples) > 0 {
newest := samples[len(samples)-1].Timestamp
t.wmCache.Set(f, &Watermarks{High: newest})
t.wmCache.Set(f, &watermarks{High: newest})
return newest.Before(i), nil
}
}
value := &dto.MetricHighWatermark{}
diskHit, err := t.DiskStorage.MetricHighWatermarks.Get(f.ToDTO(), value)
k := &dto.Fingerprint{}
dumpFingerprint(k, f)
diskHit, err := t.DiskStorage.MetricHighWatermarks.Get(k, value)
if err != nil {
return false, err
}
if diskHit {
wmTime := time.Unix(*value.Timestamp, 0).UTC()
t.wmCache.Set(f, &Watermarks{High: wmTime})
t.wmCache.Set(f, &watermarks{High: wmTime})
return wmTime.Before(i), nil
}
@ -454,7 +463,7 @@ func (t *TieredStorage) renderView(viewJob viewJob) {
}
// For each op, extract all needed data from the current chunk.
out := model.Values{}
out := Values{}
for _, op := range standingOps {
if op.CurrentTime().After(targetTime) {
break
@ -463,7 +472,7 @@ func (t *TieredStorage) renderView(viewJob viewJob) {
currentChunk = currentChunk.TruncateBefore(*(op.CurrentTime()))
for op.CurrentTime() != nil && !op.CurrentTime().After(targetTime) {
out = op.ExtractSamples(model.Values(currentChunk))
out = op.ExtractSamples(Values(currentChunk))
// Append the extracted samples to the materialized view.
view.appendSamples(scanJob.fingerprint, out)
@ -500,14 +509,15 @@ func (t *TieredStorage) renderView(viewJob viewJob) {
return
}
func (t *TieredStorage) loadChunkAroundTime(iterator leveldb.Iterator, frontier *seriesFrontier, fingerprint *model.Fingerprint, ts time.Time) (chunk model.Values) {
var (
targetKey = &dto.SampleKey{
Fingerprint: fingerprint.ToDTO(),
}
foundKey model.SampleKey
foundValues model.Values
)
func (t *TieredStorage) loadChunkAroundTime(iterator leveldb.Iterator, frontier *seriesFrontier, fingerprint *clientmodel.Fingerprint, ts time.Time) (chunk Values) {
fd := &dto.Fingerprint{}
dumpFingerprint(fd, fingerprint)
targetKey := &dto.SampleKey{
Fingerprint: fd,
}
var foundKey *SampleKey
var foundValues Values
// Limit the target key to be within the series' keyspace.
if ts.After(frontier.lastSupertime) {
@ -577,7 +587,7 @@ func (t *TieredStorage) loadChunkAroundTime(iterator leveldb.Iterator, frontier
}
// Get all label values that are associated with the provided label name.
func (t *TieredStorage) GetAllValuesForLabel(labelName model.LabelName) (model.LabelValues, error) {
func (t *TieredStorage) GetAllValuesForLabel(labelName clientmodel.LabelName) (clientmodel.LabelValues, error) {
t.mu.RLock()
defer t.mu.RUnlock()
@ -594,8 +604,8 @@ func (t *TieredStorage) GetAllValuesForLabel(labelName model.LabelName) (model.L
return nil, err
}
valueSet := map[model.LabelValue]bool{}
values := model.LabelValues{}
valueSet := map[clientmodel.LabelValue]bool{}
values := clientmodel.LabelValues{}
for _, value := range append(diskValues, memoryValues...) {
if !valueSet[value] {
values = append(values, value)
@ -608,7 +618,7 @@ func (t *TieredStorage) GetAllValuesForLabel(labelName model.LabelName) (model.L
// Get all of the metric fingerprints that are associated with the provided
// label set.
func (t *TieredStorage) GetFingerprintsForLabelSet(labelSet model.LabelSet) (model.Fingerprints, error) {
func (t *TieredStorage) GetFingerprintsForLabelSet(labelSet clientmodel.LabelSet) (clientmodel.Fingerprints, error) {
t.mu.RLock()
defer t.mu.RUnlock()
@ -624,11 +634,11 @@ func (t *TieredStorage) GetFingerprintsForLabelSet(labelSet model.LabelSet) (mod
if err != nil {
return nil, err
}
fingerprintSet := map[model.Fingerprint]bool{}
fingerprintSet := map[clientmodel.Fingerprint]bool{}
for _, fingerprint := range append(memFingerprints, diskFingerprints...) {
fingerprintSet[*fingerprint] = true
}
fingerprints := model.Fingerprints{}
fingerprints := clientmodel.Fingerprints{}
for fingerprint := range fingerprintSet {
fpCopy := fingerprint
fingerprints = append(fingerprints, &fpCopy)
@ -638,7 +648,7 @@ func (t *TieredStorage) GetFingerprintsForLabelSet(labelSet model.LabelSet) (mod
}
// Get the metric associated with the provided fingerprint.
func (t *TieredStorage) GetMetricForFingerprint(f *model.Fingerprint) (model.Metric, error) {
func (t *TieredStorage) GetMetricForFingerprint(f *clientmodel.Fingerprint) (clientmodel.Metric, error) {
t.mu.RLock()
defer t.mu.RUnlock()

View file

@ -14,19 +14,21 @@
package metric
import (
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/utility/test"
"sort"
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/stats"
"github.com/prometheus/prometheus/utility/test"
)
func buildSamples(from, to time.Time, interval time.Duration, m model.Metric) (v []model.Sample) {
i := model.SampleValue(0)
func buildSamples(from, to time.Time, interval time.Duration, m clientmodel.Metric) (v clientmodel.Samples) {
i := clientmodel.SampleValue(0)
for from.Before(to) {
v = append(v, model.Sample{
v = append(v, &clientmodel.Sample{
Metric: m,
Value: i,
Timestamp: from,
@ -47,16 +49,17 @@ func testMakeView(t test.Tester, flushToDisk bool) {
}
type out struct {
atTime []model.Values
atInterval []model.Values
alongRange []model.Values
atTime []Values
atInterval []Values
alongRange []Values
}
metric := clientmodel.Metric{clientmodel.MetricNameLabel: "request_count"}
fingerprint := &clientmodel.Fingerprint{}
fingerprint.LoadFromMetric(metric)
var (
instant = time.Date(1984, 3, 30, 0, 0, 0, 0, time.Local)
metric = model.Metric{model.MetricNameLabel: "request_count"}
fingerprint = *model.NewFingerprintFromMetric(metric)
scenarios = []struct {
data []model.Sample
instant = time.Date(1984, 3, 30, 0, 0, 0, 0, time.Local)
scenarios = []struct {
data clientmodel.Samples
in in
out out
}{
@ -70,12 +73,12 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
},
out: out{
atTime: []model.Values{{}},
atTime: []Values{{}},
},
},
// Single sample, query asks for exact sample time.
{
data: []model.Sample{
data: clientmodel.Samples{
{
Metric: metric,
Value: 0,
@ -90,7 +93,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
},
out: out{
atTime: []model.Values{
atTime: []Values{
{
{
Timestamp: instant,
@ -102,7 +105,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
// Single sample, query time before the sample.
{
data: []model.Sample{
data: clientmodel.Samples{
{
Metric: metric,
Value: 0,
@ -122,7 +125,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
},
out: out{
atTime: []model.Values{
atTime: []Values{
{
{
Timestamp: instant.Add(time.Second),
@ -134,7 +137,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
// Single sample, query time after the sample.
{
data: []model.Sample{
data: clientmodel.Samples{
{
Metric: metric,
Value: 0,
@ -149,7 +152,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
},
out: out{
atTime: []model.Values{
atTime: []Values{
{
{
Timestamp: instant,
@ -161,7 +164,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
// Two samples, query asks for first sample time.
{
data: []model.Sample{
data: clientmodel.Samples{
{
Metric: metric,
Value: 0,
@ -181,7 +184,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
},
out: out{
atTime: []model.Values{
atTime: []Values{
{
{
Timestamp: instant,
@ -193,7 +196,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
// Three samples, query asks for second sample time.
{
data: []model.Sample{
data: clientmodel.Samples{
{
Metric: metric,
Value: 0,
@ -218,7 +221,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
},
out: out{
atTime: []model.Values{
atTime: []Values{
{
{
Timestamp: instant.Add(time.Second),
@ -230,7 +233,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
// Three samples, query asks for time between first and second samples.
{
data: []model.Sample{
data: clientmodel.Samples{
{
Metric: metric,
Value: 0,
@ -255,7 +258,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
},
out: out{
atTime: []model.Values{
atTime: []Values{
{
{
Timestamp: instant,
@ -271,7 +274,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
// Three samples, query asks for time between second and third samples.
{
data: []model.Sample{
data: clientmodel.Samples{
{
Metric: metric,
Value: 0,
@ -296,7 +299,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
},
out: out{
atTime: []model.Values{
atTime: []Values{
{
{
Timestamp: instant.Add(time.Second * 2),
@ -321,7 +324,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
},
},
out: out{
atTime: []model.Values{
atTime: []Values{
{
{
Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize/2)),
@ -371,7 +374,7 @@ func testMakeView(t test.Tester, flushToDisk bool) {
}
for j, atTime := range scenario.in.atTime {
actual := v.GetValueAtTime(&fingerprint, atTime.time)
actual := v.GetValueAtTime(fingerprint, atTime.time)
if len(actual) != len(scenario.out.atTime[j]) {
t.Fatalf("%d.%d. expected %d output, got %d", i, j, len(scenario.out.atTime[j]), len(actual))
@ -475,8 +478,8 @@ func TestGetAllValuesForLabel(t *testing.T) {
for i, scenario := range scenarios {
tiered, closer := NewTestTieredStorage(t)
for j, metric := range scenario.in {
sample := model.Sample{
Metric: model.Metric{model.MetricNameLabel: model.LabelValue(metric.metricName)},
sample := &clientmodel.Sample{
Metric: clientmodel.Metric{clientmodel.MetricNameLabel: clientmodel.LabelValue(metric.metricName)},
}
if metric.appendToMemory {
if err := tiered.memoryArena.AppendSample(sample); err != nil {
@ -489,7 +492,7 @@ func TestGetAllValuesForLabel(t *testing.T) {
}
}
}
metricNames, err := tiered.GetAllValuesForLabel(model.MetricNameLabel)
metricNames, err := tiered.GetAllValuesForLabel(clientmodel.MetricNameLabel)
closer.Close()
if err != nil {
t.Fatalf("%d. Error getting metric names: %s", i, err)
@ -510,11 +513,11 @@ func TestGetAllValuesForLabel(t *testing.T) {
func TestGetFingerprintsForLabelSet(t *testing.T) {
tiered, closer := NewTestTieredStorage(t)
defer closer.Close()
memorySample := model.Sample{
Metric: model.Metric{model.MetricNameLabel: "http_requests", "method": "/foo"},
memorySample := &clientmodel.Sample{
Metric: clientmodel.Metric{clientmodel.MetricNameLabel: "http_requests", "method": "/foo"},
}
diskSample := model.Sample{
Metric: model.Metric{model.MetricNameLabel: "http_requests", "method": "/bar"},
diskSample := &clientmodel.Sample{
Metric: clientmodel.Metric{clientmodel.MetricNameLabel: "http_requests", "method": "/bar"},
}
if err := tiered.memoryArena.AppendSample(memorySample); err != nil {
t.Fatalf("Failed to add fixture data: %s", err)
@ -525,33 +528,33 @@ func TestGetFingerprintsForLabelSet(t *testing.T) {
tiered.Flush()
scenarios := []struct {
labels model.LabelSet
labels clientmodel.LabelSet
fpCount int
}{
{
labels: model.LabelSet{},
labels: clientmodel.LabelSet{},
fpCount: 0,
}, {
labels: model.LabelSet{
model.MetricNameLabel: "http_requests",
labels: clientmodel.LabelSet{
clientmodel.MetricNameLabel: "http_requests",
},
fpCount: 2,
}, {
labels: model.LabelSet{
model.MetricNameLabel: "http_requests",
"method": "/foo",
labels: clientmodel.LabelSet{
clientmodel.MetricNameLabel: "http_requests",
"method": "/foo",
},
fpCount: 1,
}, {
labels: model.LabelSet{
model.MetricNameLabel: "http_requests",
"method": "/bar",
labels: clientmodel.LabelSet{
clientmodel.MetricNameLabel: "http_requests",
"method": "/bar",
},
fpCount: 1,
}, {
labels: model.LabelSet{
model.MetricNameLabel: "http_requests",
"method": "/baz",
labels: clientmodel.LabelSet{
clientmodel.MetricNameLabel: "http_requests",
"method": "/baz",
},
fpCount: 0,
},
@ -570,18 +573,18 @@ func TestGetFingerprintsForLabelSet(t *testing.T) {
func testTruncateBefore(t test.Tester) {
type in struct {
values model.Values
values Values
time time.Time
}
instant := time.Now()
var scenarios = []struct {
in in
out model.Values
out Values
}{
{
in: in{
time: instant,
values: model.Values{
values: Values{
{
Value: 0,
Timestamp: instant,
@ -604,7 +607,7 @@ func testTruncateBefore(t test.Tester) {
},
},
},
out: model.Values{
out: Values{
{
Value: 0,
Timestamp: instant,
@ -630,7 +633,7 @@ func testTruncateBefore(t test.Tester) {
{
in: in{
time: instant.Add(2 * time.Second),
values: model.Values{
values: Values{
{
Value: 0,
Timestamp: instant,
@ -653,7 +656,7 @@ func testTruncateBefore(t test.Tester) {
},
},
},
out: model.Values{
out: Values{
{
Value: 1,
Timestamp: instant.Add(time.Second),
@ -675,7 +678,7 @@ func testTruncateBefore(t test.Tester) {
{
in: in{
time: instant.Add(5 * time.Second),
values: model.Values{
values: Values{
{
Value: 0,
Timestamp: instant,
@ -698,7 +701,7 @@ func testTruncateBefore(t test.Tester) {
},
},
},
out: model.Values{
out: Values{
// Preserve the last value in case it needs to be used for the next set.
{
Value: 4,

View file

@ -17,7 +17,7 @@ import (
"sort"
"time"
"github.com/prometheus/prometheus/model"
clientmodel "github.com/prometheus/client_golang/model"
)
var (
@ -30,56 +30,56 @@ var (
// Represents the summation of all datastore queries that shall be performed to
// extract values. Each operation mutates the state of the builder.
type ViewRequestBuilder interface {
GetMetricAtTime(fingerprint model.Fingerprint, time time.Time)
GetMetricAtInterval(fingerprint model.Fingerprint, from, through time.Time, interval time.Duration)
GetMetricRange(fingerprint model.Fingerprint, from, through time.Time)
GetMetricAtTime(fingerprint *clientmodel.Fingerprint, time time.Time)
GetMetricAtInterval(fingerprint *clientmodel.Fingerprint, from, through time.Time, interval time.Duration)
GetMetricRange(fingerprint *clientmodel.Fingerprint, from, through time.Time)
ScanJobs() scanJobs
}
// Contains the various unoptimized requests for data.
type viewRequestBuilder struct {
operations map[model.Fingerprint]ops
operations map[clientmodel.Fingerprint]ops
}
// Furnishes a ViewRequestBuilder for remarking what types of queries to perform.
func NewViewRequestBuilder() viewRequestBuilder {
return viewRequestBuilder{
operations: make(map[model.Fingerprint]ops),
operations: make(map[clientmodel.Fingerprint]ops),
}
}
// Gets for the given Fingerprint either the value at that time if there is an
// match or the one or two values adjacent thereto.
func (v viewRequestBuilder) GetMetricAtTime(fingerprint model.Fingerprint, time time.Time) {
ops := v.operations[fingerprint]
func (v viewRequestBuilder) GetMetricAtTime(fingerprint *clientmodel.Fingerprint, time time.Time) {
ops := v.operations[*fingerprint]
ops = append(ops, &getValuesAtTimeOp{
time: time,
})
v.operations[fingerprint] = ops
v.operations[*fingerprint] = ops
}
// Gets for the given Fingerprint either the value at that interval from From
// through Through if there is an match or the one or two values adjacent
// for each point.
func (v viewRequestBuilder) GetMetricAtInterval(fingerprint model.Fingerprint, from, through time.Time, interval time.Duration) {
ops := v.operations[fingerprint]
func (v viewRequestBuilder) GetMetricAtInterval(fingerprint *clientmodel.Fingerprint, from, through time.Time, interval time.Duration) {
ops := v.operations[*fingerprint]
ops = append(ops, &getValuesAtIntervalOp{
from: from,
through: through,
interval: interval,
})
v.operations[fingerprint] = ops
v.operations[*fingerprint] = ops
}
// Gets for the given Fingerprint either the values that occur inclusively from
// From through Through.
func (v viewRequestBuilder) GetMetricRange(fingerprint model.Fingerprint, from, through time.Time) {
ops := v.operations[fingerprint]
func (v viewRequestBuilder) GetMetricRange(fingerprint *clientmodel.Fingerprint, from, through time.Time) {
ops := v.operations[*fingerprint]
ops = append(ops, &getValuesAlongRangeOp{
from: from,
through: through,
})
v.operations[fingerprint] = ops
v.operations[*fingerprint] = ops
}
// Emits the optimized scans that will occur in the data store. This
@ -106,7 +106,7 @@ type view struct {
*memorySeriesStorage
}
func (v view) appendSamples(fingerprint *model.Fingerprint, samples model.Values) {
func (v view) appendSamples(fingerprint *clientmodel.Fingerprint, samples Values) {
v.memorySeriesStorage.appendSamplesWithoutIndexing(fingerprint, samples)
}

View file

@ -14,10 +14,12 @@
package metric
import (
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility/test"
"testing"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/utility/test"
)
func testBuilder(t test.Tester) {
@ -140,33 +142,36 @@ func testBuilder(t test.Tester) {
for i, scenario := range scenarios {
builder := viewRequestBuilder{
operations: map[model.Fingerprint]ops{},
operations: map[clientmodel.Fingerprint]ops{},
}
for _, atTime := range scenario.in.atTimes {
fingerprint := *model.NewFingerprintFromRowKey(atTime.fingerprint)
fingerprint := &clientmodel.Fingerprint{}
fingerprint.LoadFromString(atTime.fingerprint)
builder.GetMetricAtTime(fingerprint, atTime.time)
}
for _, atInterval := range scenario.in.atIntervals {
fingerprint := *model.NewFingerprintFromRowKey(atInterval.fingerprint)
fingerprint := &clientmodel.Fingerprint{}
fingerprint.LoadFromString(atInterval.fingerprint)
builder.GetMetricAtInterval(fingerprint, atInterval.from, atInterval.through, atInterval.interval)
}
for _, atRange := range scenario.in.atRanges {
fingerprint := *model.NewFingerprintFromRowKey(atRange.fingerprint)
fingerprint := &clientmodel.Fingerprint{}
fingerprint.LoadFromString(atRange.fingerprint)
builder.GetMetricRange(fingerprint, atRange.from, atRange.through)
}
jobs := builder.ScanJobs()
if len(scenario.out) != len(jobs) {
t.Fatalf("%d. expected job length of %d, got %d\n", i, len(scenario.out), len(jobs))
t.Fatalf("%d. expected job length of %d, got %d", i, len(scenario.out), len(jobs))
}
for j, job := range scenario.out {
if jobs[j].fingerprint.ToRowKey() != job.fingerprint {
t.Fatalf("%d.%d. expected fingerprint %s, got %s\n", i, j, job.fingerprint, jobs[j].fingerprint.ToRowKey())
if jobs[j].fingerprint.String() != job.fingerprint {
t.Fatalf("%d.%d. expected fingerprint %s, got %s", i, j, job.fingerprint, jobs[j].fingerprint)
}
}
}

View file

@ -18,10 +18,14 @@ import (
"sync"
"time"
"github.com/prometheus/prometheus/model"
"code.google.com/p/goprotobuf/proto"
clientmodel "github.com/prometheus/client_golang/model"
dto "github.com/prometheus/prometheus/model/generated"
)
// unsafe.Sizeof(Watermarks{})
// unsafe.Sizeof(watermarks{})
const elementSize = 24
type Bytes uint64
@ -32,32 +36,42 @@ type WatermarkCache struct {
mu sync.Mutex
list *list.List
table map[model.Fingerprint]*list.Element
table map[clientmodel.Fingerprint]*list.Element
size Bytes
allowance Bytes
}
type Watermarks struct {
type watermarks struct {
High time.Time
}
func (w *watermarks) load(d *dto.MetricHighWatermark) {
w.High = time.Unix(d.GetTimestamp(), 0).UTC()
}
func (w *watermarks) dump(d *dto.MetricHighWatermark) {
d.Reset()
d.Timestamp = proto.Int64(w.High.Unix())
}
type entry struct {
fingerprint *model.Fingerprint
watermarks *Watermarks
fingerprint *clientmodel.Fingerprint
watermarks *watermarks
accessed time.Time
}
func NewWatermarkCache(allowance Bytes) *WatermarkCache {
return &WatermarkCache{
list: list.New(),
table: map[model.Fingerprint]*list.Element{},
table: map[clientmodel.Fingerprint]*list.Element{},
allowance: allowance,
}
}
func (lru *WatermarkCache) Get(f *model.Fingerprint) (v *Watermarks, ok bool) {
func (lru *WatermarkCache) Get(f *clientmodel.Fingerprint) (v *watermarks, ok bool) {
lru.mu.Lock()
defer lru.mu.Unlock()
@ -71,7 +85,7 @@ func (lru *WatermarkCache) Get(f *model.Fingerprint) (v *Watermarks, ok bool) {
return element.Value.(*entry).watermarks, true
}
func (lru *WatermarkCache) Set(f *model.Fingerprint, w *Watermarks) {
func (lru *WatermarkCache) Set(f *clientmodel.Fingerprint, w *watermarks) {
lru.mu.Lock()
defer lru.mu.Unlock()
@ -82,7 +96,7 @@ func (lru *WatermarkCache) Set(f *model.Fingerprint, w *Watermarks) {
}
}
func (lru *WatermarkCache) SetIfAbsent(f *model.Fingerprint, w *Watermarks) {
func (lru *WatermarkCache) SetIfAbsent(f *clientmodel.Fingerprint, w *watermarks) {
lru.mu.Lock()
defer lru.mu.Unlock()
@ -93,7 +107,7 @@ func (lru *WatermarkCache) SetIfAbsent(f *model.Fingerprint, w *Watermarks) {
}
}
func (lru *WatermarkCache) Delete(f *model.Fingerprint) bool {
func (lru *WatermarkCache) Delete(f *clientmodel.Fingerprint) bool {
lru.mu.Lock()
defer lru.mu.Unlock()
@ -114,11 +128,11 @@ func (lru *WatermarkCache) Clear() {
defer lru.mu.Unlock()
lru.list.Init()
lru.table = map[model.Fingerprint]*list.Element{}
lru.table = map[clientmodel.Fingerprint]*list.Element{}
lru.size = 0
}
func (lru *WatermarkCache) updateInplace(e *list.Element, w *Watermarks) {
func (lru *WatermarkCache) updateInplace(e *list.Element, w *watermarks) {
e.Value.(*entry).watermarks = w
lru.moveToFront(e)
lru.checkCapacity()
@ -129,7 +143,7 @@ func (lru *WatermarkCache) moveToFront(e *list.Element) {
e.Value.(*entry).accessed = time.Now()
}
func (lru *WatermarkCache) addNew(f *model.Fingerprint, w *Watermarks) {
func (lru *WatermarkCache) addNew(f *clientmodel.Fingerprint, w *watermarks) {
lru.table[*f] = lru.list.PushFront(&entry{
fingerprint: f,
watermarks: w,

View file

@ -14,10 +14,11 @@
package leveldb
import (
"github.com/prometheus/prometheus/storage/raw"
"testing"
"github.com/prometheus/prometheus/storage/raw"
)
func TestInterfaceAdherence(t *testing.T) {
var _ raw.Persistence = &LevelDBPersistence{}
var _ raw.Persistence = new(LevelDBPersistence)
}

View file

@ -30,7 +30,6 @@ import (
dto "github.com/prometheus/prometheus/model/generated"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/metric"
)
@ -43,33 +42,36 @@ type SamplesDumper struct {
*csv.Writer
}
func (d SamplesDumper) DecodeKey(in interface{}) (interface{}, error) {
func (d *SamplesDumper) DecodeKey(in interface{}) (interface{}, error) {
key := &dto.SampleKey{}
err := proto.Unmarshal(in.([]byte), key)
if err != nil {
return nil, err
}
return model.NewSampleKeyFromDTO(key), nil
sampleKey := &metric.SampleKey{}
sampleKey.Load(key)
return sampleKey, nil
}
func (d SamplesDumper) DecodeValue(in interface{}) (interface{}, error) {
func (d *SamplesDumper) DecodeValue(in interface{}) (interface{}, error) {
values := &dto.SampleValueSeries{}
err := proto.Unmarshal(in.([]byte), values)
if err != nil {
return nil, err
}
return model.NewValuesFromDTO(values), nil
return metric.NewValuesFromDTO(values), nil
}
func (d SamplesDumper) Filter(_, _ interface{}) storage.FilterResult {
func (d *SamplesDumper) Filter(_, _ interface{}) storage.FilterResult {
return storage.ACCEPT
}
func (d SamplesDumper) Operate(key, value interface{}) *storage.OperatorError {
sampleKey := key.(model.SampleKey)
for i, sample := range value.(model.Values) {
func (d *SamplesDumper) Operate(key, value interface{}) *storage.OperatorError {
sampleKey := key.(*metric.SampleKey)
for i, sample := range value.(metric.Values) {
d.Write([]string{
sampleKey.Fingerprint.String(),
strconv.FormatInt(sampleKey.FirstTimestamp.Unix(), 10),
@ -102,7 +104,10 @@ func main() {
}
defer persistence.Close()
dumper := SamplesDumper{csv.NewWriter(os.Stdout)}
dumper := &SamplesDumper{
csv.NewWriter(os.Stdout),
}
entire, err := persistence.MetricSamples.ForEach(dumper, dumper, dumper)
if err != nil {
log.Fatalf("Error dumping samples: %s", err)

View file

@ -14,17 +14,20 @@
package api
import (
"code.google.com/p/gorest"
"encoding/json"
"errors"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/rules"
"github.com/prometheus/prometheus/rules/ast"
"github.com/prometheus/prometheus/stats"
"log"
"net/http"
"sort"
"time"
"code.google.com/p/gorest"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/rules"
"github.com/prometheus/prometheus/rules/ast"
"github.com/prometheus/prometheus/stats"
)
func (serv MetricsService) setAccessControlHeaders(rb *gorest.ResponseBuilder) {
@ -114,7 +117,7 @@ func (serv MetricsService) QueryRange(expr string, end int64, duration int64, st
}
func (serv MetricsService) Metrics() string {
metricNames, err := serv.Storage.GetAllValuesForLabel(model.MetricNameLabel)
metricNames, err := serv.Storage.GetAllValuesForLabel(clientmodel.MetricNameLabel)
rb := serv.ResponseBuilder()
serv.setAccessControlHeaders(rb)
rb.SetContentType(gorest.Application_Json)

View file

@ -14,10 +14,12 @@
package api
import (
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/retrieval"
"net/http"
"time"
clientmodel "github.com/prometheus/client_golang/model"
"github.com/prometheus/prometheus/retrieval"
)
type TargetGroup struct {
@ -37,11 +39,11 @@ func (serv MetricsService) SetTargets(targetGroups []TargetGroup, jobName string
for _, targetGroup := range targetGroups {
// Do mandatory map type conversion due to Go shortcomings.
baseLabels := model.LabelSet{
model.JobLabel: model.LabelValue(job.GetName()),
baseLabels := clientmodel.LabelSet{
clientmodel.JobLabel: clientmodel.LabelValue(job.GetName()),
}
for label, value := range targetGroup.BaseLabels {
baseLabels[model.LabelName(label)] = model.LabelValue(value)
baseLabels[clientmodel.LabelName(label)] = clientmodel.LabelValue(value)
}
for _, endpoint := range targetGroup.Endpoints {