Add initial config and rule language implementation.

Julius Volz 2013-01-07 23:24:26 +01:00
parent 2e865c710e
commit 56384bf42a
28 changed files with 4419 additions and 18 deletions


@@ -30,7 +30,7 @@ clean:
 	-find . -type f -iname '.#*' -exec rm '{}' ';'
 
 format:
-	find . -iname '*.go' | grep -v generated | xargs -n1 gofmt -w -s=true
+	find . -iname '*.go' | egrep -v "generated|\.(l|y)\.go" | xargs -n1 gofmt -w -s=true
 
 search_index:
 	godoc -index -write_index -index_files='search_index'

config/Makefile (new file, 10 lines)

@@ -0,0 +1,10 @@
all: parser.y.go lexer.l.go
parser.y.go: parser.y
go tool yacc -o parser.y.go -v "" parser.y
lexer.l.go: parser.y.go lexer.l
golex lexer.l
clean:
rm lexer.l.go parser.y.go

config/config.go (new file, 105 lines)

@@ -0,0 +1,105 @@
package config
import (
"errors"
"fmt"
"github.com/matttproud/prometheus/model"
"time"
)
type Config struct {
Global *GlobalConfig
Jobs []JobConfig
}
type GlobalConfig struct {
ScrapeInterval time.Duration
EvaluationInterval time.Duration
Labels model.LabelSet
RuleFiles []string
}
type JobConfig struct {
Name string
ScrapeInterval time.Duration
Targets []Targets
}
type Targets struct {
Endpoints []string
Labels model.LabelSet
}
func New() *Config {
return &Config{
Global: &GlobalConfig{Labels: model.LabelSet{}},
}
}
func (config *Config) AddJob(options map[string]string, targets []Targets) error {
name, ok := options["name"]
if !ok {
return errors.New("Missing job name")
}
if len(targets) == 0 {
return errors.New(fmt.Sprintf("No targets configured for job '%v'", name))
}
job := &JobConfig{
Targets: targets,
}
for option, value := range options {
if err := job.SetOption(option, value); err != nil {
return err
}
}
config.Jobs = append(config.Jobs, *job)
return nil
}
func (config *GlobalConfig) SetOption(option string, value string) error {
switch option {
case "scrape_interval":
config.ScrapeInterval = stringToDuration(value)
return nil
case "evaluation_interval":
config.EvaluationInterval = stringToDuration(value)
return nil
default:
return errors.New(fmt.Sprintf("Unrecognized global configuration option '%v'", option))
}
return nil
}
func (config *GlobalConfig) SetLabels(labels model.LabelSet) {
for k, v := range labels {
config.Labels[k] = v
}
}
func (config *GlobalConfig) AddRuleFiles(ruleFiles []string) {
for _, ruleFile := range ruleFiles {
config.RuleFiles = append(config.RuleFiles, ruleFile)
}
}
func (job *JobConfig) SetOption(option string, value string) error {
switch option {
case "name":
job.Name = value
return nil
case "scrape_interval":
job.ScrapeInterval = stringToDuration(value)
return nil
default:
return errors.New(fmt.Sprintf("Unrecognized job configuration option '%v'", option))
}
return nil
}
func (job *JobConfig) AddTargets(endpoints []string, labels model.LabelSet) {
targets := Targets{
Endpoints: endpoints,
Labels: labels,
}
job.Targets = append(job.Targets, targets)
}
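
As an aside (not part of the commit), a minimal sketch of assembling the types above directly in Go, bypassing the parser; the job name, endpoint, and label values are illustrative.

package main

import (
	"fmt"
	"github.com/matttproud/prometheus/config"
	"github.com/matttproud/prometheus/model"
)

func main() {
	conf := config.New()
	// Error returns are elided for brevity in this sketch.
	conf.Global.SetOption("scrape_interval", "30s")
	conf.Global.SetLabels(model.LabelSet{"zone": "a"})

	job := &config.JobConfig{}
	job.SetOption("name", "example-job")
	job.AddTargets([]string{"http://localhost:8080/metrics.json"}, model.LabelSet{})
	conf.Jobs = append(conf.Jobs, *job)

	fmt.Println(conf.ToString(0))
}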

config/helpers.go (new file, 76 lines)

@@ -0,0 +1,76 @@
package config
import (
"fmt"
"github.com/matttproud/prometheus/model"
"log"
"regexp"
"strconv"
"time"
)
// Unfortunately, more global variables are needed for parsing.
var tmpJobOptions = map[string]string{}
var tmpJobTargets = []Targets{}
var tmpTargetEndpoints = []string{}
var tmpTargetLabels = model.LabelSet{}
func configError(error string, v ...interface{}) {
message := fmt.Sprintf(error, v...)
log.Fatal(fmt.Sprintf("Line %v, char %v: %s", yyline, yypos, message))
}
func PushJobOption(option string, value string) {
tmpJobOptions[option] = value
}
func PushJobTargets() {
targets := Targets{
Endpoints: tmpTargetEndpoints,
Labels: tmpTargetLabels,
}
tmpJobTargets = append(tmpJobTargets, targets)
tmpTargetLabels = model.LabelSet{}
tmpTargetEndpoints = []string{}
}
func PushTargetEndpoints(endpoints []string) {
for _, endpoint := range endpoints {
tmpTargetEndpoints = append(tmpTargetEndpoints, endpoint)
}
}
func PushTargetLabels(labels model.LabelSet) {
for k, v := range labels {
tmpTargetLabels[k] = v
}
}
func PopJob() {
if err := parsedConfig.AddJob(tmpJobOptions, tmpJobTargets); err != nil {
configError(err.Error())
}
tmpJobOptions = map[string]string{}
tmpJobTargets = []Targets{}
}
func stringToDuration(durationStr string) time.Duration {
durationRE := regexp.MustCompile("([0-9]+)([ydhms]+)")
matches := durationRE.FindStringSubmatch(durationStr)
if len(matches) != 3 {
configError("Not a valid duration string: '%v'", durationStr)
}
value, _ := strconv.Atoi(matches[1])
unit := matches[2]
switch unit {
case "y":
value *= 60 * 60 * 24 * 365
case "d":
value *= 60 * 60 * 24
case "h":
value *= 60 * 60
case "m":
value *= 60
}
return time.Duration(value) * time.Second
}
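
A quick worked example for stringToDuration above: "2d" matches the regexp with value 2 and unit "d", so the value becomes 2 * 60 * 60 * 24 = 172800 and the result is 172800 * time.Second (i.e. 48h). A plain seconds string such as "30s" falls through the switch unchanged and yields 30 * time.Second; there is no "s" case because the value is already in seconds at that point.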

config/lexer.l (new file, 43 lines)

@@ -0,0 +1,43 @@
%{
package config
%}
D [0-9]
L [a-zA-Z_]
%s S_GLOBAL S_GLOBAL_LABELS S_JOB S_TARGETS S_TARGET_LABELS
%x S_COMMENTS
%%
. { yypos++; REJECT }
\n { yyline++; yypos = 1; REJECT }
"/*" { BEGIN(S_COMMENTS); }
<S_COMMENTS>"*/" { BEGIN(0) }
<S_COMMENTS>. { /* ignore chars within multi-line comments */ }
\/\/[^\r\n]*\n { /* gobble up one-line comments */ }
<0>global { BEGIN(S_GLOBAL); return GLOBAL }
<S_GLOBAL>labels { BEGIN(S_GLOBAL_LABELS); return LABELS }
<S_GLOBAL>rule_files { return RULE_FILES }
<S_GLOBAL_LABELS>"}" { BEGIN(S_GLOBAL); REJECT }
<S_GLOBAL>"}" { BEGIN(0); REJECT }
<0>job { BEGIN(S_JOB); return JOB }
<S_JOB>targets { BEGIN(S_TARGETS); return TARGETS }
<S_TARGETS>endpoints { return ENDPOINTS }
<S_TARGETS>labels { BEGIN(S_TARGET_LABELS); return LABELS }
<S_TARGET_LABELS>"}" { BEGIN(S_TARGETS); REJECT }
<S_TARGETS>"}" { BEGIN(S_JOB); REJECT }
<S_JOB>"}" { BEGIN(0); REJECT }
{L}({L}|{D})+ { yylval.str = yytext; return IDENTIFIER }
\"(\\.|[^\\"])*\" { yylval.str = yytext[1:len(yytext) - 1]; return STRING }
\'(\\.|[^\\'])*\' { yylval.str = yytext[1:len(yytext) - 1]; return STRING }
[{}\[\]()=,] { return int(yytext[0]) }
. { /* don't print any remaining chars (whitespace) */ }
\n { /* don't print any remaining chars (whitespace) */ }
%%
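
To illustrate the start conditions above (an informal trace, not part of the commit): for the input global { scrape_interval = "30s" }, the keyword global switches the lexer into S_GLOBAL and returns GLOBAL; the opening brace is returned via the punctuation rule; scrape_interval matches the identifier rule and is returned as IDENTIFIER; "30s" becomes a STRING with the quotes stripped; and the closing brace first hits the <S_GLOBAL>"}" rule, which switches back to the initial state and then REJECTs so that the punctuation rule can still return '}' to the parser.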

config/lexer.l.go (new file, 551 lines)

@@ -0,0 +1,551 @@
// Generated by golex
package config
import (
"bufio"
"io"
"os"
"regexp"
"sort"
)
var yyin io.Reader = os.Stdin
var yyout io.Writer = os.Stdout
type yyrule struct {
regexp *regexp.Regexp
trailing *regexp.Regexp
startConds []yystartcondition
sol bool
action func() yyactionreturn
}
type yyactionreturn struct {
userReturn int
returnType yyactionreturntype
}
type yyactionreturntype int
const (
yyRT_FALLTHROUGH yyactionreturntype = iota
yyRT_USER_RETURN
yyRT_REJECT
)
var yydata string = ""
var yyorig string
var yyorigidx int
var yytext string = ""
var yytextrepl bool = true
func yymore() {
yytextrepl = false
}
func yyBEGIN(state yystartcondition) {
YY_START = state
}
func yyECHO() {
yyout.Write([]byte(yytext))
}
func yyREJECT() {
panic("yyREJECT")
}
var yylessed int
func yyless(n int) {
yylessed = len(yytext) - n
}
func unput(c uint8) {
yyorig = yyorig[:yyorigidx] + string(c) + yyorig[yyorigidx:]
yydata = yydata[:len(yytext)-yylessed] + string(c) + yydata[len(yytext)-yylessed:]
}
func input() int {
if len(yyorig) <= yyorigidx {
return EOF
}
c := yyorig[yyorigidx]
yyorig = yyorig[:yyorigidx] + yyorig[yyorigidx+1:]
yydata = yydata[:len(yytext)-yylessed] + yydata[len(yytext)-yylessed+1:]
return int(c)
}
var EOF int = -1
type yystartcondition int
var INITIAL yystartcondition = 0
var YY_START yystartcondition = INITIAL
type yylexMatch struct {
index int
matchFunc func() yyactionreturn
sortLen int
advLen int
}
type yylexMatchList []yylexMatch
func (ml yylexMatchList) Len() int {
return len(ml)
}
func (ml yylexMatchList) Less(i, j int) bool {
return ml[i].sortLen > ml[j].sortLen && ml[i].index > ml[j].index
}
func (ml yylexMatchList) Swap(i, j int) {
ml[i], ml[j] = ml[j], ml[i]
}
func yylex() int {
reader := bufio.NewReader(yyin)
for {
line, err := reader.ReadString('\n')
if len(line) == 0 && err == io.EOF {
break
}
yydata += line
}
yyorig = yydata
yyorigidx = 0
yyactioninline(yyBEGIN)
for len(yydata) > 0 {
matches := yylexMatchList(make([]yylexMatch, 0, 6))
excl := yystartconditionexclmap[YY_START]
for i, v := range yyrules {
sol := yyorigidx == 0 || yyorig[yyorigidx-1] == '\n'
if v.sol && !sol {
continue
}
// Check start conditions.
ok := false
// YY_START or '*' must feature in v.startConds
for _, c := range v.startConds {
if c == YY_START || c == -1 {
ok = true
break
}
}
if !excl {
// If v.startConds is empty, this is also acceptable.
if len(v.startConds) == 0 {
ok = true
}
}
if !ok {
continue
}
idxs := v.regexp.FindStringIndex(yydata)
if idxs != nil && idxs[0] == 0 {
// Check the trailing context, if any.
checksOk := true
sortLen := idxs[1]
advLen := idxs[1]
if v.trailing != nil {
tridxs := v.trailing.FindStringIndex(yydata[idxs[1]:])
if tridxs == nil || tridxs[0] != 0 {
checksOk = false
} else {
sortLen += tridxs[1]
}
}
if checksOk {
matches = append(matches, yylexMatch{i, v.action, sortLen, advLen})
}
}
}
if yytextrepl {
yytext = ""
}
sort.Sort(matches)
tryMatch:
if len(matches) == 0 {
yytext += yydata[:1]
yydata = yydata[1:]
yyorigidx += 1
yyout.Write([]byte(yytext))
} else {
m := matches[0]
yytext += yydata[:m.advLen]
yyorigidx += m.advLen
yytextrepl, yylessed = true, 0
ar := m.matchFunc()
if ar.returnType != yyRT_REJECT {
yydata = yydata[m.advLen-yylessed:]
yyorigidx -= yylessed
}
switch ar.returnType {
case yyRT_FALLTHROUGH:
// Do nothing.
case yyRT_USER_RETURN:
return ar.userReturn
case yyRT_REJECT:
matches = matches[1:]
yytext = yytext[:len(yytext)-m.advLen]
yyorigidx -= m.advLen
goto tryMatch
}
}
}
return 0
}
var S_GLOBAL yystartcondition = 1024
var S_JOB yystartcondition = 1026
var S_GLOBAL_LABELS yystartcondition = 1025
var S_TARGET_LABELS yystartcondition = 1028
var S_COMMENTS yystartcondition = 1029
var S_TARGETS yystartcondition = 1027
var yystartconditionexclmap = map[yystartcondition]bool{S_GLOBAL: false, S_JOB: false, S_GLOBAL_LABELS: false, S_TARGET_LABELS: false, S_COMMENTS: true, S_TARGETS: false, }
var yyrules []yyrule = []yyrule{{regexp.MustCompile("[^\\n]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yypos++
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\n"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyline++
yypos = 1
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("/\\*"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_COMMENTS)
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\*/"), nil, []yystartcondition{S_COMMENTS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(0)
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[^\\n]"), nil, []yystartcondition{S_COMMENTS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\/\\/[^\\r\\n]*\\n"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("global"), nil, []yystartcondition{0, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_GLOBAL)
return yyactionreturn{GLOBAL, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("labels"), nil, []yystartcondition{S_GLOBAL, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_GLOBAL_LABELS)
return yyactionreturn{LABELS, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("rule_files"), nil, []yystartcondition{S_GLOBAL, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
return yyactionreturn{RULE_FILES, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\}"), nil, []yystartcondition{S_GLOBAL_LABELS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_GLOBAL)
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\}"), nil, []yystartcondition{S_GLOBAL, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(0)
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("job"), nil, []yystartcondition{0, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_JOB)
return yyactionreturn{JOB, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("targets"), nil, []yystartcondition{S_JOB, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_TARGETS)
return yyactionreturn{TARGETS, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("endpoints"), nil, []yystartcondition{S_TARGETS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
return yyactionreturn{ENDPOINTS, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("labels"), nil, []yystartcondition{S_TARGETS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_TARGET_LABELS)
return yyactionreturn{LABELS, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\}"), nil, []yystartcondition{S_TARGET_LABELS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_TARGETS)
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\}"), nil, []yystartcondition{S_TARGETS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_JOB)
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\}"), nil, []yystartcondition{S_JOB, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(0)
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("([a-zA-Z_])(([a-zA-Z_])|([0-9]))+"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext
return yyactionreturn{IDENTIFIER, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\\"(\\\\[^\\n]|[^\\\\\"])*\\\""), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext[1 : len(yytext)-1]
return yyactionreturn{STRING, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\'(\\\\[^\\n]|[^\\\\'])*\\'"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext[1 : len(yytext)-1]
return yyactionreturn{STRING, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[{}\\[\\]()=,]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
return yyactionreturn{int(yytext[0]), yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[^\\n]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\n"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, }
func yyactioninline(BEGIN func(yystartcondition)) {}

config/load.go (new file, 61 lines)

@@ -0,0 +1,61 @@
package config
import (
"errors"
"fmt"
"io"
"os"
"strings"
)
// NOTE: This parser is non-reentrant due to its dependence on global state.
// GoLex sadly needs these global variables for storing temporary token/parsing information.
var yylval *yySymType // For storing extra token information, like the contents of a string.
var yyline int // Line number within the current file or buffer.
var yypos int // Character position within the current line.
var parsedConfig = New() // Temporary variable for storing the parsed configuration.
type ConfigLexer struct {
errors []string
}
func (lexer *ConfigLexer) Lex(lval *yySymType) int {
yylval = lval
token_type := yylex()
return token_type
}
func (lexer *ConfigLexer) Error(errorStr string) {
err := fmt.Sprintf("Error reading config at line %v, char %v: %v", yyline, yypos, errorStr)
lexer.errors = append(lexer.errors, err)
}
func LoadFromReader(configReader io.Reader) (*Config, error) {
yyin = configReader
yypos = 1
yyline = 1
lexer := &ConfigLexer{}
yyParse(lexer)
if len(lexer.errors) > 0 {
err := errors.New(strings.Join(lexer.errors, "\n"))
return &Config{}, err
}
return parsedConfig, nil
}
func LoadFromString(configString string) (*Config, error) {
configReader := strings.NewReader(configString)
return LoadFromReader(configReader)
}
func LoadFromFile(fileName string) (*Config, error) {
configReader, err := os.Open(fileName)
if err != nil {
return &Config{}, err
}
return LoadFromReader(configReader)
}
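
A minimal usage sketch (not part of the commit) for the loader above; the embedded configuration text is illustrative.

package main

import (
	"fmt"
	"github.com/matttproud/prometheus/config"
	"log"
)

func main() {
	conf, err := config.LoadFromString(`
global {
	scrape_interval = "30s"
}
`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(conf.Global.ScrapeInterval) // prints 30s (a time.Duration)
}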

config/parser.y (new file, 102 lines)

@@ -0,0 +1,102 @@
%{
package config
import "fmt"
import "github.com/matttproud/prometheus/model"
%}
%union {
num model.SampleValue
str string
stringSlice []string
labelSet model.LabelSet
}
%token <str> IDENTIFIER STRING
%token GLOBAL JOB
%token RULE_FILES
%token LABELS TARGETS ENDPOINTS
%type <stringSlice> string_array string_list rule_files_stat endpoints_stat
%type <labelSet> labels_stat label_assign label_assign_list
%start config
%%
config : /* empty */
| config config_stanza
;
config_stanza : GLOBAL '{' global_stat_list '}'
| JOB '{' job_stat_list '}'
{ PopJob() }
;
global_stat_list : /* empty */
| global_stat_list global_stat
;
global_stat : IDENTIFIER '=' STRING
{ parsedConfig.Global.SetOption($1, $3) }
| labels_stat
{ parsedConfig.Global.SetLabels($1) }
| rule_files_stat
{ parsedConfig.Global.AddRuleFiles($1) }
;
labels_stat : LABELS '{' label_assign_list '}'
{ $$ = $3 }
| LABELS '{' '}'
{ $$ = model.LabelSet{} }
;
label_assign_list : label_assign
{ $$ = $1 }
| label_assign_list ',' label_assign
{ for k, v := range $3 { $$[k] = v } }
;
label_assign : IDENTIFIER '=' STRING
{ $$ = model.LabelSet{ model.LabelName($1): model.LabelValue($3) } }
;
rule_files_stat : RULE_FILES '=' string_array
{ $$ = $3 }
;
job_stat_list : /* empty */
| job_stat_list job_stat
;
job_stat : IDENTIFIER '=' STRING
{ PushJobOption($1, $3) }
| TARGETS '{' targets_stat_list '}'
{ PushJobTargets() }
;
targets_stat_list : /* empty */
| targets_stat_list targets_stat
;
targets_stat : endpoints_stat
{ PushTargetEndpoints($1) }
| labels_stat
{ PushTargetLabels($1) }
;
endpoints_stat : ENDPOINTS '=' string_array
{ $$ = $3 }
;
string_array : '[' string_list ']'
{ $$ = $2 }
| '[' ']'
{ $$ = []string{} }
;
string_list : STRING
{ $$ = []string{$1} }
| string_list ',' STRING
{ $$ = append($$, $3) }
;
%%
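
For orientation (not part of the commit), a small configuration in the language this grammar defines; all values are illustrative:

global {
	scrape_interval = "30s"
	evaluation_interval = "30s"
	labels {
		zone = "a"
	}
	rule_files = ["prometheus.rules"]
}

job {
	name = "example-job"
	scrape_interval = "15s"
	targets {
		endpoints = ["http://localhost:8080/metrics.json"]
		labels {
			group = "canary"
		}
	}
}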

config/parser.y.go (new file, 420 lines)

@@ -0,0 +1,420 @@
//line parser.y:2
package config
import "fmt"
import "github.com/matttproud/prometheus/model"
//line parser.y:8
type yySymType struct {
yys int
num model.SampleValue
str string
stringSlice []string
labelSet model.LabelSet
}
const IDENTIFIER = 57346
const STRING = 57347
const GLOBAL = 57348
const JOB = 57349
const RULE_FILES = 57350
const LABELS = 57351
const TARGETS = 57352
const ENDPOINTS = 57353
var yyToknames = []string{
"IDENTIFIER",
"STRING",
"GLOBAL",
"JOB",
"RULE_FILES",
"LABELS",
"TARGETS",
"ENDPOINTS",
}
var yyStatenames = []string{}
const yyEofCode = 1
const yyErrCode = 2
const yyMaxDepth = 200
//line parser.y:102
//line yacctab:1
var yyExca = []int{
-1, 1,
1, -1,
-2, 0,
}
const yyNprod = 29
const yyPrivate = 57344
var yyTokenNames []string
var yyStates []string
const yyLast = 53
var yyAct = []int{
30, 28, 12, 48, 39, 47, 31, 34, 11, 35,
49, 36, 15, 14, 29, 18, 38, 9, 14, 23,
44, 19, 40, 27, 16, 50, 22, 20, 24, 21,
6, 5, 3, 4, 46, 32, 43, 45, 25, 29,
41, 33, 17, 10, 8, 7, 2, 1, 26, 42,
51, 13, 37,
}
var yyPact = []int{
-1000, 26, -1000, 19, 18, -1000, -1000, 4, 11, -1000,
-1000, 13, -1000, -1000, 17, 12, -1000, -1000, 5, 16,
33, 10, -10, 30, -1000, -1000, -6, -1000, -1000, -3,
-1000, -1, -1000, 9, -1000, 35, 29, -12, -1000, -1000,
-1000, -1000, -1000, -1000, -4, -1000, -1000, -1000, 20, -10,
-1000, -1000,
}
var yyPgo = []int{
0, 0, 52, 51, 49, 2, 1, 48, 47, 46,
45, 44, 43, 42, 41, 40,
}
var yyR1 = []int{
0, 8, 8, 9, 9, 10, 10, 12, 12, 12,
5, 5, 7, 7, 6, 3, 11, 11, 13, 13,
14, 14, 15, 15, 4, 1, 1, 2, 2,
}
var yyR2 = []int{
0, 0, 2, 4, 4, 0, 2, 3, 1, 1,
4, 3, 1, 3, 3, 3, 0, 2, 3, 4,
0, 2, 1, 1, 3, 3, 2, 1, 3,
}
var yyChk = []int{
-1000, -8, -9, 6, 7, 12, 12, -10, -11, 13,
-12, 4, -5, -3, 9, 8, 13, -13, 4, 10,
14, 12, 14, 14, 12, 5, -7, 13, -6, 4,
-1, 16, 5, -14, 13, 15, 14, -2, 17, 5,
13, -15, -4, -5, 11, -6, 5, 17, 15, 14,
5, -1,
}
var yyDef = []int{
1, -2, 2, 0, 0, 5, 16, 0, 0, 3,
6, 0, 8, 9, 0, 0, 4, 17, 0, 0,
0, 0, 0, 0, 20, 7, 0, 11, 12, 0,
15, 0, 18, 0, 10, 0, 0, 0, 26, 27,
19, 21, 22, 23, 0, 13, 14, 25, 0, 0,
28, 24,
}
var yyTok1 = []int{
1, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 15, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 14, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 16, 3, 17, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 12, 3, 13,
}
var yyTok2 = []int{
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
}
var yyTok3 = []int{
0,
}
//line yaccpar:1
/* parser for yacc output */
var yyDebug = 0
type yyLexer interface {
Lex(lval *yySymType) int
Error(s string)
}
const yyFlag = -1000
func yyTokname(c int) string {
if c > 0 && c <= len(yyToknames) {
if yyToknames[c-1] != "" {
return yyToknames[c-1]
}
}
return fmt.Sprintf("tok-%v", c)
}
func yyStatname(s int) string {
if s >= 0 && s < len(yyStatenames) {
if yyStatenames[s] != "" {
return yyStatenames[s]
}
}
return fmt.Sprintf("state-%v", s)
}
func yylex1(lex yyLexer, lval *yySymType) int {
c := 0
char := lex.Lex(lval)
if char <= 0 {
c = yyTok1[0]
goto out
}
if char < len(yyTok1) {
c = yyTok1[char]
goto out
}
if char >= yyPrivate {
if char < yyPrivate+len(yyTok2) {
c = yyTok2[char-yyPrivate]
goto out
}
}
for i := 0; i < len(yyTok3); i += 2 {
c = yyTok3[i+0]
if c == char {
c = yyTok3[i+1]
goto out
}
}
out:
if c == 0 {
c = yyTok2[1] /* unknown char */
}
if yyDebug >= 3 {
fmt.Printf("lex %U %s\n", uint(char), yyTokname(c))
}
return c
}
func yyParse(yylex yyLexer) int {
var yyn int
var yylval yySymType
var yyVAL yySymType
yyS := make([]yySymType, yyMaxDepth)
Nerrs := 0 /* number of errors */
Errflag := 0 /* error recovery flag */
yystate := 0
yychar := -1
yyp := -1
goto yystack
ret0:
return 0
ret1:
return 1
yystack:
/* put a state and value onto the stack */
if yyDebug >= 4 {
fmt.Printf("char %v in %v\n", yyTokname(yychar), yyStatname(yystate))
}
yyp++
if yyp >= len(yyS) {
nyys := make([]yySymType, len(yyS)*2)
copy(nyys, yyS)
yyS = nyys
}
yyS[yyp] = yyVAL
yyS[yyp].yys = yystate
yynewstate:
yyn = yyPact[yystate]
if yyn <= yyFlag {
goto yydefault /* simple state */
}
if yychar < 0 {
yychar = yylex1(yylex, &yylval)
}
yyn += yychar
if yyn < 0 || yyn >= yyLast {
goto yydefault
}
yyn = yyAct[yyn]
if yyChk[yyn] == yychar { /* valid shift */
yychar = -1
yyVAL = yylval
yystate = yyn
if Errflag > 0 {
Errflag--
}
goto yystack
}
yydefault:
/* default state action */
yyn = yyDef[yystate]
if yyn == -2 {
if yychar < 0 {
yychar = yylex1(yylex, &yylval)
}
/* look through exception table */
xi := 0
for {
if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate {
break
}
xi += 2
}
for xi += 2; ; xi += 2 {
yyn = yyExca[xi+0]
if yyn < 0 || yyn == yychar {
break
}
}
yyn = yyExca[xi+1]
if yyn < 0 {
goto ret0
}
}
if yyn == 0 {
/* error ... attempt to resume parsing */
switch Errflag {
case 0: /* brand new error */
yylex.Error("syntax error")
Nerrs++
if yyDebug >= 1 {
fmt.Printf("%s", yyStatname(yystate))
fmt.Printf("saw %s\n", yyTokname(yychar))
}
fallthrough
case 1, 2: /* incompletely recovered error ... try again */
Errflag = 3
/* find a state where "error" is a legal shift action */
for yyp >= 0 {
yyn = yyPact[yyS[yyp].yys] + yyErrCode
if yyn >= 0 && yyn < yyLast {
yystate = yyAct[yyn] /* simulate a shift of "error" */
if yyChk[yystate] == yyErrCode {
goto yystack
}
}
/* the current p has no shift on "error", pop stack */
if yyDebug >= 2 {
fmt.Printf("error recovery pops state %d\n", yyS[yyp].yys)
}
yyp--
}
/* there is no state on the stack with an error shift ... abort */
goto ret1
case 3: /* no shift yet; clobber input char */
if yyDebug >= 2 {
fmt.Printf("error recovery discards %s\n", yyTokname(yychar))
}
if yychar == yyEofCode {
goto ret1
}
yychar = -1
goto yynewstate /* try again in the same state */
}
}
/* reduction by production yyn */
if yyDebug >= 2 {
fmt.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate))
}
yynt := yyn
yypt := yyp
_ = yypt // guard against "declared and not used"
yyp -= yyR2[yyn]
yyVAL = yyS[yyp+1]
/* consult goto table to find next state */
yyn = yyR1[yyn]
yyg := yyPgo[yyn]
yyj := yyg + yyS[yyp].yys + 1
if yyj >= yyLast {
yystate = yyAct[yyg]
} else {
yystate = yyAct[yyj]
if yyChk[yystate] != -yyn {
yystate = yyAct[yyg]
}
}
// dummy call; replaced with literal code
switch yynt {
case 4:
//line parser.y:32
{ PopJob() }
case 7:
//line parser.y:40
{ parsedConfig.Global.SetOption(yyS[yypt-2].str, yyS[yypt-0].str) }
case 8:
//line parser.y:42
{ parsedConfig.Global.SetLabels(yyS[yypt-0].labelSet) }
case 9:
//line parser.y:44
{ parsedConfig.Global.AddRuleFiles(yyS[yypt-0].stringSlice) }
case 10:
//line parser.y:48
{ yyVAL.labelSet = yyS[yypt-1].labelSet }
case 11:
//line parser.y:50
{ yyVAL.labelSet = model.LabelSet{} }
case 12:
//line parser.y:54
{ yyVAL.labelSet = yyS[yypt-0].labelSet }
case 13:
//line parser.y:56
{ for k, v := range yyS[yypt-0].labelSet { yyVAL.labelSet[k] = v } }
case 14:
//line parser.y:60
{ yyVAL.labelSet = model.LabelSet{ model.LabelName(yyS[yypt-2].str): model.LabelValue(yyS[yypt-0].str) } }
case 15:
//line parser.y:64
{ yyVAL.stringSlice = yyS[yypt-0].stringSlice }
case 18:
//line parser.y:72
{ PushJobOption(yyS[yypt-2].str, yyS[yypt-0].str) }
case 19:
//line parser.y:74
{ PushJobTargets() }
case 22:
//line parser.y:82
{ PushTargetEndpoints(yyS[yypt-0].stringSlice) }
case 23:
//line parser.y:84
{ PushTargetLabels(yyS[yypt-0].labelSet) }
case 24:
//line parser.y:88
{ yyVAL.stringSlice = yyS[yypt-0].stringSlice }
case 25:
//line parser.y:92
{ yyVAL.stringSlice = yyS[yypt-1].stringSlice }
case 26:
//line parser.y:94
{ yyVAL.stringSlice = []string{} }
case 27:
//line parser.y:98
{ yyVAL.stringSlice = []string{yyS[yypt-0].str} }
case 28:
//line parser.y:100
{ yyVAL.stringSlice = append(yyVAL.stringSlice, yyS[yypt-0].str) }
}
goto yystack /* stack new state and value */
}

config/printer.go (new file, 66 lines)

@@ -0,0 +1,66 @@
package config
import (
"fmt"
"github.com/matttproud/prometheus/model"
"strings"
)
func indentStr(indent int, str string, v ...interface{}) string {
indentStr := ""
for i := 0; i < indent; i++ {
indentStr += "\t"
}
return fmt.Sprintf(indentStr+str, v...)
}
func (config *Config) ToString(indent int) string {
global := config.Global.ToString(indent)
jobs := []string{}
for _, job := range config.Jobs {
jobs = append(jobs, job.ToString(indent))
}
return indentStr(indent, "%v\n%v\n", global, strings.Join(jobs, "\n"))
}
func labelsToString(indent int, labels model.LabelSet) string {
str := indentStr(indent, "labels {\n")
for label, value := range labels {
str += indentStr(indent+1, "%v = \"%v\",\n", label, value)
}
str += indentStr(indent, "}\n")
return str
}
func (global *GlobalConfig) ToString(indent int) string {
str := indentStr(indent, "global {\n")
str += indentStr(indent+1, "scrape_interval = \"%vs\"\n", global.ScrapeInterval)
str += indentStr(indent+1, "evaluation_interval = \"%vs\"\n", global.EvaluationInterval)
str += labelsToString(indent+1, global.Labels)
str += indentStr(indent, "}\n")
str += indentStr(indent+1, "rule_files = [\n")
for _, ruleFile := range global.RuleFiles {
str += indentStr(indent+2, "\"%v\",\n", ruleFile)
}
str += indentStr(indent+1, "]\n")
return str
}
func (job *JobConfig) ToString(indent int) string {
str := indentStr(indent, "job {\n")
str += indentStr(indent+1, "name = \"%v\"\n", job.Name)
str += indentStr(indent+1, "scrape_interval = \"%vs\"\n", job.ScrapeInterval)
for _, targets := range job.Targets {
str += indentStr(indent+1, "targets {\n")
str += indentStr(indent+2, "endpoints = [\n")
for _, endpoint := range targets.Endpoints {
str += indentStr(indent+3, "\"%v\",\n", endpoint)
}
str += indentStr(indent+2, "]\n")
str += labelsToString(indent+2, targets.Labels)
str += indentStr(indent+1, "}\n")
}
str += indentStr(indent, "}\n")
return str
}

main.go (51 lines changed)

@@ -14,18 +14,28 @@
 package main
 
 import (
+	"fmt"
 	"github.com/matttproud/golang_instrumentation"
+	"github.com/matttproud/prometheus/config"
 	"github.com/matttproud/prometheus/retrieval"
+	"github.com/matttproud/prometheus/rules"
+	"github.com/matttproud/prometheus/rules/ast"
 	"github.com/matttproud/prometheus/storage/metric/leveldb"
 	"log"
 	"net/http"
 	"os"
 	"os/signal"
-	"time"
 )
 
 func main() {
-	m, err := leveldb.NewLevelDBMetricPersistence("/tmp/metrics")
+	configFile := "prometheus.conf"
+	conf, err := config.LoadFromFile(configFile)
+	if err != nil {
+		log.Fatal(fmt.Sprintf("Error loading configuration from %s: %v",
+			configFile, err))
+	}
+
+	persistence, err := leveldb.NewLevelDBMetricPersistence("/tmp/metrics")
 	if err != nil {
 		log.Print(err)
 		os.Exit(1)
@@ -35,23 +45,26 @@ func main() {
 		notifier := make(chan os.Signal)
 		signal.Notify(notifier, os.Interrupt)
 		<-notifier
-		m.Close()
+		persistence.Close()
 		os.Exit(0)
 	}()
 
-	defer m.Close()
+	defer persistence.Close()
 
-	results := make(chan retrieval.Result, 4096)
-
-	t := &retrieval.Target{
-		Address:  "http://localhost:8080/metrics.json",
-		Deadline: time.Second * 5,
-		Interval: time.Second * 3,
-	}
-
-	manager := retrieval.NewTargetManager(results, 1)
-	manager.Add(t)
+	scrapeResults := make(chan retrieval.Result, 4096)
+	targetManager := retrieval.NewTargetManager(scrapeResults, 1)
+	targetManager.AddTargetsFromConfig(conf)
+
+	ruleResults := make(chan *rules.Result, 4096)
+	ast.SetPersistence(persistence)
+	ruleManager := rules.NewRuleManager(ruleResults, conf.Global.EvaluationInterval)
+	err = ruleManager.AddRulesFromConfig(conf)
+	if err != nil {
+		log.Fatal(fmt.Sprintf("Error loading rule files: %v", err))
+	}
 
 	go func() {
 		exporter := registry.DefaultRegistry.YieldExporter()
 		http.Handle("/metrics.json", exporter)
@@ -59,9 +72,17 @@ func main() {
 	}()
 
 	for {
-		result := <-results
-		for _, s := range result.Samples {
-			m.AppendSample(&s)
+		select {
+		case scrapeResult := <-scrapeResults:
+			fmt.Printf("scrapeResult -> %s\n", scrapeResult)
+			for _, sample := range scrapeResult.Samples {
+				persistence.AppendSample(&sample)
+			}
+		case ruleResult := <-ruleResults:
+			fmt.Printf("ruleResult -> %s\n", ruleResult)
+			for _, sample := range ruleResult.Samples {
+				persistence.AppendSample(sample)
+			}
 		}
 	}
 }


@@ -42,8 +42,9 @@ type Target struct {
 	unreachableCount int
 	state            TargetState
 
 	Address  string
 	Deadline time.Duration
+	BaseLabels model.LabelSet
 
 	// XXX: Move this to a field with the target manager initialization instead of here.
 	Interval time.Duration

@@ -15,6 +15,8 @@ package retrieval
 import (
 	"container/heap"
+	"github.com/matttproud/prometheus/config"
+	"github.com/matttproud/prometheus/model"
 	"log"
 	"time"
 )
@@ -24,6 +26,7 @@ type TargetManager interface {
 	release()
 	Add(t *Target)
 	Remove(t *Target)
+	AddTargetsFromConfig(config *config.Config)
 }
 
 type targetManager struct {
@@ -64,3 +67,31 @@ func (m targetManager) Add(t *Target) {
 func (m targetManager) Remove(t *Target) {
 	panic("not implemented")
 }
+
+func (m targetManager) AddTargetsFromConfig(config *config.Config) {
+	for _, job := range config.Jobs {
+		for _, configTargets := range job.Targets {
+			baseLabels := model.LabelSet{
+				model.LabelName("job"): model.LabelValue(job.Name),
+			}
+			for label, value := range configTargets.Labels {
+				baseLabels[label] = value
+			}
+
+			interval := job.ScrapeInterval
+			if interval == 0 {
+				interval = config.Global.ScrapeInterval
+			}
+
+			for _, endpoint := range configTargets.Endpoints {
+				target := &Target{
+					Address:    endpoint,
+					BaseLabels: baseLabels,
+					Deadline:   time.Second * 5,
+					Interval:   interval,
+				}
+				m.Add(target)
+			}
+		}
+	}
+}
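
As a worked example of the mapping above (values illustrative, not part of the commit): a job named example-job with two endpoints and a target label group = "canary" yields two Targets, each with BaseLabels {job="example-job", group="canary"}, an Interval equal to the job's scrape_interval (falling back to the global one when the job does not set it), and a fixed 5-second Deadline.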

rules/Makefile (new file, 14 lines)

@@ -0,0 +1,14 @@
all: parser.y.go lexer.l.go
parser.y.go: parser.y
go tool yacc -o parser.y.go -v "" parser.y
lexer.l.go: parser.y.go lexer.l
golex lexer.l
test: all
go test -i github.com/matttproud/prometheus/rules
go test github.com/matttproud/prometheus/rules
clean:
rm lexer.l.go parser.y.go

rules/ast/ast.go (new file, 588 lines)

@@ -0,0 +1,588 @@
package ast
import (
"errors"
"github.com/matttproud/prometheus/model"
"log"
"math"
"strings"
"time"
)
// ----------------------------------------------------------------------------
// Raw data value types.
type Vector []*model.Sample
type Matrix []*model.SampleSet
type groupedAggregation struct {
labels model.Metric
value model.SampleValue
groupCount int
}
type labelValuePair struct {
label model.LabelName
value model.LabelValue
}
// ----------------------------------------------------------------------------
// Enums.
// Rule language expression types.
type ExprType int
const (
SCALAR ExprType = iota
VECTOR
MATRIX
STRING
)
// Binary operator types.
type BinOpType int
const (
ADD BinOpType = iota
SUB
MUL
DIV
MOD
NE
EQ
GT
LT
GE
LE
AND
OR
)
// Aggregation types.
type AggrType int
const (
SUM AggrType = iota
AVG
MIN
MAX
)
// ----------------------------------------------------------------------------
// Interfaces.
// All node interfaces include the Node interface.
type Node interface {
Type() ExprType
NodeTreeToDotGraph() string
}
// All node types implement one of the following interfaces. The name of the
// interface represents the type returned to the parent node.
type ScalarNode interface {
Node
Eval(timestamp *time.Time) model.SampleValue
}
type VectorNode interface {
Node
Eval(timestamp *time.Time) Vector
}
type MatrixNode interface {
Node
Eval(timestamp *time.Time) Matrix
EvalBoundaries(timestamp *time.Time) Matrix
}
type StringNode interface {
Node
Eval(timestamp *time.Time) string
}
// ----------------------------------------------------------------------------
// ScalarNode types.
type (
// A numeric literal.
ScalarLiteral struct {
value model.SampleValue
}
// A function of numeric return type.
ScalarFunctionCall struct {
function *Function
args []Node
}
// An arithmetic expression of numeric type.
ScalarArithExpr struct {
opType BinOpType
lhs ScalarNode
rhs ScalarNode
}
)
// ----------------------------------------------------------------------------
// VectorNode types.
type (
// Vector literal, i.e. metric name plus labelset.
VectorLiteral struct {
labels model.LabelSet
}
// A function of vector return type.
VectorFunctionCall struct {
function *Function
args []Node
}
// A vector aggregation with vector return type.
VectorAggregation struct {
aggrType AggrType
groupBy []model.LabelName
vector VectorNode
}
// An arithmetic expression of vector type.
VectorArithExpr struct {
opType BinOpType
lhs VectorNode
rhs Node
}
)
// ----------------------------------------------------------------------------
// MatrixNode types.
type (
// Matrix literal, i.e. metric name plus labelset and timerange.
MatrixLiteral struct {
labels model.LabelSet
interval time.Duration
}
)
// ----------------------------------------------------------------------------
// StringNode types.
type (
// String literal.
StringLiteral struct {
str string
}
// A function of string return type.
StringFunctionCall struct {
function *Function
args []Node
}
)
// ----------------------------------------------------------------------------
// Implementations.
func (node ScalarLiteral) Type() ExprType { return SCALAR }
func (node ScalarFunctionCall) Type() ExprType { return SCALAR }
func (node ScalarArithExpr) Type() ExprType { return SCALAR }
func (node VectorLiteral) Type() ExprType { return VECTOR }
func (node VectorFunctionCall) Type() ExprType { return VECTOR }
func (node VectorAggregation) Type() ExprType { return VECTOR }
func (node VectorArithExpr) Type() ExprType { return VECTOR }
func (node MatrixLiteral) Type() ExprType { return MATRIX }
func (node StringLiteral) Type() ExprType { return STRING }
func (node StringFunctionCall) Type() ExprType { return STRING }
func (node *ScalarLiteral) Eval(timestamp *time.Time) model.SampleValue {
return node.value
}
func (node *ScalarArithExpr) Eval(timestamp *time.Time) model.SampleValue {
lhs := node.lhs.Eval(timestamp)
rhs := node.rhs.Eval(timestamp)
return evalScalarBinop(node.opType, lhs, rhs)
}
func (node *ScalarFunctionCall) Eval(timestamp *time.Time) model.SampleValue {
return node.function.callFn(timestamp, node.args).(model.SampleValue)
}
func (node *VectorAggregation) labelsToGroupingKey(labels model.Metric) string {
keyParts := []string{}
for _, keyLabel := range node.groupBy {
keyParts = append(keyParts, string(labels[keyLabel]))
}
return strings.Join(keyParts, ",") // TODO not safe when label value contains comma.
}
func labelIntersection(metric1, metric2 model.Metric) model.Metric {
intersection := model.Metric{}
for label, value := range metric1 {
if metric2[label] == value {
intersection[label] = value
}
}
return intersection
}
func (node *VectorAggregation) groupedAggregationsToVector(aggregations map[string]*groupedAggregation, timestamp *time.Time) Vector {
vector := Vector{}
for _, aggregation := range aggregations {
if node.aggrType == AVG {
aggregation.value = aggregation.value / model.SampleValue(aggregation.groupCount)
}
sample := &model.Sample{
Metric: aggregation.labels,
Value: aggregation.value,
Timestamp: *timestamp,
}
vector = append(vector, sample)
}
return vector
}
func (node *VectorAggregation) Eval(timestamp *time.Time) Vector {
vector := node.vector.Eval(timestamp)
result := map[string]*groupedAggregation{}
for _, sample := range vector {
groupingKey := node.labelsToGroupingKey(sample.Metric)
if groupedResult, ok := result[groupingKey]; ok {
groupedResult.labels = labelIntersection(groupedResult.labels, sample.Metric)
switch node.aggrType {
case SUM:
groupedResult.value += sample.Value
case AVG:
groupedResult.value += sample.Value
groupedResult.groupCount++
case MAX:
if groupedResult.value < sample.Value {
groupedResult.value = sample.Value
}
case MIN:
if groupedResult.value > sample.Value {
groupedResult.value = sample.Value
}
}
} else {
result[groupingKey] = &groupedAggregation{
labels: sample.Metric,
value: sample.Value,
groupCount: 1,
}
}
}
return node.groupedAggregationsToVector(result, timestamp)
}
func (node *VectorLiteral) Eval(timestamp *time.Time) Vector {
values, err := persistence.GetValueAtTime(node.labels, timestamp, &stalenessPolicy)
if err != nil {
log.Printf("Unable to get vector values")
return Vector{}
}
return values
}
func (node *VectorFunctionCall) Eval(timestamp *time.Time) Vector {
return node.function.callFn(timestamp, node.args).(Vector)
}
func evalScalarBinop(opType BinOpType,
lhs model.SampleValue,
rhs model.SampleValue) model.SampleValue {
switch opType {
case ADD:
return lhs + rhs
case SUB:
return lhs - rhs
case MUL:
return lhs * rhs
case DIV:
if rhs != 0 {
return lhs / rhs
} else {
return model.SampleValue(math.Inf(int(rhs)))
}
case MOD:
if rhs != 0 {
return model.SampleValue(int(lhs) % int(rhs))
} else {
return model.SampleValue(math.Inf(int(rhs)))
}
case EQ:
if lhs == rhs {
return 1
} else {
return 0
}
case NE:
if lhs != rhs {
return 1
} else {
return 0
}
case GT:
if lhs > rhs {
return 1
} else {
return 0
}
case LT:
if lhs < rhs {
return 1
} else {
return 0
}
case GE:
if lhs >= rhs {
return 1
} else {
return 0
}
case LE:
if lhs <= rhs {
return 1
} else {
return 0
}
}
panic("Not all enum values enumerated in switch")
}
func evalVectorBinop(opType BinOpType,
lhs model.SampleValue,
rhs model.SampleValue) (model.SampleValue, bool) {
switch opType {
case ADD:
return lhs + rhs, true
case SUB:
return lhs - rhs, true
case MUL:
return lhs * rhs, true
case DIV:
if rhs != 0 {
return lhs / rhs, true
} else {
return model.SampleValue(math.Inf(int(rhs))), true
}
case MOD:
if rhs != 0 {
return model.SampleValue(int(lhs) % int(rhs)), true
} else {
return model.SampleValue(math.Inf(int(rhs))), true
}
case EQ:
if lhs == rhs {
return lhs, true
} else {
return 0, false
}
case NE:
if lhs != rhs {
return lhs, true
} else {
return 0, false
}
case GT:
if lhs > rhs {
return lhs, true
} else {
return 0, false
}
case LT:
if lhs < rhs {
return lhs, true
} else {
return 0, false
}
case GE:
if lhs >= rhs {
return lhs, true
} else {
return 0, false
}
case LE:
if lhs <= rhs {
return lhs, true
} else {
return 0, false
}
case AND:
return lhs, true
}
panic("Not all enum values enumerated in switch")
}
func labelsEqual(labels1, labels2 model.Metric) bool {
if len(labels1) != len(labels2) {
return false
}
for label, value := range labels1 {
if labels2[label] != value {
return false
}
}
return true
}
func (node *VectorArithExpr) Eval(timestamp *time.Time) Vector {
lhs := node.lhs.Eval(timestamp)
result := Vector{}
if node.rhs.Type() == SCALAR {
rhs := node.rhs.(ScalarNode).Eval(timestamp)
for _, lhsSample := range lhs {
value, keep := evalVectorBinop(node.opType, lhsSample.Value, rhs)
if keep {
lhsSample.Value = value
result = append(result, lhsSample)
}
}
return result
} else if node.rhs.Type() == VECTOR {
rhs := node.rhs.(VectorNode).Eval(timestamp)
for _, lhsSample := range lhs {
for _, rhsSample := range rhs {
if labelsEqual(lhsSample.Metric, rhsSample.Metric) {
value, keep := evalVectorBinop(node.opType, lhsSample.Value, rhsSample.Value)
if keep {
lhsSample.Value = value
result = append(result, lhsSample)
}
}
}
}
return result
}
panic("Invalid vector arithmetic expression operands")
}
func (node *MatrixLiteral) Eval(timestamp *time.Time) Matrix {
values, err := persistence.GetRangeValues(node.labels, &model.Interval{}, &stalenessPolicy)
if err != nil {
log.Printf("Unable to get values for vector interval")
return Matrix{}
}
return values
}
func (node *MatrixLiteral) EvalBoundaries(timestamp *time.Time) Matrix {
interval := &model.Interval{
OldestInclusive: timestamp.Add(-node.interval),
NewestInclusive: *timestamp,
}
values, err := persistence.GetBoundaryValues(node.labels, interval, &stalenessPolicy)
if err != nil {
log.Printf("Unable to get boundary values for vector interval")
return Matrix{}
}
return values
}
func (node *StringLiteral) Eval(timestamp *time.Time) string {
return node.str
}
func (node *StringFunctionCall) Eval(timestamp *time.Time) string {
return node.function.callFn(timestamp, node.args).(string)
}
// ----------------------------------------------------------------------------
// Constructors.
func NewScalarLiteral(value model.SampleValue) *ScalarLiteral {
return &ScalarLiteral{
value: value,
}
}
func NewVectorLiteral(labels model.LabelSet) *VectorLiteral {
return &VectorLiteral{
labels: labels,
}
}
func NewVectorAggregation(aggrType AggrType, vector VectorNode, groupBy []model.LabelName) *VectorAggregation {
return &VectorAggregation{
aggrType: aggrType,
groupBy: groupBy,
vector: vector,
}
}
func NewFunctionCall(function *Function, args []Node) (Node, error) {
if err := function.CheckArgTypes(args); err != nil {
return nil, err
}
switch function.returnType {
case SCALAR:
return &ScalarFunctionCall{
function: function,
args: args,
}, nil
case VECTOR:
return &VectorFunctionCall{
function: function,
args: args,
}, nil
case STRING:
return &StringFunctionCall{
function: function,
args: args,
}, nil
}
panic("Function with invalid return type")
}
func nodesHaveTypes(nodes []Node, exprTypes []ExprType) bool {
for _, node := range nodes {
for _, exprType := range exprTypes {
if node.Type() == exprType {
return true
}
}
}
return false
}
func NewArithExpr(opType BinOpType, lhs Node, rhs Node) (Node, error) {
if !nodesHaveTypes([]Node{lhs, rhs}, []ExprType{SCALAR, VECTOR}) {
return nil, errors.New("Binary operands must be of vector or scalar type")
}
if lhs.Type() == SCALAR && rhs.Type() == VECTOR {
return nil, errors.New("Left side of vector binary operation must be of vector type")
}
if opType == AND || opType == OR {
if lhs.Type() == SCALAR || rhs.Type() == SCALAR {
return nil, errors.New("AND and OR operators may only be used between vectors")
}
}
if lhs.Type() == VECTOR || rhs.Type() == VECTOR {
return &VectorArithExpr{
opType: opType,
lhs: lhs.(VectorNode),
rhs: rhs,
}, nil
}
return &ScalarArithExpr{
opType: opType,
lhs: lhs.(ScalarNode),
rhs: rhs.(ScalarNode),
}, nil
}
func NewMatrixLiteral(vector *VectorLiteral, interval time.Duration) *MatrixLiteral {
return &MatrixLiteral{
labels: vector.labels,
interval: interval,
}
}
func NewStringLiteral(str string) *StringLiteral {
return &StringLiteral{
str: str,
}
}
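
A minimal sketch (not part of the commit) of building an expression with the constructors above, roughly "sum of http_requests by job, divided by 100"; the label names and the scalar are illustrative.

package main

import (
	"github.com/matttproud/prometheus/model"
	"github.com/matttproud/prometheus/rules/ast"
)

func main() {
	vector := ast.NewVectorLiteral(model.LabelSet{"name": "http_requests"})
	sum := ast.NewVectorAggregation(ast.SUM, vector, []model.LabelName{"job"})
	expr, err := ast.NewArithExpr(ast.DIV, sum, ast.NewScalarLiteral(100))
	if err != nil {
		panic(err)
	}
	// Evaluating expr requires a storage backend, wired up via ast.SetPersistence.
	_ = expr
}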

rules/ast/functions.go (new file, 216 lines)

@@ -0,0 +1,216 @@
package ast
import (
"errors"
"fmt"
"github.com/matttproud/prometheus/model"
"time"
)
type Function struct {
name string
argTypes []ExprType
returnType ExprType
callFn func(timestamp *time.Time, args []Node) interface{}
}
func (function *Function) CheckArgTypes(args []Node) error {
if len(function.argTypes) != len(args) {
return errors.New(
fmt.Sprintf("Wrong number of arguments to function %v(): %v expected, %v given",
function.name, len(function.argTypes), len(args)))
}
for idx, argType := range function.argTypes {
invalidType := false
var expectedType string
if _, ok := args[idx].(ScalarNode); argType == SCALAR && !ok {
invalidType = true
expectedType = "scalar"
}
if _, ok := args[idx].(VectorNode); argType == VECTOR && !ok {
invalidType = true
expectedType = "vector"
}
if _, ok := args[idx].(MatrixNode); argType == MATRIX && !ok {
invalidType = true
expectedType = "matrix"
}
if _, ok := args[idx].(StringNode); argType == STRING && !ok {
invalidType = true
expectedType = "string"
}
if invalidType {
return errors.New(
fmt.Sprintf("Wrong type for argument %v in function %v(), expected %v",
idx, function.name, expectedType))
}
}
return nil
}
// === time() ===
func timeImpl(timestamp *time.Time, args []Node) interface{} {
return model.SampleValue(time.Now().Unix())
}
// === count(vector VectorNode) ===
func countImpl(timestamp *time.Time, args []Node) interface{} {
return model.SampleValue(len(args[0].(VectorNode).Eval(timestamp)))
}
// === delta(matrix MatrixNode, isCounter ScalarNode) ===
func deltaImpl(timestamp *time.Time, args []Node) interface{} {
matrixNode := args[0].(MatrixNode)
isCounter := int(args[1].(ScalarNode).Eval(timestamp))
resultVector := Vector{}
// If we treat these metrics as counters, we need to fetch all values
// in the interval to find breaks in the timeseries' monotonicity.
// I.e. if a counter resets, we want to ignore that reset.
var matrixValue Matrix
if isCounter > 0 {
matrixValue = matrixNode.Eval(timestamp)
} else {
matrixValue = matrixNode.EvalBoundaries(timestamp)
}
for _, samples := range matrixValue {
counterCorrection := model.SampleValue(0)
lastValue := model.SampleValue(0)
for _, sample := range samples.Values {
currentValue := sample.Value
if currentValue < lastValue {
counterCorrection += lastValue - currentValue
}
lastValue = currentValue
}
resultValue := lastValue - samples.Values[0].Value + counterCorrection
resultSample := &model.Sample{
Metric: samples.Metric,
Value: resultValue,
Timestamp: *timestamp,
}
resultVector = append(resultVector, resultSample)
}
return resultVector
}
// === rate(node *MatrixNode) ===
func rateImpl(timestamp *time.Time, args []Node) interface{} {
args = append(args, &ScalarLiteral{value: 1})
return deltaImpl(timestamp, args)
}
// === sampleVectorImpl() ===
func sampleVectorImpl(timestamp *time.Time, args []Node) interface{} {
return Vector{
&model.Sample{
Metric: model.Metric{
"name": "http_requests",
"job": "api-server",
"instance": "0",
},
Value: 10,
Timestamp: *timestamp,
},
&model.Sample{
Metric: model.Metric{
"name": "http_requests",
"job": "api-server",
"instance": "1",
},
Value: 20,
Timestamp: *timestamp,
},
&model.Sample{
Metric: model.Metric{
"name": "http_requests",
"job": "api-server",
"instance": "2",
},
Value: 30,
Timestamp: *timestamp,
},
&model.Sample{
Metric: model.Metric{
"name": "http_requests",
"job": "api-server",
"instance": "3",
"group": "canary",
},
Value: 40,
Timestamp: *timestamp,
},
&model.Sample{
Metric: model.Metric{
"name": "http_requests",
"job": "api-server",
"instance": "2",
"group": "canary",
},
Value: 40,
Timestamp: *timestamp,
},
&model.Sample{
Metric: model.Metric{
"name": "http_requests",
"job": "api-server",
"instance": "3",
"group": "mytest",
},
Value: 40,
Timestamp: *timestamp,
},
&model.Sample{
Metric: model.Metric{
"name": "http_requests",
"job": "api-server",
"instance": "3",
"group": "mytest",
},
Value: 40,
Timestamp: *timestamp,
},
}
}
var functions = map[string]*Function{
"time": {
name: "time",
argTypes: []ExprType{},
returnType: SCALAR,
callFn: timeImpl,
},
"count": {
name: "count",
argTypes: []ExprType{VECTOR},
returnType: SCALAR,
callFn: countImpl,
},
"delta": {
name: "delta",
argTypes: []ExprType{MATRIX, SCALAR},
returnType: VECTOR,
callFn: deltaImpl,
},
"rate": {
name: "rate",
argTypes: []ExprType{MATRIX},
returnType: VECTOR,
callFn: rateImpl,
},
"sampleVector": {
name: "sampleVector",
argTypes: []ExprType{},
returnType: VECTOR,
callFn: sampleVectorImpl,
},
}
func GetFunction(name string) (*Function, error) {
function, ok := functions[name]
if !ok {
return nil, errors.New(fmt.Sprintf("Couldn't find function %v()", name))
}
return function, nil
}
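
To make the counter correction in deltaImpl above concrete: for samples 5, 8, 2, 4 of a counter within the interval, the drop from 8 to 2 adds 8 - 2 = 6 to counterCorrection, so the reported delta is 4 - 5 + 6 = 5 instead of the misleading raw difference of -1.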


@@ -0,0 +1,64 @@
package ast
//////////
// TEMPORARY CRAP FILE IN LIEU OF MISSING FUNCTIONALITY IN STORAGE LAYER
//
// REMOVE!
import (
"github.com/matttproud/prometheus/model"
"github.com/matttproud/prometheus/storage/metric"
"time"
)
// TODO ask matt about using pointers in nested metric structs
// TODO move this somewhere proper
var stalenessPolicy = metric.StalenessPolicy{
DeltaAllowance: time.Duration(300) * time.Second,
}
// TODO remove PersistenceBridge temporary helper.
type PersistenceBridge struct {
persistence metric.MetricPersistence
}
// AST-global persistence to use.
var persistence *PersistenceBridge = nil
func (p *PersistenceBridge) GetValueAtTime(labels model.LabelSet, timestamp *time.Time, stalenessPolicy *metric.StalenessPolicy) ([]*model.Sample, error) {
fingerprints, err := p.persistence.GetFingerprintsForLabelSet(&labels)
if err != nil {
return nil, err
}
samples := []*model.Sample{}
for _, fingerprint := range fingerprints {
metric, err := p.persistence.GetMetricForFingerprint(fingerprint)
if err != nil {
return nil, err
}
sample, err := p.persistence.GetValueAtTime(metric, timestamp, stalenessPolicy)
if err != nil {
return nil, err
}
if sample == nil {
continue
}
samples = append(samples, sample)
}
return samples, nil
}
func (p *PersistenceBridge) GetBoundaryValues(labels model.LabelSet, interval *model.Interval, stalenessPolicy *metric.StalenessPolicy) ([]*model.SampleSet, error) {
return []*model.SampleSet{}, nil // TODO real values
}
func (p *PersistenceBridge) GetRangeValues(labels model.LabelSet, interval *model.Interval, stalenessPolicy *metric.StalenessPolicy) ([]*model.SampleSet, error) {
return []*model.SampleSet{}, nil // TODO real values
}
func SetPersistence(p metric.MetricPersistence) {
persistence = &PersistenceBridge{
persistence: p,
}
}

rules/ast/printer.go (new file, 181 lines)

@@ -0,0 +1,181 @@
package ast
import (
"fmt"
"sort"
"strings"
"time"
)
func binOpTypeToString(opType BinOpType) string {
opTypeMap := map[BinOpType]string{
ADD: "+",
SUB: "-",
MUL: "*",
DIV: "/",
MOD: "%",
GT: ">",
LT: "<",
EQ: "==",
NE: "!=",
GE: ">=",
LE: "<=",
}
return opTypeMap[opType]
}
func aggrTypeToString(aggrType AggrType) string {
aggrTypeMap := map[AggrType]string{
SUM: "SUM",
AVG: "AVG",
MIN: "MIN",
MAX: "MAX",
}
return aggrTypeMap[aggrType]
}
func durationToString(duration time.Duration) string {
seconds := int64(duration / time.Second)
factors := map[string]int64{
"y": 60 * 60 * 24 * 365,
"d": 60 * 60 * 24,
"h": 60 * 60,
"m": 60,
"s": 1,
}
unit := "s"
switch int64(0) {
case seconds % factors["y"]:
unit = "y"
case seconds % factors["d"]:
unit = "d"
case seconds % factors["h"]:
unit = "h"
case seconds % factors["m"]:
unit = "m"
}
return fmt.Sprintf("%v%v", seconds/factors[unit], unit)
}
func (vector Vector) ToString() string {
metricStrings := []string{}
for _, sample := range vector {
metricName, ok := sample.Metric["name"]
if !ok {
panic("Tried to print vector without metric name")
}
labelStrings := []string{}
for label, value := range sample.Metric {
if label != "name" {
labelStrings = append(labelStrings, fmt.Sprintf("%v='%v'", label, value))
}
}
sort.Strings(labelStrings)
metricStrings = append(metricStrings,
fmt.Sprintf("%v{%v} => %v @[%v]",
metricName,
strings.Join(labelStrings, ","),
sample.Value, sample.Timestamp))
}
sort.Strings(metricStrings)
return strings.Join(metricStrings, "\n")
}
func (node *VectorLiteral) ToString() string {
metricName, ok := node.labels["name"]
if !ok {
panic("Tried to print vector without metric name")
}
labelStrings := []string{}
for label, value := range node.labels {
if label != "name" {
labelStrings = append(labelStrings, fmt.Sprintf("%v='%v'", label, value))
}
}
sort.Strings(labelStrings)
return fmt.Sprintf("%v{%v}", metricName, strings.Join(labelStrings, ","))
}
func (node *MatrixLiteral) ToString() string {
vectorString := (&VectorLiteral{labels: node.labels}).ToString()
intervalString := fmt.Sprintf("['%v']", durationToString(node.interval))
return vectorString + intervalString
}
func (node *ScalarLiteral) NodeTreeToDotGraph() string {
return fmt.Sprintf("%#p[label=\"%v\"];\n", node, node.value)
}
func functionArgsToDotGraph(node Node, args []Node) string {
graph := ""
for _, arg := range args {
graph += fmt.Sprintf("%#p -> %#p;\n", node, arg)
}
for _, arg := range args {
graph += arg.NodeTreeToDotGraph()
}
return graph
}
func (node *ScalarFunctionCall) NodeTreeToDotGraph() string {
graph := fmt.Sprintf("%#p[label=\"%v\"];\n", node, node.function.name)
graph += functionArgsToDotGraph(node, node.args)
return graph
}
func (node *ScalarArithExpr) NodeTreeToDotGraph() string {
graph := fmt.Sprintf("%#p[label=\"%v\"];\n", node, binOpTypeToString(node.opType))
graph += fmt.Sprintf("%#p -> %#p;\n", node, node.lhs)
graph += fmt.Sprintf("%#p -> %#p;\n", node, node.rhs)
graph += node.lhs.NodeTreeToDotGraph()
graph += node.rhs.NodeTreeToDotGraph()
return graph
}
func (node *VectorLiteral) NodeTreeToDotGraph() string {
return fmt.Sprintf("%#p[label=\"%v\"];\n", node, node.ToString())
}
func (node *VectorFunctionCall) NodeTreeToDotGraph() string {
graph := fmt.Sprintf("%#p[label=\"%v\"];\n", node, node.function.name)
graph += functionArgsToDotGraph(node, node.args)
return graph
}
func (node *VectorAggregation) NodeTreeToDotGraph() string {
groupByStrings := []string{}
for _, label := range node.groupBy {
groupByStrings = append(groupByStrings, string(label))
}
graph := fmt.Sprintf("%#p[label=\"%v BY (%v)\"]\n",
node,
aggrTypeToString(node.aggrType),
strings.Join(groupByStrings, ", "))
graph += fmt.Sprintf("%#p -> %#p;\n", node, node.vector)
graph += node.vector.NodeTreeToDotGraph()
return graph
}
func (node *VectorArithExpr) NodeTreeToDotGraph() string {
graph := fmt.Sprintf("%#p[label=\"%v\"];\n", node, binOpTypeToString(node.opType))
graph += fmt.Sprintf("%#p -> %#p;\n", node, node.lhs)
graph += fmt.Sprintf("%#p -> %#p;\n", node, node.rhs)
graph += node.lhs.NodeTreeToDotGraph()
graph += node.rhs.NodeTreeToDotGraph()
return graph
}
func (node *MatrixLiteral) NodeTreeToDotGraph() string {
return fmt.Sprintf("%#p[label=\"%v\"];\n", node, node.ToString())
}
func (node *StringLiteral) NodeTreeToDotGraph() string {
return fmt.Sprintf("%#p[label=\"'%v'\"];\n", node, node.str)
}
func (node *StringFunctionCall) NodeTreeToDotGraph() string {
graph := fmt.Sprintf("%#p[label=\"%v\"];\n", node, node.function.name)
graph += functionArgsToDotGraph(node, node.args)
return graph
}
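For reference, a minimal sketch (assumed to sit inside package ast, since durationToString is unexported) of how durationToString chooses its unit: it prints the duration with the largest of y, d, h, m, s that divides it evenly.
func exampleDurations() {
	fmt.Println(durationToString(30 * time.Second)) // "30s"
	fmt.Println(durationToString(5 * time.Minute))  // "5m"  (300s is divisible by 60)
	fmt.Println(durationToString(48 * time.Hour))   // "2d"  (172800s is divisible by 86400)
	fmt.Println(durationToString(90 * time.Second)) // "90s" (90 is not divisible by 60)
}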

120
rules/helpers.go Normal file
View file

@@ -0,0 +1,120 @@
package rules
import (
"errors"
"fmt"
"github.com/matttproud/prometheus/model"
"github.com/matttproud/prometheus/rules/ast"
"regexp"
"strconv"
"time"
)
func rulesError(error string, v ...interface{}) error {
return errors.New(fmt.Sprintf(error, v...))
}
// TODO: move this to a common place; it is currently duplicated in config/.
func stringToDuration(durationStr string) (time.Duration, error) {
durationRE := regexp.MustCompile("([0-9]+)([ydhms]+)")
matches := durationRE.FindStringSubmatch(durationStr)
if len(matches) != 3 {
return 0, rulesError("Not a valid duration string: '%v'", durationStr)
}
value, _ := strconv.Atoi(matches[1])
unit := matches[2]
switch unit {
case "y":
value *= 60 * 60 * 24 * 365
case "d":
value *= 60 * 60 * 24
case "h":
value *= 60 * 60
case "m":
value *= 60
case "s":
value *= 1
}
return time.Duration(value) * time.Second, nil
}
func CreateRule(name string, labels model.LabelSet, root ast.Node, permanent bool) (*Rule, error) {
if root.Type() != ast.VECTOR {
return nil, rulesError("Rule %v does not evaluate to vector type", name)
}
return NewRule(name, labels, root.(ast.VectorNode), permanent), nil
}
func NewFunctionCall(name string, args []ast.Node) (ast.Node, error) {
function, err := ast.GetFunction(name)
if err != nil {
return nil, rulesError("Unknown function \"%v\"", name)
}
functionCall, err := ast.NewFunctionCall(function, args)
if err != nil {
return nil, rulesError(err.Error())
}
return functionCall, nil
}
func NewVectorAggregation(aggrTypeStr string, vector ast.Node, groupBy []model.LabelName) (*ast.VectorAggregation, error) {
if vector.Type() != ast.VECTOR {
return nil, rulesError("Operand of %v aggregation must be of vector type", aggrTypeStr)
}
var aggrTypes = map[string]ast.AggrType{
"SUM": ast.SUM,
"MAX": ast.MAX,
"MIN": ast.MIN,
"AVG": ast.AVG,
}
aggrType, ok := aggrTypes[aggrTypeStr]
if !ok {
return nil, rulesError("Unknown aggregation type '%v'", aggrTypeStr)
}
return ast.NewVectorAggregation(aggrType, vector.(ast.VectorNode), groupBy), nil
}
func NewArithExpr(opTypeStr string, lhs ast.Node, rhs ast.Node) (ast.Node, error) {
var opTypes = map[string]ast.BinOpType{
"+": ast.ADD,
"-": ast.SUB,
"*": ast.MUL,
"/": ast.DIV,
"%": ast.MOD,
">": ast.GT,
"<": ast.LT,
"==": ast.EQ,
"!=": ast.NE,
">=": ast.GE,
"<=": ast.LE,
"AND": ast.AND,
"OR": ast.OR,
}
opType, ok := opTypes[opTypeStr]
if !ok {
return nil, rulesError("Invalid binary operator \"%v\"", opTypeStr)
}
expr, err := ast.NewArithExpr(opType, lhs, rhs)
if err != nil {
return nil, rulesError(err.Error())
}
return expr, nil
}
func NewMatrix(vector ast.Node, intervalStr string) (ast.MatrixNode, error) {
switch vector.(type) {
case *ast.VectorLiteral:
{
break
}
default:
return nil, rulesError("Intervals are currently only supported for vector literals.")
}
duration, err := stringToDuration(intervalStr)
if err != nil {
return nil, err
}
// stringToDuration already yields a time.Duration in seconds; scaling it by time.Second again would be wrong.
vectorLiteral := vector.(*ast.VectorLiteral)
return ast.NewMatrixLiteral(vectorLiteral, duration), nil
}
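As an illustration, a minimal sketch (assumed to run inside package rules; the metric and label names are hypothetical) of how these helpers assemble the expression tree for SUM(http_requests) BY (job):
func exampleSumByJob() (ast.Node, error) {
	// Select the vector http_requests{} via its reserved "name" label.
	vector := ast.NewVectorLiteral(model.LabelSet{"name": "http_requests"})
	// Aggregate it with SUM, grouped by the "job" label.
	return NewVectorAggregation("SUM", vector, []model.LabelName{"job"})
}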

46
rules/lexer.l Normal file
View file

@@ -0,0 +1,46 @@
%{
package rules
import (
"github.com/matttproud/prometheus/model"
"strconv"
)
%}
D [0-9]
L [a-zA-Z_:]
%x S_COMMENTS
%%
. { yypos++; REJECT }
\n { yyline++; yypos = 1; REJECT }
"/*" { BEGIN(S_COMMENTS) }
<S_COMMENTS>"*/" { BEGIN(0) }
<S_COMMENTS>. { /* ignore chars within multi-line comments */ }
\/\/[^\r\n]*\n { /* gobble up one-line comments */ }
permanent { return PERMANENT }
BY { return GROUP_OP }
AVG|SUM|MAX|MIN { yylval.str = yytext; return AGGR_OP }
\<|>|AND|OR { yylval.str = yytext; return CMP_OP }
==|!=|>=|<= { yylval.str = yytext; return CMP_OP }
[+\-] { yylval.str = yytext; return ADDITIVE_OP }
[*/%] { yylval.str = yytext; return MULT_OP }
{L}({L}|{D})+ { yylval.str = yytext; return IDENTIFIER }
\-?{D}+(\.{D}*)? { num, err := strconv.ParseFloat(yytext, 32);
if (err != nil) { rulesError("Invalid float %v", yytext) }
yylval.num = model.SampleValue(num)
return NUMBER }
\"(\\.|[^\\"])*\" { yylval.str = yytext[1:len(yytext) - 1]; return STRING }
\'(\\.|[^\\'])*\' { yylval.str = yytext[1:len(yytext) - 1]; return STRING }
[{}\[\]()=,] { return int(yytext[0]) }
. { /* don't print any remaining chars (whitespace) */ }
\n { /* don't print any remaining chars (whitespace) */ }
%%

499
rules/lexer.l.go Normal file
View file

@@ -0,0 +1,499 @@
// Generated by golex
package rules
import (
"bufio"
"io"
"os"
"regexp"
"sort"
)
import (
"github.com/matttproud/prometheus/model"
"strconv"
)
var yyin io.Reader = os.Stdin
var yyout io.Writer = os.Stdout
type yyrule struct {
regexp *regexp.Regexp
trailing *regexp.Regexp
startConds []yystartcondition
sol bool
action func() yyactionreturn
}
type yyactionreturn struct {
userReturn int
returnType yyactionreturntype
}
type yyactionreturntype int
const (
yyRT_FALLTHROUGH yyactionreturntype = iota
yyRT_USER_RETURN
yyRT_REJECT
)
var yydata string = ""
var yyorig string
var yyorigidx int
var yytext string = ""
var yytextrepl bool = true
func yymore() {
yytextrepl = false
}
func yyBEGIN(state yystartcondition) {
YY_START = state
}
func yyECHO() {
yyout.Write([]byte(yytext))
}
func yyREJECT() {
panic("yyREJECT")
}
var yylessed int
func yyless(n int) {
yylessed = len(yytext) - n
}
func unput(c uint8) {
yyorig = yyorig[:yyorigidx] + string(c) + yyorig[yyorigidx:]
yydata = yydata[:len(yytext)-yylessed] + string(c) + yydata[len(yytext)-yylessed:]
}
func input() int {
if len(yyorig) <= yyorigidx {
return EOF
}
c := yyorig[yyorigidx]
yyorig = yyorig[:yyorigidx] + yyorig[yyorigidx+1:]
yydata = yydata[:len(yytext)-yylessed] + yydata[len(yytext)-yylessed+1:]
return int(c)
}
var EOF int = -1
type yystartcondition int
var INITIAL yystartcondition = 0
var YY_START yystartcondition = INITIAL
type yylexMatch struct {
index int
matchFunc func() yyactionreturn
sortLen int
advLen int
}
type yylexMatchList []yylexMatch
func (ml yylexMatchList) Len() int {
return len(ml)
}
func (ml yylexMatchList) Less(i, j int) bool {
return ml[i].sortLen > ml[j].sortLen && ml[i].index > ml[j].index
}
func (ml yylexMatchList) Swap(i, j int) {
ml[i], ml[j] = ml[j], ml[i]
}
func yylex() int {
reader := bufio.NewReader(yyin)
for {
line, err := reader.ReadString('\n')
if len(line) == 0 && err == io.EOF {
break
}
yydata += line
}
yyorig = yydata
yyorigidx = 0
yyactioninline(yyBEGIN)
for len(yydata) > 0 {
matches := yylexMatchList(make([]yylexMatch, 0, 6))
excl := yystartconditionexclmap[YY_START]
for i, v := range yyrules {
sol := yyorigidx == 0 || yyorig[yyorigidx-1] == '\n'
if v.sol && !sol {
continue
}
// Check start conditions.
ok := false
// YY_START or '*' must feature in v.startConds
for _, c := range v.startConds {
if c == YY_START || c == -1 {
ok = true
break
}
}
if !excl {
// If v.startConds is empty, this is also acceptable.
if len(v.startConds) == 0 {
ok = true
}
}
if !ok {
continue
}
idxs := v.regexp.FindStringIndex(yydata)
if idxs != nil && idxs[0] == 0 {
// Check the trailing context, if any.
checksOk := true
sortLen := idxs[1]
advLen := idxs[1]
if v.trailing != nil {
tridxs := v.trailing.FindStringIndex(yydata[idxs[1]:])
if tridxs == nil || tridxs[0] != 0 {
checksOk = false
} else {
sortLen += tridxs[1]
}
}
if checksOk {
matches = append(matches, yylexMatch{i, v.action, sortLen, advLen})
}
}
}
if yytextrepl {
yytext = ""
}
sort.Sort(matches)
tryMatch:
if len(matches) == 0 {
yytext += yydata[:1]
yydata = yydata[1:]
yyorigidx += 1
yyout.Write([]byte(yytext))
} else {
m := matches[0]
yytext += yydata[:m.advLen]
yyorigidx += m.advLen
yytextrepl, yylessed = true, 0
ar := m.matchFunc()
if ar.returnType != yyRT_REJECT {
yydata = yydata[m.advLen-yylessed:]
yyorigidx -= yylessed
}
switch ar.returnType {
case yyRT_FALLTHROUGH:
// Do nothing.
case yyRT_USER_RETURN:
return ar.userReturn
case yyRT_REJECT:
matches = matches[1:]
yytext = yytext[:len(yytext)-m.advLen]
yyorigidx -= m.advLen
goto tryMatch
}
}
}
return 0
}
var S_COMMENTS yystartcondition = 1024
var yystartconditionexclmap = map[yystartcondition]bool{S_COMMENTS: true, }
var yyrules []yyrule = []yyrule{{regexp.MustCompile("[^\\n]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yypos++
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\n"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyline++
yypos = 1
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("/\\*"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_COMMENTS)
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\*/"), nil, []yystartcondition{S_COMMENTS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(0)
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[^\\n]"), nil, []yystartcondition{S_COMMENTS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\/\\/[^\\r\\n]*\\n"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("permanent"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
return yyactionreturn{PERMANENT, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("BY"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
return yyactionreturn{GROUP_OP, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("AVG|SUM|MAX|MIN"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext
return yyactionreturn{AGGR_OP, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\<|>|AND|OR"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext
return yyactionreturn{CMP_OP, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("==|!=|>=|<="), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext
return yyactionreturn{CMP_OP, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[+\\-]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext
return yyactionreturn{ADDITIVE_OP, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[*/%]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext
return yyactionreturn{MULT_OP, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("([a-zA-Z_:])(([a-zA-Z_:])|([0-9]))+"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext
return yyactionreturn{IDENTIFIER, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\-?([0-9])+(\\.([0-9])*)?"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
num, err := strconv.ParseFloat(yytext, 32)
if err != nil {
rulesError("Invalid float %v", yytext)
}
yylval.num = model.SampleValue(num)
return yyactionreturn{NUMBER, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\\"(\\\\[^\\n]|[^\\\\\"])*\\\""), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext[1 : len(yytext)-1]
return yyactionreturn{STRING, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\'(\\\\[^\\n]|[^\\\\'])*\\'"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext[1 : len(yytext)-1]
return yyactionreturn{STRING, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[{}\\[\\]()=,]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
return yyactionreturn{int(yytext[0]), yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[^\\n]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\n"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, }
func yyactioninline(BEGIN func(yystartcondition)) {}

69
rules/load.go Normal file
View file

@@ -0,0 +1,69 @@
package rules
import (
"errors"
"fmt"
"io"
"os"
"strings"
)
// NOTE: This parser is non-reentrant due to its dependence on global state.
// GoLex sadly needs these global variables for storing temporary token/parsing information.
var yylval *yySymType // For storing extra token information, like the contents of a string.
var yyline int // Line number within the current file or buffer.
var yypos int // Character position within the current line.
var parsedRules []*Rule // Parsed rules.
type RulesLexer struct {
errors []string
}
func addRule(rule *Rule) {
parsedRules = append(parsedRules, rule)
}
func (lexer *RulesLexer) Lex(lval *yySymType) int {
yylval = lval
token_type := yylex()
return token_type
}
func (lexer *RulesLexer) Error(errorStr string) {
err := fmt.Sprintf("Error parsing rules at line %v, char %v: %v", yyline, yypos, errorStr)
lexer.errors = append(lexer.errors, err)
}
func LoadFromReader(rulesReader io.Reader) ([]*Rule, error) {
yyin = rulesReader
yypos = 1
yyline = 1
parsedRules = []*Rule{}
lexer := &RulesLexer{}
ret := yyParse(lexer)
if ret != 0 && len(lexer.errors) == 0 {
lexer.Error("Unknown parser error")
}
if len(lexer.errors) > 0 {
err := errors.New(strings.Join(lexer.errors, "\n"))
return []*Rule{}, err
}
return parsedRules, nil
}
func LoadFromString(rulesString string) ([]*Rule, error) {
rulesReader := strings.NewReader(rulesString)
return LoadFromReader(rulesReader)
}
func LoadFromFile(fileName string) ([]*Rule, error) {
rulesReader, err := os.Open(fileName)
if err != nil {
return []*Rule{}, err
}
defer rulesReader.Close()
return LoadFromReader(rulesReader)
}
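Because of the package-level parser state noted above, loading is not safe to run concurrently. A minimal sketch of the intended call pattern, using a hypothetical rule definition:
package main

import (
	"log"

	"github.com/matttproud/prometheus/rules"
)

func main() {
	// The rule name on the left of '=' becomes the name of the recorded result.
	loaded, err := rules.LoadFromString("dc_http_requests = SUM(http_requests) BY (dc)")
	if err != nil {
		log.Fatalf("could not parse rules: %v", err)
	}
	for _, rule := range loaded {
		log.Printf("loaded rule %q", rule.Name())
	}
}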

88
rules/manager.go Normal file
View file

@@ -0,0 +1,88 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package rules
import (
"github.com/matttproud/prometheus/config"
"github.com/matttproud/prometheus/rules/ast"
"log"
"time"
)
type Result struct {
Err error // TODO propagate errors from rule evaluation.
Samples ast.Vector
}
type RuleManager interface {
AddRulesFromConfig(config *config.Config) error
}
type ruleManager struct {
rules []*Rule
results chan *Result
done chan bool
interval time.Duration
}
func NewRuleManager(results chan *Result, interval time.Duration) RuleManager {
manager := &ruleManager{
results: results,
rules: []*Rule{},
done: make(chan bool),
interval: interval,
}
go manager.run(results)
return manager
}
func (m *ruleManager) run(results chan *Result) {
ticker := time.Tick(m.interval)
for {
select {
case <-ticker:
m.runIteration(results)
case <-m.done:
log.Printf("RuleManager exiting...")
return // a bare 'break' would only leave the select, not the evaluation loop
}
}
}
func (m *ruleManager) Stop() {
m.done <- true
}
func (m *ruleManager) runIteration(results chan *Result) {
now := time.Now()
for _, rule := range m.rules {
// Hand each goroutine its own rule; otherwise they would all share the loop variable.
go func(rule *Rule) {
vector := rule.Eval(&now)
m.results <- &Result{
Samples: vector,
}
}(rule)
}
}
func (m *ruleManager) AddRulesFromConfig(config *config.Config) error {
for _, ruleFile := range config.Global.RuleFiles {
newRules, err := LoadFromFile(ruleFile)
if err != nil {
return err
}
m.rules = append(m.rules, newRules...)
}
return nil
}
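A minimal sketch of wiring the manager into a program (the rule file name and the 30s interval are illustrative; real callers would take both from their configuration):
package main

import (
	"log"
	"time"

	"github.com/matttproud/prometheus/config"
	"github.com/matttproud/prometheus/rules"
)

func main() {
	conf := config.New()
	conf.Global.EvaluationInterval = 30 * time.Second
	conf.Global.AddRuleFiles([]string{"example.rules"}) // hypothetical rule file

	results := make(chan *rules.Result)
	manager := rules.NewRuleManager(results, conf.Global.EvaluationInterval)
	if err := manager.AddRulesFromConfig(conf); err != nil {
		log.Fatalf("could not load rule files: %v", err)
	}

	// Each evaluation round delivers one Result per rule on the channel.
	for result := range results {
		log.Printf("rule evaluation produced %d samples", len(result.Samples))
	}
}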

148
rules/parser.y Normal file
View file

@@ -0,0 +1,148 @@
%{
package rules
import "fmt"
import "github.com/matttproud/prometheus/model"
import "github.com/matttproud/prometheus/rules/ast"
%}
%union {
num model.SampleValue
str string
ruleNode ast.Node
ruleNodeSlice []ast.Node
boolean bool
labelNameSlice []model.LabelName
labelSet model.LabelSet
}
%token <str> IDENTIFIER STRING
%token <num> NUMBER
%token PERMANENT GROUP_OP
%token <str> AGGR_OP CMP_OP ADDITIVE_OP MULT_OP
%type <ruleNodeSlice> func_arg_list
%type <labelNameSlice> label_list grouping_opts
%type <labelSet> label_assign label_assign_list rule_labels
%type <ruleNode> rule_expr func_arg
%type <boolean> qualifier
%right '='
%left CMP_OP
%left ADDITIVE_OP
%left MULT_OP
%start rules_stat_list
%%
rules_stat_list : /* empty */
| rules_stat_list rules_stat
;
rules_stat : qualifier IDENTIFIER rule_labels '=' rule_expr
{
rule, err := CreateRule($2, $3, $5, $1)
if err != nil { yylex.Error(err.Error()); return 1 }
addRule(rule)
}
;
qualifier : /* empty */
{ $$ = false }
| PERMANENT
{ $$ = true }
;
rule_labels : /* empty */
{ $$ = model.LabelSet{} }
| '{' label_assign_list '}'
{ $$ = $2 }
| '{' '}'
{ $$ = model.LabelSet{} }
label_assign_list : label_assign
{ $$ = $1 }
| label_assign_list ',' label_assign
{ for k, v := range $3 { $$[k] = v } }
;
label_assign : IDENTIFIER '=' STRING
{ $$ = model.LabelSet{ model.LabelName($1): model.LabelValue($3) } }
;
rule_expr : '(' rule_expr ')'
{ $$ = $2 }
| IDENTIFIER rule_labels
{ $2["name"] = model.LabelValue($1); $$ = ast.NewVectorLiteral($2) }
| IDENTIFIER '(' func_arg_list ')'
{
var err error
$$, err = NewFunctionCall($1, $3)
if err != nil { yylex.Error(err.Error()); return 1 }
}
| IDENTIFIER '(' ')'
{
var err error
$$, err = NewFunctionCall($1, []ast.Node{})
if err != nil { yylex.Error(err.Error()); return 1 }
}
| AGGR_OP '(' rule_expr ')' grouping_opts
{
var err error
$$, err = NewVectorAggregation($1, $3, $5)
if err != nil { yylex.Error(err.Error()); return 1 }
}
/* Yacc can only attach associativity to terminals, so we
* have to list all operators here. */
| rule_expr ADDITIVE_OP rule_expr
{
var err error
$$, err = NewArithExpr($2, $1, $3)
if err != nil { yylex.Error(err.Error()); return 1 }
}
| rule_expr MULT_OP rule_expr
{
var err error
$$, err = NewArithExpr($2, $1, $3)
if err != nil { yylex.Error(err.Error()); return 1 }
}
| rule_expr CMP_OP rule_expr
{
var err error
$$, err = NewArithExpr($2, $1, $3)
if err != nil { yylex.Error(err.Error()); return 1 }
}
| NUMBER
{ $$ = ast.NewScalarLiteral($1)}
;
grouping_opts :
{ $$ = []model.LabelName{} }
| GROUP_OP '(' label_list ')'
{ $$ = $3 }
;
label_list : IDENTIFIER
{ $$ = []model.LabelName{model.LabelName($1)} }
| label_list ',' IDENTIFIER
{ $$ = append($$, model.LabelName($3)) }
;
func_arg_list : func_arg
{ $$ = []ast.Node{$1} }
| func_arg_list ',' func_arg
{ $$ = append($$, $3) }
;
func_arg : rule_expr
{ $$ = $1 }
| rule_expr '[' STRING ']'
{
var err error
$$, err = NewMatrix($1, $3)
if err != nil { yylex.Error(err.Error()); return 1 }
}
| STRING
{ $$ = ast.NewStringLiteral($1) }
;
%%
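Hypothetical rule definitions this grammar accepts (metric, label, and rule names are illustrative): an optional 'permanent' qualifier, an optional label set attached to the rule name, and an expression built from vector literals, aggregations, comparisons, and arithmetic.
permanent dc_http_requests{dc='us-west'} = SUM(http_requests) BY (dc)
busy_jobs = SUM(http_requests) BY (job) > 1000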

486
rules/parser.y.go Normal file
View file

@@ -0,0 +1,486 @@
//line parser.y:2
package rules
import "fmt"
import "github.com/matttproud/prometheus/model"
import "github.com/matttproud/prometheus/rules/ast"
//line parser.y:9
type yySymType struct {
yys int
num model.SampleValue
str string
ruleNode ast.Node
ruleNodeSlice []ast.Node
boolean bool
labelNameSlice []model.LabelName
labelSet model.LabelSet
}
const IDENTIFIER = 57346
const STRING = 57347
const NUMBER = 57348
const PERMANENT = 57349
const GROUP_OP = 57350
const AGGR_OP = 57351
const CMP_OP = 57352
const ADDITIVE_OP = 57353
const MULT_OP = 57354
var yyToknames = []string{
"IDENTIFIER",
"STRING",
"NUMBER",
"PERMANENT",
"GROUP_OP",
"AGGR_OP",
"CMP_OP",
"ADDITIVE_OP",
"MULT_OP",
" =",
}
var yyStatenames = []string{}
const yyEofCode = 1
const yyErrCode = 2
const yyMaxDepth = 200
//line parser.y:148
//line yacctab:1
var yyExca = []int{
-1, 1,
1, -1,
-2, 4,
}
const yyNprod = 30
const yyPrivate = 57344
var yyTokenNames []string
var yyStates []string
const yyLast = 77
var yyAct = []int{
36, 37, 48, 49, 15, 38, 17, 27, 7, 16,
13, 23, 21, 22, 18, 19, 24, 14, 35, 53,
42, 52, 20, 30, 31, 32, 15, 38, 17, 39,
11, 16, 8, 23, 21, 22, 23, 21, 22, 14,
22, 43, 44, 15, 33, 17, 12, 41, 16, 40,
28, 7, 6, 47, 26, 54, 14, 10, 23, 21,
22, 21, 22, 4, 45, 29, 51, 12, 25, 5,
2, 1, 3, 9, 46, 50, 34,
}
var yyPact = []int{
-1000, 56, -1000, 65, -1000, -6, 19, 42, 39, -1,
-1000, -1000, 9, 48, 39, 37, -10, -1000, -1000, 63,
60, 39, 39, 39, 26, -1000, 0, 39, -1000, -1000,
28, -1000, 50, -1000, 31, -1000, -1000, 1, -1000, 23,
-1000, 22, 59, 45, -1000, -18, -1000, -14, -1000, 62,
3, -1000, -1000, 51, -1000,
}
var yyPgo = []int{
0, 76, 75, 74, 30, 73, 52, 1, 0, 72,
71, 70,
}
var yyR1 = []int{
0, 10, 10, 11, 9, 9, 6, 6, 6, 5,
5, 4, 7, 7, 7, 7, 7, 7, 7, 7,
7, 3, 3, 2, 2, 1, 1, 8, 8, 8,
}
var yyR2 = []int{
0, 0, 2, 5, 0, 1, 0, 3, 2, 1,
3, 3, 3, 2, 4, 3, 5, 3, 3, 3,
1, 0, 4, 1, 3, 1, 3, 1, 4, 1,
}
var yyChk = []int{
-1000, -10, -11, -9, 7, 4, -6, 14, 13, -5,
15, -4, 4, -7, 17, 4, 9, 6, 15, 16,
13, 11, 12, 10, -7, -6, 17, 17, -4, 5,
-7, -7, -7, 18, -1, 18, -8, -7, 5, -7,
18, 16, 19, 18, -8, 5, -3, 8, 20, 17,
-2, 4, 18, 16, 4,
}
var yyDef = []int{
1, -2, 2, 0, 5, 6, 0, 0, 0, 0,
8, 9, 0, 3, 0, 6, 0, 20, 7, 0,
0, 0, 0, 0, 0, 13, 0, 0, 10, 11,
17, 18, 19, 12, 0, 15, 25, 27, 29, 0,
14, 0, 0, 21, 26, 0, 16, 0, 28, 0,
0, 23, 22, 0, 24,
}
var yyTok1 = []int{
1, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
17, 18, 3, 3, 16, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 13, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 19, 3, 20, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 14, 3, 15,
}
var yyTok2 = []int{
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
12,
}
var yyTok3 = []int{
0,
}
//line yaccpar:1
/* parser for yacc output */
var yyDebug = 0
type yyLexer interface {
Lex(lval *yySymType) int
Error(s string)
}
const yyFlag = -1000
func yyTokname(c int) string {
if c > 0 && c <= len(yyToknames) {
if yyToknames[c-1] != "" {
return yyToknames[c-1]
}
}
return fmt.Sprintf("tok-%v", c)
}
func yyStatname(s int) string {
if s >= 0 && s < len(yyStatenames) {
if yyStatenames[s] != "" {
return yyStatenames[s]
}
}
return fmt.Sprintf("state-%v", s)
}
func yylex1(lex yyLexer, lval *yySymType) int {
c := 0
char := lex.Lex(lval)
if char <= 0 {
c = yyTok1[0]
goto out
}
if char < len(yyTok1) {
c = yyTok1[char]
goto out
}
if char >= yyPrivate {
if char < yyPrivate+len(yyTok2) {
c = yyTok2[char-yyPrivate]
goto out
}
}
for i := 0; i < len(yyTok3); i += 2 {
c = yyTok3[i+0]
if c == char {
c = yyTok3[i+1]
goto out
}
}
out:
if c == 0 {
c = yyTok2[1] /* unknown char */
}
if yyDebug >= 3 {
fmt.Printf("lex %U %s\n", uint(char), yyTokname(c))
}
return c
}
func yyParse(yylex yyLexer) int {
var yyn int
var yylval yySymType
var yyVAL yySymType
yyS := make([]yySymType, yyMaxDepth)
Nerrs := 0 /* number of errors */
Errflag := 0 /* error recovery flag */
yystate := 0
yychar := -1
yyp := -1
goto yystack
ret0:
return 0
ret1:
return 1
yystack:
/* put a state and value onto the stack */
if yyDebug >= 4 {
fmt.Printf("char %v in %v\n", yyTokname(yychar), yyStatname(yystate))
}
yyp++
if yyp >= len(yyS) {
nyys := make([]yySymType, len(yyS)*2)
copy(nyys, yyS)
yyS = nyys
}
yyS[yyp] = yyVAL
yyS[yyp].yys = yystate
yynewstate:
yyn = yyPact[yystate]
if yyn <= yyFlag {
goto yydefault /* simple state */
}
if yychar < 0 {
yychar = yylex1(yylex, &yylval)
}
yyn += yychar
if yyn < 0 || yyn >= yyLast {
goto yydefault
}
yyn = yyAct[yyn]
if yyChk[yyn] == yychar { /* valid shift */
yychar = -1
yyVAL = yylval
yystate = yyn
if Errflag > 0 {
Errflag--
}
goto yystack
}
yydefault:
/* default state action */
yyn = yyDef[yystate]
if yyn == -2 {
if yychar < 0 {
yychar = yylex1(yylex, &yylval)
}
/* look through exception table */
xi := 0
for {
if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate {
break
}
xi += 2
}
for xi += 2; ; xi += 2 {
yyn = yyExca[xi+0]
if yyn < 0 || yyn == yychar {
break
}
}
yyn = yyExca[xi+1]
if yyn < 0 {
goto ret0
}
}
if yyn == 0 {
/* error ... attempt to resume parsing */
switch Errflag {
case 0: /* brand new error */
yylex.Error("syntax error")
Nerrs++
if yyDebug >= 1 {
fmt.Printf("%s", yyStatname(yystate))
fmt.Printf("saw %s\n", yyTokname(yychar))
}
fallthrough
case 1, 2: /* incompletely recovered error ... try again */
Errflag = 3
/* find a state where "error" is a legal shift action */
for yyp >= 0 {
yyn = yyPact[yyS[yyp].yys] + yyErrCode
if yyn >= 0 && yyn < yyLast {
yystate = yyAct[yyn] /* simulate a shift of "error" */
if yyChk[yystate] == yyErrCode {
goto yystack
}
}
/* the current p has no shift on "error", pop stack */
if yyDebug >= 2 {
fmt.Printf("error recovery pops state %d\n", yyS[yyp].yys)
}
yyp--
}
/* there is no state on the stack with an error shift ... abort */
goto ret1
case 3: /* no shift yet; clobber input char */
if yyDebug >= 2 {
fmt.Printf("error recovery discards %s\n", yyTokname(yychar))
}
if yychar == yyEofCode {
goto ret1
}
yychar = -1
goto yynewstate /* try again in the same state */
}
}
/* reduction by production yyn */
if yyDebug >= 2 {
fmt.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate))
}
yynt := yyn
yypt := yyp
_ = yypt // guard against "declared and not used"
yyp -= yyR2[yyn]
yyVAL = yyS[yyp+1]
/* consult goto table to find next state */
yyn = yyR1[yyn]
yyg := yyPgo[yyn]
yyj := yyg + yyS[yyp].yys + 1
if yyj >= yyLast {
yystate = yyAct[yyg]
} else {
yystate = yyAct[yyj]
if yyChk[yystate] != -yyn {
yystate = yyAct[yyg]
}
}
// dummy call; replaced with literal code
switch yynt {
case 3:
//line parser.y:42
{
rule, err := CreateRule(yyS[yypt-3].str, yyS[yypt-2].labelSet, yyS[yypt-0].ruleNode, yyS[yypt-4].boolean)
if err != nil { yylex.Error(err.Error()); return 1 }
addRule(rule)
}
case 4:
//line parser.y:50
{ yyVAL.boolean = false }
case 5:
//line parser.y:52
{ yyVAL.boolean = true }
case 6:
//line parser.y:56
{ yyVAL.labelSet = model.LabelSet{} }
case 7:
//line parser.y:58
{ yyVAL.labelSet = yyS[yypt-1].labelSet }
case 8:
//line parser.y:60
{ yyVAL.labelSet = model.LabelSet{} }
case 9:
//line parser.y:63
{ yyVAL.labelSet = yyS[yypt-0].labelSet }
case 10:
//line parser.y:65
{ for k, v := range yyS[yypt-0].labelSet { yyVAL.labelSet[k] = v } }
case 11:
//line parser.y:69
{ yyVAL.labelSet = model.LabelSet{ model.LabelName(yyS[yypt-2].str): model.LabelValue(yyS[yypt-0].str) } }
case 12:
//line parser.y:74
{ yyVAL.ruleNode = yyS[yypt-1].ruleNode }
case 13:
//line parser.y:76
{ yyS[yypt-0].labelSet["name"] = model.LabelValue(yyS[yypt-1].str); yyVAL.ruleNode = ast.NewVectorLiteral(yyS[yypt-0].labelSet) }
case 14:
//line parser.y:78
{
var err error
yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-3].str, yyS[yypt-1].ruleNodeSlice)
if err != nil { yylex.Error(err.Error()); return 1 }
}
case 15:
//line parser.y:84
{
var err error
yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-2].str, []ast.Node{})
if err != nil { yylex.Error(err.Error()); return 1 }
}
case 16:
//line parser.y:90
{
var err error
yyVAL.ruleNode, err = NewVectorAggregation(yyS[yypt-4].str, yyS[yypt-2].ruleNode, yyS[yypt-0].labelNameSlice)
if err != nil { yylex.Error(err.Error()); return 1 }
}
case 17:
//line parser.y:98
{
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil { yylex.Error(err.Error()); return 1 }
}
case 18:
//line parser.y:104
{
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil { yylex.Error(err.Error()); return 1 }
}
case 19:
//line parser.y:110
{
var err error
yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode)
if err != nil { yylex.Error(err.Error()); return 1 }
}
case 20:
//line parser.y:116
{ yyVAL.ruleNode = ast.NewScalarLiteral(yyS[yypt-0].num)}
case 21:
//line parser.y:120
{ yyVAL.labelNameSlice = []model.LabelName{} }
case 22:
//line parser.y:122
{ yyVAL.labelNameSlice = yyS[yypt-1].labelNameSlice }
case 23:
//line parser.y:126
{ yyVAL.labelNameSlice = []model.LabelName{model.LabelName(yyS[yypt-0].str)} }
case 24:
//line parser.y:128
{ yyVAL.labelNameSlice = append(yyVAL.labelNameSlice, model.LabelName(yyS[yypt-0].str)) }
case 25:
//line parser.y:132
{ yyVAL.ruleNodeSlice = []ast.Node{yyS[yypt-0].ruleNode} }
case 26:
//line parser.y:134
{ yyVAL.ruleNodeSlice = append(yyVAL.ruleNodeSlice, yyS[yypt-0].ruleNode) }
case 27:
//line parser.y:138
{ yyVAL.ruleNode = yyS[yypt-0].ruleNode }
case 28:
//line parser.y:140
{
var err error
yyVAL.ruleNode, err = NewMatrix(yyS[yypt-3].ruleNode, yyS[yypt-1].str)
if err != nil { yylex.Error(err.Error()); return 1 }
}
case 29:
//line parser.y:146
{ yyVAL.ruleNode = ast.NewStringLiteral(yyS[yypt-0].str) }
}
goto yystack /* stack new state and value */
}

58
rules/rules.go Normal file
View file

@@ -0,0 +1,58 @@
package rules
import (
"fmt"
"github.com/matttproud/prometheus/model"
"github.com/matttproud/prometheus/rules/ast"
"time"
)
// A recorded rule.
type Rule struct {
name string
vector ast.VectorNode
labels model.LabelSet
permanent bool
}
func (rule *Rule) Name() string { return rule.name }
func (rule *Rule) EvalRaw(timestamp *time.Time) ast.Vector {
return rule.vector.Eval(timestamp)
}
func (rule *Rule) Eval(timestamp *time.Time) ast.Vector {
// Get the raw value of the rule expression.
vector := rule.EvalRaw(timestamp)
// Override the metric name and labels.
for _, sample := range vector {
sample.Metric["metric"] = model.LabelValue(rule.name)
for label, value := range rule.labels {
if value == "" {
delete(sample.Metric, label)
} else {
sample.Metric[label] = value
}
}
}
return vector
}
func (rule *Rule) RuleToDotGraph() string {
graph := "digraph \"Rules\" {\n"
graph += fmt.Sprintf("%#p[shape=\"box\",label=\"%v = \"];\n", rule, rule.name)
graph += fmt.Sprintf("%#p -> %#p;\n", rule, rule.vector)
graph += rule.vector.NodeTreeToDotGraph()
graph += "}\n"
return graph
}
func NewRule(name string, labels model.LabelSet, vector ast.VectorNode, permanent bool) *Rule {
return &Rule{
name: name,
labels: labels,
vector: vector,
permanent: permanent,
}
}
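A minimal sketch (hypothetical names; it assumes a storage backend was registered via ast.SetPersistence, as rules_test.go below does) of how Eval rewrites labels: the rule name is written into the result samples, and a label with an empty value is dropped from them.
package main

import (
	"fmt"
	"time"

	"github.com/matttproud/prometheus/model"
	"github.com/matttproud/prometheus/rules"
	"github.com/matttproud/prometheus/rules/ast"
)

func main() {
	// NOTE: ast.SetPersistence must have been called with a real backend first.
	// Select the hypothetical metric http_errors{job='api-server'}.
	vector := ast.NewVectorLiteral(model.LabelSet{
		"name": "http_errors",
		"job":  "api-server",
	})

	// Record results under the rule's name; the empty value removes the
	// "instance" label from every result sample.
	rule, err := rules.CreateRule("api_errors", model.LabelSet{"instance": ""}, vector, false)
	if err != nil {
		fmt.Println(err)
		return
	}

	now := time.Now()
	for _, sample := range rule.Eval(&now) {
		fmt.Println(sample.Metric, sample.Value, sample.Timestamp)
	}
}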

205
rules/rules_test.go Normal file
View file

@@ -0,0 +1,205 @@
package rules
import (
"fmt"
"github.com/matttproud/prometheus/rules/ast"
"github.com/matttproud/prometheus/storage/metric/leveldb"
"io/ioutil"
"os"
"strings"
"testing"
)
var testEvalTime = testStartTime.Add(testDuration5m * 10)
// Expected output needs to be sorted alphabetically: labels within each line
// must be sorted, and the lines themselves must be sorted as well.
var ruleTests = []struct {
rule string
output []string
shouldFail bool
}{
{
rule: "SUM(http_requests)",
output: []string{"http_requests{} => 3600 @[%v]"},
}, {
rule: "SUM(http_requests) BY (job)",
output: []string{
"http_requests{job='api-server'} => 1000 @[%v]",
"http_requests{job='app-server'} => 2600 @[%v]",
},
}, {
rule: "SUM(http_requests) BY (job, group)",
output: []string{
"http_requests{group='canary',job='api-server'} => 700 @[%v]",
"http_requests{group='canary',job='app-server'} => 1500 @[%v]",
"http_requests{group='production',job='api-server'} => 300 @[%v]",
"http_requests{group='production',job='app-server'} => 1100 @[%v]",
},
}, {
rule: "AVG(http_requests) BY (job)",
output: []string{
"http_requests{job='api-server'} => 250 @[%v]",
"http_requests{job='app-server'} => 650 @[%v]",
},
}, {
rule: "MIN(http_requests) BY (job)",
output: []string{
"http_requests{job='api-server'} => 100 @[%v]",
"http_requests{job='app-server'} => 500 @[%v]",
},
}, {
rule: "MAX(http_requests) BY (job)",
output: []string{
"http_requests{job='api-server'} => 400 @[%v]",
"http_requests{job='app-server'} => 800 @[%v]",
},
}, {
rule: "SUM(http_requests) BY (job) - count(http_requests)",
output: []string{
"http_requests{job='api-server'} => 992 @[%v]",
"http_requests{job='app-server'} => 2592 @[%v]",
},
}, {
rule: "SUM(http_requests) BY (job) - 2",
output: []string{
"http_requests{job='api-server'} => 998 @[%v]",
"http_requests{job='app-server'} => 2598 @[%v]",
},
}, {
rule: "SUM(http_requests) BY (job) % 3",
output: []string{
"http_requests{job='api-server'} => 1 @[%v]",
"http_requests{job='app-server'} => 2 @[%v]",
},
}, {
rule: "SUM(http_requests) BY (job) / 0",
output: []string{
"http_requests{job='api-server'} => +Inf @[%v]",
"http_requests{job='app-server'} => +Inf @[%v]",
},
}, {
rule: "SUM(http_requests) BY (job) > 1000",
output: []string{
"http_requests{job='app-server'} => 2600 @[%v]",
},
}, {
rule: "SUM(http_requests) BY (job) <= 1000",
output: []string{
"http_requests{job='api-server'} => 1000 @[%v]",
},
}, {
rule: "SUM(http_requests) BY (job) != 1000",
output: []string{
"http_requests{job='app-server'} => 2600 @[%v]",
},
}, {
rule: "SUM(http_requests) BY (job) == 1000",
output: []string{
"http_requests{job='api-server'} => 1000 @[%v]",
},
}, {
rule: "SUM(http_requests) BY (job) + SUM(http_requests) BY (job)",
output: []string{
"http_requests{job='api-server'} => 2000 @[%v]",
"http_requests{job='app-server'} => 5200 @[%v]",
},
// Invalid rules that should fail to parse.
}, {
rule: "",
shouldFail: true,
}, {
rule: "http_requests['1d']",
shouldFail: true,
},
}
func annotateWithTime(lines []string) []string {
annotatedLines := []string{}
for _, line := range lines {
annotatedLines = append(annotatedLines, fmt.Sprintf(line, testEvalTime))
}
return annotatedLines
}
func vectorComparisonString(expected []string, actual []string) string {
separator := "\n--------------\n"
return fmt.Sprintf("Expected:%v%v%v\nActual:%v%v%v ",
separator,
strings.Join(expected, "\n"),
separator,
separator,
strings.Join(actual, "\n"),
separator)
}
func TestRules(t *testing.T) {
temporaryDirectory, err := ioutil.TempDir("", "leveldb_metric_persistence_test")
if err != nil {
t.Errorf("Could not create temporary directory: %q\n", err)
return
}
defer func() {
if err = os.RemoveAll(temporaryDirectory); err != nil {
t.Errorf("Could not remove temporary directory: %q\n", err)
}
}()
persistence, err := leveldb.NewLevelDBMetricPersistence(temporaryDirectory)
if err != nil {
t.Errorf("Could not create LevelDB Metric Persistence: %q\n", err)
return
}
if persistence == nil {
t.Errorf("Received nil LevelDB Metric Persistence.\n")
return
}
defer func() {
persistence.Close()
}()
storeMatrix(persistence, testMatrix)
ast.SetPersistence(persistence)
for _, ruleTest := range ruleTests {
expectedLines := annotateWithTime(ruleTest.output)
testRules, err := LoadFromString("testrule = " + ruleTest.rule)
if err != nil {
if ruleTest.shouldFail {
continue
}
t.Errorf("Error during parsing: %v", err)
t.Errorf("Rule: %v", ruleTest.rule)
} else if len(testRules) != 1 {
t.Errorf("Parser created %v rules instead of one", len(testRules))
t.Errorf("Rule: %v", ruleTest.rule)
} else {
failed := false
resultVector := testRules[0].EvalRaw(&testEvalTime)
resultStr := resultVector.ToString()
resultLines := strings.Split(resultStr, "\n")
if len(ruleTest.output) != len(resultLines) {
t.Errorf("Number of samples in expected and actual output don't match")
failed = true
}
for _, expectedSample := range expectedLines {
found := false
for _, actualSample := range resultLines {
if actualSample == expectedSample {
found = true
}
}
if !found {
t.Errorf("Couldn't find expected sample in output: '%v'",
expectedSample)
failed = true
}
}
if failed {
t.Errorf("Rule: %v\n%v", ruleTest.rule, vectorComparisonString(expectedLines, resultLines))
}
}
}
}

132
rules/testdata.go Normal file
View file

@@ -0,0 +1,132 @@
package rules
import (
"github.com/matttproud/prometheus/model"
"github.com/matttproud/prometheus/rules/ast"
"github.com/matttproud/prometheus/storage/metric"
"time"
)
var testDuration5m = time.Duration(5) * time.Minute
var testStartTime = time.Time{}
func getTestValueStream(startVal model.SampleValue,
endVal model.SampleValue,
stepVal model.SampleValue) (resultValues []model.SamplePair) {
currentTime := testStartTime
for currentVal := startVal; currentVal <= endVal; currentVal += stepVal {
sample := model.SamplePair{
Value: currentVal,
Timestamp: currentTime,
}
resultValues = append(resultValues, sample)
currentTime = currentTime.Add(testDuration5m)
}
return resultValues
}
func getTestVectorFromTestMatrix(matrix ast.Matrix) ast.Vector {
vector := ast.Vector{}
for _, sampleSet := range matrix {
lastSample := sampleSet.Values[len(sampleSet.Values)-1]
vector = append(vector, &model.Sample{
Metric: sampleSet.Metric,
Value: lastSample.Value,
Timestamp: lastSample.Timestamp,
})
}
return vector
}
func storeMatrix(persistence metric.MetricPersistence, matrix ast.Matrix) error {
for _, sampleSet := range matrix {
for _, sample := range sampleSet.Values {
err := persistence.AppendSample(&model.Sample{
Metric: sampleSet.Metric,
Value: sample.Value,
Timestamp: sample.Timestamp,
})
if err != nil {
return err
}
}
}
return nil
}
var testMatrix = ast.Matrix{
{
Metric: model.Metric{
"name": "http_requests",
"job": "api-server",
"instance": "0",
"group": "production",
},
Values: getTestValueStream(0, 100, 10),
},
{
Metric: model.Metric{
"name": "http_requests",
"job": "api-server",
"instance": "1",
"group": "production",
},
Values: getTestValueStream(0, 200, 20),
},
{
Metric: model.Metric{
"name": "http_requests",
"job": "api-server",
"instance": "0",
"group": "canary",
},
Values: getTestValueStream(0, 300, 30),
},
{
Metric: model.Metric{
"name": "http_requests",
"job": "api-server",
"instance": "1",
"group": "canary",
},
Values: getTestValueStream(0, 400, 40),
},
{
Metric: model.Metric{
"name": "http_requests",
"job": "app-server",
"instance": "0",
"group": "production",
},
Values: getTestValueStream(0, 500, 50),
},
{
Metric: model.Metric{
"name": "http_requests",
"job": "app-server",
"instance": "1",
"group": "production",
},
Values: getTestValueStream(0, 600, 60),
},
{
Metric: model.Metric{
"name": "http_requests",
"job": "app-server",
"instance": "0",
"group": "canary",
},
Values: getTestValueStream(0, 700, 70),
},
{
Metric: model.Metric{
"name": "http_requests",
"job": "app-server",
"instance": "1",
"group": "canary",
},
Values: getTestValueStream(0, 800, 80),
},
}
var testVector = getTestVectorFromTestMatrix(testMatrix)