Merge pull request #209 from prometheus/refactor/protobuf-config

Convert the Prometheus configuration to protocol buffers.
juliusv committed 30f0239f96 on 2013-05-01 05:57:45 -07:00
30 changed files with 366 additions and 1674 deletions
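
This PR replaces the hand-written golex/yacc configuration parser with a protocol buffer schema (config/config.proto) whose text format is parsed via proto.UnmarshalText. As a quick orientation, here is a minimal, hedged sketch of the new load path, assuming the import paths introduced in this diff (the file name is illustrative):

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/prometheus/config"
)

func main() {
	// LoadFromFile parses the text-format protobuf, backfills defaults and validates it.
	conf, err := config.LoadFromFile("prometheus.conf") // illustrative file name
	if err != nil {
		log.Fatalf("Error loading configuration: %v", err)
	}
	// String() renders the parsed protobuf back as text (used by the status page below).
	fmt.Println(conf.String())
}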


@ -39,11 +39,12 @@ advice:
binary: build
go build $(BUILDFLAGS) -o prometheus.build
build: preparation model web
build: preparation config model web
go build $(BUILDFLAGS) .
clean:
$(MAKE) -C build clean
$(MAKE) -C config clean
$(MAKE) -C model clean
$(MAKE) -C web clean
rm -rf $(TEST_ARTIFACTS)
@ -51,6 +52,9 @@ clean:
-find . -type f -iname '*#' -exec rm '{}' ';'
-find . -type f -iname '.#*' -exec rm '{}' ';'
config: preparation
$(MAKE) -C config
documentation: search_index
godoc -http=:6060 -index -index_files='search_index'
@ -90,7 +94,7 @@ test: build
go test ./utility/... $(GO_TEST_FLAGS)
go test ./web/... $(GO_TEST_FLAGS)
web: preparation model
web: preparation config model
$(MAKE) -C web
.PHONY: advice binary build clean documentation format model package preparation run search_index source_path test
.PHONY: advice binary build clean config documentation format model package preparation run search_index source_path test


@ -24,7 +24,7 @@ import (
// runtime state. It enables simpler passing of this state to components that
// require it.
type ApplicationState struct {
Config *config.Config
Config config.Config
RuleManager rules.RuleManager
Storage metric.Storage
TargetManager retrieval.TargetManager


@ -11,13 +11,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
all: parser.y.go lexer.l.go
all: generated
parser.y.go: parser.y
go tool yacc -o parser.y.go -v "" parser.y
SUFFIXES:
lexer.l.go: parser.y.go lexer.l
golex lexer.l
MAKE_ARTIFACTS = generated-stamp
include ../Makefile.INCLUDE
generated: generated-stamp
generated-stamp: config.proto
protoc --proto_path=$(PREFIX)/include:. --go_out=generated/ config.proto
touch $@
clean:
rm lexer.l.go parser.y.go
rm -rf generated/*
-rm -f $(MAKE_ARTIFACTS)
.PHONY: generated


@ -14,114 +14,119 @@
package config
import (
"errors"
"code.google.com/p/goprotobuf/proto"
"fmt"
"github.com/prometheus/prometheus/model"
pb "github.com/prometheus/prometheus/config/generated"
"github.com/prometheus/prometheus/utility"
"regexp"
"time"
)
var jobNameRE = regexp.MustCompile("^[a-zA-Z_][a-zA-Z0-9_-]*$")
var labelNameRE = regexp.MustCompile("^[a-zA-Z_][a-zA-Z0-9_]*$")
// Config encapsulates the configuration of a Prometheus instance. It wraps the
// raw configuration protocol buffer to be able to add custom methods to it.
type Config struct {
Global *GlobalConfig
Jobs []JobConfig
// The protobuf containing the actual configuration values.
pb.PrometheusConfig
}
type GlobalConfig struct {
ScrapeInterval time.Duration
EvaluationInterval time.Duration
Labels model.LabelSet
RuleFiles []string
// String returns an ASCII serialization of the loaded configuration protobuf.
func (c Config) String() string {
return proto.MarshalTextString(&c.PrometheusConfig)
}
type JobConfig struct {
Name string
ScrapeInterval time.Duration
Targets []Targets
}
type Targets struct {
Endpoints []string
Labels model.LabelSet
}
func New() *Config {
return &Config{
Global: &GlobalConfig{Labels: model.LabelSet{}},
// validateLabels validates whether label names have the correct format.
func (c Config) validateLabels(labels *pb.LabelPairs) error {
if labels == nil {
return nil
}
}
func (config *Config) AddJob(options map[string]string, targets []Targets) error {
name, ok := options["name"]
if !ok {
return errors.New("Missing job name")
}
if len(targets) == 0 {
return fmt.Errorf("No targets configured for job '%v'", name)
}
job := JobConfig{
Targets: tmpJobTargets,
}
for option, value := range options {
if err := job.SetOption(option, value); err != nil {
return err
for _, label := range labels.Label {
if !labelNameRE.MatchString(label.GetName()) {
return fmt.Errorf("Invalid label name '%s'", label.GetName())
}
}
config.Jobs = append(config.Jobs, job)
return nil
}
func (config *Config) GetJobByName(name string) (jobConfig *JobConfig) {
for _, job := range config.Jobs {
if job.Name == name {
jobConfig = &job
// Validate checks an entire parsed Config for the validity of its fields.
func (c Config) Validate() error {
// Check the global configuration section for validity.
global := c.Global
if _, err := utility.StringToDuration(global.GetScrapeInterval()); err != nil {
return fmt.Errorf("Invalid global scrape interval: %s", err)
}
if _, err := utility.StringToDuration(global.GetEvaluationInterval()); err != nil {
return fmt.Errorf("Invalid rule evaluation interval: %s", err)
}
if err := c.validateLabels(global.Labels); err != nil {
return fmt.Errorf("Invalid global labels: %s", err)
}
// Check each job configuration for validity.
for _, job := range c.Job {
if !jobNameRE.MatchString(job.GetName()) {
return fmt.Errorf("Invalid job name '%s'", job.GetName())
}
if _, err := utility.StringToDuration(job.GetScrapeInterval()); err != nil {
return fmt.Errorf("Invalid scrape interval for job '%s': %s", job.GetName(), err)
}
for _, targetGroup := range job.TargetGroup {
if err := c.validateLabels(targetGroup.Labels); err != nil {
return fmt.Errorf("Invalid labels for job '%s': %s", job.GetName(), err)
}
}
}
return nil
}
// GetJobByName finds a job by its name in a Config object.
func (c Config) GetJobByName(name string) (jobConfig *JobConfig) {
for _, job := range c.Job {
if job.GetName() == name {
jobConfig = &JobConfig{*job}
break
}
}
return
}
func (config *GlobalConfig) SetOption(option string, value string) (err error) {
switch option {
case "scrape_interval":
config.ScrapeInterval, err = utility.StringToDuration(value)
return nil
case "evaluation_interval":
config.EvaluationInterval, err = utility.StringToDuration(value)
return err
default:
err = fmt.Errorf("Unrecognized global configuration option '%v'", option)
// Jobs returns all the jobs in a Config object.
func (c Config) Jobs() (jobs []JobConfig) {
for _, job := range c.Job {
jobs = append(jobs, JobConfig{*job})
}
return
}
func (config *GlobalConfig) SetLabels(labels model.LabelSet) {
for k, v := range labels {
config.Labels[k] = v
// stringToDuration converts a string to a duration and dies on invalid format.
func stringToDuration(intervalStr string) time.Duration {
duration, err := utility.StringToDuration(intervalStr)
if err != nil {
panic(err)
}
return duration
}
func (config *GlobalConfig) AddRuleFiles(ruleFiles []string) {
for _, ruleFile := range ruleFiles {
config.RuleFiles = append(config.RuleFiles, ruleFile)
}
// ScrapeInterval gets the default scrape interval for a Config.
func (c Config) ScrapeInterval() time.Duration {
return stringToDuration(c.Global.GetScrapeInterval())
}
func (job *JobConfig) SetOption(option string, value string) (err error) {
switch option {
case "name":
job.Name = value
case "scrape_interval":
job.ScrapeInterval, err = utility.StringToDuration(value)
default:
err = fmt.Errorf("Unrecognized job configuration option '%v'", option)
}
return
// EvaluationInterval gets the default evaluation interval for a Config.
func (c Config) EvaluationInterval() time.Duration {
return stringToDuration(c.Global.GetEvaluationInterval())
}
func (job *JobConfig) AddTargets(endpoints []string, labels model.LabelSet) {
targets := Targets{
Endpoints: endpoints,
Labels: labels,
}
job.Targets = append(job.Targets, targets)
// JobConfig encapsulates the configuration of a single job. It wraps the raw
// job protocol buffer to be able to add custom methods to it.
type JobConfig struct {
pb.JobConfig
}
// ScrapeInterval gets the scrape interval for a job.
func (c JobConfig) ScrapeInterval() time.Duration {
return stringToDuration(c.GetScrapeInterval())
}
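
For orientation, a short, hedged sketch of how the wrapper methods above are meant to be consumed; the fixture path and job name are illustrative, not prescribed by this commit:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/prometheus/config"
)

func main() {
	conf, err := config.LoadFromFile("fixtures/sample.conf.input") // illustrative path
	if err != nil {
		log.Fatal(err)
	}

	// Global defaults via the convenience methods on the wrapper.
	fmt.Println("global scrape interval:", conf.ScrapeInterval())

	// Jobs() wraps each raw protobuf job in a JobConfig.
	for _, job := range conf.Jobs() {
		fmt.Printf("job %s scrapes every %v\n", job.GetName(), job.ScrapeInterval())
	}

	// GetJobByName returns nil when no job has the given name.
	if job := conf.GetJobByName("prometheus"); job != nil {
		fmt.Println("found job:", job.GetName())
	}
}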

config/config.proto (new file, 70 lines)

@ -0,0 +1,70 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package io.prometheus;
// A label/value pair suitable for attaching to timeseries.
message LabelPair {
// The name of the label. Must adhere to the regex "[a-zA-Z_][a-zA-Z0-9_]*".
optional string name = 1;
// The value of the label. May contain any characters.
optional string value = 2;
}
// A set of label/value pairs.
message LabelPairs {
repeated LabelPair label = 1;
}
// The global Prometheus configuration section.
message GlobalConfig {
// How frequently to scrape targets by default. Must be a valid Prometheus
// duration string in the form "[0-9]+[smhdwy]".
optional string scrape_interval = 1 [default = "1m"];
// How frequently to evaluate rules by default. Must be a valid Prometheus
// duration string in the form "[0-9]+[smhdwy]".
optional string evaluation_interval = 2 [default = "1m"];
// The labels to add to any timeseries that this Prometheus instance scrapes.
optional LabelPairs labels = 3;
// The list of file names of rule files to load.
repeated string rule_file = 4;
}
// A labeled group of targets to scrape for a job.
message TargetGroup {
// The list of endpoints to scrape via HTTP.
repeated string target = 1;
// The labels to add to any timeseries scraped for this target group.
optional LabelPairs labels = 2;
}
// The configuration for a Prometheus job to scrape.
message JobConfig {
// The job name. Must adhere to the regex "[a-zA-Z_][a-zA-Z0-9_-]*".
required string name = 1;
// How frequently to scrape targets from this job. Overrides the global
// default.
optional string scrape_interval = 2;
// List of labeled target groups for this job.
repeated TargetGroup target_group = 3;
}
// The top-level Prometheus configuration.
message PrometheusConfig {
// Global Prometheus configuration options. If omitted, an empty global
// configuration with default values (see GlobalConfig definition) will be
// created.
optional GlobalConfig global = 1;
// The list of jobs to scrape.
repeated JobConfig job = 2;
}
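
Since these are proto2 optional fields with declared defaults, the generated getters fall back to those defaults whenever a field is unset; this is what lets the loading code substitute an empty GlobalConfig for an omitted global section and still obtain usable intervals. A hedged sketch:

package main

import (
	"fmt"

	pb "github.com/prometheus/prometheus/config/generated"
)

func main() {
	// An empty global section, as substituted by the loader when a config
	// file omits the section entirely.
	global := &pb.GlobalConfig{}

	fmt.Println(global.GetScrapeInterval())     // "1m", the declared default
	fmt.Println(global.GetEvaluationInterval()) // "1m", the declared default
	fmt.Println(global.Labels == nil)           // true; validateLabels treats nil as valid
}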


@ -14,8 +14,6 @@
package config
import (
"fmt"
"io/ioutil"
"path"
"strings"
"testing"
@ -25,37 +23,41 @@ var fixturesPath = "fixtures"
var configTests = []struct {
inputFile string
printedFile string
shouldFail bool
errContains string
}{
{
inputFile: "minimal.conf.input",
printedFile: "minimal.conf.printed",
inputFile: "minimal.conf.input",
}, {
inputFile: "sample.conf.input",
printedFile: "sample.conf.printed",
inputFile: "sample.conf.input",
}, {
// TODO: Options that are not provided should be set to sane defaults or
// create errors during config loading (as appropriate). Right now, these
// options remain at their zero-values, which is probably not what we want.
inputFile: "empty.conf.input",
printedFile: "empty.conf.printed",
inputFile: "empty.conf.input",
},
{
inputFile: "invalid_proto_format.conf.input",
shouldFail: true,
errContains: "unknown field name",
},
{
inputFile: "invalid_scrape_interval.conf.input",
shouldFail: true,
errContains: "Invalid global scrape interval",
},
{
inputFile: "invalid_job_name.conf.input",
shouldFail: true,
errContains: "Invalid job name",
},
{
inputFile: "invalid_label_name.conf.input",
shouldFail: true,
errContains: "Invalid label name",
},
// TODO: To enable testing of bad configs, we first need to change config
// loading so that it doesn't exit when loading a bad config. Instead, the
// configuration error should be passed back all the way to the caller.
//
//{
// inputFile: "bad_job_option.conf.input",
// shouldFail: true,
// errContains: "Missing job name",
//},
}
func TestConfigs(t *testing.T) {
for i, configTest := range configTests {
testConfig, err := LoadFromFile(path.Join(fixturesPath, configTest.inputFile))
_, err := LoadFromFile(path.Join(fixturesPath, configTest.inputFile))
if err != nil {
if !configTest.shouldFail {
@ -65,22 +67,6 @@ func TestConfigs(t *testing.T) {
t.Fatalf("%d. Expected error containing '%v', got: %v", i, configTest.errContains, err)
}
}
} else {
printedConfig, err := ioutil.ReadFile(path.Join(fixturesPath, configTest.printedFile))
if err != nil {
t.Fatalf("%d. Error reading config %v: %v", i, configTest.inputFile, err)
continue
}
expected := string(printedConfig)
actual := testConfig.ToString(0)
if actual != expected {
t.Errorf("%d. %v: printed config doesn't match expected output", i, configTest.inputFile)
t.Errorf("Expected:\n%v\n\nActual:\n%v\n", expected, actual)
t.Fatalf("Writing expected and actual printed configs to /tmp for diffing (see test source for paths)")
ioutil.WriteFile(fmt.Sprintf("/tmp/%s.expected", configTest.printedFile), []byte(expected), 0600)
ioutil.WriteFile(fmt.Sprintf("/tmp/%s.actual", configTest.printedFile), []byte(actual), 0600)
}
}
}
}
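
The hunk above elides the body of the failure branch; as a rough, hedged guess at its shape based on the errContains fields in the test table (the exact code in this commit may differ):

if err != nil {
	if !configTest.shouldFail {
		t.Fatalf("%d. Error parsing config %v: %v", i, configTest.inputFile, err)
	}
	if !strings.Contains(err.Error(), configTest.errContains) {
		t.Fatalf("%d. Expected error containing '%v', got: %v", i, configTest.errContains, err)
	}
}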


@ -1,21 +0,0 @@
global {
scrape_interval = "30s"
evaluation_interval = "30s"
labels {
monitor = "test"
}
rule_files = [
"prometheus.rules"
]
}
job {
jobname = "prometheus"
scrape_interval = "15s"
targets {
endpoints = [
"http://localhost:9090/metrics.json"
]
}
}


@ -1,8 +0,0 @@
global {
scrape_interval = "0y"
evaluation_interval = "0y"
rule_files = [
]
}


@ -0,0 +1,3 @@
job: <
name: "1testjob"
>


@ -0,0 +1,10 @@
global <
scrape_interval: "30s"
evaluation_interval: "30s"
labels: <
label: <
name: "monitor-test"
value: "test"
>
>
>


@ -0,0 +1,11 @@
global <
scrape_interval: "30s"
evaluation_interval: "30s"
unknown_field: "foo"
labels: <
label: <
name: "monitor"
value: "test"
>
>
>


@ -0,0 +1,10 @@
global <
scrape_interval: "30"
evaluation_interval: "30s"
labels: <
label: <
name: "monitor"
value: "test"
>
>
>


@ -1,21 +1,20 @@
global {
scrape_interval = "30s"
evaluation_interval = "30s"
labels {
monitor = "test"
}
rule_files = [
"prometheus.rules"
]
}
global <
scrape_interval: "30s"
evaluation_interval: "30s"
labels: <
label: <
name: "monitor"
value: "test"
>
>
rule_file: "prometheus.rules"
>
job {
name = "prometheus"
scrape_interval = "15s"
job: <
name: "prometheus"
scrape_interval: "15s"
targets {
endpoints = [
"http://localhost:9090/metrics.json"
]
}
}
target_group: <
target: "http://localhost:9090/metrics.json"
>
>


@ -1,20 +0,0 @@
global {
scrape_interval = "30s"
evaluation_interval = "30s"
labels {
monitor = "test"
}
rule_files = [
"prometheus.rules"
]
}
job {
name = "prometheus"
scrape_interval = "15s"
targets {
endpoints = [
"http://localhost:9090/metrics.json"
]
}
}


@ -1,51 +1,55 @@
global {
scrape_interval = "30s"
evaluation_interval = "30s"
labels {
monitor = "test"
}
rule_files = [
"prometheus.rules"
]
}
global <
scrape_interval: "30s"
evaluation_interval: "30s"
labels: <
label: <
name: "monitor"
value: "test"
>
>
rule_file: "prometheus.rules"
>
job {
name = "prometheus"
scrape_interval = "15s"
job: <
name: "prometheus"
scrape_interval: "15s"
targets {
endpoints = [
"http://localhost:9090/metrics.json"
]
labels {
group = "canary"
}
}
}
target_group: <
target: "http://localhost:9090/metrics.json"
labels: <
label: <
name: "group"
value: "canary"
>
>
>
>
job {
name = "random"
scrape_interval = "30s"
job: <
name: "random"
scrape_interval: "30s"
targets {
endpoints = [
"http://random.com:8080/metrics.json",
"http://random.com:8081/metrics.json",
"http://random.com:8082/metrics.json",
"http://random.com:8083/metrics.json",
"http://random.com:8084/metrics.json"
]
labels {
group = "production"
}
}
targets {
endpoints = [
"http://random.com:8085/metrics.json",
"http://random.com:8086/metrics.json"
]
labels {
group = "canary"
}
}
}
target_group: <
target: "http://random.com:8080/metrics.json"
target: "http://random.com:8081/metrics.json"
target: "http://random.com:8082/metrics.json"
target: "http://random.com:8083/metrics.json"
target: "http://random.com:8084/metrics.json"
labels: <
label: <
name: "group"
value: "production"
>
>
>
target_group: <
target: "http://random.com:8085/metrics.json"
target: "http://random.com:8086/metrics.json"
labels: <
label: <
name: "group"
value: "canary"
>
>
>
>


@ -1,49 +0,0 @@
global {
scrape_interval = "30s"
evaluation_interval = "30s"
labels {
monitor = "test"
}
rule_files = [
"prometheus.rules"
]
}
job {
name = "prometheus"
scrape_interval = "15s"
targets {
endpoints = [
"http://localhost:9090/metrics.json"
]
labels {
group = "canary"
}
}
}
job {
name = "random"
scrape_interval = "30s"
targets {
endpoints = [
"http://random.com:8080/metrics.json",
"http://random.com:8081/metrics.json",
"http://random.com:8082/metrics.json",
"http://random.com:8083/metrics.json",
"http://random.com:8084/metrics.json"
]
labels {
group = "production"
}
}
targets {
endpoints = [
"http://random.com:8085/metrics.json",
"http://random.com:8086/metrics.json"
]
labels {
group = "canary"
}
}
}

config/generated/.gitignore (vendored, new file, 1 line)

@ -0,0 +1 @@
config.pb.go


@ -1,67 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package config
import (
"fmt"
"github.com/prometheus/prometheus/model"
"log"
)
// Unfortunately, more global variables that are needed for parsing.
var tmpJobOptions = map[string]string{}
var tmpJobTargets = []Targets{}
var tmpTargetEndpoints = []string{}
var tmpTargetLabels = model.LabelSet{}
func configError(error string, v ...interface{}) {
message := fmt.Sprintf(error, v...)
// TODO: Don't just die here. Pass errors back all the way to the caller
// instead.
log.Fatalf("Line %v, char %v: %s", yyline, yypos, message)
}
func PushJobOption(option string, value string) {
tmpJobOptions[option] = value
}
func PushJobTargets() {
targets := Targets{
Endpoints: tmpTargetEndpoints,
Labels: tmpTargetLabels,
}
tmpJobTargets = append(tmpJobTargets, targets)
tmpTargetLabels = model.LabelSet{}
tmpTargetEndpoints = []string{}
}
func PushTargetEndpoints(endpoints []string) {
for _, endpoint := range endpoints {
tmpTargetEndpoints = append(tmpTargetEndpoints, endpoint)
}
}
func PushTargetLabels(labels model.LabelSet) {
for k, v := range labels {
tmpTargetLabels[k] = v
}
}
func PopJob() {
if err := parsedConfig.AddJob(tmpJobOptions, tmpJobTargets); err != nil {
configError(err.Error())
}
tmpJobOptions = map[string]string{}
tmpJobTargets = []Targets{}
}


@ -1,56 +0,0 @@
/* Copyright 2013 Prometheus Team
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http: *www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
%{
package config
%}
D [0-9]
L [a-zA-Z_]
%s S_GLOBAL S_GLOBAL_LABELS S_JOB S_TARGETS S_TARGET_LABELS
%x S_COMMENTS
%%
. { yypos++; REJECT }
\n { yyline++; yypos = 1; REJECT }
"/*" { BEGIN(S_COMMENTS); }
<S_COMMENTS>"*/" { BEGIN(0) }
<S_COMMENTS>. { /* ignore chars within multi-line comments */ }
\/\/[^\r\n]*\n { /* gobble up one-line comments */ }
<0>global { BEGIN(S_GLOBAL); return GLOBAL }
<S_GLOBAL>labels { BEGIN(S_GLOBAL_LABELS); return LABELS }
<S_GLOBAL>rule_files { return RULE_FILES }
<S_GLOBAL_LABELS>"}" { BEGIN(S_GLOBAL); REJECT }
<S_GLOBAL>"}" { BEGIN(0); REJECT }
<0>job { BEGIN(S_JOB); return JOB }
<S_JOB>targets { BEGIN(S_TARGETS); return TARGETS }
<S_TARGETS>endpoints { return ENDPOINTS }
<S_TARGETS>labels { BEGIN(S_TARGET_LABELS); return LABELS }
<S_TARGET_LABELS>"}" { BEGIN(S_TARGETS); REJECT }
<S_TARGETS>"}" { BEGIN(S_JOB); REJECT }
<S_JOB>"}" { BEGIN(0); REJECT }
{L}({L}|{D})+ { yylval.str = yytext; return IDENTIFIER }
\"(\\.|[^\\"])*\" { yylval.str = yytext[1:len(yytext) - 1]; return STRING }
\'(\\.|[^\\'])*\' { yylval.str = yytext[1:len(yytext) - 1]; return STRING }
[{}\[\]()=,] { return int(yytext[0]) }
. { /* don't print any remaining chars (whitespace) */ }
\n { /* don't print any remaining chars (whitespace) */ }
%%


@ -1,551 +0,0 @@
// Generated by golex
package config
import (
"bufio"
"io"
"os"
"regexp"
"sort"
)
var yyin io.Reader = os.Stdin
var yyout io.Writer = os.Stdout
type yyrule struct {
regexp *regexp.Regexp
trailing *regexp.Regexp
startConds []yystartcondition
sol bool
action func() yyactionreturn
}
type yyactionreturn struct {
userReturn int
returnType yyactionreturntype
}
type yyactionreturntype int
const (
yyRT_FALLTHROUGH yyactionreturntype = iota
yyRT_USER_RETURN
yyRT_REJECT
)
var yydata string = ""
var yyorig string
var yyorigidx int
var yytext string = ""
var yytextrepl bool = true
func yymore() {
yytextrepl = false
}
func yyBEGIN(state yystartcondition) {
YY_START = state
}
func yyECHO() {
yyout.Write([]byte(yytext))
}
func yyREJECT() {
panic("yyREJECT")
}
var yylessed int
func yyless(n int) {
yylessed = len(yytext) - n
}
func unput(c uint8) {
yyorig = yyorig[:yyorigidx] + string(c) + yyorig[yyorigidx:]
yydata = yydata[:len(yytext)-yylessed] + string(c) + yydata[len(yytext)-yylessed:]
}
func input() int {
if len(yyorig) <= yyorigidx {
return EOF
}
c := yyorig[yyorigidx]
yyorig = yyorig[:yyorigidx] + yyorig[yyorigidx+1:]
yydata = yydata[:len(yytext)-yylessed] + yydata[len(yytext)-yylessed+1:]
return int(c)
}
var EOF int = -1
type yystartcondition int
var INITIAL yystartcondition = 0
var YY_START yystartcondition = INITIAL
type yylexMatch struct {
index int
matchFunc func() yyactionreturn
sortLen int
advLen int
}
type yylexMatchList []yylexMatch
func (ml yylexMatchList) Len() int {
return len(ml)
}
func (ml yylexMatchList) Less(i, j int) bool {
return ml[i].sortLen > ml[j].sortLen && ml[i].index > ml[j].index
}
func (ml yylexMatchList) Swap(i, j int) {
ml[i], ml[j] = ml[j], ml[i]
}
func yylex() int {
reader := bufio.NewReader(yyin)
for {
line, err := reader.ReadString('\n')
if len(line) == 0 && err == io.EOF {
break
}
yydata += line
}
yyorig = yydata
yyorigidx = 0
yyactioninline(yyBEGIN)
for len(yydata) > 0 {
matches := yylexMatchList(make([]yylexMatch, 0, 6))
excl := yystartconditionexclmap[YY_START]
for i, v := range yyrules {
sol := yyorigidx == 0 || yyorig[yyorigidx-1] == '\n'
if v.sol && !sol {
continue
}
// Check start conditions.
ok := false
// YY_START or '*' must feature in v.startConds
for _, c := range v.startConds {
if c == YY_START || c == -1 {
ok = true
break
}
}
if !excl {
// If v.startConds is empty, this is also acceptable.
if len(v.startConds) == 0 {
ok = true
}
}
if !ok {
continue
}
idxs := v.regexp.FindStringIndex(yydata)
if idxs != nil && idxs[0] == 0 {
// Check the trailing context, if any.
checksOk := true
sortLen := idxs[1]
advLen := idxs[1]
if v.trailing != nil {
tridxs := v.trailing.FindStringIndex(yydata[idxs[1]:])
if tridxs == nil || tridxs[0] != 0 {
checksOk = false
} else {
sortLen += tridxs[1]
}
}
if checksOk {
matches = append(matches, yylexMatch{i, v.action, sortLen, advLen})
}
}
}
if yytextrepl {
yytext = ""
}
sort.Sort(matches)
tryMatch:
if len(matches) == 0 {
yytext += yydata[:1]
yydata = yydata[1:]
yyorigidx += 1
yyout.Write([]byte(yytext))
} else {
m := matches[0]
yytext += yydata[:m.advLen]
yyorigidx += m.advLen
yytextrepl, yylessed = true, 0
ar := m.matchFunc()
if ar.returnType != yyRT_REJECT {
yydata = yydata[m.advLen-yylessed:]
yyorigidx -= yylessed
}
switch ar.returnType {
case yyRT_FALLTHROUGH:
// Do nothing.
case yyRT_USER_RETURN:
return ar.userReturn
case yyRT_REJECT:
matches = matches[1:]
yytext = yytext[:len(yytext)-m.advLen]
yyorigidx -= m.advLen
goto tryMatch
}
}
}
return 0
}
var S_TARGETS yystartcondition = 1027
var S_JOB yystartcondition = 1026
var S_GLOBAL yystartcondition = 1024
var S_GLOBAL_LABELS yystartcondition = 1025
var S_COMMENTS yystartcondition = 1029
var S_TARGET_LABELS yystartcondition = 1028
var yystartconditionexclmap = map[yystartcondition]bool{S_GLOBAL: false, S_GLOBAL_LABELS: false, S_COMMENTS: true, S_TARGET_LABELS: false, S_TARGETS: false, S_JOB: false, }
var yyrules []yyrule = []yyrule{{regexp.MustCompile("[^\\n]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yypos++
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\n"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyline++
yypos = 1
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("/\\*"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_COMMENTS)
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\*/"), nil, []yystartcondition{S_COMMENTS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(0)
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[^\\n]"), nil, []yystartcondition{S_COMMENTS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\/\\/[^\\r\\n]*\\n"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("global"), nil, []yystartcondition{0, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_GLOBAL)
return yyactionreturn{GLOBAL, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("labels"), nil, []yystartcondition{S_GLOBAL, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_GLOBAL_LABELS)
return yyactionreturn{LABELS, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("rule_files"), nil, []yystartcondition{S_GLOBAL, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
return yyactionreturn{RULE_FILES, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\}"), nil, []yystartcondition{S_GLOBAL_LABELS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_GLOBAL)
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\}"), nil, []yystartcondition{S_GLOBAL, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(0)
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("job"), nil, []yystartcondition{0, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_JOB)
return yyactionreturn{JOB, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("targets"), nil, []yystartcondition{S_JOB, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_TARGETS)
return yyactionreturn{TARGETS, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("endpoints"), nil, []yystartcondition{S_TARGETS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
return yyactionreturn{ENDPOINTS, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("labels"), nil, []yystartcondition{S_TARGETS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_TARGET_LABELS)
return yyactionreturn{LABELS, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\}"), nil, []yystartcondition{S_TARGET_LABELS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_TARGETS)
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\}"), nil, []yystartcondition{S_TARGETS, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(S_JOB)
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\}"), nil, []yystartcondition{S_JOB, }, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yyBEGIN(0)
yyREJECT()
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("([a-zA-Z_])(([a-zA-Z_])|([0-9]))+"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext
return yyactionreturn{IDENTIFIER, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\\"(\\\\[^\\n]|[^\\\\\"])*\\\""), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext[1 : len(yytext)-1]
return yyactionreturn{STRING, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\'(\\\\[^\\n]|[^\\\\'])*\\'"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
yylval.str = yytext[1 : len(yytext)-1]
return yyactionreturn{STRING, yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[{}\\[\\]()=,]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
return yyactionreturn{int(yytext[0]), yyRT_USER_RETURN}
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("[^\\n]"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\n"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
defer func() {
if r := recover(); r != nil {
if r != "yyREJECT" {
panic(r)
}
yyar.returnType = yyRT_REJECT
}
}()
{
}
return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, }
func yyactioninline(BEGIN func(yystartcondition)) {}


@ -14,66 +14,35 @@
package config
import (
"errors"
"fmt"
"io"
"os"
"strings"
"code.google.com/p/goprotobuf/proto"
pb "github.com/prometheus/prometheus/config/generated"
"io/ioutil"
)
// NOTE: This parser is non-reentrant due to its dependence on global state.
// GoLex sadly needs these global variables for storing temporary token/parsing information.
var yylval *yySymType // For storing extra token information, like the contents of a string.
var yyline int // Line number within the current file or buffer.
var yypos int // Character position within the current line.
var parsedConfig *Config // Temporary variable for storing the parsed configuration.
type ConfigLexer struct {
errors []string
}
func (lexer *ConfigLexer) Lex(lval *yySymType) int {
yylval = lval
token_type := yylex()
return token_type
}
func (lexer *ConfigLexer) Error(errorStr string) {
err := fmt.Sprintf("Error reading config at line %v, char %v: %v", yyline, yypos, errorStr)
lexer.errors = append(lexer.errors, err)
}
func LoadFromReader(configReader io.Reader) (*Config, error) {
parsedConfig = New()
yyin = configReader
yypos = 1
yyline = 1
yydata = ""
yytext = ""
lexer := &ConfigLexer{}
yyParse(lexer)
if len(lexer.errors) > 0 {
err := errors.New(strings.Join(lexer.errors, "\n"))
return &Config{}, err
func LoadFromString(configStr string) (config Config, err error) {
configProto := pb.PrometheusConfig{}
if err = proto.UnmarshalText(configStr, &configProto); err != nil {
return
}
return parsedConfig, nil
if configProto.Global == nil {
configProto.Global = &pb.GlobalConfig{}
}
for _, job := range configProto.Job {
if job.ScrapeInterval == nil {
job.ScrapeInterval = proto.String(configProto.Global.GetScrapeInterval())
}
}
config = Config{configProto}
err = config.Validate()
return
}
func LoadFromString(configString string) (*Config, error) {
configReader := strings.NewReader(configString)
return LoadFromReader(configReader)
}
func LoadFromFile(fileName string) (*Config, error) {
configReader, err := os.Open(fileName)
func LoadFromFile(fileName string) (config Config, err error) {
configStr, err := ioutil.ReadFile(fileName)
if err != nil {
return &Config{}, err
return
}
defer configReader.Close()
return LoadFromReader(configReader)
config, err = LoadFromString(string(configStr))
return
}
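
For illustration, a hedged sketch of LoadFromString with an inline text-format config (the job name and target are made up); note how a job that omits scrape_interval inherits the global value before validation runs:

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/config"
)

func main() {
	conf, err := config.LoadFromString(`
global <
  scrape_interval: "15s"
>
job: <
  name: "example_job"
  target_group: <
    target: "http://localhost:9090/metrics.json"
  >
>
`)
	if err != nil {
		fmt.Println("config rejected:", err) // Validate() errors surface here
		return
	}
	// The job omitted scrape_interval, so it inherited the global "15s".
	fmt.Println(conf.Jobs()[0].ScrapeInterval())
}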


@ -1,115 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
%{
package config
import "fmt"
import "github.com/prometheus/prometheus/model"
%}
%union {
num model.SampleValue
str string
stringSlice []string
labelSet model.LabelSet
}
%token <str> IDENTIFIER STRING
%token GLOBAL JOB
%token RULE_FILES
%token LABELS TARGETS ENDPOINTS
%type <stringSlice> string_array string_list rule_files_stat endpoints_stat
%type <labelSet> labels_stat label_assign label_assign_list
%start config
%%
config : /* empty */
| config config_stanza
;
config_stanza : GLOBAL '{' global_stat_list '}'
| JOB '{' job_stat_list '}'
{ PopJob() }
;
global_stat_list : /* empty */
| global_stat_list global_stat
;
global_stat : IDENTIFIER '=' STRING
{ parsedConfig.Global.SetOption($1, $3) }
| labels_stat
{ parsedConfig.Global.SetLabels($1) }
| rule_files_stat
{ parsedConfig.Global.AddRuleFiles($1) }
;
labels_stat : LABELS '{' label_assign_list '}'
{ $$ = $3 }
| LABELS '{' '}'
{ $$ = model.LabelSet{} }
;
label_assign_list : label_assign
{ $$ = $1 }
| label_assign_list ',' label_assign
{ for k, v := range $3 { $$[k] = v } }
;
label_assign : IDENTIFIER '=' STRING
{ $$ = model.LabelSet{ model.LabelName($1): model.LabelValue($3) } }
;
rule_files_stat : RULE_FILES '=' string_array
{ $$ = $3 }
;
job_stat_list : /* empty */
| job_stat_list job_stat
;
job_stat : IDENTIFIER '=' STRING
{ PushJobOption($1, $3) }
| TARGETS '{' targets_stat_list '}'
{ PushJobTargets() }
;
targets_stat_list : /* empty */
| targets_stat_list targets_stat
;
targets_stat : endpoints_stat
{ PushTargetEndpoints($1) }
| labels_stat
{ PushTargetLabels($1) }
;
endpoints_stat : ENDPOINTS '=' string_array
{ $$ = $3 }
;
string_array : '[' string_list ']'
{ $$ = $2 }
| '[' ']'
{ $$ = []string{} }
;
string_list : STRING
{ $$ = []string{$1} }
| string_list ',' STRING
{ $$ = append($$, $3) }
;
%%


@ -1,420 +0,0 @@
//line parser.y:15
package config
import "fmt"
import "github.com/prometheus/prometheus/model"
//line parser.y:21
type yySymType struct {
yys int
num model.SampleValue
str string
stringSlice []string
labelSet model.LabelSet
}
const IDENTIFIER = 57346
const STRING = 57347
const GLOBAL = 57348
const JOB = 57349
const RULE_FILES = 57350
const LABELS = 57351
const TARGETS = 57352
const ENDPOINTS = 57353
var yyToknames = []string{
"IDENTIFIER",
"STRING",
"GLOBAL",
"JOB",
"RULE_FILES",
"LABELS",
"TARGETS",
"ENDPOINTS",
}
var yyStatenames = []string{}
const yyEofCode = 1
const yyErrCode = 2
const yyMaxDepth = 200
//line parser.y:115
//line yacctab:1
var yyExca = []int{
-1, 1,
1, -1,
-2, 0,
}
const yyNprod = 29
const yyPrivate = 57344
var yyTokenNames []string
var yyStates []string
const yyLast = 53
var yyAct = []int{
30, 28, 12, 48, 39, 47, 31, 34, 11, 35,
49, 36, 15, 14, 29, 18, 38, 9, 14, 23,
44, 19, 40, 27, 16, 50, 22, 20, 24, 21,
6, 5, 3, 4, 46, 32, 43, 45, 25, 29,
41, 33, 17, 10, 8, 7, 2, 1, 26, 42,
51, 13, 37,
}
var yyPact = []int{
-1000, 26, -1000, 19, 18, -1000, -1000, 4, 11, -1000,
-1000, 13, -1000, -1000, 17, 12, -1000, -1000, 5, 16,
33, 10, -10, 30, -1000, -1000, -6, -1000, -1000, -3,
-1000, -1, -1000, 9, -1000, 35, 29, -12, -1000, -1000,
-1000, -1000, -1000, -1000, -4, -1000, -1000, -1000, 20, -10,
-1000, -1000,
}
var yyPgo = []int{
0, 0, 52, 51, 49, 2, 1, 48, 47, 46,
45, 44, 43, 42, 41, 40,
}
var yyR1 = []int{
0, 8, 8, 9, 9, 10, 10, 12, 12, 12,
5, 5, 7, 7, 6, 3, 11, 11, 13, 13,
14, 14, 15, 15, 4, 1, 1, 2, 2,
}
var yyR2 = []int{
0, 0, 2, 4, 4, 0, 2, 3, 1, 1,
4, 3, 1, 3, 3, 3, 0, 2, 3, 4,
0, 2, 1, 1, 3, 3, 2, 1, 3,
}
var yyChk = []int{
-1000, -8, -9, 6, 7, 12, 12, -10, -11, 13,
-12, 4, -5, -3, 9, 8, 13, -13, 4, 10,
14, 12, 14, 14, 12, 5, -7, 13, -6, 4,
-1, 16, 5, -14, 13, 15, 14, -2, 17, 5,
13, -15, -4, -5, 11, -6, 5, 17, 15, 14,
5, -1,
}
var yyDef = []int{
1, -2, 2, 0, 0, 5, 16, 0, 0, 3,
6, 0, 8, 9, 0, 0, 4, 17, 0, 0,
0, 0, 0, 0, 20, 7, 0, 11, 12, 0,
15, 0, 18, 0, 10, 0, 0, 0, 26, 27,
19, 21, 22, 23, 0, 13, 14, 25, 0, 0,
28, 24,
}
var yyTok1 = []int{
1, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 15, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 14, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 16, 3, 17, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 12, 3, 13,
}
var yyTok2 = []int{
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
}
var yyTok3 = []int{
0,
}
//line yaccpar:1
/* parser for yacc output */
var yyDebug = 0
type yyLexer interface {
Lex(lval *yySymType) int
Error(s string)
}
const yyFlag = -1000
func yyTokname(c int) string {
if c > 0 && c <= len(yyToknames) {
if yyToknames[c-1] != "" {
return yyToknames[c-1]
}
}
return fmt.Sprintf("tok-%v", c)
}
func yyStatname(s int) string {
if s >= 0 && s < len(yyStatenames) {
if yyStatenames[s] != "" {
return yyStatenames[s]
}
}
return fmt.Sprintf("state-%v", s)
}
func yylex1(lex yyLexer, lval *yySymType) int {
c := 0
char := lex.Lex(lval)
if char <= 0 {
c = yyTok1[0]
goto out
}
if char < len(yyTok1) {
c = yyTok1[char]
goto out
}
if char >= yyPrivate {
if char < yyPrivate+len(yyTok2) {
c = yyTok2[char-yyPrivate]
goto out
}
}
for i := 0; i < len(yyTok3); i += 2 {
c = yyTok3[i+0]
if c == char {
c = yyTok3[i+1]
goto out
}
}
out:
if c == 0 {
c = yyTok2[1] /* unknown char */
}
if yyDebug >= 3 {
fmt.Printf("lex %U %s\n", uint(char), yyTokname(c))
}
return c
}
func yyParse(yylex yyLexer) int {
var yyn int
var yylval yySymType
var yyVAL yySymType
yyS := make([]yySymType, yyMaxDepth)
Nerrs := 0 /* number of errors */
Errflag := 0 /* error recovery flag */
yystate := 0
yychar := -1
yyp := -1
goto yystack
ret0:
return 0
ret1:
return 1
yystack:
/* put a state and value onto the stack */
if yyDebug >= 4 {
fmt.Printf("char %v in %v\n", yyTokname(yychar), yyStatname(yystate))
}
yyp++
if yyp >= len(yyS) {
nyys := make([]yySymType, len(yyS)*2)
copy(nyys, yyS)
yyS = nyys
}
yyS[yyp] = yyVAL
yyS[yyp].yys = yystate
yynewstate:
yyn = yyPact[yystate]
if yyn <= yyFlag {
goto yydefault /* simple state */
}
if yychar < 0 {
yychar = yylex1(yylex, &yylval)
}
yyn += yychar
if yyn < 0 || yyn >= yyLast {
goto yydefault
}
yyn = yyAct[yyn]
if yyChk[yyn] == yychar { /* valid shift */
yychar = -1
yyVAL = yylval
yystate = yyn
if Errflag > 0 {
Errflag--
}
goto yystack
}
yydefault:
/* default state action */
yyn = yyDef[yystate]
if yyn == -2 {
if yychar < 0 {
yychar = yylex1(yylex, &yylval)
}
/* look through exception table */
xi := 0
for {
if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate {
break
}
xi += 2
}
for xi += 2; ; xi += 2 {
yyn = yyExca[xi+0]
if yyn < 0 || yyn == yychar {
break
}
}
yyn = yyExca[xi+1]
if yyn < 0 {
goto ret0
}
}
if yyn == 0 {
/* error ... attempt to resume parsing */
switch Errflag {
case 0: /* brand new error */
yylex.Error("syntax error")
Nerrs++
if yyDebug >= 1 {
fmt.Printf("%s", yyStatname(yystate))
fmt.Printf("saw %s\n", yyTokname(yychar))
}
fallthrough
case 1, 2: /* incompletely recovered error ... try again */
Errflag = 3
/* find a state where "error" is a legal shift action */
for yyp >= 0 {
yyn = yyPact[yyS[yyp].yys] + yyErrCode
if yyn >= 0 && yyn < yyLast {
yystate = yyAct[yyn] /* simulate a shift of "error" */
if yyChk[yystate] == yyErrCode {
goto yystack
}
}
/* the current p has no shift on "error", pop stack */
if yyDebug >= 2 {
fmt.Printf("error recovery pops state %d\n", yyS[yyp].yys)
}
yyp--
}
/* there is no state on the stack with an error shift ... abort */
goto ret1
case 3: /* no shift yet; clobber input char */
if yyDebug >= 2 {
fmt.Printf("error recovery discards %s\n", yyTokname(yychar))
}
if yychar == yyEofCode {
goto ret1
}
yychar = -1
goto yynewstate /* try again in the same state */
}
}
/* reduction by production yyn */
if yyDebug >= 2 {
fmt.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate))
}
yynt := yyn
yypt := yyp
_ = yypt // guard against "declared and not used"
yyp -= yyR2[yyn]
yyVAL = yyS[yyp+1]
/* consult goto table to find next state */
yyn = yyR1[yyn]
yyg := yyPgo[yyn]
yyj := yyg + yyS[yyp].yys + 1
if yyj >= yyLast {
yystate = yyAct[yyg]
} else {
yystate = yyAct[yyj]
if yyChk[yystate] != -yyn {
yystate = yyAct[yyg]
}
}
// dummy call; replaced with literal code
switch yynt {
case 4:
//line parser.y:45
{ PopJob() }
case 7:
//line parser.y:53
{ parsedConfig.Global.SetOption(yyS[yypt-2].str, yyS[yypt-0].str) }
case 8:
//line parser.y:55
{ parsedConfig.Global.SetLabels(yyS[yypt-0].labelSet) }
case 9:
//line parser.y:57
{ parsedConfig.Global.AddRuleFiles(yyS[yypt-0].stringSlice) }
case 10:
//line parser.y:61
{ yyVAL.labelSet = yyS[yypt-1].labelSet }
case 11:
//line parser.y:63
{ yyVAL.labelSet = model.LabelSet{} }
case 12:
//line parser.y:67
{ yyVAL.labelSet = yyS[yypt-0].labelSet }
case 13:
//line parser.y:69
{ for k, v := range yyS[yypt-0].labelSet { yyVAL.labelSet[k] = v } }
case 14:
//line parser.y:73
{ yyVAL.labelSet = model.LabelSet{ model.LabelName(yyS[yypt-2].str): model.LabelValue(yyS[yypt-0].str) } }
case 15:
//line parser.y:77
{ yyVAL.stringSlice = yyS[yypt-0].stringSlice }
case 18:
//line parser.y:85
{ PushJobOption(yyS[yypt-2].str, yyS[yypt-0].str) }
case 19:
//line parser.y:87
{ PushJobTargets() }
case 22:
//line parser.y:95
{ PushTargetEndpoints(yyS[yypt-0].stringSlice) }
case 23:
//line parser.y:97
{ PushTargetLabels(yyS[yypt-0].labelSet) }
case 24:
//line parser.y:101
{ yyVAL.stringSlice = yyS[yypt-0].stringSlice }
case 25:
//line parser.y:105
{ yyVAL.stringSlice = yyS[yypt-1].stringSlice }
case 26:
//line parser.y:107
{ yyVAL.stringSlice = []string{} }
case 27:
//line parser.y:111
{ yyVAL.stringSlice = []string{yyS[yypt-0].str} }
case 28:
//line parser.y:113
{ yyVAL.stringSlice = append(yyVAL.stringSlice, yyS[yypt-0].str) }
}
goto yystack /* stack new state and value */
}


@ -1,89 +0,0 @@
// Copyright 2013 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package config
import (
"fmt"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/utility"
"strings"
)
func indentStr(indent int, str string, v ...interface{}) string {
indentStr := ""
for i := 0; i < indent; i++ {
indentStr += "\t"
}
return fmt.Sprintf(indentStr+str, v...)
}
func (config *Config) ToString(indent int) string {
global := config.Global.ToString(indent)
jobs := []string{}
for _, job := range config.Jobs {
jobs = append(jobs, job.ToString(indent))
}
return indentStr(indent, "%v\n%v", global, strings.Join(jobs, "\n"))
}
func labelsToString(indent int, labels model.LabelSet) string {
str := indentStr(indent, "labels {\n")
labelStrings := []string{}
for label, value := range labels {
labelStrings = append(labelStrings, indentStr(indent+1, "%v = \"%v\"", label, value))
}
str += strings.Join(labelStrings, ",\n") + "\n"
str += indentStr(indent, "}\n")
return str
}
func stringListToString(indent int, list []string) string {
listString := []string{}
for _, item := range list {
listString = append(listString, indentStr(indent, "\"%v\"", item))
}
return strings.Join(listString, ",\n") + "\n"
}
func (global *GlobalConfig) ToString(indent int) string {
str := indentStr(indent, "global {\n")
str += indentStr(indent+1, "scrape_interval = \"%s\"\n", utility.DurationToString(global.ScrapeInterval))
str += indentStr(indent+1, "evaluation_interval = \"%s\"\n", utility.DurationToString(global.EvaluationInterval))
if len(global.Labels) > 0 {
str += labelsToString(indent+1, global.Labels)
}
str += indentStr(indent+1, "rule_files = [\n")
str += stringListToString(indent+2, global.RuleFiles)
str += indentStr(indent+1, "]\n")
str += indentStr(indent, "}\n")
return str
}
func (job *JobConfig) ToString(indent int) string {
str := indentStr(indent, "job {\n")
str += indentStr(indent+1, "name = \"%v\"\n", job.Name)
str += indentStr(indent+1, "scrape_interval = \"%s\"\n", utility.DurationToString(job.ScrapeInterval))
for _, targets := range job.Targets {
str += indentStr(indent+1, "targets {\n")
str += indentStr(indent+2, "endpoints = [\n")
str += stringListToString(indent+3, targets.Endpoints)
str += indentStr(indent+2, "]\n")
if len(targets.Labels) > 0 {
str += labelsToString(indent+2, targets.Labels)
}
str += indentStr(indent+1, "}\n")
}
str += indentStr(indent, "}\n")
return str
}


@ -107,7 +107,7 @@ func main() {
ast.SetStorage(ts)
ruleManager := rules.NewRuleManager(ruleResults, conf.Global.EvaluationInterval)
ruleManager := rules.NewRuleManager(ruleResults, conf.EvaluationInterval())
err = ruleManager.AddRulesFromConfig(conf)
if err != nil {
log.Fatalf("Error loading rule files: %v", err)


@ -24,10 +24,10 @@ import (
type TargetManager interface {
acquire()
release()
AddTarget(job *config.JobConfig, t Target, defaultScrapeInterval time.Duration)
ReplaceTargets(job *config.JobConfig, newTargets []Target, defaultScrapeInterval time.Duration)
AddTarget(job config.JobConfig, t Target, defaultScrapeInterval time.Duration)
ReplaceTargets(job config.JobConfig, newTargets []Target, defaultScrapeInterval time.Duration)
Remove(t Target)
AddTargetsFromConfig(config *config.Config)
AddTargetsFromConfig(config config.Config)
Pools() map[string]*TargetPool
}
@ -53,30 +53,27 @@ func (m *targetManager) release() {
<-m.requestAllowance
}
func (m *targetManager) TargetPoolForJob(job *config.JobConfig, defaultScrapeInterval time.Duration) (targetPool *TargetPool) {
targetPool, ok := m.poolsByJob[job.Name]
func (m *targetManager) TargetPoolForJob(job config.JobConfig, defaultScrapeInterval time.Duration) (targetPool *TargetPool) {
targetPool, ok := m.poolsByJob[job.GetName()]
if !ok {
targetPool = NewTargetPool(m)
log.Printf("Pool for job %s does not exist; creating and starting...", job.Name)
log.Printf("Pool for job %s does not exist; creating and starting...", job.GetName())
interval := job.ScrapeInterval
if interval == 0 {
interval = defaultScrapeInterval
}
m.poolsByJob[job.Name] = targetPool
interval := job.ScrapeInterval()
m.poolsByJob[job.GetName()] = targetPool
go targetPool.Run(m.results, interval)
}
return
}
func (m *targetManager) AddTarget(job *config.JobConfig, t Target, defaultScrapeInterval time.Duration) {
func (m *targetManager) AddTarget(job config.JobConfig, t Target, defaultScrapeInterval time.Duration) {
targetPool := m.TargetPoolForJob(job, defaultScrapeInterval)
targetPool.AddTarget(t)
m.poolsByJob[job.Name] = targetPool
m.poolsByJob[job.GetName()] = targetPool
}
func (m *targetManager) ReplaceTargets(job *config.JobConfig, newTargets []Target, defaultScrapeInterval time.Duration) {
func (m *targetManager) ReplaceTargets(job config.JobConfig, newTargets []Target, defaultScrapeInterval time.Duration) {
targetPool := m.TargetPoolForJob(job, defaultScrapeInterval)
targetPool.replaceTargets(newTargets)
}
@ -85,19 +82,21 @@ func (m targetManager) Remove(t Target) {
panic("not implemented")
}
func (m *targetManager) AddTargetsFromConfig(config *config.Config) {
for _, job := range config.Jobs {
for _, configTargets := range job.Targets {
func (m *targetManager) AddTargetsFromConfig(config config.Config) {
for _, job := range config.Jobs() {
for _, targetGroup := range job.TargetGroup {
baseLabels := model.LabelSet{
model.JobLabel: model.LabelValue(job.Name),
model.JobLabel: model.LabelValue(job.GetName()),
}
for label, value := range configTargets.Labels {
baseLabels[label] = value
if targetGroup.Labels != nil {
for _, label := range targetGroup.Labels.Label {
baseLabels[model.LabelName(label.GetName())] = model.LabelValue(label.GetValue())
}
}
for _, endpoint := range configTargets.Endpoints {
for _, endpoint := range targetGroup.Target {
target := NewTarget(endpoint, time.Second*5, baseLabels)
m.AddTarget(&job, target, config.Global.ScrapeInterval)
m.AddTarget(job, target, config.ScrapeInterval())
}
}
}
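
To make that mapping concrete, a hedged, self-contained sketch of how a single TargetGroup from the new config format translates into the base labels attached to its targets (names and values are illustrative):

package main

import (
	"fmt"

	"code.google.com/p/goprotobuf/proto"
	pb "github.com/prometheus/prometheus/config/generated"
	"github.com/prometheus/prometheus/model"
)

func main() {
	// One target group as it appears in the new config format.
	group := &pb.TargetGroup{
		Target: []string{"http://localhost:9090/metrics.json"},
		Labels: &pb.LabelPairs{
			Label: []*pb.LabelPair{
				&pb.LabelPair{Name: proto.String("group"), Value: proto.String("canary")},
			},
		},
	}

	// Mirrors the baseLabels construction in AddTargetsFromConfig above.
	baseLabels := model.LabelSet{model.JobLabel: model.LabelValue("prometheus")}
	if group.Labels != nil {
		for _, label := range group.Labels.Label {
			baseLabels[model.LabelName(label.GetName())] = model.LabelValue(label.GetValue())
		}
	}
	// Every endpoint in group.Target is scraped with these labels attached.
	fmt.Println(baseLabels)
}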


@ -14,7 +14,9 @@
package retrieval
import (
"code.google.com/p/goprotobuf/proto"
"github.com/prometheus/prometheus/config"
pb "github.com/prometheus/prometheus/config/generated"
"github.com/prometheus/prometheus/model"
"github.com/prometheus/prometheus/retrieval/format"
"github.com/prometheus/prometheus/utility/test"
@ -67,11 +69,17 @@ func (t *fakeTarget) Merge(newTarget Target) {}
func testTargetManager(t test.Tester) {
results := make(chan format.Result, 5)
targetManager := NewTargetManager(results, 3)
testJob1 := &config.JobConfig{
Name: "test_job1",
testJob1 := config.JobConfig{
JobConfig: pb.JobConfig{
Name: proto.String("test_job1"),
ScrapeInterval: proto.String("1m"),
},
}
testJob2 := &config.JobConfig{
Name: "test_job2",
testJob2 := config.JobConfig{
JobConfig: pb.JobConfig{
Name: proto.String("test_job2"),
ScrapeInterval: proto.String("1m"),
},
}
target1GroupA := &fakeTarget{


@ -27,7 +27,7 @@ type Result struct {
}
type RuleManager interface {
AddRulesFromConfig(config *config.Config) error
AddRulesFromConfig(config config.Config) error
}
type ruleManager struct {
@ -87,8 +87,8 @@ func (m *ruleManager) runIteration(results chan *Result) {
wg.Wait()
}
func (m *ruleManager) AddRulesFromConfig(config *config.Config) error {
for _, ruleFile := range config.Global.RuleFiles {
func (m *ruleManager) AddRulesFromConfig(config config.Config) error {
for _, ruleFile := range config.Global.RuleFile {
newRules, err := LoadRulesFromFile(ruleFile)
if err != nil {
return err


@ -34,7 +34,7 @@ func (serv MetricsService) SetTargets(targetGroups []TargetGroup, jobName string
for _, targetGroup := range targetGroups {
// Do mandatory map type conversion due to Go shortcomings.
baseLabels := model.LabelSet{
model.JobLabel: model.LabelValue(job.Name),
model.JobLabel: model.LabelValue(job.GetName()),
}
for label, value := range targetGroup.BaseLabels {
baseLabels[model.LabelName(label)] = model.LabelValue(value)
@ -45,6 +45,6 @@ func (serv MetricsService) SetTargets(targetGroups []TargetGroup, jobName string
newTargets = append(newTargets, newTarget)
}
}
serv.appState.TargetManager.ReplaceTargets(job, newTargets, serv.appState.Config.Global.ScrapeInterval)
serv.appState.TargetManager.ReplaceTargets(*job, newTargets, serv.appState.Config.ScrapeInterval())
}
}


@ -44,7 +44,7 @@ func (h *StatusHandler) Run() {
})
h.PrometheusStatus = &PrometheusStatus{
Config: h.appState.Config.ToString(0),
Config: h.appState.Config.String(),
Rules: "TODO: list rules here",
Status: "TODO: add status information here",
TargetPools: h.appState.TargetManager.Pools(),