// Copyright 2013 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package scrape
|
2016-08-25 11:36:26 -07:00
|
|
|
|
|
|
|
import (
	"fmt"
	"strconv"
	"testing"
	"time"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/prometheus/prometheus/util/testutil"

	yaml "gopkg.in/yaml.v2"
)
|
|
|
|
|
2016-09-05 05:17:10 -07:00
|
|
|
func mustNewRegexp(s string) config.Regexp {
|
|
|
|
re, err := config.NewRegexp(s)
|
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
return re
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestPopulateLabels(t *testing.T) {
|
|
|
|
cases := []struct {
|
2016-12-29 00:27:30 -08:00
|
|
|
in labels.Labels
|
2016-09-05 05:17:10 -07:00
|
|
|
cfg *config.ScrapeConfig
|
2016-12-29 00:27:30 -08:00
|
|
|
res labels.Labels
|
|
|
|
resOrig labels.Labels
|
2017-06-09 08:18:19 -07:00
|
|
|
err error
|
2016-09-05 05:17:10 -07:00
|
|
|
}{
|
|
|
|
// Regular population of scrape config options.
|
|
|
|
{
|
2016-12-29 00:27:30 -08:00
|
|
|
in: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4:1000",
|
|
|
|
"custom": "value",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
},
|
2016-12-29 00:27:30 -08:00
|
|
|
res: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4:1000",
|
|
|
|
model.InstanceLabel: "1.2.3.4:1000",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "value",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
|
|
|
resOrig: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4:1000",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "value",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
},
|
|
|
|
// Pre-define/overwrite scrape config labels.
|
|
|
|
// Leave out port and expect it to be defaulted to scheme.
|
|
|
|
{
|
2016-12-29 00:27:30 -08:00
|
|
|
in: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4",
|
|
|
|
model.SchemeLabel: "http",
|
|
|
|
model.MetricsPathLabel: "/custom",
|
|
|
|
model.JobLabel: "custom-job",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
},
|
2016-12-29 00:27:30 -08:00
|
|
|
res: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4:80",
|
|
|
|
model.InstanceLabel: "1.2.3.4:80",
|
|
|
|
model.SchemeLabel: "http",
|
|
|
|
model.MetricsPathLabel: "/custom",
|
|
|
|
model.JobLabel: "custom-job",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
|
|
|
resOrig: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4",
|
|
|
|
model.SchemeLabel: "http",
|
|
|
|
model.MetricsPathLabel: "/custom",
|
|
|
|
model.JobLabel: "custom-job",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
},
|
|
|
|
// Provide instance label. HTTPS port default for IPv6.
|
|
|
|
{
|
2016-12-29 00:27:30 -08:00
|
|
|
in: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "[::1]",
|
|
|
|
model.InstanceLabel: "custom-instance",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
},
|
2016-12-29 00:27:30 -08:00
|
|
|
res: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "[::1]:443",
|
|
|
|
model.InstanceLabel: "custom-instance",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
|
|
|
resOrig: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "[::1]",
|
|
|
|
model.InstanceLabel: "custom-instance",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
},
|
2017-06-09 08:18:19 -07:00
|
|
|
// Address label missing.
|
2016-09-05 05:17:10 -07:00
|
|
|
{
|
2017-06-23 04:15:44 -07:00
|
|
|
in: labels.FromStrings("custom", "value"),
|
2017-06-09 08:18:19 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
},
|
|
|
|
res: nil,
|
|
|
|
resOrig: nil,
|
|
|
|
err: fmt.Errorf("no address"),
|
|
|
|
},
|
|
|
|
// Address label missing, but added in relabelling.
|
|
|
|
{
|
2017-06-23 04:15:44 -07:00
|
|
|
in: labels.FromStrings("custom", "host:1234"),
|
2017-06-09 08:18:19 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
RelabelConfigs: []*config.RelabelConfig{
|
|
|
|
{
|
|
|
|
Action: config.RelabelReplace,
|
|
|
|
Regex: mustNewRegexp("(.*)"),
|
|
|
|
SourceLabels: model.LabelNames{"custom"},
|
|
|
|
Replacement: "${1}",
|
|
|
|
TargetLabel: string(model.AddressLabel),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2017-06-23 04:15:44 -07:00
|
|
|
res: labels.FromMap(map[string]string{
|
2017-06-09 08:18:19 -07:00
|
|
|
model.AddressLabel: "host:1234",
|
|
|
|
model.InstanceLabel: "host:1234",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "host:1234",
|
2017-06-23 04:15:44 -07:00
|
|
|
}),
|
|
|
|
resOrig: labels.FromMap(map[string]string{
|
2017-06-09 08:18:19 -07:00
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "host:1234",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2017-06-09 08:18:19 -07:00
|
|
|
},
|
|
|
|
// Address label missing, but added in relabelling.
|
|
|
|
{
|
2017-06-23 04:15:44 -07:00
|
|
|
in: labels.FromStrings("custom", "host:1234"),
|
2016-09-05 05:17:10 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
RelabelConfigs: []*config.RelabelConfig{
|
|
|
|
{
|
2017-06-09 08:18:19 -07:00
|
|
|
Action: config.RelabelReplace,
|
|
|
|
Regex: mustNewRegexp("(.*)"),
|
|
|
|
SourceLabels: model.LabelNames{"custom"},
|
|
|
|
Replacement: "${1}",
|
|
|
|
TargetLabel: string(model.AddressLabel),
|
2016-09-05 05:17:10 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2017-06-23 04:15:44 -07:00
|
|
|
res: labels.FromMap(map[string]string{
|
2017-06-09 08:18:19 -07:00
|
|
|
model.AddressLabel: "host:1234",
|
|
|
|
model.InstanceLabel: "host:1234",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "host:1234",
|
2017-06-23 04:15:44 -07:00
|
|
|
}),
|
|
|
|
resOrig: labels.FromMap(map[string]string{
|
2017-06-09 08:18:19 -07:00
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "host:1234",
|
2017-06-23 04:15:44 -07:00
|
|
|
}),
|
2017-06-09 08:18:19 -07:00
|
|
|
},
|
|
|
|
// Invalid UTF-8 in label.
|
|
|
|
{
|
2017-06-23 04:15:44 -07:00
|
|
|
in: labels.FromMap(map[string]string{
|
2017-06-09 08:18:19 -07:00
|
|
|
model.AddressLabel: "1.2.3.4:1000",
|
|
|
|
"custom": "\xbd",
|
2017-06-23 04:15:44 -07:00
|
|
|
}),
|
2017-06-09 08:18:19 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
},
|
2016-09-05 05:17:10 -07:00
|
|
|
res: nil,
|
|
|
|
resOrig: nil,
|
2017-06-09 08:18:19 -07:00
|
|
|
err: fmt.Errorf("invalid label value for \"custom\": \"\\xbd\""),
|
2016-09-05 05:17:10 -07:00
|
|
|
},
|
|
|
|
}
|
2018-04-27 05:11:16 -07:00
|
|
|
for _, c := range cases {
|
2017-03-08 06:37:12 -08:00
|
|
|
in := c.in.Copy()
|
|
|
|
|
2016-09-05 05:17:10 -07:00
|
|
|
res, orig, err := populateLabels(c.in, c.cfg)
|
2018-04-27 05:11:16 -07:00
|
|
|
testutil.Equals(t, c.err, err)
|
|
|
|
testutil.Equals(t, c.in, in)
|
|
|
|
testutil.Equals(t, c.res, res)
|
|
|
|
testutil.Equals(t, c.resOrig, orig)
|
2016-09-05 05:17:10 -07:00
|
|
|
}
|
|
|
|
}
|
2018-01-19 03:36:21 -08:00
|
|
|
|
|
|
|
// TestScrapeManagerReloadNoChange tests that no scrape reload happens when there is no config change.
|
|
|
|
func TestManagerReloadNoChange(t *testing.T) {
|
|
|
|
tsetName := "test"
|
|
|
|
|
2018-07-04 04:01:19 -07:00
|
|
|
cfgText := `
|
|
|
|
scrape_configs:
|
|
|
|
- job_name: '` + tsetName + `'
|
|
|
|
static_configs:
|
|
|
|
- targets: ["foo:9090"]
|
|
|
|
- targets: ["bar:9090"]
|
|
|
|
`
|
|
|
|
cfg := &config.Config{}
|
|
|
|
if err := yaml.UnmarshalStrict([]byte(cfgText), cfg); err != nil {
|
|
|
|
t.Fatalf("Unable to load YAML config cfgYaml: %s", err)
|
2018-01-19 03:36:21 -08:00
|
|
|
}
|
|
|
|
|
2018-01-30 09:45:37 -08:00
|
|
|
scrapeManager := NewManager(nil, nil)
|
2018-07-04 04:01:19 -07:00
|
|
|
// Load the current config.
|
|
|
|
scrapeManager.ApplyConfig(cfg)
|
|
|
|
|
2018-01-19 03:36:21 -08:00
|
|
|
// As reload never happens, new loop should never be called.
|
2018-04-12 07:54:53 -07:00
|
|
|
newLoop := func(_ *Target, s scraper, _ int, _ bool, _ []*config.RelabelConfig) loop {
|
2018-01-19 03:36:21 -08:00
|
|
|
t.Fatal("reload happened")
|
|
|
|
return nil
|
|
|
|
}
|
2018-07-04 04:01:19 -07:00
|
|
|
|
2018-01-19 03:36:21 -08:00
|
|
|
sp := &scrapePool{
|
2018-09-26 02:20:56 -07:00
|
|
|
appendable: &nopAppendable{},
|
|
|
|
activeTargets: map[uint64]*Target{},
|
2018-01-19 03:36:21 -08:00
|
|
|
loops: map[uint64]loop{
|
2018-07-04 04:01:19 -07:00
|
|
|
1: &testLoop{},
|
2018-01-19 03:36:21 -08:00
|
|
|
},
|
|
|
|
newLoop: newLoop,
|
|
|
|
logger: nil,
|
2018-07-04 04:01:19 -07:00
|
|
|
config: cfg.ScrapeConfigs[0],
|
2018-01-19 03:36:21 -08:00
|
|
|
}
|
|
|
|
scrapeManager.scrapePools = map[string]*scrapePool{
|
|
|
|
tsetName: sp,
|
|
|
|
}
|
|
|
|
|
2018-07-04 04:01:19 -07:00
|
|
|
scrapeManager.ApplyConfig(cfg)
|
2018-01-19 03:36:21 -08:00
|
|
|
}
|
2018-09-26 02:20:56 -07:00
|
|
|
|
|
|
|
func TestManagerTargetsUpdates(t *testing.T) {
|
|
|
|
m := NewManager(nil, nil)
|
|
|
|
|
|
|
|
ts := make(chan map[string][]*targetgroup.Group)
|
|
|
|
go m.Run(ts)
|
|
|
|
|
|
|
|
tgSent := make(map[string][]*targetgroup.Group)
|
|
|
|
for x := 0; x < 10; x++ {
|
|
|
|
|
|
|
|
tgSent[strconv.Itoa(x)] = []*targetgroup.Group{
|
|
|
|
&targetgroup.Group{
|
|
|
|
Source: strconv.Itoa(x),
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
select {
|
|
|
|
case ts <- tgSent:
|
|
|
|
case <-time.After(10 * time.Millisecond):
|
|
|
|
t.Error("Scrape manager's channel remained blocked after the set threshold.")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
m.mtxScrape.Lock()
|
|
|
|
tsetActual := m.targetSets
|
|
|
|
m.mtxScrape.Unlock()
|
|
|
|
|
|
|
|
// Make sure all updates have been received.
|
|
|
|
testutil.Equals(t, tgSent, tsetActual)
|
|
|
|
|
|
|
|
select {
|
|
|
|
case <-m.triggerReload:
|
|
|
|
default:
|
|
|
|
t.Error("No scrape loops reload was triggered after targets update.")
|
|
|
|
}
|
|
|
|
}
|