// Copyright 2013 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
|
package retrieval
|
2016-08-25 11:36:26 -07:00
|
|
|
|
|
|
|
import (
|
2017-06-09 08:18:19 -07:00
|
|
|
"fmt"
|
2016-09-05 05:17:10 -07:00
|
|
|
"reflect"
|
2016-08-25 11:36:26 -07:00
|
|
|
"testing"
|
2018-01-19 03:36:21 -08:00
|
|
|
"time"
|
2016-08-25 11:36:26 -07:00
|
|
|
|
2016-09-05 05:17:10 -07:00
|
|
|
"github.com/prometheus/common/model"
|
2016-08-25 11:36:26 -07:00
|
|
|
"github.com/prometheus/prometheus/config"
|
2018-01-19 03:36:21 -08:00
|
|
|
"github.com/prometheus/prometheus/discovery/targetgroup"
|
2016-12-29 00:27:30 -08:00
|
|
|
"github.com/prometheus/prometheus/pkg/labels"
|
2016-08-25 11:36:26 -07:00
|
|
|
)
|
|
|
|
|
2016-09-05 05:17:10 -07:00
|
|
|
func mustNewRegexp(s string) config.Regexp {
|
|
|
|
re, err := config.NewRegexp(s)
|
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
return re
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestPopulateLabels(t *testing.T) {
|
|
|
|
cases := []struct {
|
2016-12-29 00:27:30 -08:00
|
|
|
in labels.Labels
|
2016-09-05 05:17:10 -07:00
|
|
|
cfg *config.ScrapeConfig
|
2016-12-29 00:27:30 -08:00
|
|
|
res labels.Labels
|
|
|
|
resOrig labels.Labels
|
2017-06-09 08:18:19 -07:00
|
|
|
err error
|
2016-09-05 05:17:10 -07:00
|
|
|
}{
|
|
|
|
// Regular population of scrape config options.
|
|
|
|
{
|
2016-12-29 00:27:30 -08:00
|
|
|
in: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4:1000",
|
|
|
|
"custom": "value",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
},
|
2016-12-29 00:27:30 -08:00
|
|
|
res: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4:1000",
|
|
|
|
model.InstanceLabel: "1.2.3.4:1000",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "value",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
|
|
|
resOrig: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4:1000",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "value",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
},
|
|
|
|
// Pre-define/overwrite scrape config labels.
|
|
|
|
// Leave out port and expect it to be defaulted to scheme.
|
|
|
|
{
|
2016-12-29 00:27:30 -08:00
|
|
|
in: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4",
|
|
|
|
model.SchemeLabel: "http",
|
|
|
|
model.MetricsPathLabel: "/custom",
|
|
|
|
model.JobLabel: "custom-job",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
},
|
2016-12-29 00:27:30 -08:00
|
|
|
res: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4:80",
|
|
|
|
model.InstanceLabel: "1.2.3.4:80",
|
|
|
|
model.SchemeLabel: "http",
|
|
|
|
model.MetricsPathLabel: "/custom",
|
|
|
|
model.JobLabel: "custom-job",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
|
|
|
resOrig: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "1.2.3.4",
|
|
|
|
model.SchemeLabel: "http",
|
|
|
|
model.MetricsPathLabel: "/custom",
|
|
|
|
model.JobLabel: "custom-job",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
},
|
|
|
|
// Provide instance label. HTTPS port default for IPv6.
|
|
|
|
{
|
2016-12-29 00:27:30 -08:00
|
|
|
in: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "[::1]",
|
|
|
|
model.InstanceLabel: "custom-instance",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
},
|
2016-12-29 00:27:30 -08:00
|
|
|
res: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "[::1]:443",
|
|
|
|
model.InstanceLabel: "custom-instance",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
|
|
|
resOrig: labels.FromMap(map[string]string{
|
2016-09-05 05:17:10 -07:00
|
|
|
model.AddressLabel: "[::1]",
|
|
|
|
model.InstanceLabel: "custom-instance",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2016-09-05 05:17:10 -07:00
|
|
|
},
|
2017-06-09 08:18:19 -07:00
|
|
|
// Address label missing.
|
2016-09-05 05:17:10 -07:00
|
|
|
{
|
2017-06-23 04:15:44 -07:00
|
|
|
in: labels.FromStrings("custom", "value"),
|
2017-06-09 08:18:19 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
},
|
|
|
|
res: nil,
|
|
|
|
resOrig: nil,
|
|
|
|
err: fmt.Errorf("no address"),
|
|
|
|
},
|
|
|
|
// Address label missing, but added in relabelling.
|
|
|
|
{
|
2017-06-23 04:15:44 -07:00
|
|
|
in: labels.FromStrings("custom", "host:1234"),
|
2017-06-09 08:18:19 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
RelabelConfigs: []*config.RelabelConfig{
|
|
|
|
{
|
|
|
|
Action: config.RelabelReplace,
|
|
|
|
Regex: mustNewRegexp("(.*)"),
|
|
|
|
SourceLabels: model.LabelNames{"custom"},
|
|
|
|
Replacement: "${1}",
|
|
|
|
TargetLabel: string(model.AddressLabel),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2017-06-23 04:15:44 -07:00
|
|
|
res: labels.FromMap(map[string]string{
|
2017-06-09 08:18:19 -07:00
|
|
|
model.AddressLabel: "host:1234",
|
|
|
|
model.InstanceLabel: "host:1234",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "host:1234",
|
2017-06-23 04:15:44 -07:00
|
|
|
}),
|
|
|
|
resOrig: labels.FromMap(map[string]string{
|
2017-06-09 08:18:19 -07:00
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "host:1234",
|
2016-12-29 00:27:30 -08:00
|
|
|
}),
|
2017-06-09 08:18:19 -07:00
|
|
|
},
|
|
|
|
// Address label missing, but added in relabelling.
|
|
|
|
{
|
2017-06-23 04:15:44 -07:00
|
|
|
in: labels.FromStrings("custom", "host:1234"),
|
2016-09-05 05:17:10 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
RelabelConfigs: []*config.RelabelConfig{
|
|
|
|
{
|
2017-06-09 08:18:19 -07:00
|
|
|
Action: config.RelabelReplace,
|
|
|
|
Regex: mustNewRegexp("(.*)"),
|
|
|
|
SourceLabels: model.LabelNames{"custom"},
|
|
|
|
Replacement: "${1}",
|
|
|
|
TargetLabel: string(model.AddressLabel),
|
2016-09-05 05:17:10 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2017-06-23 04:15:44 -07:00
|
|
|
res: labels.FromMap(map[string]string{
|
2017-06-09 08:18:19 -07:00
|
|
|
model.AddressLabel: "host:1234",
|
|
|
|
model.InstanceLabel: "host:1234",
|
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "host:1234",
|
2017-06-23 04:15:44 -07:00
|
|
|
}),
|
|
|
|
resOrig: labels.FromMap(map[string]string{
|
2017-06-09 08:18:19 -07:00
|
|
|
model.SchemeLabel: "https",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
|
|
|
model.JobLabel: "job",
|
|
|
|
"custom": "host:1234",
|
2017-06-23 04:15:44 -07:00
|
|
|
}),
|
2017-06-09 08:18:19 -07:00
|
|
|
},
|
|
|
|
// Invalid UTF-8 in label.
|
|
|
|
{
|
2017-06-23 04:15:44 -07:00
|
|
|
in: labels.FromMap(map[string]string{
|
2017-06-09 08:18:19 -07:00
|
|
|
model.AddressLabel: "1.2.3.4:1000",
|
|
|
|
"custom": "\xbd",
|
2017-06-23 04:15:44 -07:00
|
|
|
}),
|
2017-06-09 08:18:19 -07:00
|
|
|
cfg: &config.ScrapeConfig{
|
|
|
|
Scheme: "https",
|
|
|
|
MetricsPath: "/metrics",
|
|
|
|
JobName: "job",
|
|
|
|
},
|
2016-09-05 05:17:10 -07:00
|
|
|
res: nil,
|
|
|
|
resOrig: nil,
|
2017-06-09 08:18:19 -07:00
|
|
|
err: fmt.Errorf("invalid label value for \"custom\": \"\\xbd\""),
|
2016-09-05 05:17:10 -07:00
|
|
|
},
|
|
|
|
}
|
|
|
|
for i, c := range cases {
|
2017-03-08 06:37:12 -08:00
|
|
|
in := c.in.Copy()
|
|
|
|
|
2016-09-05 05:17:10 -07:00
|
|
|
res, orig, err := populateLabels(c.in, c.cfg)
|
2017-06-09 08:18:19 -07:00
|
|
|
if !reflect.DeepEqual(err, c.err) {
|
|
|
|
t.Fatalf("case %d: wanted %v error, got %v", i, c.err, err)
|
2016-09-05 05:17:10 -07:00
|
|
|
}
|
2017-02-01 11:49:50 -08:00
|
|
|
if !reflect.DeepEqual(c.in, in) {
|
|
|
|
t.Errorf("case %d: input lset was changed was\n\t%+v\n now\n\t%+v", i, in, c.in)
|
|
|
|
}
|
2016-09-05 05:17:10 -07:00
|
|
|
if !reflect.DeepEqual(res, c.res) {
|
|
|
|
t.Errorf("case %d: expected res\n\t%+v\n got\n\t%+v", i, c.res, res)
|
|
|
|
}
|
|
|
|
if !reflect.DeepEqual(orig, c.resOrig) {
|
|
|
|
t.Errorf("case %d: expected resOrig\n\t%+v\n got\n\t%+v", i, c.resOrig, orig)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2018-01-19 03:36:21 -08:00
|
|
|
|
|
|
|
// TestScrapeManagerReloadNoChange tests that no scrape reload happens when there is no config change.
|
|
|
|
func TestManagerReloadNoChange(t *testing.T) {
|
|
|
|
tsetName := "test"
|
|
|
|
|
|
|
|
reloadCfg := &config.Config{
|
|
|
|
ScrapeConfigs: []*config.ScrapeConfig{
|
|
|
|
&config.ScrapeConfig{
|
|
|
|
ScrapeInterval: model.Duration(3 * time.Second),
|
|
|
|
ScrapeTimeout: model.Duration(2 * time.Second),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2018-01-30 09:45:37 -08:00
|
|
|
scrapeManager := NewManager(nil, nil)
|
2018-01-19 03:36:21 -08:00
|
|
|
scrapeManager.scrapeConfigs[tsetName] = reloadCfg.ScrapeConfigs[0]
|
|
|
|
// As reload never happens, new loop should never be called.
|
|
|
|
newLoop := func(_ *Target, s scraper) loop {
|
|
|
|
t.Fatal("reload happened")
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
sp := &scrapePool{
|
|
|
|
appendable: &nopAppendable{},
|
|
|
|
targets: map[uint64]*Target{},
|
|
|
|
loops: map[uint64]loop{
|
|
|
|
1: &scrapeLoop{},
|
|
|
|
},
|
|
|
|
newLoop: newLoop,
|
|
|
|
logger: nil,
|
|
|
|
config: reloadCfg.ScrapeConfigs[0],
|
|
|
|
}
|
|
|
|
scrapeManager.scrapePools = map[string]*scrapePool{
|
|
|
|
tsetName: sp,
|
|
|
|
}
|
|
|
|
|
|
|
|
targets := map[string][]*targetgroup.Group{
|
|
|
|
tsetName: []*targetgroup.Group{},
|
|
|
|
}
|
|
|
|
|
|
|
|
scrapeManager.reload(targets)
|
|
|
|
}
|