// Copyright 2013 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package scrape

import (
	"context"
	"net/http"
	"strconv"
	"testing"
	"time"

	"github.com/prometheus/common/model"
	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v2"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/discovery"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/relabel"
)

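// TestPopulateLabels checks that PopulateLabels fills in the expected
// default, instance, and scrape-option labels for a variety of inputs,
// and that it reports the expected validation errors.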
func TestPopulateLabels(t *testing.T) {
	cases := []struct {
		in            labels.Labels
		cfg           *config.ScrapeConfig
		noDefaultPort bool
		res           labels.Labels
		resOrig       labels.Labels
		err           string
	}{
		// Regular population of scrape config options.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:1000",
				"custom":           "value",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.InstanceLabel:       "1.2.3.4:1000",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "value",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				"custom":                  "value",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Pre-define/overwrite scrape config labels.
		// Leave out the port and expect it to be defaulted according to the scheme.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:80",
				model.InstanceLabel:       "1.2.3.4:80",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
		},
		// Provide instance label. HTTPS port default for IPv6.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:  "[::1]",
				model.InstanceLabel: "custom-instance",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "[::1]:443",
				model.InstanceLabel:       "custom-instance",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "[::1]",
				model.InstanceLabel:       "custom-instance",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Address label missing.
		{
			in: labels.FromStrings("custom", "value"),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "no address",
		},
		// Address label missing, but added in relabelling.
		{
			in: labels.FromStrings("custom", "host:1234"),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
				RelabelConfigs: []*relabel.Config{
					{
						Action:       relabel.Replace,
						Regex:        relabel.MustNewRegexp("(.*)"),
						SourceLabels: model.LabelNames{"custom"},
						Replacement:  "${1}",
						TargetLabel:  string(model.AddressLabel),
					},
				},
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "host:1234",
				model.InstanceLabel:       "host:1234",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "host:1234",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "host:1234",
			}),
		},
		// Invalid UTF-8 in label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:1000",
				"custom":           "\xbd",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "invalid label value for \"custom\": \"\\xbd\"",
		},
		// Invalid duration in interval label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "2notseconds",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "error parsing scrape interval: unknown unit \"notseconds\" in duration \"2notseconds\"",
		},
		// Invalid duration in timeout label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:       "1.2.3.4:1000",
				model.ScrapeTimeoutLabel: "2notseconds",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "error parsing scrape timeout: unknown unit \"notseconds\" in duration \"2notseconds\"",
		},
		// 0 duration in interval label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "0s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "scrape interval cannot be 0",
		},
		// 0 duration in timeout label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:       "1.2.3.4:1000",
				model.ScrapeTimeoutLabel: "0s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "scrape timeout cannot be 0",
		},
		// Timeout greater than interval.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "scrape timeout cannot be greater than scrape interval (\"2s\" > \"1s\")",
		},
		// Don't attach default port.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			noDefaultPort: true,
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.InstanceLabel:       "1.2.3.4",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Remove default port (http).
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:80",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "http",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			noDefaultPort: true,
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.InstanceLabel:       "1.2.3.4:80",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:80",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Remove default port (https).
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:443",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			noDefaultPort: true,
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.InstanceLabel:       "1.2.3.4:443",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:443",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
	}
	for _, c := range cases {
		in := c.in.Copy()

		res, orig, err := PopulateLabels(labels.NewBuilder(c.in), c.cfg, c.noDefaultPort)
		if c.err != "" {
			require.EqualError(t, err, c.err)
		} else {
			require.NoError(t, err)
		}
		require.Equal(t, c.in, in)
		require.Equal(t, c.res, res)
		require.Equal(t, c.resOrig, orig)
	}
}

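// loadConfiguration parses the given YAML text into a config.Config,
// failing the test on any parse error.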
func loadConfiguration(t testing.TB, c string) *config.Config {
	t.Helper()

	cfg := &config.Config{}
	if err := yaml.UnmarshalStrict([]byte(c), cfg); err != nil {
		t.Fatalf("Unable to load YAML config: %s", err)
	}
	return cfg
}

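// noopLoop returns a test scrape loop whose start and stop hooks do
// nothing, for tests that only exercise pool bookkeeping.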
func noopLoop() loop {
	return &testLoop{
		startFunc: func(interval, timeout time.Duration, errc chan<- error) {},
		stopFunc:  func() {},
	}
}

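// TestManagerApplyConfig verifies that scrape loops are reloaded only when
// applying a configuration that actually changes, and never for an invalid
// or unchanged one.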
func TestManagerApplyConfig(t *testing.T) {
	// Valid initial configuration.
	cfgText1 := `
scrape_configs:
- job_name: job1
  static_configs:
  - targets: ["foo:9090"]
`
	// Invalid configuration.
	cfgText2 := `
scrape_configs:
- job_name: job1
  scheme: https
  static_configs:
  - targets: ["foo:9090"]
  tls_config:
    ca_file: /not/existing/ca/file
`
	// Valid configuration.
	cfgText3 := `
scrape_configs:
- job_name: job1
  scheme: https
  static_configs:
  - targets: ["foo:9090"]
`
	var (
		cfg1 = loadConfiguration(t, cfgText1)
		cfg2 = loadConfiguration(t, cfgText2)
		cfg3 = loadConfiguration(t, cfgText3)

		ch = make(chan struct{}, 1)
	)

	opts := Options{}
	scrapeManager := NewManager(&opts, nil, nil)
	newLoop := func(scrapeLoopOptions) loop {
		ch <- struct{}{}
		return noopLoop()
	}
	sp := &scrapePool{
		appendable: &nopAppendable{},
		activeTargets: map[uint64]*Target{
			1: {},
		},
		loops: map[uint64]loop{
			1: noopLoop(),
		},
		newLoop: newLoop,
		logger:  nil,
		config:  cfg1.ScrapeConfigs[0],
		client:  http.DefaultClient,
	}
	scrapeManager.scrapePools = map[string]*scrapePool{
		"job1": sp,
	}

	// Apply the initial configuration.
	if err := scrapeManager.ApplyConfig(cfg1); err != nil {
		t.Fatalf("unable to apply configuration: %s", err)
	}
	select {
	case <-ch:
		t.Fatal("reload happened")
	default:
	}

	// Apply a configuration for which the reload fails.
	if err := scrapeManager.ApplyConfig(cfg2); err == nil {
		t.Fatalf("expected an error but got none")
	}
	select {
	case <-ch:
		t.Fatal("reload happened")
	default:
	}

	// Apply a configuration for which the reload succeeds.
	if err := scrapeManager.ApplyConfig(cfg3); err != nil {
		t.Fatalf("unable to apply configuration: %s", err)
	}
	select {
	case <-ch:
	default:
		t.Fatal("reload didn't happen")
	}

	// Re-applying the same configuration shouldn't trigger a reload.
	if err := scrapeManager.ApplyConfig(cfg3); err != nil {
		t.Fatalf("unable to apply configuration: %s", err)
	}
	select {
	case <-ch:
		t.Fatal("reload happened")
	default:
	}
}

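// TestManagerTargetsUpdates checks that the manager keeps consuming target
// group updates without blocking and triggers a scrape-loop reload.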
func TestManagerTargetsUpdates(t *testing.T) {
	opts := Options{}
	m := NewManager(&opts, nil, nil)

	ts := make(chan map[string][]*targetgroup.Group)
	go m.Run(ts)
	defer m.Stop()

	tgSent := make(map[string][]*targetgroup.Group)
	for x := 0; x < 10; x++ {
		tgSent[strconv.Itoa(x)] = []*targetgroup.Group{
			{
				Source: strconv.Itoa(x),
			},
		}

		select {
		case ts <- tgSent:
		case <-time.After(10 * time.Millisecond):
			t.Error("Scrape manager's channel remained blocked after the set threshold.")
		}
	}

	m.mtxScrape.Lock()
	tsetActual := m.targetSets
	m.mtxScrape.Unlock()

	// Make sure all updates have been received.
	require.Equal(t, tgSent, tsetActual)

	select {
	case <-m.triggerReload:
	default:
		t.Error("No scrape loops reload was triggered after targets update.")
	}
}

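// TestSetOffsetSeed ensures the scrape offset seed is non-zero and differs
// for different sets of external labels.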
func TestSetOffsetSeed(t *testing.T) {
	getConfig := func(prometheus string) *config.Config {
		cfgText := `
global:
  external_labels:
    prometheus: '` + prometheus + `'
`
		cfg := &config.Config{}
		if err := yaml.UnmarshalStrict([]byte(cfgText), cfg); err != nil {
			t.Fatalf("Unable to load YAML config: %s", err)
		}

		return cfg
	}

	opts := Options{}
	scrapeManager := NewManager(&opts, nil, nil)

	// Load the first config.
	cfg1 := getConfig("ha1")
	if err := scrapeManager.setOffsetSeed(cfg1.GlobalConfig.ExternalLabels); err != nil {
		t.Error(err)
	}
	offsetSeed1 := scrapeManager.offsetSeed

	if offsetSeed1 == 0 {
		t.Error("Offset seed must be a non-zero hash")
	}

	// Load the second config.
	cfg2 := getConfig("ha2")
	if err := scrapeManager.setOffsetSeed(cfg2.GlobalConfig.ExternalLabels); err != nil {
		t.Error(err)
	}
	offsetSeed2 := scrapeManager.offsetSeed

	if offsetSeed1 == offsetSeed2 {
		t.Error("Offset seed should not be the same for different sets of external labels")
	}
}

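// TestManagerScrapePools checks that ScrapePools reflects the scrape pools
// currently held by the manager after each reload.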
func TestManagerScrapePools(t *testing.T) {
	cfgText1 := `
scrape_configs:
- job_name: job1
  static_configs:
  - targets: ["foo:9090"]
- job_name: job2
  static_configs:
  - targets: ["foo:9091", "foo:9092"]
`
	cfgText2 := `
scrape_configs:
- job_name: job1
  static_configs:
  - targets: ["foo:9090", "foo:9094"]
- job_name: job3
  static_configs:
  - targets: ["foo:9093"]
`
	var (
		cfg1 = loadConfiguration(t, cfgText1)
		cfg2 = loadConfiguration(t, cfgText2)
	)

	reload := func(scrapeManager *Manager, cfg *config.Config) {
		newLoop := func(scrapeLoopOptions) loop {
			return noopLoop()
		}
		scrapeManager.scrapePools = map[string]*scrapePool{}
		for _, sc := range cfg.ScrapeConfigs {
			_, cancel := context.WithCancel(context.Background())
			defer cancel()
			sp := &scrapePool{
				appendable:    &nopAppendable{},
				activeTargets: map[uint64]*Target{},
				loops: map[uint64]loop{
					1: noopLoop(),
				},
				newLoop: newLoop,
				logger:  nil,
				config:  sc,
				client:  http.DefaultClient,
				cancel:  cancel,
			}
			for _, c := range sc.ServiceDiscoveryConfigs {
				staticConfig := c.(discovery.StaticConfig)
				for _, group := range staticConfig {
					for i := range group.Targets {
						sp.activeTargets[uint64(i)] = &Target{}
					}
				}
			}
			scrapeManager.scrapePools[sc.JobName] = sp
		}
	}

	opts := Options{}
	scrapeManager := NewManager(&opts, nil, nil)

	reload(scrapeManager, cfg1)
	require.ElementsMatch(t, []string{"job1", "job2"}, scrapeManager.ScrapePools())

	reload(scrapeManager, cfg2)
	require.ElementsMatch(t, []string{"job1", "job3"}, scrapeManager.ScrapePools())
}