Remove check against cfg so interval/timeout are always set (#10023)

Signed-off-by: Nicholas Blott <blottn@tcd.ie>
Nicholas Blott 2021-12-16 12:28:46 +00:00 committed by GitHub
parent 13af2470bf
commit c92673fb14
3 changed files with 26 additions and 25 deletions
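In effect, PopulateLabels (and therefore TargetsFromGroup, which calls it) now always parses the __scrape_interval__ and __scrape_timeout__ labels instead of skipping the parse whenever they match the config's own string form, so any ScrapeConfig passed in has to carry non-zero ScrapeInterval and ScrapeTimeout. A minimal caller sketch under that assumption (import paths and the TargetsFromGroup signature as they stood around this commit; the job name is hypothetical):

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/prometheus/prometheus/scrape"
)

func main() {
	// After this change a zero scrape interval or timeout is rejected, so the
	// config must spell out explicit values instead of relying on the old
	// "skip when equal to cfg" shortcut.
	cfg := &config.ScrapeConfig{
		JobName:        "example", // hypothetical job name
		ScrapeInterval: model.Duration(1 * time.Minute),
		ScrapeTimeout:  model.Duration(10 * time.Second),
	}

	targets, failures := scrape.TargetsFromGroup(&targetgroup.Group{
		Targets: []model.LabelSet{{model.AddressLabel: "localhost:9090"}},
	}, cfg)
	fmt.Println(len(targets), len(failures)) // expect 1 target, 0 failures
}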

@@ -15,6 +15,7 @@ package main
 import (
 	"testing"
+	"time"
 	"github.com/prometheus/common/model"
@@ -37,6 +38,8 @@ func TestSDCheckResult(t *testing.T) {
 	require.Nil(t, err)
 	scrapeConfig := &config.ScrapeConfig{
+		ScrapeInterval: model.Duration(1 * time.Minute),
+		ScrapeTimeout:  model.Duration(10 * time.Second),
 		RelabelConfigs: []*relabel.Config{{
 			SourceLabels: model.LabelNames{"foo"},
 			Action:       relabel.Replace,
@@ -50,14 +53,14 @@ func TestSDCheckResult(t *testing.T) {
 		{
 			DiscoveredLabels: labels.Labels{
 				labels.Label{Name: "__address__", Value: "localhost:8080"},
-				labels.Label{Name: "__scrape_interval__", Value: "0s"},
-				labels.Label{Name: "__scrape_timeout__", Value: "0s"},
+				labels.Label{Name: "__scrape_interval__", Value: "1m"},
+				labels.Label{Name: "__scrape_timeout__", Value: "10s"},
 				labels.Label{Name: "foo", Value: "bar"},
 			},
 			Labels: labels.Labels{
 				labels.Label{Name: "__address__", Value: "localhost:8080"},
-				labels.Label{Name: "__scrape_interval__", Value: "0s"},
-				labels.Label{Name: "__scrape_timeout__", Value: "0s"},
+				labels.Label{Name: "__scrape_interval__", Value: "1m"},
+				labels.Label{Name: "__scrape_timeout__", Value: "10s"},
 				labels.Label{Name: "foo", Value: "bar"},
 				labels.Label{Name: "instance", Value: "localhost:8080"},
 				labels.Label{Name: "newfoo", Value: "bar"},

@@ -418,28 +418,22 @@ func PopulateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig labels.Labels, err error) {
 		return nil, nil, err
 	}
-	var interval string
-	var intervalDuration model.Duration
-	if interval = lset.Get(model.ScrapeIntervalLabel); interval != cfg.ScrapeInterval.String() {
-		intervalDuration, err = model.ParseDuration(interval)
-		if err != nil {
-			return nil, nil, errors.Errorf("error parsing scrape interval: %v", err)
-		}
-		if time.Duration(intervalDuration) == 0 {
-			return nil, nil, errors.New("scrape interval cannot be 0")
-		}
+	interval := lset.Get(model.ScrapeIntervalLabel)
+	intervalDuration, err := model.ParseDuration(interval)
+	if err != nil {
+		return nil, nil, errors.Errorf("error parsing scrape interval: %v", err)
+	}
+	if time.Duration(intervalDuration) == 0 {
+		return nil, nil, errors.New("scrape interval cannot be 0")
 	}
-	var timeout string
-	var timeoutDuration model.Duration
-	if timeout = lset.Get(model.ScrapeTimeoutLabel); timeout != cfg.ScrapeTimeout.String() {
-		timeoutDuration, err = model.ParseDuration(timeout)
-		if err != nil {
-			return nil, nil, errors.Errorf("error parsing scrape timeout: %v", err)
-		}
-		if time.Duration(timeoutDuration) == 0 {
-			return nil, nil, errors.New("scrape timeout cannot be 0")
-		}
+	timeout := lset.Get(model.ScrapeTimeoutLabel)
+	timeoutDuration, err := model.ParseDuration(timeout)
+	if err != nil {
+		return nil, nil, errors.Errorf("error parsing scrape timeout: %v", err)
+	}
+	if time.Duration(timeoutDuration) == 0 {
+		return nil, nil, errors.New("scrape timeout cannot be 0")
 	}
 	if timeoutDuration > intervalDuration {
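Stripped of the diff markers, the new flow in PopulateLabels is: always read both timing labels, fail on an unparsable or zero duration, then reject a timeout larger than the interval. The following is an illustrative, self-contained restatement of that flow (a sketch, not the shipped target.go code; it uses fmt.Errorf where the original uses the errors package):

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

// validateScrapeTiming mirrors the post-change validation order: parse the
// interval, parse the timeout, reject zero values, and reject a timeout
// greater than the interval.
func validateScrapeTiming(interval, timeout string) error {
	intervalDuration, err := model.ParseDuration(interval)
	if err != nil {
		return fmt.Errorf("error parsing scrape interval: %w", err)
	}
	if time.Duration(intervalDuration) == 0 {
		return fmt.Errorf("scrape interval cannot be 0")
	}

	timeoutDuration, err := model.ParseDuration(timeout)
	if err != nil {
		return fmt.Errorf("error parsing scrape timeout: %w", err)
	}
	if time.Duration(timeoutDuration) == 0 {
		return fmt.Errorf("scrape timeout cannot be 0")
	}

	if timeoutDuration > intervalDuration {
		return fmt.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
	}
	return nil
}

func main() {
	fmt.Println(validateScrapeTiming("1m", "10s")) // <nil>
	fmt.Println(validateScrapeTiming("0s", "10s")) // scrape interval cannot be 0
}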

@@ -371,7 +371,11 @@ func TestNewClientWithBadTLSConfig(t *testing.T) {
 func TestTargetsFromGroup(t *testing.T) {
 	expectedError := "instance 0 in group : no address"
-	targets, failures := TargetsFromGroup(&targetgroup.Group{Targets: []model.LabelSet{{}, {model.AddressLabel: "localhost:9090"}}}, &config.ScrapeConfig{})
+	cfg := config.ScrapeConfig{
+		ScrapeTimeout:  model.Duration(10 * time.Second),
+		ScrapeInterval: model.Duration(1 * time.Minute),
+	}
+	targets, failures := TargetsFromGroup(&targetgroup.Group{Targets: []model.LabelSet{{}, {model.AddressLabel: "localhost:9090"}}}, &cfg)
 	if len(targets) != 1 {
 		t.Fatalf("Expected 1 target, got %v", len(targets))
 	}