retrieval: add Scrape-Timeout-Seconds header to each scrape request (#2565)

Fixes #2508.
Authored by Matt Layher on 2017-04-04 13:26:28 -04:00; committed by Brian Brazil
parent 5f3327f620
commit fe4b6693f7
2 changed files with 29 additions and 5 deletions
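
The header added here advertises the configured scrape timeout to the target as a decimal number of seconds, so a target can bound its own collection work instead of running past the point where Prometheus has already given up. The sketch below is a hypothetical consumer of the header, not part of this commit; the handler, helper name, port, and fallback value are invented for illustration.

package main

import (
    "context"
    "fmt"
    "net/http"
    "strconv"
    "time"
)

// scrapeTimeout parses the timeout advertised by the Prometheus scraper,
// falling back to a default when the header is missing or malformed.
func scrapeTimeout(r *http.Request, fallback time.Duration) time.Duration {
    secs, err := strconv.ParseFloat(r.Header.Get("Scrape-Timeout-Seconds"), 64)
    if err != nil || secs <= 0 {
        return fallback
    }
    return time.Duration(secs * float64(time.Second))
}

func main() {
    http.HandleFunc("/metrics", func(w http.ResponseWriter, r *http.Request) {
        // Bound collection by the advertised timeout (hypothetical usage).
        ctx, cancel := context.WithTimeout(r.Context(), scrapeTimeout(r, 10*time.Second))
        defer cancel()

        select {
        case <-time.After(50 * time.Millisecond): // stand-in for real collection work
            fmt.Fprint(w, "metric_a 1\nmetric_b 2\n")
        case <-ctx.Done():
            http.Error(w, "collection timed out", http.StatusServiceUnavailable)
        }
    })
    http.ListenAndServe(":9101", nil)
}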

retrieval/scrape.go

@@ -178,8 +178,12 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) {
     for fp, oldLoop := range sp.loops {
         var (
-            t       = sp.targets[fp]
-            s       = &targetScraper{Target: t, client: sp.client}
+            t = sp.targets[fp]
+            s = &targetScraper{
+                Target:  t,
+                client:  sp.client,
+                timeout: timeout,
+            }
             newLoop = sp.newLoop(sp.ctx, s, sp.appender, t.Labels(), sp.config)
         )
         wg.Add(1)
@@ -240,7 +244,12 @@ func (sp *scrapePool) sync(targets []*Target) {
         uniqueTargets[hash] = struct{}{}
         if _, ok := sp.targets[hash]; !ok {
-            s := &targetScraper{Target: t, client: sp.client}
+            s := &targetScraper{
+                Target:  t,
+                client:  sp.client,
+                timeout: timeout,
+            }
             l := sp.newLoop(sp.ctx, s, sp.appender, t.Labels(), sp.config)
             sp.targets[hash] = t
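
Both hunks above pass a timeout value into the targetScraper that is declared outside this excerpt. It is presumably derived from the scrape pool's configuration near the top of reload() and sync(), roughly as in the assumed fragment below (not shown in this diff):

// Assumed context, not part of the excerpt: timeout taken from the scrape config.
timeout := time.Duration(sp.config.ScrapeTimeout)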
@@ -283,7 +292,8 @@ type scraper interface {
 // targetScraper implements the scraper interface for a target.
 type targetScraper struct {
     *Target
-    client *http.Client
+    client  *http.Client
+    timeout time.Duration
 }

 const acceptHeader = `application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.7,text/plain;version=0.0.4;q=0.3,*/*;q=0.1`
@@ -297,6 +307,7 @@ func (s *targetScraper) scrape(ctx context.Context, ts time.Time) (model.Samples, error) {
     }
     req.Header.Add("Accept", acceptHeader)
     req.Header.Set("User-Agent", userAgentHeader)
+    req.Header.Set("Scrape-Timeout-Seconds", fmt.Sprintf("%f", s.timeout.Seconds()))

     resp, err := ctxhttp.Do(ctx, s.client, req)
     if err != nil {
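
The value is formatted with %f, which prints six digits after the decimal point, so a 1.5-second timeout is sent as "1.500000"; that is exactly the literal the test below compares against. A minimal standalone check of that formatting:

package main

import (
    "fmt"
    "time"
)

func main() {
    timeout := 1500 * time.Millisecond
    // %f defaults to six digits after the decimal point.
    fmt.Printf("%f\n", timeout.Seconds()) // Output: 1.500000
}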

retrieval/scrape_test.go

@@ -581,8 +581,20 @@ func TestScrapeLoopRun(t *testing.T) {
 }

 func TestTargetScraperScrapeOK(t *testing.T) {
+    const (
+        configTimeout   = 1500 * time.Millisecond
+        expectedTimeout = "1.500000"
+    )
+
     server := httptest.NewServer(
         http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+            timeout := r.Header.Get("Scrape-Timeout-Seconds")
+            if timeout != expectedTimeout {
+                t.Errorf("Scrape timeout did not match expected timeout")
+                t.Errorf("Expected: %v", expectedTimeout)
+                t.Fatalf("Got: %v", timeout)
+            }
+
             w.Header().Set("Content-Type", `text/plain; version=0.0.4`)
             w.Write([]byte("metric_a 1\nmetric_b 2\n"))
         }),
@@ -601,7 +613,8 @@ func TestTargetScraperScrapeOK(t *testing.T) {
                 model.AddressLabel: model.LabelValue(serverURL.Host),
             },
         },
-        client: http.DefaultClient,
+        client:  http.DefaultClient,
+        timeout: configTimeout,
     }

     now := time.Now()
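
With timeout: configTimeout wired into the targetScraper literal, the request issued by the scraper carries Scrape-Timeout-Seconds: 1.500000, so the handler's comparison against expectedTimeout passes; any mismatch fails the test through the t.Errorf/t.Fatalf calls above. To run just this test, something like go test -run TestTargetScraperScrapeOK ./retrieval should work, assuming the package still lives under retrieval/ as the commit message prefix suggests.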