Mirror of https://github.com/prometheus/prometheus.git
retrieval: add Scrape-Timeout-Seconds header to each scrape request (#2565)

Fixes #2508.

parent 5f3327f620
commit fe4b6693f7
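With this change, every scrape request announces how long Prometheus will wait for the response, so a target can bound expensive collection work instead of being cut off mid-scrape. A minimal sketch of a target reading the header with plain net/http; the handler, port, and metric names are illustrative, not part of this commit:

package main

import (
	"net/http"
	"strconv"
	"time"
)

// metricsHandler is a hypothetical exporter endpoint that honors the
// Scrape-Timeout-Seconds header added by this commit.
func metricsHandler(w http.ResponseWriter, r *http.Request) {
	// Prometheus sends the configured scrape timeout in seconds, e.g. "1.500000".
	if v := r.Header.Get("Scrape-Timeout-Seconds"); v != "" {
		if secs, err := strconv.ParseFloat(v, 64); err == nil {
			deadline := time.Now().Add(time.Duration(secs * float64(time.Second)))
			_ = deadline // a real collector would stop expensive work before this deadline
		}
	}
	w.Header().Set("Content-Type", `text/plain; version=0.0.4`)
	w.Write([]byte("metric_a 1\nmetric_b 2\n"))
}

func main() {
	http.HandleFunc("/metrics", metricsHandler)
	http.ListenAndServe(":9100", nil)
}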
@@ -179,7 +179,11 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) {
 	for fp, oldLoop := range sp.loops {
 		var (
 			t       = sp.targets[fp]
-			s       = &targetScraper{Target: t, client: sp.client}
+			s       = &targetScraper{
+				Target:  t,
+				client:  sp.client,
+				timeout: timeout,
+			}
 			newLoop = sp.newLoop(sp.ctx, s, sp.appender, t.Labels(), sp.config)
 		)
 		wg.Add(1)
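Not visible in the hunk above: timeout is the pool-level scrape timeout. Earlier in reload it is presumably derived from the scrape config alongside the interval, along these lines (a fragment for context, not runnable on its own):

	var (
		interval = time.Duration(sp.config.ScrapeInterval)
		timeout  = time.Duration(sp.config.ScrapeTimeout)
	)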
@@ -240,7 +244,12 @@ func (sp *scrapePool) sync(targets []*Target) {
 		uniqueTargets[hash] = struct{}{}

 		if _, ok := sp.targets[hash]; !ok {
-			s := &targetScraper{Target: t, client: sp.client}
+			s := &targetScraper{
+				Target:  t,
+				client:  sp.client,
+				timeout: timeout,
+			}
+
 			l := sp.newLoop(sp.ctx, s, sp.appender, t.Labels(), sp.config)

 			sp.targets[hash] = t
@@ -284,6 +293,7 @@ type scraper interface {
 type targetScraper struct {
 	*Target
 	client *http.Client
+	timeout time.Duration
 }

 const acceptHeader = `application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.7,text/plain;version=0.0.4;q=0.3,*/*;q=0.1`
@@ -297,6 +307,7 @@ func (s *targetScraper) scrape(ctx context.Context, ts time.Time) (model.Samples
 	}
 	req.Header.Add("Accept", acceptHeader)
 	req.Header.Set("User-Agent", userAgentHeader)
+	req.Header.Set("Scrape-Timeout-Seconds", fmt.Sprintf("%f", s.timeout.Seconds()))

 	resp, err := ctxhttp.Do(ctx, s.client, req)
 	if err != nil {
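%f with no explicit precision prints six decimal places, so a 1.5s timeout arrives on the wire as the string "1.500000"; the test below pins exactly that value. A standalone check of the formatting:

package main

import (
	"fmt"
	"time"
)

func main() {
	timeout := 1500 * time.Millisecond
	// %f defaults to six decimal places: prints "1.500000".
	fmt.Printf("%f\n", timeout.Seconds())
}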
@@ -581,8 +581,20 @@ func TestScrapeLoopRun(t *testing.T) {
 }

 func TestTargetScraperScrapeOK(t *testing.T) {
+	const (
+		configTimeout   = 1500 * time.Millisecond
+		expectedTimeout = "1.500000"
+	)
+
 	server := httptest.NewServer(
 		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			timeout := r.Header.Get("Scrape-Timeout-Seconds")
+			if timeout != expectedTimeout {
+				t.Errorf("Scrape timeout did not match expected timeout")
+				t.Errorf("Expected: %v", expectedTimeout)
+				t.Fatalf("Got: %v", timeout)
+			}
+
 			w.Header().Set("Content-Type", `text/plain; version=0.0.4`)
 			w.Write([]byte("metric_a 1\nmetric_b 2\n"))
 		}),
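The test can be run on its own; assuming the package lives under retrieval/ at this commit (the "retrieval:" prefix in the commit title suggests so):

	go test ./retrieval -run TestTargetScraperScrapeOK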
@@ -602,6 +614,7 @@ func TestTargetScraperScrapeOK(t *testing.T) {
 			},
 		},
 		client: http.DefaultClient,
+		timeout: configTimeout,
 	}
 	now := time.Now()
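Note that the header is purely advisory for the target: on the scraping side the timeout is still enforced through the request context handed to ctxhttp.Do, as in the scrape() hunk above. A sketch of that enforcement pattern, with the URL and duration assumed rather than taken from this diff:

package main

import (
	"context"
	"net/http"
	"time"

	"golang.org/x/net/context/ctxhttp"
)

func main() {
	req, _ := http.NewRequest("GET", "http://localhost:9100/metrics", nil)
	// The context deadline, not the header, is what actually cancels a slow scrape.
	ctx, cancel := context.WithTimeout(context.Background(), 1500*time.Millisecond)
	defer cancel()
	resp, err := ctxhttp.Do(ctx, http.DefaultClient, req)
	if err != nil {
		return // deadline exceeded or connection error
	}
	resp.Body.Close()
}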