Merge pull request #3160 from bboreham/remote-keepalive

Re-enable http keepalive on remote storage
Tom Wilkie 2017-09-14 08:23:43 +01:00 committed by GitHub
commit f66f882d08
3 changed files with 12 additions and 1 deletion


@@ -526,6 +526,8 @@ type HTTPClientConfig struct {
 	ProxyURL URL `yaml:"proxy_url,omitempty"`
 	// TLSConfig to use to connect to the targets.
 	TLSConfig TLSConfig `yaml:"tls_config,omitempty"`
+	// If set, override whether to use HTTP KeepAlive - scraping defaults OFF, remote read/write defaults ON
+	KeepAlive *bool `yaml:"keep_alive,omitempty"`
 	// Catches all undefined fields and must be empty after parsing.
 	XXX map[string]interface{} `yaml:",inline"`
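
The new field is a *bool rather than a plain bool so the YAML parser can tell an explicit keep_alive: false apart from the key being absent. A minimal, self-contained sketch of that behaviour, using a trimmed-down stand-in struct and gopkg.in/yaml.v2 rather than the real Prometheus config types:

package main

import (
	"fmt"

	yaml "gopkg.in/yaml.v2"
)

// trimmedConfig is a hypothetical stand-in carrying only the new field.
type trimmedConfig struct {
	KeepAlive *bool `yaml:"keep_alive,omitempty"`
}

func main() {
	var explicit, absent trimmedConfig

	// An explicit "keep_alive: false" yields a non-nil pointer to false.
	if err := yaml.Unmarshal([]byte("keep_alive: false"), &explicit); err != nil {
		panic(err)
	}
	fmt.Println(explicit.KeepAlive != nil, *explicit.KeepAlive) // true false

	// Omitting the key leaves the pointer nil, so each caller can apply its own default.
	if err := yaml.Unmarshal([]byte("{}"), &absent); err != nil {
		panic(err)
	}
	fmt.Println(absent.KeepAlive == nil) // true
}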


@@ -51,6 +51,11 @@ type clientConfig struct {
 // NewClient creates a new Client.
 func NewClient(index int, conf *clientConfig) (*Client, error) {
+	// If not specified in config, allow HTTP connections for remote API to use keep-alive
+	if conf.httpClientConfig.KeepAlive == nil {
+		val := true
+		conf.httpClientConfig.KeepAlive = &val
+	}
	httpClient, err := httputil.NewClientFromConfig(conf.httpClientConfig)
	if err != nil {
		return nil, err
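
Because the pointer is nil whenever the user never set the option, each client can apply its own default: remote read/write flips it to true here, while scraping leaves it unset and keeps the old keep-alive-off behaviour. A sketch of the same pattern as a standalone helper; boolOrDefault is illustrative and not part of this change:

// boolOrDefault resolves an optional boolean: a nil pointer means
// "not configured", so the supplied default wins; a non-nil pointer
// always wins, even when it points at false.
func boolOrDefault(v *bool, def bool) bool {
	if v == nil {
		return def
	}
	return *v
}

// Remote read/write would resolve to keep-alive on:
//	keepAlive := boolOrDefault(conf.httpClientConfig.KeepAlive, true)
// Scraping keeps the previous behaviour, keep-alive off:
//	keepAlive := boolOrDefault(cfg.KeepAlive, false)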


@@ -36,11 +36,15 @@ func NewClientFromConfig(cfg config.HTTPClientConfig) (*http.Client, error) {
	if err != nil {
		return nil, err
	}
+	disableKeepAlives := true // hard-coded default unless overridden in config
+	if cfg.KeepAlive != nil {
+		disableKeepAlives = !*cfg.KeepAlive
+	}
	// The only timeout we care about is the configured scrape timeout.
	// It is applied on request. So we leave out any timings here.
	var rt http.RoundTripper = &http.Transport{
		Proxy: http.ProxyURL(cfg.ProxyURL.URL),
-		DisableKeepAlives: true,
+		DisableKeepAlives: disableKeepAlives,
		TLSClientConfig: tlsConfig,
	}
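
Leaving DisableKeepAlives at false lets the transport pool idle connections, so successive remote read/write requests to the same endpoint reuse one TCP (and TLS) connection instead of redialling for every batch. A standalone sketch of the resulting transport wiring; the pooling knobs shown are Go's own defaults and are not set by this pull request:

package main

import (
	"net/http"
	"time"
)

func main() {
	keepAlive := true // what a nil cfg.KeepAlive now resolves to for remote read/write

	transport := &http.Transport{
		DisableKeepAlives: !keepAlive,
		// Pooling parameters; the change above leaves these at their defaults.
		MaxIdleConnsPerHost: 2,
		IdleConnTimeout:     90 * time.Second,
	}
	client := &http.Client{Transport: transport}

	// Requests issued through this client to the same host reuse pooled
	// connections while keep-alive is enabled.
	_ = client
}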