discovery: Expose custom HTTP client options to discoverers (#10462)
* discovery: expose HTTP client options to discoverers
* discovery/http: use HTTP client options for created client
* scrape: use a list of HTTP client options instead of just dial context
* discovery: rephrase comment

Signed-off-by: Robert Fratto <robertfratto@gmail.com>
This commit is contained in:
parent 4d8bbfd416
commit 44a5e705be
@@ -41,6 +41,10 @@ type Discoverer interface {
 // DiscovererOptions provides options for a Discoverer.
 type DiscovererOptions struct {
 	Logger log.Logger
+
+	// Extra HTTP client options to expose to Discoverers. This field may be
+	// ignored; Discoverer implementations must opt-in to reading it.
+	HTTPClientOptions []config.HTTPClientOption
 }
 
 // A Config provides the configuration and constructor for a Discoverer.
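The new field is opt-in: a discoverer only picks up the options if its NewDiscoverer forwards them when building an HTTP client. A minimal sketch (not part of the commit) of a custom SD mechanism doing so is below; the exampleDiscovery type, its Run body, and the Discoverer Run signature are assumptions based on the surrounding discovery package.

// Hypothetical custom service discovery that opts in to
// DiscovererOptions.HTTPClientOptions.
package examplesd

import (
	"context"
	"net/http"

	"github.com/go-kit/log"
	"github.com/prometheus/common/config"

	"github.com/prometheus/prometheus/discovery"
	"github.com/prometheus/prometheus/discovery/targetgroup"
)

type SDConfig struct {
	HTTPClientConfig config.HTTPClientConfig `yaml:",inline"`
}

func (*SDConfig) Name() string { return "example" }

// NewDiscoverer forwards opts.HTTPClientOptions into the client it builds.
// Discoverers that ignore the field keep working unchanged.
func (c *SDConfig) NewDiscoverer(opts discovery.DiscovererOptions) (discovery.Discoverer, error) {
	client, err := config.NewClientFromConfig(c.HTTPClientConfig, "example", opts.HTTPClientOptions...)
	if err != nil {
		return nil, err
	}
	return &exampleDiscovery{client: client, logger: opts.Logger}, nil
}

type exampleDiscovery struct {
	client *http.Client
	logger log.Logger
}

func (d *exampleDiscovery) Run(ctx context.Context, up chan<- []*targetgroup.Group) {
	// Polling a remote endpoint with d.client is elided; the point is that
	// d.client was built with the manager-supplied options.
	<-ctx.Done()
}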
@@ -71,7 +71,7 @@ func (*SDConfig) Name() string { return "http" }
 
 // NewDiscoverer returns a Discoverer for the Config.
 func (c *SDConfig) NewDiscoverer(opts discovery.DiscovererOptions) (discovery.Discoverer, error) {
-	return NewDiscovery(c, opts.Logger)
+	return NewDiscovery(c, opts.Logger, opts.HTTPClientOptions)
 }
 
 // SetDirectory joins any relative file paths with dir.

@@ -116,12 +116,12 @@ type Discovery struct {
 }
 
 // NewDiscovery returns a new HTTP discovery for the given config.
-func NewDiscovery(conf *SDConfig, logger log.Logger) (*Discovery, error) {
+func NewDiscovery(conf *SDConfig, logger log.Logger, clientOpts []config.HTTPClientOption) (*Discovery, error) {
 	if logger == nil {
 		logger = log.NewNopLogger()
 	}
 
-	client, err := config.NewClientFromConfig(conf.HTTPClientConfig, "http")
+	client, err := config.NewClientFromConfig(conf.HTTPClientConfig, "http", clientOpts...)
 	if err != nil {
 		return nil, err
 	}
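A sketch (not from the commit) of calling the new NewDiscovery signature directly. The option shown, config.WithDialContextFunc, is the same one the scrape code below used to build internally; here it routes the HTTP SD's refresh requests through a custom dialer. The URL, interval, and config.DefaultHTTPClientConfig values are placeholder assumptions.

package main

import (
	"context"
	"net"
	"time"

	"github.com/go-kit/log"
	"github.com/prometheus/common/config"
	"github.com/prometheus/common/model"

	httpsd "github.com/prometheus/prometheus/discovery/http"
)

func main() {
	dialer := &net.Dialer{Timeout: 5 * time.Second}
	// A custom dial hook could add proxying, connection metrics, etc.
	dial := func(ctx context.Context, network, addr string) (net.Conn, error) {
		return dialer.DialContext(ctx, network, addr)
	}

	cfg := &httpsd.SDConfig{
		HTTPClientConfig: config.DefaultHTTPClientConfig,
		URL:              "http://localhost:9000/targets",
		RefreshInterval:  model.Duration(30 * time.Second),
	}

	d, err := httpsd.NewDiscovery(cfg, log.NewNopLogger(),
		[]config.HTTPClientOption{config.WithDialContextFunc(dial)})
	if err != nil {
		panic(err)
	}
	_ = d // d.Run(ctx, ch) would then refresh target groups over the custom dialer.
}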
@@ -41,7 +41,7 @@ func TestHTTPValidRefresh(t *testing.T) {
 		RefreshInterval: model.Duration(30 * time.Second),
 	}
 
-	d, err := NewDiscovery(&cfg, log.NewNopLogger())
+	d, err := NewDiscovery(&cfg, log.NewNopLogger(), nil)
 	require.NoError(t, err)
 
 	ctx := context.Background()

@@ -79,7 +79,7 @@ func TestHTTPInvalidCode(t *testing.T) {
 		RefreshInterval: model.Duration(30 * time.Second),
 	}
 
-	d, err := NewDiscovery(&cfg, log.NewNopLogger())
+	d, err := NewDiscovery(&cfg, log.NewNopLogger(), nil)
 	require.NoError(t, err)
 
 	ctx := context.Background()

@@ -101,7 +101,7 @@ func TestHTTPInvalidFormat(t *testing.T) {
 		RefreshInterval: model.Duration(30 * time.Second),
 	}
 
-	d, err := NewDiscovery(&cfg, log.NewNopLogger())
+	d, err := NewDiscovery(&cfg, log.NewNopLogger(), nil)
 	require.NoError(t, err)
 
 	ctx := context.Background()

@@ -417,7 +417,7 @@ func TestSourceDisappeared(t *testing.T) {
 		URL:             ts.URL,
 		RefreshInterval: model.Duration(1 * time.Second),
 	}
-	d, err := NewDiscovery(&cfg, log.NewNopLogger())
+	d, err := NewDiscovery(&cfg, log.NewNopLogger(), nil)
 	require.NoError(t, err)
 	for _, test := range cases {
 		ctx := context.Background()
@@ -23,6 +23,7 @@ import (
 	"github.com/go-kit/log"
 	"github.com/go-kit/log/level"
 	"github.com/prometheus/client_golang/prometheus"
+	"github.com/prometheus/common/config"
 
 	"github.com/prometheus/prometheus/discovery/targetgroup"
 )

@@ -124,13 +125,22 @@ func Name(n string) func(*Manager) {
 	}
 }
 
+// HTTPClientOptions sets the list of HTTP client options to expose to
+// Discoverers. It is up to Discoverers to choose to use the options provided.
+func HTTPClientOptions(opts ...config.HTTPClientOption) func(*Manager) {
+	return func(m *Manager) {
+		m.httpOpts = opts
+	}
+}
+
 // Manager maintains a set of discovery providers and sends each update to a map channel.
 // Targets are grouped by the target set name.
 type Manager struct {
 	logger         log.Logger
 	name           string
+	httpOpts       []config.HTTPClientOption
 	mtx            sync.RWMutex
 	ctx            context.Context
 
 	// Some Discoverers(e.g. k8s) send only the updates for a given target group,
 	// so we use map[tg.Source]*targetgroup.Group to know which group to update.

@@ -404,7 +414,8 @@ func (m *Manager) registerProviders(cfgs Configs, setName string) int {
 		}
 		typ := cfg.Name()
 		d, err := cfg.NewDiscoverer(DiscovererOptions{
-			Logger: log.With(m.logger, "discovery", typ),
+			Logger:            log.With(m.logger, "discovery", typ),
+			HTTPClientOptions: m.httpOpts,
 		})
 		if err != nil {
 			level.Error(m.logger).Log("msg", "Cannot create service discovery", "err", err, "type", typ)
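A sketch (not part of the commit) of how a caller could wire client options through the discovery manager. The discovery.NewManager(ctx, logger, ...func(*Manager)) constructor is assumed from the surrounding functional options (Name, and now HTTPClientOptions); once set, registerProviders hands the options to every Discoverer, as the hunk above shows.

package main

import (
	"context"
	"net"

	"github.com/go-kit/log"
	"github.com/prometheus/common/config"

	"github.com/prometheus/prometheus/discovery"
)

func main() {
	ctx := context.Background()
	logger := log.NewNopLogger()

	// Hypothetical custom dialer, e.g. to pin a source interface or add tracing.
	dial := func(ctx context.Context, network, addr string) (net.Conn, error) {
		return (&net.Dialer{}).DialContext(ctx, network, addr)
	}

	mgr := discovery.NewManager(ctx, logger,
		discovery.Name("scrape"),
		discovery.HTTPClientOptions(config.WithDialContextFunc(dial)),
	)
	_ = mgr // mgr.ApplyConfig(...) and mgr.Run() would follow as usual.
}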
@@ -125,9 +125,8 @@ func NewManager(o *Options, logger log.Logger, app storage.Appendable) *Manager
 type Options struct {
 	ExtraMetrics bool
 
-	// Optional function to override dialing to scrape targets. Go's default
-	// dialer is used when not provided.
-	DialContextFunc config_util.DialContextFunc
+	// Optional HTTP client options to use when scraping.
+	HTTPClientOptions []config_util.HTTPClientOption
 }
 
 // Manager maintains a set of scrape pools and manages start/stop cycles

@@ -196,7 +195,7 @@ func (m *Manager) reload() {
 			level.Error(m.logger).Log("msg", "error reloading target set", "err", "invalid config id:"+setName)
 			continue
 		}
-		sp, err := newScrapePool(scrapeConfig, m.append, m.jitterSeed, log.With(m.logger, "scrape_pool", setName), m.opts.ExtraMetrics, m.opts.DialContextFunc)
+		sp, err := newScrapePool(scrapeConfig, m.append, m.jitterSeed, log.With(m.logger, "scrape_pool", setName), m.opts.ExtraMetrics, m.opts.HTTPClientOptions)
 		if err != nil {
 			level.Error(m.logger).Log("msg", "error creating new scrape pool", "err", err, "scrape_pool", setName)
 			continue
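A migration sketch, assuming a caller that previously set scrape.Options.DialContextFunc: the same behaviour is available by wrapping the dialer in config_util.WithDialContextFunc, the very option the old scrape-pool code built internally (see the removed lines in the hunks below). The scrapeOptions helper is hypothetical.

package main

import (
	"context"
	"net"

	config_util "github.com/prometheus/common/config"

	"github.com/prometheus/prometheus/scrape"
)

// scrapeOptions shows the before/after shape of the Options struct.
func scrapeOptions(dial config_util.DialContextFunc) *scrape.Options {
	// Before: &scrape.Options{DialContextFunc: dial}
	// After: wrap the dialer as one HTTP client option among possibly many.
	return &scrape.Options{
		HTTPClientOptions: []config_util.HTTPClientOption{
			config_util.WithDialContextFunc(dial),
		},
	}
}

func main() {
	dial := func(ctx context.Context, network, addr string) (net.Conn, error) {
		return (&net.Dialer{}).DialContext(ctx, network, addr)
	}
	opts := scrapeOptions(dial)
	_ = opts // Passed to scrape.NewManager(opts, logger, appendable).
}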
@@ -224,7 +224,7 @@ type scrapePool struct {
 	appendable storage.Appendable
 	logger     log.Logger
 	cancel     context.CancelFunc
-	dialFunc   config_util.DialContextFunc
+	httpOpts   []config_util.HTTPClientOption
 
 	// mtx must not be taken after targetMtx.
 	mtx    sync.Mutex

@@ -265,18 +265,13 @@ const maxAheadTime = 10 * time.Minute
 
 type labelsMutator func(labels.Labels) labels.Labels
 
-func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed uint64, logger log.Logger, reportExtraMetrics bool, dialFunc config_util.DialContextFunc) (*scrapePool, error) {
+func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed uint64, logger log.Logger, reportExtraMetrics bool, httpOpts []config_util.HTTPClientOption) (*scrapePool, error) {
 	targetScrapePools.Inc()
 	if logger == nil {
 		logger = log.NewNopLogger()
 	}
 
-	var extraOptions []config_util.HTTPClientOption
-	if dialFunc != nil {
-		extraOptions = append(extraOptions, config_util.WithDialContextFunc(dialFunc))
-	}
-
-	client, err := config_util.NewClientFromConfig(cfg.HTTPClientConfig, cfg.JobName, extraOptions...)
+	client, err := config_util.NewClientFromConfig(cfg.HTTPClientConfig, cfg.JobName, httpOpts...)
 	if err != nil {
 		targetScrapePoolsFailed.Inc()
 		return nil, errors.Wrap(err, "error creating HTTP client")

@@ -293,7 +288,7 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed
 		activeTargets: map[uint64]*Target{},
 		loops:         map[uint64]loop{},
 		logger:        logger,
-		dialFunc:      dialFunc,
+		httpOpts:      httpOpts,
 	}
 	sp.newLoop = func(opts scrapeLoopOptions) loop {
 		// Update the targets retrieval function for metadata to a new scrape cache.

@@ -392,12 +387,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 	targetScrapePoolReloads.Inc()
 	start := time.Now()
 
-	var extraOptions []config_util.HTTPClientOption
-	if sp.dialFunc != nil {
-		extraOptions = append(extraOptions, config_util.WithDialContextFunc(sp.dialFunc))
-	}
-
-	client, err := config_util.NewClientFromConfig(cfg.HTTPClientConfig, cfg.JobName, extraOptions...)
+	client, err := config_util.NewClientFromConfig(cfg.HTTPClientConfig, cfg.JobName, sp.httpOpts...)
 	if err != nil {
 		targetScrapePoolReloadsFailed.Inc()
 		return errors.Wrap(err, "error creating HTTP client")
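A small sketch (not from the commit) of why dropping the old nil check on dialFunc is safe: expanding a nil []config_util.HTTPClientOption slice with "..." passes no options at all, so callers that never set HTTPClientOptions get the same client the old no-dialFunc branch produced. config_util.DefaultHTTPClientConfig and the "example" name are placeholder assumptions.

package main

import (
	"fmt"

	config_util "github.com/prometheus/common/config"
)

func main() {
	cfg := config_util.DefaultHTTPClientConfig

	// Old path: no extra options at all.
	plain, err := config_util.NewClientFromConfig(cfg, "example")
	if err != nil {
		panic(err)
	}

	// New path with an unset field: a nil slice expands to zero options.
	var none []config_util.HTTPClientOption
	same, err := config_util.NewClientFromConfig(cfg, "example", none...)
	if err != nil {
		panic(err)
	}

	fmt.Println(plain != nil, same != nil) // both clients are built identically
}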