From 587dec9eb970531cddc7f1803d258e72129b5aa0 Mon Sep 17 00:00:00 2001
From: Krasi Georgiev
Date: Mon, 18 Dec 2017 19:41:31 +0000
Subject: [PATCH] rebased and resolved conflicts with the new Discovery GUI page

Signed-off-by: Krasi Georgiev
---
 cmd/prometheus/main.go    |  8 ++++----
 discovery/manager.go      |  6 +++---
 discovery/manager_test.go | 10 +++++-----
 retrieval/manager.go      | 25 +++++++++++++------------
 web/web.go                |  2 +-
 5 files changed, 26 insertions(+), 25 deletions(-)

diff --git a/cmd/prometheus/main.go b/cmd/prometheus/main.go
index 757ec5e85..ad580e997 100644
--- a/cmd/prometheus/main.go
+++ b/cmd/prometheus/main.go
@@ -238,11 +238,11 @@ func main() {
 		discoveryManager = discovery.NewManager(log.With(logger, "component", "discovery manager"))
 		scrapeManager = retrieval.NewScrapeManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
 		queryEngine = promql.NewEngine(fanoutStorage, &cfg.queryEngine)
-		ruleManager := rules.NewManager(&rules.ManagerOptions{
+		ruleManager = rules.NewManager(&rules.ManagerOptions{
 			Appendable: fanoutStorage,
 			QueryFunc: rules.EngineQueryFunc(queryEngine),
 			NotifyFunc: sendAlerts(notifier, cfg.web.ExternalURL.String()),
-			Context: ctx,
+			Context: ctxRule,
 			ExternalURL: cfg.web.ExternalURL,
 			Registerer: prometheus.DefaultRegisterer,
 			Logger: log.With(logger, "component", "rule manager"),
@@ -271,7 +271,7 @@ func main() {
 		cfg.web.Flags[f.Name] = f.Value.String()
 	}
 
-	// Depend on cfg.web.ScrapeManager so needs to be after cfg.web.ScrapeManager = scrapeManager
+	// Depends on cfg.web.ScrapeManager so needs to be after cfg.web.ScrapeManager = scrapeManager
 	webHandler := web.New(log.With(logger, "component", "web"), &cfg.web)
 
 	// Monitor outgoing connections on default transport with conntrack.
@@ -281,9 +281,9 @@ func main() {
 
 	reloaders := []func(cfg *config.Config) error{
 		remoteStorage.ApplyConfig,
-		discoveryManager.ApplyConfig,
 		webHandler.ApplyConfig,
 		notifier.ApplyConfig,
+		discoveryManager.ApplyConfig,
 		scrapeManager.ApplyConfig,
 		func(cfg *config.Config) error {
 			// Get all rule files matching the configuration oaths.
diff --git a/discovery/manager.go b/discovery/manager.go
index dbfc0bda4..a76676a57 100644
--- a/discovery/manager.go
+++ b/discovery/manager.go
@@ -53,7 +53,7 @@ type Discoverer interface {
 }
 
 type poolKey struct {
-	set      string
+	setName  string
 	provider string
 }
 
@@ -111,7 +111,7 @@ func (m *Manager) ApplyConfig(cfg *config.Config) error {
 	m.cancelDiscoverers()
 	for _, scfg := range cfg.ScrapeConfigs {
 		for provName, prov := range m.providersFromConfig(scfg.ServiceDiscoveryConfig) {
-			m.startProvider(ctx, poolKey{set: scfg.JobName, provider: provName}, prov)
+			m.startProvider(ctx, poolKey{setName: scfg.JobName, provider: provName}, prov)
 		}
 	}
 	close(err)
@@ -184,7 +184,7 @@ func (m *Manager) allGroups(pk poolKey) map[string][]*config.TargetGroup {
 	for _, pk := range pKeys {
 		for _, tg := range m.targets[pk] {
 			if tg.Source != "" { // Don't add empty targets.
-				tSetsAll[pk.set] = append(tSetsAll[pk.set], tg)
+				tSetsAll[pk.setName] = append(tSetsAll[pk.setName], tg)
 			}
 		}
 	}
diff --git a/discovery/manager_test.go b/discovery/manager_test.go
index 845fc8a87..748d19f18 100644
--- a/discovery/manager_test.go
+++ b/discovery/manager_test.go
@@ -590,7 +590,7 @@ func TestDiscoveryManagerSyncCalls(t *testing.T) {
 		var totalUpdatesCount int
 		for tpName, update := range testCase.updates {
 			provider := newMockDiscoveryProvider(update)
-			discoveryManager.startProvider(ctx, poolKey{set: strconv.Itoa(testIndex), provider: tpName}, provider)
+			discoveryManager.startProvider(ctx, poolKey{setName: strconv.Itoa(testIndex), provider: tpName}, provider)
 
 			if len(update) > 0 {
 				totalUpdatesCount = totalUpdatesCount + len(update)
@@ -674,8 +674,8 @@ scrape_configs:
 	discoveryManager.ApplyConfig(cfg)
 
 	_ = <-discoveryManager.SyncCh()
-	verifyPresence(discoveryManager.targets, poolKey{set: "prometheus", provider: "static/0"}, "{__address__=\"foo:9090\"}", true)
-	verifyPresence(discoveryManager.targets, poolKey{set: "prometheus", provider: "static/0"}, "{__address__=\"bar:9090\"}", true)
+	verifyPresence(discoveryManager.targets, poolKey{setName: "prometheus", provider: "static/0"}, "{__address__=\"foo:9090\"}", true)
+	verifyPresence(discoveryManager.targets, poolKey{setName: "prometheus", provider: "static/0"}, "{__address__=\"bar:9090\"}", true)
 
 	sTwo := `
 scrape_configs:
@@ -689,8 +689,8 @@ scrape_configs:
 	discoveryManager.ApplyConfig(cfg)
 
 	_ = <-discoveryManager.SyncCh()
-	verifyPresence(discoveryManager.targets, poolKey{set: "prometheus", provider: "static/0"}, "{__address__=\"foo:9090\"}", true)
-	verifyPresence(discoveryManager.targets, poolKey{set: "prometheus", provider: "static/0"}, "{__address__=\"bar:9090\"}", false)
+	verifyPresence(discoveryManager.targets, poolKey{setName: "prometheus", provider: "static/0"}, "{__address__=\"foo:9090\"}", true)
+	verifyPresence(discoveryManager.targets, poolKey{setName: "prometheus", provider: "static/0"}, "{__address__=\"bar:9090\"}", false)
 }
 
 type update struct {
diff --git a/retrieval/manager.go b/retrieval/manager.go
index e1bfc0ff9..7d9de9445 100644
--- a/retrieval/manager.go
+++ b/retrieval/manager.go
@@ -92,20 +92,21 @@ func (m *ScrapeManager) ApplyConfig(cfg *config.Config) error {
 }
 
 // TargetMap returns map of active and dropped targets and their corresponding scrape config job name.
-func (tm *TargetManager) TargetMap() map[string][]*Target {
-	tm.mtx.RLock()
-	defer tm.mtx.RUnlock()
-
-	targetsMap := make(map[string][]*Target)
-	for jobName, ps := range tm.targetSets {
-		ps.sp.mtx.RLock()
-		for _, t := range ps.sp.targets {
-			targetsMap[jobName] = append(targetsMap[jobName], t)
+func (m *ScrapeManager) TargetMap() map[string][]*Target {
+	targetsMap := make(chan map[string][]*Target)
+	m.actionCh <- func() {
+		targets := make(map[string][]*Target)
+		for jobName, sp := range m.scrapePools {
+			sp.mtx.RLock()
+			for _, t := range sp.targets {
+				targets[jobName] = append(targets[jobName], t)
+			}
+			targets[jobName] = append(targets[jobName], sp.droppedTargets...)
+			sp.mtx.RUnlock()
 		}
-		targetsMap[jobName] = append(targetsMap[jobName], ps.sp.droppedTargets...)
-		ps.sp.mtx.RUnlock()
+		targetsMap <- targets
 	}
-	return targetsMap
+	return <-targetsMap
 }
 
 // Targets returns the targets currently being scraped.
diff --git a/web/web.go b/web/web.go
index 6dfe67a39..74752f72e 100644
--- a/web/web.go
+++ b/web/web.go
@@ -587,7 +587,7 @@ func (h *Handler) rules(w http.ResponseWriter, r *http.Request) {
 
 func (h *Handler) serviceDiscovery(w http.ResponseWriter, r *http.Request) {
 	var index []string
-	targets := h.targetManager.TargetMap()
+	targets := h.scrapeManager.TargetMap()
 	for job := range targets {
 		index = append(index, job)
 	}
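
Note on the TargetMap change: the patched ScrapeManager.TargetMap sends a closure over m.actionCh and blocks on a reply channel, so the map of active and dropped targets is built inside the scrape manager's own event loop instead of under the old TargetManager read lock. The loop that drains actionCh is not part of this diff; the Go sketch below is a minimal, self-contained illustration of the same pattern, assuming such a loop exists, and the names manager, run, actionCh and the string-valued targets map are placeholders rather than the real Prometheus types.

package main

import "fmt"

// manager stands in for the scrape manager in this sketch: every access to
// its state goes through actionCh and is executed by the run loop, one
// closure at a time.
type manager struct {
	actionCh chan func()
	targets  map[string][]string // placeholder for per-job target state
}

// run is the event loop the pattern relies on: it serializes all closures
// sent on actionCh until stop is closed.
func (m *manager) run(stop chan struct{}) {
	for {
		select {
		case f := <-m.actionCh:
			f()
		case <-stop:
			return
		}
	}
}

// TargetMap mirrors the shape of the patched method: build the result inside
// the event loop, then hand it back to the caller over a channel.
func (m *manager) TargetMap() map[string][]string {
	result := make(chan map[string][]string)
	m.actionCh <- func() {
		out := make(map[string][]string, len(m.targets))
		for job, ts := range m.targets {
			out[job] = append(out[job], ts...)
		}
		result <- out
	}
	return <-result
}

func main() {
	m := &manager{
		actionCh: make(chan func()),
		targets:  map[string][]string{"prometheus": {"foo:9090", "bar:9090"}},
	}
	stop := make(chan struct{})
	go m.run(stop)
	fmt.Println(m.TargetMap()) // map[prometheus:[foo:9090 bar:9090]]
	close(stop)
}

The point of the pattern is that reads of the manager's state run on the same goroutine as its writes, so callers get a consistent snapshot without the manager exposing its internal locks.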