Merge pull request #3779 from krasi-georgiev/rename-retrieval-to-scrape

Rename retrieval to scrape
Frederic Branczyk 2018-02-01 14:16:34 +01:00 committed by GitHub
commit 81a96d3140
25 changed files with 50 additions and 50 deletions
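For downstream Go code the change is a pure rename: the retrieval package becomes scrape, and the ScrapeManager type becomes scrape.Manager. A minimal sketch of what a consumer updates (the package and variable names here are illustrative, not part of the commit):

package example

// Old: import "github.com/prometheus/prometheus/retrieval" and refer to
//      *retrieval.ScrapeManager.
// New: import the scrape package and refer to *scrape.Manager.
import "github.com/prometheus/prometheus/scrape"

// manager holds a reference under the renamed type; construction is shown in
// the main() hunk below.
var manager *scrape.Manager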

View file

@@ -50,8 +50,8 @@ import (
 sd_config "github.com/prometheus/prometheus/discovery/config"
 "github.com/prometheus/prometheus/notifier"
 "github.com/prometheus/prometheus/promql"
-"github.com/prometheus/prometheus/retrieval"
 "github.com/prometheus/prometheus/rules"
+"github.com/prometheus/prometheus/scrape"
 "github.com/prometheus/prometheus/storage"
 "github.com/prometheus/prometheus/storage/remote"
 "github.com/prometheus/prometheus/storage/tsdb"
@@ -246,7 +246,7 @@ func main() {
 ctxNotify, cancelNotify = context.WithCancel(context.Background())
 discoveryManagerNotify = discovery.NewManager(ctxNotify, log.With(logger, "component", "discovery manager notify"))
-scrapeManager = retrieval.NewManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
+scrapeManager = scrape.NewManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
 queryEngine = promql.NewEngine(fanoutStorage, &cfg.queryEngine)
 ruleManager = rules.NewManager(&rules.ManagerOptions{
 Appendable: fanoutStorage,
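Read together with the manager hunks further down (where NewManager, ApplyConfig, Run and Stop are defined), the renamed call in main() boils down to the wiring below. This is a simplified sketch: the helper name, the error handling and the stubbed discovery channel are ours, not part of the commit.

package example

import (
    "github.com/go-kit/kit/log"

    "github.com/prometheus/prometheus/config"
    "github.com/prometheus/prometheus/discovery/targetgroup"
    "github.com/prometheus/prometheus/scrape"
)

// runScrapeManager exercises the renamed API touched by this commit:
// NewManager, ApplyConfig and Run (Stop is the shutdown counterpart).
// app is anything satisfying scrape.Appendable, e.g. the fanout storage
// that cmd/prometheus passes in.
func runScrapeManager(logger log.Logger, app scrape.Appendable, cfg *config.Config,
    tsets <-chan map[string][]*targetgroup.Group) (*scrape.Manager, error) {

    m := scrape.NewManager(log.With(logger, "component", "scrape manager"), app)
    if err := m.ApplyConfig(cfg); err != nil {
        return nil, err
    }
    // Run blocks while consuming target-set updates from the discovery
    // manager, so it is started in its own goroutine here.
    go m.Run(tsets)
    return m, nil
}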

View file

@@ -389,7 +389,7 @@ func (d *Discovery) readFile(filename string) ([]*targetgroup.Group, error) {
 return nil, err
 }
 default:
-panic(fmt.Errorf("retrieval.FileDiscovery.readFile: unhandled file extension %q", ext))
+panic(fmt.Errorf("discovery.File.readFile: unhandled file extension %q", ext))
 }
 for i, tg := range targetGroups {

View file

@@ -96,7 +96,7 @@ func relabel(lset labels.Labels, cfg *config.RelabelConfig) labels.Labels {
 }
 }
 default:
-panic(fmt.Errorf("retrieval.relabel: unknown relabel action type %q", cfg.Action))
+panic(fmt.Errorf("relabel: unknown relabel action type %q", cfg.Action))
 }
 return lb.Labels()

View file

@@ -99,7 +99,7 @@ func relabel(labels model.LabelSet, cfg *config.RelabelConfig) model.LabelSet {
 }
 }
 default:
-panic(fmt.Errorf("retrieval.relabel: unknown relabel action type %q", cfg.Action))
+panic(fmt.Errorf("relabel: unknown relabel action type %q", cfg.Action))
 }
 return labels
 }

View file

@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-package retrieval
+package scrape
 import (
 "github.com/prometheus/prometheus/pkg/labels"

View file

@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-package retrieval
+package scrape
 import (
 "fmt"
@@ -31,10 +31,10 @@ type Appendable interface {
 Appender() (storage.Appender, error)
 }
-// NewManager is the ScrapeManager constructor
-func NewManager(logger log.Logger, app Appendable) *ScrapeManager {
-return &ScrapeManager{
+// NewManager is the Manager constructor
+func NewManager(logger log.Logger, app Appendable) *Manager {
+return &Manager{
 append: app,
 logger: logger,
 scrapeConfigs: make(map[string]*config.ScrapeConfig),
@@ -43,9 +43,9 @@ func NewManager(logger log.Logger, app Appendable) *ScrapeManager {
 }
 }
-// ScrapeManager maintains a set of scrape pools and manages start/stop cycles
+// Manager maintains a set of scrape pools and manages start/stop cycles
 // when receiving new target groups form the discovery manager.
-type ScrapeManager struct {
+type Manager struct {
 logger log.Logger
 append Appendable
 scrapeConfigs map[string]*config.ScrapeConfig
@@ -55,7 +55,7 @@ type ScrapeManager struct {
 }
 // Run starts background processing to handle target updates and reload the scraping loops.
-func (m *ScrapeManager) Run(tsets <-chan map[string][]*targetgroup.Group) error {
+func (m *Manager) Run(tsets <-chan map[string][]*targetgroup.Group) error {
 for {
 select {
 case ts := <-tsets:
@@ -67,7 +67,7 @@ func (m *ScrapeManager) Run(tsets <-chan map[string][]*targetgroup.Group) error
 }
 // Stop cancels all running scrape pools and blocks until all have exited.
-func (m *ScrapeManager) Stop() {
+func (m *Manager) Stop() {
 for _, sp := range m.scrapePools {
 sp.stop()
 }
@@ -75,7 +75,7 @@ func (m *ScrapeManager) Stop() {
 }
 // ApplyConfig resets the manager's target providers and job configurations as defined by the new cfg.
-func (m *ScrapeManager) ApplyConfig(cfg *config.Config) error {
+func (m *Manager) ApplyConfig(cfg *config.Config) error {
 m.mtx.Lock()
 defer m.mtx.Unlock()
 c := make(map[string]*config.ScrapeConfig)
@@ -98,7 +98,7 @@ func (m *ScrapeManager) ApplyConfig(cfg *config.Config) error {
 }
 // TargetMap returns map of active and dropped targets and their corresponding scrape config job name.
-func (m *ScrapeManager) TargetMap() map[string][]*Target {
+func (m *Manager) TargetMap() map[string][]*Target {
 m.mtx.Lock()
 defer m.mtx.Unlock()
@@ -116,7 +116,7 @@ func (m *ScrapeManager) TargetMap() map[string][]*Target {
 }
 // Targets returns the targets currently being scraped.
-func (m *ScrapeManager) Targets() []*Target {
+func (m *Manager) Targets() []*Target {
 m.mtx.Lock()
 defer m.mtx.Unlock()
@@ -132,7 +132,7 @@ func (m *ScrapeManager) Targets() []*Target {
 return targets
 }
-func (m *ScrapeManager) reload(t map[string][]*targetgroup.Group) {
+func (m *Manager) reload(t map[string][]*targetgroup.Group) {
 for tsetName, tgroup := range t {
 scrapeConfig, ok := m.scrapeConfigs[tsetName]
 if !ok {
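The Appendable contract in this file is untouched by the rename, so anything that can hand out a storage.Appender keeps working. A small illustrative check (helper and package names are ours) that the fanout storage passed in cmd/prometheus still fits the renamed interface:

package example

import (
    "github.com/prometheus/prometheus/scrape"
    "github.com/prometheus/prometheus/storage"
)

// asAppendable compiles only because storage.Storage exposes
// Appender() (storage.Appender, error), which is exactly the method
// scrape.Appendable asks for above.
func asAppendable(s storage.Storage) scrape.Appendable { return s }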

View file

@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-package retrieval
+package scrape
 import (
 "fmt"

View file

@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-package retrieval
+package scrape
 import (
 "bufio"

View file

@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-package retrieval
+package scrape
 import (
 "bytes"

View file

@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-package retrieval
+package scrape
 import (
 "errors"

View file

@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-package retrieval
+package scrape
 import (
 "crypto/tls"

View file

@@ -19,7 +19,7 @@ import (
 "github.com/prometheus/prometheus/storage"
 )
-// Appender implements retrieval.Appendable.
+// Appender implements scrape.Appendable.
 func (s *Storage) Appender() (storage.Appender, error) {
 return s, nil
 }
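The same pattern works for a throwaway test double: satisfy scrape.Appendable by returning something that implements storage.Appender. The sketch below is not part of this change, and it assumes the Add/AddFast/Commit/Rollback method set that storage.Appender has around this version of Prometheus; all names are ours.

package example

import (
    "github.com/prometheus/prometheus/pkg/labels"
    "github.com/prometheus/prometheus/scrape"
    "github.com/prometheus/prometheus/storage"
)

// nopAppendable hands out an appender that silently drops every sample,
// which is enough to exercise the scrape package in isolation.
type nopAppendable struct{}

func (nopAppendable) Appender() (storage.Appender, error) { return nopAppender{}, nil }

// nopAppender assumes storage.Appender's method set at the time of this
// commit: Add, AddFast, Commit and Rollback.
type nopAppender struct{}

func (nopAppender) Add(l labels.Labels, t int64, v float64) (uint64, error) { return 0, nil }
func (nopAppender) AddFast(ref uint64, t int64, v float64) error            { return nil }
func (nopAppender) Commit() error                                           { return nil }
func (nopAppender) Rollback() error                                         { return nil }

// Compile-time assertion that the double satisfies the renamed interface.
var _ scrape.Appendable = nopAppendable{}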

View file

@@ -38,7 +38,7 @@ import (
 "github.com/prometheus/prometheus/pkg/timestamp"
 "github.com/prometheus/prometheus/prompb"
 "github.com/prometheus/prometheus/promql"
-"github.com/prometheus/prometheus/retrieval"
+"github.com/prometheus/prometheus/scrape"
 "github.com/prometheus/prometheus/storage"
 "github.com/prometheus/prometheus/storage/remote"
 "github.com/prometheus/prometheus/util/httputil"
@@ -82,7 +82,7 @@ func (e *apiError) Error() string {
 }
 type targetRetriever interface {
-Targets() []*retrieval.Target
+Targets() []*scrape.Target
 }
 type alertmanagerRetriever interface {
@@ -428,9 +428,9 @@ type Target struct {
 ScrapeURL string `json:"scrapeUrl"`
 LastError string `json:"lastError"`
 LastScrape time.Time `json:"lastScrape"`
-Health retrieval.TargetHealth `json:"health"`
+Health scrape.TargetHealth `json:"health"`
 }
 // TargetDiscovery has all the active targets.

View file

@@ -38,13 +38,13 @@ import (
 "github.com/prometheus/prometheus/pkg/timestamp"
 "github.com/prometheus/prometheus/prompb"
 "github.com/prometheus/prometheus/promql"
-"github.com/prometheus/prometheus/retrieval"
+"github.com/prometheus/prometheus/scrape"
 "github.com/prometheus/prometheus/storage/remote"
 )
-type targetRetrieverFunc func() []*retrieval.Target
+type targetRetrieverFunc func() []*scrape.Target
-func (f targetRetrieverFunc) Targets() []*retrieval.Target {
+func (f targetRetrieverFunc) Targets() []*scrape.Target {
 return f()
 }
@@ -81,9 +81,9 @@ func TestEndpoints(t *testing.T) {
 now := time.Now()
-tr := targetRetrieverFunc(func() []*retrieval.Target {
-return []*retrieval.Target{
-retrieval.NewTarget(
+tr := targetRetrieverFunc(func() []*scrape.Target {
+return []*scrape.Target{
+scrape.NewTarget(
 labels.FromMap(map[string]string{
 model.SchemeLabel: "http",
 model.AddressLabel: "example.com:8080",

View file

@@ -40,7 +40,7 @@ import (
 "github.com/prometheus/prometheus/pkg/timestamp"
 pb "github.com/prometheus/prometheus/prompb"
 "github.com/prometheus/prometheus/promql"
-"github.com/prometheus/prometheus/retrieval"
+"github.com/prometheus/prometheus/scrape"
 "github.com/prometheus/prometheus/storage"
 )
@@ -50,7 +50,7 @@ type API struct {
 now func() time.Time
 db func() *tsdb.DB
 q func(ctx context.Context, mint, maxt int64) (storage.Querier, error)
-targets func() []*retrieval.Target
+targets func() []*scrape.Target
 alertmanagers func() []*url.URL
 }
@@ -60,7 +60,7 @@ func New(
 db func() *tsdb.DB,
 qe *promql.Engine,
 q func(ctx context.Context, mint, maxt int64) (storage.Querier, error),
-targets func() []*retrieval.Target,
+targets func() []*scrape.Target,
 alertmanagers func() []*url.URL,
 enableAdmin bool,
 ) *API {

View file

@@ -55,8 +55,8 @@ import (
 "github.com/prometheus/prometheus/notifier"
 "github.com/prometheus/prometheus/pkg/labels"
 "github.com/prometheus/prometheus/promql"
-"github.com/prometheus/prometheus/retrieval"
 "github.com/prometheus/prometheus/rules"
+"github.com/prometheus/prometheus/scrape"
 "github.com/prometheus/prometheus/storage"
 "github.com/prometheus/prometheus/template"
 "github.com/prometheus/prometheus/util/httputil"
@@ -71,7 +71,7 @@ var localhostRepresentations = []string{"127.0.0.1", "localhost"}
 type Handler struct {
 logger log.Logger
-scrapeManager *retrieval.ScrapeManager
+scrapeManager *scrape.Manager
 ruleManager *rules.Manager
 queryEngine *promql.Engine
 context context.Context
@@ -125,7 +125,7 @@ type Options struct {
 TSDB func() *tsdb.DB
 Storage storage.Storage
 QueryEngine *promql.Engine
-ScrapeManager *retrieval.ScrapeManager
+ScrapeManager *scrape.Manager
 RuleManager *rules.Manager
 Notifier *notifier.Manager
 Version *PrometheusVersion
@@ -404,7 +404,7 @@ func (h *Handler) Run(ctx context.Context) error {
 h.options.TSDB,
 h.options.QueryEngine,
 h.options.Storage.Querier,
-func() []*retrieval.Target {
+func() []*scrape.Target {
 return h.options.ScrapeManager.Targets()
 },
 func() []*url.URL {
@@ -594,7 +594,7 @@ func (h *Handler) serviceDiscovery(w http.ResponseWriter, r *http.Request) {
 sort.Strings(index)
 scrapeConfigData := struct {
 Index []string
-Targets map[string][]*retrieval.Target
+Targets map[string][]*scrape.Target
 }{
 Index: index,
 Targets: targets,
@@ -604,7 +604,7 @@ func (h *Handler) serviceDiscovery(w http.ResponseWriter, r *http.Request) {
 func (h *Handler) targets(w http.ResponseWriter, r *http.Request) {
 // Bucket targets by job label
-tps := map[string][]*retrieval.Target{}
+tps := map[string][]*scrape.Target{}
 for _, t := range h.scrapeManager.Targets() {
 job := t.Labels().Get(model.JobLabel)
 tps[job] = append(tps[job], t)
@@ -617,7 +617,7 @@ func (h *Handler) targets(w http.ResponseWriter, r *http.Request) {
 }
 h.executeTemplate(w, "targets.html", struct {
-TargetPools map[string][]*retrieval.Target
+TargetPools map[string][]*scrape.Target
 }{
 TargetPools: tps,
 })
@@ -707,21 +707,21 @@ func tmplFuncs(consolesPath string, opts *Options) template_text.FuncMap {
 }
 return u
 },
-"numHealthy": func(pool []*retrieval.Target) int {
+"numHealthy": func(pool []*scrape.Target) int {
 alive := len(pool)
 for _, p := range pool {
-if p.Health() != retrieval.HealthGood {
+if p.Health() != scrape.HealthGood {
 alive--
 }
 }
 return alive
 },
-"healthToClass": func(th retrieval.TargetHealth) string {
+"healthToClass": func(th scrape.TargetHealth) string {
 switch th {
-case retrieval.HealthUnknown:
+case scrape.HealthUnknown:
 return "warning"
-case retrieval.HealthGood:
+case scrape.HealthGood:
 return "success"
 default:
 return "danger"