Merge pull request #3779 from krasi-georgiev/rename-retrieval-to-scrape

Rename retrieval to scrape
Commit 81a96d3140 by Frederic Branczyk, 2018-02-01 14:16:34 +01:00, committed by GitHub.
25 changed files with 50 additions and 50 deletions
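In short, downstream code that imported the old package only needs the new import path and the renamed manager type; the constructor and methods keep their shapes. A minimal, compile-only sketch of the before/after (the variable below is illustrative, not part of this diff):

package main

import (
	// Before this PR the package lived at:
	//   "github.com/prometheus/prometheus/retrieval"
	// After the rename it is:
	"github.com/prometheus/prometheus/scrape"
)

func main() {
	// The exported manager type is renamed together with the package:
	//   *retrieval.ScrapeManager  ->  *scrape.Manager
	var m *scrape.Manager
	_ = m
}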


@@ -50,8 +50,8 @@ import (
sd_config "github.com/prometheus/prometheus/discovery/config"
"github.com/prometheus/prometheus/notifier"
"github.com/prometheus/prometheus/promql"
-"github.com/prometheus/prometheus/retrieval"
"github.com/prometheus/prometheus/rules"
+"github.com/prometheus/prometheus/scrape"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/remote"
"github.com/prometheus/prometheus/storage/tsdb"
@@ -246,7 +246,7 @@ func main() {
ctxNotify, cancelNotify = context.WithCancel(context.Background())
discoveryManagerNotify = discovery.NewManager(ctxNotify, log.With(logger, "component", "discovery manager notify"))
-scrapeManager = retrieval.NewManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
+scrapeManager = scrape.NewManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
queryEngine = promql.NewEngine(fanoutStorage, &cfg.queryEngine)
ruleManager = rules.NewManager(&rules.ManagerOptions{
Appendable: fanoutStorage,


@@ -389,7 +389,7 @@ func (d *Discovery) readFile(filename string) ([]*targetgroup.Group, error) {
return nil, err
}
default:
-panic(fmt.Errorf("retrieval.FileDiscovery.readFile: unhandled file extension %q", ext))
+panic(fmt.Errorf("discovery.File.readFile: unhandled file extension %q", ext))
}
for i, tg := range targetGroups {


@@ -96,7 +96,7 @@ func relabel(lset labels.Labels, cfg *config.RelabelConfig) labels.Labels {
}
}
default:
-panic(fmt.Errorf("retrieval.relabel: unknown relabel action type %q", cfg.Action))
+panic(fmt.Errorf("relabel: unknown relabel action type %q", cfg.Action))
}
return lb.Labels()


@@ -99,7 +99,7 @@ func relabel(labels model.LabelSet, cfg *config.RelabelConfig) model.LabelSet {
}
}
default:
-panic(fmt.Errorf("retrieval.relabel: unknown relabel action type %q", cfg.Action))
+panic(fmt.Errorf("relabel: unknown relabel action type %q", cfg.Action))
}
return labels
}


@@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package retrieval
+package scrape
import (
"github.com/prometheus/prometheus/pkg/labels"


@@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package retrieval
+package scrape
import (
"fmt"
@@ -31,10 +31,10 @@ type Appendable interface {
Appender() (storage.Appender, error)
}
-// NewManager is the ScrapeManager constructor
-func NewManager(logger log.Logger, app Appendable) *ScrapeManager {
+// NewManager is the Manager constructor
+func NewManager(logger log.Logger, app Appendable) *Manager {
-return &ScrapeManager{
+return &Manager{
append: app,
logger: logger,
scrapeConfigs: make(map[string]*config.ScrapeConfig),
@@ -43,9 +43,9 @@ func NewManager(logger log.Logger, app Appendable) *ScrapeManager {
}
}
-// ScrapeManager maintains a set of scrape pools and manages start/stop cycles
+// Manager maintains a set of scrape pools and manages start/stop cycles
// when receiving new target groups form the discovery manager.
-type ScrapeManager struct {
+type Manager struct {
logger log.Logger
append Appendable
scrapeConfigs map[string]*config.ScrapeConfig
@@ -55,7 +55,7 @@ type ScrapeManager struct {
}
// Run starts background processing to handle target updates and reload the scraping loops.
-func (m *ScrapeManager) Run(tsets <-chan map[string][]*targetgroup.Group) error {
+func (m *Manager) Run(tsets <-chan map[string][]*targetgroup.Group) error {
for {
select {
case ts := <-tsets:
@@ -67,7 +67,7 @@ func (m *ScrapeManager) Run(tsets <-chan map[string][]*targetgroup.Group) error
}
// Stop cancels all running scrape pools and blocks until all have exited.
-func (m *ScrapeManager) Stop() {
+func (m *Manager) Stop() {
for _, sp := range m.scrapePools {
sp.stop()
}
@@ -75,7 +75,7 @@ func (m *ScrapeManager) Stop() {
}
// ApplyConfig resets the manager's target providers and job configurations as defined by the new cfg.
-func (m *ScrapeManager) ApplyConfig(cfg *config.Config) error {
+func (m *Manager) ApplyConfig(cfg *config.Config) error {
m.mtx.Lock()
defer m.mtx.Unlock()
c := make(map[string]*config.ScrapeConfig)
@@ -98,7 +98,7 @@ func (m *ScrapeManager) ApplyConfig(cfg *config.Config) error {
}
// TargetMap returns map of active and dropped targets and their corresponding scrape config job name.
-func (m *ScrapeManager) TargetMap() map[string][]*Target {
+func (m *Manager) TargetMap() map[string][]*Target {
m.mtx.Lock()
defer m.mtx.Unlock()
@@ -116,7 +116,7 @@ func (m *ScrapeManager) TargetMap() map[string][]*Target {
}
// Targets returns the targets currently being scraped.
-func (m *ScrapeManager) Targets() []*Target {
+func (m *Manager) Targets() []*Target {
m.mtx.Lock()
defer m.mtx.Unlock()
@@ -132,7 +132,7 @@ func (m *ScrapeManager) Targets() []*Target {
return targets
}
-func (m *ScrapeManager) reload(t map[string][]*targetgroup.Group) {
+func (m *Manager) reload(t map[string][]*targetgroup.Group) {
for tsetName, tgroup := range t {
scrapeConfig, ok := m.scrapeConfigs[tsetName]
if !ok {
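Taken together, the hunks above define the renamed scrape.Manager API: NewManager, ApplyConfig, Run, Stop, TargetMap and Targets. Below is a rough lifecycle sketch based only on the signatures visible in this diff; the no-op Appendable, the empty config, and the hand-fed target-group channel are stand-ins for the fanout storage, loaded configuration, and discovery manager that cmd/prometheus/main.go wires up in practice.

package main

import (
	"fmt"

	"github.com/go-kit/kit/log"
	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/prometheus/prometheus/scrape"
	"github.com/prometheus/prometheus/storage"
)

// noopAppendable stands in for the fanout storage; it satisfies
// scrape.Appendable by exposing Appender() (storage.Appender, error).
type noopAppendable struct{}

func (noopAppendable) Appender() (storage.Appender, error) { return nil, nil }

func main() {
	// Formerly retrieval.NewManager returning *retrieval.ScrapeManager.
	m := scrape.NewManager(log.NewNopLogger(), noopAppendable{})

	// Install scrape configurations; an empty config keeps the sketch self-contained.
	if err := m.ApplyConfig(&config.Config{}); err != nil {
		panic(err)
	}

	// Run blocks while consuming target-group updates (normally produced by
	// the discovery manager), so it runs in its own goroutine.
	tsets := make(chan map[string][]*targetgroup.Group)
	go func() {
		if err := m.Run(tsets); err != nil {
			fmt.Println("scrape manager stopped:", err)
		}
	}()

	// Deliver one (empty) batch of target groups, then stop all scrape pools.
	tsets <- map[string][]*targetgroup.Group{}
	fmt.Println("active targets:", len(m.Targets()))
	m.Stop()
}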


@@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package retrieval
+package scrape
import (
"fmt"


@@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package retrieval
+package scrape
import (
"bufio"


@@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package retrieval
+package scrape
import (
"bytes"


@@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package retrieval
+package scrape
import (
"errors"


@@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package retrieval
+package scrape
import (
"crypto/tls"


@@ -19,7 +19,7 @@ import (
"github.com/prometheus/prometheus/storage"
)
-// Appender implements retrieval.Appendable.
+// Appender implements scrape.Appendable.
func (s *Storage) Appender() (storage.Appender, error) {
return s, nil
}
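The only change in this hunk is the doc comment, but it points at the interface that moved: anything exposing Appender() (storage.Appender, error) now satisfies scrape.Appendable rather than retrieval.Appendable. A compile-only illustration with a hypothetical type (not the Storage from this file):

package main

import (
	"github.com/prometheus/prometheus/scrape"
	"github.com/prometheus/prometheus/storage"
)

// exampleStorage mirrors the pattern above: exposing Appender() is all it
// takes to be usable as a scrape.Appendable (formerly retrieval.Appendable).
type exampleStorage struct{}

func (s *exampleStorage) Appender() (storage.Appender, error) { return nil, nil }

// Compile-time assertion that exampleStorage implements scrape.Appendable.
var _ scrape.Appendable = &exampleStorage{}

func main() {}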


@@ -38,7 +38,7 @@ import (
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/prompb"
"github.com/prometheus/prometheus/promql"
-"github.com/prometheus/prometheus/retrieval"
+"github.com/prometheus/prometheus/scrape"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/storage/remote"
"github.com/prometheus/prometheus/util/httputil"
@@ -82,7 +82,7 @@ func (e *apiError) Error() string {
}
type targetRetriever interface {
-Targets() []*retrieval.Target
+Targets() []*scrape.Target
}
type alertmanagerRetriever interface {
@@ -428,9 +428,9 @@ type Target struct {
ScrapeURL string `json:"scrapeUrl"`
-LastError string `json:"lastError"`
-LastScrape time.Time `json:"lastScrape"`
-Health retrieval.TargetHealth `json:"health"`
+LastError string `json:"lastError"`
+LastScrape time.Time `json:"lastScrape"`
+Health scrape.TargetHealth `json:"health"`
}
// TargetDiscovery has all the active targets.


@@ -38,13 +38,13 @@ import (
"github.com/prometheus/prometheus/pkg/timestamp"
"github.com/prometheus/prometheus/prompb"
"github.com/prometheus/prometheus/promql"
-"github.com/prometheus/prometheus/retrieval"
+"github.com/prometheus/prometheus/scrape"
"github.com/prometheus/prometheus/storage/remote"
)
-type targetRetrieverFunc func() []*retrieval.Target
+type targetRetrieverFunc func() []*scrape.Target
-func (f targetRetrieverFunc) Targets() []*retrieval.Target {
+func (f targetRetrieverFunc) Targets() []*scrape.Target {
return f()
}
@@ -81,9 +81,9 @@ func TestEndpoints(t *testing.T) {
now := time.Now()
-tr := targetRetrieverFunc(func() []*retrieval.Target {
-return []*retrieval.Target{
-retrieval.NewTarget(
+tr := targetRetrieverFunc(func() []*scrape.Target {
+return []*scrape.Target{
+scrape.NewTarget(
labels.FromMap(map[string]string{
model.SchemeLabel: "http",
model.AddressLabel: "example.com:8080",


@@ -40,7 +40,7 @@ import (
"github.com/prometheus/prometheus/pkg/timestamp"
pb "github.com/prometheus/prometheus/prompb"
"github.com/prometheus/prometheus/promql"
-"github.com/prometheus/prometheus/retrieval"
+"github.com/prometheus/prometheus/scrape"
"github.com/prometheus/prometheus/storage"
)
@@ -50,7 +50,7 @@ type API struct {
now func() time.Time
db func() *tsdb.DB
q func(ctx context.Context, mint, maxt int64) (storage.Querier, error)
-targets func() []*retrieval.Target
+targets func() []*scrape.Target
alertmanagers func() []*url.URL
}
@@ -60,7 +60,7 @@ func New(
db func() *tsdb.DB,
qe *promql.Engine,
q func(ctx context.Context, mint, maxt int64) (storage.Querier, error),
-targets func() []*retrieval.Target,
+targets func() []*scrape.Target,
alertmanagers func() []*url.URL,
enableAdmin bool,
) *API {


@@ -55,8 +55,8 @@ import (
"github.com/prometheus/prometheus/notifier"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/promql"
-"github.com/prometheus/prometheus/retrieval"
"github.com/prometheus/prometheus/rules"
+"github.com/prometheus/prometheus/scrape"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/template"
"github.com/prometheus/prometheus/util/httputil"
@@ -71,7 +71,7 @@ var localhostRepresentations = []string{"127.0.0.1", "localhost"}
type Handler struct {
logger log.Logger
-scrapeManager *retrieval.ScrapeManager
+scrapeManager *scrape.Manager
ruleManager *rules.Manager
queryEngine *promql.Engine
context context.Context
@@ -125,7 +125,7 @@ type Options struct {
TSDB func() *tsdb.DB
Storage storage.Storage
QueryEngine *promql.Engine
-ScrapeManager *retrieval.ScrapeManager
+ScrapeManager *scrape.Manager
RuleManager *rules.Manager
Notifier *notifier.Manager
Version *PrometheusVersion
@@ -404,7 +404,7 @@ func (h *Handler) Run(ctx context.Context) error {
h.options.TSDB,
h.options.QueryEngine,
h.options.Storage.Querier,
-func() []*retrieval.Target {
+func() []*scrape.Target {
return h.options.ScrapeManager.Targets()
},
func() []*url.URL {
@@ -594,7 +594,7 @@ func (h *Handler) serviceDiscovery(w http.ResponseWriter, r *http.Request) {
sort.Strings(index)
scrapeConfigData := struct {
Index []string
-Targets map[string][]*retrieval.Target
+Targets map[string][]*scrape.Target
}{
Index: index,
Targets: targets,
@@ -604,7 +604,7 @@ func (h *Handler) serviceDiscovery(w http.ResponseWriter, r *http.Request) {
func (h *Handler) targets(w http.ResponseWriter, r *http.Request) {
// Bucket targets by job label
-tps := map[string][]*retrieval.Target{}
+tps := map[string][]*scrape.Target{}
for _, t := range h.scrapeManager.Targets() {
job := t.Labels().Get(model.JobLabel)
tps[job] = append(tps[job], t)
@@ -617,7 +617,7 @@ func (h *Handler) targets(w http.ResponseWriter, r *http.Request) {
}
h.executeTemplate(w, "targets.html", struct {
-TargetPools map[string][]*retrieval.Target
+TargetPools map[string][]*scrape.Target
}{
TargetPools: tps,
})
@@ -707,21 +707,21 @@ func tmplFuncs(consolesPath string, opts *Options) template_text.FuncMap {
}
return u
},
-"numHealthy": func(pool []*retrieval.Target) int {
+"numHealthy": func(pool []*scrape.Target) int {
alive := len(pool)
for _, p := range pool {
-if p.Health() != retrieval.HealthGood {
+if p.Health() != scrape.HealthGood {
alive--
}
}
return alive
},
-"healthToClass": func(th retrieval.TargetHealth) string {
+"healthToClass": func(th scrape.TargetHealth) string {
switch th {
-case retrieval.HealthUnknown:
+case scrape.HealthUnknown:
return "warning"
-case retrieval.HealthGood:
+case scrape.HealthGood:
return "success"
return "success"
default:
return "danger"