Mirror of https://github.com/prometheus/prometheus.git
rename package retrieval to scrape
No functional changes, just renaming the retrieval package to scrape.
parent b30ee3e69a
commit b75428ec19
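For downstream code the change is purely mechanical: swap the import path and the package qualifier. Below is a minimal sketch (not part of the commit) of what a caller looks like after the rename, assuming the two-argument NewManager(logger, Appendable storage) constructor visible in the hunks that follow; the nil arguments are illustrative placeholders only, not a working configuration.

package main

import (
    // Before this commit the import path was "github.com/prometheus/prometheus/retrieval".
    "github.com/prometheus/prometheus/scrape"
)

func main() {
    // Only the package qualifier changes: retrieval.NewManager becomes scrape.NewManager.
    // The constructor still takes a logger and an Appendable storage; nil placeholders
    // are used here purely for illustration.
    mgr := scrape.NewManager(nil, nil)
    _ = mgr
}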
@@ -50,8 +50,8 @@ import (
     sd_config "github.com/prometheus/prometheus/discovery/config"
     "github.com/prometheus/prometheus/notifier"
     "github.com/prometheus/prometheus/promql"
-    "github.com/prometheus/prometheus/retrieval"
     "github.com/prometheus/prometheus/rules"
+    "github.com/prometheus/prometheus/scrape"
     "github.com/prometheus/prometheus/storage"
     "github.com/prometheus/prometheus/storage/remote"
     "github.com/prometheus/prometheus/storage/tsdb"
@@ -246,7 +246,7 @@ func main() {
         ctxNotify, cancelNotify = context.WithCancel(context.Background())
         discoveryManagerNotify = discovery.NewManager(ctxNotify, log.With(logger, "component", "discovery manager notify"))

-        scrapeManager = retrieval.NewManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
+        scrapeManager = scrape.NewManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
         queryEngine = promql.NewEngine(fanoutStorage, &cfg.queryEngine)
         ruleManager = rules.NewManager(&rules.ManagerOptions{
             Appendable: fanoutStorage,
@@ -389,7 +389,7 @@ func (d *Discovery) readFile(filename string) ([]*targetgroup.Group, error) {
             return nil, err
         }
     default:
-        panic(fmt.Errorf("retrieval.FileDiscovery.readFile: unhandled file extension %q", ext))
+        panic(fmt.Errorf("discovery.File.readFile: unhandled file extension %q", ext))
     }

     for i, tg := range targetGroups {
@@ -96,7 +96,7 @@ func relabel(lset labels.Labels, cfg *config.RelabelConfig) labels.Labels {
             }
         }
     default:
-        panic(fmt.Errorf("retrieval.relabel: unknown relabel action type %q", cfg.Action))
+        panic(fmt.Errorf("relabel: unknown relabel action type %q", cfg.Action))
     }

     return lb.Labels()
@@ -99,7 +99,7 @@ func relabel(labels model.LabelSet, cfg *config.RelabelConfig) model.LabelSet {
             }
         }
     default:
-        panic(fmt.Errorf("retrieval.relabel: unknown relabel action type %q", cfg.Action))
+        panic(fmt.Errorf("relabel: unknown relabel action type %q", cfg.Action))
     }
     return labels
 }
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package retrieval
+package scrape

 import (
     "github.com/prometheus/prometheus/pkg/labels"
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package retrieval
+package scrape

 import (
     "fmt"
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package retrieval
+package scrape

 import (
     "fmt"
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package retrieval
+package scrape

 import (
     "bufio"
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package retrieval
+package scrape

 import (
     "bytes"
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package retrieval
+package scrape

 import (
     "errors"
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package retrieval
+package scrape

 import (
     "crypto/tls"
@@ -19,7 +19,7 @@ import (
     "github.com/prometheus/prometheus/storage"
 )

-// Appender implements retrieval.Appendable.
+// Appender implements scrape.Appendable.
 func (s *Storage) Appender() (storage.Appender, error) {
     return s, nil
 }
@@ -38,7 +38,7 @@ import (
     "github.com/prometheus/prometheus/pkg/timestamp"
     "github.com/prometheus/prometheus/prompb"
     "github.com/prometheus/prometheus/promql"
-    "github.com/prometheus/prometheus/retrieval"
+    "github.com/prometheus/prometheus/scrape"
     "github.com/prometheus/prometheus/storage"
     "github.com/prometheus/prometheus/storage/remote"
     "github.com/prometheus/prometheus/util/httputil"
@@ -82,7 +82,7 @@ func (e *apiError) Error() string {
 }

 type targetRetriever interface {
-    Targets() []*retrieval.Target
+    Targets() []*scrape.Target
 }

 type alertmanagerRetriever interface {
@@ -430,7 +430,7 @@ type Target struct {

     LastError string `json:"lastError"`
     LastScrape time.Time `json:"lastScrape"`
-    Health retrieval.TargetHealth `json:"health"`
+    Health scrape.TargetHealth `json:"health"`
 }

 // TargetDiscovery has all the active targets.
@@ -38,13 +38,13 @@ import (
     "github.com/prometheus/prometheus/pkg/timestamp"
     "github.com/prometheus/prometheus/prompb"
     "github.com/prometheus/prometheus/promql"
-    "github.com/prometheus/prometheus/retrieval"
+    "github.com/prometheus/prometheus/scrape"
     "github.com/prometheus/prometheus/storage/remote"
 )

-type targetRetrieverFunc func() []*retrieval.Target
+type targetRetrieverFunc func() []*scrape.Target

-func (f targetRetrieverFunc) Targets() []*retrieval.Target {
+func (f targetRetrieverFunc) Targets() []*scrape.Target {
     return f()
 }

@@ -81,9 +81,9 @@ func TestEndpoints(t *testing.T) {

     now := time.Now()

-    tr := targetRetrieverFunc(func() []*retrieval.Target {
-        return []*retrieval.Target{
-            retrieval.NewTarget(
+    tr := targetRetrieverFunc(func() []*scrape.Target {
+        return []*scrape.Target{
+            scrape.NewTarget(
                 labels.FromMap(map[string]string{
                     model.SchemeLabel: "http",
                     model.AddressLabel: "example.com:8080",
@@ -40,7 +40,7 @@ import (
     "github.com/prometheus/prometheus/pkg/timestamp"
     pb "github.com/prometheus/prometheus/prompb"
     "github.com/prometheus/prometheus/promql"
-    "github.com/prometheus/prometheus/retrieval"
+    "github.com/prometheus/prometheus/scrape"
     "github.com/prometheus/prometheus/storage"
 )

@@ -50,7 +50,7 @@ type API struct {
     now func() time.Time
     db func() *tsdb.DB
     q func(ctx context.Context, mint, maxt int64) (storage.Querier, error)
-    targets func() []*retrieval.Target
+    targets func() []*scrape.Target
     alertmanagers func() []*url.URL
 }

@@ -60,7 +60,7 @@ func New(
     db func() *tsdb.DB,
     qe *promql.Engine,
     q func(ctx context.Context, mint, maxt int64) (storage.Querier, error),
-    targets func() []*retrieval.Target,
+    targets func() []*scrape.Target,
     alertmanagers func() []*url.URL,
     enableAdmin bool,
 ) *API {

web/web.go (24 lines changed)

@@ -55,8 +55,8 @@ import (
     "github.com/prometheus/prometheus/notifier"
     "github.com/prometheus/prometheus/pkg/labels"
     "github.com/prometheus/prometheus/promql"
-    "github.com/prometheus/prometheus/retrieval"
     "github.com/prometheus/prometheus/rules"
+    "github.com/prometheus/prometheus/scrape"
     "github.com/prometheus/prometheus/storage"
     "github.com/prometheus/prometheus/template"
     "github.com/prometheus/prometheus/util/httputil"
@@ -71,7 +71,7 @@ var localhostRepresentations = []string{"127.0.0.1", "localhost"}
 type Handler struct {
     logger log.Logger

-    scrapeManager *retrieval.ScrapeManager
+    scrapeManager *scrape.ScrapeManager
     ruleManager *rules.Manager
     queryEngine *promql.Engine
     context context.Context
@@ -125,7 +125,7 @@ type Options struct {
     TSDB func() *tsdb.DB
     Storage storage.Storage
     QueryEngine *promql.Engine
-    ScrapeManager *retrieval.ScrapeManager
+    ScrapeManager *scrape.ScrapeManager
     RuleManager *rules.Manager
     Notifier *notifier.Manager
     Version *PrometheusVersion
@@ -404,7 +404,7 @@ func (h *Handler) Run(ctx context.Context) error {
         h.options.TSDB,
         h.options.QueryEngine,
         h.options.Storage.Querier,
-        func() []*retrieval.Target {
+        func() []*scrape.Target {
             return h.options.ScrapeManager.Targets()
         },
         func() []*url.URL {
@@ -594,7 +594,7 @@ func (h *Handler) serviceDiscovery(w http.ResponseWriter, r *http.Request) {
     sort.Strings(index)
     scrapeConfigData := struct {
         Index []string
-        Targets map[string][]*retrieval.Target
+        Targets map[string][]*scrape.Target
     }{
         Index: index,
         Targets: targets,
@@ -604,7 +604,7 @@ func (h *Handler) serviceDiscovery(w http.ResponseWriter, r *http.Request) {

 func (h *Handler) targets(w http.ResponseWriter, r *http.Request) {
     // Bucket targets by job label
-    tps := map[string][]*retrieval.Target{}
+    tps := map[string][]*scrape.Target{}
     for _, t := range h.scrapeManager.Targets() {
         job := t.Labels().Get(model.JobLabel)
         tps[job] = append(tps[job], t)
@@ -617,7 +617,7 @@ func (h *Handler) targets(w http.ResponseWriter, r *http.Request) {
     }

     h.executeTemplate(w, "targets.html", struct {
-        TargetPools map[string][]*retrieval.Target
+        TargetPools map[string][]*scrape.Target
     }{
         TargetPools: tps,
     })
@@ -707,21 +707,21 @@ func tmplFuncs(consolesPath string, opts *Options) template_text.FuncMap {
             }
             return u
         },
-        "numHealthy": func(pool []*retrieval.Target) int {
+        "numHealthy": func(pool []*scrape.Target) int {
             alive := len(pool)
             for _, p := range pool {
-                if p.Health() != retrieval.HealthGood {
+                if p.Health() != scrape.HealthGood {
                     alive--
                 }
             }

             return alive
         },
-        "healthToClass": func(th retrieval.TargetHealth) string {
+        "healthToClass": func(th scrape.TargetHealth) string {
             switch th {
-            case retrieval.HealthUnknown:
+            case scrape.HealthUnknown:
                 return "warning"
-            case retrieval.HealthGood:
+            case scrape.HealthGood:
                 return "success"
             default:
                 return "danger"