mirror of https://github.com/prometheus/prometheus.git (synced 2024-12-24 21:24:05 -08:00)
b96b89ef8b
Scraping targets are synced by creating the full set, then adding/removing any that have changed. This PR speeds up creation of the full set. I added a benchmark for `TargetsFromGroup`; it uses configuration from a typical Kubernetes SD. The crux of the change is to do relabeling inside labels.Builder instead of converting to labels.Labels and back again for every rule. The change is broken into several commits for easier review. This is a breaking change to `scrape.PopulateLabels()`, but `relabel.Process` is left as-is, with a new `relabel.ProcessBuilder` option.
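A minimal sketch of the builder-based flow described above (`relabel.ProcessBuilder` is the entry point this change adds; the starting labels and the empty rule list are purely illustrative):

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/relabel"
)

func main() {
	// Hypothetical discovered labels for a single target.
	lb := labels.NewBuilder(labels.FromStrings(
		"__address__", "10.0.0.1:8080",
		"job", "kubernetes-pods",
	))

	// Relabel rules would normally come from a scrape config; none here.
	var cfgs []*relabel.Config

	// Old shape: lset, keep := relabel.Process(lb.Labels(), cfgs...)
	// which meant converting to labels.Labels and back again per pass.
	// New shape: the rules mutate the builder in place, so labels.Labels
	// is materialized at most once, after all rules have run.
	if keep := relabel.ProcessBuilder(lb, cfgs...); keep {
		fmt.Println("target kept")
	}
}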
155 lines
4.2 KiB
Go
// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os"
	"reflect"
	"time"

	"github.com/go-kit/log"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/discovery"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/scrape"
)

type sdCheckResult struct {
	DiscoveredLabels labels.Labels `json:"discoveredLabels"`
	Labels           labels.Labels `json:"labels"`
	Error            error         `json:"error,omitempty"`
}

// CheckSD performs service discovery for the given job name and reports the results.
func CheckSD(sdConfigFiles, sdJobName string, sdTimeout time.Duration, noDefaultScrapePort bool) int {
	logger := log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr))

	cfg, err := config.LoadFile(sdConfigFiles, false, false, logger)
	if err != nil {
		fmt.Fprintln(os.Stderr, "Cannot load config", err)
		return failureExitCode
	}

	var scrapeConfig *config.ScrapeConfig
	scfgs, err := cfg.GetScrapeConfigs()
	if err != nil {
		fmt.Fprintln(os.Stderr, "Cannot load scrape configs", err)
		return failureExitCode
	}

	jobs := []string{}
	jobMatched := false
	for _, v := range scfgs {
		jobs = append(jobs, v.JobName)
		if v.JobName == sdJobName {
			jobMatched = true
			scrapeConfig = v
			break
		}
	}

	if !jobMatched {
		fmt.Fprintf(os.Stderr, "Job %s not found. Select one of:\n", sdJobName)
		for _, job := range jobs {
			fmt.Fprintf(os.Stderr, "\t%s\n", job)
		}
		return failureExitCode
	}

	targetGroupChan := make(chan []*targetgroup.Group)
	ctx, cancel := context.WithTimeout(context.Background(), sdTimeout)
	defer cancel()

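	// Start one discoverer per service-discovery mechanism configured for
	// the job; all of them publish their target groups to the same channel.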
	for _, cfg := range scrapeConfig.ServiceDiscoveryConfigs {
		d, err := cfg.NewDiscoverer(discovery.DiscovererOptions{Logger: logger})
		if err != nil {
			fmt.Fprintln(os.Stderr, "Could not create new discoverer", err)
			return failureExitCode
		}
		go d.Run(ctx, targetGroupChan)
	}

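	// Gather target groups from the shared channel, keyed by source,
	// until the timeout set above cancels the context.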
	var targetGroups []*targetgroup.Group
	sdCheckResults := make(map[string][]*targetgroup.Group)
outerLoop:
	for {
		select {
		case targetGroups = <-targetGroupChan:
			for _, tg := range targetGroups {
				sdCheckResults[tg.Source] = append(sdCheckResults[tg.Source], tg)
			}
		case <-ctx.Done():
			break outerLoop
		}
	}
	results := []sdCheckResult{}
	for _, tgs := range sdCheckResults {
		results = append(results, getSDCheckResult(tgs, scrapeConfig, noDefaultScrapePort)...)
	}

	res, err := json.MarshalIndent(results, "", " ")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Could not marshal result json: %s", err)
		return failureExitCode
	}

	fmt.Printf("%s", res)
	return successExitCode
}

func getSDCheckResult(targetGroups []*targetgroup.Group, scrapeConfig *config.ScrapeConfig, noDefaultScrapePort bool) []sdCheckResult {
	sdCheckResults := []sdCheckResult{}
	lb := labels.NewBuilder(labels.EmptyLabels())
	for _, targetGroup := range targetGroups {
		for _, target := range targetGroup.Targets {
			lb.Reset(labels.EmptyLabels())

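			// Build the discovered label set: the target's own labels first,
			// then any group-level labels the target does not override.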
			for name, value := range target {
				lb.Set(string(name), string(value))
			}

			for name, value := range targetGroup.Labels {
				if _, ok := target[name]; !ok {
					lb.Set(string(name), string(value))
				}
			}

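			// PopulateLabels runs the scrape config's relabeling on the builder
			// and returns the final and the original (discovered) label sets;
			// the final set is empty when relabeling drops the target.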
			res, orig, err := scrape.PopulateLabels(lb, scrapeConfig, noDefaultScrapePort)
			result := sdCheckResult{
				DiscoveredLabels: orig,
				Labels:           res,
				Error:            err,
			}

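			// Report each distinct result only once.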
			duplicateRes := false
			for _, sdCheckRes := range sdCheckResults {
				if reflect.DeepEqual(sdCheckRes, result) {
					duplicateRes = true
					break
				}
			}

			if !duplicateRes {
				sdCheckResults = append(sdCheckResults, result)
			}
		}
	}
	return sdCheckResults
}
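For context, CheckSD is what backs promtool's `check service-discovery` subcommand; a typical invocation (config path and job name illustrative) is:

	promtool check service-discovery ./prometheus.yml kubernetes-pods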