// Copyright 2021 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os"
	"reflect"
	"time"

	"github.com/go-kit/log"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/discovery"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/scrape"
)

// sdCheckResult holds the labels of a discovered target before and after
// relabeling, along with any error reported while populating the labels.
type sdCheckResult struct {
	DiscoveredLabels labels.Labels `json:"discoveredLabels"`
	Labels           labels.Labels `json:"labels"`
	Error            error         `json:"error,omitempty"`
}

// CheckSD performs service discovery for the given job name and reports the results.
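//
// A minimal usage sketch (illustrative only; the config path, job name, and
// timeout are example values, and the exit-code handling is up to the caller):
//
//	exitCode := CheckSD("prometheus.yml", "node", 30*time.Second)
//	os.Exit(exitCode)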
func CheckSD(sdConfigFiles, sdJobName string, sdTimeout time.Duration) int {
	logger := log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr))

	cfg, err := config.LoadFile(sdConfigFiles, false, false, logger)
	if err != nil {
		fmt.Fprintln(os.Stderr, "Cannot load config", err)
		return failureExitCode
	}

	var scrapeConfig *config.ScrapeConfig
	jobs := []string{}
	jobMatched := false
	for _, v := range cfg.ScrapeConfigs {
		jobs = append(jobs, v.JobName)
		if v.JobName == sdJobName {
			jobMatched = true
			scrapeConfig = v
			break
		}
	}

	if !jobMatched {
		fmt.Fprintf(os.Stderr, "Job %s not found. Select one of:\n", sdJobName)
		for _, job := range jobs {
			fmt.Fprintf(os.Stderr, "\t%s\n", job)
		}
		return failureExitCode
	}

	targetGroupChan := make(chan []*targetgroup.Group)
	ctx, cancel := context.WithTimeout(context.Background(), sdTimeout)
	defer cancel()

	// Start one discoverer per service discovery configuration of the
	// selected job; each sends its target groups to targetGroupChan.
	for _, cfg := range scrapeConfig.ServiceDiscoveryConfigs {
		d, err := cfg.NewDiscoverer(discovery.DiscovererOptions{Logger: logger})
		if err != nil {
			fmt.Fprintln(os.Stderr, "Could not create new discoverer", err)
			return failureExitCode
		}
		go d.Run(ctx, targetGroupChan)
	}

	// Collect the target groups sent by the discoverers, keyed by source,
	// until the timeout expires.
	var targetGroups []*targetgroup.Group
	sdCheckResults := make(map[string][]*targetgroup.Group)
outerLoop:
	for {
		select {
		case targetGroups = <-targetGroupChan:
			for _, tg := range targetGroups {
				sdCheckResults[tg.Source] = append(sdCheckResults[tg.Source], tg)
			}
		case <-ctx.Done():
			break outerLoop
		}
	}
	results := []sdCheckResult{}
	for _, tgs := range sdCheckResults {
		results = append(results, getSDCheckResult(tgs, scrapeConfig)...)
	}

	res, err := json.MarshalIndent(results, "", " ")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Could not marshal result json: %s", err)
		return failureExitCode
	}

	fmt.Printf("%s", res)
	return successExitCode
}

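// The sketch below is a rough illustration of pushing a single, hand-built
// target group through getSDCheckResult; the target address and the
// scrapeConfig variable are made up, and model refers to
// github.com/prometheus/common/model:
//
//	tg := &targetgroup.Group{
//		Targets: []model.LabelSet{{model.AddressLabel: "localhost:9100"}},
//		Source:  "example",
//	}
//	checkResults := getSDCheckResult([]*targetgroup.Group{tg}, scrapeConfig)
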
// getSDCheckResult applies the scrape configuration's relabeling to every
// target in the given target groups and returns the deduplicated results.
func getSDCheckResult(targetGroups []*targetgroup.Group, scrapeConfig *config.ScrapeConfig) []sdCheckResult {
	sdCheckResults := []sdCheckResult{}
	for _, targetGroup := range targetGroups {
		for _, target := range targetGroup.Targets {
			labelSlice := make([]labels.Label, 0, len(target)+len(targetGroup.Labels))

			// Labels discovered on the target itself.
			for name, value := range target {
				labelSlice = append(labelSlice, labels.Label{Name: string(name), Value: string(value)})
			}

			// Group-level labels, unless the target already sets them.
			for name, value := range targetGroup.Labels {
				if _, ok := target[name]; !ok {
					labelSlice = append(labelSlice, labels.Label{Name: string(name), Value: string(value)})
				}
			}

			// Run the scrape config's relabeling over the discovered labels.
			targetLabels := labels.New(labelSlice...)
			res, orig, err := scrape.PopulateLabels(targetLabels, scrapeConfig)
			result := sdCheckResult{
				DiscoveredLabels: orig,
				Labels:           res,
				Error:            err,
			}

			duplicateRes := false
			for _, sdCheckRes := range sdCheckResults {
				if reflect.DeepEqual(sdCheckRes, result) {
					duplicateRes = true
					break
				}
			}

			if !duplicateRes {
				sdCheckResults = append(sdCheckResults, result)
			}
		}
	}
	return sdCheckResults
}