// Copyright 2015 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"math"
	"net/http"
	"net/url"
	"os"
	"path/filepath"
	"sort"
	"strconv"
	"strings"
	"text/tabwriter"
	"time"

	"github.com/alecthomas/kingpin/v2"
	"github.com/go-kit/log"
	"github.com/google/pprof/profile"
	"github.com/prometheus/client_golang/api"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/testutil/promlint"
	config_util "github.com/prometheus/common/config"
	"github.com/prometheus/common/model"
	"github.com/prometheus/common/version"
	"github.com/prometheus/exporter-toolkit/web"
	"gopkg.in/yaml.v2"

	dto "github.com/prometheus/client_model/go"
	promconfig "github.com/prometheus/common/config"
	"github.com/prometheus/common/expfmt"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/discovery"
	"github.com/prometheus/prometheus/discovery/file"
	"github.com/prometheus/prometheus/discovery/kubernetes"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/rulefmt"
	"github.com/prometheus/prometheus/notifier"
	_ "github.com/prometheus/prometheus/plugins" // Register plugins.
	"github.com/prometheus/prometheus/promql/parser"
	"github.com/prometheus/prometheus/promql/promqltest"
	"github.com/prometheus/prometheus/scrape"
	"github.com/prometheus/prometheus/util/documentcli"
)

func init() {
	// This can be removed when the default validation scheme in common is updated.
	model.NameValidationScheme = model.UTF8Validation
}

const (
	successExitCode = 0
	failureExitCode = 1
	// Exit code 3 is used for "one or more lint issues detected".
	lintErrExitCode = 3

	lintOptionAll            = "all"
	lintOptionDuplicateRules = "duplicate-rules"
	lintOptionNone           = "none"

	checkHealth    = "/-/healthy"
	checkReadiness = "/-/ready"
)

var lintOptions = []string{lintOptionAll, lintOptionDuplicateRules, lintOptionNone}

func main() {
	var (
		httpRoundTripper   = api.DefaultRoundTripper
		serverURL          *url.URL
		remoteWriteURL     *url.URL
		httpConfigFilePath string
	)

	ctx := context.Background()

	app := kingpin.New(filepath.Base(os.Args[0]), "Tooling for the Prometheus monitoring system.").UsageWriter(os.Stdout)
	app.Version(version.Print("promtool"))
	app.HelpFlag.Short('h')

	checkCmd := app.Command("check", "Check the resources for validity.")

	experimental := app.Flag("experimental", "Enable experimental commands.").Bool()

	sdCheckCmd := checkCmd.Command("service-discovery", "Perform service discovery for the given job name and report the results, including relabeling.")
	sdConfigFile := sdCheckCmd.Arg("config-file", "The prometheus config file.").Required().ExistingFile()
	sdJobName := sdCheckCmd.Arg("job", "The job to run service discovery for.").Required().String()
	sdTimeout := sdCheckCmd.Flag("timeout", "The time to wait for discovery results.").Default("30s").Duration()

	checkConfigCmd := checkCmd.Command("config", "Check if the config files are valid or not.")
	configFiles := checkConfigCmd.Arg(
		"config-files",
		"The config files to check.",
	).Required().ExistingFiles()
	checkConfigSyntaxOnly := checkConfigCmd.Flag("syntax-only", "Only check the config file syntax, ignoring file and content validation referenced in the config").Bool()
	checkConfigLint := checkConfigCmd.Flag(
		"lint",
		"Linting checks to apply to the rules specified in the config. Available options are: "+strings.Join(lintOptions, ", ")+". Use --lint=none to disable linting",
	).Default(lintOptionDuplicateRules).String()
	checkConfigLintFatal := checkConfigCmd.Flag(
		"lint-fatal",
		"Make lint errors exit with exit code 3.").Default("false").Bool()

	checkWebConfigCmd := checkCmd.Command("web-config", "Check if the web config files are valid or not.")
	webConfigFiles := checkWebConfigCmd.Arg(
		"web-config-files",
		"The config files to check.",
	).Required().ExistingFiles()

	checkServerHealthCmd := checkCmd.Command("healthy", "Check if the Prometheus server is healthy.")
	checkServerHealthCmd.Flag("http.config.file", "HTTP client configuration file for promtool to connect to Prometheus.").PlaceHolder("<filename>").ExistingFileVar(&httpConfigFilePath)
	checkServerHealthCmd.Flag("url", "The URL for the Prometheus server.").Default("http://localhost:9090").URLVar(&serverURL)

	checkServerReadyCmd := checkCmd.Command("ready", "Check if the Prometheus server is ready.")
	checkServerReadyCmd.Flag("http.config.file", "HTTP client configuration file for promtool to connect to Prometheus.").PlaceHolder("<filename>").ExistingFileVar(&httpConfigFilePath)
	checkServerReadyCmd.Flag("url", "The URL for the Prometheus server.").Default("http://localhost:9090").URLVar(&serverURL)

	checkRulesCmd := checkCmd.Command("rules", "Check if the rule files are valid or not.")
	ruleFiles := checkRulesCmd.Arg(
		"rule-files",
		"The rule files to check, default is read from standard input.",
	).ExistingFiles()
	checkRulesLint := checkRulesCmd.Flag(
		"lint",
		"Linting checks to apply. Available options are: "+strings.Join(lintOptions, ", ")+". Use --lint=none to disable linting",
	).Default(lintOptionDuplicateRules).String()
	checkRulesLintFatal := checkRulesCmd.Flag(
		"lint-fatal",
		"Make lint errors exit with exit code 3.").Default("false").Bool()

	checkMetricsCmd := checkCmd.Command("metrics", checkMetricsUsage)
	checkMetricsExtended := checkCmd.Flag("extended", "Print extended information related to the cardinality of the metrics.").Bool()
	agentMode := checkConfigCmd.Flag("agent", "Check config file for Prometheus in Agent mode.").Bool()

	queryCmd := app.Command("query", "Run query against a Prometheus server.")
	queryCmdFmt := queryCmd.Flag("format", "Output format of the query.").Short('o').Default("promql").Enum("promql", "json")
	queryCmd.Flag("http.config.file", "HTTP client configuration file for promtool to connect to Prometheus.").PlaceHolder("<filename>").ExistingFileVar(&httpConfigFilePath)

	queryInstantCmd := queryCmd.Command("instant", "Run instant query.")
	queryInstantCmd.Arg("server", "Prometheus server to query.").Required().URLVar(&serverURL)
	queryInstantExpr := queryInstantCmd.Arg("expr", "PromQL query expression.").Required().String()
	queryInstantTime := queryInstantCmd.Flag("time", "Query evaluation time (RFC3339 or Unix timestamp).").String()

	queryRangeCmd := queryCmd.Command("range", "Run range query.")
	queryRangeCmd.Arg("server", "Prometheus server to query.").Required().URLVar(&serverURL)
	queryRangeExpr := queryRangeCmd.Arg("expr", "PromQL query expression.").Required().String()
	queryRangeHeaders := queryRangeCmd.Flag("header", "Extra headers to send to server.").StringMap()
	queryRangeBegin := queryRangeCmd.Flag("start", "Query range start time (RFC3339 or Unix timestamp).").String()
	queryRangeEnd := queryRangeCmd.Flag("end", "Query range end time (RFC3339 or Unix timestamp).").String()
	queryRangeStep := queryRangeCmd.Flag("step", "Query step size (duration).").Duration()

	querySeriesCmd := queryCmd.Command("series", "Run series query.")
	querySeriesCmd.Arg("server", "Prometheus server to query.").Required().URLVar(&serverURL)
	querySeriesMatch := querySeriesCmd.Flag("match", "Series selector. Can be specified multiple times.").Required().Strings()
	querySeriesBegin := querySeriesCmd.Flag("start", "Start time (RFC3339 or Unix timestamp).").String()
	querySeriesEnd := querySeriesCmd.Flag("end", "End time (RFC3339 or Unix timestamp).").String()

	debugCmd := app.Command("debug", "Fetch debug information.")
	debugPprofCmd := debugCmd.Command("pprof", "Fetch profiling debug information.")
	debugPprofServer := debugPprofCmd.Arg("server", "Prometheus server to get pprof files from.").Required().String()
	debugMetricsCmd := debugCmd.Command("metrics", "Fetch metrics debug information.")
	debugMetricsServer := debugMetricsCmd.Arg("server", "Prometheus server to get metrics from.").Required().String()
	debugAllCmd := debugCmd.Command("all", "Fetch all debug information.")
	debugAllServer := debugAllCmd.Arg("server", "Prometheus server to get all debug information from.").Required().String()

	queryLabelsCmd := queryCmd.Command("labels", "Run labels query.")
	queryLabelsCmd.Arg("server", "Prometheus server to query.").Required().URLVar(&serverURL)
	queryLabelsName := queryLabelsCmd.Arg("name", "Label name to provide label values for.").Required().String()
	queryLabelsBegin := queryLabelsCmd.Flag("start", "Start time (RFC3339 or Unix timestamp).").String()
	queryLabelsEnd := queryLabelsCmd.Flag("end", "End time (RFC3339 or Unix timestamp).").String()
	queryLabelsMatch := queryLabelsCmd.Flag("match", "Series selector. Can be specified multiple times.").Strings()

	queryAnalyzeCfg := &QueryAnalyzeConfig{}
	queryAnalyzeCmd := queryCmd.Command("analyze", "Run queries against your Prometheus to analyze the usage pattern of certain metrics.")
	queryAnalyzeCmd.Flag("server", "Prometheus server to query.").Required().URLVar(&serverURL)
	queryAnalyzeCmd.Flag("type", "Type of metric: histogram.").Required().StringVar(&queryAnalyzeCfg.metricType)
	queryAnalyzeCmd.Flag("duration", "Time frame to analyze.").Default("1h").DurationVar(&queryAnalyzeCfg.duration)
	queryAnalyzeCmd.Flag("time", "Query time (RFC3339 or Unix timestamp), defaults to now.").StringVar(&queryAnalyzeCfg.time)
	queryAnalyzeCmd.Flag("match", "Series selector. Can be specified multiple times.").Required().StringsVar(&queryAnalyzeCfg.matchers)

	pushCmd := app.Command("push", "Push to a Prometheus server.")
	pushCmd.Flag("http.config.file", "HTTP client configuration file for promtool to connect to Prometheus.").PlaceHolder("<filename>").ExistingFileVar(&httpConfigFilePath)
	pushMetricsCmd := pushCmd.Command("metrics", "Push metrics to a prometheus remote write (for testing purpose only).")
	pushMetricsCmd.Arg("remote-write-url", "Prometheus remote write url to push metrics.").Required().URLVar(&remoteWriteURL)
	metricFiles := pushMetricsCmd.Arg(
		"metric-files",
		"The metric files to push, default is read from standard input.",
	).ExistingFiles()
	pushMetricsLabels := pushMetricsCmd.Flag("label", "Label to attach to metrics. Can be specified multiple times.").Default("job=promtool").StringMap()
	pushMetricsTimeout := pushMetricsCmd.Flag("timeout", "The time to wait for pushing metrics.").Default("30s").Duration()
	pushMetricsHeaders := pushMetricsCmd.Flag("header", "Prometheus remote write header.").StringMap()

	testCmd := app.Command("test", "Unit testing.")
	junitOutFile := testCmd.Flag("junit", "File path to store JUnit XML test results.").OpenFile(os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o644)
	testRulesCmd := testCmd.Command("rules", "Unit tests for rules.")
	testRulesRun := testRulesCmd.Flag("run", "If set, will only run test groups whose names match the regular expression. Can be specified multiple times.").Strings()
	testRulesFiles := testRulesCmd.Arg(
		"test-rule-file",
		"The unit test file.",
	).Required().ExistingFiles()
	testRulesDiff := testRulesCmd.Flag("diff", "[Experimental] Print colored differential output between expected & received output.").Default("false").Bool()

	defaultDBPath := "data/"
	tsdbCmd := app.Command("tsdb", "Run tsdb commands.")

	tsdbBenchCmd := tsdbCmd.Command("bench", "Run benchmarks.")
	tsdbBenchWriteCmd := tsdbBenchCmd.Command("write", "Run a write performance benchmark.")
	benchWriteOutPath := tsdbBenchWriteCmd.Flag("out", "Set the output path.").Default("benchout").String()
	benchWriteNumMetrics := tsdbBenchWriteCmd.Flag("metrics", "Number of metrics to read.").Default("10000").Int()
	benchWriteNumScrapes := tsdbBenchWriteCmd.Flag("scrapes", "Number of scrapes to simulate.").Default("3000").Int()
	benchSamplesFile := tsdbBenchWriteCmd.Arg("file", "Input file with samples data, default is ("+filepath.Join("..", "..", "tsdb", "testdata", "20kseries.json")+").").Default(filepath.Join("..", "..", "tsdb", "testdata", "20kseries.json")).String()

	tsdbAnalyzeCmd := tsdbCmd.Command("analyze", "Analyze churn, label pair cardinality and compaction efficiency.")
	analyzePath := tsdbAnalyzeCmd.Arg("db path", "Database path (default is "+defaultDBPath+").").Default(defaultDBPath).String()
	analyzeBlockID := tsdbAnalyzeCmd.Arg("block id", "Block to analyze (default is the last block).").String()
	analyzeLimit := tsdbAnalyzeCmd.Flag("limit", "How many items to show in each list.").Default("20").Int()
	analyzeRunExtended := tsdbAnalyzeCmd.Flag("extended", "Run extended analysis.").Bool()
	analyzeMatchers := tsdbAnalyzeCmd.Flag("match", "Series selector to analyze. Only 1 set of matchers is supported now.").String()

	tsdbListCmd := tsdbCmd.Command("list", "List tsdb blocks.")
	listHumanReadable := tsdbListCmd.Flag("human-readable", "Print human readable values.").Short('r').Bool()
	listPath := tsdbListCmd.Arg("db path", "Database path (default is "+defaultDBPath+").").Default(defaultDBPath).String()

	tsdbDumpCmd := tsdbCmd.Command("dump", "Dump samples from a TSDB.")
	dumpPath := tsdbDumpCmd.Arg("db path", "Database path (default is "+defaultDBPath+").").Default(defaultDBPath).String()
	dumpSandboxDirRoot := tsdbDumpCmd.Flag("sandbox-dir-root", "Root directory where a sandbox directory will be created, this sandbox is used in case WAL replay generates chunks (default is the database path). The sandbox is cleaned up at the end.").String()
	dumpMinTime := tsdbDumpCmd.Flag("min-time", "Minimum timestamp to dump.").Default(strconv.FormatInt(math.MinInt64, 10)).Int64()
	dumpMaxTime := tsdbDumpCmd.Flag("max-time", "Maximum timestamp to dump.").Default(strconv.FormatInt(math.MaxInt64, 10)).Int64()
	dumpMatch := tsdbDumpCmd.Flag("match", "Series selector. Can be specified multiple times.").Default("{__name__=~'(?s:.*)'}").Strings()

	tsdbDumpOpenMetricsCmd := tsdbCmd.Command("dump-openmetrics", "[Experimental] Dump samples from a TSDB into OpenMetrics text format, excluding native histograms and staleness markers, which are not representable in OpenMetrics.")
	dumpOpenMetricsPath := tsdbDumpOpenMetricsCmd.Arg("db path", "Database path (default is "+defaultDBPath+").").Default(defaultDBPath).String()
	dumpOpenMetricsSandboxDirRoot := tsdbDumpOpenMetricsCmd.Flag("sandbox-dir-root", "Root directory where a sandbox directory will be created, this sandbox is used in case WAL replay generates chunks (default is the database path). The sandbox is cleaned up at the end.").String()
	dumpOpenMetricsMinTime := tsdbDumpOpenMetricsCmd.Flag("min-time", "Minimum timestamp to dump.").Default(strconv.FormatInt(math.MinInt64, 10)).Int64()
	dumpOpenMetricsMaxTime := tsdbDumpOpenMetricsCmd.Flag("max-time", "Maximum timestamp to dump.").Default(strconv.FormatInt(math.MaxInt64, 10)).Int64()
	dumpOpenMetricsMatch := tsdbDumpOpenMetricsCmd.Flag("match", "Series selector. Can be specified multiple times.").Default("{__name__=~'(?s:.*)'}").Strings()

	importCmd := tsdbCmd.Command("create-blocks-from", "[Experimental] Import samples from input and produce TSDB blocks. Please refer to the storage docs for more details.")
	importHumanReadable := importCmd.Flag("human-readable", "Print human readable values.").Short('r').Bool()
	importQuiet := importCmd.Flag("quiet", "Do not print created blocks.").Short('q').Bool()
	maxBlockDuration := importCmd.Flag("max-block-duration", "Maximum duration created blocks may span. Anything less than 2h is ignored.").Hidden().PlaceHolder("<duration>").Duration()
	openMetricsImportCmd := importCmd.Command("openmetrics", "Import samples from OpenMetrics input and produce TSDB blocks. Please refer to the storage docs for more details.")
	openMetricsLabels := openMetricsImportCmd.Flag("label", "Label to attach to metrics. Can be specified multiple times. Example --label=label_name=label_value").StringMap()
	importFilePath := openMetricsImportCmd.Arg("input file", "OpenMetrics file to read samples from.").Required().String()
	importDBPath := openMetricsImportCmd.Arg("output directory", "Output directory for generated blocks.").Default(defaultDBPath).String()
	importRulesCmd := importCmd.Command("rules", "Create blocks of data for new recording rules.")
	importRulesCmd.Flag("http.config.file", "HTTP client configuration file for promtool to connect to Prometheus.").PlaceHolder("<filename>").ExistingFileVar(&httpConfigFilePath)
	importRulesCmd.Flag("url", "The URL for the Prometheus API with the data where the rule will be backfilled from.").Default("http://localhost:9090").URLVar(&serverURL)
	importRulesStart := importRulesCmd.Flag("start", "The time to start backfilling the new rule from. Must be a RFC3339 formatted date or Unix timestamp. Required.").
		Required().String()
	importRulesEnd := importRulesCmd.Flag("end", "If an end time is provided, all recording rules in the rule files provided will be backfilled to the end time. Default will backfill up to 3 hours ago. Must be a RFC3339 formatted date or Unix timestamp.").String()
	importRulesOutputDir := importRulesCmd.Flag("output-dir", "Output directory for generated blocks.").Default("data/").String()
	importRulesEvalInterval := importRulesCmd.Flag("eval-interval", "How frequently to evaluate rules when backfilling if a value is not set in the recording rule files.").
		Default("60s").Duration()
	importRulesFiles := importRulesCmd.Arg(
		"rule-files",
		"A list of one or more files containing recording rules to be backfilled. All recording rules listed in the files will be backfilled. Alerting rules are not evaluated.",
	).Required().ExistingFiles()

	promQLCmd := app.Command("promql", "PromQL formatting and editing. Requires the --experimental flag.")
	promQLFormatCmd := promQLCmd.Command("format", "Format PromQL query to pretty printed form.")
	promQLFormatQuery := promQLFormatCmd.Arg("query", "PromQL query.").Required().String()

	promQLLabelsCmd := promQLCmd.Command("label-matchers", "Edit label matchers contained within an existing PromQL query.")
	promQLLabelsSetCmd := promQLLabelsCmd.Command("set", "Set a label matcher in the query.")
	promQLLabelsSetType := promQLLabelsSetCmd.Flag("type", "Type of the label matcher to set.").Short('t').Default("=").Enum("=", "!=", "=~", "!~")
	promQLLabelsSetQuery := promQLLabelsSetCmd.Arg("query", "PromQL query.").Required().String()
	promQLLabelsSetName := promQLLabelsSetCmd.Arg("name", "Name of the label matcher to set.").Required().String()
	promQLLabelsSetValue := promQLLabelsSetCmd.Arg("value", "Value of the label matcher to set.").Required().String()

	promQLLabelsDeleteCmd := promQLLabelsCmd.Command("delete", "Delete a label from the query.")
	promQLLabelsDeleteQuery := promQLLabelsDeleteCmd.Arg("query", "PromQL query.").Required().String()
	promQLLabelsDeleteName := promQLLabelsDeleteCmd.Arg("name", "Name of the label to delete.").Required().String()

	featureList := app.Flag("enable-feature", "Comma separated feature names to enable (only PromQL related and no-default-scrape-port). See https://prometheus.io/docs/prometheus/latest/feature_flags/ for the options and more details.").Default("").Strings()

	documentationCmd := app.Command("write-documentation", "Generate command line documentation. Internal use.").Hidden()

	parsedCmd := kingpin.MustParse(app.Parse(os.Args[1:]))

	var p printer
	switch *queryCmdFmt {
	case "json":
		p = &jsonPrinter{}
	case "promql":
		p = &promqlPrinter{}
	}

	if httpConfigFilePath != "" {
		if serverURL != nil && serverURL.User.Username() != "" {
			kingpin.Fatalf("Cannot set base auth in the server URL and use a http.config.file at the same time")
		}
		var err error
		httpConfig, _, err := config_util.LoadHTTPConfigFile(httpConfigFilePath)
		if err != nil {
			kingpin.Fatalf("Failed to load HTTP config file: %v", err)
		}

		httpRoundTripper, err = promconfig.NewRoundTripperFromConfig(*httpConfig, "promtool", config_util.WithUserAgent("promtool/"+version.Version))
		if err != nil {
			kingpin.Fatalf("Failed to create a new HTTP round tripper: %v", err)
		}
	}

	var noDefaultScrapePort bool
	for _, f := range *featureList {
		opts := strings.Split(f, ",")
		for _, o := range opts {
			switch o {
			case "no-default-scrape-port":
				noDefaultScrapePort = true
			case "":
				continue
			default:
				fmt.Printf(" WARNING: Unknown option for --enable-feature: %q\n", o)
			}
		}
	}

	switch parsedCmd {
	case sdCheckCmd.FullCommand():
		os.Exit(CheckSD(*sdConfigFile, *sdJobName, *sdTimeout, noDefaultScrapePort, prometheus.DefaultRegisterer))

	case checkConfigCmd.FullCommand():
		os.Exit(CheckConfig(*agentMode, *checkConfigSyntaxOnly, newLintConfig(*checkConfigLint, *checkConfigLintFatal), *configFiles...))

	case checkServerHealthCmd.FullCommand():
		os.Exit(checkErr(CheckServerStatus(serverURL, checkHealth, httpRoundTripper)))

	case checkServerReadyCmd.FullCommand():
		os.Exit(checkErr(CheckServerStatus(serverURL, checkReadiness, httpRoundTripper)))

	case checkWebConfigCmd.FullCommand():
		os.Exit(CheckWebConfig(*webConfigFiles...))

	case checkRulesCmd.FullCommand():
		os.Exit(CheckRules(newLintConfig(*checkRulesLint, *checkRulesLintFatal), *ruleFiles...))

	case checkMetricsCmd.FullCommand():
		os.Exit(CheckMetrics(*checkMetricsExtended))

	case pushMetricsCmd.FullCommand():
		os.Exit(PushMetrics(remoteWriteURL, httpRoundTripper, *pushMetricsHeaders, *pushMetricsTimeout, *pushMetricsLabels, *metricFiles...))

	case queryInstantCmd.FullCommand():
		os.Exit(QueryInstant(serverURL, httpRoundTripper, *queryInstantExpr, *queryInstantTime, p))

	case queryRangeCmd.FullCommand():
		os.Exit(QueryRange(serverURL, httpRoundTripper, *queryRangeHeaders, *queryRangeExpr, *queryRangeBegin, *queryRangeEnd, *queryRangeStep, p))

	case querySeriesCmd.FullCommand():
		os.Exit(QuerySeries(serverURL, httpRoundTripper, *querySeriesMatch, *querySeriesBegin, *querySeriesEnd, p))

	case debugPprofCmd.FullCommand():
		os.Exit(debugPprof(*debugPprofServer))

	case debugMetricsCmd.FullCommand():
		os.Exit(debugMetrics(*debugMetricsServer))

	case debugAllCmd.FullCommand():
		os.Exit(debugAll(*debugAllServer))

	case queryLabelsCmd.FullCommand():
		os.Exit(QueryLabels(serverURL, httpRoundTripper, *queryLabelsMatch, *queryLabelsName, *queryLabelsBegin, *queryLabelsEnd, p))

	case testRulesCmd.FullCommand():
		results := io.Discard
		if *junitOutFile != nil {
			results = *junitOutFile
		}
		os.Exit(RulesUnitTestResult(results,
			promqltest.LazyLoaderOpts{
				EnableAtModifier:     true,
				EnableNegativeOffset: true,
			},
			*testRulesRun,
			*testRulesDiff,
			*testRulesFiles...),
		)

	case tsdbBenchWriteCmd.FullCommand():
		os.Exit(checkErr(benchmarkWrite(*benchWriteOutPath, *benchSamplesFile, *benchWriteNumMetrics, *benchWriteNumScrapes)))

	case tsdbAnalyzeCmd.FullCommand():
		os.Exit(checkErr(analyzeBlock(ctx, *analyzePath, *analyzeBlockID, *analyzeLimit, *analyzeRunExtended, *analyzeMatchers)))

	case tsdbListCmd.FullCommand():
		os.Exit(checkErr(listBlocks(*listPath, *listHumanReadable)))

	case tsdbDumpCmd.FullCommand():
		os.Exit(checkErr(dumpSamples(ctx, *dumpPath, *dumpSandboxDirRoot, *dumpMinTime, *dumpMaxTime, *dumpMatch, formatSeriesSet)))

	case tsdbDumpOpenMetricsCmd.FullCommand():
		os.Exit(checkErr(dumpSamples(ctx, *dumpOpenMetricsPath, *dumpOpenMetricsSandboxDirRoot, *dumpOpenMetricsMinTime, *dumpOpenMetricsMaxTime, *dumpOpenMetricsMatch, formatSeriesSetOpenMetrics)))

	// TODO(aSquare14): Work on adding support for custom block size.
	case openMetricsImportCmd.FullCommand():
		os.Exit(backfillOpenMetrics(*importFilePath, *importDBPath, *importHumanReadable, *importQuiet, *maxBlockDuration, *openMetricsLabels))

	case importRulesCmd.FullCommand():
		os.Exit(checkErr(importRules(serverURL, httpRoundTripper, *importRulesStart, *importRulesEnd, *importRulesOutputDir, *importRulesEvalInterval, *maxBlockDuration, *importRulesFiles...)))

	case queryAnalyzeCmd.FullCommand():
		os.Exit(checkErr(queryAnalyzeCfg.run(serverURL, httpRoundTripper)))

	case documentationCmd.FullCommand():
		os.Exit(checkErr(documentcli.GenerateMarkdown(app.Model(), os.Stdout)))

	case promQLFormatCmd.FullCommand():
		checkExperimental(*experimental)
		os.Exit(checkErr(formatPromQL(*promQLFormatQuery)))

	case promQLLabelsSetCmd.FullCommand():
		checkExperimental(*experimental)
		os.Exit(checkErr(labelsSetPromQL(*promQLLabelsSetQuery, *promQLLabelsSetType, *promQLLabelsSetName, *promQLLabelsSetValue)))

	case promQLLabelsDeleteCmd.FullCommand():
		checkExperimental(*experimental)
		os.Exit(checkErr(labelsDeletePromQL(*promQLLabelsDeleteQuery, *promQLLabelsDeleteName)))
	}
}

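// checkExperimental exits with an error message when the --experimental flag was not set.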
func checkExperimental(f bool) {
	if !f {
		fmt.Fprintln(os.Stderr, "This command is experimental and requires the --experimental flag to be set.")
		os.Exit(1)
	}
}

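// errLint is wrapped into lint findings so that callers can distinguish them from hard failures via errors.Is.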
var errLint = fmt.Errorf("lint error")

type lintConfig struct {
	all            bool
	duplicateRules bool
	fatal          bool
}

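// newLintConfig parses the comma-separated lint option string into a lintConfig and warns about unknown options.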
func newLintConfig(stringVal string, fatal bool) lintConfig {
	items := strings.Split(stringVal, ",")
	ls := lintConfig{
		fatal: fatal,
	}
	for _, setting := range items {
		switch setting {
		case lintOptionAll:
			ls.all = true
		case lintOptionDuplicateRules:
			ls.duplicateRules = true
		case lintOptionNone:
		default:
			fmt.Printf("WARNING: unknown lint option %s\n", setting)
		}
	}
	return ls
}

func (ls lintConfig) lintDuplicateRules() bool {
	return ls.all || ls.duplicateRules
}

// CheckServerStatus checks the Prometheus server's health or readiness endpoint, as selected by checkEndpoint.
func CheckServerStatus(serverURL *url.URL, checkEndpoint string, roundTripper http.RoundTripper) error {
	if serverURL.Scheme == "" {
		serverURL.Scheme = "http"
	}

	config := api.Config{
		Address:      serverURL.String() + checkEndpoint,
		RoundTripper: roundTripper,
	}

	// Create new client.
	c, err := api.NewClient(config)
	if err != nil {
		fmt.Fprintln(os.Stderr, "error creating API client:", err)
		return err
	}

	request, err := http.NewRequest(http.MethodGet, config.Address, nil)
	if err != nil {
		return err
	}
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	response, dataBytes, err := c.Do(ctx, request)
	if err != nil {
		return err
	}
	if response.StatusCode != http.StatusOK {
		return fmt.Errorf("check failed: URL=%s, status=%d", serverURL, response.StatusCode)
	}

	fmt.Fprintln(os.Stderr, " SUCCESS: ", string(dataBytes))
	return nil
}

// CheckConfig validates configuration files.
func CheckConfig(agentMode, checkSyntaxOnly bool, lintSettings lintConfig, files ...string) int {
	failed := false
	hasErrors := false

	for _, f := range files {
		ruleFiles, err := checkConfig(agentMode, f, checkSyntaxOnly)
		if err != nil {
			fmt.Fprintln(os.Stderr, " FAILED:", err)
			hasErrors = true
			failed = true
		} else {
			if len(ruleFiles) > 0 {
				fmt.Printf(" SUCCESS: %d rule files found\n", len(ruleFiles))
			}
			fmt.Printf(" SUCCESS: %s is valid prometheus config file syntax\n", f)
		}
		fmt.Println()

		rulesFailed, rulesHasErrors := checkRules(ruleFiles, lintSettings)
		if rulesFailed {
			failed = rulesFailed
		}
		if rulesHasErrors {
			hasErrors = rulesHasErrors
		}
	}
	if failed && hasErrors {
		return failureExitCode
	}
	if failed && lintSettings.fatal {
		return lintErrExitCode
	}
	return successExitCode
}

// CheckWebConfig validates web configuration files.
func CheckWebConfig(files ...string) int {
	failed := false
	for _, f := range files {
		if err := web.Validate(f); err != nil {
			fmt.Fprintln(os.Stderr, f, "FAILED:", err)
			failed = true
			continue
		}
		fmt.Fprintln(os.Stderr, f, "SUCCESS")
	}
	if failed {
		return failureExitCode
	}
	return successExitCode
}

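// checkFileExists returns an error if fn is set but cannot be stat'ed; an empty filename is treated as not configured.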
func checkFileExists(fn string) error {
	// Nothing set, nothing to error on.
	if fn == "" {
		return nil
	}
	_, err := os.Stat(fn)
	return err
}

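// checkConfig validates a single Prometheus configuration file and returns the rule files it references.
// Unless checkSyntaxOnly is set, referenced files (rules, credentials, TLS material, file_sd targets) are checked as well.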
func checkConfig(agentMode bool, filename string, checkSyntaxOnly bool) ([]string, error) {
	fmt.Println("Checking", filename)

	cfg, err := config.LoadFile(filename, agentMode, false, log.NewNopLogger())
	if err != nil {
		return nil, err
	}

	var ruleFiles []string
	if !checkSyntaxOnly {
		for _, rf := range cfg.RuleFiles {
			rfs, err := filepath.Glob(rf)
			if err != nil {
				return nil, err
			}
			// If an explicit file was given, error if it is not accessible.
			if !strings.Contains(rf, "*") {
				if len(rfs) == 0 {
					return nil, fmt.Errorf("%q does not point to an existing file", rf)
				}
				if err := checkFileExists(rfs[0]); err != nil {
					return nil, fmt.Errorf("error checking rule file %q: %w", rfs[0], err)
				}
			}
			ruleFiles = append(ruleFiles, rfs...)
		}
	}

	var scfgs []*config.ScrapeConfig
	if checkSyntaxOnly {
		scfgs = cfg.ScrapeConfigs
	} else {
		var err error
		scfgs, err = cfg.GetScrapeConfigs()
		if err != nil {
			return nil, fmt.Errorf("error loading scrape configs: %w", err)
		}
	}

	for _, scfg := range scfgs {
		if !checkSyntaxOnly && scfg.HTTPClientConfig.Authorization != nil {
			if err := checkFileExists(scfg.HTTPClientConfig.Authorization.CredentialsFile); err != nil {
				return nil, fmt.Errorf("error checking authorization credentials or bearer token file %q: %w", scfg.HTTPClientConfig.Authorization.CredentialsFile, err)
			}
		}

		if err := checkTLSConfig(scfg.HTTPClientConfig.TLSConfig, checkSyntaxOnly); err != nil {
			return nil, err
		}

		for _, c := range scfg.ServiceDiscoveryConfigs {
			switch c := c.(type) {
			case *kubernetes.SDConfig:
				if err := checkTLSConfig(c.HTTPClientConfig.TLSConfig, checkSyntaxOnly); err != nil {
					return nil, err
				}
			case *file.SDConfig:
				if checkSyntaxOnly {
					break
				}
				for _, file := range c.Files {
					files, err := filepath.Glob(file)
					if err != nil {
						return nil, err
					}
					if len(files) != 0 {
						for _, f := range files {
							var targetGroups []*targetgroup.Group
							targetGroups, err = checkSDFile(f)
							if err != nil {
								return nil, fmt.Errorf("checking SD file %q: %w", file, err)
							}
							if err := checkTargetGroupsForScrapeConfig(targetGroups, scfg); err != nil {
								return nil, err
							}
						}
						continue
					}
					fmt.Printf(" WARNING: file %q for file_sd in scrape job %q does not exist\n", file, scfg.JobName)
				}
			case discovery.StaticConfig:
				if err := checkTargetGroupsForScrapeConfig(c, scfg); err != nil {
					return nil, err
				}
			}
		}
	}

	alertConfig := cfg.AlertingConfig
	for _, amcfg := range alertConfig.AlertmanagerConfigs {
		for _, c := range amcfg.ServiceDiscoveryConfigs {
			switch c := c.(type) {
			case *file.SDConfig:
				if checkSyntaxOnly {
					break
				}
				for _, file := range c.Files {
					files, err := filepath.Glob(file)
					if err != nil {
						return nil, err
					}
					if len(files) != 0 {
						for _, f := range files {
							var targetGroups []*targetgroup.Group
							targetGroups, err = checkSDFile(f)
							if err != nil {
								return nil, fmt.Errorf("checking SD file %q: %w", file, err)
							}
							if err := checkTargetGroupsForAlertmanager(targetGroups, amcfg); err != nil {
								return nil, err
							}
						}
						continue
					}
					fmt.Printf(" WARNING: file %q for file_sd in alertmanager config does not exist\n", file)
				}
			case discovery.StaticConfig:
				if err := checkTargetGroupsForAlertmanager(c, amcfg); err != nil {
					return nil, err
				}
			}
		}
	}

	return ruleFiles, nil
}

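// checkTLSConfig verifies that client cert and key are specified together and, unless checkSyntaxOnly is set, that the referenced files exist.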
func checkTLSConfig(tlsConfig config_util.TLSConfig, checkSyntaxOnly bool) error {
	if len(tlsConfig.CertFile) > 0 && len(tlsConfig.KeyFile) == 0 {
		return fmt.Errorf("client cert file %q specified without client key file", tlsConfig.CertFile)
	}
	if len(tlsConfig.KeyFile) > 0 && len(tlsConfig.CertFile) == 0 {
		return fmt.Errorf("client key file %q specified without client cert file", tlsConfig.KeyFile)
	}

	if checkSyntaxOnly {
		return nil
	}

	if err := checkFileExists(tlsConfig.CertFile); err != nil {
		return fmt.Errorf("error checking client cert file %q: %w", tlsConfig.CertFile, err)
	}
	if err := checkFileExists(tlsConfig.KeyFile); err != nil {
		return fmt.Errorf("error checking client key file %q: %w", tlsConfig.KeyFile, err)
	}

	return nil
}

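// checkSDFile parses a file_sd target file (.json, .yml or .yaml) and returns the target groups it defines.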
func checkSDFile(filename string) ([]*targetgroup.Group, error) {
	fd, err := os.Open(filename)
	if err != nil {
		return nil, err
	}
	defer fd.Close()

	content, err := io.ReadAll(fd)
	if err != nil {
		return nil, err
	}

	var targetGroups []*targetgroup.Group

	switch ext := filepath.Ext(filename); strings.ToLower(ext) {
	case ".json":
		if err := json.Unmarshal(content, &targetGroups); err != nil {
			return nil, err
		}
	case ".yml", ".yaml":
		if err := yaml.UnmarshalStrict(content, &targetGroups); err != nil {
			return nil, err
		}
	default:
		return nil, fmt.Errorf("invalid file extension: %q", ext)
	}

	for i, tg := range targetGroups {
		if tg == nil {
			return nil, fmt.Errorf("nil target group item found (index %d)", i)
		}
	}

	return targetGroups, nil
}

// CheckRules validates rule files.
func CheckRules(ls lintConfig, files ...string) int {
	failed := false
	hasErrors := false
	if len(files) == 0 {
		failed, hasErrors = checkRulesFromStdin(ls)
	} else {
		failed, hasErrors = checkRules(files, ls)
	}

	if failed && hasErrors {
		return failureExitCode
	}
	if failed && ls.fatal {
		return lintErrExitCode
	}

	return successExitCode
}

// checkRulesFromStdin validates rules read from standard input.
func checkRulesFromStdin(ls lintConfig) (bool, bool) {
	failed := false
	hasErrors := false
	fmt.Println("Checking standard input")
	data, err := io.ReadAll(os.Stdin)
	if err != nil {
		fmt.Fprintln(os.Stderr, " FAILED:", err)
		return true, true
	}
	rgs, errs := rulefmt.Parse(data)
	if errs != nil {
		failed = true
		fmt.Fprintln(os.Stderr, " FAILED:")
		for _, e := range errs {
			fmt.Fprintln(os.Stderr, e.Error())
			hasErrors = hasErrors || !errors.Is(e, errLint)
		}
		if hasErrors {
			return failed, hasErrors
		}
	}
	if n, errs := checkRuleGroups(rgs, ls); errs != nil {
		fmt.Fprintln(os.Stderr, " FAILED:")
		for _, e := range errs {
			fmt.Fprintln(os.Stderr, e.Error())
		}
		failed = true
		for _, err := range errs {
			hasErrors = hasErrors || !errors.Is(err, errLint)
		}
	} else {
		fmt.Printf(" SUCCESS: %d rules found\n", n)
	}
	fmt.Println()
	return failed, hasErrors
}

// checkRules validates rule files.
func checkRules(files []string, ls lintConfig) (bool, bool) {
	failed := false
	hasErrors := false
	for _, f := range files {
		fmt.Println("Checking", f)
		rgs, errs := rulefmt.ParseFile(f)
		if errs != nil {
			failed = true
			fmt.Fprintln(os.Stderr, " FAILED:")
			for _, e := range errs {
				fmt.Fprintln(os.Stderr, e.Error())
				hasErrors = hasErrors || !errors.Is(e, errLint)
			}
			if hasErrors {
				continue
			}
		}
		if n, errs := checkRuleGroups(rgs, ls); errs != nil {
			fmt.Fprintln(os.Stderr, " FAILED:")
			for _, e := range errs {
				fmt.Fprintln(os.Stderr, e.Error())
			}
			failed = true
			for _, err := range errs {
				hasErrors = hasErrors || !errors.Is(err, errLint)
			}
		} else {
			fmt.Printf(" SUCCESS: %d rules found\n", n)
		}
		fmt.Println()
	}
	return failed, hasErrors
}

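// checkRuleGroups counts the rules in the given groups and, if duplicate-rule linting is enabled, returns a lint error listing the duplicated rules.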
func checkRuleGroups(rgs *rulefmt.RuleGroups, lintSettings lintConfig) (int, []error) {
	numRules := 0
	for _, rg := range rgs.Groups {
		numRules += len(rg.Rules)
	}

	if lintSettings.lintDuplicateRules() {
		dRules := checkDuplicates(rgs.Groups)
		if len(dRules) != 0 {
			errMessage := fmt.Sprintf("%d duplicate rule(s) found.\n", len(dRules))
			for _, n := range dRules {
				errMessage += fmt.Sprintf("Metric: %s\nLabel(s):\n", n.metric)
				n.label.Range(func(l labels.Label) {
					errMessage += fmt.Sprintf("\t%s: %s\n", l.Name, l.Value)
				})
			}
			errMessage += "Might cause inconsistency while recording expressions"
			return 0, []error{fmt.Errorf("%w %s", errLint, errMessage)}
		}
	}

	return numRules, nil
}

type compareRuleType struct {
	metric string
	label  labels.Labels
}

type compareRuleTypes []compareRuleType

func (c compareRuleTypes) Len() int           { return len(c) }
func (c compareRuleTypes) Swap(i, j int)      { c[i], c[j] = c[j], c[i] }
func (c compareRuleTypes) Less(i, j int) bool { return compare(c[i], c[j]) < 0 }

func compare(a, b compareRuleType) int {
	if res := strings.Compare(a.metric, b.metric); res != 0 {
		return res
	}

	return labels.Compare(a.label, b.label)
}

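// checkDuplicates returns rules that share both the same metric or alert name and an identical label set.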
func checkDuplicates(groups []rulefmt.RuleGroup) []compareRuleType {
	var duplicates []compareRuleType
	var rules compareRuleTypes

	for _, group := range groups {
		for _, rule := range group.Rules {
			rules = append(rules, compareRuleType{
				metric: ruleMetric(rule),
				label:  labels.FromMap(rule.Labels),
			})
		}
	}
	if len(rules) < 2 {
		return duplicates
	}
	sort.Sort(rules)

	last := rules[0]
	for i := 1; i < len(rules); i++ {
		if compare(last, rules[i]) == 0 {
			// Don't add a duplicated rule multiple times.
			if len(duplicates) == 0 || compare(last, duplicates[len(duplicates)-1]) != 0 {
				duplicates = append(duplicates, rules[i])
			}
		}
		last = rules[i]
	}

	return duplicates
}

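// ruleMetric returns the alert name for alerting rules and the record name for recording rules.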
func ruleMetric(rule rulefmt.RuleNode) string {
	if rule.Alert.Value != "" {
		return rule.Alert.Value
	}
	return rule.Record.Value
}

var checkMetricsUsage = strings.TrimSpace(`
Pass Prometheus metrics over stdin to lint them for consistency and correctness.

examples:

$ cat metrics.prom | promtool check metrics

$ curl -s http://localhost:9090/metrics | promtool check metrics
`)

// CheckMetrics performs a linting pass on input metrics.
func CheckMetrics(extended bool) int {
	var buf bytes.Buffer
	tee := io.TeeReader(os.Stdin, &buf)
	l := promlint.New(tee)
	problems, err := l.Lint()
	if err != nil {
		fmt.Fprintln(os.Stderr, "error while linting:", err)
		return failureExitCode
	}

	for _, p := range problems {
		fmt.Fprintln(os.Stderr, p.Metric, p.Text)
	}

	if len(problems) > 0 {
		return lintErrExitCode
	}

	if extended {
		stats, total, err := checkMetricsExtended(&buf)
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			return failureExitCode
		}
		w := tabwriter.NewWriter(os.Stdout, 4, 4, 4, ' ', tabwriter.TabIndent)
		fmt.Fprintf(w, "Metric\tCardinality\tPercentage\t\n")
		for _, stat := range stats {
			fmt.Fprintf(w, "%s\t%d\t%.2f%%\t\n", stat.name, stat.cardinality, stat.percentage*100)
		}
		fmt.Fprintf(w, "Total\t%d\t%.f%%\t\n", total, 100.)
		w.Flush()
	}

	return successExitCode
}

type metricStat struct {
	name        string
	cardinality int
	percentage  float64
}

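// checkMetricsExtended parses metrics in text exposition format and returns per-metric cardinality statistics,
// sorted by cardinality in descending order, along with the total number of series.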
func checkMetricsExtended(r io.Reader) ([]metricStat, int, error) {
	p := expfmt.TextParser{}
	metricFamilies, err := p.TextToMetricFamilies(r)
	if err != nil {
		return nil, 0, fmt.Errorf("error while parsing text to metric families: %w", err)
	}

	var total int
	stats := make([]metricStat, 0, len(metricFamilies))
	for _, mf := range metricFamilies {
		var cardinality int
		switch mf.GetType() {
		case dto.MetricType_COUNTER, dto.MetricType_GAUGE, dto.MetricType_UNTYPED:
			cardinality = len(mf.Metric)
		case dto.MetricType_HISTOGRAM:
			// Histogram metrics include sum, count, and buckets.
			buckets := len(mf.Metric[0].Histogram.Bucket)
			cardinality = len(mf.Metric) * (2 + buckets)
		case dto.MetricType_SUMMARY:
			// Summary metrics include sum, count, and quantiles.
			quantiles := len(mf.Metric[0].Summary.Quantile)
			cardinality = len(mf.Metric) * (2 + quantiles)
		default:
			cardinality = len(mf.Metric)
		}
		stats = append(stats, metricStat{name: mf.GetName(), cardinality: cardinality})
		total += cardinality
	}

	for i := range stats {
		stats[i].percentage = float64(stats[i].cardinality) / float64(total)
	}

	sort.SliceStable(stats, func(i, j int) bool {
		return stats[i].cardinality > stats[j].cardinality
	})

	return stats, total, nil
}

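// endpointsGroup maps debug endpoints to the file names their responses are stored under in the debug tarball;
// postProcess optionally transforms the response body before it is written.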
type endpointsGroup struct {
urlToFilename map [ string ] string
postProcess func ( b [ ] byte ) ( [ ] byte , error )
}
var (
pprofEndpoints = [ ] endpointsGroup {
{
urlToFilename : map [ string ] string {
"/debug/pprof/profile?seconds=30" : "cpu.pb" ,
"/debug/pprof/block" : "block.pb" ,
"/debug/pprof/goroutine" : "goroutine.pb" ,
"/debug/pprof/heap" : "heap.pb" ,
"/debug/pprof/mutex" : "mutex.pb" ,
"/debug/pprof/threadcreate" : "threadcreate.pb" ,
} ,
postProcess : func ( b [ ] byte ) ( [ ] byte , error ) {
p , err := profile . Parse ( bytes . NewReader ( b ) )
if err != nil {
return nil , err
}
var buf bytes . Buffer
if err := p . WriteUncompressed ( & buf ) ; err != nil {
2022-05-23 23:58:59 -07:00
return nil , fmt . Errorf ( "writing the profile to the buffer: %w" , err )
2018-11-23 07:57:31 -08:00
}
return buf . Bytes ( ) , nil
} ,
2018-07-18 00:52:01 -07:00
} ,
2018-11-23 07:57:31 -08:00
{
urlToFilename : map [ string ] string {
"/debug/pprof/trace?seconds=30" : "trace.pb" ,
} ,
} ,
}
metricsEndpoints = [ ] endpointsGroup {
{
urlToFilename : map [ string ] string {
"/metrics" : "metrics.txt" ,
} ,
} ,
}
allEndpoints = append ( pprofEndpoints , metricsEndpoints ... )
)
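
// Extending the debug tarball with another endpoint is a matter of adding an
// entry to one of the groups above; a hypothetical sketch (this endpoint is
// not actually fetched by promtool):
//
//	pprofEndpoints = append(pprofEndpoints, endpointsGroup{
//		urlToFilename: map[string]string{
//			"/debug/pprof/allocs": "allocs.pb",
//		},
//	})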

// debugPprof writes the pprof endpoints of the server at url into a debug tarball.
func debugPprof(url string) int {
	if err := debugWrite(debugWriterConfig{
		serverURL:      url,
		tarballName:    "debug.tar.gz",
		endPointGroups: pprofEndpoints,
	}); err != nil {
		fmt.Fprintln(os.Stderr, "error completing debug command:", err)
		return failureExitCode
	}
	return successExitCode
}

// debugMetrics writes the /metrics endpoint of the server at url into a debug tarball.
func debugMetrics(url string) int {
	if err := debugWrite(debugWriterConfig{
		serverURL:      url,
		tarballName:    "debug.tar.gz",
		endPointGroups: metricsEndpoints,
	}); err != nil {
		fmt.Fprintln(os.Stderr, "error completing debug command:", err)
		return failureExitCode
	}
	return successExitCode
}

// debugAll writes both the pprof and metrics endpoints of the server at url into a debug tarball.
func debugAll(url string) int {
	if err := debugWrite(debugWriterConfig{
		serverURL:      url,
		tarballName:    "debug.tar.gz",
		endPointGroups: allEndpoints,
	}); err != nil {
		fmt.Fprintln(os.Stderr, "error completing debug command:", err)
		return failureExitCode
	}
	return successExitCode
}
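
// Typical invocations of the commands above (assuming a Prometheus server
// listening locally; the CLI wiring lives elsewhere in this package):
//
//	$ promtool debug pprof   http://localhost:9090/
//	$ promtool debug metrics http://localhost:9090/
//	$ promtool debug all     http://localhost:9090/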

type printer interface {
	printValue(v model.Value)
	printSeries(v []model.LabelSet)
	printLabelValues(v model.LabelValues)
}

type promqlPrinter struct{}

func (p *promqlPrinter) printValue(v model.Value) {
	fmt.Println(v)
}

func (p *promqlPrinter) printSeries(val []model.LabelSet) {
	for _, v := range val {
		fmt.Println(v)
	}
}

func (p *promqlPrinter) printLabelValues(val model.LabelValues) {
	for _, v := range val {
		fmt.Println(v)
	}
}

type jsonPrinter struct{}

func (j *jsonPrinter) printValue(v model.Value) {
	//nolint:errcheck
	json.NewEncoder(os.Stdout).Encode(v)
}

func (j *jsonPrinter) printSeries(v []model.LabelSet) {
	//nolint:errcheck
	json.NewEncoder(os.Stdout).Encode(v)
}

func (j *jsonPrinter) printLabelValues(v model.LabelValues) {
	//nolint:errcheck
	json.NewEncoder(os.Stdout).Encode(v)
}
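
// A minimal sketch of how these printers are meant to be used (the selection
// logic is illustrative; the real wiring happens where the query subcommands
// parse their output-format flag):
//
//	var p printer = &promqlPrinter{}
//	if wantJSON {
//		p = &jsonPrinter{}
//	}
//	p.printSeries([]model.LabelSet{{"__name__": "up", "job": "prometheus"}})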

// importRules backfills recording rules from the files provided. The output is
// blocks of data at the outputDir location.
func importRules(url *url.URL, roundTripper http.RoundTripper, start, end, outputDir string, evalInterval, maxBlockDuration time.Duration, files ...string) error {
	ctx := context.Background()
	var stime, etime time.Time
	var err error
	if end == "" {
		etime = time.Now().UTC().Add(-3 * time.Hour)
	} else {
		etime, err = parseTime(end)
		if err != nil {
			return fmt.Errorf("error parsing end time: %w", err)
		}
	}

	stime, err = parseTime(start)
	if err != nil {
		return fmt.Errorf("error parsing start time: %w", err)
	}

	if !stime.Before(etime) {
		return errors.New("start time is not before end time")
	}

	cfg := ruleImporterConfig{
		outputDir:        outputDir,
		start:            stime,
		end:              etime,
		evalInterval:     evalInterval,
		maxBlockDuration: maxBlockDuration,
	}
	api, err := newAPI(url, roundTripper, nil)
	if err != nil {
		return fmt.Errorf("new api client error: %w", err)
	}

	ruleImporter := newRuleImporter(log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr)), cfg, api)

	errs := ruleImporter.loadGroups(ctx, files)
	for _, err := range errs {
		if err != nil {
			return fmt.Errorf("rule importer parse error: %w", err)
		}
	}

	errs = ruleImporter.importAll(ctx)
	for _, err := range errs {
		fmt.Fprintln(os.Stderr, "rule importer error:", err)
	}
	if len(errs) > 0 {
		return errors.New("error importing rules")
	}

	return nil
}
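
// importRules backs the rule backfilling subcommand; a typical invocation
// looks roughly like this (flag set abbreviated, consult
// `promtool tsdb create-blocks-from rules --help` for the authoritative list):
//
//	$ promtool tsdb create-blocks-from rules \
//	    --start 2024-01-01T00:00:00Z --end 2024-01-02T00:00:00Z \
//	    --url http://localhost:9090 rules.yml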

// checkTargetGroupsForAlertmanager checks that every target group resolves to
// Alertmanager endpoints under the given Alertmanager config.
func checkTargetGroupsForAlertmanager(targetGroups []*targetgroup.Group, amcfg *config.AlertmanagerConfig) error {
	for _, tg := range targetGroups {
		if _, _, err := notifier.AlertmanagerFromGroup(tg, amcfg); err != nil {
			return err
		}
	}

	return nil
}

// checkTargetGroupsForScrapeConfig checks that targets can be built from every
// target group under the given scrape config.
func checkTargetGroupsForScrapeConfig(targetGroups []*targetgroup.Group, scfg *config.ScrapeConfig) error {
	var targets []*scrape.Target
	lb := labels.NewBuilder(labels.EmptyLabels())
	for _, tg := range targetGroups {
		var failures []error
		targets, failures = scrape.TargetsFromGroup(tg, scfg, false, targets, lb)
		if len(failures) > 0 {
			first := failures[0]
			return first
		}
	}

	return nil
}
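
// A minimal sketch of the input these checks operate on (values and the scfg
// variable are illustrative): a target group produced by service discovery
// carries the discovered targets plus group-level labels.
//
//	tg := &targetgroup.Group{
//		Targets: []model.LabelSet{{model.AddressLabel: "10.0.0.1:9100"}},
//		Labels:  model.LabelSet{"env": "demo"},
//		Source:  "example",
//	}
//	err := checkTargetGroupsForScrapeConfig([]*targetgroup.Group{tg}, scfg)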

// formatPromQL parses the given query and prints it in pretty-printed form.
func formatPromQL(query string) error {
	expr, err := parser.ParseExpr(query)
	if err != nil {
		return err
	}

	fmt.Println(expr.Pretty(0))
	return nil
}

// labelsSetPromQL sets the matcher with the given name on every vector selector
// in the query, adding it where absent, and prints the rewritten query.
func labelsSetPromQL(query, labelMatchType, name, value string) error {
	expr, err := parser.ParseExpr(query)
	if err != nil {
		return err
	}

	var matchType labels.MatchType
	switch labelMatchType {
	case parser.ItemType(parser.EQL).String():
		matchType = labels.MatchEqual
	case parser.ItemType(parser.NEQ).String():
		matchType = labels.MatchNotEqual
	case parser.ItemType(parser.EQL_REGEX).String():
		matchType = labels.MatchRegexp
	case parser.ItemType(parser.NEQ_REGEX).String():
		matchType = labels.MatchNotRegexp
	default:
		return fmt.Errorf("invalid label match type: %s", labelMatchType)
	}

	parser.Inspect(expr, func(node parser.Node, path []parser.Node) error {
		if n, ok := node.(*parser.VectorSelector); ok {
			var found bool
			for i, l := range n.LabelMatchers {
				if l.Name == name {
					n.LabelMatchers[i].Type = matchType
					n.LabelMatchers[i].Value = value
					found = true
				}
			}
			if !found {
				n.LabelMatchers = append(n.LabelMatchers, &labels.Matcher{
					Type:  matchType,
					Name:  name,
					Value: value,
				})
			}
		}
		return nil
	})

	fmt.Println(expr.Pretty(0))
	return nil
}

// labelsDeletePromQL drops matchers with the given name from every vector
// selector in the query and prints the rewritten query.
func labelsDeletePromQL(query, name string) error {
	expr, err := parser.ParseExpr(query)
	if err != nil {
		return err
	}

	parser.Inspect(expr, func(node parser.Node, path []parser.Node) error {
		if n, ok := node.(*parser.VectorSelector); ok {
			for i, l := range n.LabelMatchers {
				if l.Name == name {
					n.LabelMatchers = append(n.LabelMatchers[:i], n.LabelMatchers[i+1:]...)
				}
			}
		}
		return nil
	})

	fmt.Println(expr.Pretty(0))
	return nil
}
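
// Illustrative behaviour of the two label-rewriting helpers above (output is
// approximate; the exact rendering is up to expr.Pretty):
//
//	labelsSetPromQL(`up{job="prometheus"}`, "=", "instance", "localhost:9090")
//	// prints something like: up{instance="localhost:9090",job="prometheus"}
//
//	labelsDeletePromQL(`up{job="prometheus"}`, "job")
//	// prints something like: up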