Merge pull request #8917 from stevekuznetsov/skuznets/silence-backfill

promtool: backfill: allow silencing output
Commit ba76bceb6b by Julien Pivotto, 2021-06-14 23:27:18 +02:00, committed by GitHub
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in the database)
4 changed files with 11 additions and 7 deletions
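
The new flag suppresses the listing of created blocks that the importer otherwise prints after every flush. A hypothetical invocation (the input file name and output directory below are placeholders; their exact names and defaults are defined outside this diff):

    promtool tsdb create-blocks-from --quiet openmetrics metrics.om ./data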


@@ -65,7 +65,7 @@ func getMinAndMaxTimestamps(p textparse.Parser) (int64, int64, error) {
 	return maxt, mint, nil
 }
 
-func createBlocks(input []byte, mint, maxt int64, maxSamplesInAppender int, outputDir string, humanReadable bool) (returnErr error) {
+func createBlocks(input []byte, mint, maxt int64, maxSamplesInAppender int, outputDir string, humanReadable, quiet bool) (returnErr error) {
 	blockDuration := tsdb.DefaultBlockDuration
 	mint = blockDuration * (mint / blockDuration)
@@ -169,6 +169,9 @@ func createBlocks(input []byte, mint, maxt int64, maxSamplesInAppender int, outp
 			block, err := w.Flush(ctx)
 			switch err {
 			case nil:
+				if quiet {
+					break
+				}
 				blocks, err := db.Blocks()
 				if err != nil {
 					return errors.Wrap(err, "get blocks")
@@ -196,11 +199,11 @@ func createBlocks(input []byte, mint, maxt int64, maxSamplesInAppender int, outp
 }
 
-func backfill(maxSamplesInAppender int, input []byte, outputDir string, humanReadable bool) (err error) {
+func backfill(maxSamplesInAppender int, input []byte, outputDir string, humanReadable, quiet bool) (err error) {
 	p := textparse.NewOpenMetricsParser(input)
 	maxt, mint, err := getMinAndMaxTimestamps(p)
 	if err != nil {
 		return errors.Wrap(err, "getting min and max timestamp")
 	}
-	return errors.Wrap(createBlocks(input, mint, maxt, maxSamplesInAppender, outputDir, humanReadable), "block creation")
+	return errors.Wrap(createBlocks(input, mint, maxt, maxSamplesInAppender, outputDir, humanReadable, quiet), "block creation")
 }
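
The added `break` leaves only the `case nil:` arm of the enclosing `switch`, not any surrounding loop, so with `quiet` set the block listing is skipped and ingestion carries on with the next batch. A minimal standalone sketch of that control flow (illustrative only, not the promtool code):

    package main

    import "fmt"

    func main() {
        quiet := true
        for batch := 0; batch < 3; batch++ {
            var err error // stands in for the result of flushing a batch
            switch err {
            case nil:
                if quiet {
                    break // exits only the switch; the for loop keeps going
                }
                fmt.Println("would print block details for batch", batch)
            default:
                fmt.Println("handle error:", err)
            }
            fmt.Println("finished batch", batch)
        }
    }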


@@ -540,7 +540,7 @@ after_eof 1 2
 			require.NoError(t, os.RemoveAll(outputDir))
 		}()
 
-		err = backfill(test.MaxSamplesInAppender, []byte(test.ToParse), outputDir, false)
+		err = backfill(test.MaxSamplesInAppender, []byte(test.ToParse), outputDir, false, false)
 		if !test.IsOk {
 			require.Error(t, err, test.Description)
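
The existing table-driven cases keep the new argument at `false`. A hypothetical extra test (not part of this change, assuming the test file's existing `testing`/`require` imports and a small OpenMetrics payload chosen here as an example) could confirm that quiet mode changes only the output, not the result:

    // Hypothetical sketch for backfill_test.go: quiet mode should still produce blocks.
    func TestBackfillQuietSketch(t *testing.T) {
        input := "no_help_no_type{foo=\"bar\"} 5.5 1565133713.989\n# EOF\n"
        outputDir := t.TempDir()
        require.NoError(t, backfill(5000, []byte(input), outputDir, false, true))
    }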


@@ -144,6 +144,7 @@ func main() {
 	importCmd := tsdbCmd.Command("create-blocks-from", "[Experimental] Import samples from input and produce TSDB blocks. Please refer to the storage docs for more details.")
 	importHumanReadable := importCmd.Flag("human-readable", "Print human readable values.").Short('r').Bool()
+	importQuiet := importCmd.Flag("quiet", "Do not print created blocks.").Short('q').Bool()
 	openMetricsImportCmd := importCmd.Command("openmetrics", "Import samples from OpenMetrics input and produce TSDB blocks. Please refer to the storage docs for more details.")
 	// TODO(aSquare14): add flag to set default block duration
 	importFilePath := openMetricsImportCmd.Arg("input file", "OpenMetrics file to read samples from.").Required().String()
@@ -221,7 +222,7 @@ func main() {
 		os.Exit(checkErr(dumpSamples(*dumpPath, *dumpMinTime, *dumpMaxTime)))
 	//TODO(aSquare14): Work on adding support for custom block size.
 	case openMetricsImportCmd.FullCommand():
-		os.Exit(backfillOpenMetrics(*importFilePath, *importDBPath, *importHumanReadable))
+		os.Exit(backfillOpenMetrics(*importFilePath, *importDBPath, *importHumanReadable, *importQuiet))
 	case importRulesCmd.FullCommand():
 		os.Exit(checkErr(importRules(*importRulesURL, *importRulesStart, *importRulesEnd, *importRulesOutputDir, *importRulesEvalInterval, *importRulesFiles...)))
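
The flag follows the same kingpin pattern as `--human-readable`: `Flag(...).Short('q').Bool()` returns a `*bool` that is only dereferenced after the command line has been parsed. A minimal standalone sketch of that pattern (the application name and output are stand-ins, not promtool):

    package main

    import (
        "fmt"
        "os"

        "gopkg.in/alecthomas/kingpin.v2"
    )

    func main() {
        app := kingpin.New("demo", "Sketch of the boolean-flag wiring used above.")
        quiet := app.Flag("quiet", "Do not print created blocks.").Short('q').Bool()

        // *quiet is only meaningful after Parse has run.
        kingpin.MustParse(app.Parse(os.Args[1:]))

        if !*quiet {
            fmt.Println("pretend block listing goes here")
        }
    }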


@@ -611,7 +611,7 @@ func checkErr(err error) int {
 	return 0
 }
 
-func backfillOpenMetrics(path string, outputDir string, humanReadable bool) int {
+func backfillOpenMetrics(path string, outputDir string, humanReadable, quiet bool) int {
 	inputFile, err := fileutil.OpenMmapFile(path)
 	if err != nil {
 		return checkErr(err)
@@ -622,5 +622,5 @@ func backfillOpenMetrics(path string, outputDir string, humanReadable bool) int
 		return checkErr(errors.Wrap(err, "create output dir"))
 	}
 
-	return checkErr(backfill(5000, inputFile.Bytes(), outputDir, humanReadable))
+	return checkErr(backfill(5000, inputFile.Bytes(), outputDir, humanReadable, quiet))
 }
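
For context, `backfillOpenMetrics` memory-maps the input file and hands the raw bytes to `backfill`. A rough standalone sketch of that read path, assuming the `tsdb/fileutil` API seen above (the placeholder path and the deferred `Close` are assumptions, not shown in this diff):

    package main

    import (
        "fmt"
        "os"

        "github.com/prometheus/prometheus/tsdb/fileutil"
    )

    func main() {
        // Memory-map the OpenMetrics input rather than copying it into a buffer.
        inputFile, err := fileutil.OpenMmapFile("metrics.om") // placeholder path
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        defer inputFile.Close()

        // The real code passes inputFile.Bytes() to backfill(5000, ...); here we
        // only report how much data was mapped.
        fmt.Printf("mapped %d bytes of OpenMetrics data\n", len(inputFile.Bytes()))
    }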