promtool: add support for selecting time series for TSDB dump

Dumping a TSDB without any restriction produces a very large amount of
output. It is also sometimes necessary to dump only part of the data,
for example in order to modify or transfer it.

This change allows a series selector to be specified so that only part
of the data is dumped; by default the behaviour is unchanged (no public
API change).
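
For example, a hypothetical invocation (assuming the default data/
directory) that dumps only the series of one job:

    promtool tsdb dump --match='{job="prometheus"}' data/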

Signed-off-by: Amin Borjian <borjianamin98@outlook.com>
Author: Amin Borjian <borjianamin98@outlook.com>
Date:   2023-01-19 23:33:53 +03:30
Parent: 64842f137e
Commit: 90d6873c7f
2 changed files with 9 additions and 3 deletions

cmd/promtool/main.go

@@ -190,6 +190,7 @@ func main() {
 	dumpPath := tsdbDumpCmd.Arg("db path", "Database path (default is "+defaultDBPath+").").Default(defaultDBPath).String()
 	dumpMinTime := tsdbDumpCmd.Flag("min-time", "Minimum timestamp to dump.").Default(strconv.FormatInt(math.MinInt64, 10)).Int64()
 	dumpMaxTime := tsdbDumpCmd.Flag("max-time", "Maximum timestamp to dump.").Default(strconv.FormatInt(math.MaxInt64, 10)).Int64()
+	dumpMatch := tsdbDumpCmd.Flag("match", "Series selector.").Default("{__name__=~'(?s:.*)'}").String()

 	importCmd := tsdbCmd.Command("create-blocks-from", "[Experimental] Import samples from input and produce TSDB blocks. Please refer to the storage docs for more details.")
 	importHumanReadable := importCmd.Flag("human-readable", "Print human readable values.").Short('r').Bool()
@@ -296,7 +297,7 @@ func main() {
 		os.Exit(checkErr(listBlocks(*listPath, *listHumanReadable)))
 	case tsdbDumpCmd.FullCommand():
-		os.Exit(checkErr(dumpSamples(*dumpPath, *dumpMinTime, *dumpMaxTime)))
+		os.Exit(checkErr(dumpSamples(*dumpPath, *dumpMinTime, *dumpMaxTime, *dumpMatch)))
 	// TODO(aSquare14): Work on adding support for custom block size.
 	case openMetricsImportCmd.FullCommand():
 		os.Exit(backfillOpenMetrics(*importFilePath, *importDBPath, *importHumanReadable, *importQuiet, *maxBlockDuration))

cmd/promtool/tsdb.go

@@ -30,6 +30,7 @@ import (
 	"text/tabwriter"
 	"time"

+	"github.com/prometheus/prometheus/promql/parser"
 	"github.com/prometheus/prometheus/storage"
 	"github.com/prometheus/prometheus/tsdb/chunkenc"
 	"github.com/prometheus/prometheus/tsdb/index"
@@ -624,7 +625,7 @@ func analyzeCompaction(block tsdb.BlockReader, indexr tsdb.IndexReader) (err err
 	return nil
 }

-func dumpSamples(path string, mint, maxt int64) (err error) {
+func dumpSamples(path string, mint, maxt int64, match string) (err error) {
 	db, err := tsdb.OpenDBReadOnly(path, nil)
 	if err != nil {
 		return err
@@ -638,7 +639,11 @@ func dumpSamples(path string, mint, maxt int64) (err error) {
 	}
 	defer q.Close()

-	ss := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))
+	matchers, err := parser.ParseMetricSelector(match)
+	if err != nil {
+		return err
+	}
+	ss := q.Select(false, nil, matchers...)
 	for ss.Next() {
 		series := ss.At()
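
Below is a minimal, standalone sketch (not part of the commit) of how a
--match selector string is parsed into label matchers with the
promql/parser package, which is what dumpSamples now does before calling
Select; the selector value here is only an illustration:

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/promql/parser"
)

func main() {
	// Parse a PromQL series selector into a slice of label matchers.
	matchers, err := parser.ParseMetricSelector(`{__name__=~"node_.*", job="node"}`)
	if err != nil {
		panic(err)
	}
	for _, m := range matchers {
		// Each matcher carries a label name, a match type (=, !=, =~, !~) and a value.
		fmt.Println(m.String())
	}
}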