unexport backfiller
Signed-off-by: jessicagreben <jessicagrebens@gmail.com>
This commit is contained in:
parent 3ed6457dd4
commit 6980bcf671
@@ -792,7 +792,7 @@ func BackfillRule(url, start, end, outputDir string, evalInterval time.Duration,
 		fmt.Fprintln(os.Stderr, err)
 		return err
 	}
-	cfg := RuleImporterConfig{
+	cfg := ruleImporterConfig{
 		Start:     stime,
 		End:       etime,
 		OutputDir: outputDir,
@@ -800,13 +800,13 @@ func BackfillRule(url, start, end, outputDir string, evalInterval time.Duration,
 		URL: url,
 	}
 	logger := log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr))
-	ruleImporter := NewRuleImporter(logger, cfg)
-	if err = ruleImporter.Init(); err != nil {
+	ruleImporter := newRuleImporter(logger, cfg)
+	if err = ruleImporter.init(); err != nil {
 		fmt.Fprintln(os.Stderr, "rule importer init error", err)
 		return err
 	}

-	errs := ruleImporter.LoadGroups(ctx, files)
+	errs := ruleImporter.loadGroups(ctx, files)
 	for _, err := range errs {
 		if err != nil {
 			fmt.Fprintln(os.Stderr, "rule importer parse error", err)
@@ -814,7 +814,7 @@ func BackfillRule(url, start, end, outputDir string, evalInterval time.Duration,
 		}
 	}

-	errs = ruleImporter.ImportAll(ctx)
+	errs = ruleImporter.importAll(ctx)
 	for _, err := range errs {
 		if err != nil {
 			fmt.Fprintln(os.Stderr, "rule importer error", err)
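The renames above follow Go's visibility rule: an identifier is exported from its package only if it begins with an upper-case letter. Since BackfillRule and the importer live in the same package, nothing outside loses access when the importer goes package-private. A minimal self-contained illustration (names and field are hypothetical):

    package main

    import "fmt"

    // ruleImporter starts with a lower-case letter, so it is visible only
    // within the package that declares it.
    type ruleImporter struct{ outputDir string }

    // newRuleImporter is likewise package-private: code in other packages
    // cannot construct a ruleImporter directly.
    func newRuleImporter(dir string) *ruleImporter { return &ruleImporter{outputDir: dir} }

    func main() {
        fmt.Println(newRuleImporter("./out").outputDir)
    }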
@@ -31,21 +31,21 @@ import (

 const blockSize = 2 // in hours

-// RuleImporter is the importer to backfill rules.
-type RuleImporter struct {
+// ruleImporter is the importer to backfill rules.
+type ruleImporter struct {
 	logger log.Logger
-	config RuleImporterConfig
+	config ruleImporterConfig

 	groups      map[string]*rules.Group
 	groupLoader rules.GroupLoader

 	apiClient v1.API

-	writer *blocks.MultiWriter
+	writer *tsdb.BlockWriter
 }

-// RuleImporterConfig is the config for the rule importer.
-type RuleImporterConfig struct {
+// ruleImporterConfig is the config for the rule importer.
+type ruleImporterConfig struct {
 	Start     time.Time
 	End       time.Time
 	OutputDir string
@@ -53,21 +53,25 @@ type RuleImporterConfig struct {
 	URL string
 }

-// NewRuleImporter creates a new rule importer that can be used to backfill rules.
-func NewRuleImporter(logger log.Logger, config RuleImporterConfig) *RuleImporter {
-	return &RuleImporter{
+// newRuleImporter creates a new rule importer that can be used to backfill rules.
+func newRuleImporter(logger log.Logger, config ruleImporterConfig) *ruleImporter {
+	return &ruleImporter{
 		config:      config,
 		groupLoader: rules.FileLoader{},
 	}
 }

-// Init initializes the rule importer which creates a new block writer
+// init initializes the rule importer which creates a new block writer
 // and creates a Prometheus API client.
-func (importer *RuleImporter) Init() error {
-	importer.writer = tsdb.NewBlockWriter(importer.logger,
+func (importer *ruleImporter) init() error {
+	w, err := tsdb.NewBlockWriter(importer.logger,
 		importer.config.OutputDir,
-		(blockSize * time.Hour).Milliseconds()
+		(blockSize * time.Hour).Milliseconds(),
 	)
+	if err != nil {
+		return err
+	}
+	importer.writer = w

 	config := api.Config{
 		Address: importer.config.URL,
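The init() change tracks the move from blocks.MultiWriter to tsdb.BlockWriter: the constructor now also returns an error, and, as later hunks show, Appender and Flush take a context. A minimal sketch of the new pattern, assuming the upstream signatures at the time of this commit (the output directory and series name are illustrative):

    package main

    import (
        "context"
        "time"

        "github.com/go-kit/kit/log"
        "github.com/prometheus/prometheus/pkg/labels"
        "github.com/prometheus/prometheus/tsdb"
    )

    func main() {
        ctx := context.Background()
        logger := log.NewNopLogger()

        // The constructor now returns (writer, error) instead of just a writer.
        w, err := tsdb.NewBlockWriter(logger, "./out", (2 * time.Hour).Milliseconds())
        if err != nil {
            panic(err)
        }
        defer w.Close()

        // Appender takes a context, matching the storage.Appendable interface.
        app := w.Appender(ctx)
        lbls := labels.FromStrings(labels.MetricName, "example_series")
        if _, err := app.Add(lbls, time.Now().UnixNano()/int64(time.Millisecond), 1.0); err != nil {
            panic(err)
        }
        if err := app.Commit(); err != nil {
            panic(err)
        }

        // Flush also takes the context and returns the ULID of the written block.
        if _, err := w.Flush(ctx); err != nil {
            panic(err)
        }
    }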
@@ -80,13 +84,13 @@ func (importer *RuleImporter) Init() error {
 	return nil
 }

-// Close cleans up any open resources.
-func (importer *RuleImporter) Close() error {
+// close cleans up any open resources.
+func (importer *ruleImporter) close() error {
 	return importer.writer.Close()
 }

-// LoadGroups reads groups from a list of rule files.
-func (importer *RuleImporter) LoadGroups(ctx context.Context, filenames []string) (errs []error) {
+// loadGroups reads groups from a list of rule files.
+func (importer *ruleImporter) loadGroups(ctx context.Context, filenames []string) (errs []error) {
 	groups := make(map[string]*rules.Group)

 	for _, filename := range filenames {
@@ -127,31 +131,31 @@ func (importer *RuleImporter) LoadGroups(ctx context.Context, filenames []string
 	return nil
 }

-// ImportAll evaluates all the groups and rules and creates new time series
+// importAll evaluates all the groups and rules and creates new time series
 // and stores them in new blocks.
-func (importer *RuleImporter) ImportAll(ctx context.Context) []error {
+func (importer *ruleImporter) importAll(ctx context.Context) []error {
 	var errs = []error{}
 	for _, group := range importer.groups {
 		stimeWithAlignment := group.EvalTimestamp(importer.config.Start.UnixNano())

 		for _, r := range group.Rules() {
-			err := importer.ImportRule(ctx, r.Query().String(), stimeWithAlignment, group.Interval())
+			err := importer.importRule(ctx, r.Query().String(), stimeWithAlignment, group.Interval())
 			if err != nil {
 				errs = append(errs, err)
 			}
 		}
 	}
-	_, err := importer.writer.Flush()
+	_, err := importer.writer.Flush(ctx)
 	if err != nil {
 		errs = append(errs, err)
 	}
 	return errs
 }

-// ImportRule imports the historical data for a single rule.
-func (importer *RuleImporter) ImportRule(ctx context.Context, ruleExpr string, stimeWithAlignment time.Time, internval time.Duration) error {
+// importRule imports the historical data for a single rule.
+func (importer *ruleImporter) importRule(ctx context.Context, ruleExpr string, stimeWithAlignment time.Time, internval time.Duration) error {
 	ts := stimeWithAlignment
-	appender := importer.writer.Appender()
+	appender := importer.writer.Appender(ctx)

 	for ts.Before(importer.config.End) {
 		currentBlockEnd := ts.Add(blockSize * time.Hour)
@@ -189,7 +193,6 @@ func (importer *RuleImporter) ImportRule(ctx context.Context, ruleExpr string, s
 			for _, value := range sample.Values {
 				_, err := appender.Add(currentLabels, value.Timestamp.Unix(), float64(value.Value))
 				if err != nil {
-					// todo: handle other errors, i.e. ErrOutOfOrderSample and ErrDuplicateSampleForTimestamp
 					return err
 				}
 			}
@@ -200,7 +203,7 @@ func (importer *RuleImporter) ImportRule(ctx context.Context, ruleExpr string, s

 		ts = currentBlockEnd
 	}
-	_, err := importer.writer.Flush()
+	_, err := importer.writer.Flush(ctx)
 	if err != nil {
 		return err
 	}
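importRule walks the configured time range in blockSize-hour steps: each iteration handles one window, then ts advances to currentBlockEnd, with a single Flush at the end. A condensed, self-contained sketch of just the chunking arithmetic (the query and append steps are elided, and clamping the last window to end is an assumption for illustration):

    package main

    import (
        "fmt"
        "time"
    )

    // chunkRange splits [start, end) into blockSize-sized windows, mirroring
    // how importRule advances ts by blockSize hours per iteration.
    func chunkRange(start, end time.Time, blockSize time.Duration) [][2]time.Time {
        var chunks [][2]time.Time
        for ts := start; ts.Before(end); {
            currentBlockEnd := ts.Add(blockSize)
            if currentBlockEnd.After(end) {
                currentBlockEnd = end
            }
            chunks = append(chunks, [2]time.Time{ts, currentBlockEnd})
            ts = currentBlockEnd
        }
        return chunks
    }

    func main() {
        start := time.Date(2020, 10, 1, 0, 0, 0, 0, time.UTC)
        for _, c := range chunkRange(start, start.Add(5*time.Hour), 2*time.Hour) {
            fmt.Println(c[0].Format(time.RFC3339), "to", c[1].Format(time.RFC3339))
        }
    }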
@@ -280,13 +280,9 @@ func NewGroup(o GroupOptions) *Group {
 		metrics = NewGroupMetrics(o.Opts.Registerer)
 	}

-<<<<<<< HEAD
 	key := GroupKey(o.File, o.Name)
-=======
-	key := groupKey(o.File, o.Name)
 	metrics.iterationsMissed.WithLabelValues(key)
 	metrics.iterationsScheduled.WithLabelValues(key)
->>>>>>> master
 	metrics.evalTotal.WithLabelValues(key)
 	metrics.evalFailures.WithLabelValues(key)
 	metrics.groupLastEvalTime.WithLabelValues(key)
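The hunks in this file resolve stale merge-conflict markers: master had unexported groupKey, but this commit keeps the exported GroupKey, presumably so the now-unexported backfiller in cmd/promtool can still build the per-group label value from outside the rules package. For reference, a sketch of what GroupKey computes (the ";" separator follows the upstream definition as best I can tell, so treat it as an assumption):

    package main

    import "fmt"

    // GroupKey joins a rule file path and a group name into the label value
    // used for the group's metrics.
    func GroupKey(file, name string) string {
        return file + ";" + name
    }

    func main() {
        fmt.Println(GroupKey("rules.yml", "my-group")) // rules.yml;my-group
    }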
@@ -342,7 +338,7 @@ func (g *Group) run(ctx context.Context) {
 	})

 	iter := func() {
-		g.metrics.iterationsScheduled.WithLabelValues(groupKey(g.file, g.name)).Inc()
+		g.metrics.iterationsScheduled.WithLabelValues(GroupKey(g.file, g.name)).Inc()

 		start := time.Now()
 		g.Eval(ctx, evalTimestamp)
@@ -394,8 +390,8 @@ func (g *Group) run(ctx context.Context) {
 		case <-tick.C:
 			missed := (time.Since(evalTimestamp) / g.interval) - 1
 			if missed > 0 {
-				g.metrics.iterationsMissed.WithLabelValues(groupKey(g.file, g.name)).Add(float64(missed))
-				g.metrics.iterationsScheduled.WithLabelValues(groupKey(g.file, g.name)).Add(float64(missed))
+				g.metrics.iterationsMissed.WithLabelValues(GroupKey(g.file, g.name)).Add(float64(missed))
+				g.metrics.iterationsScheduled.WithLabelValues(GroupKey(g.file, g.name)).Add(float64(missed))
 			}
 			evalTimestamp = evalTimestamp.Add((missed + 1) * g.interval)
 			iter()
@@ -416,8 +412,8 @@ func (g *Group) run(ctx context.Context) {
 		case <-tick.C:
 			missed := (time.Since(evalTimestamp) / g.interval) - 1
 			if missed > 0 {
-				g.metrics.iterationsMissed.WithLabelValues(groupKey(g.file, g.name)).Add(float64(missed))
-				g.metrics.iterationsScheduled.WithLabelValues(groupKey(g.file, g.name)).Add(float64(missed))
+				g.metrics.iterationsMissed.WithLabelValues(GroupKey(g.file, g.name)).Add(float64(missed))
+				g.metrics.iterationsScheduled.WithLabelValues(GroupKey(g.file, g.name)).Add(float64(missed))
 			}
 			evalTimestamp = evalTimestamp.Add((missed + 1) * g.interval)
 			iter()
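These two hunks only flip groupKey back to GroupKey; the missed-iteration bookkeeping around them is unchanged. The arithmetic deserves a note: Duration division truncates, so (time.Since(evalTimestamp) / g.interval) - 1 counts whole ticks that were skipped, excluding the evaluation about to run now. A worked example with made-up values:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        interval := time.Minute
        elapsed := 3*time.Minute + 30*time.Second // stand-in for time.Since(evalTimestamp)

        // Truncating division: 3m30s / 1m == 3; subtracting the evaluation
        // that is about to run leaves 2 fully missed ticks.
        missed := (elapsed / interval) - 1
        fmt.Println(int64(missed)) // 2

        // The next evaluation timestamp jumps over the missed ticks.
        evalTimestamp := time.Date(2020, 10, 1, 12, 0, 0, 0, time.UTC)
        evalTimestamp = evalTimestamp.Add((missed + 1) * interval)
        fmt.Println(evalTimestamp.Format(time.RFC3339)) // 2020-10-01T12:03:00Z
    }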
@@ -478,15 +474,9 @@ func (g *Group) GetEvaluationTime() time.Duration {
 	return g.evaluationTime
 }

-<<<<<<< HEAD
 // setEvaluationDuration sets the time in seconds the last evaluation took.
 func (g *Group) setEvaluationDuration(dur time.Duration) {
 	g.metrics.groupLastDuration.WithLabelValues(GroupKey(g.file, g.name)).Set(dur.Seconds())
-=======
-// setEvaluationTime sets the time in seconds the last evaluation took.
-func (g *Group) setEvaluationTime(dur time.Duration) {
-	g.metrics.groupLastDuration.WithLabelValues(groupKey(g.file, g.name)).Set(dur.Seconds())
->>>>>>> master

 	g.mtx.Lock()
 	defer g.mtx.Unlock()
@@ -500,15 +490,9 @@ func (g *Group) GetLastEvaluation() time.Time {
 	return g.lastEvaluation
 }

-<<<<<<< HEAD
 // setEvaluationTimestamp updates evaluationTimestamp to the timestamp of when the rule group was last evaluated.
 func (g *Group) setEvaluationTimestamp(ts time.Time) {
 	g.metrics.groupLastEvalTime.WithLabelValues(GroupKey(g.file, g.name)).Set(float64(ts.UnixNano()) / 1e9)
-=======
-// setLastEvaluation updates lastEvaluation to the timestamp of when the rule group was last evaluated.
-func (g *Group) setLastEvaluation(ts time.Time) {
-	g.metrics.groupLastEvalTime.WithLabelValues(groupKey(g.file, g.name)).Set(float64(ts.UnixNano()) / 1e9)
->>>>>>> master

 	g.mtx.Lock()
 	defer g.mtx.Unlock()
@@ -519,8 +503,7 @@ func (g *Group) setLastEvaluation(ts time.Time) {
 func (g *Group) EvalTimestamp(startTime int64) time.Time {
 	var (
 		offset = int64(g.hash() % uint64(g.interval))
-		start  = startTime
-		adjNow = start - offset
+		adjNow = startTime - offset
 		base   = adjNow - (adjNow % int64(g.interval))
 	)

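The hunk above folds the intermediate start variable into adjNow without changing behavior. The surrounding arithmetic aligns a raw start time onto the group's evaluation grid, phase-shifted by a per-group hash so that groups do not all evaluate at the same instant. A worked sketch with an invented hash value (the final time.Unix conversion mirrors how the function appears to return its result, so treat that as an assumption):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        interval := int64(time.Minute) // group interval in nanoseconds
        hash := uint64(1234567890123)  // invented stand-in for g.hash()

        startTime := time.Date(2020, 10, 1, 12, 0, 42, 0, time.UTC).UnixNano()

        offset := int64(hash % uint64(interval)) // per-group phase within one interval
        adjNow := startTime - offset             // shift into the group's frame
        base := adjNow - (adjNow % interval)     // floor to the previous grid point

        // base+offset is the latest phase-shifted tick at or before startTime.
        fmt.Println(time.Unix(0, base+offset).UTC())
    }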
@@ -615,13 +598,7 @@ func (g *Group) Eval(ctx context.Context, ts time.Time) {
 				if _, ok := err.(promql.ErrQueryCanceled); !ok {
 					level.Warn(g.logger).Log("msg", "Evaluating rule failed", "rule", rule, "err", err)
 				}
-<<<<<<< HEAD
 				g.metrics.evalFailures.WithLabelValues(GroupKey(g.File(), g.Name())).Inc()
-=======
-				sp.SetTag("error", true)
-				sp.LogKV("error", err)
-				g.metrics.evalFailures.WithLabelValues(groupKey(g.File(), g.Name())).Inc()
->>>>>>> master
 				return
 			}
 			samplesTotal += float64(len(vector))
@@ -683,7 +660,7 @@ func (g *Group) Eval(ctx context.Context, ts time.Time) {
 		}(i, rule)
 	}
 	if g.metrics != nil {
-		g.metrics.groupSamples.WithLabelValues(groupKey(g.File(), g.Name())).Set(samplesTotal)
+		g.metrics.groupSamples.WithLabelValues(GroupKey(g.File(), g.Name())).Set(samplesTotal)
 	}
 	g.cleanupStaleSeries(ctx, ts)
 }