Return warnings on a remote read fail (#4832)

Signed-off-by: Mark Knapp <mknapp@hudson-trading.com>

parent 77ee41084f
commit f0e9196dca
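At a high level, this commit changes `storage.Querier.Select` to return a third value, `storage.Warnings` (a `[]error`), threads it through the PromQL engine (`promql.Result.Warnings`), and serializes it as a `warnings` array in the HTTP API response, so a failed remote read degrades to a warning instead of failing the whole query. A small self-contained sketch of that idea follows; the `mergeWarnings` helper and the `main` function are illustrative, not code from this commit:

```go
package main

import (
	"errors"
	"fmt"
)

// Warnings mirrors the new storage.Warnings type introduced by this commit.
type Warnings []error

// mergeWarnings is a hypothetical helper showing the intended usage:
// non-fatal errors from several queriers are collected into one slice
// instead of aborting the query.
func mergeWarnings(ws ...Warnings) Warnings {
	var out Warnings
	for _, w := range ws {
		out = append(out, w...)
	}
	return out
}

func main() {
	local := Warnings(nil) // local read succeeded, nothing to report
	remote := Warnings{errors.New("remote read: connection refused")}
	fmt.Println(mergeWarnings(local, remote))
}
```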
@@ -24,6 +24,10 @@ and one of the following HTTP response codes:
Other non-`2xx` codes may be returned for errors occurring before the API
endpoint is reached.

+An array of warnings may be returned if there are errors that do
+not inhibit the request execution. All of the data that was successfully
+collected will be returned in the data field.
+
The JSON response envelope format is as follows:

```
@@ -34,7 +38,11 @@ The JSON response envelope format is as follows:
// Only set if status is "error". The data field may still hold
// additional data.
"errorType": "<string>",
-"error": "<string>"
+"error": "<string>",
+
+// Only if there were warnings while executing the request.
+// There will still be data in the data field.
+"warnings": ["<string>"]
}
```

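For illustration, a client that wants to surface these warnings could decode the envelope as sketched below; the struct fields mirror the documented envelope, while the endpoint URL and the error handling are placeholders, not part of this commit:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// apiResponse mirrors the JSON envelope documented above.
type apiResponse struct {
	Status    string          `json:"status"`
	Data      json.RawMessage `json:"data,omitempty"`
	ErrorType string          `json:"errorType,omitempty"`
	Error     string          `json:"error,omitempty"`
	Warnings  []string        `json:"warnings,omitempty"`
}

func main() {
	// Placeholder URL; point this at a real Prometheus server to try it.
	resp, err := http.Get("http://localhost:9090/api/v1/query?query=up")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var r apiResponse
	if err := json.NewDecoder(resp.Body).Decode(&r); err != nil {
		panic(err)
	}
	for _, w := range r.Warnings {
		fmt.Println("warning:", w)
	}
}
```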
@@ -111,8 +111,9 @@ type MatrixSelector struct {
Offset time.Duration
LabelMatchers []*labels.Matcher

-// The series are populated at query preparation time.
-series []storage.Series
+// The unexpanded seriesSet populated at query preparation time.
+unexpandedSeriesSet storage.SeriesSet
+series []storage.Series
}

// NumberLiteral represents a number.
@@ -144,8 +145,9 @@ type VectorSelector struct {
Offset time.Duration
LabelMatchers []*labels.Matcher

-// The series are populated at query preparation time.
-series []storage.Series
+// The unexpanded seriesSet populated at query preparation time.
+unexpandedSeriesSet storage.SeriesSet
+series []storage.Series
}

func (e *AggregateExpr) Type() ValueType { return ValueTypeVector }
promql/engine.go (101 lines changed)
@@ -154,8 +154,8 @@ func (q *query) Exec(ctx context.Context) *Result {
span.SetTag(queryTag, q.stmt.String())
}

-res, err := q.ng.exec(ctx, q)
-return &Result{Err: err, Value: res}
+res, err, warnings := q.ng.exec(ctx, q)
+return &Result{Err: err, Value: res, Warnings: warnings}
}

// contextDone returns an error if the context was canceled or timed out.
@@ -332,7 +332,7 @@ func (ng *Engine) newTestQuery(f func(context.Context) error) Query {
//
// At this point per query only one EvalStmt is evaluated. Alert and record
// statements are not handled by the Engine.
-func (ng *Engine) exec(ctx context.Context, q *query) (Value, error) {
+func (ng *Engine) exec(ctx context.Context, q *query) (Value, error, storage.Warnings) {
ng.metrics.currentQueries.Inc()
defer ng.metrics.currentQueries.Dec()

@@ -345,7 +345,7 @@ func (ng *Engine) exec(ctx context.Context, q *query) (Value, error) {
queueSpanTimer, _ := q.stats.GetSpanTimer(ctx, stats.ExecQueueTime, ng.metrics.queryQueueTime)

if err := ng.gate.Start(ctx); err != nil {
-return nil, contextErr(err, "query queue")
+return nil, contextErr(err, "query queue"), nil
}
defer ng.gate.Done()

@@ -361,14 +361,14 @@ func (ng *Engine) exec(ctx context.Context, q *query) (Value, error) {

// The base context might already be canceled on the first iteration (e.g. during shutdown).
if err := contextDone(ctx, env); err != nil {
-return nil, err
+return nil, err, nil
}

switch s := q.Statement().(type) {
case *EvalStmt:
return ng.execEvalStmt(ctx, q, s)
case testStmt:
-return nil, s(ctx)
+return nil, s(ctx), nil
}

panic(fmt.Errorf("promql.Engine.exec: unhandled statement of type %T", q.Statement()))
@@ -383,9 +383,9 @@ func durationMilliseconds(d time.Duration) int64 {
}

// execEvalStmt evaluates the expression of an evaluation statement for the given time range.
-func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (Value, error) {
+func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (Value, error, storage.Warnings) {
prepareSpanTimer, ctxPrepare := query.stats.GetSpanTimer(ctx, stats.QueryPreparationTime, ng.metrics.queryPrepareTime)
-querier, err := ng.populateSeries(ctxPrepare, query.queryable, s)
+querier, err, warnings := ng.populateSeries(ctxPrepare, query.queryable, s)
prepareSpanTimer.Finish()

// XXX(fabxc): the querier returned by populateSeries might be instantiated
@@ -396,7 +396,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
}

if err != nil {
-return nil, err
+return nil, err, warnings
}

evalSpanTimer, _ := query.stats.GetSpanTimer(ctx, stats.InnerEvalTime, ng.metrics.queryInnerEval)
@@ -413,7 +413,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
}
val, err := evaluator.Eval(s.Expr)
if err != nil {
-return nil, err
+return nil, err, warnings
}

evalSpanTimer.Finish()
@@ -432,11 +432,11 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
// timestamp as that is when we ran the evaluation.
vector[i] = Sample{Metric: s.Metric, Point: Point{V: s.Points[0].V, T: start}}
}
-return vector, nil
+return vector, nil, warnings
case ValueTypeScalar:
-return Scalar{V: mat[0].Points[0].V, T: start}, nil
+return Scalar{V: mat[0].Points[0].V, T: start}, nil, warnings
case ValueTypeMatrix:
-return mat, nil
+return mat, nil, warnings
default:
panic(fmt.Errorf("promql.Engine.exec: unexpected expression type %q", s.Expr.Type()))
}
@@ -454,7 +454,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
}
val, err := evaluator.Eval(s.Expr)
if err != nil {
-return nil, err
+return nil, err, warnings
}
evalSpanTimer.Finish()

@@ -465,7 +465,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
query.matrix = mat

if err := contextDone(ctx, "expression evaluation"); err != nil {
-return nil, err
+return nil, err, warnings
}

// TODO(fabxc): order ensured by storage?
@@ -474,10 +474,10 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
sort.Sort(mat)
sortSpanTimer.Finish()

-return mat, nil
+return mat, nil, warnings
}

-func (ng *Engine) populateSeries(ctx context.Context, q storage.Queryable, s *EvalStmt) (storage.Querier, error) {
+func (ng *Engine) populateSeries(ctx context.Context, q storage.Queryable, s *EvalStmt) (storage.Querier, error, storage.Warnings) {
var maxOffset time.Duration
Inspect(s.Expr, func(node Node, _ []Node) error {
switch n := node.(type) {
@@ -503,11 +503,14 @@ func (ng *Engine) populateSeries(ctx context.Context, q storage.Queryable, s *Ev

querier, err := q.Querier(ctx, timestamp.FromTime(mint), timestamp.FromTime(s.End))
if err != nil {
-return nil, err
+return nil, err, nil
}

+var warnings storage.Warnings
+
Inspect(s.Expr, func(node Node, path []Node) error {
var set storage.SeriesSet
+var wrn storage.Warnings
params := &storage.SelectParams{
Start: timestamp.FromTime(s.Start),
End: timestamp.FromTime(s.End),
@@ -524,17 +527,13 @@ func (ng *Engine) populateSeries(ctx context.Context, q storage.Queryable, s *Ev
params.End = params.End - offsetMilliseconds
}

-set, err = querier.Select(params, n.LabelMatchers...)
+set, err, wrn = querier.Select(params, n.LabelMatchers...)
+warnings = append(warnings, wrn...)
if err != nil {
level.Error(ng.logger).Log("msg", "error selecting series set", "err", err)
return err
}
-n.series, err = expandSeriesSet(ctx, set)
-if err != nil {
-// TODO(fabxc): use multi-error.
-level.Error(ng.logger).Log("msg", "error expanding series set", "err", err)
-return err
-}
+n.unexpandedSeriesSet = set

case *MatrixSelector:
params.Func = extractFuncFromPath(path)
@@ -547,20 +546,17 @@ func (ng *Engine) populateSeries(ctx context.Context, q storage.Queryable, s *Ev
params.End = params.End - offsetMilliseconds
}

-set, err = querier.Select(params, n.LabelMatchers...)
+set, err, wrn = querier.Select(params, n.LabelMatchers...)
+warnings = append(warnings, wrn...)
if err != nil {
level.Error(ng.logger).Log("msg", "error selecting series set", "err", err)
return err
}
-n.series, err = expandSeriesSet(ctx, set)
-if err != nil {
-level.Error(ng.logger).Log("msg", "error expanding series set", "err", err)
-return err
-}
+n.unexpandedSeriesSet = set
}
return nil
})
-return querier, err
+return querier, err, warnings
}

// extractFuncFromPath walks up the path and searches for the first instance of
@@ -582,6 +578,30 @@ func extractFuncFromPath(p []Node) string {
return extractFuncFromPath(p[:len(p)-1])
}

+func checkForSeriesSetExpansion(expr Expr, ctx context.Context) error {
+switch e := expr.(type) {
+case *MatrixSelector:
+if e.series == nil {
+series, err := expandSeriesSet(ctx, e.unexpandedSeriesSet)
+if err != nil {
+panic(err)
+} else {
+e.series = series
+}
+}
+case *VectorSelector:
+if e.series == nil {
+series, err := expandSeriesSet(ctx, e.unexpandedSeriesSet)
+if err != nil {
+panic(err)
+} else {
+e.series = series
+}
+}
+}
+return nil
+}
+
func expandSeriesSet(ctx context.Context, it storage.SeriesSet) (res []storage.Series, err error) {
for it.Next() {
select {
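A note on the `panic(err)` in the new `checkForSeriesSetExpansion` above: the PromQL evaluator recovers panics raised during evaluation and converts them back into ordinary query errors (the same route `ev.error` takes), so an expansion failure surfaces as a query error rather than crashing the process. A minimal, self-contained sketch of that recover-and-convert pattern, with illustrative names rather than the engine's real identifiers:

```go
package main

import (
	"errors"
	"fmt"
)

// evalError mimics how an evaluator can turn a panic raised deep inside
// evaluation into an ordinary error. Names here are illustrative.
type evalError struct{ err error }

func fail(err error) { panic(evalError{err}) }

func evaluate() (err error) {
	defer func() {
		if r := recover(); r != nil {
			if e, ok := r.(evalError); ok {
				err = e.err // converted back into a normal error
				return
			}
			panic(r) // unrelated panics keep propagating
		}
	}()
	fail(errors.New("expanding series set: remote read failed"))
	return nil
}

func main() {
	fmt.Println(evaluate()) // prints the converted error
}
```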
@@ -887,6 +907,9 @@ func (ev *evaluator) eval(expr Expr) Value {
}

sel := e.Args[matrixArgIndex].(*MatrixSelector)
+if err := checkForSeriesSetExpansion(sel, ev.ctx); err != nil {
+ev.error(err)
+}
mat := make(Matrix, 0, len(sel.series)) // Output matrix.
offset := durationMilliseconds(sel.Offset)
selRange := durationMilliseconds(sel.Range)
@@ -1018,6 +1041,9 @@ func (ev *evaluator) eval(expr Expr) Value {
})

case *VectorSelector:
+if err := checkForSeriesSetExpansion(e, ev.ctx); err != nil {
+ev.error(err)
+}
mat := make(Matrix, 0, len(e.series))
it := storage.NewBuffer(durationMilliseconds(LookbackDelta))
for i, s := range e.series {
@@ -1058,6 +1084,10 @@ func (ev *evaluator) eval(expr Expr) Value {

// vectorSelector evaluates a *VectorSelector expression.
func (ev *evaluator) vectorSelector(node *VectorSelector, ts int64) Vector {
+if err := checkForSeriesSetExpansion(node, ev.ctx); err != nil {
+ev.error(err)
+}
+
var (
vec = make(Vector, 0, len(node.series))
)
@@ -1127,17 +1157,20 @@ func putPointSlice(p []Point) {

// matrixSelector evaluates a *MatrixSelector expression.
func (ev *evaluator) matrixSelector(node *MatrixSelector) Matrix {
+if err := checkForSeriesSetExpansion(node, ev.ctx); err != nil {
+ev.error(err)
+}
+
var (
offset = durationMilliseconds(node.Offset)
maxt = ev.startTimestamp - offset
mint = maxt - durationMilliseconds(node.Range)
matrix = make(Matrix, 0, len(node.series))
-err error
)

it := storage.NewBuffer(durationMilliseconds(node.Range))
for i, s := range node.series {
-if err = contextDone(ev.ctx, "expression evaluation"); err != nil {
+if err := contextDone(ev.ctx, "expression evaluation"); err != nil {
ev.error(err)
}
it.Reset(s.Iterator())
@@ -169,8 +169,8 @@ type errQuerier struct {
err error
}

-func (q *errQuerier) Select(*storage.SelectParams, ...*labels.Matcher) (storage.SeriesSet, error) {
-return errSeriesSet{err: q.err}, q.err
+func (q *errQuerier) Select(*storage.SelectParams, ...*labels.Matcher) (storage.SeriesSet, error, storage.Warnings) {
+return errSeriesSet{err: q.err}, q.err, nil
}
func (*errQuerier) LabelValues(name string) ([]string, error) { return nil, nil }
func (*errQuerier) LabelNames() ([]string, error) { return nil, nil }
@@ -425,7 +425,8 @@ load 10s
MaxSamples: 1,
Result: Result{
nil,
-Scalar{V: 1, T: 1000}},
+Scalar{V: 1, T: 1000},
+nil},
Start: time.Unix(1, 0),
},
{
@@ -434,6 +435,7 @@ load 10s
Result: Result{
ErrTooManySamples(env),
nil,
+nil,
},
Start: time.Unix(1, 0),
},
@@ -443,6 +445,7 @@ load 10s
Result: Result{
ErrTooManySamples(env),
nil,
+nil,
},
Start: time.Unix(1, 0),
},
@@ -455,6 +458,7 @@ load 10s
Sample{Point: Point{V: 1, T: 1000},
Metric: labels.FromStrings("__name__", "metric")},
},
+nil,
},
Start: time.Unix(1, 0),
},
@@ -467,6 +471,7 @@ load 10s
Points: []Point{{V: 1, T: 0}, {V: 2, T: 10000}},
Metric: labels.FromStrings("__name__", "metric")},
},
+nil,
},
Start: time.Unix(10, 0),
},
@@ -476,6 +481,7 @@ load 10s
Result: Result{
ErrTooManySamples(env),
nil,
+nil,
},
Start: time.Unix(10, 0),
},
@@ -489,6 +495,7 @@ load 10s
Points: []Point{{V: 1, T: 0}, {V: 1, T: 1000}, {V: 1, T: 2000}},
Metric: labels.FromStrings()},
},
+nil,
},
Start: time.Unix(0, 0),
End: time.Unix(2, 0),
@@ -500,6 +507,7 @@ load 10s
Result: Result{
ErrTooManySamples(env),
nil,
+nil,
},
Start: time.Unix(0, 0),
End: time.Unix(2, 0),
@@ -514,6 +522,7 @@ load 10s
Points: []Point{{V: 1, T: 0}, {V: 1, T: 1000}, {V: 1, T: 2000}},
Metric: labels.FromStrings("__name__", "metric")},
},
+nil,
},
Start: time.Unix(0, 0),
End: time.Unix(2, 0),
@@ -525,6 +534,7 @@ load 10s
Result: Result{
ErrTooManySamples(env),
nil,
+nil,
},
Start: time.Unix(0, 0),
End: time.Unix(2, 0),
@@ -539,6 +549,7 @@ load 10s
Points: []Point{{V: 1, T: 0}, {V: 1, T: 5000}, {V: 2, T: 10000}},
Metric: labels.FromStrings("__name__", "metric")},
},
+nil,
},
Start: time.Unix(0, 0),
End: time.Unix(10, 0),
@@ -550,6 +561,7 @@ load 10s
Result: Result{
ErrTooManySamples(env),
nil,
+nil,
},
Start: time.Unix(0, 0),
End: time.Unix(10, 0),
@@ -20,6 +20,7 @@ import (
"strings"

"github.com/prometheus/prometheus/pkg/labels"
+"github.com/prometheus/prometheus/storage"
)

// Value is a generic interface for values resulting from a query evaluation.
@@ -201,8 +202,9 @@ func (m Matrix) ContainsSameLabelset() bool {
// Result holds the resulting value of an execution or an error
// if any occurred.
type Result struct {
Err error
Value Value
+Warnings storage.Warnings
}

// Vector returns a Vector if the result value is one. An error is returned if
@@ -518,7 +518,7 @@ func (g *Group) RestoreForState(ts time.Time) {
matchers = append(matchers, mt)
}

-sset, err := q.Select(nil, matchers...)
+sset, err, _ := q.Select(nil, matchers...)
if err != nil {
level.Error(g.logger).Log("msg", "Failed to restore 'for' state",
labels.AlertName, alertRule.Name(), "stage", "Select", "err", err)
@@ -538,7 +538,7 @@ func TestStaleness(t *testing.T) {
matcher, err := labels.NewMatcher(labels.MatchEqual, model.MetricNameLabel, "a_plus_one")
testutil.Ok(t, err)

-set, err := querier.Select(nil, matcher)
+set, err, _ := querier.Select(nil, matcher)
testutil.Ok(t, err)

samples, err := readSeriesSet(set)
@@ -68,23 +68,23 @@ func (f *fanout) Querier(ctx context.Context, mint, maxt int64) (Querier, error)
queriers := make([]Querier, 0, 1+len(f.secondaries))

// Add primary querier
-querier, err := f.primary.Querier(ctx, mint, maxt)
+primaryQuerier, err := f.primary.Querier(ctx, mint, maxt)
if err != nil {
return nil, err
}
-queriers = append(queriers, querier)
+queriers = append(queriers, primaryQuerier)

// Add secondary queriers
for _, storage := range f.secondaries {
querier, err := storage.Querier(ctx, mint, maxt)
if err != nil {
-NewMergeQuerier(queriers).Close()
+NewMergeQuerier(primaryQuerier, queriers).Close()
return nil, err
}
queriers = append(queriers, querier)
}

-return NewMergeQuerier(queriers), nil
+return NewMergeQuerier(primaryQuerier, queriers), nil
}

func (f *fanout) Appender() (Appender, error) {
@@ -190,14 +190,18 @@ func (f *fanoutAppender) Rollback() (err error) {

// mergeQuerier implements Querier.
type mergeQuerier struct {
-queriers []Querier
+primaryQuerier Querier
+queriers []Querier
+
+failedQueriers map[Querier]struct{}
+setQuerierMap map[SeriesSet]Querier
}

// NewMergeQuerier returns a new Querier that merges results of input queriers.
// NB NewMergeQuerier will return NoopQuerier if no queriers are passed to it,
// and will filter NoopQueriers from its arguments, in order to reduce overhead
// when only one querier is passed.
-func NewMergeQuerier(queriers []Querier) Querier {
+func NewMergeQuerier(primaryQuerier Querier, queriers []Querier) Querier {
filtered := make([]Querier, 0, len(queriers))
for _, querier := range queriers {
if querier != NoopQuerier() {
@@ -205,6 +209,9 @@ func NewMergeQuerier(queriers []Querier) Querier {
}
}

+setQuerierMap := make(map[SeriesSet]Querier)
+failedQueriers := make(map[Querier]struct{})
+
switch len(filtered) {
case 0:
return NoopQuerier()
@@ -212,22 +219,37 @@ func NewMergeQuerier(queriers []Querier) Querier {
return filtered[0]
default:
return &mergeQuerier{
-queriers: filtered,
+primaryQuerier: primaryQuerier,
+queriers: filtered,
+failedQueriers: failedQueriers,
+setQuerierMap: setQuerierMap,
}
}
}

// Select returns a set of series that matches the given label matchers.
-func (q *mergeQuerier) Select(params *SelectParams, matchers ...*labels.Matcher) (SeriesSet, error) {
+func (q *mergeQuerier) Select(params *SelectParams, matchers ...*labels.Matcher) (SeriesSet, error, Warnings) {
seriesSets := make([]SeriesSet, 0, len(q.queriers))
+var warnings Warnings
for _, querier := range q.queriers {
-set, err := querier.Select(params, matchers...)
+set, err, wrn := querier.Select(params, matchers...)
+q.setQuerierMap[set] = querier
+if wrn != nil {
+warnings = append(warnings, wrn...)
+}
if err != nil {
-return nil, err
+q.failedQueriers[querier] = struct{}{}
+// If the error source isn't the primary querier, return the error as a warning and continue.
+if querier != q.primaryQuerier {
+warnings = append(warnings, err)
+continue
+} else {
+return nil, err, nil
+}
}
seriesSets = append(seriesSets, set)
}
-return NewMergeSeriesSet(seriesSets), nil
+return NewMergeSeriesSet(seriesSets, q), nil, warnings
}

// LabelValues returns all potential values for a label name.
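The policy in the `Select` above: an error from the primary querier still fails the whole call, while an error from a secondary querier (such as a remote read endpoint) is recorded as a warning, the querier is marked failed, and merging continues. Stripped of the heap and series-set machinery, the decision looks roughly like the sketch below (simplified, illustrative types, not the actual method):

```go
package main

import (
	"errors"
	"fmt"
)

type warnings []error

// selectResult stands in for what one querier's Select produced.
type selectResult struct {
	name string
	err  error
}

// mergeSelect demotes secondary errors to warnings and only fails hard on the primary.
func mergeSelect(primary string, results []selectResult) ([]string, error, warnings) {
	var ok []string
	var warns warnings
	for _, r := range results {
		if r.err != nil {
			if r.name == primary {
				return nil, r.err, nil // primary failure aborts the query
			}
			warns = append(warns, r.err) // secondary failure becomes a warning
			continue
		}
		ok = append(ok, r.name)
	}
	return ok, nil, warns
}

func main() {
	sets, err, warns := mergeSelect("local", []selectResult{
		{name: "local"},
		{name: "remote-1", err: errors.New("remote read failed")},
	})
	fmt.Println(sets, err, warns)
}
```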
@@ -243,6 +265,11 @@ func (q *mergeQuerier) LabelValues(name string) ([]string, error) {
return mergeStringSlices(results), nil
}

+func (q *mergeQuerier) IsFailedSet(set SeriesSet) bool {
+_, isFailedQuerier := q.failedQueriers[q.setQuerierMap[set]]
+return isFailedQuerier
+}
+
func mergeStringSlices(ss [][]string) []string {
switch len(ss) {
case 0:
@@ -322,11 +349,13 @@ type mergeSeriesSet struct {
currentSets []SeriesSet
heap seriesSetHeap
sets []SeriesSet
+
+querier *mergeQuerier
}

// NewMergeSeriesSet returns a new series set that merges (deduplicates)
// series returned by the input series sets when iterating.
-func NewMergeSeriesSet(sets []SeriesSet) SeriesSet {
+func NewMergeSeriesSet(sets []SeriesSet, querier *mergeQuerier) SeriesSet {
if len(sets) == 1 {
return sets[0]
}
@@ -335,34 +364,53 @@ func NewMergeSeriesSet(sets []SeriesSet) SeriesSet {
// series under the cursor.
var h seriesSetHeap
for _, set := range sets {
+if set == nil {
+continue
+}
if set.Next() {
heap.Push(&h, set)
}
}
return &mergeSeriesSet{
heap: h,
sets: sets,
+querier: querier,
}
}

func (c *mergeSeriesSet) Next() bool {
-// Firstly advance all the current series sets. If any of them have run out
-// we can drop them, otherwise they should be inserted back into the heap.
-for _, set := range c.currentSets {
-if set.Next() {
-heap.Push(&c.heap, set)
+// Run in a loop because the "next" series sets may not be valid anymore.
+// If a remote querier fails, we discard all series sets from that querier.
+// If, for the current label set, all the next series sets come from
+// failed remote storage sources, we want to keep trying with the next label set.
+for {
+// Firstly advance all the current series sets. If any of them have run out
+// we can drop them, otherwise they should be inserted back into the heap.
+for _, set := range c.currentSets {
+if set.Next() {
+heap.Push(&c.heap, set)
+}
+}
+if len(c.heap) == 0 {
+return false
}
-}
-if len(c.heap) == 0 {
-return false
-}

// Now, pop items of the heap that have equal label sets.
c.currentSets = nil
c.currentLabels = c.heap[0].At().Labels()
for len(c.heap) > 0 && labels.Equal(c.currentLabels, c.heap[0].At().Labels()) {
set := heap.Pop(&c.heap).(SeriesSet)
-c.currentSets = append(c.currentSets, set)
+if c.querier != nil && c.querier.IsFailedSet(set) {
+continue
+}
+c.currentSets = append(c.currentSets, set)
+}
+
+// As long as the current set contains at least 1 set,
+// then it should return true.
+if len(c.currentSets) != 0 {
+break
+}
}
return true
}
@@ -109,7 +109,7 @@ func TestMergeSeriesSet(t *testing.T) {
),
},
} {
-merged := NewMergeSeriesSet(tc.input)
+merged := NewMergeSeriesSet(tc.input, nil)
for merged.Next() {
require.True(t, tc.expected.Next())
actualSeries := merged.At()
@@ -262,7 +262,7 @@ func makeMergeSeriesSet(numSeriesSets, numSeries, numSamples int) SeriesSet {
for i := 0; i < numSeriesSets; i++ {
seriesSets = append(seriesSets, makeSeriesSet(numSeries, numSamples))
}
-return NewMergeSeriesSet(seriesSets)
+return NewMergeSeriesSet(seriesSets, nil)
}

func benchmarkDrain(seriesSet SeriesSet, b *testing.B) {
@@ -52,7 +52,7 @@ type Queryable interface {
// Querier provides reading access to time series data.
type Querier interface {
// Select returns a set of series that matches the given label matchers.
-Select(*SelectParams, ...*labels.Matcher) (SeriesSet, error)
+Select(*SelectParams, ...*labels.Matcher) (SeriesSet, error, Warnings)

// LabelValues returns all potential values for a label name.
LabelValues(name string) ([]string, error)
@@ -122,3 +122,5 @@ type SeriesIterator interface {
// Err returns the current error.
Err() error
}
+
+type Warnings []error
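Every `storage.Querier` implementation now has to return the extra `Warnings` value; an implementation with nothing to report simply returns `nil`, as the no-op, TSDB, and remote queriers in this diff do. A self-contained sketch of the new shape, using local stand-ins for the storage package types:

```go
package main

import "fmt"

// Local stand-ins for the storage package types, just to make the shape concrete.
type Warnings []error
type SeriesSet interface{ Next() bool }
type SelectParams struct{ Start, End int64 }
type Matcher struct{ Name, Value string }

type emptySeriesSet struct{}

func (emptySeriesSet) Next() bool { return false }

// myQuerier shows the post-change Select shape: series, hard error, warnings.
type myQuerier struct{}

func (myQuerier) Select(_ *SelectParams, _ ...*Matcher) (SeriesSet, error, Warnings) {
	// Nothing went wrong and nothing is worth warning about.
	return emptySeriesSet{}, nil, nil
}

func main() {
	set, err, warns := myQuerier{}.Select(nil)
	fmt.Println(set.Next(), err, warns)
}
```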
@@ -26,8 +26,8 @@ func NoopQuerier() Querier {
return noopQuerier{}
}

-func (noopQuerier) Select(*SelectParams, ...*labels.Matcher) (SeriesSet, error) {
-return NoopSeriesSet(), nil
+func (noopQuerier) Select(*SelectParams, ...*labels.Matcher) (SeriesSet, error, Warnings) {
+return NoopSeriesSet(), nil, nil
}

func (noopQuerier) LabelValues(name string) ([]string, error) {
@@ -59,10 +59,10 @@ type querier struct {

// Select implements storage.Querier and uses the given matchers to read series
// sets from the Client.
-func (q *querier) Select(p *storage.SelectParams, matchers ...*labels.Matcher) (storage.SeriesSet, error) {
+func (q *querier) Select(p *storage.SelectParams, matchers ...*labels.Matcher) (storage.SeriesSet, error, storage.Warnings) {
query, err := ToQuery(q.mint, q.maxt, matchers, p)
if err != nil {
-return nil, err
+return nil, err, nil
}

remoteReadGauge := remoteReadQueries.WithLabelValues(q.client.Name())
@@ -71,10 +71,10 @@ func (q *querier) Select(p *storage.SelectParams, matchers ...*labels.Matcher) (

res, err := q.client.Read(q.ctx, query)
if err != nil {
-return nil, err
+return nil, err, nil
}

-return FromQueryResult(res), nil
+return FromQueryResult(res), nil, nil
}

// LabelValues implements storage.Querier and is a noop.
@@ -117,13 +117,13 @@ type externalLabelsQuerier struct {
// Select adds equality matchers for all external labels to the list of matchers
// before calling the wrapped storage.Queryable. The added external labels are
// removed from the returned series sets.
-func (q externalLabelsQuerier) Select(p *storage.SelectParams, matchers ...*labels.Matcher) (storage.SeriesSet, error) {
+func (q externalLabelsQuerier) Select(p *storage.SelectParams, matchers ...*labels.Matcher) (storage.SeriesSet, error, storage.Warnings) {
m, added := q.addExternalLabels(matchers)
-s, err := q.Querier.Select(p, m...)
+s, err, warnings := q.Querier.Select(p, m...)
if err != nil {
-return nil, err
+return nil, err, warnings
}
-return newSeriesSetFilter(s, added), nil
+return newSeriesSetFilter(s, added), nil, warnings
}

// PreferLocalStorageFilter returns a QueryableFunc which creates a NoopQuerier
@@ -170,7 +170,7 @@ type requiredMatchersQuerier struct {

// Select returns a NoopSeriesSet if the given matchers don't match the label
// set of the requiredMatchersQuerier. Otherwise it'll call the wrapped querier.
-func (q requiredMatchersQuerier) Select(p *storage.SelectParams, matchers ...*labels.Matcher) (storage.SeriesSet, error) {
+func (q requiredMatchersQuerier) Select(p *storage.SelectParams, matchers ...*labels.Matcher) (storage.SeriesSet, error, storage.Warnings) {
ms := q.requiredMatchers
for _, m := range matchers {
for i, r := range ms {
@@ -184,7 +184,7 @@ func (q requiredMatchersQuerier) Select(p *storage.SelectParams, matchers ...*la
}
}
if len(ms) > 0 {
-return storage.NoopSeriesSet(), nil
+return storage.NoopSeriesSet(), nil, nil
}
return q.Querier.Select(p, matchers...)
}
@@ -225,6 +225,15 @@ func newSeriesSetFilter(ss storage.SeriesSet, toFilter model.LabelSet) storage.S
type seriesSetFilter struct {
storage.SeriesSet
toFilter model.LabelSet
+querier storage.Querier
+}
+
+func (ssf *seriesSetFilter) GetQuerier() storage.Querier {
+return ssf.querier
+}
+
+func (ssf *seriesSetFilter) SetQuerier(querier storage.Querier) {
+ssf.querier = querier
}

func (ssf seriesSetFilter) At() storage.Series {
@@ -42,7 +42,7 @@ func TestExternalLabelsQuerierSelect(t *testing.T) {
externalLabels: model.LabelSet{"region": "europe"},
}
want := newSeriesSetFilter(mockSeriesSet{}, q.externalLabels)
-have, err := q.Select(nil, matchers...)
+have, err, _ := q.Select(nil, matchers...)
if err != nil {
t.Error(err)
}
@@ -157,8 +157,8 @@ type mockSeriesSet struct {
storage.SeriesSet
}

-func (mockQuerier) Select(*storage.SelectParams, ...*labels.Matcher) (storage.SeriesSet, error) {
-return mockSeriesSet{}, nil
+func (mockQuerier) Select(*storage.SelectParams, ...*labels.Matcher) (storage.SeriesSet, error, storage.Warnings) {
+return mockSeriesSet{}, nil, nil
}

func TestPreferLocalStorageFilter(t *testing.T) {
@@ -313,7 +313,7 @@ func TestRequiredLabelsQuerierSelect(t *testing.T) {
requiredMatchers: test.requiredMatchers,
}

-have, err := q.Select(nil, test.matchers...)
+have, err, _ := q.Select(nil, test.matchers...)
if err != nil {
t.Error(err)
}
@@ -140,7 +140,7 @@ func (s *Storage) Querier(ctx context.Context, mint, maxt int64) (storage.Querie
}
queriers = append(queriers, q)
}
-return storage.NewMergeQuerier(queriers), nil
+return storage.NewMergeQuerier(nil, queriers), nil
}

// Close the background processing of the storage queues.
@@ -230,7 +230,7 @@ type querier struct {
q tsdb.Querier
}

-func (q querier) Select(_ *storage.SelectParams, oms ...*labels.Matcher) (storage.SeriesSet, error) {
+func (q querier) Select(_ *storage.SelectParams, oms ...*labels.Matcher) (storage.SeriesSet, error, storage.Warnings) {
ms := make([]tsdbLabels.Matcher, 0, len(oms))

for _, om := range oms {
@@ -238,9 +238,9 @@ func (q querier) Select(_ *storage.SelectParams, oms ...*labels.Matcher) (storag
}
set, err := q.q.Select(ms...)
if err != nil {
-return nil, err
+return nil, err, nil
}
-return seriesSet{set: set}, nil
+return seriesSet{set: set}, nil, nil
}

func (q querier) LabelValues(name string) ([]string, error) { return q.q.LabelValues(name) }
@@ -119,6 +119,14 @@ type response struct {
Data interface{} `json:"data,omitempty"`
ErrorType errorType `json:"errorType,omitempty"`
Error string `json:"error,omitempty"`
+Warnings []string `json:"warnings,omitempty"`
+}
+
+type apiFuncResult struct {
+data interface{}
+err *apiError
+warnings storage.Warnings
+finalizer func()
}

// Enables cross-site script calls.
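The `response` struct carries warnings as plain strings, while the rest of the pipeline passes `storage.Warnings` (`[]error`), so somewhere in `respond` the errors must be flattened to their messages. That part of `respond` is not shown in this excerpt; the conversion presumably looks something like the hypothetical helper below:

```go
package main

import (
	"errors"
	"fmt"
)

// warningsToStrings is a hypothetical helper illustrating the []error -> []string
// flattening the API response needs; it is not code from this commit.
func warningsToStrings(warnings []error) []string {
	if len(warnings) == 0 {
		return nil // keep "warnings" omitted from the JSON when there are none
	}
	out := make([]string, 0, len(warnings))
	for _, w := range warnings {
		out = append(out, w.Error())
	}
	return out
}

func main() {
	fmt.Println(warningsToStrings([]error{errors.New("remote read failed")}))
}
```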
@@ -128,7 +136,7 @@ func setCORS(w http.ResponseWriter) {
}
}

-type apiFunc func(r *http.Request) (interface{}, *apiError, func())
+type apiFunc func(r *http.Request) apiFuncResult

// TSDBAdmin defines the tsdb interfaces used by the v1 API for admin operations.
type TSDBAdmin interface {
@@ -204,16 +212,16 @@ func (api *API) Register(r *route.Router) {
wrap := func(f apiFunc) http.HandlerFunc {
hf := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
setCORS(w)
-data, err, finalizer := f(r)
-if err != nil {
-api.respondError(w, err, data)
-} else if data != nil {
-api.respond(w, data)
+result := f(r)
+if result.err != nil {
+api.respondError(w, result.err, result.data)
+} else if result.data != nil {
+api.respond(w, result.data, result.warnings)
} else {
w.WriteHeader(http.StatusNoContent)
}
-if finalizer != nil {
-finalizer()
+if result.finalizer != nil {
+result.finalizer()
}
})
return api.ready(httputil.CompressionHandler{
@@ -258,17 +266,17 @@ type queryData struct {
Stats *stats.QueryStats `json:"stats,omitempty"`
}

-func (api *API) options(r *http.Request) (interface{}, *apiError, func()) {
-return nil, nil, nil
+func (api *API) options(r *http.Request) apiFuncResult {
+return apiFuncResult{nil, nil, nil, nil}
}

-func (api *API) query(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) query(r *http.Request) apiFuncResult {
var ts time.Time
if t := r.FormValue("time"); t != "" {
var err error
ts, err = parseTime(t)
if err != nil {
-return nil, &apiError{errorBadData, err}, nil
+return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}
} else {
ts = api.now()
@@ -279,7 +287,7 @@ func (api *API) query(r *http.Request) (interface{}, *apiError, func()) {
var cancel context.CancelFunc
timeout, err := parseDuration(to)
if err != nil {
-return nil, &apiError{errorBadData, err}, nil
+return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}

ctx, cancel = context.WithTimeout(ctx, timeout)
@@ -288,12 +296,12 @@ func (api *API) query(r *http.Request) (interface{}, *apiError, func()) {

qry, err := api.QueryEngine.NewInstantQuery(api.Queryable, r.FormValue("query"), ts)
if err != nil {
-return nil, &apiError{errorBadData, err}, nil
+return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}

res := qry.Exec(ctx)
if res.Err != nil {
-return nil, returnAPIError(res.Err), qry.Close
+return apiFuncResult{nil, returnAPIError(res.Err), res.Warnings, qry.Close}
}

// Optional stats field in response if parameter "stats" is not empty.
@@ -302,42 +310,42 @@ func (api *API) query(r *http.Request) (interface{}, *apiError, func()) {
qs = stats.NewQueryStats(qry.Stats())
}

-return &queryData{
+return apiFuncResult{&queryData{
ResultType: res.Value.Type(),
Result: res.Value,
Stats: qs,
-}, nil, qry.Close
+}, nil, res.Warnings, qry.Close}
}

-func (api *API) queryRange(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) queryRange(r *http.Request) apiFuncResult {
start, err := parseTime(r.FormValue("start"))
if err != nil {
-return nil, &apiError{errorBadData, err}, nil
+return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}
end, err := parseTime(r.FormValue("end"))
if err != nil {
-return nil, &apiError{errorBadData, err}, nil
+return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}
if end.Before(start) {
err := errors.New("end timestamp must not be before start time")
-return nil, &apiError{errorBadData, err}, nil
+return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}

step, err := parseDuration(r.FormValue("step"))
if err != nil {
-return nil, &apiError{errorBadData, err}, nil
+return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}

if step <= 0 {
err := errors.New("zero or negative query resolution step widths are not accepted. Try a positive integer")
-return nil, &apiError{errorBadData, err}, nil
+return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}

// For safety, limit the number of returned points per timeseries.
// This is sufficient for 60s resolution for a week or 1h resolution for a year.
if end.Sub(start)/step > 11000 {
err := errors.New("exceeded maximum resolution of 11,000 points per timeseries. Try decreasing the query resolution (?step=XX)")
-return nil, &apiError{errorBadData, err}, nil
+return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}

ctx := r.Context()
@@ -345,7 +353,7 @@ func (api *API) queryRange(r *http.Request) (interface{}, *apiError, func()) {
var cancel context.CancelFunc
timeout, err := parseDuration(to)
if err != nil {
-return nil, &apiError{errorBadData, err}, nil
+return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}

ctx, cancel = context.WithTimeout(ctx, timeout)
@@ -354,12 +362,12 @@ func (api *API) queryRange(r *http.Request) (interface{}, *apiError, func()) {

qry, err := api.QueryEngine.NewRangeQuery(api.Queryable, r.FormValue("query"), start, end, step)
if err != nil {
-return nil, &apiError{errorBadData, err}, nil
+return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
}

res := qry.Exec(ctx)
if res.Err != nil {
-return nil, returnAPIError(res.Err), qry.Close
+return apiFuncResult{nil, returnAPIError(res.Err), res.Warnings, qry.Close}
}

// Optional stats field in response if parameter "stats" is not empty.
@@ -368,11 +376,11 @@ func (api *API) queryRange(r *http.Request) (interface{}, *apiError, func()) {
qs = stats.NewQueryStats(qry.Stats())
}

-return &queryData{
+return apiFuncResult{&queryData{
ResultType: res.Value.Type(),
Result: res.Value,
Stats: qs,
-}, nil, qry.Close
+}, nil, res.Warnings, qry.Close}
}

func returnAPIError(err error) *apiError {
@ -392,39 +400,39 @@ func returnAPIError(err error) *apiError {
|
||||||
return &apiError{errorExec, err}
|
return &apiError{errorExec, err}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) labelNames(r *http.Request) (interface{}, *apiError, func()) {
|
func (api *API) labelNames(r *http.Request) apiFuncResult {
|
||||||
q, err := api.Queryable.Querier(r.Context(), math.MinInt64, math.MaxInt64)
|
q, err := api.Queryable.Querier(r.Context(), math.MinInt64, math.MaxInt64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, &apiError{errorExec, err}, nil
|
return apiFuncResult{nil, &apiError{errorExec, err}, nil, nil}
|
||||||
}
|
}
|
||||||
defer q.Close()
|
defer q.Close()
|
||||||
|
|
||||||
names, err := q.LabelNames()
|
names, err := q.LabelNames()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, &apiError{errorExec, err}, nil
|
return apiFuncResult{nil, &apiError{errorExec, err}, nil, nil}
|
||||||
}
|
}
|
||||||
return names, nil, nil
|
return apiFuncResult{names, nil, nil, nil}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) labelValues(r *http.Request) (interface{}, *apiError, func()) {
|
func (api *API) labelValues(r *http.Request) apiFuncResult {
|
||||||
ctx := r.Context()
|
ctx := r.Context()
|
||||||
name := route.Param(ctx, "name")
|
name := route.Param(ctx, "name")
|
||||||
|
|
||||||
if !model.LabelNameRE.MatchString(name) {
|
if !model.LabelNameRE.MatchString(name) {
|
||||||
return nil, &apiError{errorBadData, fmt.Errorf("invalid label name: %q", name)}, nil
|
return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("invalid label name: %q", name)}, nil, nil}
|
||||||
}
|
}
|
||||||
q, err := api.Queryable.Querier(ctx, math.MinInt64, math.MaxInt64)
|
q, err := api.Queryable.Querier(ctx, math.MinInt64, math.MaxInt64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, &apiError{errorExec, err}, nil
|
return apiFuncResult{nil, &apiError{errorExec, err}, nil, nil}
|
||||||
}
|
}
|
||||||
defer q.Close()
|
defer q.Close()
|
||||||
|
|
||||||
vals, err := q.LabelValues(name)
|
vals, err := q.LabelValues(name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, &apiError{errorExec, err}, nil
|
return apiFuncResult{nil, &apiError{errorExec, err}, nil, nil}
|
||||||
}
|
}
|
||||||
|
|
||||||
return vals, nil, nil
|
return apiFuncResult{vals, nil, nil, nil}
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
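Every handler above now returns a single `apiFuncResult` value instead of the old `(interface{}, *apiError, func())` triple. The struct definition itself is not part of these hunks; a minimal sketch of its assumed shape, inferred from the literals used in this diff and the `res.data`/`res.err` accesses in the tests further down, would be:

```go
// Assumed shape of the per-handler result value; field names are inferred
// from the call sites in this diff and are illustrative, not authoritative.
type apiFuncResult struct {
	data      interface{}      // payload serialized into the "data" field
	err       *apiError        // non-nil on failure
	warnings  storage.Warnings // non-fatal problems to surface to the client
	finalizer func()           // e.g. qry.Close, run after the response is written
}
```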
@@ -432,12 +440,12 @@ var (
 	maxTime = time.Unix(math.MaxInt64/1000-62135596801, 999999999)
 )
 
-func (api *API) series(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) series(r *http.Request) apiFuncResult {
 	if err := r.ParseForm(); err != nil {
-		return nil, &apiError{errorBadData, fmt.Errorf("error parsing form values: %v", err)}, nil
+		return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("error parsing form values: %v", err)}, nil, nil}
 	}
 	if len(r.Form["match[]"]) == 0 {
-		return nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")}, nil
+		return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")}, nil, nil}
 	}
 
 	var start time.Time
@@ -445,7 +453,7 @@ func (api *API) series(r *http.Request) (interface{}, *apiError, func()) {
 		var err error
 		start, err = parseTime(t)
 		if err != nil {
-			return nil, &apiError{errorBadData, err}, nil
+			return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
 		}
 	} else {
 		start = minTime
@@ -456,7 +464,7 @@ func (api *API) series(r *http.Request) (interface{}, *apiError, func()) {
 		var err error
 		end, err = parseTime(t)
 		if err != nil {
-			return nil, &apiError{errorBadData, err}, nil
+			return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
 		}
 	} else {
 		end = maxTime
@@ -466,40 +474,42 @@ func (api *API) series(r *http.Request) (interface{}, *apiError, func()) {
 	for _, s := range r.Form["match[]"] {
 		matchers, err := promql.ParseMetricSelector(s)
 		if err != nil {
-			return nil, &apiError{errorBadData, err}, nil
+			return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
 		}
 		matcherSets = append(matcherSets, matchers)
 	}
 
 	q, err := api.Queryable.Querier(r.Context(), timestamp.FromTime(start), timestamp.FromTime(end))
 	if err != nil {
-		return nil, &apiError{errorExec, err}, nil
+		return apiFuncResult{nil, &apiError{errorExec, err}, nil, nil}
 	}
 	defer q.Close()
 
 	var sets []storage.SeriesSet
+	var warnings storage.Warnings
 	for _, mset := range matcherSets {
-		s, err := q.Select(nil, mset...)
+		s, err, wrn := q.Select(nil, mset...) //TODO
+		warnings = append(warnings, wrn...)
 		if err != nil {
-			return nil, &apiError{errorExec, err}, nil
+			return apiFuncResult{nil, &apiError{errorExec, err}, warnings, nil}
 		}
 		sets = append(sets, s)
 	}
 
-	set := storage.NewMergeSeriesSet(sets)
+	set := storage.NewMergeSeriesSet(sets, nil)
 	metrics := []labels.Labels{}
 	for set.Next() {
 		metrics = append(metrics, set.At().Labels())
 	}
 	if set.Err() != nil {
-		return nil, &apiError{errorExec, set.Err()}, nil
+		return apiFuncResult{nil, &apiError{errorExec, set.Err()}, warnings, nil}
 	}
 
-	return metrics, nil, nil
+	return apiFuncResult{metrics, nil, warnings, nil}
 }
 
-func (api *API) dropSeries(r *http.Request) (interface{}, *apiError, func()) {
-	return nil, &apiError{errorInternal, fmt.Errorf("not implemented")}, nil
+func (api *API) dropSeries(r *http.Request) apiFuncResult {
+	return apiFuncResult{nil, &apiError{errorInternal, fmt.Errorf("not implemented")}, nil, nil}
 }
 
 // Target has the information for one target.
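The `series` handler (like every other `Querier.Select` call in this diff) now receives a third return value carrying warnings. The interface change itself is outside this diff; judging from the call sites, the updated contract presumably looks roughly like this:

```go
// Sketch of the assumed storage contract after this change; the real
// declarations live in the storage package and are not part of this diff.
// Warnings is a plain error slice, which matches the warning.Error()
// calls in respond() further down.
type Warnings []error

type Querier interface {
	// Select returns the matching series, a fatal error (if any), and
	// non-fatal warnings that should still be reported to the caller.
	Select(*SelectParams, ...*labels.Matcher) (SeriesSet, error, Warnings)
	// ...remaining methods (LabelValues, LabelNames, Close) unchanged.
}
```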
@@ -528,7 +538,7 @@ type TargetDiscovery struct {
 	DroppedTargets []*DroppedTarget `json:"droppedTargets"`
 }
 
-func (api *API) targets(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) targets(r *http.Request) apiFuncResult {
 	flatten := func(targets map[string][]*scrape.Target) []*scrape.Target {
 		var n int
 		keys := make([]string, 0, len(targets))
@@ -570,7 +580,7 @@ func (api *API) targets(r *http.Request) (interface{}, *apiError, func()) {
 			DiscoveredLabels: t.DiscoveredLabels().Map(),
 		})
 	}
-	return res, nil, nil
+	return apiFuncResult{res, nil, nil, nil}
 }
 
 func matchLabels(lset labels.Labels, matchers []*labels.Matcher) bool {
@@ -582,18 +592,18 @@ func matchLabels(lset labels.Labels, matchers []*labels.Matcher) bool {
 	return true
 }
 
-func (api *API) targetMetadata(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) targetMetadata(r *http.Request) apiFuncResult {
 	limit := -1
 	if s := r.FormValue("limit"); s != "" {
 		var err error
 		if limit, err = strconv.Atoi(s); err != nil {
-			return nil, &apiError{errorBadData, fmt.Errorf("limit must be a number")}, nil
+			return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("limit must be a number")}, nil, nil}
 		}
 	}
 
 	matchers, err := promql.ParseMetricSelector(r.FormValue("match_target"))
 	if err != nil {
-		return nil, &apiError{errorBadData, err}, nil
+		return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
 	}
 
 	metric := r.FormValue("metric")
@@ -633,9 +643,9 @@ func (api *API) targetMetadata(r *http.Request) (interface{}, *apiError, func())
 		}
 	}
 	if len(res) == 0 {
-		return nil, &apiError{errorNotFound, errors.New("specified metadata not found")}, nil
+		return apiFuncResult{nil, &apiError{errorNotFound, errors.New("specified metadata not found")}, nil, nil}
 	}
-	return res, nil, nil
+	return apiFuncResult{res, nil, nil, nil}
 }
 
 type metricMetadata struct {
@@ -657,7 +667,7 @@ type AlertmanagerTarget struct {
 	URL string `json:"url"`
 }
 
-func (api *API) alertmanagers(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) alertmanagers(r *http.Request) apiFuncResult {
 	urls := api.alertmanagerRetriever.Alertmanagers()
 	droppedURLS := api.alertmanagerRetriever.DroppedAlertmanagers()
 	ams := &AlertmanagerDiscovery{ActiveAlertmanagers: make([]*AlertmanagerTarget, len(urls)), DroppedAlertmanagers: make([]*AlertmanagerTarget, len(droppedURLS))}
@@ -667,7 +677,7 @@ func (api *API) alertmanagers(r *http.Request) (interface{}, *apiError, func())
 	for i, url := range droppedURLS {
 		ams.DroppedAlertmanagers[i] = &AlertmanagerTarget{URL: url.String()}
 	}
-	return ams, nil, nil
+	return apiFuncResult{ams, nil, nil, nil}
 }
 
 // AlertDiscovery has info for all active alerts.
@@ -684,7 +694,7 @@ type Alert struct {
 	Value float64 `json:"value"`
 }
 
-func (api *API) alerts(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) alerts(r *http.Request) apiFuncResult {
 	alertingRules := api.rulesRetriever.AlertingRules()
 	alerts := []*Alert{}
 
@@ -697,7 +707,7 @@ func (api *API) alerts(r *http.Request) (interface{}, *apiError, func()) {
 
 	res := &AlertDiscovery{Alerts: alerts}
 
-	return res, nil, nil
+	return apiFuncResult{res, nil, nil, nil}
 }
 
 func rulesAlertsToAPIAlerts(rulesAlerts []*rules.Alert) []*Alert {
@@ -756,7 +766,7 @@ type recordingRule struct {
 	Type string `json:"type"`
 }
 
-func (api *API) rules(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) rules(r *http.Request) apiFuncResult {
 	ruleGroups := api.rulesRetriever.RuleGroups()
 	res := &RuleDiscovery{RuleGroups: make([]*RuleGroup, len(ruleGroups))}
 	for i, grp := range ruleGroups {
@@ -799,29 +809,29 @@ func (api *API) rules(r *http.Request) (interface{}, *apiError, func()) {
 				}
 			default:
 				err := fmt.Errorf("failed to assert type of rule '%v'", rule.Name())
-				return nil, &apiError{errorInternal, err}, nil
+				return apiFuncResult{nil, &apiError{errorInternal, err}, nil, nil}
 			}
 
 			apiRuleGroup.Rules = append(apiRuleGroup.Rules, enrichedRule)
 		}
 		res.RuleGroups[i] = apiRuleGroup
 	}
-	return res, nil, nil
+	return apiFuncResult{res, nil, nil, nil}
 }
 
 type prometheusConfig struct {
 	YAML string `json:"yaml"`
 }
 
-func (api *API) serveConfig(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) serveConfig(r *http.Request) apiFuncResult {
 	cfg := &prometheusConfig{
 		YAML: api.config().String(),
 	}
-	return cfg, nil, nil
+	return apiFuncResult{cfg, nil, nil, nil}
 }
 
-func (api *API) serveFlags(r *http.Request) (interface{}, *apiError, func()) {
-	return api.flagsMap, nil, nil
+func (api *API) serveFlags(r *http.Request) apiFuncResult {
+	return apiFuncResult{api.flagsMap, nil, nil, nil}
 }
 
 func (api *API) remoteRead(w http.ResponseWriter, r *http.Request) {
@@ -873,7 +883,7 @@ func (api *API) remoteRead(w http.ResponseWriter, r *http.Request) {
 			}
 		}
 
-		set, err := querier.Select(selectParams, filteredMatchers...)
+		set, err, _ := querier.Select(selectParams, filteredMatchers...)
 		if err != nil {
 			http.Error(w, err.Error(), http.StatusInternalServerError)
 			return
@@ -911,20 +921,20 @@ func (api *API) remoteRead(w http.ResponseWriter, r *http.Request) {
 	}
 }
 
-func (api *API) deleteSeries(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) deleteSeries(r *http.Request) apiFuncResult {
 	if !api.enableAdmin {
-		return nil, &apiError{errorUnavailable, errors.New("Admin APIs disabled")}, nil
+		return apiFuncResult{nil, &apiError{errorUnavailable, errors.New("Admin APIs disabled")}, nil, nil}
 	}
 	db := api.db()
 	if db == nil {
-		return nil, &apiError{errorUnavailable, errors.New("TSDB not ready")}, nil
+		return apiFuncResult{nil, &apiError{errorUnavailable, errors.New("TSDB not ready")}, nil, nil}
 	}
 
 	if err := r.ParseForm(); err != nil {
-		return nil, &apiError{errorBadData, fmt.Errorf("error parsing form values: %v", err)}, nil
+		return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("error parsing form values: %v", err)}, nil, nil}
 	}
 	if len(r.Form["match[]"]) == 0 {
-		return nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")}, nil
+		return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("no match[] parameter provided")}, nil, nil}
 	}
 
 	var start time.Time
@@ -932,7 +942,7 @@ func (api *API) deleteSeries(r *http.Request) (interface{}, *apiError, func()) {
 		var err error
 		start, err = parseTime(t)
 		if err != nil {
-			return nil, &apiError{errorBadData, err}, nil
+			return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
 		}
 	} else {
 		start = minTime
@@ -943,7 +953,7 @@ func (api *API) deleteSeries(r *http.Request) (interface{}, *apiError, func()) {
 		var err error
 		end, err = parseTime(t)
 		if err != nil {
-			return nil, &apiError{errorBadData, err}, nil
+			return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
 		}
 	} else {
 		end = maxTime
@@ -952,7 +962,7 @@ func (api *API) deleteSeries(r *http.Request) (interface{}, *apiError, func()) {
 	for _, s := range r.Form["match[]"] {
 		matchers, err := promql.ParseMetricSelector(s)
 		if err != nil {
-			return nil, &apiError{errorBadData, err}, nil
+			return apiFuncResult{nil, &apiError{errorBadData, err}, nil, nil}
 		}
 
 		var selector tsdbLabels.Selector
@@ -961,16 +971,16 @@ func (api *API) deleteSeries(r *http.Request) (interface{}, *apiError, func()) {
 		}
 
 		if err := db.Delete(timestamp.FromTime(start), timestamp.FromTime(end), selector...); err != nil {
-			return nil, &apiError{errorInternal, err}, nil
+			return apiFuncResult{nil, &apiError{errorInternal, err}, nil, nil}
 		}
 	}
 
-	return nil, nil, nil
+	return apiFuncResult{nil, nil, nil, nil}
 }
 
-func (api *API) snapshot(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) snapshot(r *http.Request) apiFuncResult {
 	if !api.enableAdmin {
-		return nil, &apiError{errorUnavailable, errors.New("Admin APIs disabled")}, nil
+		return apiFuncResult{nil, &apiError{errorUnavailable, errors.New("Admin APIs disabled")}, nil, nil}
 	}
 	var (
 		skipHead bool
@@ -979,13 +989,13 @@ func (api *API) snapshot(r *http.Request) (interface{}, *apiError, func()) {
 	if r.FormValue("skip_head") != "" {
 		skipHead, err = strconv.ParseBool(r.FormValue("skip_head"))
 		if err != nil {
-			return nil, &apiError{errorBadData, fmt.Errorf("unable to parse boolean 'skip_head' argument: %v", err)}, nil
+			return apiFuncResult{nil, &apiError{errorBadData, fmt.Errorf("unable to parse boolean 'skip_head' argument: %v", err)}, nil, nil}
 		}
 	}
 
 	db := api.db()
 	if db == nil {
-		return nil, &apiError{errorUnavailable, errors.New("TSDB not ready")}, nil
+		return apiFuncResult{nil, &apiError{errorUnavailable, errors.New("TSDB not ready")}, nil, nil}
 	}
 
 	var (
@@ -996,31 +1006,31 @@ func (api *API) snapshot(r *http.Request) (interface{}, *apiError, func()) {
 		dir = filepath.Join(snapdir, name)
 	)
 	if err := os.MkdirAll(dir, 0777); err != nil {
-		return nil, &apiError{errorInternal, fmt.Errorf("create snapshot directory: %s", err)}, nil
+		return apiFuncResult{nil, &apiError{errorInternal, fmt.Errorf("create snapshot directory: %s", err)}, nil, nil}
 	}
 	if err := db.Snapshot(dir, !skipHead); err != nil {
-		return nil, &apiError{errorInternal, fmt.Errorf("create snapshot: %s", err)}, nil
+		return apiFuncResult{nil, &apiError{errorInternal, fmt.Errorf("create snapshot: %s", err)}, nil, nil}
 	}
 
-	return struct {
+	return apiFuncResult{struct {
 		Name string `json:"name"`
-	}{name}, nil, nil
+	}{name}, nil, nil, nil}
 }
 
-func (api *API) cleanTombstones(r *http.Request) (interface{}, *apiError, func()) {
+func (api *API) cleanTombstones(r *http.Request) apiFuncResult {
 	if !api.enableAdmin {
-		return nil, &apiError{errorUnavailable, errors.New("Admin APIs disabled")}, nil
+		return apiFuncResult{nil, &apiError{errorUnavailable, errors.New("Admin APIs disabled")}, nil, nil}
 	}
 	db := api.db()
 	if db == nil {
-		return nil, &apiError{errorUnavailable, errors.New("TSDB not ready")}, nil
+		return apiFuncResult{nil, &apiError{errorUnavailable, errors.New("TSDB not ready")}, nil, nil}
 	}
 
 	if err := db.CleanTombstones(); err != nil {
-		return nil, &apiError{errorInternal, err}, nil
+		return apiFuncResult{nil, &apiError{errorInternal, err}, nil, nil}
 	}
 
-	return nil, nil, nil
+	return apiFuncResult{nil, nil, nil, nil}
 }
 
 func convertMatcher(m *labels.Matcher) tsdbLabels.Matcher {
@@ -1075,11 +1085,17 @@ func mergeLabels(primary, secondary []*prompb.Label) []*prompb.Label {
 	return result
 }
 
-func (api *API) respond(w http.ResponseWriter, data interface{}) {
+func (api *API) respond(w http.ResponseWriter, data interface{}, warnings storage.Warnings) {
+	statusMessage := statusSuccess
+	var warningStrings []string
+	for _, warning := range warnings {
+		warningStrings = append(warningStrings, warning.Error())
+	}
 	json := jsoniter.ConfigCompatibleWithStandardLibrary
 	b, err := json.Marshal(&response{
-		Status: statusSuccess,
+		Status: statusMessage,
 		Data:   data,
+		Warnings: warningStrings,
 	})
 	if err != nil {
 		level.Error(api.logger).Log("msg", "error marshaling json response", "err", err)
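respond() now converts each warning to its string form and attaches the list to the envelope. The `response` struct itself is not shown in this hunk; its assumed shape, matching the field names used in the literal above, would be roughly:

```go
// Assumed shape of the JSON envelope struct used by respond(); only the
// Warnings field is new here, and the json tags are illustrative.
type response struct {
	Status    status      `json:"status"`
	Data      interface{} `json:"data,omitempty"`
	ErrorType errorType   `json:"errorType,omitempty"`
	Error     string      `json:"error,omitempty"`
	Warnings  []string    `json:"warnings,omitempty"`
}
```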
web/api/v1/api_test.go

@@ -349,9 +349,9 @@ func TestLabelNames(t *testing.T) {
 		ctx := context.Background()
 		req, err := request(method)
 		testutil.Ok(t, err)
-		resp, apiErr, _ := api.labelNames(req.WithContext(ctx))
-		assertAPIError(t, apiErr, "")
-		assertAPIResponse(t, resp, []string{"__name__", "baz", "foo", "foo1", "foo2", "xyz"})
+		res := api.labelNames(req.WithContext(ctx))
+		assertAPIError(t, res.err, "")
+		assertAPIResponse(t, res.data, []string{"__name__", "baz", "foo", "foo1", "foo2", "xyz"})
 	}
 }
 
@@ -379,7 +379,7 @@ func setupRemote(s storage.Storage) *httptest.Server {
 		}
 		defer querier.Close()
 
-		set, err := querier.Select(selectParams, matchers...)
+		set, err, _ := querier.Select(selectParams, matchers...)
 		if err != nil {
 			http.Error(w, err.Error(), http.StatusInternalServerError)
 			return
@@ -857,9 +857,9 @@ func testEndpoints(t *testing.T, api *API, testLabelAPI bool) {
 			if err != nil {
 				t.Fatal(err)
 			}
-			resp, apiErr, _ := test.endpoint(req.WithContext(ctx))
-			assertAPIError(t, apiErr, test.errType)
-			assertAPIResponse(t, resp, test.response)
+			res := test.endpoint(req.WithContext(ctx))
+			assertAPIError(t, res.err, test.errType)
+			assertAPIResponse(t, res.data, test.response)
 		}
 	}
 }
@@ -1202,8 +1202,8 @@ func TestAdminEndpoints(t *testing.T) {
 			if err != nil {
 				t.Fatalf("Error when creating test request: %s", err)
 			}
-			_, apiErr, _ := endpoint(req)
-			assertAPIError(t, apiErr, tc.errType)
+			res := endpoint(req)
+			assertAPIError(t, res.err, tc.errType)
 		})
 	}
 }
@@ -1211,7 +1211,7 @@ func TestAdminEndpoints(t *testing.T) {
 func TestRespondSuccess(t *testing.T) {
 	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		api := API{}
-		api.respond(w, "test")
+		api.respond(w, "test", nil)
 	}))
 	defer s.Close()
 
@@ -1502,7 +1502,7 @@ func TestRespond(t *testing.T) {
 	for _, c := range cases {
 		s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 			api := API{}
-			api.respond(w, c.response)
+			api.respond(w, c.response, nil)
 		}))
 		defer s.Close()
 
@@ -1543,6 +1543,6 @@ func BenchmarkRespond(b *testing.B) {
 	b.ResetTimer()
 	api := API{}
 	for n := 0; n < b.N; n++ {
-		api.respond(&testResponseWriter, response)
+		api.respond(&testResponseWriter, response, nil)
 	}
 }
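The updated tests all pass a nil warnings list through respond(). For illustration only, a hypothetical test (not part of this change, and relying on the assumed envelope shape sketched earlier) that exercises the new parameter with a non-nil value might look like:

```go
package v1

import (
	"encoding/json"
	"errors"
	"net/http/httptest"
	"testing"

	"github.com/prometheus/prometheus/storage"
)

// Hypothetical sketch: call respond() with one warning and check that the
// serialized envelope carries a "warnings" field alongside the data.
func TestRespondWithWarnings(t *testing.T) {
	rec := httptest.NewRecorder()
	api := API{}
	api.respond(rec, "test", storage.Warnings{errors.New("partial data")})

	var body map[string]interface{}
	if err := json.Unmarshal(rec.Body.Bytes(), &body); err != nil {
		t.Fatal(err)
	}
	if body["warnings"] == nil {
		t.Fatalf("expected a warnings field in the response, got %v", body)
	}
}
```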
web/federate.go

@@ -37,6 +37,10 @@ var (
 		Name: "prometheus_web_federation_errors_total",
 		Help: "Total number of errors that occurred while sending federation responses.",
 	})
+	federationWarnings = prometheus.NewCounter(prometheus.CounterOpts{
+		Name: "prometheus_web_federation_warnings_total",
+		Help: "Total number of warnings that occurred while sending federation responses.",
+	})
 )
 
 func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
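A second counter is declared here for federation warnings. Its registration is not part of this hunk; presumably it is registered next to the existing error counter, along these lines (a sketch under that assumption, not shown in the diff):

```go
// Hypothetical registration sketch: the new counter only appears on
// /metrics once registered, e.g. alongside federationErrors.
func init() {
	prometheus.MustRegister(federationErrors, federationWarnings)
}
```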
@@ -83,7 +87,11 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
 
 	var sets []storage.SeriesSet
 	for _, mset := range matcherSets {
-		s, err := q.Select(params, mset...)
+		s, err, wrns := q.Select(params, mset...)
+		if wrns != nil {
+			level.Debug(h.logger).Log("msg", "federation select returned warnings", "warnings", wrns)
+			federationWarnings.Add(float64(len(wrns)))
+		}
 		if err != nil {
 			federationErrors.Inc()
 			http.Error(w, err.Error(), http.StatusInternalServerError)
@@ -92,7 +100,7 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
 		sets = append(sets, s)
 	}
 
-	set := storage.NewMergeSeriesSet(sets)
+	set := storage.NewMergeSeriesSet(sets, nil)
 	it := storage.NewBuffer(int64(promql.LookbackDelta / 1e6))
 	for set.Next() {
 		s := set.At()