2016-04-13 07:08:22 -07:00
|
|
|
// Copyright 2016 The Prometheus Authors
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
2015-06-04 09:07:57 -07:00
|
|
|
package v1
|
|
|
|
|
|
|
|
import (
|
2017-10-23 13:28:17 -07:00
|
|
|
"bytes"
|
2017-10-24 21:21:42 -07:00
|
|
|
"context"
|
2015-06-04 09:07:57 -07:00
|
|
|
"encoding/json"
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
2015-07-02 01:37:19 -07:00
|
|
|
"io/ioutil"
|
2018-02-08 09:28:55 -08:00
|
|
|
"math"
|
2015-06-04 09:07:57 -07:00
|
|
|
"net/http"
|
|
|
|
"net/http/httptest"
|
|
|
|
"net/url"
|
2018-11-15 05:22:16 -08:00
|
|
|
"os"
|
2015-06-04 09:07:57 -07:00
|
|
|
"reflect"
|
2017-11-10 16:53:48 -08:00
|
|
|
"strings"
|
2015-06-04 09:07:57 -07:00
|
|
|
"testing"
|
|
|
|
"time"
|
|
|
|
|
2018-09-25 11:14:00 -07:00
|
|
|
"github.com/go-kit/kit/log"
|
2017-10-23 13:28:17 -07:00
|
|
|
"github.com/gogo/protobuf/proto"
|
|
|
|
"github.com/golang/snappy"
|
2018-06-16 10:26:37 -07:00
|
|
|
config_util "github.com/prometheus/common/config"
|
2015-08-20 08:18:46 -07:00
|
|
|
"github.com/prometheus/common/model"
|
2018-06-16 10:26:37 -07:00
|
|
|
"github.com/prometheus/common/promlog"
|
2015-09-24 08:07:11 -07:00
|
|
|
"github.com/prometheus/common/route"
|
2015-06-04 09:07:57 -07:00
|
|
|
|
2017-05-11 08:09:24 -07:00
|
|
|
"github.com/prometheus/prometheus/config"
|
2018-09-25 12:07:34 -07:00
|
|
|
"github.com/prometheus/prometheus/pkg/gate"
|
2016-12-30 01:43:44 -08:00
|
|
|
"github.com/prometheus/prometheus/pkg/labels"
|
|
|
|
"github.com/prometheus/prometheus/pkg/timestamp"
|
2017-10-23 13:28:17 -07:00
|
|
|
"github.com/prometheus/prometheus/prompb"
|
2015-06-04 09:07:57 -07:00
|
|
|
"github.com/prometheus/prometheus/promql"
|
2018-03-25 09:50:34 -07:00
|
|
|
"github.com/prometheus/prometheus/rules"
|
2018-02-01 01:55:07 -08:00
|
|
|
"github.com/prometheus/prometheus/scrape"
|
2018-05-08 01:48:13 -07:00
|
|
|
"github.com/prometheus/prometheus/storage"
|
2017-10-23 13:28:17 -07:00
|
|
|
"github.com/prometheus/prometheus/storage/remote"
|
2018-03-25 09:50:34 -07:00
|
|
|
"github.com/prometheus/prometheus/util/testutil"
|
2018-11-15 05:22:16 -08:00
|
|
|
tsdbLabels "github.com/prometheus/tsdb/labels"
|
2015-06-04 09:07:57 -07:00
|
|
|
)
|
|
|
|
|
2018-02-21 09:26:18 -08:00
|
|
|
// testTargetRetriever is a stub targetRetriever returning fixed active and
// dropped scrape targets for API endpoint tests.
type testTargetRetriever struct{}
|
2016-12-02 04:31:43 -08:00
|
|
|
|
2018-09-26 02:20:56 -07:00
|
|
|
func (t testTargetRetriever) TargetsActive() map[string][]*scrape.Target {
|
|
|
|
return map[string][]*scrape.Target{
|
|
|
|
"test": {
|
|
|
|
scrape.NewTarget(
|
|
|
|
labels.FromMap(map[string]string{
|
|
|
|
model.SchemeLabel: "http",
|
|
|
|
model.AddressLabel: "example.com:8080",
|
|
|
|
model.MetricsPathLabel: "/metrics",
|
2018-10-25 01:19:20 -07:00
|
|
|
model.JobLabel: "test",
|
2018-09-26 02:20:56 -07:00
|
|
|
}),
|
|
|
|
nil,
|
|
|
|
url.Values{},
|
|
|
|
),
|
|
|
|
},
|
2018-10-25 01:19:20 -07:00
|
|
|
"blackbox": {
|
|
|
|
scrape.NewTarget(
|
|
|
|
labels.FromMap(map[string]string{
|
|
|
|
model.SchemeLabel: "http",
|
|
|
|
model.AddressLabel: "localhost:9115",
|
|
|
|
model.MetricsPathLabel: "/probe",
|
|
|
|
model.JobLabel: "blackbox",
|
|
|
|
}),
|
|
|
|
nil,
|
|
|
|
url.Values{"target": []string{"example.com"}},
|
|
|
|
),
|
|
|
|
},
|
2018-02-21 09:26:18 -08:00
|
|
|
}
|
|
|
|
}
|
2018-09-26 02:20:56 -07:00
|
|
|
func (t testTargetRetriever) TargetsDropped() map[string][]*scrape.Target {
|
|
|
|
return map[string][]*scrape.Target{
|
2018-10-25 01:19:20 -07:00
|
|
|
"blackbox": {
|
2018-09-26 02:20:56 -07:00
|
|
|
scrape.NewTarget(
|
|
|
|
nil,
|
|
|
|
labels.FromMap(map[string]string{
|
|
|
|
model.AddressLabel: "http://dropped.example.com:9115",
|
|
|
|
model.MetricsPathLabel: "/probe",
|
|
|
|
model.SchemeLabel: "http",
|
|
|
|
model.JobLabel: "blackbox",
|
|
|
|
}),
|
|
|
|
url.Values{},
|
|
|
|
),
|
|
|
|
},
|
2018-02-21 09:26:18 -08:00
|
|
|
}
|
2016-12-02 04:31:43 -08:00
|
|
|
}
|
|
|
|
|
2018-02-21 01:00:07 -08:00
|
|
|
// testAlertmanagerRetriever is a stub alertmanagerRetriever returning fixed
// active and dropped Alertmanager URLs for API endpoint tests.
type testAlertmanagerRetriever struct{}
|
2017-01-13 01:20:11 -08:00
|
|
|
|
2018-02-21 01:00:07 -08:00
|
|
|
func (t testAlertmanagerRetriever) Alertmanagers() []*url.URL {
|
|
|
|
return []*url.URL{
|
|
|
|
{
|
|
|
|
Scheme: "http",
|
|
|
|
Host: "alertmanager.example.com:8080",
|
|
|
|
Path: "/api/v1/alerts",
|
|
|
|
},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (t testAlertmanagerRetriever) DroppedAlertmanagers() []*url.URL {
|
|
|
|
return []*url.URL{
|
|
|
|
{
|
|
|
|
Scheme: "http",
|
|
|
|
Host: "dropped.alertmanager.example.com:8080",
|
|
|
|
Path: "/api/v1/alerts",
|
|
|
|
},
|
|
|
|
}
|
2016-12-02 04:31:43 -08:00
|
|
|
}
|
|
|
|
|
2018-06-27 00:15:17 -07:00
|
|
|
// rulesRetrieverMock implements the rulesRetriever interface for tests,
// producing static alerting and recording rules. It carries the *testing.T
// so rule-construction failures can abort the test run.
type rulesRetrieverMock struct {
	testing *testing.T
}
|
|
|
|
|
2018-06-27 00:15:17 -07:00
|
|
|
func (m rulesRetrieverMock) AlertingRules() []*rules.AlertingRule {
|
2018-03-25 09:50:34 -07:00
|
|
|
expr1, err := promql.ParseExpr(`absent(test_metric3) != 1`)
|
|
|
|
if err != nil {
|
2018-06-27 00:15:17 -07:00
|
|
|
m.testing.Fatalf("unable to parse alert expression: %s", err)
|
2018-03-25 09:50:34 -07:00
|
|
|
}
|
|
|
|
expr2, err := promql.ParseExpr(`up == 1`)
|
|
|
|
if err != nil {
|
2018-06-27 00:15:17 -07:00
|
|
|
m.testing.Fatalf("Unable to parse alert expression: %s", err)
|
2018-03-25 09:50:34 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
rule1 := rules.NewAlertingRule(
|
|
|
|
"test_metric3",
|
|
|
|
expr1,
|
|
|
|
time.Second,
|
|
|
|
labels.Labels{},
|
|
|
|
labels.Labels{},
|
2018-08-02 03:18:24 -07:00
|
|
|
true,
|
2018-03-25 09:50:34 -07:00
|
|
|
log.NewNopLogger(),
|
|
|
|
)
|
|
|
|
rule2 := rules.NewAlertingRule(
|
|
|
|
"test_metric4",
|
|
|
|
expr2,
|
|
|
|
time.Second,
|
|
|
|
labels.Labels{},
|
|
|
|
labels.Labels{},
|
2018-08-02 03:18:24 -07:00
|
|
|
true,
|
2018-03-25 09:50:34 -07:00
|
|
|
log.NewNopLogger(),
|
|
|
|
)
|
|
|
|
var r []*rules.AlertingRule
|
|
|
|
r = append(r, rule1)
|
|
|
|
r = append(r, rule2)
|
|
|
|
return r
|
|
|
|
}
|
|
|
|
|
2018-06-27 00:15:17 -07:00
|
|
|
func (m rulesRetrieverMock) RuleGroups() []*rules.Group {
|
|
|
|
var ar rulesRetrieverMock
|
2018-03-25 09:50:34 -07:00
|
|
|
arules := ar.AlertingRules()
|
2018-06-27 00:15:17 -07:00
|
|
|
storage := testutil.NewStorage(m.testing)
|
2018-03-25 09:50:34 -07:00
|
|
|
defer storage.Close()
|
|
|
|
|
2018-10-02 04:59:19 -07:00
|
|
|
engineOpts := promql.EngineOpts{
|
|
|
|
Logger: nil,
|
|
|
|
Reg: nil,
|
|
|
|
MaxConcurrent: 10,
|
|
|
|
MaxSamples: 10,
|
|
|
|
Timeout: 100 * time.Second,
|
|
|
|
}
|
|
|
|
|
|
|
|
engine := promql.NewEngine(engineOpts)
|
2018-03-25 09:50:34 -07:00
|
|
|
opts := &rules.ManagerOptions{
|
|
|
|
QueryFunc: rules.EngineQueryFunc(engine, storage),
|
|
|
|
Appendable: storage,
|
|
|
|
Context: context.Background(),
|
|
|
|
Logger: log.NewNopLogger(),
|
|
|
|
}
|
|
|
|
|
|
|
|
var r []rules.Rule
|
|
|
|
|
|
|
|
for _, alertrule := range arules {
|
|
|
|
r = append(r, alertrule)
|
|
|
|
}
|
|
|
|
|
2018-06-27 00:15:17 -07:00
|
|
|
recordingExpr, err := promql.ParseExpr(`vector(1)`)
|
|
|
|
if err != nil {
|
|
|
|
m.testing.Fatalf("unable to parse alert expression: %s", err)
|
|
|
|
}
|
|
|
|
recordingRule := rules.NewRecordingRule("recording-rule-1", recordingExpr, labels.Labels{})
|
|
|
|
r = append(r, recordingRule)
|
|
|
|
|
2018-08-02 03:18:24 -07:00
|
|
|
group := rules.NewGroup("grp", "/path/to/file", time.Second, r, false, opts)
|
2018-03-25 09:50:34 -07:00
|
|
|
return []*rules.Group{group}
|
|
|
|
}
|
|
|
|
|
2017-05-11 08:09:24 -07:00
|
|
|
// samplePrometheusCfg is a minimal, fully-empty Prometheus configuration used
// to verify that the config endpoint serves back the loaded configuration.
var samplePrometheusCfg = config.Config{
	GlobalConfig:       config.GlobalConfig{},
	AlertingConfig:     config.AlertingConfig{},
	RuleFiles:          []string{},
	ScrapeConfigs:      []*config.ScrapeConfig{},
	RemoteWriteConfigs: []*config.RemoteWriteConfig{},
	RemoteReadConfigs:  []*config.RemoteReadConfig{},
}
|
|
|
|
|
api: Added v1/status/flags endpoint. (#3864)
Endpoint URL: /api/v1/status/flags
Example Output:
```json
{
"status": "success",
"data": {
"alertmanager.notification-queue-capacity": "10000",
"alertmanager.timeout": "10s",
"completion-bash": "false",
"completion-script-bash": "false",
"completion-script-zsh": "false",
"config.file": "my_cool_prometheus.yaml",
"help": "false",
"help-long": "false",
"help-man": "false",
"log.level": "info",
"query.lookback-delta": "5m",
"query.max-concurrency": "20",
"query.timeout": "2m",
"storage.tsdb.max-block-duration": "36h",
"storage.tsdb.min-block-duration": "2h",
"storage.tsdb.no-lockfile": "false",
"storage.tsdb.path": "data/",
"storage.tsdb.retention": "15d",
"version": "false",
"web.console.libraries": "console_libraries",
"web.console.templates": "consoles",
"web.enable-admin-api": "false",
"web.enable-lifecycle": "false",
"web.external-url": "",
"web.listen-address": "0.0.0.0:9090",
"web.max-connections": "512",
"web.read-timeout": "5m",
"web.route-prefix": "/",
"web.user-assets": ""
}
}
```
Signed-off-by: Bartek Plotka <bwplotka@gmail.com>
2018-02-21 00:49:02 -08:00
|
|
|
// sampleFlagMap is the flag set expected from the /status/flags endpoint
// in the tests below.
var sampleFlagMap = map[string]string{
	"flag1": "value1",
	"flag2": "value2",
}
|
|
|
|
|
2015-06-04 09:07:57 -07:00
|
|
|
func TestEndpoints(t *testing.T) {
|
|
|
|
suite, err := promql.NewTest(t, `
|
|
|
|
load 1m
|
|
|
|
test_metric1{foo="bar"} 0+100x100
|
|
|
|
test_metric1{foo="boo"} 1+0x100
|
|
|
|
test_metric2{foo="boo"} 1+0x100
|
|
|
|
`)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
defer suite.Close()
|
|
|
|
|
|
|
|
if err := suite.Run(); err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
|
2016-12-30 01:43:44 -08:00
|
|
|
now := time.Now()
|
2016-12-02 04:31:43 -08:00
|
|
|
|
2018-06-27 00:15:17 -07:00
|
|
|
var algr rulesRetrieverMock
|
|
|
|
algr.testing = t
|
|
|
|
algr.AlertingRules()
|
|
|
|
algr.RuleGroups()
|
2018-03-25 09:50:34 -07:00
|
|
|
|
2018-06-27 00:15:17 -07:00
|
|
|
t.Run("local", func(t *testing.T) {
|
|
|
|
var algr rulesRetrieverMock
|
|
|
|
algr.testing = t
|
2018-03-25 09:50:34 -07:00
|
|
|
|
|
|
|
algr.AlertingRules()
|
|
|
|
|
|
|
|
algr.RuleGroups()
|
|
|
|
|
2018-06-16 10:26:37 -07:00
|
|
|
api := &API{
|
|
|
|
Queryable: suite.Storage(),
|
|
|
|
QueryEngine: suite.QueryEngine(),
|
|
|
|
targetRetriever: testTargetRetriever{},
|
|
|
|
alertmanagerRetriever: testAlertmanagerRetriever{},
|
2018-11-19 02:21:14 -08:00
|
|
|
flagsMap: sampleFlagMap,
|
2018-10-16 00:41:45 -07:00
|
|
|
now: func() time.Time { return now },
|
|
|
|
config: func() config.Config { return samplePrometheusCfg },
|
|
|
|
ready: func(f http.HandlerFunc) http.HandlerFunc { return f },
|
|
|
|
rulesRetriever: algr,
|
2018-06-16 10:26:37 -07:00
|
|
|
}
|
2016-12-02 04:31:43 -08:00
|
|
|
|
2018-06-16 10:26:37 -07:00
|
|
|
testEndpoints(t, api, true)
|
|
|
|
})
|
2017-01-13 01:20:11 -08:00
|
|
|
|
2018-06-16 10:26:37 -07:00
|
|
|
// Run all the API tests against a API that is wired to forward queries via
|
|
|
|
// the remote read client to a test server, which in turn sends them to the
|
2018-06-18 09:32:44 -07:00
|
|
|
// data from the test suite.
|
2018-06-16 10:26:37 -07:00
|
|
|
t.Run("remote", func(t *testing.T) {
|
|
|
|
server := setupRemote(suite.Storage())
|
|
|
|
defer server.Close()
|
|
|
|
|
|
|
|
u, err := url.Parse(server.URL)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
al := promlog.AllowedLevel{}
|
|
|
|
al.Set("debug")
|
2018-11-23 05:22:40 -08:00
|
|
|
af := promlog.AllowedFormat{}
|
|
|
|
al.Set("logfmt")
|
|
|
|
promlogConfig := promlog.Config{
|
|
|
|
Level: &al,
|
|
|
|
Format: &af,
|
|
|
|
}
|
|
|
|
|
|
|
|
remote := remote.NewStorage(promlog.New(&promlogConfig), func() (int64, error) {
|
2018-06-16 10:26:37 -07:00
|
|
|
return 0, nil
|
|
|
|
}, 1*time.Second)
|
|
|
|
|
|
|
|
err = remote.ApplyConfig(&config.Config{
|
|
|
|
RemoteReadConfigs: []*config.RemoteReadConfig{
|
|
|
|
{
|
|
|
|
URL: &config_util.URL{URL: u},
|
|
|
|
RemoteTimeout: model.Duration(1 * time.Second),
|
|
|
|
ReadRecent: true,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
|
2018-06-27 00:15:17 -07:00
|
|
|
var algr rulesRetrieverMock
|
|
|
|
algr.testing = t
|
2018-03-25 09:50:34 -07:00
|
|
|
|
|
|
|
algr.AlertingRules()
|
|
|
|
|
|
|
|
algr.RuleGroups()
|
|
|
|
|
2018-06-16 10:26:37 -07:00
|
|
|
api := &API{
|
|
|
|
Queryable: remote,
|
|
|
|
QueryEngine: suite.QueryEngine(),
|
|
|
|
targetRetriever: testTargetRetriever{},
|
|
|
|
alertmanagerRetriever: testAlertmanagerRetriever{},
|
2018-11-19 02:21:14 -08:00
|
|
|
flagsMap: sampleFlagMap,
|
2018-10-16 00:41:45 -07:00
|
|
|
now: func() time.Time { return now },
|
|
|
|
config: func() config.Config { return samplePrometheusCfg },
|
|
|
|
ready: func(f http.HandlerFunc) http.HandlerFunc { return f },
|
|
|
|
rulesRetriever: algr,
|
2018-06-16 10:26:37 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
testEndpoints(t, api, false)
|
|
|
|
})
|
2018-11-19 02:21:14 -08:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestLabelNames(t *testing.T) {
|
|
|
|
// TestEndpoints doesn't have enough label names to test api.labelNames
|
|
|
|
// endpoint properly. Hence we test it separately.
|
|
|
|
suite, err := promql.NewTest(t, `
|
|
|
|
load 1m
|
|
|
|
test_metric1{foo1="bar", baz="abc"} 0+100x100
|
|
|
|
test_metric1{foo2="boo"} 1+0x100
|
|
|
|
test_metric2{foo="boo"} 1+0x100
|
|
|
|
test_metric2{foo="boo", xyz="qwerty"} 1+0x100
|
|
|
|
`)
|
|
|
|
testutil.Ok(t, err)
|
|
|
|
defer suite.Close()
|
|
|
|
testutil.Ok(t, suite.Run())
|
|
|
|
|
|
|
|
api := &API{
|
|
|
|
Queryable: suite.Storage(),
|
|
|
|
}
|
|
|
|
request := func(m string) (*http.Request, error) {
|
|
|
|
if m == http.MethodPost {
|
|
|
|
r, err := http.NewRequest(m, "http://example.com", nil)
|
|
|
|
r.Header.Set("Content-Type", "application/x-www-form-urlencoded")
|
|
|
|
return r, err
|
|
|
|
}
|
|
|
|
return http.NewRequest(m, "http://example.com", nil)
|
|
|
|
}
|
|
|
|
for _, method := range []string{http.MethodGet, http.MethodPost} {
|
|
|
|
ctx := context.Background()
|
|
|
|
req, err := request(method)
|
|
|
|
testutil.Ok(t, err)
|
|
|
|
resp, apiErr, _ := api.labelNames(req.WithContext(ctx))
|
|
|
|
assertAPIError(t, apiErr, "")
|
|
|
|
assertAPIResponse(t, resp, []string{"__name__", "baz", "foo", "foo1", "foo2", "xyz"})
|
|
|
|
}
|
2018-06-16 10:26:37 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
func setupRemote(s storage.Storage) *httptest.Server {
|
|
|
|
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
|
|
req, err := remote.DecodeReadRequest(r)
|
|
|
|
if err != nil {
|
|
|
|
http.Error(w, err.Error(), http.StatusBadRequest)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
resp := prompb.ReadResponse{
|
|
|
|
Results: make([]*prompb.QueryResult, len(req.Queries)),
|
|
|
|
}
|
|
|
|
for i, query := range req.Queries {
|
2018-07-17 20:58:00 -07:00
|
|
|
from, through, matchers, selectParams, err := remote.FromQuery(query)
|
2018-06-16 10:26:37 -07:00
|
|
|
if err != nil {
|
|
|
|
http.Error(w, err.Error(), http.StatusBadRequest)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
querier, err := s.Querier(r.Context(), from, through)
|
|
|
|
if err != nil {
|
|
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
defer querier.Close()
|
|
|
|
|
2018-07-17 20:58:00 -07:00
|
|
|
set, err := querier.Select(selectParams, matchers...)
|
2018-06-16 10:26:37 -07:00
|
|
|
if err != nil {
|
|
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
2018-09-05 06:50:50 -07:00
|
|
|
resp.Results[i], err = remote.ToQueryResult(set, 1e6)
|
2018-06-16 10:26:37 -07:00
|
|
|
if err != nil {
|
|
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := remote.EncodeReadResponse(&resp, w); err != nil {
|
|
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
})
|
|
|
|
|
|
|
|
return httptest.NewServer(handler)
|
|
|
|
}
|
|
|
|
|
|
|
|
func testEndpoints(t *testing.T, api *API, testLabelAPI bool) {
|
2016-12-30 01:43:44 -08:00
|
|
|
start := time.Unix(0, 0)
|
|
|
|
|
2018-06-16 10:26:37 -07:00
|
|
|
type test struct {
|
2015-06-04 09:07:57 -07:00
|
|
|
endpoint apiFunc
|
2015-06-08 12:19:52 -07:00
|
|
|
params map[string]string
|
2015-06-04 09:07:57 -07:00
|
|
|
query url.Values
|
|
|
|
response interface{}
|
|
|
|
errType errorType
|
2018-06-16 10:26:37 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
var tests = []test{
|
2015-06-04 09:07:57 -07:00
|
|
|
{
|
|
|
|
endpoint: api.query,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"2"},
|
2016-12-30 01:43:44 -08:00
|
|
|
"time": []string{"123.4"},
|
2015-06-04 09:07:57 -07:00
|
|
|
},
|
|
|
|
response: &queryData{
|
2016-12-30 01:43:44 -08:00
|
|
|
ResultType: promql.ValueTypeScalar,
|
|
|
|
Result: promql.Scalar{
|
|
|
|
V: 2,
|
|
|
|
T: timestamp.FromTime(start.Add(123*time.Second + 400*time.Millisecond)),
|
2015-06-04 09:07:57 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
endpoint: api.query,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"0.333"},
|
|
|
|
"time": []string{"1970-01-01T00:02:03Z"},
|
|
|
|
},
|
|
|
|
response: &queryData{
|
2016-12-30 01:43:44 -08:00
|
|
|
ResultType: promql.ValueTypeScalar,
|
|
|
|
Result: promql.Scalar{
|
|
|
|
V: 0.333,
|
|
|
|
T: timestamp.FromTime(start.Add(123 * time.Second)),
|
2015-06-04 09:07:57 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
endpoint: api.query,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"0.333"},
|
|
|
|
"time": []string{"1970-01-01T01:02:03+01:00"},
|
|
|
|
},
|
|
|
|
response: &queryData{
|
2016-12-30 01:43:44 -08:00
|
|
|
ResultType: promql.ValueTypeScalar,
|
|
|
|
Result: promql.Scalar{
|
|
|
|
V: 0.333,
|
|
|
|
T: timestamp.FromTime(start.Add(123 * time.Second)),
|
2015-06-04 09:07:57 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2015-11-11 11:46:57 -08:00
|
|
|
{
|
|
|
|
endpoint: api.query,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"0.333"},
|
|
|
|
},
|
|
|
|
response: &queryData{
|
2016-12-30 01:43:44 -08:00
|
|
|
ResultType: promql.ValueTypeScalar,
|
|
|
|
Result: promql.Scalar{
|
|
|
|
V: 0.333,
|
2018-06-16 10:26:37 -07:00
|
|
|
T: timestamp.FromTime(api.now()),
|
2015-11-11 11:46:57 -08:00
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2015-06-09 04:44:49 -07:00
|
|
|
{
|
|
|
|
endpoint: api.queryRange,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"time()"},
|
|
|
|
"start": []string{"0"},
|
|
|
|
"end": []string{"2"},
|
|
|
|
"step": []string{"1"},
|
|
|
|
},
|
|
|
|
response: &queryData{
|
2016-12-30 01:43:44 -08:00
|
|
|
ResultType: promql.ValueTypeMatrix,
|
|
|
|
Result: promql.Matrix{
|
|
|
|
promql.Series{
|
|
|
|
Points: []promql.Point{
|
|
|
|
{V: 0, T: timestamp.FromTime(start)},
|
|
|
|
{V: 1, T: timestamp.FromTime(start.Add(1 * time.Second))},
|
|
|
|
{V: 2, T: timestamp.FromTime(start.Add(2 * time.Second))},
|
2015-06-09 04:44:49 -07:00
|
|
|
},
|
2016-12-30 01:43:44 -08:00
|
|
|
Metric: nil,
|
2015-06-09 04:44:49 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
// Missing query params in range queries.
|
|
|
|
{
|
|
|
|
endpoint: api.queryRange,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"time()"},
|
|
|
|
"end": []string{"2"},
|
|
|
|
"step": []string{"1"},
|
|
|
|
},
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
endpoint: api.queryRange,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"time()"},
|
|
|
|
"start": []string{"0"},
|
|
|
|
"step": []string{"1"},
|
|
|
|
},
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
endpoint: api.queryRange,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"time()"},
|
|
|
|
"start": []string{"0"},
|
|
|
|
"end": []string{"2"},
|
|
|
|
},
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
|
|
|
// Bad query expression.
|
|
|
|
{
|
|
|
|
endpoint: api.query,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"invalid][query"},
|
|
|
|
"time": []string{"1970-01-01T01:02:03+01:00"},
|
|
|
|
},
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
endpoint: api.queryRange,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"invalid][query"},
|
|
|
|
"start": []string{"0"},
|
|
|
|
"end": []string{"100"},
|
|
|
|
"step": []string{"1"},
|
|
|
|
},
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
2017-03-16 07:16:20 -07:00
|
|
|
// Invalid step.
|
2016-08-16 06:10:02 -07:00
|
|
|
{
|
|
|
|
endpoint: api.queryRange,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"time()"},
|
|
|
|
"start": []string{"1"},
|
|
|
|
"end": []string{"2"},
|
|
|
|
"step": []string{"0"},
|
|
|
|
},
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
2017-03-16 07:16:20 -07:00
|
|
|
// Start after end.
|
2016-11-01 06:25:34 -07:00
|
|
|
{
|
|
|
|
endpoint: api.queryRange,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"time()"},
|
|
|
|
"start": []string{"2"},
|
|
|
|
"end": []string{"1"},
|
|
|
|
"step": []string{"1"},
|
|
|
|
},
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
2017-03-16 07:16:20 -07:00
|
|
|
// Start overflows int64 internally.
|
|
|
|
{
|
|
|
|
endpoint: api.queryRange,
|
|
|
|
query: url.Values{
|
|
|
|
"query": []string{"time()"},
|
|
|
|
"start": []string{"148966367200.372"},
|
|
|
|
"end": []string{"1489667272.372"},
|
|
|
|
"step": []string{"1"},
|
|
|
|
},
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
2015-06-09 07:09:31 -07:00
|
|
|
{
|
|
|
|
endpoint: api.series,
|
|
|
|
query: url.Values{
|
|
|
|
"match[]": []string{`test_metric2`},
|
|
|
|
},
|
2016-12-30 01:43:44 -08:00
|
|
|
response: []labels.Labels{
|
|
|
|
labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
|
2015-06-09 07:09:31 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
endpoint: api.series,
|
|
|
|
query: url.Values{
|
2015-11-05 02:23:43 -08:00
|
|
|
"match[]": []string{`test_metric1{foo=~".+o"}`},
|
2015-06-09 07:09:31 -07:00
|
|
|
},
|
2016-12-30 01:43:44 -08:00
|
|
|
response: []labels.Labels{
|
|
|
|
labels.FromStrings("__name__", "test_metric1", "foo", "boo"),
|
2015-06-09 07:09:31 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
endpoint: api.series,
|
|
|
|
query: url.Values{
|
2016-12-30 01:43:44 -08:00
|
|
|
"match[]": []string{`test_metric1{foo=~".+o$"}`, `test_metric1{foo=~".+o"}`},
|
2015-06-09 07:09:31 -07:00
|
|
|
},
|
2016-12-30 01:43:44 -08:00
|
|
|
response: []labels.Labels{
|
|
|
|
labels.FromStrings("__name__", "test_metric1", "foo", "boo"),
|
2015-06-09 07:09:31 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
endpoint: api.series,
|
|
|
|
query: url.Values{
|
2015-11-05 02:23:43 -08:00
|
|
|
"match[]": []string{`test_metric1{foo=~".+o"}`, `none`},
|
2015-06-09 07:09:31 -07:00
|
|
|
},
|
2016-12-30 01:43:44 -08:00
|
|
|
response: []labels.Labels{
|
|
|
|
labels.FromStrings("__name__", "test_metric1", "foo", "boo"),
|
2015-06-09 07:09:31 -07:00
|
|
|
},
|
|
|
|
},
|
2016-05-11 14:59:52 -07:00
|
|
|
// Start and end before series starts.
|
|
|
|
{
|
|
|
|
endpoint: api.series,
|
|
|
|
query: url.Values{
|
|
|
|
"match[]": []string{`test_metric2`},
|
|
|
|
"start": []string{"-2"},
|
|
|
|
"end": []string{"-1"},
|
|
|
|
},
|
2016-12-30 01:43:44 -08:00
|
|
|
response: []labels.Labels{},
|
2016-05-11 14:59:52 -07:00
|
|
|
},
|
|
|
|
// Start and end after series ends.
|
|
|
|
{
|
|
|
|
endpoint: api.series,
|
|
|
|
query: url.Values{
|
|
|
|
"match[]": []string{`test_metric2`},
|
|
|
|
"start": []string{"100000"},
|
|
|
|
"end": []string{"100001"},
|
|
|
|
},
|
2016-12-30 01:43:44 -08:00
|
|
|
response: []labels.Labels{},
|
2016-05-11 14:59:52 -07:00
|
|
|
},
|
|
|
|
// Start before series starts, end after series ends.
|
|
|
|
{
|
|
|
|
endpoint: api.series,
|
|
|
|
query: url.Values{
|
|
|
|
"match[]": []string{`test_metric2`},
|
|
|
|
"start": []string{"-1"},
|
|
|
|
"end": []string{"100000"},
|
|
|
|
},
|
2016-12-30 01:43:44 -08:00
|
|
|
response: []labels.Labels{
|
|
|
|
labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
|
2016-05-11 14:59:52 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
// Start and end within series.
|
|
|
|
{
|
|
|
|
endpoint: api.series,
|
|
|
|
query: url.Values{
|
|
|
|
"match[]": []string{`test_metric2`},
|
|
|
|
"start": []string{"1"},
|
|
|
|
"end": []string{"100"},
|
|
|
|
},
|
2016-12-30 01:43:44 -08:00
|
|
|
response: []labels.Labels{
|
|
|
|
labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
|
2016-05-11 14:59:52 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
// Start within series, end after.
|
|
|
|
{
|
|
|
|
endpoint: api.series,
|
|
|
|
query: url.Values{
|
|
|
|
"match[]": []string{`test_metric2`},
|
|
|
|
"start": []string{"1"},
|
|
|
|
"end": []string{"100000"},
|
|
|
|
},
|
2016-12-30 01:43:44 -08:00
|
|
|
response: []labels.Labels{
|
|
|
|
labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
|
2016-05-11 14:59:52 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
// Start before series, end within series.
|
|
|
|
{
|
|
|
|
endpoint: api.series,
|
|
|
|
query: url.Values{
|
|
|
|
"match[]": []string{`test_metric2`},
|
|
|
|
"start": []string{"-1"},
|
|
|
|
"end": []string{"1"},
|
|
|
|
},
|
2016-12-30 01:43:44 -08:00
|
|
|
response: []labels.Labels{
|
|
|
|
labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
|
2016-05-11 14:59:52 -07:00
|
|
|
},
|
|
|
|
},
|
2015-06-09 07:09:31 -07:00
|
|
|
// Missing match[] query params in series requests.
|
|
|
|
{
|
|
|
|
endpoint: api.series,
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
endpoint: api.dropSeries,
|
2017-07-06 05:38:40 -07:00
|
|
|
errType: errorInternal,
|
2015-06-09 07:09:31 -07:00
|
|
|
},
|
2017-05-11 08:09:24 -07:00
|
|
|
{
|
2016-12-02 04:31:43 -08:00
|
|
|
endpoint: api.targets,
|
2017-01-13 08:15:04 -08:00
|
|
|
response: &TargetDiscovery{
|
2018-10-25 01:19:20 -07:00
|
|
|
ActiveTargets: []*Target{
|
|
|
|
{
|
|
|
|
DiscoveredLabels: map[string]string{},
|
|
|
|
Labels: map[string]string{
|
|
|
|
"job": "blackbox",
|
2018-09-26 02:20:56 -07:00
|
|
|
},
|
2018-10-25 01:19:20 -07:00
|
|
|
ScrapeURL: "http://localhost:9115/probe?target=example.com",
|
|
|
|
Health: "unknown",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
DiscoveredLabels: map[string]string{},
|
|
|
|
Labels: map[string]string{
|
|
|
|
"job": "test",
|
|
|
|
},
|
|
|
|
ScrapeURL: "http://example.com:8080/metrics",
|
|
|
|
Health: "unknown",
|
2017-01-13 08:15:04 -08:00
|
|
|
},
|
2016-12-02 04:31:43 -08:00
|
|
|
},
|
2018-10-25 01:19:20 -07:00
|
|
|
DroppedTargets: []*DroppedTarget{
|
|
|
|
{
|
|
|
|
DiscoveredLabels: map[string]string{
|
|
|
|
"__address__": "http://dropped.example.com:9115",
|
|
|
|
"__metrics_path__": "/probe",
|
|
|
|
"__scheme__": "http",
|
|
|
|
"job": "blackbox",
|
2018-02-21 09:26:18 -08:00
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2016-12-02 04:31:43 -08:00
|
|
|
},
|
2017-05-11 08:09:24 -07:00
|
|
|
},
|
2015-06-09 07:09:31 -07:00
|
|
|
{
|
2017-01-13 01:20:11 -08:00
|
|
|
endpoint: api.alertmanagers,
|
|
|
|
response: &AlertmanagerDiscovery{
|
|
|
|
ActiveAlertmanagers: []*AlertmanagerTarget{
|
2017-04-05 06:24:22 -07:00
|
|
|
{
|
2017-01-13 01:20:11 -08:00
|
|
|
URL: "http://alertmanager.example.com:8080/api/v1/alerts",
|
|
|
|
},
|
|
|
|
},
|
2018-02-21 01:00:07 -08:00
|
|
|
DroppedAlertmanagers: []*AlertmanagerTarget{
|
|
|
|
{
|
|
|
|
URL: "http://dropped.alertmanager.example.com:8080/api/v1/alerts",
|
|
|
|
},
|
|
|
|
},
|
2017-01-13 01:20:11 -08:00
|
|
|
},
|
2015-06-09 07:09:31 -07:00
|
|
|
},
|
2017-05-11 08:09:24 -07:00
|
|
|
{
|
|
|
|
endpoint: api.serveConfig,
|
|
|
|
response: &prometheusConfig{
|
|
|
|
YAML: samplePrometheusCfg.String(),
|
|
|
|
},
|
|
|
|
},
|
api: Added v1/status/flags endpoint. (#3864)
Endpoint URL: /api/v1/status/flags
Example Output:
```json
{
"status": "success",
"data": {
"alertmanager.notification-queue-capacity": "10000",
"alertmanager.timeout": "10s",
"completion-bash": "false",
"completion-script-bash": "false",
"completion-script-zsh": "false",
"config.file": "my_cool_prometheus.yaml",
"help": "false",
"help-long": "false",
"help-man": "false",
"log.level": "info",
"query.lookback-delta": "5m",
"query.max-concurrency": "20",
"query.timeout": "2m",
"storage.tsdb.max-block-duration": "36h",
"storage.tsdb.min-block-duration": "2h",
"storage.tsdb.no-lockfile": "false",
"storage.tsdb.path": "data/",
"storage.tsdb.retention": "15d",
"version": "false",
"web.console.libraries": "console_libraries",
"web.console.templates": "consoles",
"web.enable-admin-api": "false",
"web.enable-lifecycle": "false",
"web.external-url": "",
"web.listen-address": "0.0.0.0:9090",
"web.max-connections": "512",
"web.read-timeout": "5m",
"web.route-prefix": "/",
"web.user-assets": ""
}
}
```
Signed-off-by: Bartek Plotka <bwplotka@gmail.com>
2018-02-21 00:49:02 -08:00
|
|
|
{
|
|
|
|
endpoint: api.serveFlags,
|
|
|
|
response: sampleFlagMap,
|
|
|
|
},
|
2018-03-25 09:50:34 -07:00
|
|
|
{
|
|
|
|
endpoint: api.alerts,
|
|
|
|
response: &AlertDiscovery{
|
2018-06-27 00:15:17 -07:00
|
|
|
Alerts: []*Alert{},
|
2018-03-25 09:50:34 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
endpoint: api.rules,
|
2018-06-27 00:15:17 -07:00
|
|
|
response: &RuleDiscovery{
|
|
|
|
RuleGroups: []*RuleGroup{
|
2018-03-25 09:50:34 -07:00
|
|
|
{
|
2018-06-27 00:15:17 -07:00
|
|
|
Name: "grp",
|
|
|
|
File: "/path/to/file",
|
|
|
|
Interval: 1,
|
|
|
|
Rules: []rule{
|
|
|
|
alertingRule{
|
|
|
|
Name: "test_metric3",
|
|
|
|
Query: "absent(test_metric3) != 1",
|
|
|
|
Duration: 1,
|
|
|
|
Labels: labels.Labels{},
|
|
|
|
Annotations: labels.Labels{},
|
|
|
|
Alerts: []*Alert{},
|
2018-08-23 06:00:10 -07:00
|
|
|
Health: "unknown",
|
2018-06-27 00:15:17 -07:00
|
|
|
Type: "alerting",
|
|
|
|
},
|
|
|
|
alertingRule{
|
|
|
|
Name: "test_metric4",
|
|
|
|
Query: "up == 1",
|
|
|
|
Duration: 1,
|
|
|
|
Labels: labels.Labels{},
|
|
|
|
Annotations: labels.Labels{},
|
|
|
|
Alerts: []*Alert{},
|
2018-08-23 06:00:10 -07:00
|
|
|
Health: "unknown",
|
2018-06-27 00:15:17 -07:00
|
|
|
Type: "alerting",
|
2018-03-25 09:50:34 -07:00
|
|
|
},
|
2018-06-27 00:15:17 -07:00
|
|
|
recordingRule{
|
|
|
|
Name: "recording-rule-1",
|
|
|
|
Query: "vector(1)",
|
|
|
|
Labels: labels.Labels{},
|
2018-08-23 06:00:10 -07:00
|
|
|
Health: "unknown",
|
2018-06-27 00:15:17 -07:00
|
|
|
Type: "recording",
|
2018-03-25 09:50:34 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2015-06-04 09:07:57 -07:00
|
|
|
}
|
|
|
|
|
2018-06-16 10:26:37 -07:00
|
|
|
if testLabelAPI {
|
|
|
|
tests = append(tests, []test{
|
|
|
|
{
|
|
|
|
endpoint: api.labelValues,
|
|
|
|
params: map[string]string{
|
|
|
|
"name": "__name__",
|
|
|
|
},
|
|
|
|
response: []string{
|
|
|
|
"test_metric1",
|
|
|
|
"test_metric2",
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
endpoint: api.labelValues,
|
|
|
|
params: map[string]string{
|
|
|
|
"name": "foo",
|
|
|
|
},
|
|
|
|
response: []string{
|
|
|
|
"bar",
|
|
|
|
"boo",
|
|
|
|
},
|
|
|
|
},
|
|
|
|
// Bad name parameter.
|
|
|
|
{
|
|
|
|
endpoint: api.labelValues,
|
|
|
|
params: map[string]string{
|
|
|
|
"name": "not!!!allowed",
|
|
|
|
},
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
2018-11-19 02:21:14 -08:00
|
|
|
// Label names.
|
|
|
|
{
|
|
|
|
endpoint: api.labelNames,
|
|
|
|
response: []string{"__name__", "foo"},
|
|
|
|
},
|
2018-06-16 10:26:37 -07:00
|
|
|
}...)
|
|
|
|
}
|
|
|
|
|
2017-11-10 16:53:48 -08:00
|
|
|
methods := func(f apiFunc) []string {
|
|
|
|
fp := reflect.ValueOf(f).Pointer()
|
|
|
|
if fp == reflect.ValueOf(api.query).Pointer() || fp == reflect.ValueOf(api.queryRange).Pointer() {
|
|
|
|
return []string{http.MethodGet, http.MethodPost}
|
2015-06-08 12:19:52 -07:00
|
|
|
}
|
2017-11-10 16:53:48 -08:00
|
|
|
return []string{http.MethodGet}
|
|
|
|
}
|
2015-06-08 12:19:52 -07:00
|
|
|
|
2017-11-10 16:53:48 -08:00
|
|
|
request := func(m string, q url.Values) (*http.Request, error) {
|
|
|
|
if m == http.MethodPost {
|
|
|
|
r, err := http.NewRequest(m, "http://example.com", strings.NewReader(q.Encode()))
|
|
|
|
r.Header.Set("Content-Type", "application/x-www-form-urlencoded")
|
|
|
|
return r, err
|
2015-06-04 09:07:57 -07:00
|
|
|
}
|
2017-11-10 16:53:48 -08:00
|
|
|
return http.NewRequest(m, fmt.Sprintf("http://example.com?%s", q.Encode()), nil)
|
|
|
|
}
|
|
|
|
|
2018-06-16 10:26:37 -07:00
|
|
|
for i, test := range tests {
|
2017-11-10 16:53:48 -08:00
|
|
|
for _, method := range methods(test.endpoint) {
|
|
|
|
// Build a context with the correct request params.
|
|
|
|
ctx := context.Background()
|
|
|
|
for p, v := range test.params {
|
|
|
|
ctx = route.WithParam(ctx, p, v)
|
2015-06-04 09:07:57 -07:00
|
|
|
}
|
2018-06-16 10:26:37 -07:00
|
|
|
t.Logf("run %d\t%s\t%q", i, method, test.query.Encode())
|
2017-11-10 16:53:48 -08:00
|
|
|
|
|
|
|
req, err := request(method, test.query)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
Optimise PromQL (#3966)
* Move range logic to 'eval'
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make aggregegate range aware
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* PromQL is statically typed, so don't eval to find the type.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Extend rangewrapper to multiple exprs
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Start making function evaluation ranged
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make instant queries a special case of range queries
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Eliminate evalString
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Evaluate range vector functions one series at a time
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make unary operators range aware
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make binops range aware
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Pass time to range-aware functions.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make simple _over_time functions range aware
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Reduce allocs when working with matrix selectors
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Add basic benchmark for range evaluation
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Reuse objects for function arguments
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Do dropmetricname and allocating output vector only once.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Add range-aware support for range vector functions with params
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Optimise holt_winters, cut cpu and allocs by ~25%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make rate&friends range aware
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make more functions range aware. Document calling convention.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make date functions range aware
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make simple math functions range aware
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Convert more functions to be range aware
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make more functions range aware
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Specialcase timestamp() with vector selector arg for range awareness
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Remove transition code for functions
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Remove the rest of the engine transition code
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Remove more obselete code
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Remove the last uses of the eval* functions
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Remove engine finalizers to prevent corruption
The finalizers set by matrixSelector were being called
just before the value they were retruning to the pool
was then being provided to the caller. Thus a concurrent query
could corrupt the data that the user has just been returned.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Add new benchmark suite for range functinos
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Migrate existing benchmarks to new system
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Expand promql benchmarks
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Simply test by removing unused range code
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* When testing instant queries, check range queries too.
To protect against subsequent steps in a range query being
affected by the previous steps, add a test that evaluates
an instant query that we know works again as a range query
with the tiimestamp we care about not being the first step.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Reuse ring for matrix iters. Put query results back in pool.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Reuse buffer when iterating over matrix selectors
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Unary minus should remove metric name
Cut down benchmarks for faster runs.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Reduce repetition in benchmark test cases
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Work series by series when doing normal vectorSelectors
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Optimise benchmark setup, cuts time by 60%
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Have rangeWrapper use an evalNodeHelper to cache across steps
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Use evalNodeHelper with functions
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Cache dropMetricName within a node evaluation.
This saves both the calculations and allocs done by dropMetricName
across steps.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Reuse input vectors in rangewrapper
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Reuse the point slices in the matrixes input/output by rangeWrapper
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make benchmark setup faster using AddFast
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Simplify benchmark code.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Add caching in VectorBinop
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Use xor to have one-level resultMetric hash key
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Add more benchmarks
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Call Query.Close in apiv1
This allows point slices allocated for the response data
to be reused by later queries, saving allocations.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Optimise histogram_quantile
It's now 5-10% faster with 97% less garbage generated for 1k steps
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make the input collection in rangeVector linear rather than quadratic
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Optimise label_replace, for 1k steps 15x fewer allocs and 3x faster
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Optimise label_join, 1.8x faster and 11x less memory for 1k steps
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Expand benchmarks, cleanup comments, simplify numSteps logic.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Address Fabian's comments
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Comments from Alin.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Address jrv's comments
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Remove dead code
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Address Simon's comments.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Rename populateIterators, pre-init some sizes
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Handle case where function has non-matrix args first
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Split rangeWrapper out to rangeEval function, improve comments
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Cleanup and make things more consistent
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Make EvalNodeHelper public
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
* Fabian's comments.
Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>
2018-06-04 06:47:45 -07:00
|
|
|
resp, apiErr, _ := test.endpoint(req.WithContext(ctx))
|
2018-11-15 05:22:16 -08:00
|
|
|
assertAPIError(t, apiErr, test.errType)
|
|
|
|
assertAPIResponse(t, resp, test.response)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2018-06-27 00:15:17 -07:00
|
|
|
|
2018-11-15 05:22:16 -08:00
|
|
|
func assertAPIError(t *testing.T, got *apiError, exp errorType) {
|
|
|
|
t.Helper()
|
2018-06-27 00:15:17 -07:00
|
|
|
|
2018-11-15 05:22:16 -08:00
|
|
|
if got != nil {
|
|
|
|
if exp == errorNone {
|
|
|
|
t.Fatalf("Unexpected error: %s", got)
|
|
|
|
}
|
|
|
|
if exp != got.typ {
|
|
|
|
t.Fatalf("Expected error of type %q but got type %q (%q)", exp, got.typ, got)
|
2015-06-04 09:07:57 -07:00
|
|
|
}
|
2018-11-15 05:22:16 -08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
if got == nil && exp != errorNone {
|
|
|
|
t.Fatalf("Expected error of type %q but got none", exp)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// assertAPIResponse fails the test when the actual response does not
// deep-equal the expected one. Both values are rendered as JSON in the
// failure message so the mismatch is readable.
func assertAPIResponse(t *testing.T, got interface{}, exp interface{}) {
	if reflect.DeepEqual(exp, got) {
		return
	}

	gotJSON, err := json.Marshal(got)
	if err != nil {
		t.Fatalf("failed to marshal response as JSON: %v", err.Error())
	}
	expJSON, err := json.Marshal(exp)
	if err != nil {
		t.Fatalf("failed to marshal expected response as JSON: %v", err.Error())
	}

	t.Fatalf(
		"Response does not match, expected:\n%+v\ngot:\n%+v",
		string(expJSON),
		string(gotJSON),
	)
}
|
|
|
|
|
2017-10-23 13:28:17 -07:00
|
|
|
func TestReadEndpoint(t *testing.T) {
|
|
|
|
suite, err := promql.NewTest(t, `
|
|
|
|
load 1m
|
|
|
|
test_metric1{foo="bar",baz="qux"} 1
|
|
|
|
`)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
defer suite.Close()
|
|
|
|
|
|
|
|
if err := suite.Run(); err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
api := &API{
|
|
|
|
Queryable: suite.Storage(),
|
|
|
|
QueryEngine: suite.QueryEngine(),
|
|
|
|
config: func() config.Config {
|
|
|
|
return config.Config{
|
|
|
|
GlobalConfig: config.GlobalConfig{
|
|
|
|
ExternalLabels: model.LabelSet{
|
|
|
|
"baz": "a",
|
|
|
|
"b": "c",
|
|
|
|
"d": "e",
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
},
|
2018-09-25 12:07:34 -07:00
|
|
|
remoteReadSampleLimit: 1e6,
|
|
|
|
remoteReadGate: gate.New(1),
|
2017-10-23 13:28:17 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// Encode the request.
|
|
|
|
matcher1, err := labels.NewMatcher(labels.MatchEqual, "__name__", "test_metric1")
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
matcher2, err := labels.NewMatcher(labels.MatchEqual, "d", "e")
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
2018-05-08 01:48:13 -07:00
|
|
|
query, err := remote.ToQuery(0, 1, []*labels.Matcher{matcher1, matcher2}, &storage.SelectParams{Step: 0, Func: "avg"})
|
2017-10-23 13:28:17 -07:00
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
req := &prompb.ReadRequest{Queries: []*prompb.Query{query}}
|
|
|
|
data, err := proto.Marshal(req)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
compressed := snappy.Encode(nil, data)
|
|
|
|
request, err := http.NewRequest("POST", "", bytes.NewBuffer(compressed))
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
recorder := httptest.NewRecorder()
|
|
|
|
api.remoteRead(recorder, request)
|
|
|
|
|
2018-09-05 06:50:50 -07:00
|
|
|
if recorder.Code/100 != 2 {
|
|
|
|
t.Fatal(recorder.Code)
|
|
|
|
}
|
|
|
|
|
2017-10-23 13:28:17 -07:00
|
|
|
// Decode the response.
|
|
|
|
compressed, err = ioutil.ReadAll(recorder.Result().Body)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
uncompressed, err := snappy.Decode(nil, compressed)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
var resp prompb.ReadResponse
|
|
|
|
err = proto.Unmarshal(uncompressed, &resp)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(resp.Results) != 1 {
|
|
|
|
t.Fatalf("Expected 1 result, got %d", len(resp.Results))
|
|
|
|
}
|
|
|
|
|
|
|
|
result := resp.Results[0]
|
|
|
|
expected := &prompb.QueryResult{
|
|
|
|
Timeseries: []*prompb.TimeSeries{
|
|
|
|
{
|
|
|
|
Labels: []*prompb.Label{
|
|
|
|
{Name: "__name__", Value: "test_metric1"},
|
|
|
|
{Name: "b", Value: "c"},
|
2017-10-26 03:44:49 -07:00
|
|
|
{Name: "baz", Value: "qux"},
|
2017-10-23 13:28:17 -07:00
|
|
|
{Name: "d", Value: "e"},
|
2017-10-26 03:44:49 -07:00
|
|
|
{Name: "foo", Value: "bar"},
|
2017-10-23 13:28:17 -07:00
|
|
|
},
|
2018-09-25 11:14:00 -07:00
|
|
|
Samples: []prompb.Sample{{Value: 1, Timestamp: 0}},
|
2017-10-23 13:28:17 -07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
if !reflect.DeepEqual(result, expected) {
|
|
|
|
t.Fatalf("Expected response \n%v\n but got \n%v\n", result, expected)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-11-15 05:22:16 -08:00
|
|
|
type fakeDB struct {
|
|
|
|
err error
|
|
|
|
closer func()
|
|
|
|
}
|
|
|
|
|
|
|
|
func (f *fakeDB) CleanTombstones() error { return f.err }
|
|
|
|
func (f *fakeDB) Delete(mint, maxt int64, ms ...tsdbLabels.Matcher) error { return f.err }
|
|
|
|
func (f *fakeDB) Dir() string {
|
|
|
|
dir, _ := ioutil.TempDir("", "fakeDB")
|
|
|
|
f.closer = func() {
|
|
|
|
os.RemoveAll(dir)
|
|
|
|
}
|
|
|
|
return dir
|
|
|
|
}
|
|
|
|
func (f *fakeDB) Snapshot(dir string, withHead bool) error { return f.err }
|
|
|
|
|
|
|
|
func TestAdminEndpoints(t *testing.T) {
|
|
|
|
tsdb, tsdbWithError := &fakeDB{}, &fakeDB{err: fmt.Errorf("some error")}
|
|
|
|
snapshotAPI := func(api *API) apiFunc { return api.snapshot }
|
|
|
|
cleanAPI := func(api *API) apiFunc { return api.cleanTombstones }
|
|
|
|
deleteAPI := func(api *API) apiFunc { return api.deleteSeries }
|
|
|
|
|
|
|
|
for i, tc := range []struct {
|
|
|
|
db *fakeDB
|
|
|
|
enableAdmin bool
|
|
|
|
endpoint func(api *API) apiFunc
|
|
|
|
method string
|
|
|
|
values url.Values
|
|
|
|
|
|
|
|
errType errorType
|
|
|
|
}{
|
|
|
|
// Tests for the snapshot endpoint.
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: false,
|
|
|
|
endpoint: snapshotAPI,
|
|
|
|
|
|
|
|
errType: errorUnavailable,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: snapshotAPI,
|
|
|
|
|
|
|
|
errType: errorNone,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: snapshotAPI,
|
|
|
|
values: map[string][]string{"skip_head": []string{"true"}},
|
|
|
|
|
|
|
|
errType: errorNone,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: snapshotAPI,
|
|
|
|
values: map[string][]string{"skip_head": []string{"xxx"}},
|
|
|
|
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdbWithError,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: snapshotAPI,
|
|
|
|
|
|
|
|
errType: errorInternal,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: nil,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: snapshotAPI,
|
|
|
|
|
|
|
|
errType: errorUnavailable,
|
|
|
|
},
|
|
|
|
// Tests for the cleanTombstones endpoint.
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: false,
|
|
|
|
endpoint: cleanAPI,
|
|
|
|
|
|
|
|
errType: errorUnavailable,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: cleanAPI,
|
|
|
|
|
|
|
|
errType: errorNone,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdbWithError,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: cleanAPI,
|
|
|
|
|
|
|
|
errType: errorInternal,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: nil,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: cleanAPI,
|
|
|
|
|
|
|
|
errType: errorUnavailable,
|
|
|
|
},
|
|
|
|
// Tests for the deleteSeries endpoint.
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: false,
|
|
|
|
endpoint: deleteAPI,
|
|
|
|
|
|
|
|
errType: errorUnavailable,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: deleteAPI,
|
|
|
|
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: deleteAPI,
|
|
|
|
values: map[string][]string{"match[]": []string{"123"}},
|
|
|
|
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: deleteAPI,
|
|
|
|
values: map[string][]string{"match[]": []string{"up"}, "start": []string{"xxx"}},
|
|
|
|
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: deleteAPI,
|
|
|
|
values: map[string][]string{"match[]": []string{"up"}, "end": []string{"xxx"}},
|
|
|
|
|
|
|
|
errType: errorBadData,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: deleteAPI,
|
|
|
|
values: map[string][]string{"match[]": []string{"up"}},
|
|
|
|
|
|
|
|
errType: errorNone,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdb,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: deleteAPI,
|
|
|
|
values: map[string][]string{"match[]": []string{"up{job!=\"foo\"}", "{job=~\"bar.+\"}", "up{instance!~\"fred.+\"}"}},
|
|
|
|
|
|
|
|
errType: errorNone,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: tsdbWithError,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: deleteAPI,
|
|
|
|
values: map[string][]string{"match[]": []string{"up"}},
|
|
|
|
|
|
|
|
errType: errorInternal,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
db: nil,
|
|
|
|
enableAdmin: true,
|
|
|
|
endpoint: deleteAPI,
|
|
|
|
|
|
|
|
errType: errorUnavailable,
|
|
|
|
},
|
|
|
|
} {
|
|
|
|
tc := tc
|
|
|
|
t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
|
|
|
|
api := &API{
|
|
|
|
db: func() TSDBAdmin {
|
|
|
|
if tc.db != nil {
|
|
|
|
return tc.db
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
},
|
|
|
|
ready: func(f http.HandlerFunc) http.HandlerFunc { return f },
|
|
|
|
enableAdmin: tc.enableAdmin,
|
|
|
|
}
|
|
|
|
defer func() {
|
|
|
|
if tc.db != nil && tc.db.closer != nil {
|
|
|
|
tc.db.closer()
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
|
|
|
endpoint := tc.endpoint(api)
|
|
|
|
req, err := http.NewRequest(tc.method, fmt.Sprintf("?%s", tc.values.Encode()), nil)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("Error when creating test request: %s", err)
|
|
|
|
}
|
|
|
|
_, apiErr, _ := endpoint(req)
|
|
|
|
assertAPIError(t, apiErr, tc.errType)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-06-04 09:07:57 -07:00
|
|
|
func TestRespondSuccess(t *testing.T) {
|
2015-07-02 01:37:19 -07:00
|
|
|
s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
2018-07-06 10:44:45 -07:00
|
|
|
api := API{}
|
|
|
|
api.respond(w, "test")
|
2015-07-02 01:37:19 -07:00
|
|
|
}))
|
|
|
|
defer s.Close()
|
2015-06-04 09:07:57 -07:00
|
|
|
|
2015-07-02 01:37:19 -07:00
|
|
|
resp, err := http.Get(s.URL)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("Error on test request: %s", err)
|
2015-06-04 09:07:57 -07:00
|
|
|
}
|
2015-07-02 01:37:19 -07:00
|
|
|
body, err := ioutil.ReadAll(resp.Body)
|
|
|
|
defer resp.Body.Close()
|
2015-06-04 09:07:57 -07:00
|
|
|
if err != nil {
|
2015-07-02 01:37:19 -07:00
|
|
|
t.Fatalf("Error reading response body: %s", err)
|
2015-06-04 09:07:57 -07:00
|
|
|
}
|
|
|
|
|
2015-07-02 01:37:19 -07:00
|
|
|
if resp.StatusCode != 200 {
|
|
|
|
t.Fatalf("Return code %d expected in success response but got %d", 200, resp.StatusCode)
|
|
|
|
}
|
|
|
|
if h := resp.Header.Get("Content-Type"); h != "application/json" {
|
|
|
|
t.Fatalf("Expected Content-Type %q but got %q", "application/json", h)
|
|
|
|
}
|
|
|
|
|
|
|
|
var res response
|
|
|
|
if err = json.Unmarshal([]byte(body), &res); err != nil {
|
|
|
|
t.Fatalf("Error unmarshaling JSON body: %s", err)
|
2015-06-04 09:07:57 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
exp := &response{
|
|
|
|
Status: statusSuccess,
|
|
|
|
Data: "test",
|
|
|
|
}
|
|
|
|
if !reflect.DeepEqual(&res, exp) {
|
|
|
|
t.Fatalf("Expected response \n%v\n but got \n%v\n", res, exp)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestRespondError(t *testing.T) {
|
2015-07-02 01:37:19 -07:00
|
|
|
s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
2018-07-06 10:44:45 -07:00
|
|
|
api := API{}
|
|
|
|
api.respondError(w, &apiError{errorTimeout, errors.New("message")}, "test")
|
2015-07-02 01:37:19 -07:00
|
|
|
}))
|
|
|
|
defer s.Close()
|
2015-06-04 09:07:57 -07:00
|
|
|
|
2015-07-02 01:37:19 -07:00
|
|
|
resp, err := http.Get(s.URL)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("Error on test request: %s", err)
|
2015-06-04 09:07:57 -07:00
|
|
|
}
|
2015-07-02 01:37:19 -07:00
|
|
|
body, err := ioutil.ReadAll(resp.Body)
|
|
|
|
defer resp.Body.Close()
|
2015-06-04 09:07:57 -07:00
|
|
|
if err != nil {
|
2015-07-02 01:37:19 -07:00
|
|
|
t.Fatalf("Error reading response body: %s", err)
|
2015-06-04 09:07:57 -07:00
|
|
|
}
|
|
|
|
|
2015-11-11 14:00:54 -08:00
|
|
|
if want, have := http.StatusServiceUnavailable, resp.StatusCode; want != have {
|
|
|
|
t.Fatalf("Return code %d expected in error response but got %d", want, have)
|
2015-07-02 01:37:19 -07:00
|
|
|
}
|
|
|
|
if h := resp.Header.Get("Content-Type"); h != "application/json" {
|
|
|
|
t.Fatalf("Expected Content-Type %q but got %q", "application/json", h)
|
|
|
|
}
|
|
|
|
|
|
|
|
var res response
|
|
|
|
if err = json.Unmarshal([]byte(body), &res); err != nil {
|
|
|
|
t.Fatalf("Error unmarshaling JSON body: %s", err)
|
2015-06-04 09:07:57 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
exp := &response{
|
|
|
|
Status: statusError,
|
|
|
|
Data: "test",
|
|
|
|
ErrorType: errorTimeout,
|
|
|
|
Error: "message",
|
|
|
|
}
|
|
|
|
if !reflect.DeepEqual(&res, exp) {
|
|
|
|
t.Fatalf("Expected response \n%v\n but got \n%v\n", res, exp)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestParseTime(t *testing.T) {
|
|
|
|
ts, err := time.Parse(time.RFC3339Nano, "2015-06-03T13:21:58.555Z")
|
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
var tests = []struct {
|
|
|
|
input string
|
|
|
|
fail bool
|
|
|
|
result time.Time
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
input: "",
|
|
|
|
fail: true,
|
|
|
|
}, {
|
|
|
|
input: "abc",
|
|
|
|
fail: true,
|
|
|
|
}, {
|
|
|
|
input: "30s",
|
|
|
|
fail: true,
|
|
|
|
}, {
|
|
|
|
input: "123",
|
|
|
|
result: time.Unix(123, 0),
|
|
|
|
}, {
|
|
|
|
input: "123.123",
|
|
|
|
result: time.Unix(123, 123000000),
|
|
|
|
}, {
|
|
|
|
input: "2015-06-03T13:21:58.555Z",
|
|
|
|
result: ts,
|
|
|
|
}, {
|
|
|
|
input: "2015-06-03T14:21:58.555+01:00",
|
|
|
|
result: ts,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, test := range tests {
|
|
|
|
ts, err := parseTime(test.input)
|
|
|
|
if err != nil && !test.fail {
|
|
|
|
t.Errorf("Unexpected error for %q: %s", test.input, err)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if err == nil && test.fail {
|
|
|
|
t.Errorf("Expected error for %q but got none", test.input)
|
|
|
|
continue
|
|
|
|
}
|
2016-12-30 01:43:44 -08:00
|
|
|
if !test.fail && !ts.Equal(test.result) {
|
|
|
|
t.Errorf("Expected time %v for input %q but got %v", test.result, test.input, ts)
|
2015-06-04 09:07:57 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestParseDuration(t *testing.T) {
|
|
|
|
var tests = []struct {
|
|
|
|
input string
|
|
|
|
fail bool
|
|
|
|
result time.Duration
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
input: "",
|
|
|
|
fail: true,
|
|
|
|
}, {
|
|
|
|
input: "abc",
|
|
|
|
fail: true,
|
|
|
|
}, {
|
|
|
|
input: "2015-06-03T13:21:58.555Z",
|
|
|
|
fail: true,
|
2017-03-16 07:16:20 -07:00
|
|
|
}, {
|
|
|
|
// Internal int64 overflow.
|
|
|
|
input: "-148966367200.372",
|
|
|
|
fail: true,
|
|
|
|
}, {
|
|
|
|
// Internal int64 overflow.
|
|
|
|
input: "148966367200.372",
|
|
|
|
fail: true,
|
2015-06-04 09:07:57 -07:00
|
|
|
}, {
|
|
|
|
input: "123",
|
|
|
|
result: 123 * time.Second,
|
|
|
|
}, {
|
|
|
|
input: "123.333",
|
|
|
|
result: 123*time.Second + 333*time.Millisecond,
|
|
|
|
}, {
|
|
|
|
input: "15s",
|
|
|
|
result: 15 * time.Second,
|
|
|
|
}, {
|
|
|
|
input: "5m",
|
|
|
|
result: 5 * time.Minute,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, test := range tests {
|
|
|
|
d, err := parseDuration(test.input)
|
|
|
|
if err != nil && !test.fail {
|
|
|
|
t.Errorf("Unexpected error for %q: %s", test.input, err)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if err == nil && test.fail {
|
|
|
|
t.Errorf("Expected error for %q but got none", test.input)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if !test.fail && d != test.result {
|
|
|
|
t.Errorf("Expected duration %v for input %q but got %v", test.result, test.input, d)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2016-01-25 16:32:46 -08:00
|
|
|
|
|
|
|
func TestOptionsMethod(t *testing.T) {
|
2017-05-02 16:49:29 -07:00
|
|
|
r := route.New()
|
2017-10-06 08:20:20 -07:00
|
|
|
api := &API{ready: func(f http.HandlerFunc) http.HandlerFunc { return f }}
|
2016-01-25 16:32:46 -08:00
|
|
|
api.Register(r)
|
|
|
|
|
|
|
|
s := httptest.NewServer(r)
|
|
|
|
defer s.Close()
|
|
|
|
|
|
|
|
req, err := http.NewRequest("OPTIONS", s.URL+"/any_path", nil)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("Error creating OPTIONS request: %s", err)
|
|
|
|
}
|
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("Error executing OPTIONS request: %s", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
if resp.StatusCode != http.StatusNoContent {
|
|
|
|
t.Fatalf("Expected status %d, got %d", http.StatusNoContent, resp.StatusCode)
|
|
|
|
}
|
|
|
|
|
|
|
|
for h, v := range corsHeaders {
|
|
|
|
if resp.Header.Get(h) != v {
|
|
|
|
t.Fatalf("Expected %q for header %q, got %q", v, h, resp.Header.Get(h))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2018-02-07 07:40:36 -08:00
|
|
|
|
2018-02-08 09:28:55 -08:00
|
|
|
func TestRespond(t *testing.T) {
|
|
|
|
cases := []struct {
|
|
|
|
response interface{}
|
|
|
|
expected string
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
response: &queryData{
|
|
|
|
ResultType: promql.ValueTypeMatrix,
|
|
|
|
Result: promql.Matrix{
|
|
|
|
promql.Series{
|
|
|
|
Points: []promql.Point{{V: 1, T: 1000}},
|
|
|
|
Metric: labels.FromStrings("__name__", "foo"),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
expected: `{"status":"success","data":{"resultType":"matrix","result":[{"metric":{"__name__":"foo"},"values":[[1,"1"]]}]}}`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 0, T: 0},
|
|
|
|
expected: `{"status":"success","data":[0,"0"]}`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 20, T: 1},
|
|
|
|
expected: `{"status":"success","data":[0.001,"20"]}`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 20, T: 10},
|
2018-02-07 04:27:57 -08:00
|
|
|
expected: `{"status":"success","data":[0.010,"20"]}`,
|
2018-02-08 09:28:55 -08:00
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 20, T: 100},
|
2018-02-07 04:27:57 -08:00
|
|
|
expected: `{"status":"success","data":[0.100,"20"]}`,
|
2018-02-08 09:28:55 -08:00
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 20, T: 1001},
|
|
|
|
expected: `{"status":"success","data":[1.001,"20"]}`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 20, T: 1010},
|
2018-02-07 04:27:57 -08:00
|
|
|
expected: `{"status":"success","data":[1.010,"20"]}`,
|
2018-02-08 09:28:55 -08:00
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 20, T: 1100},
|
2018-02-07 04:27:57 -08:00
|
|
|
expected: `{"status":"success","data":[1.100,"20"]}`,
|
2018-02-08 09:28:55 -08:00
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 20, T: 12345678123456555},
|
2018-02-07 04:27:57 -08:00
|
|
|
expected: `{"status":"success","data":[12345678123456.555,"20"]}`,
|
2018-02-08 09:28:55 -08:00
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 20, T: -1},
|
|
|
|
expected: `{"status":"success","data":[-0.001,"20"]}`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: math.NaN(), T: 0},
|
|
|
|
expected: `{"status":"success","data":[0,"NaN"]}`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: math.Inf(1), T: 0},
|
|
|
|
expected: `{"status":"success","data":[0,"+Inf"]}`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: math.Inf(-1), T: 0},
|
|
|
|
expected: `{"status":"success","data":[0,"-Inf"]}`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 1.2345678e6, T: 0},
|
|
|
|
expected: `{"status":"success","data":[0,"1234567.8"]}`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 1.2345678e-6, T: 0},
|
|
|
|
expected: `{"status":"success","data":[0,"0.0000012345678"]}`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
response: promql.Point{V: 1.2345678e-67, T: 0},
|
|
|
|
expected: `{"status":"success","data":[0,"1.2345678e-67"]}`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, c := range cases {
|
|
|
|
s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
2018-07-06 10:44:45 -07:00
|
|
|
api := API{}
|
|
|
|
api.respond(w, c.response)
|
2018-02-08 09:28:55 -08:00
|
|
|
}))
|
|
|
|
defer s.Close()
|
|
|
|
|
|
|
|
resp, err := http.Get(s.URL)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("Error on test request: %s", err)
|
|
|
|
}
|
|
|
|
body, err := ioutil.ReadAll(resp.Body)
|
|
|
|
defer resp.Body.Close()
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("Error reading response body: %s", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
if string(body) != c.expected {
|
|
|
|
t.Fatalf("Expected response \n%v\n but got \n%v\n", c.expected, string(body))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-02-07 07:40:36 -08:00
|
|
|
// This is a global to avoid the benchmark being optimized away.
|
|
|
|
var testResponseWriter = httptest.ResponseRecorder{}
|
|
|
|
|
|
|
|
func BenchmarkRespond(b *testing.B) {
|
|
|
|
b.ReportAllocs()
|
|
|
|
points := []promql.Point{}
|
|
|
|
for i := 0; i < 10000; i++ {
|
|
|
|
points = append(points, promql.Point{V: float64(i * 1000000), T: int64(i)})
|
|
|
|
}
|
|
|
|
response := &queryData{
|
|
|
|
ResultType: promql.ValueTypeMatrix,
|
|
|
|
Result: promql.Matrix{
|
|
|
|
promql.Series{
|
|
|
|
Points: points,
|
|
|
|
Metric: nil,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
2018-02-08 09:28:55 -08:00
|
|
|
b.ResetTimer()
|
2018-07-06 10:44:45 -07:00
|
|
|
api := API{}
|
2018-02-07 07:40:36 -08:00
|
|
|
for n := 0; n < b.N; n++ {
|
2018-07-06 10:44:45 -07:00
|
|
|
api.respond(&testResponseWriter, response)
|
2018-02-07 07:40:36 -08:00
|
|
|
}
|
|
|
|
}
|