Add extension point for returning different content types from API endpoints

Signed-off-by: Charles Korn <charles.korn@grafana.com>

parent 9fb8fe0d4e
commit 3e94dd8c8f
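The commit introduces a Codec interface, installs a JSON codec as the default, and lets callers register additional codecs that are selected via the request's Accept header. As a minimal sketch of what a downstream codec could look like against this extension point (the textCodec name, the text/plain content type, and the output format are illustrative, not part of this commit):

    package v1

    import "fmt"

    // textCodec is a hypothetical Codec that renders responses as plain text.
    type textCodec struct{}

    func (textCodec) ContentType() string { return "text/plain" }

    // CanEncode lets a codec refuse payloads it cannot represent; this sketch
    // accepts everything.
    func (textCodec) CanEncode(_ *Response) bool { return true }

    func (textCodec) Encode(resp *Response) ([]byte, error) {
        return []byte(fmt.Sprintf("status=%v data=%v", resp.Status, resp.Data)), nil
    }

Installing it with api.InstallCodec(textCodec{}) makes it selectable by a request carrying "Accept: text/plain"; requests without a matching Accept header keep getting the default JSON encoding.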
@@ -80,6 +80,8 @@ const (
 
 var LocalhostRepresentations = []string{"127.0.0.1", "localhost", "::1"}
 
+var defaultCodec = JSONCodec{}
+
 type apiError struct {
     typ errorType
     err error
@@ -145,7 +147,8 @@ type RuntimeInfo struct {
     StorageRetention string `json:"storageRetention"`
 }
 
-type response struct {
+// Response contains a response to a HTTP API request.
+type Response struct {
     Status    status      `json:"status"`
     Data      interface{} `json:"data,omitempty"`
     ErrorType errorType   `json:"errorType,omitempty"`
@@ -208,6 +211,8 @@ type API struct {
 
     remoteWriteHandler http.Handler
     remoteReadHandler  http.Handler
+
+    codecs map[string]Codec
 }
 
 func init() {
@@ -273,8 +278,12 @@ func NewAPI(
         statsRenderer: defaultStatsRenderer,
 
         remoteReadHandler: remote.NewReadHandler(logger, registerer, q, configFunc, remoteReadSampleLimit, remoteReadConcurrencyLimit, remoteReadMaxBytesInFrame),
+
+        codecs: map[string]Codec{},
     }
 
+    a.InstallCodec(defaultCodec)
+
     if statsRenderer != nil {
         a.statsRenderer = statsRenderer
     }
@@ -286,6 +295,16 @@ func NewAPI(
     return a
 }
 
+// InstallCodec adds codec to this API's available codecs.
+// If codec handles a content type handled by a codec already installed in this API, codec replaces the previous codec.
+func (api *API) InstallCodec(codec Codec) {
+    if api.codecs == nil {
+        api.codecs = map[string]Codec{}
+    }
+
+    api.codecs[codec.ContentType()] = codec
+}
+
 func setUnavailStatusOnTSDBNotReady(r apiFuncResult) apiFuncResult {
     if r.err != nil && errors.Cause(r.err.err) == tsdb.ErrNotReady {
         r.err.typ = errorUnavailable
@@ -308,7 +327,7 @@ func (api *API) Register(r *route.Router) {
         }
 
         if result.data != nil {
-            api.respond(w, result.data, result.warnings)
+            api.respond(w, r, result.data, result.warnings)
             return
         }
         w.WriteHeader(http.StatusNoContent)
@@ -1446,7 +1465,7 @@ func (api *API) serveWALReplayStatus(w http.ResponseWriter, r *http.Request) {
     if err != nil {
         api.respondError(w, &apiError{errorInternal, err}, nil)
     }
-    api.respond(w, walReplayStatus{
+    api.respond(w, r, walReplayStatus{
         Min:     status.Min,
         Max:     status.Max,
         Current: status.Current,
@@ -1548,34 +1567,59 @@ func (api *API) cleanTombstones(r *http.Request) apiFuncResult {
     return apiFuncResult{nil, nil, nil, nil}
 }
 
-func (api *API) respond(w http.ResponseWriter, data interface{}, warnings storage.Warnings) {
+func (api *API) respond(w http.ResponseWriter, req *http.Request, data interface{}, warnings storage.Warnings) {
     statusMessage := statusSuccess
     var warningStrings []string
     for _, warning := range warnings {
         warningStrings = append(warningStrings, warning.Error())
     }
-    json := jsoniter.ConfigCompatibleWithStandardLibrary
-    b, err := json.Marshal(&response{
+
+    resp := &Response{
         Status:   statusMessage,
         Data:     data,
         Warnings: warningStrings,
-    })
+    }
+
+    codec := api.negotiateCodec(req, resp)
+    b, err := codec.Encode(resp)
     if err != nil {
-        level.Error(api.logger).Log("msg", "error marshaling json response", "err", err)
+        level.Error(api.logger).Log("msg", "error marshaling response", "err", err)
         http.Error(w, err.Error(), http.StatusInternalServerError)
         return
     }
 
-    w.Header().Set("Content-Type", "application/json")
+    w.Header().Set("Content-Type", codec.ContentType())
     w.WriteHeader(http.StatusOK)
     if n, err := w.Write(b); err != nil {
         level.Error(api.logger).Log("msg", "error writing response", "bytesWritten", n, "err", err)
     }
 }
 
+// HTTP content negotiation is hard (see https://developer.mozilla.org/en-US/docs/Web/HTTP/Content_negotiation).
+// Ideally, we shouldn't be implementing this ourselves - https://github.com/golang/go/issues/19307 is an open proposal to add
+// this to the Go stdlib and has links to a number of other implementations.
+//
+// This is an MVP, and doesn't support features like wildcards or weighting.
+func (api *API) negotiateCodec(req *http.Request, resp *Response) Codec {
+    acceptHeader := req.Header.Get("Accept")
+    if acceptHeader == "" {
+        return defaultCodec
+    }
+
+    for _, contentType := range strings.Split(acceptHeader, ",") {
+        codec, ok := api.codecs[strings.TrimSpace(contentType)]
+        if ok && codec.CanEncode(resp) {
+            return codec
+        }
+    }
+
+    level.Warn(api.logger).Log("msg", "could not find suitable codec for response, falling back to default codec", "accept_header", acceptHeader)
+    return defaultCodec
+}
+
 func (api *API) respondError(w http.ResponseWriter, apiErr *apiError, data interface{}) {
     json := jsoniter.ConfigCompatibleWithStandardLibrary
-    b, err := json.Marshal(&response{
+    b, err := json.Marshal(&Response{
         Status:    statusError,
         ErrorType: apiErr.typ,
         Error:     apiErr.err.Error(),
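Negotiation walks the Accept header left to right and returns the first installed codec that both matches the content type and reports it can encode the payload; otherwise the default JSON codec is used. As a small sketch of a client request that exercises this path (the query URL and the first content type are illustrative; only JSON is installed by default):

    package main

    import (
        "fmt"
        "io"
        "net/http"
    )

    func main() {
        req, err := http.NewRequest(http.MethodGet, "http://localhost:9090/api/v1/query?query=up", nil)
        if err != nil {
            panic(err)
        }
        // Prefer a hypothetical custom encoding but accept JSON as a fallback:
        // if "application/x-custom" is not installed or cannot encode the
        // response, the server moves on to "application/json".
        req.Header.Set("Accept", "application/x-custom, application/json")

        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()

        body, _ := io.ReadAll(resp.Body)
        fmt.Println(resp.Header.Get("Content-Type"))
        fmt.Println(string(body))
    }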
@@ -18,7 +18,6 @@ import (
     "encoding/json"
     "fmt"
     "io"
-    "math"
     "net/http"
     "net/http/httptest"
     "net/url"
@@ -30,7 +29,6 @@ import (
     "testing"
     "time"
 
-    "github.com/prometheus/prometheus/model/histogram"
     "github.com/prometheus/prometheus/prompb"
     "github.com/prometheus/prometheus/util/stats"
 
@@ -2765,39 +2763,93 @@ func TestAdminEndpoints(t *testing.T) {
 }
 
 func TestRespondSuccess(t *testing.T) {
+    api := API{
+        logger: log.NewNopLogger(),
+    }
+
+    api.InstallCodec(&testCodec{contentType: "test/cannot-encode", canEncode: false})
+    api.InstallCodec(&testCodec{contentType: "test/can-encode", canEncode: true})
+    api.InstallCodec(&testCodec{contentType: "test/can-encode-2", canEncode: true})
+
     s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-        api := API{}
-        api.respond(w, "test", nil)
+        api.respond(w, r, "test", nil)
     }))
     defer s.Close()
 
-    resp, err := http.Get(s.URL)
-    if err != nil {
-        t.Fatalf("Error on test request: %s", err)
-    }
-    body, err := io.ReadAll(resp.Body)
-    defer resp.Body.Close()
-    if err != nil {
-        t.Fatalf("Error reading response body: %s", err)
-    }
-
-    if resp.StatusCode != 200 {
-        t.Fatalf("Return code %d expected in success response but got %d", 200, resp.StatusCode)
-    }
-    if h := resp.Header.Get("Content-Type"); h != "application/json" {
-        t.Fatalf("Expected Content-Type %q but got %q", "application/json", h)
-    }
-
-    var res response
-    if err = json.Unmarshal([]byte(body), &res); err != nil {
-        t.Fatalf("Error unmarshaling JSON body: %s", err)
-    }
-
-    exp := &response{
-        Status: statusSuccess,
-        Data:   "test",
-    }
-    require.Equal(t, exp, &res)
+    for _, tc := range []struct {
+        name                string
+        acceptHeader        string
+        expectedContentType string
+        expectedBody        string
+    }{
+        {
+            name:                "no Accept header",
+            expectedContentType: "application/json",
+            expectedBody:        `{"status":"success","data":"test"}`,
+        },
+        {
+            name:                "Accept header with single content type which is suitable",
+            acceptHeader:        "test/can-encode",
+            expectedContentType: "test/can-encode",
+            expectedBody:        `response from test/can-encode codec`,
+        },
+        {
+            name:                "Accept header with single content type which is not available",
+            acceptHeader:        "test/not-registered",
+            expectedContentType: "application/json",
+            expectedBody:        `{"status":"success","data":"test"}`,
+        },
+        {
+            name:                "Accept header with single content type which cannot encode the response payload",
+            acceptHeader:        "test/cannot-encode",
+            expectedContentType: "application/json",
+            expectedBody:        `{"status":"success","data":"test"}`,
+        },
+        {
+            name:                "Accept header with multiple content types, all of which are suitable",
+            acceptHeader:        "test/can-encode, test/can-encode-2",
+            expectedContentType: "test/can-encode",
+            expectedBody:        `response from test/can-encode codec`,
+        },
+        {
+            name:                "Accept header with multiple content types, only one of which is available",
+            acceptHeader:        "test/not-registered, test/can-encode",
+            expectedContentType: "test/can-encode",
+            expectedBody:        `response from test/can-encode codec`,
+        },
+        {
+            name:                "Accept header with multiple content types, only one of which can encode the response payload",
+            acceptHeader:        "test/cannot-encode, test/can-encode",
+            expectedContentType: "test/can-encode",
+            expectedBody:        `response from test/can-encode codec`,
+        },
+        {
+            name:                "Accept header with multiple content types, none of which are available",
+            acceptHeader:        "test/not-registered, test/also-not-registered",
+            expectedContentType: "application/json",
+            expectedBody:        `{"status":"success","data":"test"}`,
+        },
+    } {
+        t.Run(tc.name, func(t *testing.T) {
+            req, err := http.NewRequest(http.MethodGet, s.URL, nil)
+            require.NoError(t, err)
+
+            if tc.acceptHeader != "" {
+                req.Header.Set("Accept", tc.acceptHeader)
+            }
+
+            resp, err := http.DefaultClient.Do(req)
+            require.NoError(t, err)
+
+            body, err := io.ReadAll(resp.Body)
+            defer resp.Body.Close()
+            require.NoError(t, err)
+
+            require.Equal(t, http.StatusOK, resp.StatusCode)
+            require.Equal(t, tc.expectedContentType, resp.Header.Get("Content-Type"))
+            require.Equal(t, tc.expectedBody, string(body))
+        })
+    }
 }
 
 func TestRespondError(t *testing.T) {
@@ -2824,12 +2876,12 @@ func TestRespondError(t *testing.T) {
         t.Fatalf("Expected Content-Type %q but got %q", "application/json", h)
     }
 
-    var res response
+    var res Response
     if err = json.Unmarshal([]byte(body), &res); err != nil {
         t.Fatalf("Error unmarshaling JSON body: %s", err)
     }
 
-    exp := &response{
+    exp := &Response{
         Status:    statusError,
         Data:      "test",
         ErrorType: errorTimeout,
@@ -3047,165 +3099,6 @@ func TestOptionsMethod(t *testing.T) {
     }
 }
 
-func TestRespond(t *testing.T) {
-    cases := []struct {
-        response interface{}
-        expected string
-    }{
-        {
-            response: &queryData{
-                ResultType: parser.ValueTypeMatrix,
-                Result: promql.Matrix{
-                    promql.Series{
-                        Points: []promql.Point{{V: 1, T: 1000}},
-                        Metric: labels.FromStrings("__name__", "foo"),
-                    },
-                },
-            },
-            expected: `{"status":"success","data":{"resultType":"matrix","result":[{"metric":{"__name__":"foo"},"values":[[1,"1"]]}]}}`,
-        },
-        {
-            response: &queryData{
-                ResultType: parser.ValueTypeMatrix,
-                Result: promql.Matrix{
-                    promql.Series{
-                        Points: []promql.Point{{H: &histogram.FloatHistogram{
-                            Schema:        2,
-                            ZeroThreshold: 0.001,
-                            ZeroCount:     12,
-                            Count:         10,
-                            Sum:           20,
-                            PositiveSpans: []histogram.Span{
-                                {Offset: 3, Length: 2},
-                                {Offset: 1, Length: 3},
-                            },
-                            NegativeSpans: []histogram.Span{
-                                {Offset: 2, Length: 2},
-                            },
-                            PositiveBuckets: []float64{1, 2, 2, 1, 1},
-                            NegativeBuckets: []float64{2, 1},
-                        }, T: 1000}},
-                        Metric: labels.FromStrings("__name__", "foo"),
-                    },
-                },
-            },
-            expected: `{"status":"success","data":{"resultType":"matrix","result":[{"metric":{"__name__":"foo"},"histograms":[[1,{"count":"10","sum":"20","buckets":[[1,"-1.6817928305074288","-1.414213562373095","1"],[1,"-1.414213562373095","-1.189207115002721","2"],[3,"-0.001","0.001","12"],[0,"1.414213562373095","1.6817928305074288","1"],[0,"1.6817928305074288","2","2"],[0,"2.378414230005442","2.82842712474619","2"],[0,"2.82842712474619","3.3635856610148576","1"],[0,"3.3635856610148576","4","1"]]}]]}]}}`,
-        },
-        {
-            response: promql.Point{V: 0, T: 0},
-            expected: `{"status":"success","data":[0,"0"]}`,
-        },
-        {
-            response: promql.Point{V: 20, T: 1},
-            expected: `{"status":"success","data":[0.001,"20"]}`,
-        },
-        {
-            response: promql.Point{V: 20, T: 10},
-            expected: `{"status":"success","data":[0.010,"20"]}`,
-        },
-        {
-            response: promql.Point{V: 20, T: 100},
-            expected: `{"status":"success","data":[0.100,"20"]}`,
-        },
-        {
-            response: promql.Point{V: 20, T: 1001},
-            expected: `{"status":"success","data":[1.001,"20"]}`,
-        },
-        {
-            response: promql.Point{V: 20, T: 1010},
-            expected: `{"status":"success","data":[1.010,"20"]}`,
-        },
-        {
-            response: promql.Point{V: 20, T: 1100},
-            expected: `{"status":"success","data":[1.100,"20"]}`,
-        },
-        {
-            response: promql.Point{V: 20, T: 12345678123456555},
-            expected: `{"status":"success","data":[12345678123456.555,"20"]}`,
-        },
-        {
-            response: promql.Point{V: 20, T: -1},
-            expected: `{"status":"success","data":[-0.001,"20"]}`,
-        },
-        {
-            response: promql.Point{V: math.NaN(), T: 0},
-            expected: `{"status":"success","data":[0,"NaN"]}`,
-        },
-        {
-            response: promql.Point{V: math.Inf(1), T: 0},
-            expected: `{"status":"success","data":[0,"+Inf"]}`,
-        },
-        {
-            response: promql.Point{V: math.Inf(-1), T: 0},
-            expected: `{"status":"success","data":[0,"-Inf"]}`,
-        },
-        {
-            response: promql.Point{V: 1.2345678e6, T: 0},
-            expected: `{"status":"success","data":[0,"1234567.8"]}`,
-        },
-        {
-            response: promql.Point{V: 1.2345678e-6, T: 0},
-            expected: `{"status":"success","data":[0,"0.0000012345678"]}`,
-        },
-        {
-            response: promql.Point{V: 1.2345678e-67, T: 0},
-            expected: `{"status":"success","data":[0,"1.2345678e-67"]}`,
-        },
-        {
-            response: []exemplar.QueryResult{
-                {
-                    SeriesLabels: labels.FromStrings("foo", "bar"),
-                    Exemplars: []exemplar.Exemplar{
-                        {
-                            Labels: labels.FromStrings("traceID", "abc"),
-                            Value:  100.123,
-                            Ts:     1234,
-                        },
-                    },
-                },
-            },
-            expected: `{"status":"success","data":[{"seriesLabels":{"foo":"bar"},"exemplars":[{"labels":{"traceID":"abc"},"value":"100.123","timestamp":1.234}]}]}`,
-        },
-        {
-            response: []exemplar.QueryResult{
-                {
-                    SeriesLabels: labels.FromStrings("foo", "bar"),
-                    Exemplars: []exemplar.Exemplar{
-                        {
-                            Labels: labels.FromStrings("traceID", "abc"),
-                            Value:  math.Inf(1),
-                            Ts:     1234,
-                        },
-                    },
-                },
-            },
-            expected: `{"status":"success","data":[{"seriesLabels":{"foo":"bar"},"exemplars":[{"labels":{"traceID":"abc"},"value":"+Inf","timestamp":1.234}]}]}`,
-        },
-    }
-
-    for _, c := range cases {
-        s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-            api := API{}
-            api.respond(w, c.response, nil)
-        }))
-        defer s.Close()
-
-        resp, err := http.Get(s.URL)
-        if err != nil {
-            t.Fatalf("Error on test request: %s", err)
-        }
-        body, err := io.ReadAll(resp.Body)
-        defer resp.Body.Close()
-        if err != nil {
-            t.Fatalf("Error reading response body: %s", err)
-        }
-
-        if string(body) != c.expected {
-            t.Fatalf("Expected response \n%v\n but got \n%v\n", c.expected, string(body))
-        }
-    }
-}
-
 func TestTSDBStatus(t *testing.T) {
     tsdb := &fakeDB{}
     tsdbStatusAPI := func(api *API) apiFunc { return api.serveTSDBStatus }
@@ -3281,6 +3174,8 @@ var testResponseWriter = httptest.ResponseRecorder{}
 
 func BenchmarkRespond(b *testing.B) {
     b.ReportAllocs()
+    request, err := http.NewRequest(http.MethodGet, "/does-not-matter", nil)
+    require.NoError(b, err)
     points := []promql.Point{}
     for i := 0; i < 10000; i++ {
         points = append(points, promql.Point{V: float64(i * 1000000), T: int64(i)})
@@ -3297,7 +3192,7 @@ func BenchmarkRespond(b *testing.B) {
     b.ResetTimer()
     api := API{}
     for n := 0; n < b.N; n++ {
-        api.respond(&testResponseWriter, response, nil)
+        api.respond(&testResponseWriter, request, response, nil)
     }
 }
 
@@ -3408,3 +3303,20 @@ func TestGetGlobalURL(t *testing.T) {
         })
     }
 }
+
+type testCodec struct {
+    contentType string
+    canEncode   bool
+}
+
+func (t *testCodec) ContentType() string {
+    return t.contentType
+}
+
+func (t *testCodec) CanEncode(_ *Response) bool {
+    return t.canEncode
+}
+
+func (t *testCodec) Encode(_ *Response) ([]byte, error) {
+    return []byte(fmt.Sprintf("response from %v codec", t.contentType)), nil
+}
web/api/v1/codec.go (new file, 26 lines)
@@ -0,0 +1,26 @@
+// Copyright 2016 The Prometheus Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package v1
+
+// A Codec performs encoding of API responses.
+type Codec interface {
+    // ContentType returns the MIME time that this Codec emits.
+    ContentType() string
+
+    // CanEncode determines if this Codec can encode resp.
+    CanEncode(resp *Response) bool
+
+    // Encode encodes resp, ready for transmission to an API consumer.
+    Encode(resp *Response) ([]byte, error)
+}
web/api/v1/json_codec.go (new file, 32 lines)
@@ -0,0 +1,32 @@
+// Copyright 2016 The Prometheus Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package v1
+
+import jsoniter "github.com/json-iterator/go"
+
+// JSONCodec is a Codec that encodes API responses as JSON.
+type JSONCodec struct{}
+
+func (j JSONCodec) ContentType() string {
+    return "application/json"
+}
+
+func (j JSONCodec) CanEncode(_ *Response) bool {
+    return true
+}
+
+func (j JSONCodec) Encode(resp *Response) ([]byte, error) {
+    json := jsoniter.ConfigCompatibleWithStandardLibrary
+    return json.Marshal(resp)
+}
web/api/v1/json_codec_test.go (new file, 178 lines)
@@ -0,0 +1,178 @@
+// Copyright 2016 The Prometheus Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package v1
+
+import (
+    "math"
+    "testing"
+
+    "github.com/prometheus/prometheus/model/exemplar"
+    "github.com/prometheus/prometheus/model/histogram"
+    "github.com/prometheus/prometheus/model/labels"
+    "github.com/prometheus/prometheus/promql"
+    "github.com/prometheus/prometheus/promql/parser"
+)
+
+func TestJsonCodec_Encode(t *testing.T) {
+    cases := []struct {
+        response interface{}
+        expected string
+    }{
+        {
+            response: &queryData{
+                ResultType: parser.ValueTypeMatrix,
+                Result: promql.Matrix{
+                    promql.Series{
+                        Points: []promql.Point{{V: 1, T: 1000}},
+                        Metric: labels.FromStrings("__name__", "foo"),
+                    },
+                },
+            },
+            expected: `{"status":"success","data":{"resultType":"matrix","result":[{"metric":{"__name__":"foo"},"values":[[1,"1"]]}]}}`,
+        },
+        {
+            response: &queryData{
+                ResultType: parser.ValueTypeMatrix,
+                Result: promql.Matrix{
+                    promql.Series{
+                        Points: []promql.Point{{H: &histogram.FloatHistogram{
+                            Schema:        2,
+                            ZeroThreshold: 0.001,
+                            ZeroCount:     12,
+                            Count:         10,
+                            Sum:           20,
+                            PositiveSpans: []histogram.Span{
+                                {Offset: 3, Length: 2},
+                                {Offset: 1, Length: 3},
+                            },
+                            NegativeSpans: []histogram.Span{
+                                {Offset: 2, Length: 2},
+                            },
+                            PositiveBuckets: []float64{1, 2, 2, 1, 1},
+                            NegativeBuckets: []float64{2, 1},
+                        }, T: 1000}},
+                        Metric: labels.FromStrings("__name__", "foo"),
+                    },
+                },
+            },
+            expected: `{"status":"success","data":{"resultType":"matrix","result":[{"metric":{"__name__":"foo"},"histograms":[[1,{"count":"10","sum":"20","buckets":[[1,"-1.6817928305074288","-1.414213562373095","1"],[1,"-1.414213562373095","-1.189207115002721","2"],[3,"-0.001","0.001","12"],[0,"1.414213562373095","1.6817928305074288","1"],[0,"1.6817928305074288","2","2"],[0,"2.378414230005442","2.82842712474619","2"],[0,"2.82842712474619","3.3635856610148576","1"],[0,"3.3635856610148576","4","1"]]}]]}]}}`,
+        },
+        {
+            response: promql.Point{V: 0, T: 0},
+            expected: `{"status":"success","data":[0,"0"]}`,
+        },
+        {
+            response: promql.Point{V: 20, T: 1},
+            expected: `{"status":"success","data":[0.001,"20"]}`,
+        },
+        {
+            response: promql.Point{V: 20, T: 10},
+            expected: `{"status":"success","data":[0.010,"20"]}`,
+        },
+        {
+            response: promql.Point{V: 20, T: 100},
+            expected: `{"status":"success","data":[0.100,"20"]}`,
+        },
+        {
+            response: promql.Point{V: 20, T: 1001},
+            expected: `{"status":"success","data":[1.001,"20"]}`,
+        },
+        {
+            response: promql.Point{V: 20, T: 1010},
+            expected: `{"status":"success","data":[1.010,"20"]}`,
+        },
+        {
+            response: promql.Point{V: 20, T: 1100},
+            expected: `{"status":"success","data":[1.100,"20"]}`,
+        },
+        {
+            response: promql.Point{V: 20, T: 12345678123456555},
+            expected: `{"status":"success","data":[12345678123456.555,"20"]}`,
+        },
+        {
+            response: promql.Point{V: 20, T: -1},
+            expected: `{"status":"success","data":[-0.001,"20"]}`,
+        },
+        {
+            response: promql.Point{V: math.NaN(), T: 0},
+            expected: `{"status":"success","data":[0,"NaN"]}`,
+        },
+        {
+            response: promql.Point{V: math.Inf(1), T: 0},
+            expected: `{"status":"success","data":[0,"+Inf"]}`,
+        },
+        {
+            response: promql.Point{V: math.Inf(-1), T: 0},
+            expected: `{"status":"success","data":[0,"-Inf"]}`,
+        },
+        {
+            response: promql.Point{V: 1.2345678e6, T: 0},
+            expected: `{"status":"success","data":[0,"1234567.8"]}`,
+        },
+        {
+            response: promql.Point{V: 1.2345678e-6, T: 0},
+            expected: `{"status":"success","data":[0,"0.0000012345678"]}`,
+        },
+        {
+            response: promql.Point{V: 1.2345678e-67, T: 0},
+            expected: `{"status":"success","data":[0,"1.2345678e-67"]}`,
+        },
+        {
+            response: []exemplar.QueryResult{
+                {
+                    SeriesLabels: labels.FromStrings("foo", "bar"),
+                    Exemplars: []exemplar.Exemplar{
+                        {
+                            Labels: labels.FromStrings("traceID", "abc"),
+                            Value:  100.123,
+                            Ts:     1234,
+                        },
+                    },
+                },
+            },
+            expected: `{"status":"success","data":[{"seriesLabels":{"foo":"bar"},"exemplars":[{"labels":{"traceID":"abc"},"value":"100.123","timestamp":1.234}]}]}`,
+        },
+        {
+            response: []exemplar.QueryResult{
+                {
+                    SeriesLabels: labels.FromStrings("foo", "bar"),
+                    Exemplars: []exemplar.Exemplar{
+                        {
+                            Labels: labels.FromStrings("traceID", "abc"),
+                            Value:  math.Inf(1),
+                            Ts:     1234,
+                        },
+                    },
+                },
+            },
+            expected: `{"status":"success","data":[{"seriesLabels":{"foo":"bar"},"exemplars":[{"labels":{"traceID":"abc"},"value":"+Inf","timestamp":1.234}]}]}`,
+        },
+    }
+
+    codec := JSONCodec{}
+
+    for _, c := range cases {
+        body, err := codec.Encode(&Response{
+            Status: statusSuccess,
+            Data:   c.response,
+        })
+        if err != nil {
+            t.Fatalf("Error encoding response body: %s", err)
+        }
+
+        if string(body) != c.expected {
+            t.Fatalf("Expected response \n%v\n but got \n%v\n", c.expected, string(body))
+        }
+    }
+}