Mirror of https://github.com/prometheus/prometheus.git (synced 2025-01-11 13:57:36 -08:00)
Adds normalization of localhost urls in targets page react (#6794)
* support for globalurls in targets page react
* fixed tests
* removed fmts
* implemented suggestions
* formatted
* implemented suggestions. fixed tests.
* formated go code
* implemented suggestions

Signed-off-by: Harkishen Singh <harkishensingh@hotmail.com>
This commit is contained in:
parent 88af973663
commit 489a9aa7b9
@@ -18,6 +18,7 @@ import (
 	"fmt"
 	"math"
 	"math/rand"
+	"net"
 	"net/http"
 	"net/url"
 	"os"
@@ -79,12 +80,15 @@ const (
 	errorNotFound errorType = "not_found"
 )
 
-var remoteReadQueries = prometheus.NewGauge(prometheus.GaugeOpts{
-	Namespace: namespace,
-	Subsystem: subsystem,
-	Name:      "remote_read_queries",
-	Help:      "The current number of remote read queries being executed or waiting.",
-})
+var (
+	LocalhostRepresentations = []string{"127.0.0.1", "localhost"}
+	remoteReadQueries = prometheus.NewGauge(prometheus.GaugeOpts{
+		Namespace: namespace,
+		Subsystem: subsystem,
+		Name:      "remote_read_queries",
+		Help:      "The current number of remote read queries being executed or waiting.",
+	})
+)
 
 type apiError struct {
 	typ errorType
@@ -175,6 +179,7 @@ type API struct {
 	config           func() config.Config
 	flagsMap         map[string]string
 	ready            func(http.HandlerFunc) http.HandlerFunc
+	globalURLOptions GlobalURLOptions
 
 	db          func() TSDBAdmin
 	enableAdmin bool
@@ -200,6 +205,7 @@ func NewAPI(
 	ar alertmanagerRetriever,
 	configFunc func() config.Config,
 	flagsMap map[string]string,
+	globalURLOptions GlobalURLOptions,
 	readyFunc func(http.HandlerFunc) http.HandlerFunc,
 	db func() TSDBAdmin,
 	enableAdmin bool,
@@ -222,6 +228,7 @@ func NewAPI(
 		config:           configFunc,
 		flagsMap:         flagsMap,
 		ready:            readyFunc,
+		globalURLOptions: globalURLOptions,
 		db:               db,
 		enableAdmin:      enableAdmin,
 		rulesRetriever:   rr,
@@ -583,6 +590,7 @@ type Target struct {
 
 	ScrapePool string `json:"scrapePool"`
 	ScrapeURL  string `json:"scrapeUrl"`
+	GlobalURL  string `json:"globalUrl"`
 
 	LastError  string    `json:"lastError"`
 	LastScrape time.Time `json:"lastScrape"`
@@ -602,6 +610,54 @@ type TargetDiscovery struct {
 	DroppedTargets []*DroppedTarget `json:"droppedTargets"`
 }
 
+// GlobalURLOptions contains fields used for deriving the global URL for local targets.
+type GlobalURLOptions struct {
+	ListenAddress string
+	Host          string
+	Scheme        string
+}
+
+func getGlobalURL(u *url.URL, opts GlobalURLOptions) (*url.URL, error) {
+	host, port, err := net.SplitHostPort(u.Host)
+	if err != nil {
+		return u, err
+	}
+
+	for _, lhr := range LocalhostRepresentations {
+		if host == lhr {
+			_, ownPort, err := net.SplitHostPort(opts.ListenAddress)
+			if err != nil {
+				return u, err
+			}
+
+			if port == ownPort {
+				// Only in the case where the target is on localhost and its port is
+				// the same as the one we're listening on, we know for sure that
+				// we're monitoring our own process and that we need to change the
+				// scheme, hostname, and port to the externally reachable ones as
+				// well. We shouldn't need to touch the path at all, since if a
+				// path prefix is defined, the path under which we scrape ourselves
+				// should already contain the prefix.
+				u.Scheme = opts.Scheme
+				u.Host = opts.Host
+			} else {
+				// Otherwise, we only know that localhost is not reachable
+				// externally, so we replace only the hostname by the one in the
+				// external URL. It could be the wrong hostname for the service on
+				// this port, but it's still the best possible guess.
+				host, _, err := net.SplitHostPort(opts.Host)
+				if err != nil {
+					return u, err
+				}
+				u.Host = host + ":" + port
+			}
+			break
+		}
+	}
+
+	return u, nil
+}
+
 func (api *API) targets(r *http.Request) apiFuncResult {
 	sortKeys := func(targets map[string][]*scrape.Target) ([]string, int) {
 		var n int
@@ -641,12 +697,22 @@ func (api *API) targets(r *http.Request) apiFuncResult {
 				lastErrStr = lastErr.Error()
 			}
 
+			globalURL, err := getGlobalURL(target.URL(), api.globalURLOptions)
+
 			res.ActiveTargets = append(res.ActiveTargets, &Target{
-				DiscoveredLabels: target.DiscoveredLabels().Map(),
-				Labels:           target.Labels().Map(),
-				ScrapePool:       key,
-				ScrapeURL:        target.URL().String(),
-				LastError:        lastErrStr,
+				DiscoveredLabels: target.DiscoveredLabels().Map(),
+				Labels:           target.Labels().Map(),
+				ScrapePool:       key,
+				ScrapeURL:        target.URL().String(),
+				GlobalURL:        globalURL.String(),
+				LastError: func() string {
+					if err == nil && lastErrStr == "" {
+						return ""
+					} else if err != nil {
+						return errors.Wrapf(err, lastErrStr).Error()
+					}
+					return lastErrStr
+				}(),
 				LastScrape:         target.LastScrape(),
 				LastScrapeDuration: target.LastScrapeDuration().Seconds(),
 				Health:             target.Health(),
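For readers who want to see the effect of the new getGlobalURL helper in isolation, the following is a minimal, self-contained Go sketch that re-implements the same rewrite rule with sample values. The external scheme, host, and target addresses below are illustrative assumptions, not values taken from this commit.

package main

import (
	"fmt"
	"net"
	"net/url"
)

// Mirrors LocalhostRepresentations from the diff above.
var localhostRepresentations = []string{"127.0.0.1", "localhost"}

// globalURLOptions mirrors GlobalURLOptions from the diff above.
type globalURLOptions struct {
	ListenAddress string
	Scheme        string
	Host          string
}

// globalURL replicates the logic of getGlobalURL: a localhost target using the
// server's own port gets the external scheme and host; any other localhost
// target keeps its scheme and port but gets the external hostname; everything
// else is left untouched.
func globalURL(u *url.URL, opts globalURLOptions) (*url.URL, error) {
	host, port, err := net.SplitHostPort(u.Host)
	if err != nil {
		return u, err
	}
	for _, lhr := range localhostRepresentations {
		if host != lhr {
			continue
		}
		_, ownPort, err := net.SplitHostPort(opts.ListenAddress)
		if err != nil {
			return u, err
		}
		if port == ownPort {
			u.Scheme = opts.Scheme
			u.Host = opts.Host
		} else {
			extHost, _, err := net.SplitHostPort(opts.Host)
			if err != nil {
				return u, err
			}
			u.Host = extHost + ":" + port
		}
		break
	}
	return u, nil
}

func main() {
	// Assumed deployment: Prometheus listens on :9090 and is reachable at
	// https://prometheus.example.com:9090 (hypothetical external URL).
	opts := globalURLOptions{ListenAddress: ":9090", Scheme: "https", Host: "prometheus.example.com:9090"}
	for _, raw := range []string{
		"http://localhost:9090/metrics",   // self-scrape: scheme and host are replaced
		"http://127.0.0.1:9100/metrics",   // other local target: only the hostname is replaced
		"http://example.com:8080/metrics", // non-local target: unchanged
	} {
		u, err := url.Parse(raw)
		if err != nil {
			panic(err)
		}
		g, err := globalURL(u, opts)
		if err != nil {
			fmt.Println(raw, "->", "error:", err)
			continue
		}
		fmt.Println(raw, "->", g.String())
	}
}

With those sample options the first URL becomes https://prometheus.example.com:9090/metrics, the second http://prometheus.example.com:9100/metrics, and the third is returned as-is, which is exactly the distinction described in the comments inside getGlobalURL.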
@@ -826,8 +826,9 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
 				},
 				ScrapePool:         "blackbox",
 				ScrapeURL:          "http://localhost:9115/probe?target=example.com",
+				GlobalURL:          "http://localhost:9115/probe?target=example.com",
 				Health:             "down",
-				LastError:          "failed",
+				LastError:          "failed: missing port in address",
 				LastScrape:         scrapeStart,
 				LastScrapeDuration: 0.1,
 			},
@@ -838,6 +839,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
 				},
 				ScrapePool: "test",
 				ScrapeURL:  "http://example.com:8080/metrics",
+				GlobalURL:  "http://example.com:8080/metrics",
 				Health:     "up",
 				LastError:  "",
 				LastScrape: scrapeStart,
@@ -870,8 +872,9 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
 				},
 				ScrapePool:         "blackbox",
 				ScrapeURL:          "http://localhost:9115/probe?target=example.com",
+				GlobalURL:          "http://localhost:9115/probe?target=example.com",
 				Health:             "down",
-				LastError:          "failed",
+				LastError:          "failed: missing port in address",
 				LastScrape:         scrapeStart,
 				LastScrapeDuration: 0.1,
 			},
@@ -882,6 +885,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
 				},
 				ScrapePool: "test",
 				ScrapeURL:  "http://example.com:8080/metrics",
+				GlobalURL:  "http://example.com:8080/metrics",
 				Health:     "up",
 				LastError:  "",
 				LastScrape: scrapeStart,
@@ -914,8 +918,9 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
 				},
 				ScrapePool:         "blackbox",
 				ScrapeURL:          "http://localhost:9115/probe?target=example.com",
+				GlobalURL:          "http://localhost:9115/probe?target=example.com",
 				Health:             "down",
-				LastError:          "failed",
+				LastError:          "failed: missing port in address",
 				LastScrape:         scrapeStart,
 				LastScrapeDuration: 0.1,
 			},
@@ -926,6 +931,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
 				},
 				ScrapePool: "test",
 				ScrapeURL:  "http://example.com:8080/metrics",
+				GlobalURL:  "http://example.com:8080/metrics",
 				Health:     "up",
 				LastError:  "",
 				LastScrape: scrapeStart,
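A note on the changed test expectations above: LastError goes from "failed" to "failed: missing port in address" because getGlobalURL now runs for the localhost target and, presumably because the test's GlobalURLOptions carries no usable host:port to split (an assumption inferred from the expected string), net.SplitHostPort fails and that error is wrapped around the fixture's existing scrape error. A minimal sketch of that wrapping, assuming an empty listen address:

package main

import (
	"fmt"
	"net"

	"github.com/pkg/errors"
)

func main() {
	// SplitHostPort on an empty string fails with exactly "missing port in address".
	_, _, err := net.SplitHostPort("")
	lastErrStr := "failed" // the scrape error already present in the test fixture
	// Mirrors the errors.Wrapf(err, lastErrStr) call in the targets endpoint above.
	fmt.Println(errors.Wrapf(err, lastErrStr).Error()) // prints: failed: missing port in address
}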
@@ -6,19 +6,21 @@ import EndpointLink from './EndpointLink';
 describe('EndpointLink', () => {
   it('renders a simple anchor if the endpoint has no query params', () => {
     const endpoint = 'http://100.104.208.71:15090/stats/prometheus';
-    const endpointLink = shallow(<EndpointLink endpoint={endpoint} />);
+    const globalURL = 'http://100.104.208.71:15090/stats/prometheus';
+    const endpointLink = shallow(<EndpointLink endpoint={endpoint} globalUrl={globalURL} />);
     const anchor = endpointLink.find('a');
-    expect(anchor.prop('href')).toEqual(endpoint);
+    expect(anchor.prop('href')).toEqual(globalURL);
     expect(anchor.children().text()).toEqual(endpoint);
     expect(endpointLink.find('br')).toHaveLength(0);
   });
 
   it('renders an anchor targeting endpoint but with query param labels if the endpoint has query params', () => {
     const endpoint = 'http://100.99.128.71:9115/probe?module=http_2xx&target=http://some-service';
-    const endpointLink = shallow(<EndpointLink endpoint={endpoint} />);
+    const globalURL = 'http://100.99.128.71:9115/probe?module=http_2xx&target=http://some-service';
+    const endpointLink = shallow(<EndpointLink endpoint={endpoint} globalUrl={globalURL} />);
     const anchor = endpointLink.find('a');
     const badges = endpointLink.find(Badge);
-    expect(anchor.prop('href')).toEqual(endpoint);
+    expect(anchor.prop('href')).toEqual(globalURL);
     expect(anchor.children().text()).toEqual('http://100.99.128.71:9115/probe');
     expect(endpointLink.find('br')).toHaveLength(1);
     expect(badges).toHaveLength(2);
@@ -29,7 +31,7 @@ describe('EndpointLink', () => {
   });
 
   it('renders an alert if url is invalid', () => {
-    const endpointLink = shallow(<EndpointLink endpoint={'afdsacas'} />);
+    const endpointLink = shallow(<EndpointLink endpoint={'afdsacas'} globalUrl={'afdsacas'} />);
     const err = endpointLink.find(Alert);
     expect(err.render().text()).toEqual('Error: Invalid URL');
   });
@@ -3,9 +3,10 @@ import { Badge, Alert } from 'reactstrap';
 
 export interface EndpointLinkProps {
   endpoint: string;
+  globalUrl: string;
 }
 
-const EndpointLink: FC<EndpointLinkProps> = ({ endpoint }) => {
+const EndpointLink: FC<EndpointLinkProps> = ({ endpoint, globalUrl }) => {
   let url: URL;
   try {
     url = new URL(endpoint);
@@ -22,7 +23,7 @@ const EndpointLink: FC<EndpointLinkProps> = ({ endpoint }) => {
 
   return (
     <>
-      <a href={endpoint}>{`${protocol}//${host}${pathname}`}</a>
+      <a href={globalUrl}>{`${protocol}//${host}${pathname}`}</a>
       {params.length > 0 ? <br /> : null}
      {params.map(([labelName, labelValue]: [string, string]) => {
         return (
@@ -49,6 +49,7 @@ const ScrapePoolPanel: FC<PanelProps> = ({ scrapePool, targetGroup }) => {
       labels,
       scrapePool,
       scrapeUrl,
+      globalUrl,
       lastError,
       lastScrape,
       lastScrapeDuration,
@@ -59,7 +60,7 @@ const ScrapePoolPanel: FC<PanelProps> = ({ scrapePool, targetGroup }) => {
     return (
       <tr key={scrapeUrl}>
         <td className={styles.endpoint}>
-          <EndpointLink endpoint={scrapeUrl} />
+          <EndpointLink endpoint={scrapeUrl} globalUrl={globalUrl} />
         </td>
         <td className={styles.state}>
           <Badge color={color}>{health.toUpperCase()}</Badge>
@@ -20,6 +20,7 @@ export const targetGroups: ScrapePools = Object.freeze({
       },
       scrapePool: 'blackbox',
       scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io',
+      globalUrl: 'http://localhost.localdomain:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io',
       lastError: '',
       lastScrape: '2019-11-04T11:52:14.759299-07:00',
       lastScrapeDuration: 36560147,
@@ -39,6 +40,7 @@ export const targetGroups: ScrapePools = Object.freeze({
       },
       scrapePool: 'blackbox',
       scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io',
+      globalUrl: 'http://localhost.localdomain:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io',
       lastError: '',
       lastScrape: '2019-11-04T11:52:24.731096-07:00',
       lastScrapeDuration: 49448763,
@@ -58,6 +60,7 @@ export const targetGroups: ScrapePools = Object.freeze({
       },
       scrapePool: 'blackbox',
       scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080',
+      globalUrl: 'http://localhost.localdomain:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080',
       lastError: '',
       lastScrape: '2019-11-04T11:52:13.516654-07:00',
       lastScrapeDuration: 120916592,
@@ -81,6 +84,7 @@ export const targetGroups: ScrapePools = Object.freeze({
       },
       scrapePool: 'node_exporter',
       scrapeUrl: 'http://localhost:9100/metrics',
+      globalUrl: 'http://localhost.localdomain:9100/metrics',
       lastError: '',
       lastScrape: '2019-11-04T11:52:14.145703-07:00',
       lastScrapeDuration: 3842307,
@@ -104,6 +108,7 @@ export const targetGroups: ScrapePools = Object.freeze({
       },
       scrapePool: 'prometheus',
       scrapeUrl: 'http://localhost:9090/metrics',
+      globalUrl: 'http://localhost.localdomain:9000/metrics',
       lastError: '',
       lastScrape: '2019-11-04T11:52:18.479731-07:00',
       lastScrapeDuration: 4050976,
@@ -7,6 +7,7 @@ export interface Target {
   labels: Labels;
   scrapePool: string;
   scrapeUrl: string;
+  globalUrl: string;
   lastError: string;
   lastScrape: string;
   lastScrapeDuration: number;
web/web.go (39 changed lines)
@@ -70,24 +70,20 @@ import (
 	"github.com/prometheus/prometheus/web/ui"
 )
 
-var (
-	localhostRepresentations = []string{"127.0.0.1", "localhost"}
-
-	// Paths that are handled by the React / Reach router that should all be served the main React app's index.html.
-	reactRouterPaths = []string{
-		"/",
-		"/alerts",
-		"/config",
-		"/flags",
-		"/graph",
-		"/rules",
-		"/service-discovery",
-		"/status",
-		"/targets",
-		"/tsdb-status",
-		"/version",
-	}
-)
+// Paths that are handled by the React / Reach router that should all be served the main React app's index.html.
+var reactRouterPaths = []string{
+	"/",
+	"/alerts",
+	"/config",
+	"/flags",
+	"/graph",
+	"/rules",
+	"/service-discovery",
+	"/status",
+	"/targets",
+	"/tsdb-status",
+	"/version",
+}
 
 // withStackTrace logs the stack trace in case the request panics. The function
 // will re-raise the error which will then be handled by the net/http package.
@@ -300,6 +296,11 @@ func New(logger log.Logger, o *Options) *Handler {
 			return *h.config
 		},
 		o.Flags,
+		api_v1.GlobalURLOptions{
+			ListenAddress: o.ListenAddress,
+			Host:          o.ExternalURL.Host,
+			Scheme:        o.ExternalURL.Scheme,
+		},
 		h.testReady,
 		func() api_v1.TSDBAdmin {
 			return h.options.TSDB()
@@ -973,7 +974,7 @@ func tmplFuncs(consolesPath string, opts *Options) template_text.FuncMap {
 			if err != nil {
 				return u
 			}
-			for _, lhr := range localhostRepresentations {
+			for _, lhr := range api_v1.LocalhostRepresentations {
 				if host == lhr {
 					_, ownPort, err := net.SplitHostPort(opts.ListenAddress)
 					if err != nil {
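With the list of localhost representations now exported from the API package, web.go's template helper and the API share a single definition. The sketch below shows that shared lookup in isolation; the isLocalhost helper is hypothetical, only the exported variable and its import path come from the diff above.

package main

import (
	"fmt"

	api_v1 "github.com/prometheus/prometheus/web/api/v1"
)

// isLocalhost performs the same membership check that both the classic-UI
// template function and getGlobalURL now perform against the shared list.
func isLocalhost(host string) bool {
	for _, lhr := range api_v1.LocalhostRepresentations {
		if host == lhr {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(isLocalhost("127.0.0.1"))   // true
	fmt.Println(isLocalhost("example.com")) // false
}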
@@ -305,7 +305,11 @@ func TestRoutePrefix(t *testing.T) {
 		Notifier:       nil,
 		RoutePrefix:    "/prometheus",
 		EnableAdminAPI: true,
-		TSDB:           func() *libtsdb.DB { return db },
+		ExternalURL: &url.URL{
+			Host:   "localhost.localdomain:9090",
+			Scheme: "http",
+		},
+		TSDB: func() *libtsdb.DB { return db },
 	}
 
 	opts.Flags = map[string]string{}
@@ -391,7 +395,12 @@ func TestDebugHandler(t *testing.T) {
 		{"/foo", "/bar/debug/pprof/goroutine", 404},
 	} {
 		opts := &Options{
-			RoutePrefix: tc.prefix,
+			RoutePrefix:   tc.prefix,
+			ListenAddress: "somehost:9090",
+			ExternalURL: &url.URL{
+				Host:   "localhost.localdomain:9090",
+				Scheme: "http",
+			},
 		}
 		handler := New(nil, opts)
 		handler.Ready()
@@ -411,7 +420,14 @@ func TestDebugHandler(t *testing.T) {
 func TestHTTPMetrics(t *testing.T) {
 	t.Parallel()
 
-	handler := New(nil, &Options{RoutePrefix: "/"})
+	handler := New(nil, &Options{
+		RoutePrefix:   "/",
+		ListenAddress: "somehost:9090",
+		ExternalURL: &url.URL{
+			Host:   "localhost.localdomain:9090",
+			Scheme: "http",
+		},
+	})
 	getReady := func() int {
 		t.Helper()
 		w := httptest.NewRecorder()