Mirror of https://github.com/prometheus/prometheus.git
Adds normalization of localhost urls in targets page react (#6794)
* support for globalurls in targets page react
  Signed-off-by: Harkishen Singh <harkishensingh@hotmail.com>
* fixed tests
  Signed-off-by: Harkishen Singh <harkishensingh@hotmail.com>
* removed fmts
  Signed-off-by: Harkishen Singh <harkishensingh@hotmail.com>
* implemented suggestions
  Signed-off-by: Harkishen Singh <harkishensingh@hotmail.com>
* formatted
  Signed-off-by: Harkishen Singh <harkishensingh@hotmail.com>
* implemented suggestions. fixed tests.
  Signed-off-by: Harkishen Singh <harkishensingh@hotmail.com>
* formated go code
  Signed-off-by: Harkishen Singh <harkishensingh@hotmail.com>
* implemented suggestions
  Signed-off-by: Harkishen Singh <harkishensingh@hotmail.com>
This commit is contained in:
parent 88af973663
commit 489a9aa7b9
@@ -18,6 +18,7 @@ import (
     "fmt"
     "math"
     "math/rand"
+    "net"
     "net/http"
     "net/url"
     "os"
@@ -79,12 +80,15 @@ const (
     errorNotFound errorType = "not_found"
 )

-var remoteReadQueries = prometheus.NewGauge(prometheus.GaugeOpts{
-    Namespace: namespace,
-    Subsystem: subsystem,
-    Name:      "remote_read_queries",
-    Help:      "The current number of remote read queries being executed or waiting.",
-})
+var (
+    LocalhostRepresentations = []string{"127.0.0.1", "localhost"}
+    remoteReadQueries        = prometheus.NewGauge(prometheus.GaugeOpts{
+        Namespace: namespace,
+        Subsystem: subsystem,
+        Name:      "remote_read_queries",
+        Help:      "The current number of remote read queries being executed or waiting.",
+    })
+)

 type apiError struct {
     typ errorType
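
For illustration, a minimal standalone sketch of how the newly exported LocalhostRepresentations list is meant to be consulted. The isLocalhost helper is hypothetical, not part of the commit; the commit inlines this loop inside getGlobalURL further down.

package main

import "fmt"

// LocalhostRepresentations mirrors the exported variable introduced above.
var LocalhostRepresentations = []string{"127.0.0.1", "localhost"}

// isLocalhost reports whether host is one of the known localhost spellings.
// Illustrative only: the commit performs this check inline in getGlobalURL.
func isLocalhost(host string) bool {
	for _, lhr := range LocalhostRepresentations {
		if host == lhr {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(isLocalhost("127.0.0.1"))   // true
	fmt.Println(isLocalhost("example.com")) // false
}
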
@@ -175,6 +179,7 @@ type API struct {
     config   func() config.Config
     flagsMap map[string]string
     ready    func(http.HandlerFunc) http.HandlerFunc
+    globalURLOptions GlobalURLOptions

     db          func() TSDBAdmin
     enableAdmin bool

@@ -200,6 +205,7 @@ func NewAPI(
     ar alertmanagerRetriever,
     configFunc func() config.Config,
     flagsMap map[string]string,
+    globalURLOptions GlobalURLOptions,
     readyFunc func(http.HandlerFunc) http.HandlerFunc,
     db func() TSDBAdmin,
     enableAdmin bool,

@@ -222,6 +228,7 @@ func NewAPI(
         config:   configFunc,
         flagsMap: flagsMap,
         ready:    readyFunc,
+        globalURLOptions: globalURLOptions,
         db:          db,
         enableAdmin: enableAdmin,
         rulesRetriever: rr,
@@ -583,6 +590,7 @@ type Target struct {

     ScrapePool string `json:"scrapePool"`
     ScrapeURL  string `json:"scrapeUrl"`
+    GlobalURL  string `json:"globalUrl"`

     LastError  string    `json:"lastError"`
     LastScrape time.Time `json:"lastScrape"`
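
As a rough sketch of what the new field looks like on the wire, the snippet below marshals a trimmed-down stand-in for the API's Target struct. The struct name target and the host prom.example.com are illustrative assumptions, not part of the commit.

package main

import (
	"encoding/json"
	"fmt"
)

// target is a trimmed-down stand-in for the API's Target struct, keeping
// only the fields relevant to this change.
type target struct {
	ScrapePool string `json:"scrapePool"`
	ScrapeURL  string `json:"scrapeUrl"`
	GlobalURL  string `json:"globalUrl"`
}

func main() {
	b, _ := json.Marshal(target{
		ScrapePool: "prometheus",
		ScrapeURL:  "http://localhost:9090/metrics",
		GlobalURL:  "http://prom.example.com:9090/metrics", // illustrative external host
	})
	fmt.Println(string(b))
	// {"scrapePool":"prometheus","scrapeUrl":"http://localhost:9090/metrics","globalUrl":"http://prom.example.com:9090/metrics"}
}
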
@@ -602,6 +610,54 @@ type TargetDiscovery struct {
     DroppedTargets []*DroppedTarget `json:"droppedTargets"`
 }

+// GlobalURLOptions contains fields used for deriving the global URL for local targets.
+type GlobalURLOptions struct {
+    ListenAddress string
+    Host          string
+    Scheme        string
+}
+
+func getGlobalURL(u *url.URL, opts GlobalURLOptions) (*url.URL, error) {
+    host, port, err := net.SplitHostPort(u.Host)
+    if err != nil {
+        return u, err
+    }
+
+    for _, lhr := range LocalhostRepresentations {
+        if host == lhr {
+            _, ownPort, err := net.SplitHostPort(opts.ListenAddress)
+            if err != nil {
+                return u, err
+            }
+
+            if port == ownPort {
+                // Only in the case where the target is on localhost and its port is
+                // the same as the one we're listening on, we know for sure that
+                // we're monitoring our own process and that we need to change the
+                // scheme, hostname, and port to the externally reachable ones as
+                // well. We shouldn't need to touch the path at all, since if a
+                // path prefix is defined, the path under which we scrape ourselves
+                // should already contain the prefix.
+                u.Scheme = opts.Scheme
+                u.Host = opts.Host
+            } else {
+                // Otherwise, we only know that localhost is not reachable
+                // externally, so we replace only the hostname by the one in the
+                // external URL. It could be the wrong hostname for the service on
+                // this port, but it's still the best possible guess.
+                host, _, err := net.SplitHostPort(opts.Host)
+                if err != nil {
+                    return u, err
+                }
+
+                u.Host = host + ":" + port
+            }
+            break
+        }
+    }
+
+    return u, nil
+}
+
 func (api *API) targets(r *http.Request) apiFuncResult {
     sortKeys := func(targets map[string][]*scrape.Target) ([]string, int) {
         var n int
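
To make the two normalization branches concrete, here is a self-contained sketch that condenses getGlobalURL into a local copy named normalize and exercises both cases. The listen address 0.0.0.0:9090 and external host prom.example.com:9090 are illustrative assumptions, not values from the commit.

package main

import (
	"fmt"
	"net"
	"net/url"
)

// globalURLOptions mirrors the GlobalURLOptions type introduced above.
type globalURLOptions struct {
	ListenAddress string
	Host          string
	Scheme        string
}

var localhostRepresentations = []string{"127.0.0.1", "localhost"}

// normalize is a condensed copy of getGlobalURL from the hunk above.
func normalize(u *url.URL, opts globalURLOptions) (*url.URL, error) {
	host, port, err := net.SplitHostPort(u.Host)
	if err != nil {
		return u, err
	}
	for _, lhr := range localhostRepresentations {
		if host != lhr {
			continue
		}
		_, ownPort, err := net.SplitHostPort(opts.ListenAddress)
		if err != nil {
			return u, err
		}
		if port == ownPort {
			// Same port we listen on: this is Prometheus scraping itself,
			// so swap in the externally reachable scheme and host.
			u.Scheme = opts.Scheme
			u.Host = opts.Host
		} else {
			// Different port: only the unreachable localhost hostname is
			// replaced; the target's own port is kept.
			externalHost, _, err := net.SplitHostPort(opts.Host)
			if err != nil {
				return u, err
			}
			u.Host = externalHost + ":" + port
		}
		break
	}
	return u, nil
}

func main() {
	opts := globalURLOptions{
		ListenAddress: "0.0.0.0:9090",          // illustrative listen address
		Host:          "prom.example.com:9090", // illustrative external host
		Scheme:        "https",
	}
	self, _ := url.Parse("http://localhost:9090/metrics")
	other, _ := url.Parse("http://127.0.0.1:9100/metrics")

	u1, _ := normalize(self, opts)
	u2, _ := normalize(other, opts)
	fmt.Println(u1) // https://prom.example.com:9090/metrics
	fmt.Println(u2) // http://prom.example.com:9100/metrics
}
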
@@ -641,12 +697,22 @@ func (api *API) targets(r *http.Request) apiFuncResult {
                 lastErrStr = lastErr.Error()
             }

+            globalURL, err := getGlobalURL(target.URL(), api.globalURLOptions)
+
             res.ActiveTargets = append(res.ActiveTargets, &Target{
                 DiscoveredLabels: target.DiscoveredLabels().Map(),
                 Labels:           target.Labels().Map(),
                 ScrapePool:       key,
                 ScrapeURL:        target.URL().String(),
-                LastError:        lastErrStr,
+                GlobalURL:        globalURL.String(),
+                LastError: func() string {
+                    if err == nil && lastErrStr == "" {
+                        return ""
+                    } else if err != nil {
+                        return errors.Wrapf(err, lastErrStr).Error()
+                    }
+                    return lastErrStr
+                }(),
                 LastScrape:         target.LastScrape(),
                 LastScrapeDuration: target.LastScrapeDuration().Seconds(),
                 Health:             target.Health(),
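
The LastError closure above combines a URL-normalization failure with the target's own scrape error via errors.Wrapf from github.com/pkg/errors. Below is a minimal sketch of the resulting message, matching the "failed: missing port in address" strings expected by the updated tests; passing an empty host to net.SplitHostPort is simply an illustrative way to trigger that exact error text.

package main

import (
	"fmt"
	"net"

	"github.com/pkg/errors"
)

func main() {
	// net.SplitHostPort fails when the scrape URL's host has no port.
	_, _, err := net.SplitHostPort("")
	lastErrStr := "failed" // the target's own scrape error, as in the tests

	// Mirrors the closure: the scrape error becomes the message and the
	// normalization error is appended after a colon.
	fmt.Println(errors.Wrapf(err, lastErrStr).Error())
	// Output: failed: missing port in address
}
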
@@ -826,8 +826,9 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
                 },
                 ScrapePool: "blackbox",
                 ScrapeURL:  "http://localhost:9115/probe?target=example.com",
+                GlobalURL:  "http://localhost:9115/probe?target=example.com",
                 Health:     "down",
-                LastError:  "failed",
+                LastError:  "failed: missing port in address",
                 LastScrape: scrapeStart,
                 LastScrapeDuration: 0.1,
             },

@@ -838,6 +839,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
                 },
                 ScrapePool: "test",
                 ScrapeURL:  "http://example.com:8080/metrics",
+                GlobalURL:  "http://example.com:8080/metrics",
                 Health:     "up",
                 LastError:  "",
                 LastScrape: scrapeStart,

@@ -870,8 +872,9 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
                 },
                 ScrapePool: "blackbox",
                 ScrapeURL:  "http://localhost:9115/probe?target=example.com",
+                GlobalURL:  "http://localhost:9115/probe?target=example.com",
                 Health:     "down",
-                LastError:  "failed",
+                LastError:  "failed: missing port in address",
                 LastScrape: scrapeStart,
                 LastScrapeDuration: 0.1,
             },

@@ -882,6 +885,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
                 },
                 ScrapePool: "test",
                 ScrapeURL:  "http://example.com:8080/metrics",
+                GlobalURL:  "http://example.com:8080/metrics",
                 Health:     "up",
                 LastError:  "",
                 LastScrape: scrapeStart,

@@ -914,8 +918,9 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
                 },
                 ScrapePool: "blackbox",
                 ScrapeURL:  "http://localhost:9115/probe?target=example.com",
+                GlobalURL:  "http://localhost:9115/probe?target=example.com",
                 Health:     "down",
-                LastError:  "failed",
+                LastError:  "failed: missing port in address",
                 LastScrape: scrapeStart,
                 LastScrapeDuration: 0.1,
             },

@@ -926,6 +931,7 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI
                 },
                 ScrapePool: "test",
                 ScrapeURL:  "http://example.com:8080/metrics",
+                GlobalURL:  "http://example.com:8080/metrics",
                 Health:     "up",
                 LastError:  "",
                 LastScrape: scrapeStart,
@@ -6,19 +6,21 @@ import EndpointLink from './EndpointLink';
 describe('EndpointLink', () => {
   it('renders a simple anchor if the endpoint has no query params', () => {
     const endpoint = 'http://100.104.208.71:15090/stats/prometheus';
-    const endpointLink = shallow(<EndpointLink endpoint={endpoint} />);
+    const globalURL = 'http://100.104.208.71:15090/stats/prometheus';
+    const endpointLink = shallow(<EndpointLink endpoint={endpoint} globalUrl={globalURL} />);
     const anchor = endpointLink.find('a');
-    expect(anchor.prop('href')).toEqual(endpoint);
+    expect(anchor.prop('href')).toEqual(globalURL);
     expect(anchor.children().text()).toEqual(endpoint);
     expect(endpointLink.find('br')).toHaveLength(0);
   });

   it('renders an anchor targeting endpoint but with query param labels if the endpoint has query params', () => {
     const endpoint = 'http://100.99.128.71:9115/probe?module=http_2xx&target=http://some-service';
-    const endpointLink = shallow(<EndpointLink endpoint={endpoint} />);
+    const globalURL = 'http://100.99.128.71:9115/probe?module=http_2xx&target=http://some-service';
+    const endpointLink = shallow(<EndpointLink endpoint={endpoint} globalUrl={globalURL} />);
     const anchor = endpointLink.find('a');
     const badges = endpointLink.find(Badge);
-    expect(anchor.prop('href')).toEqual(endpoint);
+    expect(anchor.prop('href')).toEqual(globalURL);
     expect(anchor.children().text()).toEqual('http://100.99.128.71:9115/probe');
     expect(endpointLink.find('br')).toHaveLength(1);
     expect(badges).toHaveLength(2);

@@ -29,7 +31,7 @@ describe('EndpointLink', () => {
   });

   it('renders an alert if url is invalid', () => {
-    const endpointLink = shallow(<EndpointLink endpoint={'afdsacas'} />);
+    const endpointLink = shallow(<EndpointLink endpoint={'afdsacas'} globalUrl={'afdsacas'} />);
     const err = endpointLink.find(Alert);
     expect(err.render().text()).toEqual('Error: Invalid URL');
   });
@@ -3,9 +3,10 @@ import { Badge, Alert } from 'reactstrap';

 export interface EndpointLinkProps {
   endpoint: string;
+  globalUrl: string;
 }

-const EndpointLink: FC<EndpointLinkProps> = ({ endpoint }) => {
+const EndpointLink: FC<EndpointLinkProps> = ({ endpoint, globalUrl }) => {
   let url: URL;
   try {
     url = new URL(endpoint);

@@ -22,7 +23,7 @@ const EndpointLink: FC<EndpointLinkProps> = ({ endpoint }) => {

   return (
     <>
-      <a href={endpoint}>{`${protocol}//${host}${pathname}`}</a>
+      <a href={globalUrl}>{`${protocol}//${host}${pathname}`}</a>
       {params.length > 0 ? <br /> : null}
       {params.map(([labelName, labelValue]: [string, string]) => {
         return (
@@ -49,6 +49,7 @@ const ScrapePoolPanel: FC<PanelProps> = ({ scrapePool, targetGroup }) => {
       labels,
       scrapePool,
       scrapeUrl,
+      globalUrl,
       lastError,
       lastScrape,
       lastScrapeDuration,

@@ -59,7 +60,7 @@ const ScrapePoolPanel: FC<PanelProps> = ({ scrapePool, targetGroup }) => {
     return (
       <tr key={scrapeUrl}>
         <td className={styles.endpoint}>
-          <EndpointLink endpoint={scrapeUrl} />
+          <EndpointLink endpoint={scrapeUrl} globalUrl={globalUrl} />
         </td>
         <td className={styles.state}>
           <Badge color={color}>{health.toUpperCase()}</Badge>
@@ -20,6 +20,7 @@ export const targetGroups: ScrapePools = Object.freeze({
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io',
+       globalUrl: 'http://localhost.localdomain:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io',
        lastError: '',
        lastScrape: '2019-11-04T11:52:14.759299-07:00',
        lastScrapeDuration: 36560147,

@@ -39,6 +40,7 @@ export const targetGroups: ScrapePools = Object.freeze({
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io',
+       globalUrl: 'http://localhost.localdomain:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io',
        lastError: '',
        lastScrape: '2019-11-04T11:52:24.731096-07:00',
        lastScrapeDuration: 49448763,

@@ -58,6 +60,7 @@ export const targetGroups: ScrapePools = Object.freeze({
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080',
+       globalUrl: 'http://localhost.localdomain:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080',
        lastError: '',
        lastScrape: '2019-11-04T11:52:13.516654-07:00',
        lastScrapeDuration: 120916592,

@@ -81,6 +84,7 @@ export const targetGroups: ScrapePools = Object.freeze({
        },
        scrapePool: 'node_exporter',
        scrapeUrl: 'http://localhost:9100/metrics',
+       globalUrl: 'http://localhost.localdomain:9100/metrics',
        lastError: '',
        lastScrape: '2019-11-04T11:52:14.145703-07:00',
        lastScrapeDuration: 3842307,

@@ -104,6 +108,7 @@ export const targetGroups: ScrapePools = Object.freeze({
        },
        scrapePool: 'prometheus',
        scrapeUrl: 'http://localhost:9090/metrics',
+       globalUrl: 'http://localhost.localdomain:9000/metrics',
        lastError: '',
        lastScrape: '2019-11-04T11:52:18.479731-07:00',
        lastScrapeDuration: 4050976,
@@ -7,6 +7,7 @@ export interface Target {
   labels: Labels;
   scrapePool: string;
   scrapeUrl: string;
+  globalUrl: string;
   lastError: string;
   lastScrape: string;
   lastScrapeDuration: number;
web/web.go (39 changed lines)
@@ -70,24 +70,20 @@ import (
     "github.com/prometheus/prometheus/web/ui"
 )

-var (
-    localhostRepresentations = []string{"127.0.0.1", "localhost"}
-
-    // Paths that are handled by the React / Reach router that should all be served the main React app's index.html.
-    reactRouterPaths = []string{
-        "/",
-        "/alerts",
-        "/config",
-        "/flags",
-        "/graph",
-        "/rules",
-        "/service-discovery",
-        "/status",
-        "/targets",
-        "/tsdb-status",
-        "/version",
-    }
-)
+// Paths that are handled by the React / Reach router that should all be served the main React app's index.html.
+var reactRouterPaths = []string{
+    "/",
+    "/alerts",
+    "/config",
+    "/flags",
+    "/graph",
+    "/rules",
+    "/service-discovery",
+    "/status",
+    "/targets",
+    "/tsdb-status",
+    "/version",
+}

 // withStackTrace logs the stack trace in case the request panics. The function
 // will re-raise the error which will then be handled by the net/http package.
@@ -300,6 +296,11 @@ func New(logger log.Logger, o *Options) *Handler {
             return *h.config
         },
         o.Flags,
+        api_v1.GlobalURLOptions{
+            ListenAddress: o.ListenAddress,
+            Host:          o.ExternalURL.Host,
+            Scheme:        o.ExternalURL.Scheme,
+        },
         h.testReady,
         func() api_v1.TSDBAdmin {
             return h.options.TSDB()
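
For context, a small sketch of how the three GlobalURLOptions fields are populated from the handler options in the hunk above. The concrete listen address and external URL values are illustrative assumptions; in Prometheus they would come from the --web.listen-address and --web.external-url settings.

package main

import (
	"fmt"
	"net/url"
)

// globalURLOptions mirrors api_v1.GlobalURLOptions from the hunk above.
type globalURLOptions struct {
	ListenAddress string
	Host          string
	Scheme        string
}

func main() {
	// Illustrative values standing in for o.ListenAddress and o.ExternalURL.
	listenAddress := "0.0.0.0:9090"
	externalURL, _ := url.Parse("https://prom.example.com:9090/")

	opts := globalURLOptions{
		ListenAddress: listenAddress,
		Host:          externalURL.Host,
		Scheme:        externalURL.Scheme,
	}
	fmt.Printf("%+v\n", opts)
	// {ListenAddress:0.0.0.0:9090 Host:prom.example.com:9090 Scheme:https}
}
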
@@ -973,7 +974,7 @@ func tmplFuncs(consolesPath string, opts *Options) template_text.FuncMap {
             if err != nil {
                 return u
             }
-            for _, lhr := range localhostRepresentations {
+            for _, lhr := range api_v1.LocalhostRepresentations {
                 if host == lhr {
                     _, ownPort, err := net.SplitHostPort(opts.ListenAddress)
                     if err != nil {
@@ -305,7 +305,11 @@ func TestRoutePrefix(t *testing.T) {
         Notifier:       nil,
         RoutePrefix:    "/prometheus",
         EnableAdminAPI: true,
-        TSDB:           func() *libtsdb.DB { return db },
+        ExternalURL: &url.URL{
+            Host:   "localhost.localdomain:9090",
+            Scheme: "http",
+        },
+        TSDB: func() *libtsdb.DB { return db },
     }

     opts.Flags = map[string]string{}

@@ -391,7 +395,12 @@ func TestDebugHandler(t *testing.T) {
         {"/foo", "/bar/debug/pprof/goroutine", 404},
     } {
         opts := &Options{
             RoutePrefix: tc.prefix,
+            ListenAddress: "somehost:9090",
+            ExternalURL: &url.URL{
+                Host:   "localhost.localdomain:9090",
+                Scheme: "http",
+            },
         }
         handler := New(nil, opts)
         handler.Ready()

@@ -411,7 +420,14 @@ func TestDebugHandler(t *testing.T) {
 func TestHTTPMetrics(t *testing.T) {
     t.Parallel()

-    handler := New(nil, &Options{RoutePrefix: "/"})
+    handler := New(nil, &Options{
+        RoutePrefix:   "/",
+        ListenAddress: "somehost:9090",
+        ExternalURL: &url.URL{
+            Host:   "localhost.localdomain:9090",
+            Scheme: "http",
+        },
+    })
     getReady := func() int {
         t.Helper()
         w := httptest.NewRecorder()