From e1b708200853371517480176da0a03437b3bb2c2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=81ukasz=20Mierzwa?=
Date: Fri, 23 Dec 2022 10:55:08 +0000
Subject: [PATCH] Show individual scrape pools on /targets page (#11142)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Add API endpoints for getting scrape pool names

This adds an api/v1/scrape_pools endpoint that returns the list of
*names* of all the configured scrape pools. Having it makes it possible
to find out which scrape pools are defined without having to list and
parse all targets.

The second change adds scrapePool query parameter support to the
api/v1/targets endpoint, which filters the returned targets down to
only the ones belonging to the given scrape pool name.

Together these changes make it possible to query the data of a single
scrape pool, rather than fetching all targets for all scrape pools.
The problem with the api/v1/targets endpoint is that it returns a huge
amount of data when a lot of scrape pools are configured.

Signed-off-by: Łukasz Mierzwa

* Add a scrape pool selector on /targets page

The current targets page lists all possible targets. This works great
if you only have a few scrape pools configured, but for systems with a
lot of scrape pools and targets it slows things down considerably. Not
only does the /targets page load very slowly in such a case (waiting
for a huge API response), it also takes a long time to render, due to
the huge number of elements. This change adds a dropdown selector so
it's possible to view only the scrape pool of interest. There's also a
scrapePool query param that will open the selected pool automatically.

Signed-off-by: Łukasz Mierzwa

Signed-off-by: Łukasz Mierzwa
---
 docs/querying/api.md                          |  32 +++++
 scrape/manager.go                             |  12 ++
 scrape/manager_test.go                        |  68 +++++++++
 web/api/v1/api.go                             |  52 +++++--
 web/api/v1/errors_test.go                     |   8 ++
 web/ui/react-app/src/pages/targets/Filter.tsx |   2 +-
 .../src/pages/targets/ScrapePoolList.test.tsx |   4 +-
 .../src/pages/targets/ScrapePoolList.tsx      | 134 ++++++++++++++++--
 .../src/pages/targets/Targets.test.tsx        |  27 +++-
 .../react-app/src/pages/targets/Targets.tsx   |  40 +++++-
 .../pages/targets/__testdata__/testdata.ts    |  81 +++++++++++
 web/ui/react-app/src/pages/targets/target.ts  |  17 +++
 web/ui/react-app/src/utils/index.ts           |  14 +-
 web/web.go                                    |   3 +-
 14 files changed, 458 insertions(+), 36 deletions(-)

diff --git a/docs/querying/api.md b/docs/querying/api.md
index 7ac761703e..53d12052c7 100644
--- a/docs/querying/api.md
+++ b/docs/querying/api.md
@@ -623,6 +623,38 @@ $ curl 'http://localhost:9090/api/v1/targets?state=active'
 }
 ```
 
+The `scrapePool` query parameter allows the caller to filter by scrape pool name.
+
+```json
+$ curl 'http://localhost:9090/api/v1/targets?scrapePool=node_exporter'
+{
+  "status": "success",
+  "data": {
+    "activeTargets": [
+      {
+        "discoveredLabels": {
+          "__address__": "127.0.0.1:9091",
+          "__metrics_path__": "/metrics",
+          "__scheme__": "http",
+          "job": "node_exporter"
+        },
+        "labels": {
+          "instance": "127.0.0.1:9091",
+          "job": "node_exporter"
+        },
+        "scrapePool": "node_exporter",
+        "scrapeUrl": "http://127.0.0.1:9091/metrics",
+        "globalUrl": "http://example-prometheus:9091/metrics",
+        "lastError": "",
+        "lastScrape": "2017-01-17T15:07:44.723715405+01:00",
+        "lastScrapeDuration": 50688943,
+        "health": "up"
+      }
+    ],
+    "droppedTargets": []
+  }
+}
+```
 
 ## Rules
diff --git a/scrape/manager.go b/scrape/manager.go
index 3c77dac397..e0a7102850 100644
--- a/scrape/manager.go
+++ b/scrape/manager.go
@@ -313,6 +313,18 @@ func (m *Manager) TargetsAll() map[string][]*Target {
 	return targets
 }
 
+// ScrapePools returns the list of all scrape pool names.
+func (m *Manager) ScrapePools() []string {
+	m.mtxScrape.Lock()
+	defer m.mtxScrape.Unlock()
+
+	names := make([]string, 0, len(m.scrapePools))
+	for name := range m.scrapePools {
+		names = append(names, name)
+	}
+	return names
+}
+
 // TargetsActive returns the active targets currently being scraped.
 func (m *Manager) TargetsActive() map[string][]*Target {
 	m.mtxScrape.Lock()
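The two endpoints above are easy to consume from any HTTP client. A minimal TypeScript sketch, assuming a Prometheus server at `localhost:9090` and only the response fields shown in the documentation example; the helper names are mine, not part of the patch:

```typescript
// Hypothetical client helpers for the two endpoints touched by this patch.
// The { status, data } envelope matches the v1 HTTP API convention.
interface ScrapePoolsResponse {
  status: string;
  data: { scrapePools: string[] };
}

async function fetchScrapePoolNames(base = 'http://localhost:9090'): Promise<string[]> {
  const res = await fetch(`${base}/api/v1/scrape_pools`);
  const body = (await res.json()) as ScrapePoolsResponse;
  return body.data.scrapePools;
}

async function fetchActiveTargets(pool: string, base = 'http://localhost:9090'): Promise<unknown[]> {
  // Passing scrapePool avoids downloading every target of every pool.
  const url = `${base}/api/v1/targets?state=active&scrapePool=${encodeURIComponent(pool)}`;
  const res = await fetch(url);
  const body = (await res.json()) as { data: { activeTargets: unknown[] } };
  return body.data.activeTargets;
}
```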
"job3"}, scrapeManager.ScrapePools()) +} diff --git a/web/api/v1/api.go b/web/api/v1/api.go index 54652471db..894a8666a6 100644 --- a/web/api/v1/api.go +++ b/web/api/v1/api.go @@ -23,6 +23,7 @@ import ( "net/url" "os" "path/filepath" + "sort" "strconv" "strings" "time" @@ -88,6 +89,11 @@ func (e *apiError) Error() string { return fmt.Sprintf("%s: %s", e.typ, e.err) } +// ScrapePoolsRetriever provide the list of all scrape pools. +type ScrapePoolsRetriever interface { + ScrapePools() []string +} + // TargetRetriever provides the list of active/dropped targets to scrape or not. type TargetRetriever interface { TargetsActive() map[string][]*scrape.Target @@ -179,6 +185,7 @@ type API struct { QueryEngine QueryEngine ExemplarQueryable storage.ExemplarQueryable + scrapePoolsRetriever func(context.Context) ScrapePoolsRetriever targetRetriever func(context.Context) TargetRetriever alertmanagerRetriever func(context.Context) AlertmanagerRetriever rulesRetriever func(context.Context) RulesRetriever @@ -216,6 +223,7 @@ func NewAPI( q storage.SampleAndChunkQueryable, ap storage.Appendable, eq storage.ExemplarQueryable, + spsr func(context.Context) ScrapePoolsRetriever, tr func(context.Context) TargetRetriever, ar func(context.Context) AlertmanagerRetriever, configFunc func() config.Config, @@ -243,6 +251,7 @@ func NewAPI( Queryable: q, ExemplarQueryable: eq, + scrapePoolsRetriever: spsr, targetRetriever: tr, alertmanagerRetriever: ar, @@ -338,6 +347,7 @@ func (api *API) Register(r *route.Router) { r.Post("/series", wrapAgent(api.series)) r.Del("/series", wrapAgent(api.dropSeries)) + r.Get("/scrape_pools", wrap(api.scrapePools)) r.Get("/targets", wrap(api.targets)) r.Get("/targets/metadata", wrap(api.targetMetadata)) r.Get("/alertmanagers", wrapAgent(api.alertmanagers)) @@ -824,6 +834,10 @@ type Target struct { ScrapeTimeout string `json:"scrapeTimeout"` } +type ScrapePoolsDiscovery struct { + ScrapePools []string `json:"scrapePools"` +} + // DroppedTarget has the information for one target that was dropped during relabelling. type DroppedTarget struct { // Labels before any processing. @@ -903,6 +917,13 @@ func getGlobalURL(u *url.URL, opts GlobalURLOptions) (*url.URL, error) { return u, nil } +func (api *API) scrapePools(r *http.Request) apiFuncResult { + names := api.scrapePoolsRetriever(r.Context()).ScrapePools() + sort.Strings(names) + res := &ScrapePoolsDiscovery{ScrapePools: names} + return apiFuncResult{data: res, err: nil, warnings: nil, finalizer: nil} +} + func (api *API) targets(r *http.Request) apiFuncResult { sortKeys := func(targets map[string][]*scrape.Target) ([]string, int) { var n int @@ -915,15 +936,7 @@ func (api *API) targets(r *http.Request) apiFuncResult { return keys, n } - flatten := func(targets map[string][]*scrape.Target) []*scrape.Target { - keys, n := sortKeys(targets) - res := make([]*scrape.Target, 0, n) - for _, k := range keys { - res = append(res, targets[k]...) 
diff --git a/web/api/v1/api.go b/web/api/v1/api.go
index 54652471db..894a8666a6 100644
--- a/web/api/v1/api.go
+++ b/web/api/v1/api.go
@@ -23,6 +23,7 @@ import (
 	"net/url"
 	"os"
 	"path/filepath"
+	"sort"
 	"strconv"
 	"strings"
 	"time"
@@ -88,6 +89,11 @@ func (e *apiError) Error() string {
 	return fmt.Sprintf("%s: %s", e.typ, e.err)
 }
 
+// ScrapePoolsRetriever provides the list of all scrape pools.
+type ScrapePoolsRetriever interface {
+	ScrapePools() []string
+}
+
 // TargetRetriever provides the list of active/dropped targets to scrape or not.
 type TargetRetriever interface {
 	TargetsActive() map[string][]*scrape.Target
@@ -179,6 +185,7 @@ type API struct {
 	QueryEngine       QueryEngine
 	ExemplarQueryable storage.ExemplarQueryable
 
+	scrapePoolsRetriever  func(context.Context) ScrapePoolsRetriever
 	targetRetriever       func(context.Context) TargetRetriever
 	alertmanagerRetriever func(context.Context) AlertmanagerRetriever
 	rulesRetriever        func(context.Context) RulesRetriever
@@ -216,6 +223,7 @@ func NewAPI(
 	q storage.SampleAndChunkQueryable,
 	ap storage.Appendable,
 	eq storage.ExemplarQueryable,
+	spsr func(context.Context) ScrapePoolsRetriever,
 	tr func(context.Context) TargetRetriever,
 	ar func(context.Context) AlertmanagerRetriever,
 	configFunc func() config.Config,
@@ -243,6 +251,7 @@ func NewAPI(
 		Queryable:         q,
 		ExemplarQueryable: eq,
 
+		scrapePoolsRetriever:  spsr,
 		targetRetriever:       tr,
 		alertmanagerRetriever: ar,
@@ -338,6 +347,7 @@ func (api *API) Register(r *route.Router) {
 	r.Post("/series", wrapAgent(api.series))
 	r.Del("/series", wrapAgent(api.dropSeries))
 
+	r.Get("/scrape_pools", wrap(api.scrapePools))
 	r.Get("/targets", wrap(api.targets))
 	r.Get("/targets/metadata", wrap(api.targetMetadata))
 	r.Get("/alertmanagers", wrapAgent(api.alertmanagers))
@@ -824,6 +834,10 @@ type Target struct {
 	ScrapeTimeout string `json:"scrapeTimeout"`
 }
 
+type ScrapePoolsDiscovery struct {
+	ScrapePools []string `json:"scrapePools"`
+}
+
 // DroppedTarget has the information for one target that was dropped during relabelling.
 type DroppedTarget struct {
 	// Labels before any processing.
@@ -903,6 +917,13 @@ func getGlobalURL(u *url.URL, opts GlobalURLOptions) (*url.URL, error) {
 	return u, nil
 }
 
+func (api *API) scrapePools(r *http.Request) apiFuncResult {
+	names := api.scrapePoolsRetriever(r.Context()).ScrapePools()
+	sort.Strings(names)
+	res := &ScrapePoolsDiscovery{ScrapePools: names}
+	return apiFuncResult{data: res, err: nil, warnings: nil, finalizer: nil}
+}
+
 func (api *API) targets(r *http.Request) apiFuncResult {
 	sortKeys := func(targets map[string][]*scrape.Target) ([]string, int) {
 		var n int
@@ -915,15 +936,7 @@ func (api *API) targets(r *http.Request) apiFuncResult {
 		return keys, n
 	}
 
-	flatten := func(targets map[string][]*scrape.Target) []*scrape.Target {
-		keys, n := sortKeys(targets)
-		res := make([]*scrape.Target, 0, n)
-		for _, k := range keys {
-			res = append(res, targets[k]...)
-		}
-		return res
-	}
-
+	scrapePool := r.URL.Query().Get("scrapePool")
 	state := strings.ToLower(r.URL.Query().Get("state"))
 	showActive := state == "" || state == "any" || state == "active"
 	showDropped := state == "" || state == "any" || state == "dropped"
@@ -935,6 +948,9 @@ func (api *API) targets(r *http.Request) apiFuncResult {
 		res.ActiveTargets = make([]*Target, 0, numTargets)
 
 		for _, key := range activeKeys {
+			if scrapePool != "" && key != scrapePool {
+				continue
+			}
 			for _, target := range targetsActive[key] {
 				lastErrStr := ""
 				lastErr := target.LastError()
@@ -970,12 +986,18 @@ func (api *API) targets(r *http.Request) apiFuncResult {
 		res.ActiveTargets = []*Target{}
 	}
 	if showDropped {
-		tDropped := flatten(api.targetRetriever(r.Context()).TargetsDropped())
-		res.DroppedTargets = make([]*DroppedTarget, 0, len(tDropped))
-		for _, t := range tDropped {
-			res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{
-				DiscoveredLabels: t.DiscoveredLabels().Map(),
-			})
+		targetsDropped := api.targetRetriever(r.Context()).TargetsDropped()
+		droppedKeys, numTargets := sortKeys(targetsDropped)
+		res.DroppedTargets = make([]*DroppedTarget, 0, numTargets)
+		for _, key := range droppedKeys {
+			if scrapePool != "" && key != scrapePool {
+				continue
+			}
+			for _, target := range targetsDropped[key] {
+				res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{
+					DiscoveredLabels: target.DiscoveredLabels().Map(),
+				})
+			}
 		}
 	} else {
 		res.DroppedTargets = []*DroppedTarget{}
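The handler change above boils down to one rule: when `scrapePool` is set, only the map key equal to it survives; when it is empty, every key does. A standalone TypeScript rendition of that rule, for illustration only — the real implementation is the Go diff above:

```typescript
type TargetsByPool = Record<string, string[]>;

// Mirror of the key filter in api.targets: sort the pool names, keep only
// the requested pool (or all pools when scrapePool is empty), then flatten.
function selectTargets(targets: TargetsByPool, scrapePool: string): string[] {
  return Object.keys(targets)
    .sort()
    .filter((key) => scrapePool === '' || key === scrapePool)
    .flatMap((key) => targets[key]);
}

// selectTargets({ job1: ['t1'], job2: ['t2'] }, 'job2') -> ['t2']
// selectTargets({ job1: ['t1'], job2: ['t2'] }, '')     -> ['t1', 't2']
```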
diff --git a/web/api/v1/errors_test.go b/web/api/v1/errors_test.go
index 90d5f18de3..c86ceb1c96 100644
--- a/web/api/v1/errors_test.go
+++ b/web/api/v1/errors_test.go
@@ -113,6 +113,7 @@ func createPrometheusAPI(q storage.SampleAndChunkQueryable) *route.Router {
 		q,
 		nil,
 		nil,
+		func(context.Context) ScrapePoolsRetriever { return &DummyScrapePoolsRetriever{} },
 		func(context.Context) TargetRetriever { return &DummyTargetRetriever{} },
 		func(context.Context) AlertmanagerRetriever { return &DummyAlertmanagerRetriever{} },
 		func() config.Config { return config.Config{} },
@@ -205,6 +206,13 @@ func (t errorTestSeriesSet) Warnings() storage.Warnings {
 	return nil
 }
 
+// DummyScrapePoolsRetriever implements github.com/prometheus/prometheus/web/api/v1.ScrapePoolsRetriever.
+type DummyScrapePoolsRetriever struct{}
+
+func (DummyScrapePoolsRetriever) ScrapePools() []string {
+	return []string{}
+}
+
 // DummyTargetRetriever implements github.com/prometheus/prometheus/web/api/v1.targetRetriever.
 type DummyTargetRetriever struct{}
diff --git a/web/ui/react-app/src/pages/targets/Filter.tsx b/web/ui/react-app/src/pages/targets/Filter.tsx
index bf6177d6ce..b9ec6e09a1 100644
--- a/web/ui/react-app/src/pages/targets/Filter.tsx
+++ b/web/ui/react-app/src/pages/targets/Filter.tsx
@@ -49,7 +49,7 @@ const Filter: FC<FilterProps> = ({ filter, setFilter, expanded, setExpanded }) =
     },
   };
   return (
-
+
diff --git a/web/ui/react-app/src/pages/targets/ScrapePoolList.test.tsx b/web/ui/react-app/src/pages/targets/ScrapePoolList.test.tsx
index 867d1d3bed..5952dc3490 100644
--- a/web/ui/react-app/src/pages/targets/ScrapePoolList.test.tsx
+++ b/web/ui/react-app/src/pages/targets/ScrapePoolList.test.tsx
@@ -36,7 +36,7 @@ describe('ScrapePoolList', () => {
     await act(async () => {
       scrapePoolList = mount(
-
+
       );
     });
@@ -63,7 +63,7 @@ describe('ScrapePoolList', () => {
     await act(async () => {
       scrapePoolList = mount(
-
+
       );
     });
diff --git a/web/ui/react-app/src/pages/targets/ScrapePoolList.tsx b/web/ui/react-app/src/pages/targets/ScrapePoolList.tsx
index d7078128e8..c9e2947e63 100644
--- a/web/ui/react-app/src/pages/targets/ScrapePoolList.tsx
+++ b/web/ui/react-app/src/pages/targets/ScrapePoolList.tsx
@@ -2,10 +2,10 @@ import { KVSearch } from '@nexucis/kvsearch';
 import { usePathPrefix } from '../../contexts/PathPrefixContext';
 import { useFetch } from '../../hooks/useFetch';
 import { API_PATH } from '../../constants/constants';
-import { groupTargets, ScrapePool, ScrapePools, Target } from './target';
+import { filterTargetsByHealth, groupTargets, ScrapePool, ScrapePools, Target } from './target';
 import { withStatusIndicator } from '../../components/withStatusIndicator';
 import { FC, useCallback, useEffect, useMemo, useState } from 'react';
-import { Col, Collapse, Row } from 'reactstrap';
+import { Badge, Col, Collapse, Dropdown, DropdownItem, DropdownMenu, DropdownToggle, Input, Row } from 'reactstrap';
 import { ScrapePoolContent } from './ScrapePoolContent';
 import Filter, { Expanded, FilterData } from './Filter';
 import { useLocalStorage } from '../../hooks/useLocalStorage';
@@ -13,8 +13,64 @@ import styles from './ScrapePoolPanel.module.css';
 import { ToggleMoreLess } from '../../components/ToggleMoreLess';
 import SearchBar from '../../components/SearchBar';
 import { setQuerySearchFilter, getQuerySearchFilter } from '../../utils/index';
+import Checkbox from '../../components/Checkbox';
+
+export interface ScrapePoolNamesListProps {
+  scrapePools: string[];
+}
+
+interface ScrapePoolDropDownProps {
+  selectedPool: string | null;
+  scrapePools: string[];
+  onScrapePoolChange: (name: string) => void;
+}
+
+const ScrapePoolDropDown: FC<ScrapePoolDropDownProps> = ({ selectedPool, scrapePools, onScrapePoolChange }) => {
+  const [dropdownOpen, setDropdownOpen] = useState(false);
+  const toggle = () => setDropdownOpen((prevState) => !prevState);
+
+  const [filter, setFilter] = useState<string>('');
+
+  return (
+    <Dropdown isOpen={dropdownOpen} toggle={toggle}>
+      <DropdownToggle caret>
+        {selectedPool === null || !scrapePools.includes(selectedPool) ? 'All scrape pools' : selectedPool}
+      </DropdownToggle>
+      <DropdownMenu>
+        {selectedPool ? (
+          <>
+            <DropdownItem onClick={() => onScrapePoolChange('')}>
+              Clear selection
+            </DropdownItem>
+            <DropdownItem divider />
+          </>
+        ) : null}
+        <DropdownItem toggle={false}>
+          <Input onChange={(event) => setFilter(event.target.value.trim())} />
+        </DropdownItem>
+        {scrapePools.length === 0 ? (
+          <DropdownItem disabled>No scrape pools configured</DropdownItem>
+        ) : (
+          scrapePools
+            .filter((name) => filter === '' || name.includes(filter))
+            .map((name) => (
+              <DropdownItem key={name} onClick={() => onScrapePoolChange(name)} active={name === selectedPool}>
+                {name}
+              </DropdownItem>
+            ))
+        )}
+      </DropdownMenu>
+    </Dropdown>
+  );
+};
 
 interface ScrapePoolListProps {
+  scrapePools: string[];
+  selectedPool: string | null;
+  onPoolSelect: (name: string) => void;
+}
+
+interface ScrapePoolListContentProps extends ScrapePoolListProps {
   activeTargets: Target[];
 }
@@ -51,8 +107,21 @@ export const ScrapePoolPanel: FC<PanelProps> = (props: PanelProps) => {
   );
 };
 
+type targetHealth = 'healthy' | 'unhealthy' | 'unknown';
+
+const healthColorTuples: Array<[targetHealth, string]> = [
+  ['healthy', 'success'],
+  ['unhealthy', 'danger'],
+  ['unknown', 'warning'],
+];
+
 // ScrapePoolListContent is taking care of every possible filter
-const ScrapePoolListContent: FC<ScrapePoolListProps> = ({ activeTargets }) => {
+const ScrapePoolListContent: FC<ScrapePoolListContentProps> = ({
+  activeTargets,
+  scrapePools,
+  selectedPool,
+  onPoolSelect,
+}) => {
   const initialPoolList = groupTargets(activeTargets);
   const [poolList, setPoolList] = useState<ScrapePools>(initialPoolList);
   const [targetList, setTargetList] = useState(activeTargets);
@@ -63,6 +132,18 @@ const ScrapePoolListContent: FC<ScrapePoolListContentProps> = ({
   };
   const [filter, setFilter] = useLocalStorage('targets-page-filter', initialFilter);
 
+  const [healthFilters, setHealthFilters] = useLocalStorage('target-health-filter', {
+    healthy: true,
+    unhealthy: true,
+    unknown: true,
+  });
+  const toggleHealthFilter = (val: targetHealth) => () => {
+    setHealthFilters({
+      ...healthFilters,
+      [val]: !healthFilters[val],
+    });
+  };
+
   const initialExpanded: Expanded = Object.keys(initialPoolList).reduce(
     (acc: { [scrapePool: string]: boolean }, scrapePool: string) => ({
       ...acc,
@@ -95,17 +176,37 @@ const ScrapePoolListContent: FC<ScrapePoolListProps> = ({ activeTargets }) => {
   return (
     <>
-      <Filter filter={filter} setFilter={setFilter} expanded={expanded} setExpanded={setExpanded} />
-      <SearchBar defaultValue={defaultValue} handleChange={handleSearchChange} placeholder="Filter by endpoint or labels" />
+      <Row className="align-items-center">
+        <Col>
+          <ScrapePoolDropDown selectedPool={selectedPool} scrapePools={scrapePools} onScrapePoolChange={onPoolSelect} />
+        </Col>
+        <Col>
+          <Filter filter={filter} setFilter={setFilter} expanded={expanded} setExpanded={setExpanded} />
+        </Col>
+        <Col>
+          <SearchBar defaultValue={defaultValue} handleChange={handleSearchChange} placeholder="Filter by endpoint or labels" />
+        </Col>
+        <Col>
+          {healthColorTuples.map(([val, color]) => (
+            <Checkbox key={val} checked={healthFilters[val]} onChange={toggleHealthFilter(val)}>
+              <Badge color={color} className="text-capitalize">
+                {val}
+              </Badge>
+            </Checkbox>
+          ))}
+        </Col>
+      </Row>
       {Object.keys(poolList)
         .filter((scrapePool) => {
@@ -117,7 +218,10 @@ const ScrapePoolListContent: FC<ScrapePoolListProps> = ({ activeTargets }) => {
           <ScrapePoolPanel
             key={scrapePool}
             scrapePool={scrapePool}
-            targetGroup={poolList[scrapePool]}
+            targetGroup={{
+              upCount: poolList[scrapePool].upCount,
+              targets: poolList[scrapePool].targets.filter((target: Target) => filterTargetsByHealth(target.health, healthFilters)),
+            }}
             expanded={expanded[scrapePool]}
             toggleExpanded={(): void => setExpanded({ ...expanded, [scrapePool]: !expanded[scrapePool] })}
           />
@@ -128,14 +232,26 @@ const ScrapePoolListContent: FC<ScrapePoolListProps> = ({ activeTargets }) => {
 
 const ScrapePoolListWithStatusIndicator = withStatusIndicator(ScrapePoolListContent);
 
-export const ScrapePoolList: FC = () => {
+export const ScrapePoolList: FC<ScrapePoolListProps> = ({ selectedPool, scrapePools, ...props }) => {
+  // If we have more than 20 scrape pools AND there's no pool selected then select first pool
+  // by default. This is to avoid loading a huge list of targets when we have many pools configured.
+  // If we have up to 20 scrape pools then pass whatever is the value of selectedPool, it can
+  // be a pool name or a null (if all pools should be shown).
+  const poolToShow = selectedPool === null && scrapePools.length > 20 ? scrapePools[0] : selectedPool;
+
   const pathPrefix = usePathPrefix();
-  const { response, error, isLoading } = useFetch(`${pathPrefix}/${API_PATH}/targets?state=active`);
+  const { response, error, isLoading } = useFetch(
+    `${pathPrefix}/${API_PATH}/targets?state=active${poolToShow === null ? '' : `&scrapePool=${poolToShow}`}`
+  );
   const { status: responseStatus } = response;
   const badResponse = responseStatus !== 'success' && responseStatus !== 'start fetching';
 
+  return (
+    <ScrapePoolListWithStatusIndicator
+      {...props}
+      {...response.data}
+      scrapePools={scrapePools}
+      selectedPool={poolToShow}
+      isLoading={isLoading}
+      error={badResponse ? new Error(responseStatus) : error}
+    />
+  );
+};
diff --git a/web/ui/react-app/src/pages/targets/Targets.test.tsx b/web/ui/react-app/src/pages/targets/Targets.test.tsx
--- a/web/ui/react-app/src/pages/targets/Targets.test.tsx
+++ b/web/ui/react-app/src/pages/targets/Targets.test.tsx
@@ -5,10 +5,20 @@
 describe('Targets', () => {
-  const targets = shallow(<Targets />);
+  beforeEach(() => {
+    fetchMock.resetMocks();
+  });
+
+  let targets: ReactWrapper;
+  let mock: FetchMock;
+
   describe('Header', () => {
+    const targets = shallow(<Targets />);
     const h2 = targets.find('h2');
     it('renders a header', () => {
       expect(h2.text()).toEqual('Targets');
@@ -15,7 +25,18 @@ describe('Targets', () => {
       expect(h2).toHaveLength(1);
     });
   });
-  it('renders a scrape pool list', () => {
+
+  it('renders a scrape pool list', async () => {
+    mock = fetchMock.mockResponseOnce(JSON.stringify(scrapePoolsSampleAPI));
+    await act(async () => {
+      targets = mount(<Targets />);
+    });
+    expect(mock).toHaveBeenCalledWith('/api/v1/scrape_pools', {
+      cache: 'no-store',
+      credentials: 'same-origin',
+    });
+    targets.update();
+
     const scrapePoolList = targets.find(ScrapePoolList);
     expect(scrapePoolList).toHaveLength(1);
   });
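The 20-pool cutoff introduced in ScrapePoolList.tsx above is worth reading in isolation. A minimal restatement as a standalone function (the function name is mine, not the patch's):

```typescript
// With more than 20 pools and no explicit selection, default to the first
// pool so the page never fetches every target of every pool at once.
function poolToShow(selectedPool: string | null, scrapePools: string[]): string | null {
  return selectedPool === null && scrapePools.length > 20 ? scrapePools[0] : selectedPool;
}
```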
diff --git a/web/ui/react-app/src/pages/targets/Targets.tsx b/web/ui/react-app/src/pages/targets/Targets.tsx
index cc7647acae..bba9396e19 100644
--- a/web/ui/react-app/src/pages/targets/Targets.tsx
+++ b/web/ui/react-app/src/pages/targets/Targets.tsx
@@ -1,11 +1,45 @@
-import React, { FC } from 'react';
-import ScrapePoolList from './ScrapePoolList';
+import React, { FC, useCallback, useState } from 'react';
+import ScrapePoolList, { ScrapePoolNamesListProps } from './ScrapePoolList';
+import { API_PATH } from '../../constants/constants';
+import { usePathPrefix } from '../../contexts/PathPrefixContext';
+import { useFetch } from '../../hooks/useFetch';
+import { withStatusIndicator } from '../../components/withStatusIndicator';
+import { setQueryParam, getQueryParam } from '../../utils/index';
+
+const ScrapePoolListWithStatusIndicator = withStatusIndicator(ScrapePoolList);
+
+const scrapePoolQueryParam = 'scrapePool';
 
 const Targets: FC = () => {
+  // get the initial name of selected scrape pool from query args
+  const scrapePool = getQueryParam(scrapePoolQueryParam) || null;
+
+  const [selectedPool, setSelectedPool] = useState<string | null>(scrapePool);
+
+  const onPoolSelect = useCallback(
+    (name: string) => {
+      setSelectedPool(name);
+      setQueryParam(scrapePoolQueryParam, name);
+    },
+    [setSelectedPool]
+  );
+
+  const pathPrefix = usePathPrefix();
+  const { response, error, isLoading } = useFetch<ScrapePoolNamesListProps>(`${pathPrefix}/${API_PATH}/scrape_pools`);
+  const { status: responseStatus } = response;
+  const badResponse = responseStatus !== 'success' && responseStatus !== 'start fetching';
+
   return (
     <>
       <h2>
         Targets
       </h2>
-      <ScrapePoolList />
+      <ScrapePoolListWithStatusIndicator
+        selectedPool={selectedPool}
+        onPoolSelect={onPoolSelect}
+        scrapePools={response.data ? response.data.scrapePools : []}
+        isLoading={isLoading}
+        error={badResponse ? new Error(responseStatus) : error}
+      />
     </>
   );
 };
diff --git a/web/ui/react-app/src/pages/targets/__testdata__/testdata.ts b/web/ui/react-app/src/pages/targets/__testdata__/testdata.ts
index 0cf0fcb333..1c6ed1af93 100644
--- a/web/ui/react-app/src/pages/targets/__testdata__/testdata.ts
+++ b/web/ui/react-app/src/pages/targets/__testdata__/testdata.ts
@@ -241,3 +241,84 @@ export const sampleApiResponse = Object.freeze({
     ] as Target[],
   },
 });
+
+export const scrapePoolTargetsSampleAPI = Object.freeze({
+  status: 'success',
+  data: {
+    targets: [
+      {
+        discoveredLabels: {
+          __address__: 'http://prometheus.io',
+          __metrics_path__: '/probe',
+          __param_module: 'http_2xx',
+          __scheme__: 'http',
+          job: 'blackbox',
+        },
+        labels: {
+          instance: 'http://prometheus.io',
+          job: 'blackbox',
+        },
+        scrapePool: 'blackbox',
+        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io',
+        lastError: '',
+        lastScrape: '2019-11-04T11:52:14.759299-07:00',
+        lastScrapeDuration: 36560147,
+        health: 'up',
+        globalUrl: 'http://localhost.localdomain:9000/metrics',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
+      },
+      {
+        discoveredLabels: {
+          __address__: 'https://prometheus.io',
+          __metrics_path__: '/probe',
+          __param_module: 'http_2xx',
+          __scheme__: 'http',
+          job: 'blackbox',
+        },
+        labels: {
+          instance: 'https://prometheus.io',
+          job: 'blackbox',
+        },
+        scrapePool: 'blackbox',
+        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io',
+        lastError: '',
+        lastScrape: '2019-11-04T11:52:24.731096-07:00',
+        lastScrapeDuration: 49448763,
+        health: 'up',
+        globalUrl: 'http://localhost.localdomain:9000/metrics',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
+      },
+      {
+        discoveredLabels: {
+          __address__: 'http://example.com:8080',
+          __metrics_path__: '/probe',
+          __param_module: 'http_2xx',
+          __scheme__: 'http',
+          job: 'blackbox',
+        },
+        labels: {
+          instance: 'http://example.com:8080',
+          job: 'blackbox',
+        },
+        scrapePool: 'blackbox',
+        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080',
+        lastError: '',
+        lastScrape: '2019-11-04T11:52:13.516654-07:00',
+        lastScrapeDuration: 120916592,
+        health: 'up',
+        globalUrl: 'http://localhost.localdomain:9000/metrics',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
+      },
+    ] as Target[],
+  },
+});
+
+export const scrapePoolsSampleAPI = Object.freeze({
+  status: 'success',
+  data: {
+    scrapePools: ['blackbox'],
+  },
+});
diff --git a/web/ui/react-app/src/pages/targets/target.ts b/web/ui/react-app/src/pages/targets/target.ts
index a3ca48ce1f..38e2c9e656 100644
--- a/web/ui/react-app/src/pages/targets/target.ts
+++ b/web/ui/react-app/src/pages/targets/target.ts
@@ -54,3 +54,20 @@ export const getColor = (health: string): string => {
       return 'warning';
   }
 };
+
+export interface TargetHealthFilters {
+  healthy: boolean;
+  unhealthy: boolean;
+  unknown: boolean;
+}
+
+export const filterTargetsByHealth = (health: string, filters: TargetHealthFilters): boolean => {
+  switch (health.toLowerCase()) {
+    case 'up':
+      return filters.healthy;
+    case 'down':
+      return filters.unhealthy;
+    default:
+      return filters.unknown;
+  }
+};
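A quick usage sketch for the health predicate added to target.ts above; the filter values and health strings are made-up sample data, and the import path assumes a module in the same directory:

```typescript
import { filterTargetsByHealth, TargetHealthFilters } from './target';

const filters: TargetHealthFilters = { healthy: true, unhealthy: false, unknown: true };

// 'up' maps to healthy, 'down' to unhealthy, anything else to unknown:
['up', 'down', 'starting'].filter((h) => filterTargetsByHealth(h, filters));
// -> ['up', 'starting']
```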
diff --git a/web/ui/react-app/src/utils/index.ts b/web/ui/react-app/src/utils/index.ts
index d51c1e77c3..83782605a2 100644
--- a/web/ui/react-app/src/utils/index.ts
+++ b/web/ui/react-app/src/utils/index.ts
@@ -244,13 +244,23 @@ export const encodePanelOptionsToQueryString = (panels: PanelMeta[]): string =>
 };
 
 export const setQuerySearchFilter = (search: string) => {
-  window.history.pushState({}, '', `?search=${search}`);
+  setQueryParam('search', search);
 };
 
 export const getQuerySearchFilter = (): string => {
+  return getQueryParam('search');
+};
+
+export const setQueryParam = (key: string, value: string) => {
+  const params = new URLSearchParams(window.location.search);
+  params.set(key, value);
+  window.history.pushState({}, '', '?' + params.toString());
+};
+
+export const getQueryParam = (key: string): string => {
   const locationSearch = window.location.search;
   const params = new URLSearchParams(locationSearch);
-  return params.get('search') || '';
+  return params.get(key) || '';
 };
 
 export const createExpressionLink = (expr: string): string => {
diff --git a/web/web.go b/web/web.go
index f43d5104ce..ba7c2d2895 100644
--- a/web/web.go
+++ b/web/web.go
@@ -309,6 +309,7 @@ func New(logger log.Logger, o *Options) *Handler {
 	}
 	h.SetReady(false)
 
+	factorySPr := func(_ context.Context) api_v1.ScrapePoolsRetriever { return h.scrapeManager }
 	factoryTr := func(_ context.Context) api_v1.TargetRetriever { return h.scrapeManager }
 	factoryAr := func(_ context.Context) api_v1.AlertmanagerRetriever { return h.notifier }
 	FactoryRr := func(_ context.Context) api_v1.RulesRetriever { return h.ruleManager }
@@ -318,7 +319,7 @@ func New(logger log.Logger, o *Options) *Handler {
 		app = h.storage
 	}
 
-	h.apiV1 = api_v1.NewAPI(h.queryEngine, h.storage, app, h.exemplarStorage, factoryTr, factoryAr,
+	h.apiV1 = api_v1.NewAPI(h.queryEngine, h.storage, app, h.exemplarStorage, factorySPr, factoryTr, factoryAr,
		func() config.Config {
			h.mtx.RLock()
			defer h.mtx.RUnlock()
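Finally, the generalized query-parameter helpers in utils/index.ts are what make the selected pool bookmarkable. A standalone restatement of the round trip, reproduced here purely for illustration:

```typescript
// Same logic as setQueryParam/getQueryParam in the diff above.
const setQueryParam = (key: string, value: string): void => {
  const params = new URLSearchParams(window.location.search);
  params.set(key, value);
  window.history.pushState({}, '', '?' + params.toString());
};

const getQueryParam = (key: string): string =>
  new URLSearchParams(window.location.search).get(key) || '';

setQueryParam('scrapePool', 'node_exporter');
// The URL now ends in ?scrapePool=node_exporter, so reloading the page
// restores the selection:
console.log(getQueryParam('scrapePool')); // "node_exporter"
```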