Show individual scrape pools on /targets page (#11142)
* Add API endpoints for getting scrape pool names

This adds an api/v1/scrape_pools endpoint that returns the list of *names* of all the scrape pools configured. Having it makes it possible to find out which scrape pools are defined without having to list and parse all targets.

The second change adds scrapePool query parameter support to the api/v1/targets endpoint, which filters the returned targets down to those belonging to the given scrape pool name.

Both changes make it possible to query the data of a specific scrape pool, rather than fetching all targets for all possible scrape pools. The problem with the api/v1/targets endpoint is that it returns a huge amount of data if a lot of scrape pools are configured.

Signed-off-by: Łukasz Mierzwa <l.mierzwa@gmail.com>

* Add a scrape pool selector on /targets page

The current targets page lists all possible targets. This works well with only a few scrape pools configured, but for systems with a lot of scrape pools and targets it slows things down considerably. Not only does the /targets page load very slowly in such a case (waiting for a huge API response), it also takes a long time to render, due to the huge number of elements. This change adds a dropdown selector so it's possible to select only the scrape pool of interest. There's also a scrapePool query parameter that will open the selected pool automatically.

Signed-off-by: Łukasz Mierzwa <l.mierzwa@gmail.com>
This commit is contained in:
parent d7f0276d88
commit e1b7082008
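For reference, the new endpoint can be exercised the same way as the existing targets examples; the host, port, and pool name below are illustrative:

```json
$ curl 'http://localhost:9090/api/v1/scrape_pools'
{
  "status": "success",
  "data": {
    "scrapePools": ["node_exporter"]
  }
}
```

The /targets page accepts the same pool name via its own `scrapePool` query parameter, as shown in the UI changes further down.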
@@ -623,6 +623,38 @@ $ curl 'http://localhost:9090/api/v1/targets?state=active'
}
```

The `scrapePool` query parameter allows the caller to filter by scrape pool name.

```json
$ curl 'http://localhost:9090/api/v1/targets?scrapePool=node_exporter'
{
  "status": "success",
  "data": {
    "activeTargets": [
      {
        "discoveredLabels": {
          "__address__": "127.0.0.1:9091",
          "__metrics_path__": "/metrics",
          "__scheme__": "http",
          "job": "node_exporter"
        },
        "labels": {
          "instance": "127.0.0.1:9091",
          "job": "node_exporter"
        },
        "scrapePool": "node_exporter",
        "scrapeUrl": "http://127.0.0.1:9091/metrics",
        "globalUrl": "http://example-prometheus:9091/metrics",
        "lastError": "",
        "lastScrape": "2017-01-17T15:07:44.723715405+01:00",
        "lastScrapeDuration": 50688943,
        "health": "up"
      }
    ],
    "droppedTargets": []
  }
}
```

## Rules
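The `scrapePool` and `state` parameters can also be combined; a sketch of such a request (the actual `droppedTargets` contents depend on the relabelling configuration):

```json
$ curl 'http://localhost:9090/api/v1/targets?state=dropped&scrapePool=node_exporter'
{
  "status": "success",
  "data": {
    "activeTargets": [],
    "droppedTargets": []
  }
}
```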
@@ -313,6 +313,18 @@ func (m *Manager) TargetsAll() map[string][]*Target {
    return targets
}

// ScrapePools returns the list of all scrape pool names.
func (m *Manager) ScrapePools() []string {
    m.mtxScrape.Lock()
    defer m.mtxScrape.Unlock()

    names := make([]string, 0, len(m.scrapePools))
    for name := range m.scrapePools {
        names = append(names, name)
    }
    return names
}

// TargetsActive returns the active targets currently being scraped.
func (m *Manager) TargetsActive() map[string][]*Target {
    m.mtxScrape.Lock()
@@ -14,6 +14,7 @@
package scrape

import (
    "context"
    "net/http"
    "strconv"
    "testing"
@@ -24,6 +25,7 @@ import (
    "gopkg.in/yaml.v2"

    "github.com/prometheus/prometheus/config"
    "github.com/prometheus/prometheus/discovery"
    "github.com/prometheus/prometheus/discovery/targetgroup"
    "github.com/prometheus/prometheus/model/labels"
    "github.com/prometheus/prometheus/model/relabel"
@@ -635,3 +637,69 @@ global:
        t.Error("Jitter should not be the same on different set of external labels")
    }
}

func TestManagerScrapePools(t *testing.T) {
    cfgText1 := `
scrape_configs:
- job_name: job1
  static_configs:
  - targets: ["foo:9090"]
- job_name: job2
  static_configs:
  - targets: ["foo:9091", "foo:9092"]
`
    cfgText2 := `
scrape_configs:
- job_name: job1
  static_configs:
  - targets: ["foo:9090", "foo:9094"]
- job_name: job3
  static_configs:
  - targets: ["foo:9093"]
`
    var (
        cfg1 = loadConfiguration(t, cfgText1)
        cfg2 = loadConfiguration(t, cfgText2)
    )

    reload := func(scrapeManager *Manager, cfg *config.Config) {
        newLoop := func(scrapeLoopOptions) loop {
            return noopLoop()
        }
        scrapeManager.scrapePools = map[string]*scrapePool{}
        for _, sc := range cfg.ScrapeConfigs {
            _, cancel := context.WithCancel(context.Background())
            defer cancel()
            sp := &scrapePool{
                appendable:    &nopAppendable{},
                activeTargets: map[uint64]*Target{},
                loops: map[uint64]loop{
                    1: noopLoop(),
                },
                newLoop: newLoop,
                logger:  nil,
                config:  sc,
                client:  http.DefaultClient,
                cancel:  cancel,
            }
            for _, c := range sc.ServiceDiscoveryConfigs {
                staticConfig := c.(discovery.StaticConfig)
                for _, group := range staticConfig {
                    for i := range group.Targets {
                        sp.activeTargets[uint64(i)] = &Target{}
                    }
                }
            }
            scrapeManager.scrapePools[sc.JobName] = sp
        }
    }

    opts := Options{}
    scrapeManager := NewManager(&opts, nil, nil)

    reload(scrapeManager, cfg1)
    require.ElementsMatch(t, []string{"job1", "job2"}, scrapeManager.ScrapePools())

    reload(scrapeManager, cfg2)
    require.ElementsMatch(t, []string{"job1", "job3"}, scrapeManager.ScrapePools())
}
@@ -23,6 +23,7 @@ import (
    "net/url"
    "os"
    "path/filepath"
    "sort"
    "strconv"
    "strings"
    "time"
@@ -88,6 +89,11 @@ func (e *apiError) Error() string {
    return fmt.Sprintf("%s: %s", e.typ, e.err)
}

// ScrapePoolsRetriever provides the list of all scrape pools.
type ScrapePoolsRetriever interface {
    ScrapePools() []string
}

// TargetRetriever provides the list of active/dropped targets to scrape or not.
type TargetRetriever interface {
    TargetsActive() map[string][]*scrape.Target
@@ -179,6 +185,7 @@ type API struct {
    QueryEngine       QueryEngine
    ExemplarQueryable storage.ExemplarQueryable

    scrapePoolsRetriever  func(context.Context) ScrapePoolsRetriever
    targetRetriever       func(context.Context) TargetRetriever
    alertmanagerRetriever func(context.Context) AlertmanagerRetriever
    rulesRetriever        func(context.Context) RulesRetriever
@@ -216,6 +223,7 @@ func NewAPI(
    q storage.SampleAndChunkQueryable,
    ap storage.Appendable,
    eq storage.ExemplarQueryable,
    spsr func(context.Context) ScrapePoolsRetriever,
    tr func(context.Context) TargetRetriever,
    ar func(context.Context) AlertmanagerRetriever,
    configFunc func() config.Config,
@@ -243,6 +251,7 @@ func NewAPI(
        Queryable:         q,
        ExemplarQueryable: eq,

        scrapePoolsRetriever:  spsr,
        targetRetriever:       tr,
        alertmanagerRetriever: ar,
@@ -338,6 +347,7 @@ func (api *API) Register(r *route.Router) {
    r.Post("/series", wrapAgent(api.series))
    r.Del("/series", wrapAgent(api.dropSeries))

    r.Get("/scrape_pools", wrap(api.scrapePools))
    r.Get("/targets", wrap(api.targets))
    r.Get("/targets/metadata", wrap(api.targetMetadata))
    r.Get("/alertmanagers", wrapAgent(api.alertmanagers))
@@ -824,6 +834,10 @@ type Target struct {
    ScrapeTimeout string `json:"scrapeTimeout"`
}

type ScrapePoolsDiscovery struct {
    ScrapePools []string `json:"scrapePools"`
}

// DroppedTarget has the information for one target that was dropped during relabelling.
type DroppedTarget struct {
    // Labels before any processing.
@@ -903,6 +917,13 @@ func getGlobalURL(u *url.URL, opts GlobalURLOptions) (*url.URL, error) {
    return u, nil
}

func (api *API) scrapePools(r *http.Request) apiFuncResult {
    names := api.scrapePoolsRetriever(r.Context()).ScrapePools()
    sort.Strings(names)
    res := &ScrapePoolsDiscovery{ScrapePools: names}
    return apiFuncResult{data: res, err: nil, warnings: nil, finalizer: nil}
}

func (api *API) targets(r *http.Request) apiFuncResult {
    sortKeys := func(targets map[string][]*scrape.Target) ([]string, int) {
        var n int
@@ -915,15 +936,7 @@ func (api *API) targets(r *http.Request) apiFuncResult {
        return keys, n
    }

    flatten := func(targets map[string][]*scrape.Target) []*scrape.Target {
        keys, n := sortKeys(targets)
        res := make([]*scrape.Target, 0, n)
        for _, k := range keys {
            res = append(res, targets[k]...)
        }
        return res
    }

    scrapePool := r.URL.Query().Get("scrapePool")
    state := strings.ToLower(r.URL.Query().Get("state"))
    showActive := state == "" || state == "any" || state == "active"
    showDropped := state == "" || state == "any" || state == "dropped"
@@ -935,6 +948,9 @@ func (api *API) targets(r *http.Request) apiFuncResult {
        res.ActiveTargets = make([]*Target, 0, numTargets)

        for _, key := range activeKeys {
            if scrapePool != "" && key != scrapePool {
                continue
            }
            for _, target := range targetsActive[key] {
                lastErrStr := ""
                lastErr := target.LastError()
@@ -970,12 +986,18 @@ func (api *API) targets(r *http.Request) apiFuncResult {
        res.ActiveTargets = []*Target{}
    }
    if showDropped {
        tDropped := flatten(api.targetRetriever(r.Context()).TargetsDropped())
        res.DroppedTargets = make([]*DroppedTarget, 0, len(tDropped))
        for _, t := range tDropped {
            res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{
                DiscoveredLabels: t.DiscoveredLabels().Map(),
            })
        targetsDropped := api.targetRetriever(r.Context()).TargetsDropped()
        droppedKeys, numTargets := sortKeys(targetsDropped)
        res.DroppedTargets = make([]*DroppedTarget, 0, numTargets)
        for _, key := range droppedKeys {
            if scrapePool != "" && key != scrapePool {
                continue
            }
            for _, target := range targetsDropped[key] {
                res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{
                    DiscoveredLabels: target.DiscoveredLabels().Map(),
                })
            }
        }
    } else {
        res.DroppedTargets = []*DroppedTarget{}
@@ -113,6 +113,7 @@ func createPrometheusAPI(q storage.SampleAndChunkQueryable) *route.Router {
        q,
        nil,
        nil,
        func(context.Context) ScrapePoolsRetriever { return &DummyScrapePoolsRetriever{} },
        func(context.Context) TargetRetriever { return &DummyTargetRetriever{} },
        func(context.Context) AlertmanagerRetriever { return &DummyAlertmanagerRetriever{} },
        func() config.Config { return config.Config{} },
@@ -205,6 +206,13 @@ func (t errorTestSeriesSet) Warnings() storage.Warnings {
    return nil
}

// DummyScrapePoolsRetriever implements github.com/prometheus/prometheus/web/api/v1.ScrapePoolsRetriever.
type DummyScrapePoolsRetriever struct{}

func (DummyScrapePoolsRetriever) ScrapePools() []string {
    return []string{}
}

// DummyTargetRetriever implements github.com/prometheus/prometheus/web/api/v1.targetRetriever.
type DummyTargetRetriever struct{}
@@ -49,7 +49,7 @@ const Filter: FC<FilterProps> = ({ filter, setFilter, expanded, setExpanded }) =
    },
  };
  return (
    <ButtonGroup className="mt-3 mb-4">
    <ButtonGroup className="text-nowrap">
      <Button {...btnProps.all}>All</Button>
      <Button {...btnProps.unhealthy}>Unhealthy</Button>
      <Button {...btnProps.expansionState}>{allExpanded ? 'Collapse All' : 'Expand All'}</Button>
@@ -36,7 +36,7 @@ describe('ScrapePoolList', () => {
    await act(async () => {
      scrapePoolList = mount(
        <PathPrefixContext.Provider value="/path/prefix">
          <ScrapePoolList />
          <ScrapePoolList scrapePools={[]} selectedPool={null} onPoolSelect={jest.fn()} />
        </PathPrefixContext.Provider>
      );
    });
@@ -63,7 +63,7 @@ describe('ScrapePoolList', () => {
    await act(async () => {
      scrapePoolList = mount(
        <PathPrefixContext.Provider value="/path/prefix">
          <ScrapePoolList />
          <ScrapePoolList scrapePools={[]} selectedPool={null} onPoolSelect={jest.fn()} />
        </PathPrefixContext.Provider>
      );
    });
@@ -2,10 +2,10 @@ import { KVSearch } from '@nexucis/kvsearch';
import { usePathPrefix } from '../../contexts/PathPrefixContext';
import { useFetch } from '../../hooks/useFetch';
import { API_PATH } from '../../constants/constants';
import { groupTargets, ScrapePool, ScrapePools, Target } from './target';
import { filterTargetsByHealth, groupTargets, ScrapePool, ScrapePools, Target } from './target';
import { withStatusIndicator } from '../../components/withStatusIndicator';
import { FC, useCallback, useEffect, useMemo, useState } from 'react';
import { Col, Collapse, Row } from 'reactstrap';
import { Badge, Col, Collapse, Dropdown, DropdownItem, DropdownMenu, DropdownToggle, Input, Row } from 'reactstrap';
import { ScrapePoolContent } from './ScrapePoolContent';
import Filter, { Expanded, FilterData } from './Filter';
import { useLocalStorage } from '../../hooks/useLocalStorage';
@@ -13,8 +13,64 @@ import styles from './ScrapePoolPanel.module.css';
import { ToggleMoreLess } from '../../components/ToggleMoreLess';
import SearchBar from '../../components/SearchBar';
import { setQuerySearchFilter, getQuerySearchFilter } from '../../utils/index';
import Checkbox from '../../components/Checkbox';

export interface ScrapePoolNamesListProps {
  scrapePools: string[];
}

interface ScrapePoolDropDownProps {
  selectedPool: string | null;
  scrapePools: string[];
  onScrapePoolChange: (name: string) => void;
}

const ScrapePoolDropDown: FC<ScrapePoolDropDownProps> = ({ selectedPool, scrapePools, onScrapePoolChange }) => {
  const [dropdownOpen, setDropdownOpen] = useState(false);
  const toggle = () => setDropdownOpen((prevState) => !prevState);

  const [filter, setFilter] = useState<string>('');

  return (
    <Dropdown isOpen={dropdownOpen} toggle={toggle}>
      <DropdownToggle caret className="mw-100 text-truncate">
        {selectedPool === null || !scrapePools.includes(selectedPool) ? 'All scrape pools' : selectedPool}
      </DropdownToggle>
      <DropdownMenu style={{ maxHeight: 400, overflowY: 'auto' }}>
        {selectedPool ? (
          <>
            <DropdownItem key="__all__" value={null} onClick={() => onScrapePoolChange('')}>
              Clear selection
            </DropdownItem>
            <DropdownItem divider />
          </>
        ) : null}
        <DropdownItem key="__header" header toggle={false}>
          <Input autoFocus placeholder="Filter" value={filter} onChange={(event) => setFilter(event.target.value.trim())} />
        </DropdownItem>
        {scrapePools.length === 0 ? (
          <DropdownItem disabled>No scrape pools configured</DropdownItem>
        ) : (
          scrapePools
            .filter((name) => filter === '' || name.includes(filter))
            .map((name) => (
              <DropdownItem key={name} value={name} onClick={() => onScrapePoolChange(name)} active={name === selectedPool}>
                {name}
              </DropdownItem>
            ))
        )}
      </DropdownMenu>
    </Dropdown>
  );
};

interface ScrapePoolListProps {
  scrapePools: string[];
  selectedPool: string | null;
  onPoolSelect: (name: string) => void;
}

interface ScrapePoolListContentProps extends ScrapePoolListProps {
  activeTargets: Target[];
}
@@ -51,8 +107,21 @@ export const ScrapePoolPanel: FC<PanelProps> = (props: PanelProps) => {
  );
};

type targetHealth = 'healthy' | 'unhealthy' | 'unknown';

const healthColorTuples: Array<[targetHealth, string]> = [
  ['healthy', 'success'],
  ['unhealthy', 'danger'],
  ['unknown', 'warning'],
];

// ScrapePoolListContent takes care of every possible filter
const ScrapePoolListContent: FC<ScrapePoolListProps> = ({ activeTargets }) => {
const ScrapePoolListContent: FC<ScrapePoolListContentProps> = ({
  activeTargets,
  scrapePools,
  selectedPool,
  onPoolSelect,
}) => {
  const initialPoolList = groupTargets(activeTargets);
  const [poolList, setPoolList] = useState<ScrapePools>(initialPoolList);
  const [targetList, setTargetList] = useState(activeTargets);
@@ -63,6 +132,18 @@ const ScrapePoolListContent: FC<ScrapePoolListProps> = ({ activeTargets }) => {
  };
  const [filter, setFilter] = useLocalStorage('targets-page-filter', initialFilter);

  const [healthFilters, setHealthFilters] = useLocalStorage('target-health-filter', {
    healthy: true,
    unhealthy: true,
    unknown: true,
  });
  const toggleHealthFilter = (val: targetHealth) => () => {
    setHealthFilters({
      ...healthFilters,
      [val]: !healthFilters[val],
    });
  };

  const initialExpanded: Expanded = Object.keys(initialPoolList).reduce(
    (acc: { [scrapePool: string]: boolean }, scrapePool: string) => ({
      ...acc,
@@ -95,17 +176,37 @@ const ScrapePoolListContent: FC<ScrapePoolListProps> = ({ activeTargets }) => {

  return (
    <>
      <Row xs="4" className="align-items-center">
        <Col>
      <Row className="align-items-center">
        <Col className="flex-grow-0 py-1">
          <ScrapePoolDropDown selectedPool={selectedPool} scrapePools={scrapePools} onScrapePoolChange={onPoolSelect} />
        </Col>
        <Col className="flex-grow-0 py-1">
          <Filter filter={filter} setFilter={setFilter} expanded={expanded} setExpanded={setExpanded} />
        </Col>
        <Col xs="6">
        <Col className="flex-grow-1 py-1">
          <SearchBar
            defaultValue={defaultValue}
            handleChange={handleSearchChange}
            placeholder="Filter by endpoint or labels"
          />
        </Col>
        <Col className="flex-grow-0 py-1">
          <div className="d-flex flex-row-reverse">
            {healthColorTuples.map(([val, color]) => (
              <Checkbox
                wrapperStyles={{ marginBottom: 0 }}
                key={val}
                checked={healthFilters[val]}
                id={`${val}-toggler`}
                onChange={toggleHealthFilter(val)}
              >
                <Badge color={color} className="text-capitalize">
                  {val}
                </Badge>
              </Checkbox>
            ))}
          </div>
        </Col>
      </Row>
      {Object.keys(poolList)
        .filter((scrapePool) => {
@@ -117,7 +218,10 @@ const ScrapePoolListContent: FC<ScrapePoolListProps> = ({ activeTargets }) => {
            <ScrapePoolPanel
              key={scrapePool}
              scrapePool={scrapePool}
              targetGroup={poolList[scrapePool]}
              targetGroup={{
                upCount: poolList[scrapePool].upCount,
                targets: poolList[scrapePool].targets.filter((target) => filterTargetsByHealth(target.health, healthFilters)),
              }}
              expanded={expanded[scrapePool]}
              toggleExpanded={(): void => setExpanded({ ...expanded, [scrapePool]: !expanded[scrapePool] })}
            />
@@ -128,14 +232,26 @@ const ScrapePoolListContent: FC<ScrapePoolListProps> = ({ activeTargets }) => {

const ScrapePoolListWithStatusIndicator = withStatusIndicator(ScrapePoolListContent);

export const ScrapePoolList: FC = () => {
export const ScrapePoolList: FC<ScrapePoolListProps> = ({ selectedPool, scrapePools, ...props }) => {
  // If we have more than 20 scrape pools AND there's no pool selected then select the first pool
  // by default. This is to avoid loading a huge list of targets when we have many pools configured.
  // If we have up to 20 scrape pools then pass whatever the value of selectedPool is; it can
  // be a pool name or null (if all pools should be shown).
  const poolToShow = selectedPool === null && scrapePools.length > 20 ? scrapePools[0] : selectedPool;

  const pathPrefix = usePathPrefix();
  const { response, error, isLoading } = useFetch<ScrapePoolListProps>(`${pathPrefix}/${API_PATH}/targets?state=active`);
  const { response, error, isLoading } = useFetch<ScrapePoolListContentProps>(
    `${pathPrefix}/${API_PATH}/targets?state=active${poolToShow === null ? '' : `&scrapePool=${poolToShow}`}`
  );
  const { status: responseStatus } = response;
  const badResponse = responseStatus !== 'success' && responseStatus !== 'start fetching';

  return (
    <ScrapePoolListWithStatusIndicator
      {...props}
      {...response.data}
      selectedPool={poolToShow}
      scrapePools={scrapePools}
      error={badResponse ? new Error(responseStatus) : error}
      isLoading={isLoading}
      componentTitle="Targets information"
@@ -1,11 +1,21 @@
import React from 'react';
import { shallow } from 'enzyme';
import { shallow, mount, ReactWrapper } from 'enzyme';
import { act } from 'react-dom/test-utils';
import Targets from './Targets';
import ScrapePoolList from './ScrapePoolList';
import { FetchMock } from 'jest-fetch-mock/types';
import { scrapePoolsSampleAPI } from './__testdata__/testdata';

describe('Targets', () => {
  const targets = shallow(<Targets />);
  beforeEach(() => {
    fetchMock.resetMocks();
  });

  let targets: ReactWrapper;
  let mock: FetchMock;

  describe('Header', () => {
    const targets = shallow(<Targets />);
    const h2 = targets.find('h2');
    it('renders a header', () => {
      expect(h2.text()).toEqual('Targets');
@@ -15,7 +25,18 @@ describe('Targets', () => {
      expect(h2).toHaveLength(1);
    });
  });
  it('renders a scrape pool list', () => {

  it('renders a scrape pool list', async () => {
    mock = fetchMock.mockResponseOnce(JSON.stringify(scrapePoolsSampleAPI));
    await act(async () => {
      targets = mount(<Targets />);
    });
    expect(mock).toHaveBeenCalledWith('/api/v1/scrape_pools', {
      cache: 'no-store',
      credentials: 'same-origin',
    });
    targets.update();

    const scrapePoolList = targets.find(ScrapePoolList);
    expect(scrapePoolList).toHaveLength(1);
  });
@@ -1,11 +1,45 @@
import React, { FC } from 'react';
import ScrapePoolList from './ScrapePoolList';
import React, { FC, useCallback, useState } from 'react';
import ScrapePoolList, { ScrapePoolNamesListProps } from './ScrapePoolList';
import { API_PATH } from '../../constants/constants';
import { usePathPrefix } from '../../contexts/PathPrefixContext';
import { useFetch } from '../../hooks/useFetch';
import { withStatusIndicator } from '../../components/withStatusIndicator';
import { setQueryParam, getQueryParam } from '../../utils/index';

const ScrapePoolListWithStatusIndicator = withStatusIndicator(ScrapePoolList);

const scrapePoolQueryParam = 'scrapePool';

const Targets: FC = () => {
  // get the initial name of the selected scrape pool from query args
  const scrapePool = getQueryParam(scrapePoolQueryParam) || null;

  const [selectedPool, setSelectedPool] = useState<string | null>(scrapePool);

  const onPoolSelect = useCallback(
    (name: string) => {
      setSelectedPool(name);
      setQueryParam(scrapePoolQueryParam, name);
    },
    [setSelectedPool]
  );

  const pathPrefix = usePathPrefix();
  const { response, error, isLoading } = useFetch<ScrapePoolNamesListProps>(`${pathPrefix}/${API_PATH}/scrape_pools`);
  const { status: responseStatus } = response;
  const badResponse = responseStatus !== 'success' && responseStatus !== 'start fetching';

  return (
    <>
      <h2>Targets</h2>
      <ScrapePoolList />
      <ScrapePoolListWithStatusIndicator
        error={badResponse ? new Error(responseStatus) : error}
        isLoading={isLoading}
        componentTitle="Targets"
        selectedPool={selectedPool}
        onPoolSelect={onPoolSelect}
        {...response.data}
      />
    </>
  );
};
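With this wiring, opening the targets page with a `scrapePool` query parameter preselects that pool on load; the hostname and pool name below are illustrative:

```
http://localhost:9090/targets?scrapePool=node_exporter
```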
@@ -241,3 +241,84 @@ export const sampleApiResponse = Object.freeze({
    ] as Target[],
  },
});

export const scrapePoolTargetsSampleAPI = Object.freeze({
  status: 'success',
  data: {
    targets: [
      {
        discoveredLabels: {
          __address__: 'http://prometheus.io',
          __metrics_path__: '/probe',
          __param_module: 'http_2xx',
          __scheme__: 'http',
          job: 'blackbox',
        },
        labels: {
          instance: 'http://prometheus.io',
          job: 'blackbox',
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io',
        lastError: '',
        lastScrape: '2019-11-04T11:52:14.759299-07:00',
        lastScrapeDuration: 36560147,
        health: 'up',
        globalUrl: 'http://localhost.localdomain:9000/metrics',
        scrapeInterval: '15s',
        scrapeTimeout: '500ms',
      },
      {
        discoveredLabels: {
          __address__: 'https://prometheus.io',
          __metrics_path__: '/probe',
          __param_module: 'http_2xx',
          __scheme__: 'http',
          job: 'blackbox',
        },
        labels: {
          instance: 'https://prometheus.io',
          job: 'blackbox',
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io',
        lastError: '',
        lastScrape: '2019-11-04T11:52:24.731096-07:00',
        lastScrapeDuration: 49448763,
        health: 'up',
        globalUrl: 'http://localhost.localdomain:9000/metrics',
        scrapeInterval: '15s',
        scrapeTimeout: '500ms',
      },
      {
        discoveredLabels: {
          __address__: 'http://example.com:8080',
          __metrics_path__: '/probe',
          __param_module: 'http_2xx',
          __scheme__: 'http',
          job: 'blackbox',
        },
        labels: {
          instance: 'http://example.com:8080',
          job: 'blackbox',
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080',
        lastError: '',
        lastScrape: '2019-11-04T11:52:13.516654-07:00',
        lastScrapeDuration: 120916592,
        health: 'up',
        globalUrl: 'http://localhost.localdomain:9000/metrics',
        scrapeInterval: '15s',
        scrapeTimeout: '500ms',
      },
    ] as Target[],
  },
});

export const scrapePoolsSampleAPI = Object.freeze({
  status: 'success',
  data: {
    scrapePools: ['blackbox'],
  },
});
@@ -54,3 +54,20 @@ export const getColor = (health: string): string => {
      return 'warning';
  }
};

export interface TargetHealthFilters {
  healthy: boolean;
  unhealthy: boolean;
  unknown: boolean;
}

export const filterTargetsByHealth = (health: string, filters: TargetHealthFilters): boolean => {
  switch (health.toLowerCase()) {
    case 'up':
      return filters.healthy;
    case 'down':
      return filters.unhealthy;
    default:
      return filters.unknown;
  }
};
@@ -244,13 +244,23 @@ export const encodePanelOptionsToQueryString = (panels: PanelMeta[]): string =>
};

export const setQuerySearchFilter = (search: string) => {
  window.history.pushState({}, '', `?search=${search}`);
  setQueryParam('search', search);
};

export const getQuerySearchFilter = (): string => {
  return getQueryParam('search');
};

export const setQueryParam = (key: string, value: string) => {
  const params = new URLSearchParams(window.location.search);
  params.set(key, value);
  window.history.pushState({}, '', '?' + params.toString());
};

export const getQueryParam = (key: string): string => {
  const locationSearch = window.location.search;
  const params = new URLSearchParams(locationSearch);
  return params.get('search') || '';
  return params.get(key) || '';
};

export const createExpressionLink = (expr: string): string => {
@@ -309,6 +309,7 @@ func New(logger log.Logger, o *Options) *Handler {
    }
    h.SetReady(false)

    factorySPr := func(_ context.Context) api_v1.ScrapePoolsRetriever { return h.scrapeManager }
    factoryTr := func(_ context.Context) api_v1.TargetRetriever { return h.scrapeManager }
    factoryAr := func(_ context.Context) api_v1.AlertmanagerRetriever { return h.notifier }
    FactoryRr := func(_ context.Context) api_v1.RulesRetriever { return h.ruleManager }

@@ -318,7 +319,7 @@ func New(logger log.Logger, o *Options) *Handler {
        app = h.storage
    }

    h.apiV1 = api_v1.NewAPI(h.queryEngine, h.storage, app, h.exemplarStorage, factoryTr, factoryAr,
    h.apiV1 = api_v1.NewAPI(h.queryEngine, h.storage, app, h.exemplarStorage, factorySPr, factoryTr, factoryAr,
        func() config.Config {
            h.mtx.RLock()
            defer h.mtx.RUnlock()