React UI: Implement /targets page (#6276)

* Add LastScrapeDuration to targets endpoint

Signed-off-by: Dustin Hooten <dhooten@splunk.com>

* Add Scrape job name to targets endpoint

Signed-off-by: Dustin Hooten <dhooten@splunk.com>

* Implement the /targets page in React

Signed-off-by: Dustin Hooten <dhooten@splunk.com>

* Add state query param to targets endpoint

Signed-off-by: Dustin Hooten <dhooten@splunk.com>

* Use state filter in API call

Signed-off-by: Dustin Hooten <dhooten@splunk.com>

* API feedback

Signed-off-by: Dustin Hooten <dhooten@splunk.com>

* PR feedback (frontend)

Signed-off-by: Dustin Hooten <dhooten@splunk.com>

* Implement and use localStorage hook

Signed-off-by: Dustin Hooten <dhooten@splunk.com>

* PR feedback

Signed-off-by: Dustin Hooten <dhooten@splunk.com>
Authored by Dustin Hooten on 2019-11-11 14:42:24 -07:00; committed by Julius Volz
parent 454315337b
commit ca60bf298c
31 changed files with 1499 additions and 77 deletions


@@ -390,7 +390,7 @@ Prometheus target discovery:
GET /api/v1/targets
```
Both the active and dropped targets are part of the response.
Both the active and dropped targets are part of the response by default.
`labels` represents the label set after relabelling has occurred.
`discoveredLabels` represents the unmodified labels retrieved during service discovery before relabelling has occurred.
@@ -411,9 +411,11 @@ $ curl http://localhost:9090/api/v1/targets
"instance": "127.0.0.1:9090",
"job": "prometheus"
},
"scrapePool": "prometheus",
"scrapeUrl": "http://127.0.0.1:9090/metrics",
"lastError": "",
"lastScrape": "2017-01-17T15:07:44.723715405+01:00",
"lastScrapeDuration": 0.050688943,
"health": "up"
}
],
@@ -431,6 +433,41 @@ $ curl http://localhost:9090/api/v1/targets
}
```
The `state` query parameter allows the caller to filter by active or dropped targets
(e.g., `state=active`, `state=dropped`, `state=any`).
Note that an empty array is still returned for targets that are filtered out.
Other values are ignored.
```json
$ curl 'http://localhost:9090/api/v1/targets?state=active'
{
"status": "success",
"data": {
"activeTargets": [
{
"discoveredLabels": {
"__address__": "127.0.0.1:9090",
"__metrics_path__": "/metrics",
"__scheme__": "http",
"job": "prometheus"
},
"labels": {
"instance": "127.0.0.1:9090",
"job": "prometheus"
},
"scrapePool": "prometheus",
"scrapeUrl": "http://127.0.0.1:9090/metrics",
"lastError": "",
"lastScrape": "2017-01-17T15:07:44.723715405+01:00",
"lastScrapeDuration": 50688943,
"health": "up"
}
],
"droppedTargets": []
}
}
```
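
For readers following the API change, here is a minimal TypeScript sketch of a client requesting only active targets with the new `state` filter. The `fetchTargets` helper and `TargetsResponse` type are illustrative names and not part of this commit (the React UI itself goes through its `useFetch` hook, shown later in this diff).

```ts
// Minimal sketch of querying the targets endpoint with the state filter.
// Assumes a browser-like environment where a global fetch is available.
interface TargetsResponse {
  status: string;
  data: {
    activeTargets: object[];
    droppedTargets: object[];
  };
}

// state may be 'active', 'dropped' or 'any'; other values are ignored by the API.
async function fetchTargets(state: 'active' | 'dropped' | 'any'): Promise<TargetsResponse> {
  const res = await fetch(`/api/v1/targets?state=${state}`);
  return res.json();
}

// Example: request only active targets; droppedTargets comes back as an empty array.
fetchTargets('active').then(({ data }) => console.log(data.activeTargets.length));
```
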
## Rules


@@ -499,7 +499,7 @@ func appender(app storage.Appender, limit int) storage.Appender {
// A scraper retrieves samples and accepts a status report at the end.
type scraper interface {
scrape(ctx context.Context, w io.Writer) (string, error)
report(start time.Time, dur time.Duration, err error)
Report(start time.Time, dur time.Duration, err error)
offset(interval time.Duration, jitterSeed uint64) time.Duration
}
@@ -1212,7 +1212,7 @@
)
func (sl *scrapeLoop) report(start time.Time, duration time.Duration, scraped, appended, seriesAdded int, err error) error {
sl.scraper.report(start, duration, err)
sl.scraper.Report(start, duration, err)
ts := timestamp.FromTime(start)


@@ -1450,7 +1450,7 @@ func (ts *testScraper) offset(interval time.Duration, jitterSeed uint64) time.Du
return ts.offsetDur
}
func (ts *testScraper) report(start time.Time, duration time.Duration, err error) {
func (ts *testScraper) Report(start time.Time, duration time.Duration, err error) {
ts.lastStart = start
ts.lastDuration = duration
ts.lastError = err


@@ -200,7 +200,8 @@ func (t *Target) URL() *url.URL {
}
}
func (t *Target) report(start time.Time, dur time.Duration, err error) {
// Report sets target data about the last scrape.
func (t *Target) Report(start time.Time, dur time.Duration, err error) {
t.mtx.Lock()
defer t.mtx.Unlock()


@@ -25,6 +25,7 @@ import (
"regexp"
"sort"
"strconv"
"strings"
"time"
"unsafe"
@@ -562,11 +563,13 @@ type Target struct {
// Any labels that are added to this target and its metrics.
Labels map[string]string `json:"labels"`
ScrapeURL string `json:"scrapeUrl"`
ScrapePool string `json:"scrapePool"`
ScrapeURL string `json:"scrapeUrl"`
LastError string `json:"lastError"`
LastScrape time.Time `json:"lastScrape"`
Health scrape.TargetHealth `json:"health"`
LastError string `json:"lastError"`
LastScrape time.Time `json:"lastScrape"`
LastScrapeDuration float64 `json:"lastScrapeDuration"`
Health scrape.TargetHealth `json:"health"`
}
// DroppedTarget has the information for one target that was dropped during relabelling.
@@ -582,7 +585,7 @@ type TargetDiscovery struct {
}
func (api *API) targets(r *http.Request) apiFuncResult {
flatten := func(targets map[string][]*scrape.Target) []*scrape.Target {
sortKeys := func(targets map[string][]*scrape.Target) ([]string, int) {
var n int
keys := make([]string, 0, len(targets))
for k := range targets {
@@ -590,6 +593,11 @@ func (api *API) targets(r *http.Request) apiFuncResult {
n += len(targets[k])
}
sort.Strings(keys)
return keys, n
}
flatten := func(targets map[string][]*scrape.Target) []*scrape.Target {
keys, n := sortKeys(targets)
res := make([]*scrape.Target, 0, n)
for _, k := range keys {
res = append(res, targets[k]...)
@@ -597,31 +605,49 @@ func (api *API) targets(r *http.Request) apiFuncResult {
return res
}
tActive := flatten(api.targetRetriever.TargetsActive())
tDropped := flatten(api.targetRetriever.TargetsDropped())
res := &TargetDiscovery{ActiveTargets: make([]*Target, 0, len(tActive)), DroppedTargets: make([]*DroppedTarget, 0, len(tDropped))}
state := strings.ToLower(r.URL.Query().Get("state"))
showActive := state == "" || state == "any" || state == "active"
showDropped := state == "" || state == "any" || state == "dropped"
res := &TargetDiscovery{}
for _, target := range tActive {
lastErrStr := ""
lastErr := target.LastError()
if lastErr != nil {
lastErrStr = lastErr.Error()
if showActive {
targetsActive := api.targetRetriever.TargetsActive()
activeKeys, numTargets := sortKeys(targetsActive)
res.ActiveTargets = make([]*Target, 0, numTargets)
for _, key := range activeKeys {
for _, target := range targetsActive[key] {
lastErrStr := ""
lastErr := target.LastError()
if lastErr != nil {
lastErrStr = lastErr.Error()
}
res.ActiveTargets = append(res.ActiveTargets, &Target{
DiscoveredLabels: target.DiscoveredLabels().Map(),
Labels: target.Labels().Map(),
ScrapePool: key,
ScrapeURL: target.URL().String(),
LastError: lastErrStr,
LastScrape: target.LastScrape(),
LastScrapeDuration: target.LastScrapeDuration().Seconds(),
Health: target.Health(),
})
}
}
res.ActiveTargets = append(res.ActiveTargets, &Target{
DiscoveredLabels: target.DiscoveredLabels().Map(),
Labels: target.Labels().Map(),
ScrapeURL: target.URL().String(),
LastError: lastErrStr,
LastScrape: target.LastScrape(),
Health: target.Health(),
})
} else {
res.ActiveTargets = []*Target{}
}
for _, t := range tDropped {
res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{
DiscoveredLabels: t.DiscoveredLabels().Map(),
})
if showDropped {
tDropped := flatten(api.targetRetriever.TargetsDropped())
res.DroppedTargets = make([]*DroppedTarget, 0, len(tDropped))
for _, t := range tDropped {
res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{
DiscoveredLabels: t.DiscoveredLabels().Map(),
})
}
} else {
res.DroppedTargets = []*DroppedTarget{}
}
return apiFuncResult{res, nil, nil, nil}
}


@@ -57,32 +57,36 @@ import (
type testTargetRetriever struct{}
var (
scrapeStart = time.Now().Add(-11 * time.Second)
)
func (t testTargetRetriever) TargetsActive() map[string][]*scrape.Target {
testTarget := scrape.NewTarget(
labels.FromMap(map[string]string{
model.SchemeLabel: "http",
model.AddressLabel: "example.com:8080",
model.MetricsPathLabel: "/metrics",
model.JobLabel: "test",
}),
nil,
url.Values{},
)
testTarget.Report(scrapeStart, 70*time.Millisecond, nil)
blackboxTarget := scrape.NewTarget(
labels.FromMap(map[string]string{
model.SchemeLabel: "http",
model.AddressLabel: "localhost:9115",
model.MetricsPathLabel: "/probe",
model.JobLabel: "blackbox",
}),
nil,
url.Values{"target": []string{"example.com"}},
)
blackboxTarget.Report(scrapeStart, 100*time.Millisecond, errors.New("failed"))
return map[string][]*scrape.Target{
"test": {
scrape.NewTarget(
labels.FromMap(map[string]string{
model.SchemeLabel: "http",
model.AddressLabel: "example.com:8080",
model.MetricsPathLabel: "/metrics",
model.JobLabel: "test",
}),
nil,
url.Values{},
),
},
"blackbox": {
scrape.NewTarget(
labels.FromMap(map[string]string{
model.SchemeLabel: "http",
model.AddressLabel: "localhost:9115",
model.MetricsPathLabel: "/probe",
model.JobLabel: "blackbox",
}),
nil,
url.Values{"target": []string{"example.com"}},
),
},
"test": {testTarget},
"blackbox": {blackboxTarget},
}
}
func (t testTargetRetriever) TargetsDropped() map[string][]*scrape.Target {
@@ -699,16 +703,24 @@ func testEndpoints(t *testing.T, api *API, testLabelAPI bool) {
Labels: map[string]string{
"job": "blackbox",
},
ScrapeURL: "http://localhost:9115/probe?target=example.com",
Health: "unknown",
ScrapePool: "blackbox",
ScrapeURL: "http://localhost:9115/probe?target=example.com",
Health: "down",
LastError: "failed",
LastScrape: scrapeStart,
LastScrapeDuration: 0.1,
},
{
DiscoveredLabels: map[string]string{},
Labels: map[string]string{
"job": "test",
},
ScrapeURL: "http://example.com:8080/metrics",
Health: "unknown",
ScrapePool: "test",
ScrapeURL: "http://example.com:8080/metrics",
Health: "up",
LastError: "",
LastScrape: scrapeStart,
LastScrapeDuration: 0.07,
},
},
DroppedTargets: []*DroppedTarget{
@@ -723,6 +735,104 @@ func testEndpoints(t *testing.T, api *API, testLabelAPI bool) {
},
},
},
{
endpoint: api.targets,
query: url.Values{
"state": []string{"any"},
},
response: &TargetDiscovery{
ActiveTargets: []*Target{
{
DiscoveredLabels: map[string]string{},
Labels: map[string]string{
"job": "blackbox",
},
ScrapePool: "blackbox",
ScrapeURL: "http://localhost:9115/probe?target=example.com",
Health: "down",
LastError: "failed",
LastScrape: scrapeStart,
LastScrapeDuration: 0.1,
},
{
DiscoveredLabels: map[string]string{},
Labels: map[string]string{
"job": "test",
},
ScrapePool: "test",
ScrapeURL: "http://example.com:8080/metrics",
Health: "up",
LastError: "",
LastScrape: scrapeStart,
LastScrapeDuration: 0.07,
},
},
DroppedTargets: []*DroppedTarget{
{
DiscoveredLabels: map[string]string{
"__address__": "http://dropped.example.com:9115",
"__metrics_path__": "/probe",
"__scheme__": "http",
"job": "blackbox",
},
},
},
},
},
{
endpoint: api.targets,
query: url.Values{
"state": []string{"active"},
},
response: &TargetDiscovery{
ActiveTargets: []*Target{
{
DiscoveredLabels: map[string]string{},
Labels: map[string]string{
"job": "blackbox",
},
ScrapePool: "blackbox",
ScrapeURL: "http://localhost:9115/probe?target=example.com",
Health: "down",
LastError: "failed",
LastScrape: scrapeStart,
LastScrapeDuration: 0.1,
},
{
DiscoveredLabels: map[string]string{},
Labels: map[string]string{
"job": "test",
},
ScrapePool: "test",
ScrapeURL: "http://example.com:8080/metrics",
Health: "up",
LastError: "",
LastScrape: scrapeStart,
LastScrapeDuration: 0.07,
},
},
DroppedTargets: []*DroppedTarget{},
},
},
{
endpoint: api.targets,
query: url.Values{
"state": []string{"Dropped"},
},
response: &TargetDiscovery{
ActiveTargets: []*Target{},
DroppedTargets: []*DroppedTarget{
{
DiscoveredLabels: map[string]string{
"__address__": "http://dropped.example.com:9115",
"__metrics_path__": "/probe",
"__scheme__": "http",
"job": "blackbox",
},
},
},
},
},
{
endpoint: api.alertmanagers,
response: &AlertmanagerDiscovery{


@@ -0,0 +1,27 @@
import { useLocalStorage } from './useLocalStorage';
import { renderHook, act } from '@testing-library/react-hooks';
describe('useLocalStorage', () => {
it('returns the initialState', () => {
const initialState = { a: 1, b: 2 };
const { result } = renderHook(() => useLocalStorage('mystorage', initialState));
expect(result.current[0]).toEqual(initialState);
});
it('stores the initialState as serialized json in localstorage', () => {
const key = 'mystorage';
const initialState = { a: 1, b: 2 };
renderHook(() => useLocalStorage(key, initialState));
expect(localStorage.getItem(key)).toEqual(JSON.stringify(initialState));
});
it('returns a setValue function that can reset local storage', () => {
const key = 'mystorage';
const initialState = { a: 1, b: 2 };
const { result } = renderHook(() => useLocalStorage(key, initialState));
const newValue = { a: 2, b: 5 };
act(() => {
result.current[1](newValue);
});
expect(result.current[0]).toEqual(newValue);
expect(localStorage.getItem(key)).toEqual(JSON.stringify(newValue));
});
});


@@ -0,0 +1,13 @@
import { Dispatch, SetStateAction, useEffect, useState } from 'react';
export function useLocalStorage<S>(localStorageKey: string, initialState: S): [S, Dispatch<SetStateAction<S>>] {
const localStorageState = JSON.parse(localStorage.getItem(localStorageKey) as string);
const [value, setValue] = useState(localStorageState || initialState);
useEffect(() => {
const serializedState = JSON.stringify(value);
localStorage.setItem(localStorageKey, serializedState);
}, [localStorageKey, value]);
return [value, setValue];
}
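
As a usage sketch of the hook above: a component reads its persisted state on mount and writes every update back to localStorage. The `ExamplePanel` component and the `example-panel-expanded` key below are illustrative only; the real usages added in this commit are in `Targets.tsx` and `ScrapePoolPanel.tsx` further down.

```ts
import React, { FC } from 'react';
import { useLocalStorage } from './useLocalStorage';

// Hypothetical component: its expanded/collapsed state survives page reloads
// because useLocalStorage mirrors it into localStorage under the given key.
const ExamplePanel: FC = () => {
  const [{ expanded }, setOptions] = useLocalStorage('example-panel-expanded', { expanded: true });
  return (
    <button onClick={(): void => setOptions({ expanded: !expanded })}>
      {expanded ? 'show less' : 'show more'}
    </button>
  );
};

export default ExamplePanel;
```
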


@@ -1,15 +0,0 @@
import React, { FC } from 'react';
import { RouteComponentProps } from '@reach/router';
import PathPrefixProps from '../PathPrefixProps';
import { Alert } from 'reactstrap';
const Targets: FC<RouteComponentProps & PathPrefixProps> = ({ pathPrefix }) => (
<>
<h2>Targets</h2>
<Alert color="warning">
This page is still under construction. Please try it in the <a href={`${pathPrefix}/targets`}>Classic UI</a>.
</Alert>
</>
);
export default Targets;


@@ -4,7 +4,7 @@ import Flags from './Flags';
import Rules from './Rules';
import Services from './Services';
import Status from './Status';
import Targets from './Targets';
import Targets from './targets/Targets';
import PanelList from './PanelList';
export { Alerts, Config, Flags, Rules, Services, Status, Targets, PanelList };


@@ -0,0 +1,36 @@
import React from 'react';
import { shallow } from 'enzyme';
import { Badge, Alert } from 'reactstrap';
import EndpointLink from './EndpointLink';
describe('EndpointLink', () => {
it('renders a simple anchor if the endpoint has no query params', () => {
const endpoint = 'http://100.104.208.71:15090/stats/prometheus';
const endpointLink = shallow(<EndpointLink endpoint={endpoint} />);
const anchor = endpointLink.find('a');
expect(anchor.prop('href')).toEqual(endpoint);
expect(anchor.children().text()).toEqual(endpoint);
expect(endpointLink.find('br')).toHaveLength(0);
});
it('renders an anchor targeting endpoint but with query param labels if the endpoint has query params', () => {
const endpoint = 'http://100.99.128.71:9115/probe?module=http_2xx&target=http://some-service';
const endpointLink = shallow(<EndpointLink endpoint={endpoint} />);
const anchor = endpointLink.find('a');
const badges = endpointLink.find(Badge);
expect(anchor.prop('href')).toEqual(endpoint);
expect(anchor.children().text()).toEqual('http://100.99.128.71:9115/probe');
expect(endpointLink.find('br')).toHaveLength(1);
expect(badges).toHaveLength(2);
const moduleLabel = badges.filterWhere(badge => badge.hasClass('module'));
expect(moduleLabel.children().text()).toEqual('module="http_2xx"');
const targetLabel = badges.filterWhere(badge => badge.hasClass('target'));
expect(targetLabel.children().text()).toEqual('target="http://some-service"');
});
it('renders an alert if url is invalid', () => {
const endpointLink = shallow(<EndpointLink endpoint={'afdsacas'} />);
const err = endpointLink.find(Alert);
expect(err.render().text()).toEqual('Error: Invalid URL');
});
});


@@ -0,0 +1,38 @@
import React, { FC } from 'react';
import { Badge, Alert } from 'reactstrap';
export interface EndpointLinkProps {
endpoint: string;
}
const EndpointLink: FC<EndpointLinkProps> = ({ endpoint }) => {
let url: URL;
try {
url = new URL(endpoint);
} catch (e) {
return (
<Alert color="danger">
<strong>Error:</strong> {e.message}
</Alert>
);
}
const { host, pathname, protocol, searchParams }: URL = url;
const params = Array.from(searchParams.entries());
return (
<>
<a href={endpoint}>{`${protocol}//${host}${pathname}`}</a>
{params.length > 0 ? <br /> : null}
{params.map(([labelName, labelValue]: [string, string]) => {
return (
<Badge color="primary" className={`mr-1 ${labelName}`} key={labelName}>
{`${labelName}="${labelValue}"`}
</Badge>
);
})}
</>
);
};
export default EndpointLink;


@@ -0,0 +1,4 @@
.btn {
margin-top: 0.6em;
margin-bottom: 15px;
}


@@ -0,0 +1,45 @@
import React, { Component } from 'react';
import { shallow, ShallowWrapper } from 'enzyme';
import { Button, ButtonGroup } from 'reactstrap';
import Filter, { FilterData, FilterProps } from './Filter';
import sinon, { SinonSpy } from 'sinon';
describe('Filter', () => {
const initialState: FilterData = { showHealthy: true, showUnhealthy: true };
let setFilter: SinonSpy;
let filterWrapper: ShallowWrapper<FilterProps, Readonly<{}>, Component<{}, {}, Component>>;
beforeEach(() => {
setFilter = sinon.spy();
filterWrapper = shallow(<Filter filter={initialState} setFilter={setFilter} />);
});
it('renders a button group', () => {
expect(filterWrapper.find(ButtonGroup)).toHaveLength(1);
});
it('renders an all filter button that is active by default', () => {
const btn = filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('all'));
expect(btn.prop('active')).toBe(true);
expect(btn.prop('color')).toBe('primary');
});
it('renders an unhealthy filter button that is inactive by default', () => {
const btn = filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('unhealthy'));
expect(btn.prop('active')).toBe(false);
expect(btn.prop('color')).toBe('primary');
});
it('renders an all filter button which shows all targets', () => {
const btn = filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('all'));
btn.simulate('click');
expect(setFilter.calledOnce).toBe(true);
expect(setFilter.getCall(0).args[0]).toEqual({ showHealthy: true, showUnhealthy: true });
});
it('renders an unhealthy filter button which filters targets', () => {
const btn = filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('unhealthy'));
btn.simulate('click');
expect(setFilter.calledOnce).toBe(true);
expect(setFilter.getCall(0).args[0]).toEqual({ showHealthy: false, showUnhealthy: true });
});
});


@@ -0,0 +1,39 @@
import React, { Dispatch, FC, SetStateAction } from 'react';
import { Button, ButtonGroup } from 'reactstrap';
import styles from './Filter.module.css';
export interface FilterData {
showHealthy: boolean;
showUnhealthy: boolean;
}
export interface FilterProps {
filter: FilterData;
setFilter: Dispatch<SetStateAction<FilterData>>;
}
const Filter: FC<FilterProps> = ({ filter, setFilter }) => {
const { showHealthy } = filter;
const btnProps = {
all: {
active: showHealthy,
className: `all ${styles.btn}`,
color: 'primary',
onClick: (): void => setFilter({ ...filter, showHealthy: true }),
},
unhealthy: {
active: !showHealthy,
className: `unhealthy ${styles.btn}`,
color: 'primary',
onClick: (): void => setFilter({ ...filter, showHealthy: false }),
},
};
return (
<ButtonGroup>
<Button {...btnProps.all}>All</Button>
<Button {...btnProps.unhealthy}>Unhealthy</Button>
</ButtonGroup>
);
};
export default Filter;


@@ -0,0 +1,98 @@
import * as React from 'react';
import { mount, shallow, ReactWrapper } from 'enzyme';
import { act } from 'react-dom/test-utils';
import { Alert } from 'reactstrap';
import { sampleApiResponse } from './__testdata__/testdata';
import ScrapePoolList from './ScrapePoolList';
import ScrapePoolPanel from './ScrapePoolPanel';
import { Target } from './target';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faSpinner } from '@fortawesome/free-solid-svg-icons';
describe('Flags', () => {
const defaultProps = {
filter: { showHealthy: true, showUnhealthy: true },
pathPrefix: '..',
};
beforeEach(() => {
fetch.resetMocks();
});
describe('before data is returned', () => {
const scrapePoolList = shallow(<ScrapePoolList {...defaultProps} />);
const spinner = scrapePoolList.find(FontAwesomeIcon);
it('renders a spinner', () => {
expect(spinner.prop('icon')).toEqual(faSpinner);
expect(spinner.prop('spin')).toBe(true);
});
it('renders exactly one spinner', () => {
expect(spinner).toHaveLength(1);
});
});
describe('when data is returned', () => {
let scrapePoolList: ReactWrapper;
let mock: Promise<Response>;
beforeEach(() => {
// Tooltip requires DOM elements to exist. They do not in enzyme rendering so we must manually create them.
const scrapePools: { [key: string]: number } = { blackbox: 3, node_exporter: 1, prometheus: 1 };
Object.keys(scrapePools).forEach((pool: string): void => {
Array.from(Array(scrapePools[pool]).keys()).forEach((idx: number): void => {
const div = document.createElement('div');
div.id = `series-labels-${pool}-${idx}`;
document.body.appendChild(div);
});
});
mock = fetch.mockResponse(JSON.stringify(sampleApiResponse));
});
it('renders a table', async () => {
await act(async () => {
scrapePoolList = mount(<ScrapePoolList {...defaultProps} />);
});
scrapePoolList.update();
expect(mock).toHaveBeenCalledWith('../api/v1/targets?state=active', undefined);
const panels = scrapePoolList.find(ScrapePoolPanel);
expect(panels).toHaveLength(3);
const activeTargets: Target[] = sampleApiResponse.data.activeTargets as Target[];
activeTargets.forEach(({ scrapePool }: Target) => {
const panel = scrapePoolList.find(ScrapePoolPanel).filterWhere(panel => panel.prop('scrapePool') === scrapePool);
expect(panel).toHaveLength(1);
});
});
it('filters by health', async () => {
const props = {
...defaultProps,
filter: { showHealthy: false, showUnhealthy: true },
};
await act(async () => {
scrapePoolList = mount(<ScrapePoolList {...props} />);
});
scrapePoolList.update();
expect(mock).toHaveBeenCalledWith('../api/v1/targets?state=active', undefined);
const panels = scrapePoolList.find(ScrapePoolPanel);
expect(panels).toHaveLength(0);
});
});
describe('when an error is returned', () => {
it('displays an alert', async () => {
const mock = fetch.mockReject(new Error('Error fetching targets'));
let scrapePoolList: ReactWrapper;
await act(async () => {
scrapePoolList = mount(<ScrapePoolList {...defaultProps} />);
});
scrapePoolList.update();
expect(mock).toHaveBeenCalledWith('../api/v1/targets?state=active', undefined);
const alert = scrapePoolList.find(Alert);
expect(alert.prop('color')).toBe('danger');
expect(alert.text()).toContain('Error fetching targets');
});
});
});


@@ -0,0 +1,55 @@
import React, { FC } from 'react';
import { FilterData } from './Filter';
import { useFetch } from '../../utils/useFetch';
import { ScrapePool, groupTargets } from './target';
import ScrapePoolPanel from './ScrapePoolPanel';
import PathPrefixProps from '../../PathPrefixProps';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faSpinner } from '@fortawesome/free-solid-svg-icons';
import { Alert } from 'reactstrap';
interface ScrapePoolListProps {
filter: FilterData;
}
const filterByHealth = ({ upCount, targets }: ScrapePool, { showHealthy, showUnhealthy }: FilterData): boolean => {
const isHealthy = upCount === targets.length;
return (isHealthy && showHealthy) || (!isHealthy && showUnhealthy);
};
const ScrapePoolList: FC<ScrapePoolListProps & PathPrefixProps> = ({ filter, pathPrefix }) => {
const { response, error } = useFetch(`${pathPrefix}/api/v1/targets?state=active`);
if (error) {
return (
<Alert color="danger">
<strong>Error fetching targets:</strong> {error.message}
</Alert>
);
} else if (response && response.status !== 'success') {
return (
<Alert color="danger">
<strong>Error fetching targets:</strong> {response.status}
</Alert>
);
} else if (response && response.data) {
const { activeTargets } = response.data;
const targetGroups = groupTargets(activeTargets);
return (
<>
{Object.keys(targetGroups)
.filter((scrapePool: string) => filterByHealth(targetGroups[scrapePool], filter))
.map((scrapePool: string) => {
const targetGroupProps = {
scrapePool,
targetGroup: targetGroups[scrapePool],
};
return <ScrapePoolPanel key={scrapePool} {...targetGroupProps} />;
})}
</>
);
}
return <FontAwesomeIcon icon={faSpinner} spin />;
};
export default ScrapePoolList;


@@ -0,0 +1,43 @@
.container {
margin-top: -12px;
}
.title {
font-size: 20px;
font-weight: bold;
cursor: pointer;
}
.normal {
composes: title;
}
.danger {
composes: title;
color: rgb(242, 65, 65);
}
.table {
width: 100%;
}
.cell {
height: auto;
word-wrap: break-word;
word-break: break-all;
}
.endpoint, .labels {
composes: cell;
width: 25%;
}
.state, .last-scrape {
composes: cell;
width: 10%;
}
.errors {
composes: cell;
width: 30%;
}


@@ -0,0 +1,142 @@
import React from 'react';
import { mount, shallow } from 'enzyme';
import { targetGroups } from './__testdata__/testdata';
import ScrapePoolPanel, { columns } from './ScrapePoolPanel';
import { Button, Collapse, Table, Badge } from 'reactstrap';
import { Target, getColor } from './target';
import EndpointLink from './EndpointLink';
import TargetLabels from './TargetLabels';
describe('ScrapePoolPanel', () => {
const defaultProps = {
scrapePool: 'blackbox',
targetGroup: targetGroups.blackbox,
};
const scrapePoolPanel = shallow(<ScrapePoolPanel {...defaultProps} />);
it('renders a container', () => {
const div = scrapePoolPanel.find('div').filterWhere(elem => elem.hasClass('container'));
expect(div).toHaveLength(1);
});
describe('Header', () => {
it('renders an h3', () => {
expect(scrapePoolPanel.find('h3')).toHaveLength(1);
});
it('renders an anchor with up count and danger color if upCount < targetsCount', () => {
const anchor = scrapePoolPanel.find('a');
expect(anchor).toHaveLength(1);
expect(anchor.prop('id')).toEqual('pool-blackbox');
expect(anchor.prop('href')).toEqual('#pool-blackbox');
expect(anchor.text()).toEqual('blackbox (2/3 up)');
expect(anchor.prop('className')).toEqual('danger');
});
it('renders an anchor with up count and normal color if upCount == targetsCount', () => {
const props = {
scrapePool: 'prometheus',
targetGroup: targetGroups.prometheus,
};
const scrapePoolPanel = shallow(<ScrapePoolPanel {...props} />);
const anchor = scrapePoolPanel.find('a');
expect(anchor).toHaveLength(1);
expect(anchor.prop('id')).toEqual('pool-prometheus');
expect(anchor.prop('href')).toEqual('#pool-prometheus');
expect(anchor.text()).toEqual('prometheus (1/1 up)');
expect(anchor.prop('className')).toEqual('normal');
});
it('renders a show less btn if expanded', () => {
const btn = scrapePoolPanel.find(Button);
expect(btn).toHaveLength(1);
expect(btn.prop('color')).toEqual('primary');
expect(btn.prop('size')).toEqual('xs');
expect(btn.render().text()).toEqual('show less');
});
it('renders a show more btn if collapsed', () => {
const props = {
scrapePool: 'prometheus',
targetGroup: targetGroups.prometheus,
};
const div = document.createElement('div');
div.id = `series-labels-prometheus-0`;
document.body.appendChild(div);
const scrapePoolPanel = mount(<ScrapePoolPanel {...props} />);
const btn = scrapePoolPanel.find(Button);
btn.simulate('click');
expect(btn.render().text()).toEqual('show more');
const collapse = scrapePoolPanel.find(Collapse);
expect(collapse.prop('isOpen')).toBe(false);
});
});
it('renders a Collapse component', () => {
const collapse = scrapePoolPanel.find(Collapse);
expect(collapse.prop('isOpen')).toBe(true);
});
describe('Table', () => {
it('renders a table', () => {
const table = scrapePoolPanel.find(Table);
const headers = table.find('th');
expect(table).toHaveLength(1);
expect(headers).toHaveLength(6);
columns.forEach(col => {
expect(headers.contains(col));
});
});
describe('for each target', () => {
const table = scrapePoolPanel.find(Table);
defaultProps.targetGroup.targets.forEach(
({ discoveredLabels, labels, scrapeUrl, lastError, health }: Target, idx: number) => {
const row = table.find('tr').at(idx + 1);
it('renders an EndpointLink with the scrapeUrl', () => {
const link = row.find(EndpointLink);
expect(link).toHaveLength(1);
expect(link.prop('endpoint')).toEqual(scrapeUrl);
});
it('renders a badge for health', () => {
const td = row.find('td').filterWhere(elem => Boolean(elem.hasClass('state')));
const badge = td.find(Badge);
expect(badge).toHaveLength(1);
expect(badge.prop('color')).toEqual(getColor(health));
expect(badge.children().text()).toEqual(health.toUpperCase());
});
it('renders series labels', () => {
const targetLabels = row.find(TargetLabels);
expect(targetLabels).toHaveLength(1);
expect(targetLabels.prop('discoveredLabels')).toEqual(discoveredLabels);
expect(targetLabels.prop('labels')).toEqual(labels);
});
it('renders last scrape time', () => {
const lastScrapeCell = row.find('td').filterWhere(elem => Boolean(elem.hasClass('last-scrape')));
expect(lastScrapeCell).toHaveLength(1);
});
it('renders last scrape duration', () => {
const lastScrapeCell = row.find('td').filterWhere(elem => Boolean(elem.hasClass('scrape-duration')));
expect(lastScrapeCell).toHaveLength(1);
});
it('renders a badge for Errors', () => {
const td = row.find('td').filterWhere(elem => Boolean(elem.hasClass('errors')));
const badge = td.find(Badge);
expect(badge).toHaveLength(lastError ? 1 : 0);
if (lastError) {
expect(badge.prop('color')).toEqual('danger');
expect(badge.children().text()).toEqual(lastError);
}
});
}
);
});
});
});


@@ -0,0 +1,95 @@
import React, { FC } from 'react';
import { ScrapePool, getColor } from './target';
import { Button, Collapse, Table, Badge } from 'reactstrap';
import styles from './ScrapePoolPanel.module.css';
import { Target } from './target';
import EndpointLink from './EndpointLink';
import TargetLabels from './TargetLabels';
import { formatRelative, humanizeDuration } from '../../utils/timeFormat';
import { now } from 'moment';
import { useLocalStorage } from '../../hooks/useLocalStorage';
interface PanelProps {
scrapePool: string;
targetGroup: ScrapePool;
}
export const columns = ['Endpoint', 'State', 'Labels', 'Last Scrape', 'Scrape Duration', 'Error'];
const ScrapePoolPanel: FC<PanelProps> = ({ scrapePool, targetGroup }) => {
const [{ expanded }, setOptions] = useLocalStorage(`targets-${scrapePool}-expanded`, { expanded: true });
const modifier = targetGroup.upCount < targetGroup.targets.length ? 'danger' : 'normal';
const id = `pool-${scrapePool}`;
const anchorProps = {
href: `#${id}`,
id,
};
const btnProps = {
children: `show ${expanded ? 'less' : 'more'}`,
color: 'primary',
onClick: (): void => setOptions({ expanded: !expanded }),
size: 'xs',
style: {
padding: '0.3em 0.3em 0.25em 0.3em',
fontSize: '0.375em',
marginLeft: '1em',
verticalAlign: 'baseline',
},
};
return (
<div className={styles.container}>
<h3>
<a className={styles[modifier]} {...anchorProps}>
{`${scrapePool} (${targetGroup.upCount}/${targetGroup.targets.length} up)`}
</a>
<Button {...btnProps} />
</h3>
<Collapse isOpen={expanded}>
<Table className={styles.table} size="sm" bordered hover striped>
<thead>
<tr key="header">
{columns.map(column => (
<th key={column}>{column}</th>
))}
</tr>
</thead>
<tbody>
{targetGroup.targets.map((target: Target, idx: number) => {
const {
discoveredLabels,
labels,
scrapePool,
scrapeUrl,
lastError,
lastScrape,
lastScrapeDuration,
health,
} = target;
const color = getColor(health);
return (
<tr key={scrapeUrl}>
<td className={styles.endpoint}>
<EndpointLink endpoint={scrapeUrl} />
</td>
<td className={styles.state}>
<Badge color={color}>{health.toUpperCase()}</Badge>
</td>
<td className={styles.labels}>
<TargetLabels discoveredLabels={discoveredLabels} labels={labels} scrapePool={scrapePool} idx={idx} />
</td>
<td className={styles['last-scrape']}>{formatRelative(lastScrape, now())}</td>
<td className={styles['scrape-duration']}>{humanizeDuration(lastScrapeDuration * 1000)}</td>
<td className={styles.errors}>{lastError ? <Badge color={color}>{lastError}</Badge> : null}</td>
</tr>
);
})}
</tbody>
</Table>
</Collapse>
</div>
);
};
export default ScrapePoolPanel;


@@ -0,0 +1,3 @@
.discovered {
white-space: nowrap;
}


@@ -0,0 +1,50 @@
import * as React from 'react';
import { shallow } from 'enzyme';
import TargetLabels from './TargetLabels';
import { Tooltip, Badge } from 'reactstrap';
import toJson from 'enzyme-to-json';
describe('targetLabels', () => {
const defaultProps = {
discoveredLabels: {
__address__: 'localhost:9100',
__metrics_path__: '/metrics',
__scheme__: 'http',
job: 'node_exporter',
},
labels: {
instance: 'localhost:9100',
job: 'node_exporter',
foo: 'bar',
},
idx: 1,
scrapePool: 'cortex/node-exporter_group/0',
};
const targetLabels = shallow(<TargetLabels {...defaultProps} />);
it('renders a div of series labels', () => {
const div = targetLabels.find('div').filterWhere(elem => elem.hasClass('series-labels-container'));
expect(div).toHaveLength(1);
expect(div.prop('id')).toEqual('series-labels-cortex/node-exporter_group/0-1');
});
it('wraps each label in a label badge', () => {
const l: { [key: string]: string } = defaultProps.labels;
Object.keys(l).forEach((labelName: string): void => {
const badge = targetLabels.find(Badge).filterWhere(badge => badge.hasClass(labelName));
expect(badge.children().text()).toEqual(`${labelName}="${l[labelName]}"`);
});
expect(targetLabels.find(Badge)).toHaveLength(3);
});
it('renders a tooltip for discovered labels', () => {
const tooltip = targetLabels.find(Tooltip);
expect(tooltip).toHaveLength(1);
expect(tooltip.prop('isOpen')).toBe(false);
expect(tooltip.prop('target')).toEqual('series-labels-cortex/node-exporter_group/0-1');
});
it('renders discovered labels', () => {
expect(toJson(targetLabels)).toMatchSnapshot();
});
});


@@ -0,0 +1,48 @@
import React, { FC, Fragment, useState } from 'react';
import { Badge, Tooltip } from 'reactstrap';
import styles from './TargetLabels.module.css';
interface Labels {
[key: string]: string;
}
export interface TargetLabelsProps {
discoveredLabels: Labels;
labels: Labels;
idx: number;
scrapePool: string;
}
const formatLabels = (labels: Labels): string[] => Object.keys(labels).map(key => `${key}="${labels[key]}"`);
const TargetLabels: FC<TargetLabelsProps> = ({ discoveredLabels, labels, idx, scrapePool }) => {
const [tooltipOpen, setTooltipOpen] = useState(false);
const toggle = (): void => setTooltipOpen(!tooltipOpen);
const id = `series-labels-${scrapePool}-${idx}`;
return (
<>
<div id={id} className="series-labels-container">
{Object.keys(labels).map(labelName => {
return (
<Badge color="primary" className={`mr-1 ${labelName}`} key={labelName}>
{`${labelName}="${labels[labelName]}"`}
</Badge>
);
})}
</div>
<Tooltip isOpen={tooltipOpen} target={id} toggle={toggle} style={{ maxWidth: 'none', textAlign: 'left' }}>
<b>Before relabeling:</b>
{formatLabels(discoveredLabels).map((s: string, idx: number) => (
<Fragment key={idx}>
<br />
<span className={styles.discovered}>{s}</span>
</Fragment>
))}
</Tooltip>
</>
);
};
export default TargetLabels;


@@ -0,0 +1,33 @@
import React from 'react';
import { shallow } from 'enzyme';
import Targets from './Targets';
import Filter from './Filter';
import ScrapePoolList from './ScrapePoolList';
describe('Targets', () => {
const defaultProps = {
pathPrefix: '..',
};
const targets = shallow(<Targets {...defaultProps} />);
describe('Header', () => {
const h2 = targets.find('h2');
it('renders a header', () => {
expect(h2.text()).toEqual('Targets');
});
it('renders exactly one header', () => {
const h2 = targets.find('h2');
expect(h2).toHaveLength(1);
});
});
it('renders a filter', () => {
const filter = targets.find(Filter);
expect(filter).toHaveLength(1);
expect(filter.prop('filter')).toEqual({ showHealthy: true, showUnhealthy: true });
});
it('renders a scrape pool list', () => {
const scrapePoolList = targets.find(ScrapePoolList);
expect(scrapePoolList).toHaveLength(1);
expect(scrapePoolList.prop('filter')).toEqual({ showHealthy: true, showUnhealthy: true });
expect(scrapePoolList.prop('pathPrefix')).toEqual(defaultProps.pathPrefix);
});
});


@@ -0,0 +1,22 @@
import React, { FC } from 'react';
import { RouteComponentProps } from '@reach/router';
import Filter from './Filter';
import ScrapePoolList from './ScrapePoolList';
import PathPrefixProps from '../../PathPrefixProps';
import { useLocalStorage } from '../../hooks/useLocalStorage';
const Targets: FC<RouteComponentProps & PathPrefixProps> = ({ pathPrefix }) => {
const [filter, setFilter] = useLocalStorage('targets-page-filter', { showHealthy: true, showUnhealthy: true });
const filterProps = { filter, setFilter };
const scrapePoolListProps = { filter, pathPrefix };
return (
<>
<h2>Targets</h2>
<Filter {...filterProps} />
<ScrapePoolList {...scrapePoolListProps} />
</>
);
};
export default Targets;


@@ -0,0 +1,81 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`targetLabels renders discovered labels 1`] = `
<Fragment>
<div
className="series-labels-container"
id="series-labels-cortex/node-exporter_group/0-1"
>
<Badge
className="mr-1 instance"
color="primary"
key="instance"
pill={false}
tag="span"
>
instance="localhost:9100"
</Badge>
<Badge
className="mr-1 job"
color="primary"
key="job"
pill={false}
tag="span"
>
job="node_exporter"
</Badge>
<Badge
className="mr-1 foo"
color="primary"
key="foo"
pill={false}
tag="span"
>
foo="bar"
</Badge>
</div>
<Tooltip
autohide={true}
isOpen={false}
placement="top"
placementPrefix="bs-tooltip"
style={
Object {
"maxWidth": "none",
"textAlign": "left",
}
}
target="series-labels-cortex/node-exporter_group/0-1"
toggle={[Function]}
trigger="click hover focus"
>
<b>
Before relabeling:
</b>
<br />
<span
className="discovered"
>
__address__="localhost:9100"
</span>
<br />
<span
className="discovered"
>
__metrics_path__="/metrics"
</span>
<br />
<span
className="discovered"
>
__scheme__="http"
</span>
<br />
<span
className="discovered"
>
job="node_exporter"
</span>
</Tooltip>
</Fragment>
`;


@@ -0,0 +1,215 @@
/* eslint @typescript-eslint/camelcase: 0 */
import { ScrapePools, Target, Labels } from '../target';
export const targetGroups: ScrapePools = Object.freeze({
blackbox: {
upCount: 2,
targets: [
{
discoveredLabels: {
__address__: 'http://prometheus.io',
__metrics_path__: '/probe',
__param_module: 'http_2xx',
__scheme__: 'http',
job: 'blackbox',
},
labels: {
instance: 'http://prometheus.io',
job: 'blackbox',
},
scrapePool: 'blackbox',
scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io',
lastError: '',
lastScrape: '2019-11-04T11:52:14.759299-07:00',
lastScrapeDuration: 36560147,
health: 'up',
},
{
discoveredLabels: {
__address__: 'https://prometheus.io',
__metrics_path__: '/probe',
__param_module: 'http_2xx',
__scheme__: 'http',
job: 'blackbox',
},
labels: {
instance: 'https://prometheus.io',
job: 'blackbox',
},
scrapePool: 'blackbox',
scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io',
lastError: '',
lastScrape: '2019-11-04T11:52:24.731096-07:00',
lastScrapeDuration: 49448763,
health: 'up',
},
{
discoveredLabels: {
__address__: 'http://example.com:8080',
__metrics_path__: '/probe',
__param_module: 'http_2xx',
__scheme__: 'http',
job: 'blackbox',
},
labels: {
instance: 'http://example.com:8080',
job: 'blackbox',
},
scrapePool: 'blackbox',
scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080',
lastError: '',
lastScrape: '2019-11-04T11:52:13.516654-07:00',
lastScrapeDuration: 120916592,
health: 'down',
},
],
},
node_exporter: {
upCount: 1,
targets: [
{
discoveredLabels: {
__address__: 'localhost:9100',
__metrics_path__: '/metrics',
__scheme__: 'http',
job: 'node_exporter',
},
labels: {
instance: 'localhost:9100',
job: 'node_exporter',
},
scrapePool: 'node_exporter',
scrapeUrl: 'http://localhost:9100/metrics',
lastError: '',
lastScrape: '2019-11-04T11:52:14.145703-07:00',
lastScrapeDuration: 3842307,
health: 'up',
},
],
},
prometheus: {
upCount: 1,
targets: [
{
discoveredLabels: {
__address__: 'localhost:9090',
__metrics_path__: '/metrics',
__scheme__: 'http',
job: 'prometheus',
},
labels: {
instance: 'localhost:9090',
job: 'prometheus',
},
scrapePool: 'prometheus',
scrapeUrl: 'http://localhost:9090/metrics',
lastError: '',
lastScrape: '2019-11-04T11:52:18.479731-07:00',
lastScrapeDuration: 4050976,
health: 'up',
},
],
},
});
export const sampleApiResponse = Object.freeze({
status: 'success',
data: {
activeTargets: [
{
discoveredLabels: {
__address__: 'http://prometheus.io',
__metrics_path__: '/probe',
__param_module: 'http_2xx',
__scheme__: 'http',
job: 'blackbox',
},
labels: {
instance: 'http://prometheus.io',
job: 'blackbox',
},
scrapePool: 'blackbox',
scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io',
lastError: '',
lastScrape: '2019-11-04T11:52:14.759299-07:00',
lastScrapeDuration: 36560147,
health: 'up',
},
{
discoveredLabels: {
__address__: 'https://prometheus.io',
__metrics_path__: '/probe',
__param_module: 'http_2xx',
__scheme__: 'http',
job: 'blackbox',
},
labels: {
instance: 'https://prometheus.io',
job: 'blackbox',
},
scrapePool: 'blackbox',
scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io',
lastError: '',
lastScrape: '2019-11-04T11:52:24.731096-07:00',
lastScrapeDuration: 49448763,
health: 'up',
},
{
discoveredLabels: {
__address__: 'http://example.com:8080',
__metrics_path__: '/probe',
__param_module: 'http_2xx',
__scheme__: 'http',
job: 'blackbox',
},
labels: {
instance: 'http://example.com:8080',
job: 'blackbox',
},
scrapePool: 'blackbox',
scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080',
lastError: '',
lastScrape: '2019-11-04T11:52:13.516654-07:00',
lastScrapeDuration: 120916592,
health: 'up',
},
{
discoveredLabels: {
__address__: 'localhost:9100',
__metrics_path__: '/metrics',
__scheme__: 'http',
job: 'node_exporter',
},
labels: {
instance: 'localhost:9100',
job: 'node_exporter',
},
scrapePool: 'node_exporter',
scrapeUrl: 'http://localhost:9100/metrics',
lastError: '',
lastScrape: '2019-11-04T11:52:14.145703-07:00',
lastScrapeDuration: 3842307,
health: 'up',
},
{
discoveredLabels: {
__address__: 'localhost:9090',
__metrics_path__: '/metrics',
__scheme__: 'http',
job: 'prometheus',
},
labels: {
instance: 'localhost:9090',
job: 'prometheus',
},
scrapePool: 'prometheus',
scrapeUrl: 'http://localhost:9090/metrics',
lastError: '',
lastScrape: '2019-11-04T11:52:18.479731-07:00',
lastScrapeDuration: 4050976,
health: 'up',
},
],
},
});


@@ -0,0 +1,45 @@
/* eslint @typescript-eslint/camelcase: 0 */
import { sampleApiResponse } from './__testdata__/testdata';
import { groupTargets, Target, ScrapePools, getColor } from './target';
import { string } from 'prop-types';
describe('groupTargets', () => {
const targets: Target[] = sampleApiResponse.data.activeTargets as Target[];
const targetGroups: ScrapePools = groupTargets(targets);
it('groups a list of targets by scrape job', () => {
['blackbox', 'prometheus', 'node_exporter'].forEach(scrapePool => {
expect(Object.keys(targetGroups)).toContain(scrapePool);
});
Object.keys(targetGroups).forEach((scrapePool: string): void => {
const ts: Target[] = targetGroups[scrapePool].targets;
ts.forEach((t: Target) => {
expect(t.scrapePool).toEqual(scrapePool);
});
});
});
it('adds upCount during aggregation', () => {
const testCases: { [key: string]: number } = { blackbox: 3, prometheus: 1, node_exporter: 1 };
Object.keys(testCases).forEach((scrapePool: string): void => {
expect(targetGroups[scrapePool].upCount).toEqual(testCases[scrapePool]);
});
});
});
describe('getColor', () => {
const testCases: { color: string; status: string }[] = [
{ color: 'danger', status: 'down' },
{ color: 'danger', status: 'DOWN' },
{ color: 'warning', status: 'unknown' },
{ color: 'warning', status: 'foo' },
{ color: 'success', status: 'up' },
{ color: 'success', status: 'Up' },
];
testCases.forEach(({ color, status }) => {
it(`returns ${color} for ${status} status`, () => {
expect(getColor(status)).toEqual(color);
});
});
});


@@ -0,0 +1,49 @@
export interface Labels {
[key: string]: string;
}
export interface Target {
discoveredLabels: Labels;
labels: Labels;
scrapePool: string;
scrapeUrl: string;
lastError: string;
lastScrape: string;
lastScrapeDuration: number;
health: string;
}
export interface ScrapePool {
upCount: number;
targets: Target[];
}
export interface ScrapePools {
[scrapePool: string]: ScrapePool;
}
export const groupTargets = (targets: Target[]): ScrapePools =>
targets.reduce((pools: ScrapePools, target: Target) => {
const { health, scrapePool } = target;
const up = health.toLowerCase() === 'up' ? 1 : 0;
if (!pools[scrapePool]) {
pools[scrapePool] = {
upCount: 0,
targets: [],
};
}
pools[scrapePool].targets.push(target);
pools[scrapePool].upCount += up;
return pools;
}, {});
export const getColor = (health: string): string => {
switch (health.toLowerCase()) {
case 'up':
return 'success';
case 'down':
return 'danger';
default:
return 'warning';
}
};


@@ -1,4 +1,4 @@
import { formatTime, parseTime, formatRange, parseRange } from './timeFormat';
import { formatTime, parseTime, formatRange, parseRange, humanizeDuration, formatRelative, now } from './timeFormat';
describe('formatTime', () => {
it('returns a time string representing the time in seconds', () => {
@@ -11,6 +11,7 @@ describe('parseTime', () => {
it('returns a time string representing the time in seconds', () => {
expect(parseTime('2019-10-26 00:23')).toEqual(1572049380000);
expect(parseTime('1970-01-01 00:00')).toEqual(0);
expect(parseTime('0001-01-01T00:00:00Z')).toEqual(-62135596800000);
});
});
@@ -35,3 +36,47 @@ describe('parseRange', () => {
expect(parseRange('63s')).toEqual(63);
});
});
describe('humanizeDuration', () => {
it('humanizes zero', () => {
expect(humanizeDuration(0)).toEqual('0s');
});
it('humanizes milliseconds', () => {
expect(humanizeDuration(1.234567)).toEqual('1.235ms');
expect(humanizeDuration(12.34567)).toEqual('12.346ms');
expect(humanizeDuration(123.45678)).toEqual('123.457ms');
expect(humanizeDuration(123)).toEqual('123.000ms');
});
it('humanizes seconds', () => {
expect(humanizeDuration(12340)).toEqual('12.340s');
});
it('humanizes minutes', () => {
expect(humanizeDuration(1234567)).toEqual('20m 34s');
});
it('humanizes hours', () => {
expect(humanizeDuration(12345678)).toEqual('3h 25m 45s');
});
it('humanizes days', () => {
expect(humanizeDuration(123456789)).toEqual('1d 10h 17m 36s');
expect(humanizeDuration(123456789000)).toEqual('1428d 21h 33m 9s');
});
it('takes sign into account', () => {
expect(humanizeDuration(-123456789000)).toEqual('-1428d 21h 33m 9s');
});
});
describe('formatRelative', () => {
it('renders never for pre-beginning-of-time strings', () => {
expect(formatRelative('0001-01-01T00:00:00Z', now())).toEqual('Never');
});
it('renders a humanized duration for sane durations', () => {
expect(formatRelative('2019-11-04T09:15:29.578701-07:00', parseTime('2019-11-04T09:15:35.8701-07:00'))).toEqual(
'6.292s'
);
expect(formatRelative('2019-11-04T09:15:35.8701-07:00', parseTime('2019-11-04T09:15:29.578701-07:00'))).toEqual(
'-6.292s'
);
});
});


@@ -36,3 +36,40 @@ export function parseTime(timeText: string): number {
export function formatTime(time: number): string {
return moment.utc(time).format('YYYY-MM-DD HH:mm');
}
export const now = (): number => moment().valueOf();
export const humanizeDuration = (milliseconds: number): string => {
const sign = milliseconds < 0 ? '-' : '';
const unsignedMillis = milliseconds < 0 ? -1 * milliseconds : milliseconds;
const duration = moment.duration(unsignedMillis, 'ms');
const ms = Math.floor(duration.milliseconds());
const s = Math.floor(duration.seconds());
const m = Math.floor(duration.minutes());
const h = Math.floor(duration.hours());
const d = Math.floor(duration.asDays());
if (d !== 0) {
return `${sign}${d}d ${h}h ${m}m ${s}s`;
}
if (h !== 0) {
return `${sign}${h}h ${m}m ${s}s`;
}
if (m !== 0) {
return `${sign}${m}m ${s}s`;
}
if (s !== 0) {
return `${sign}${s}.${ms}s`;
}
if (unsignedMillis > 0) {
return `${sign}${unsignedMillis.toFixed(3)}ms`;
}
return '0s';
};
export const formatRelative = (startStr: string, end: number): string => {
const start = parseTime(startStr);
if (start < 0) {
return 'Never';
}
return humanizeDuration(end - start);
};