[ BROKER ] Add broker metrics page (#2157)
* add metrics page
* add test for broker
* add test for paths
* remove unused-vars
* add editor view for broker metrics
* add not available message
* try to fix BrokerLogdir tests
* add tests for translateLogdir and translateLogdirs
* add tests for metrics page
* fix review comments
* fix unused var
* remove describe duplicate

Co-authored-by: Oleg Shur <workshur@gmail.com>
parent a77869783b
commit 9f2310472d
15 changed files with 569 additions and 84 deletions
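For orientation before the diff: the commit wires a new metrics route under the existing broker page. A minimal sketch of how the new path helpers compose, based on the lib/paths changes and the updated paths.spec.ts below (the clusterPath prefix comes from the pre-existing helper):

import { clusterBrokerMetricsPath, clusterBrokerPath } from 'lib/paths';

// Existing broker page route: `${clusterPath('local')}/brokers/1`
const logdirRoute = clusterBrokerPath('local', 1);

// New in this commit: the same path with a `/metrics` suffix,
// `${clusterPath('local')}/brokers/1/metrics`, rendered by BrokerMetrics.
const metricsRoute = clusterBrokerMetricsPath('local', 1);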
@@ -3,14 +3,18 @@ import PageHeading from 'components/common/PageHeading/PageHeading';
import * as Metrics from 'components/common/Metrics';
import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
import useAppParams from 'lib/hooks/useAppParams';
import { translateLogdir } from 'components/Brokers/utils/translateLogdir';
import { SmartTable } from 'components/common/SmartTable/SmartTable';
import { TableColumn } from 'components/common/SmartTable/TableColumn';
import { useTableState } from 'lib/hooks/useTableState';
import { ClusterBrokerParam } from 'lib/paths';
import {
  clusterBrokerMetricsPath,
  clusterBrokerMetricsRelativePath,
  ClusterBrokerParam,
  clusterBrokerPath,
} from 'lib/paths';
import useClusterStats from 'lib/hooks/useClusterStats';
import useBrokers from 'lib/hooks/useBrokers';
import useBrokersLogDirs from 'lib/hooks/useBrokersLogDirs';
import { NavLink, Route, Routes } from 'react-router-dom';
import BrokerLogdir from 'components/Brokers/Broker/BrokerLogdir/BrokerLogdir';
import BrokerMetrics from 'components/Brokers/Broker/BrokerMetrics/BrokerMetrics';
import Navbar from 'components/common/Navigation/Navbar.styled';

export interface BrokerLogdirState {
  name: string;
@@ -24,13 +28,6 @@ const Broker: React.FC = () => {

  const { data: clusterStats } = useClusterStats(clusterName);
  const { data: brokers } = useBrokers(clusterName);
  const { data: logDirs } = useBrokersLogDirs(clusterName, Number(brokerId));

  const preparedRows = logDirs?.map(translateLogdir) || [];
  const tableState = useTableState<BrokerLogdirState, string>(preparedRows, {
    idSelector: ({ name }) => name,
    totalPages: 0,
  });

  if (!clusterStats) return null;

@@ -53,16 +50,30 @@ const Broker: React.FC = () => {
          <Metrics.Indicator label="Host">{brokerItem?.host}</Metrics.Indicator>
        </Metrics.Section>
      </Metrics.Wrapper>
      <SmartTable
        tableState={tableState}
        placeholder="Log dir data not available"
        isFullwidth

      <Navbar role="navigation">
        <NavLink
          to={clusterBrokerPath(clusterName, brokerId)}
          className={({ isActive }) => (isActive ? 'is-active' : '')}
          end
        >
        <TableColumn title="Name" field="name" />
        <TableColumn title="Error" field="error" />
        <TableColumn title="Topics" field="topics" />
        <TableColumn title="Partitions" field="partitions" />
      </SmartTable>
          Logdir
        </NavLink>
        <NavLink
          to={clusterBrokerMetricsPath(clusterName, brokerId)}
          className={({ isActive }) => (isActive ? 'is-active' : '')}
        >
          Metrics
        </NavLink>
      </Navbar>

      <Routes>
        <Route index element={<BrokerLogdir />} />
        <Route
          path={clusterBrokerMetricsRelativePath}
          element={<BrokerMetrics />}
        />
      </Routes>
    </>
  );
};
@@ -0,0 +1,42 @@
import React from 'react';
import useAppParams from 'lib/hooks/useAppParams';
import { translateLogdirs } from 'components/Brokers/utils/translateLogdirs';
import { SmartTable } from 'components/common/SmartTable/SmartTable';
import { TableColumn } from 'components/common/SmartTable/TableColumn';
import { useTableState } from 'lib/hooks/useTableState';
import { ClusterBrokerParam } from 'lib/paths';
import useBrokersLogDirs from 'lib/hooks/useBrokersLogDirs';

export interface BrokerLogdirState {
  name: string;
  error: string;
  topics: number;
  partitions: number;
}

const BrokerLogdir: React.FC = () => {
  const { clusterName, brokerId } = useAppParams<ClusterBrokerParam>();

  const { data: logDirs } = useBrokersLogDirs(clusterName, Number(brokerId));

  const preparedRows = translateLogdirs(logDirs);
  const tableState = useTableState<BrokerLogdirState, string>(preparedRows, {
    idSelector: ({ name }) => name,
    totalPages: 0,
  });

  return (
    <SmartTable
      tableState={tableState}
      placeholder="Log dir data not available"
      isFullwidth
    >
      <TableColumn title="Name" field="name" />
      <TableColumn title="Error" field="error" />
      <TableColumn title="Topics" field="topics" />
      <TableColumn title="Partitions" field="partitions" />
    </SmartTable>
  );
};

export default BrokerLogdir;
@@ -0,0 +1,67 @@
import React from 'react';
import { render, WithRoute } from 'lib/testHelpers';
import { screen, waitFor } from '@testing-library/dom';
import { clusterBrokerPath } from 'lib/paths';
import fetchMock from 'fetch-mock';
import { act } from '@testing-library/react';
import Broker from 'components/Brokers/Broker/Broker';
import {
  clusterStatsPayload,
  brokerLogDirsPayload,
  brokersPayload,
} from 'components/Brokers/__test__/fixtures';

const clusterName = 'local';
const brokerId = 1;
const fetchStatsUrl = `/api/clusters/${clusterName}/stats`;
const fetchBrokersUrl = `/api/clusters/${clusterName}/brokers`;
const fetchLogDirsUrl = `/api/clusters/${clusterName}/brokers/logdirs`;

describe('BrokerLogdir Component', () => {
  afterEach(() => {
    fetchMock.reset();
  });

  const renderComponent = async () => {
    const fetchStatsMock = fetchMock.get(fetchStatsUrl, clusterStatsPayload);
    const fetchBrokersMock = fetchMock.get(fetchBrokersUrl, brokersPayload);
    await act(() => {
      render(
        <WithRoute path={clusterBrokerPath()}>
          <Broker />
        </WithRoute>,
        {
          initialEntries: [clusterBrokerPath(clusterName, brokerId)],
        }
      );
    });
    await waitFor(() => expect(fetchStatsMock.called()).toBeTruthy());
    expect(fetchBrokersMock.called()).toBeTruthy();
  };

  it('shows warning when server returns empty logDirs response', async () => {
    const fetchLogDirsMock = fetchMock.getOnce(fetchLogDirsUrl, [], {
      query: { broker: brokerId },
    });
    await renderComponent();
    await waitFor(() => expect(fetchLogDirsMock.called()).toBeTruthy());
    expect(screen.getByText('Log dir data not available')).toBeInTheDocument();
  });

  it('shows broker', async () => {
    const fetchLogDirsMock = fetchMock.getOnce(
      fetchLogDirsUrl,
      brokerLogDirsPayload,
      {
        query: { broker: brokerId },
      }
    );

    await renderComponent();
    await waitFor(() => expect(fetchLogDirsMock.called()).toBeTruthy());
    const topicCount = screen.getByText(3);
    const partitionsCount = screen.getByText(4);
    expect(topicCount).toBeInTheDocument();
    expect(partitionsCount).toBeInTheDocument();
  });
});
@@ -0,0 +1,18 @@
import React from 'react';
import useAppParams from 'lib/hooks/useAppParams';
import { ClusterBrokerParam } from 'lib/paths';
import useBrokersMetrics from 'lib/hooks/useBrokersMetrics';
import { SchemaType } from 'generated-sources';
import EditorViewer from 'components/common/EditorViewer/EditorViewer';
import { getEditorText } from 'components/Brokers/utils/getEditorText';

const BrokerMetrics: React.FC = () => {
  const { clusterName, brokerId } = useAppParams<ClusterBrokerParam>();
  const { data: metrics } = useBrokersMetrics(clusterName, Number(brokerId));

  return (
    <EditorViewer schemaType={SchemaType.JSON} data={getEditorText(metrics)} />
  );
};

export default BrokerMetrics;
@@ -0,0 +1,37 @@
import React from 'react';
import { render, WithRoute } from 'lib/testHelpers';
import { screen, waitFor } from '@testing-library/dom';
import { clusterBrokerMetricsPath } from 'lib/paths';
import fetchMock from 'fetch-mock';
import { act } from '@testing-library/react';
import BrokerMetrics from 'components/Brokers/Broker/BrokerMetrics/BrokerMetrics';

const clusterName = 'local';
const brokerId = 1;
const fetchMetricsUrl = `/api/clusters/${clusterName}/brokers/${brokerId}/metrics`;

describe('BrokerMetrics Component', () => {
  afterEach(() => {
    fetchMock.reset();
  });

  const renderComponent = async () => {
    const fetchMetricsMock = fetchMock.getOnce(fetchMetricsUrl, {});
    await act(() => {
      render(
        <WithRoute path={clusterBrokerMetricsPath()}>
          <BrokerMetrics />
        </WithRoute>,
        {
          initialEntries: [clusterBrokerMetricsPath(clusterName, brokerId)],
        }
      );
    });
    await waitFor(() => expect(fetchMetricsMock.called()).toBeTruthy());
  };

  it("shows warning when server doesn't return metrics response", async () => {
    await renderComponent();
    expect(screen.getAllByRole('textbox').length).toEqual(1);
  });
});
@@ -1,37 +1,57 @@
import React from 'react';
import { render, WithRoute } from 'lib/testHelpers';
import { screen, waitFor } from '@testing-library/dom';
import { clusterBrokerPath } from 'lib/paths';
import {
  clusterBrokerMetricsPath,
  clusterBrokerPath,
  getNonExactPath,
} from 'lib/paths';
import fetchMock from 'fetch-mock';
import { act } from '@testing-library/react';
import Broker from 'components/Brokers/Broker/Broker';
import {
  clusterStatsPayload,
  brokerLogDirsPayload,
  brokersPayload,
} from 'components/Brokers/__test__/fixtures';

const clusterName = 'local';
const brokerId = 1;
const activeClassName = 'is-active';
const fetchStatsUrl = `/api/clusters/${clusterName}/stats`;
const fetchBrokersUrl = `/api/clusters/${clusterName}/brokers`;
const fetchLogDirsUrl = `/api/clusters/${clusterName}/brokers/logdirs`;
const brokerLogdir = {
  pageText: 'brokerLogdir',
  navigationName: 'Logdir',
};
const brokerMetrics = {
  pageText: 'brokerMetrics',
  navigationName: 'Metrics',
};

jest.mock('components/Brokers/Broker/BrokerLogdir/BrokerLogdir', () => () => (
  <div>{brokerLogdir.pageText}</div>
));
jest.mock('components/Brokers/Broker/BrokerMetrics/BrokerMetrics', () => () => (
  <div>{brokerMetrics.pageText}</div>
));

describe('Broker Component', () => {
  afterEach(() => {
    fetchMock.reset();
  });

  const renderComponent = async () => {
  const renderComponent = async (
    path = clusterBrokerPath(clusterName, brokerId)
  ) => {
    const fetchStatsMock = fetchMock.get(fetchStatsUrl, clusterStatsPayload);
    const fetchBrokersMock = fetchMock.get(fetchBrokersUrl, brokersPayload);
    await act(() => {
      render(
        <WithRoute path={clusterBrokerPath()}>
        <WithRoute path={getNonExactPath(clusterBrokerPath())}>
          <Broker />
        </WithRoute>,
        {
          initialEntries: [clusterBrokerPath(clusterName, brokerId)],
          initialEntries: [path],
        }
      );
    });
@@ -39,29 +59,51 @@ describe('Broker Component', () => {
    expect(fetchBrokersMock.called()).toBeTruthy();
  };

  it('shows warning when server returns empty logDirs response', async () => {
    const fetchLogDirsMock = fetchMock.getOnce(fetchLogDirsUrl, [], {
      query: { broker: brokerId },
    });
    await renderComponent();
    await waitFor(() => expect(fetchLogDirsMock.called()).toBeTruthy());
    expect(screen.getByText('Log dir data not available')).toBeInTheDocument();
  });

  it('shows broker found', async () => {
    const fetchLogDirsMock = fetchMock.getOnce(
      fetchLogDirsUrl,
      brokerLogDirsPayload,
      {
        query: { broker: brokerId },
      }
    await renderComponent();
    const brokerInfo = brokersPayload.find((broker) => broker.id === brokerId);
    const brokerDiskUsage = clusterStatsPayload.diskUsage.find(
      (disk) => disk.brokerId === brokerId
    );

    expect(
      screen.getByText(brokerDiskUsage?.segmentCount || '')
    ).toBeInTheDocument();
    expect(screen.getByText('12MB')).toBeInTheDocument();

    expect(screen.getByText('Segment Count')).toBeInTheDocument();
    expect(
      screen.getByText(brokerDiskUsage?.segmentCount || '')
    ).toBeInTheDocument();

    expect(screen.getByText('Port')).toBeInTheDocument();
    expect(screen.getByText(brokerInfo?.port || '')).toBeInTheDocument();

    expect(screen.getByText('Host')).toBeInTheDocument();
    expect(screen.getByText(brokerInfo?.host || '')).toBeInTheDocument();
  });

  it('renders Broker Logdir', async () => {
    await renderComponent();
    await waitFor(() => expect(fetchLogDirsMock.called()).toBeTruthy());
    const topicCount = screen.getByText(3);
    const partitionsCount = screen.getByText(4);
    expect(topicCount).toBeInTheDocument();
    expect(partitionsCount).toBeInTheDocument();

    const logdirLink = screen.getByRole('link', {
      name: brokerLogdir.navigationName,
    });
    expect(logdirLink).toBeInTheDocument();
    expect(logdirLink).toHaveClass(activeClassName);

    expect(screen.getByText(brokerLogdir.pageText)).toBeInTheDocument();
  });

  it('renders Broker Metrics', async () => {
    await renderComponent(clusterBrokerMetricsPath(clusterName, brokerId));

    const metricsLink = screen.getByRole('link', {
      name: brokerMetrics.navigationName,
    });
    expect(metricsLink).toBeInTheDocument();
    expect(metricsLink).toHaveClass(activeClassName);

    expect(screen.getByText(brokerMetrics.pageText)).toBeInTheDocument();
  });
});
@@ -1,8 +1,8 @@
import { BrokersLogdirs } from 'generated-sources';

export const brokersPayload = [
  { id: 1, host: 'b-1.test.kafka.amazonaws.com' },
  { id: 2, host: 'b-2.test.kafka.amazonaws.com' },
  { id: 1, host: 'b-1.test.kafka.amazonaws.com', port: 9092 },
  { id: 2, host: 'b-2.test.kafka.amazonaws.com', port: 9092 },
];

export const clusterStatsPayload = {
@@ -20,38 +20,6 @@ export const clusterStatsPayload = {
  version: '2.2.1',
};

export const initialBrokersReducerState = {
  items: brokersPayload,
  brokerCount: 2,
  activeControllers: 1,
  onlinePartitionCount: 138,
  offlinePartitionCount: 0,
  inSyncReplicasCount: 239,
  outOfSyncReplicasCount: 0,
  underReplicatedPartitionCount: 0,
  diskUsage: [
    { brokerId: 0, segmentSize: 1111, segmentCount: 333 },
    { brokerId: 1, segmentSize: 2222, segmentCount: 444 },
  ],
  version: '2.2.1',
};

export const updatedBrokersReducerState = {
  items: brokersPayload,
  brokerCount: 2,
  activeControllers: 1,
  onlinePartitionCount: 138,
  offlinePartitionCount: 0,
  inSyncReplicasCount: 239,
  outOfSyncReplicasCount: 0,
  underReplicatedPartitionCount: 0,
  diskUsage: [
    { brokerId: 0, segmentSize: 334567, segmentCount: 245 },
    { brokerId: 1, segmentSize: 12345678, segmentCount: 121 },
  ],
  version: '2.2.1',
};

const partition = {
  broker: 2,
  offsetLag: 0,
@@ -0,0 +1,190 @@
import { BrokerLogdirState } from 'components/Brokers/Broker/Broker';
import { BrokerMetrics } from 'generated-sources';

export const transformedBrokerLogDirsPayload: BrokerLogdirState[] = [
  {
    error: 'NONE',
    name: '/opt/kafka/data-0/logs',
    topics: 3,
    partitions: 4,
  },
];
export const defaultTransformedBrokerLogDirsPayload: BrokerLogdirState = {
  error: '-',
  name: '-',
  topics: 0,
  partitions: 0,
};

export const brokerMetricsPayload: BrokerMetrics = {
  segmentSize: 23,
  segmentCount: 23,
  metrics: [
    {
      name: 'TotalFetchRequestsPerSec',
      canonicalName:
        'kafka.server:name=TotalFetchRequestsPerSec,topic=_connect_status,type=BrokerTopicMetrics',
      params: {
        topic: '_connect_status',
        name: 'TotalFetchRequestsPerSec',
        type: 'BrokerTopicMetrics',
      },
      value: {
        OneMinuteRate: 19.408369293127542,
        FifteenMinuteRate: 19.44631556589501,
        Count: 191615,
        FiveMinuteRate: 19.464393718807774,
        MeanRate: 19.4233855043407,
      },
    },
    {
      name: 'ZooKeeperRequestLatencyMs',
      canonicalName:
        'kafka.server:name=ZooKeeperRequestLatencyMs,type=ZooKeeperClientMetrics',
      params: {
        name: 'ZooKeeperRequestLatencyMs',
        type: 'ZooKeeperClientMetrics',
      },
      value: {
        Mean: 4.907351022183558,
        StdDev: 10.589608223906348,
        '75thPercentile': 2,
        '98thPercentile': 10,
        Min: 0,
        '95thPercentile': 5,
        '99thPercentile': 15,
        Max: 151,
        '999thPercentile': 92.79700000000003,
        Count: 2301,
        '50thPercentile': 1,
      },
    },
    {
      name: 'RequestHandlerAvgIdlePercent',
      canonicalName:
        'kafka.server:name=RequestHandlerAvgIdlePercent,type=KafkaRequestHandlerPool',
      params: {
        name: 'RequestHandlerAvgIdlePercent',
        type: 'KafkaRequestHandlerPool',
      },
      value: {
        OneMinuteRate: 0.9999008788765713,
        FifteenMinuteRate: 0.9983845959639047,
        Count: 9937344680371,
        FiveMinuteRate: 0.9986337207880311,
        MeanRate: 0.9971616923696525,
      },
    },
    {
      name: 'BytesInPerSec',
      canonicalName:
        'kafka.server:name=BytesInPerSec,topic=_connect_status,type=BrokerTopicMetrics',
      params: {
        topic: '_connect_status',
        name: 'BytesInPerSec',
        type: 'BrokerTopicMetrics',
      },
      value: {
        OneMinuteRate: 0,
        FifteenMinuteRate: 0,
        Count: 0,
        FiveMinuteRate: 0,
        MeanRate: 0,
      },
    },
    {
      name: 'FetchMessageConversionsPerSec',
      canonicalName:
        'kafka.server:name=FetchMessageConversionsPerSec,topic=__consumer_offsets,type=BrokerTopicMetrics',
      params: {
        topic: '__consumer_offsets',
        name: 'FetchMessageConversionsPerSec',
        type: 'BrokerTopicMetrics',
      },
      value: {
        OneMinuteRate: 0,
        FifteenMinuteRate: 0,
        Count: 0,
        FiveMinuteRate: 0,
        MeanRate: 0,
      },
    },
    {
      name: 'TotalProduceRequestsPerSec',
      canonicalName:
        'kafka.server:name=TotalProduceRequestsPerSec,topic=_connect_status,type=BrokerTopicMetrics',
      params: {
        topic: '_connect_status',
        name: 'TotalProduceRequestsPerSec',
        type: 'BrokerTopicMetrics',
      },
      value: {
        OneMinuteRate: 0,
        FifteenMinuteRate: 0,
        Count: 0,
        FiveMinuteRate: 0,
        MeanRate: 0,
      },
    },
    {
      name: 'MaxLag',
      canonicalName:
        'kafka.server:clientId=Replica,name=MaxLag,type=ReplicaFetcherManager',
      params: {
        clientId: 'Replica',
        name: 'MaxLag',
        type: 'ReplicaFetcherManager',
      },
      value: {
        Value: 0,
      },
    },
    {
      name: 'UnderMinIsrPartitionCount',
      canonicalName:
        'kafka.server:name=UnderMinIsrPartitionCount,type=ReplicaManager',
      params: {
        name: 'UnderMinIsrPartitionCount',
        type: 'ReplicaManager',
      },
      value: {
        Value: 0,
      },
    },
    {
      name: 'ZooKeeperDisconnectsPerSec',
      canonicalName:
        'kafka.server:name=ZooKeeperDisconnectsPerSec,type=SessionExpireListener',
      params: {
        name: 'ZooKeeperDisconnectsPerSec',
        type: 'SessionExpireListener',
      },
      value: {
        OneMinuteRate: 0,
        FifteenMinuteRate: 0,
        Count: 0,
        FiveMinuteRate: 0,
        MeanRate: 0,
      },
    },
    {
      name: 'BytesInPerSec',
      canonicalName:
        'kafka.server:name=BytesInPerSec,topic=__confluent.support.metrics,type=BrokerTopicMetrics',
      params: {
        topic: '__confluent.support.metrics',
        name: 'BytesInPerSec',
        type: 'BrokerTopicMetrics',
      },
      value: {
        OneMinuteRate: 3.093893673470914e-70,
        FifteenMinuteRate: 0.004057932469784932,
        Count: 1263,
        FiveMinuteRate: 1.047243693828501e-12,
        MeanRate: 0.12704831069266603,
      },
    },
  ],
};
export const transformedBrokerMetricsPayload =
'{"segmentSize":23,"segmentCount":23,"metrics":[{"name":"TotalFetchRequestsPerSec","canonicalName":"kafka.server:name=TotalFetchRequestsPerSec,topic=_connect_status,type=BrokerTopicMetrics","params":{"topic":"_connect_status","name":"TotalFetchRequestsPerSec","type":"BrokerTopicMetrics"},"value":{"OneMinuteRate":19.408369293127542,"FifteenMinuteRate":19.44631556589501,"Count":191615,"FiveMinuteRate":19.464393718807774,"MeanRate":19.4233855043407}},{"name":"ZooKeeperRequestLatencyMs","canonicalName":"kafka.server:name=ZooKeeperRequestLatencyMs,type=ZooKeeperClientMetrics","params":{"name":"ZooKeeperRequestLatencyMs","type":"ZooKeeperClientMetrics"},"value":{"Mean":4.907351022183558,"StdDev":10.589608223906348,"75thPercentile":2,"98thPercentile":10,"Min":0,"95thPercentile":5,"99thPercentile":15,"Max":151,"999thPercentile":92.79700000000003,"Count":2301,"50thPercentile":1}},{"name":"RequestHandlerAvgIdlePercent","canonicalName":"kafka.server:name=RequestHandlerAvgIdlePercent,type=KafkaRequestHandlerPool","params":{"name":"RequestHandlerAvgIdlePercent","type":"KafkaRequestHandlerPool"},"value":{"OneMinuteRate":0.9999008788765713,"FifteenMinuteRate":0.9983845959639047,"Count":9937344680371,"FiveMinuteRate":0.9986337207880311,"MeanRate":0.9971616923696525}},{"name":"BytesInPerSec","canonicalName":"kafka.server:name=BytesInPerSec,topic=_connect_status,type=BrokerTopicMetrics","params":{"topic":"_connect_status","name":"BytesInPerSec","type":"BrokerTopicMetrics"},"value":{"OneMinuteRate":0,"FifteenMinuteRate":0,"Count":0,"FiveMinuteRate":0,"MeanRate":0}},{"name":"FetchMessageConversionsPerSec","canonicalName":"kafka.server:name=FetchMessageConversionsPerSec,topic=__consumer_offsets,type=BrokerTopicMetrics","params":{"topic":"__consumer_offsets","name":"FetchMessageConversionsPerSec","type":"BrokerTopicMetrics"},"value":{"OneMinuteRate":0,"FifteenMinuteRate":0,"Count":0,"FiveMinuteRate":0,"MeanRate":0}},{"name":"TotalProduceRequestsPerSec","canonicalName":"kafka.server:name=TotalProduceRequestsPerSec,topic=_connect_status,type=BrokerTopicMetrics","params":{"topic":"_connect_status","name":"TotalProduceRequestsPerSec","type":"BrokerTopicMetrics"},"value":{"OneMinuteRate":0,"FifteenMinuteRate":0,"Count":0,"FiveMinuteRate":0,"MeanRate":0}},{"name":"MaxLag","canonicalName":"kafka.server:clientId=Replica,name=MaxLag,type=ReplicaFetcherManager","params":{"clientId":"Replica","name":"MaxLag","type":"ReplicaFetcherManager"},"value":{"Value":0}},{"name":"UnderMinIsrPartitionCount","canonicalName":"kafka.server:name=UnderMinIsrPartitionCount,type=ReplicaManager","params":{"name":"UnderMinIsrPartitionCount","type":"ReplicaManager"},"value":{"Value":0}},{"name":"ZooKeeperDisconnectsPerSec","canonicalName":"kafka.server:name=ZooKeeperDisconnectsPerSec,type=SessionExpireListener","params":{"name":"ZooKeeperDisconnectsPerSec","type":"SessionExpireListener"},"value":{"OneMinuteRate":0,"FifteenMinuteRate":0,"Count":0,"FiveMinuteRate":0,"MeanRate":0}},{"name":"BytesInPerSec","canonicalName":"kafka.server:name=BytesInPerSec,topic=__confluent.support.metrics,type=BrokerTopicMetrics","params":{"topic":"__confluent.support.metrics","name":"BytesInPerSec","type":"BrokerTopicMetrics"},"value":{"OneMinuteRate":3.093893673470914e-70,"FifteenMinuteRate":0.004057932469784932,"Count":1263,"FiveMinuteRate":1.047243693828501e-12,"MeanRate":0.12704831069266603}}]}';
@@ -0,0 +1,17 @@
import { getEditorText } from 'components/Brokers/utils/getEditorText';

import {
  brokerMetricsPayload,
  transformedBrokerMetricsPayload,
} from './fixtures';

describe('Get editor text', () => {
  it('returns error message when broker metrics is not defined', () => {
    expect(getEditorText(undefined)).toEqual('Metrics data not available');
  });
  it('returns transformed metrics text when broker logdirs metrics', () => {
    expect(getEditorText(brokerMetricsPayload)).toEqual(
      transformedBrokerMetricsPayload
    );
  });
});
@@ -0,0 +1,35 @@
import {
  translateLogdir,
  translateLogdirs,
} from 'components/Brokers/utils/translateLogdirs';
import { brokerLogDirsPayload } from 'components/Brokers/__test__/fixtures';

import {
  defaultTransformedBrokerLogDirsPayload,
  transformedBrokerLogDirsPayload,
} from './fixtures';

describe('translateLogdir and translateLogdirs', () => {
  describe('translateLogdirs', () => {
    it('returns empty array when broker logdirs is not defined', () => {
      expect(translateLogdirs(undefined)).toEqual([]);
    });
    it('returns transformed LogDirs array when broker logdirs defined', () => {
      expect(translateLogdirs(brokerLogDirsPayload)).toEqual(
        transformedBrokerLogDirsPayload
      );
    });
  });
  describe('translateLogdir', () => {
    it('returns default data when broker logdir is empty', () => {
      expect(translateLogdir({})).toEqual(
        defaultTransformedBrokerLogDirsPayload
      );
    });
    it('returns transformed LogDir when broker logdir defined', () => {
      expect(translateLogdir(brokerLogDirsPayload[0])).toEqual(
        transformedBrokerLogDirsPayload[0]
      );
    });
  });
});
@@ -0,0 +1,4 @@
import { BrokerMetrics } from 'generated-sources';

export const getEditorText = (metrics: BrokerMetrics | undefined): string =>
  metrics ? JSON.stringify(metrics) : 'Metrics data not available';
@@ -15,3 +15,9 @@ export const translateLogdir = (data: BrokersLogdirs): BrokerLogdirState => {
    partitions: partitionsCount,
  };
};

export const translateLogdirs = (
  data: BrokersLogdirs[] | undefined
): BrokerLogdirState[] => {
  return data?.map(translateLogdir) || [];
};
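Taken together with the new fixtures, the helper's behaviour can be summarised like this (illustrative sketch; the brokerLogDirsPayload input itself is defined in the fixtures file and not shown in this diff):

import { translateLogdirs } from 'components/Brokers/utils/translateLogdirs';

// undefined (no logdirs returned yet) collapses to an empty array, which is
// what lets the BrokerLogdir table fall back to its
// 'Log dir data not available' placeholder.
translateLogdirs(undefined); // => []

// A populated logdir maps to one BrokerLogdirState row, e.g.
// { name: '/opt/kafka/data-0/logs', error: 'NONE', topics: 3, partitions: 4 }
// (see transformedBrokerLogDirsPayload in the new fixtures above).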
@@ -6,6 +6,7 @@ const clusterName = 'test-cluster-name';
const groupId = 'test-group-id';
const schemaId = 'test-schema-id';
const topicId = 'test-topic-id';
const brokerId = 'test-Broker-id';
const connectName = 'test-connect-name';
const connectorName = 'test-connector-name';

@@ -30,6 +31,23 @@ describe('Paths', () => {
    expect(paths.clusterBrokersPath()).toEqual(
      paths.clusterBrokersPath(RouteParams.clusterName)
    );

    expect(paths.clusterBrokerPath(clusterName, brokerId)).toEqual(
      `${paths.clusterPath(clusterName)}/brokers/${brokerId}`
    );
    expect(paths.clusterBrokerPath()).toEqual(
      paths.clusterBrokerPath(RouteParams.clusterName, RouteParams.brokerId)
    );

    expect(paths.clusterBrokerMetricsPath(clusterName, brokerId)).toEqual(
      `${paths.clusterPath(clusterName)}/brokers/${brokerId}/metrics`
    );
    expect(paths.clusterBrokerMetricsPath()).toEqual(
      paths.clusterBrokerMetricsPath(
        RouteParams.clusterName,
        RouteParams.brokerId
      )
    );
  });
  it('clusterConsumerGroupsPath', () => {
    expect(paths.clusterConsumerGroupsPath(clusterName)).toEqual(
kafka-ui-react-app/src/lib/hooks/useBrokersMetrics.tsx (new file, 18 additions)
@@ -0,0 +1,18 @@
import { brokersApiClient } from 'lib/api';
import { useQuery } from 'react-query';
import { ClusterName } from 'redux/interfaces';

export default function useBrokersMetrics(
  clusterName: ClusterName,
  brokerId: number
) {
  return useQuery(
    ['metrics', clusterName, brokerId],
    () =>
      brokersApiClient.getBrokersMetrics({
        clusterName,
        id: brokerId,
      }),
    { suspense: true, refetchInterval: 5000 }
  );
}
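BrokerMetrics above is the only consumer of this hook in the commit; a standalone usage would look roughly like this (hypothetical component, assuming the app's existing react-query provider and suspense boundary):

import React from 'react';
import useBrokersMetrics from 'lib/hooks/useBrokersMetrics';

// Hypothetical consumer, not part of the commit.
const MetricsCount: React.FC<{ clusterName: string; brokerId: number }> = ({
  clusterName,
  brokerId,
}) => {
  // Data is refetched every 5 seconds because the hook sets refetchInterval: 5000.
  const { data } = useBrokersMetrics(clusterName, brokerId);
  return <span>{data?.metrics?.length ?? 0} metrics reported</span>;
};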
@@ -33,6 +33,7 @@ export type ClusterNameRoute = { clusterName: ClusterName };

// Brokers
export const clusterBrokerRelativePath = 'brokers';
export const clusterBrokerMetricsRelativePath = 'metrics';
export const clusterBrokersPath = (
  clusterName: ClusterName = RouteParams.clusterName
) => `${clusterPath(clusterName)}/${clusterBrokerRelativePath}`;
@@ -41,8 +42,19 @@ export const clusterBrokerPath = (
  clusterName: ClusterName = RouteParams.clusterName,
  brokerId: BrokerId | string = RouteParams.brokerId
) => `${clusterBrokersPath(clusterName)}/${brokerId}`;
export const clusterBrokerMetricsPath = (
  clusterName: ClusterName = RouteParams.clusterName,
  brokerId: BrokerId | string = RouteParams.brokerId
) =>
  `${clusterBrokerPath(
    clusterName,
    brokerId
  )}/${clusterBrokerMetricsRelativePath}`;

export type ClusterBrokerParam = { clusterName: ClusterName; brokerId: string };
export type ClusterBrokerParam = {
  clusterName: ClusterName;
  brokerId: string;
};

// Consumer Groups
export const clusterConsumerGroupsRelativePath = 'consumer-groups';