Change the required env variables #701

Merged · 1 commit · Apr 24, 2024
25 changes: 17 additions & 8 deletions ui/api/kafka/actions.ts
@@ -20,9 +20,11 @@ import * as topic from "./topic.promql";
export type ClusterMetric = keyof typeof cluster;
export type TopicMetric = keyof typeof topic;

const prom = new PrometheusDriver({
endpoint: process.env.CONSOLE_METRICS_PROMETHEUS_URL,
});
const prom = process.env.CONSOLE_METRICS_PROMETHEUS_URL
? new PrometheusDriver({
endpoint: process.env.CONSOLE_METRICS_PROMETHEUS_URL,
})
: undefined;

const log = logger.child({ module: "kafka-api" });

@@ -71,13 +73,20 @@ export async function getKafkaClusterKpis(

export async function getKafkaClusterKpis(
clusterId: string,
): Promise<{ cluster: ClusterDetail; kpis: ClusterKpis } | null> {
): Promise<{ cluster: ClusterDetail; kpis: ClusterKpis | null } | null> {
try {
const cluster = await getKafkaCluster(clusterId);
if (!cluster) {
return null;
}

log.debug({ cluster, prom }, "????");

if (!prom) {
log.debug({ clusterId }, "getKafkaClusterKpis Prometheus unavailable");
return { cluster, kpis: null };
}

const valuesRes = await prom.instantQuery(
values(
cluster.attributes.namespace,
@@ -208,7 +217,7 @@ export async function getKafkaClusterMetrics(
const start = new Date().getTime() - 1 * 60 * 60 * 1000;
const end = new Date();
const step = 60 * 1;
const seriesRes = await prom.rangeQuery(
const seriesRes = await prom!.rangeQuery(
cluster[metric](namespace, name),
start,
end,
@@ -227,7 +236,7 @@

try {
const cluster = await getKafkaCluster(clusterId);
if (!cluster) {
if (!cluster || !prom) {
return null;
}

@@ -271,7 +280,7 @@ export async function getKafkaTopicMetrics(
const start = new Date().getTime() - 1 * 60 * 60 * 1000;
const end = new Date();
const step = 60 * 1;
const seriesRes = await prom.rangeQuery(
const seriesRes = await prom!.rangeQuery(
topic[metric](namespace, name),
start,
end,
@@ -290,7 +299,7 @@

try {
const cluster = await getKafkaCluster(clusterId);
if (!cluster) {
if (!cluster || !prom) {
return null;
}

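Taken together, the actions.ts hunks make the Prometheus connection optional: the driver is only constructed when CONSOLE_METRICS_PROMETHEUS_URL is set, and getKafkaClusterKpis returns kpis: null instead of failing when it is not. A minimal sketch of that pattern, assuming the prometheus-query client already used by this file; the ClusterDetail/ClusterKpis shapes and the getKpis helper below are simplified stand-ins, not the console's real code:

import { PrometheusDriver } from "prometheus-query";

// Only build the client when the endpoint is configured; otherwise leave it undefined.
const prom = process.env.CONSOLE_METRICS_PROMETHEUS_URL
  ? new PrometheusDriver({ endpoint: process.env.CONSOLE_METRICS_PROMETHEUS_URL })
  : undefined;

// Simplified stand-ins for the console's real types.
type ClusterDetail = { id: string; attributes: { name: string; namespace: string } };
type ClusterKpis = Record<string, unknown>;

async function getKpis(
  cluster: ClusterDetail,
  query: string,
): Promise<{ cluster: ClusterDetail; kpis: ClusterKpis | null }> {
  // No Prometheus endpoint configured: degrade gracefully instead of throwing.
  if (!prom) {
    return { cluster, kpis: null };
  }
  const res = await prom.instantQuery(query);
  return { cluster, kpis: { result: res.result } };
}

Callers then treat kpis === null as "metrics unavailable" rather than as an error, which is what the page-level changes below rely on.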
14 changes: 7 additions & 7 deletions ui/app/[locale]/kafka/[kafkaId]/nodes/DistributionChart.tsx
@@ -25,24 +25,24 @@ import { useState } from "react";
export function DistributionChart({
data,
}: {
data: Record<string, { leaders: number; followers: number }>;
data: Record<string, { leaders?: number; followers?: number }>;
}) {
const t = useTranslations();
const [containerRef, width] = useChartWidth();
const [filter, setFilter] = useState<"all" | "leaders" | "followers">("all");
const allCount = Object.values(data).reduce(
(acc, v) => v.followers + v.leaders + acc,
(acc, v) => (v.followers ?? 0) + (v.leaders ?? 0) + acc,
0,
);
const leadersCount = Object.values(data).reduce(
(acc, v) => v.leaders + acc,
(acc, v) => (v.leaders ?? 0) + acc,
0,
);
const followersCount = Object.values(data).reduce(
(acc, v) => v.followers + acc,
(acc, v) => (v.followers ?? 0) + acc,
0,
);
return (
return allCount > 0 ? (
<Card className={"pf-v5-u-mb-lg"}>
<CardHeader>
<CardTitle>
@@ -172,7 +172,7 @@ export function DistributionChart({
name: `Broker ${node}`,
x: "x",
y: {
all: data.leaders + data.followers,
all: (data.leaders ?? 0) + (data.followers ?? 0),
leaders: data.leaders,
followers: data.followers,
}[filter || "all"],
@@ -185,5 +185,5 @@
</div>
</CardBody>
</Card>
);
) : null;
}
107 changes: 60 additions & 47 deletions ui/app/[locale]/kafka/[kafkaId]/nodes/NodesTable.tsx
@@ -1,5 +1,6 @@
"use client";

import { Number } from "@/components/Format/Number";
import { ResponsiveTable } from "@/components/Table";
import {
ChartDonutThreshold,
@@ -29,12 +30,12 @@ export type Node = {
id: number;
isLeader: boolean;
status: string;
followers: number;
leaders: number;
followers?: number;
leaders?: number;
rack?: string;
hostname?: string;
diskCapacity: number;
diskUsage: number;
diskCapacity?: number;
diskUsage?: number;
};

export function NodesTable({ nodes }: { nodes: Node[] }) {
@@ -109,7 +110,13 @@ export function NodesTable({ nodes }: { nodes: Node[] }) {
case "replicas":
return (
<Td key={key} dataLabel={"Total replicas"}>
{row.followers + row.leaders}
<Number
value={
row.followers && row.leaders
? row.followers + row.leaders
: undefined
}
/>
</Td>
);
case "rack":
@@ -147,49 +154,55 @@ export function NodesTable({ nodes }: { nodes: Node[] }) {
</Text>
</TextContent>
<div style={{ width: 350, height: 200 }}>
<ChartDonutThreshold
ariaDesc="Storage capacity"
ariaTitle={`Broker ${row.id} disk usage`}
constrainToVisibleArea={true}
data={[
{ x: "Warning at 60%", y: 60 },
{ x: "Danger at 90%", y: 90 },
]}
height={200}
labels={({ datum }) => (datum.x ? datum.x : null)}
padding={{
bottom: 0,
left: 10,
right: 150,
top: 0,
}}
width={350}
>
<ChartDonutUtilization
data={{
x: "Storage capacity",
y: (row.diskUsage / row.diskCapacity) * 100,
}}
labels={({ datum }) =>
datum.x
? `${datum.x}: ${format.number(datum.y / 100, {
{row.diskUsage !== undefined &&
row.diskCapacity !== undefined && (
<ChartDonutThreshold
ariaDesc="Storage capacity"
ariaTitle={`Broker ${row.id} disk usage`}
constrainToVisibleArea={true}
data={[
{ x: "Warning at 60%", y: 60 },
{ x: "Danger at 90%", y: 90 },
]}
height={200}
labels={({ datum }) => (datum.x ? datum.x : null)}
padding={{
bottom: 0,
left: 10,
right: 150,
top: 0,
}}
width={350}
>
<ChartDonutUtilization
data={{
x: "Storage capacity",
y: (row.diskUsage / row.diskCapacity) * 100,
}}
labels={({ datum }) =>
datum.x
? `${datum.x}: ${format.number(datum.y / 100, {
style: "percent",
})}`
: null
}
legendData={[
{ name: `Capacity: 80%` },
{ name: "Warning at 60%" },
{ name: "Danger at 90%" },
]}
legendOrientation="vertical"
title={`${format.number(
row.diskUsage / row.diskCapacity,
{
style: "percent",
})}`
: null
}
legendData={[
{ name: `Capacity: 80%` },
{ name: "Warning at 60%" },
{ name: "Danger at 90%" },
]}
legendOrientation="vertical"
title={`${format.number(row.diskUsage / row.diskCapacity, {
style: "percent",
})}`}
subTitle={`of ${formatBytes(row.diskCapacity)}`}
thresholds={[{ value: 60 }, { value: 90 }]}
/>
</ChartDonutThreshold>
},
)}`}
subTitle={`of ${formatBytes(row.diskCapacity)}`}
thresholds={[{ value: 60 }, { value: 90 }]}
/>
</ChartDonutThreshold>
)}
</div>
</FlexItem>
</Flex>
55 changes: 42 additions & 13 deletions ui/app/[locale]/kafka/[kafkaId]/nodes/page.tsx
@@ -5,30 +5,53 @@ import {
Node,
NodesTable,
} from "@/app/[locale]/kafka/[kafkaId]/nodes/NodesTable";
import { PageSection } from "@/libs/patternfly/react-core";
import { Alert, PageSection } from "@/libs/patternfly/react-core";
import { redirect } from "@/navigation";
import { getTranslations } from "next-intl/server";
import { Suspense } from "react";

function nodeMetric(metrics: Record<string, number> | undefined, nodeId: number): number {
return metrics ? (metrics[nodeId.toString()] ?? 0) : 0;
function nodeMetric(
metrics: Record<string, number> | undefined,
nodeId: number,
): number {
return metrics ? metrics[nodeId.toString()] ?? 0 : 0;
}

export default async function NodesPage({ params }: { params: KafkaParams }) {
export default function NodesPage({ params }: { params: KafkaParams }) {
return (
<Suspense fallback={null}>
<ConnectedNodes params={params} />
</Suspense>
);
}

async function ConnectedNodes({ params }: { params: KafkaParams }) {
const t = await getTranslations();
const res = await getKafkaClusterKpis(params.kafkaId);
if (!res) {
return redirect("/");
}
const { cluster, kpis } = res;
if (!cluster) {
redirect("/");
return null;
}

const nodes: Node[] = cluster.attributes.nodes.map((node) => {
const status = nodeMetric(kpis.broker_state, node.id) === 3 ? "Stable" : "Unstable";
const leaders = nodeMetric(kpis.leader_count?.byNode, node.id);
const followers = nodeMetric(kpis.replica_count?.byNode, node.id) - leaders;
const diskCapacity = nodeMetric(kpis.volume_stats_capacity_bytes?.byNode, node.id);
const diskUsage = nodeMetric(kpis.volume_stats_used_bytes?.byNode, node.id);
const status = kpis
? nodeMetric(kpis.broker_state, node.id) === 3
? "Stable"
: "Unstable"
: "Unknown";
const leaders = kpis
? nodeMetric(kpis.leader_count?.byNode, node.id)
: undefined;
const followers =
kpis && leaders
? nodeMetric(kpis.replica_count?.byNode, node.id) - leaders
: undefined;
const diskCapacity = kpis
? nodeMetric(kpis.volume_stats_capacity_bytes?.byNode, node.id)
: undefined;
const diskUsage = kpis
? nodeMetric(kpis.volume_stats_used_bytes?.byNode, node.id)
: undefined;
return {
id: node.id,
status,
@@ -50,6 +73,12 @@

return (
<>
{!kpis && (
<PageSection>
<Alert title={t("nodes.kpis_offline")} variant={"warning"} />
</PageSection>
)}

<PageSection isFilled>
<DistributionChart data={data} />
<NodesTable nodes={nodes} />
21 changes: 19 additions & 2 deletions ui/app/[locale]/kafka/[kafkaId]/overview/page.tsx
@@ -44,10 +44,24 @@ async function ConnectedClusterCard({
data,
consumerGroups,
}: {
data: Promise<{ cluster: ClusterDetail; kpis: ClusterKpis } | null>;
data: Promise<{ cluster: ClusterDetail; kpis: ClusterKpis | null } | null>;
consumerGroups: Promise<ConsumerGroupsResponse>;
}) {
const res = await data;
if (!res?.kpis) {
return (
<ClusterCard
isLoading={false}
status={res?.cluster.attributes.status || "n/a"}
messages={[]}
name={res?.cluster.attributes.name || "n/a"}
consumerGroups={undefined}
brokersOnline={undefined}
brokersTotal={undefined}
kafkaVersion={res?.cluster.attributes.kafkaVersion || "n/a"}
/>
);
}
const groupCount = await consumerGroups.then(
(grpResp) => grpResp.meta.page.total ?? 0,
);
@@ -86,9 +100,12 @@
async function ConnectedTopicsPartitionsCard({
data,
}: {
data: Promise<{ cluster: ClusterDetail; kpis: ClusterKpis } | null>;
data: Promise<{ cluster: ClusterDetail; kpis: ClusterKpis | null } | null>;
}) {
const res = await data;
if (!res?.kpis) {
return null;
}
const topicsTotal = res?.kpis.total_topics || 0;
const topicsUnderreplicated = res?.kpis.underreplicated_topics || 0;
return (
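Because kpis is now nullable all the way down, the overview components check it before dereferencing and fall back to placeholder values ("n/a", undefined counts). A rough consumer-side sketch of that guard; the helper and its ClusterKpis shape are illustrative and not part of this PR, though total_topics and underreplicated_topics are the fields the page reads:

// Illustrative helper: derive topic counts only when KPI data is available.
type ClusterKpis = { total_topics?: number; underreplicated_topics?: number };

function topicCounts(
  kpis: ClusterKpis | null,
): { total: number; underreplicated: number } | null {
  // With Prometheus offline the KPIs are null, so return null and let the
  // caller render nothing (or an offline notice) instead of reading off null.
  if (!kpis) {
    return null;
  }
  return {
    total: kpis.total_topics ?? 0,
    underreplicated: kpis.underreplicated_topics ?? 0,
  };
}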