MAESTRO: add red analysis progress tests

Mariusz Banach
2026-02-18 02:02:25 +01:00
parent 2670970b24
commit 61a9da0c4e
3 changed files with 402 additions and 1 deletion


@@ -33,7 +33,7 @@ Frontend uses `fetch` with `ReadableStream` reader (not native `EventSource`, which only supports GET)
- [x] T025 [US3] Write failing tests (TDD Red) in `backend/tests/api/test_analysis_router.py` — happy path (valid headers → 200 with SSE progress + result), error path (empty → 400), oversized (>1MB → 413), partial failure (some tests error → mixed results per FR-25), timeout (30s limit per NFR-13, partial results per NFR-14)
- [x] T026 [US3] Create `backend/app/schemas/analysis.py` (request/response schemas) and `backend/app/routers/analysis.py` — FastAPI router with `POST /api/analyse` using SSE for progress streaming. Accepts headers string + config (test IDs, resolve, decode-all). Invokes `HeaderAnalyzer` with 30s timeout (NFR-13). Streams progress events then final result. Sanitises input (NFR-09), validates size ≤1MB (NFR-10). Stateless — no job_id, no in-memory state (Assumption 3). Register router in `backend/app/main.py`. Verify `test_analysis_router.py` passes (TDD Green)
- [x] T027 [US3] Write failing tests (TDD Red) in `frontend/src/__tests__/ProgressIndicator.test.tsx` (render at various states, timeout display) and `frontend/src/__tests__/useAnalysis.test.ts` (hook state transitions, SSE handling)
- [ ] T028 [P] [US3] Create `frontend/src/hooks/useAnalysis.ts` — custom hook managing analysis lifecycle. Submits to `POST /api/analyse` via API client, consumes SSE stream for real-time progress (no polling). States: idle, submitting, analysing (with progress), complete, error, timeout. Returns: `submit()`, `cancel()`, `progress`, `result`, `error`, `status`. Verify `useAnalysis.test.ts` passes (TDD Green)
- [ ] T029 [P] [US3] Create `frontend/src/components/ProgressIndicator.tsx` — progress bar with percentage, current test name (FR-22), countdown timer from 30s (NFR-13), elapsed time. Colour-coded: green progressing, amber near timeout, red on timeout. FontAwesome spinner. Timeout notification listing incomplete tests (NFR-14). Verify `ProgressIndicator.test.tsx` passes (TDD Green)
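The hunk context above notes that the frontend reads the SSE stream with `fetch` plus a `ReadableStream` reader, because native `EventSource` cannot issue the `POST /api/analyse` request with a body. A minimal sketch of that reader loop follows; the endpoint path matches T026, but the `SseEvent` shape and the line-based frame parsing are illustrative assumptions, not the repository's actual `api-client` implementation.

```typescript
// Sketch only: consume SSE from a POST endpoint with fetch + ReadableStream.
// The SseEvent shape and the "event:"/"data:" parsing are assumptions; the
// real api-client may buffer and dispatch events differently.
type SseEvent = { event: string; data: unknown; raw: string };

async function streamAnalysis(
  body: unknown,
  onEvent: (e: SseEvent) => void,
  signal?: AbortSignal,
): Promise<void> {
  const response = await fetch("/api/analyse", {
    method: "POST",
    headers: { "Content-Type": "application/json", Accept: "text/event-stream" },
    body: JSON.stringify(body),
    signal,
  });
  if (!response.ok || !response.body) {
    throw new Error(`Analysis request failed: ${response.status}`);
  }
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    // SSE frames end with a blank line; parse each complete frame.
    let boundary = buffer.indexOf("\n\n");
    while (boundary !== -1) {
      const raw = buffer.slice(0, boundary);
      buffer = buffer.slice(boundary + 2);
      const event = /^event: (.+)$/m.exec(raw)?.[1] ?? "message";
      const data = /^data: (.+)$/m.exec(raw)?.[1];
      if (data !== undefined) onEvent({ event, data: JSON.parse(data), raw });
      boundary = buffer.indexOf("\n\n");
    }
  }
}
```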

frontend/src/__tests__/ProgressIndicator.test.tsx

@@ -0,0 +1,124 @@
import type { ReactElement } from "react";
import { act } from "react-dom/test-utils";
import { createRoot } from "react-dom/client";
import { afterEach, describe, expect, it } from "vitest";

import ProgressIndicator from "../components/ProgressIndicator";
import type { AnalysisProgress } from "../types/analysis";

type RenderResult = {
  container: HTMLDivElement;
};
const cleanups: Array<() => void> = [];

// Minimal render helper (no @testing-library): mounts into a fresh container
// and queues the matching unmount + DOM cleanup for afterEach.
const render = (ui: ReactElement): RenderResult => {
  const container = document.createElement("div");
  document.body.appendChild(container);
  const root = createRoot(container);
  act(() => {
    root.render(ui);
  });
  cleanups.push(() => {
    act(() => {
      root.unmount();
    });
    container.remove();
  });
  return { container };
};

const getByTestId = (container: HTMLElement, testId: string): HTMLElement => {
  const element = container.querySelector(`[data-testid="${testId}"]`);
  if (!element) {
    throw new Error(`Expected element ${testId} to be rendered.`);
  }
  return element as HTMLElement;
};

// Mid-run snapshot: test 2 of 4, 12s elapsed against the 30s budget.
const baseProgress: AnalysisProgress = {
  currentIndex: 1,
  totalTests: 4,
  currentTest: "SpamAssassin Rule Hits",
  elapsedMs: 12000,
  percentage: 50,
};

afterEach(() => {
  while (cleanups.length > 0) {
    const cleanup = cleanups.pop();
    if (cleanup) {
      cleanup();
    }
  }
});
describe("ProgressIndicator", () => {
it("renders progress details while analysing", () => {
const { container } = render(
<ProgressIndicator status="analysing" progress={baseProgress} timeoutSeconds={30} />,
);
const indicator = getByTestId(container, "progress-indicator");
expect(indicator.getAttribute("data-status")).toBe("analysing");
expect(indicator.getAttribute("data-variant")).toBe("normal");
expect(getByTestId(container, "progress-percentage").textContent ?? "").toMatch(
/50%/,
);
expect(getByTestId(container, "progress-current-test").textContent ?? "").toMatch(
/SpamAssassin Rule Hits/,
);
expect(getByTestId(container, "progress-elapsed").textContent ?? "").toMatch(/12s/);
expect(getByTestId(container, "progress-remaining").textContent ?? "").toMatch(/18s/);
expect(getByTestId(container, "progress-spinner")).toBeTruthy();
const progressbar = container.querySelector('[role="progressbar"]');
expect(progressbar?.getAttribute("aria-valuenow")).toBe("50");
expect(progressbar?.getAttribute("aria-valuemax")).toBe("100");
});
it("uses warning styling when near timeout", () => {
const nearTimeout: AnalysisProgress = {
...baseProgress,
elapsedMs: 28000,
percentage: 93,
};
const { container } = render(
<ProgressIndicator status="analysing" progress={nearTimeout} timeoutSeconds={30} />,
);
const indicator = getByTestId(container, "progress-indicator");
expect(indicator.getAttribute("data-variant")).toBe("warning");
expect(getByTestId(container, "progress-remaining").textContent ?? "").toMatch(/2s/);
});
it("shows a timeout notification with incomplete tests", () => {
const { container } = render(
<ProgressIndicator
status="timeout"
progress={baseProgress}
timeoutSeconds={30}
incompleteTests={["Mimecast Fingerprint", "Proofpoint TAP"]}
/>,
);
const indicator = getByTestId(container, "progress-indicator");
expect(indicator.getAttribute("data-status")).toBe("timeout");
expect(indicator.getAttribute("data-variant")).toBe("timeout");
const alert = container.querySelector('[role="alert"]');
expect(alert).not.toBeNull();
expect(alert?.textContent ?? "").toMatch(/timeout/i);
expect(getByTestId(container, "timeout-tests").textContent ?? "").toMatch(
/Mimecast Fingerprint/,
);
expect(getByTestId(container, "timeout-tests").textContent ?? "").toMatch(
/Proofpoint TAP/,
);
});
});
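T029 is still open, so `ProgressIndicator` does not exist yet and this file fails by design (TDD Red). The assertions fix the component's observable contract: a `data-status` attribute, a `data-variant` of `normal`/`warning`/`timeout`, an ARIA progressbar, and `s`-suffixed elapsed/remaining counters. A hedged skeleton that would satisfy them might look like the sketch below; the 90%-of-budget warning threshold and the markup details are assumptions, only the test-visible attributes are pinned by the tests above.

```tsx
// Hypothetical skeleton for the component under test (T029 is unimplemented).
// Props and data-testids mirror the test expectations; the warning threshold
// and FontAwesome class names are assumed.
import type { AnalysisProgress } from "../types/analysis";

type Props = {
  status: "analysing" | "timeout";
  progress: AnalysisProgress;
  timeoutSeconds: number;
  incompleteTests?: string[];
};

const variantFor = (status: Props["status"], elapsedMs: number, timeoutSeconds: number) => {
  if (status === "timeout") return "timeout";
  // Assumption: switch to amber once 90% of the time budget is spent.
  return elapsedMs >= timeoutSeconds * 1000 * 0.9 ? "warning" : "normal";
};

export default function ProgressIndicator({
  status,
  progress,
  timeoutSeconds,
  incompleteTests = [],
}: Props) {
  const elapsedS = Math.floor(progress.elapsedMs / 1000);
  const remainingS = Math.max(0, timeoutSeconds - elapsedS);
  return (
    <div
      data-testid="progress-indicator"
      data-status={status}
      data-variant={variantFor(status, progress.elapsedMs, timeoutSeconds)}
    >
      <div role="progressbar" aria-valuenow={progress.percentage} aria-valuemin={0} aria-valuemax={100} />
      <span data-testid="progress-percentage">{progress.percentage}%</span>
      <span data-testid="progress-current-test">{progress.currentTest}</span>
      <span data-testid="progress-elapsed">{elapsedS}s</span>
      <span data-testid="progress-remaining">{remainingS}s</span>
      <span data-testid="progress-spinner" className="fa fa-spinner fa-spin" />
      {status === "timeout" && (
        <div role="alert">
          Analysis hit the timeout before all tests completed.
          <ul data-testid="timeout-tests">
            {incompleteTests.map((name) => (
              <li key={name}>{name}</li>
            ))}
          </ul>
        </div>
      )}
    </div>
  );
}
```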

frontend/src/__tests__/useAnalysis.test.ts

@@ -0,0 +1,277 @@
import type { ReactElement } from "react";
import { useEffect } from "react";
import { act } from "react-dom/test-utils";
import { createRoot } from "react-dom/client";
import { afterEach, describe, expect, it, vi } from "vitest";

import { apiClient } from "../lib/api-client";
import type { AnalysisConfig, AnalysisProgress, AnalysisReport } from "../types/analysis";
import useAnalysis from "../hooks/useAnalysis";

type RenderResult = {
  container: HTMLDivElement;
};

type AnalysisRequest = {
  headers: string;
  config: AnalysisConfig;
};

type HarnessProps = {
  request: AnalysisRequest;
  onStatusChange?: (status: string) => void;
};
const cleanups: Array<() => void> = [];

const render = (ui: ReactElement): RenderResult => {
  const container = document.createElement("div");
  document.body.appendChild(container);
  const root = createRoot(container);
  act(() => {
    root.render(ui);
  });
  cleanups.push(() => {
    act(() => {
      root.unmount();
    });
    container.remove();
  });
  return { container };
};

// Yields to the task queue so pending promise callbacks settle.
const flushPromises = async (): Promise<void> => {
  await new Promise((resolve) => setTimeout(resolve, 0));
};

const getByTestId = (container: HTMLElement, testId: string): HTMLElement => {
  const element = container.querySelector(`[data-testid="${testId}"]`);
  if (!element) {
    throw new Error(`Expected element ${testId} to be rendered.`);
  }
  return element as HTMLElement;
};

const baseConfig: AnalysisConfig = {
  testIds: [101],
  resolve: true,
  decodeAll: false,
};

const baseRequest: AnalysisRequest = {
  headers: "X-Test: value",
  config: baseConfig,
};

const progressEvent: AnalysisProgress = {
  currentIndex: 0,
  totalTests: 3,
  currentTest: "SpamAssassin Rule Hits",
  elapsedMs: 500,
  percentage: 33,
};

// A clean run: every test passes within the 30s budget.
const completeReport: AnalysisReport = {
  results: [],
  hopChain: [],
  securityAppliances: [],
  metadata: {
    totalTests: 3,
    passedTests: 3,
    failedTests: 0,
    skippedTests: 0,
    elapsedMs: 1500,
    timedOut: false,
    incompleteTests: [],
  },
};

// A timed-out run: partial results with the incomplete tests listed (NFR-14).
const timeoutReport: AnalysisReport = {
  results: [],
  hopChain: [],
  securityAppliances: [],
  metadata: {
    totalTests: 5,
    passedTests: 2,
    failedTests: 1,
    skippedTests: 0,
    elapsedMs: 30000,
    timedOut: true,
    incompleteTests: ["Mimecast Fingerprint", "Proofpoint TAP"],
  },
};
// Test harness: exposes the hook's state through the DOM and records every
// status transition via onStatusChange.
const AnalysisHarness = ({ request, onStatusChange }: HarnessProps) => {
  const { status, progress, result, error, submit, cancel } = useAnalysis();
  useEffect(() => {
    onStatusChange?.(status);
  }, [status, onStatusChange]);
  return (
    <div>
      <span data-testid="status">{status}</span>
      <span data-testid="current-test">{progress?.currentTest ?? ""}</span>
      <span data-testid="percentage">{progress?.percentage ?? ""}</span>
      <span data-testid="result-total">{result?.metadata.totalTests ?? ""}</span>
      <span data-testid="error">{error ?? ""}</span>
      <button data-testid="submit" onClick={() => submit(request)}>
        Submit
      </button>
      <button data-testid="cancel" onClick={() => cancel()}>
        Cancel
      </button>
    </div>
  );
};

afterEach(() => {
  while (cleanups.length > 0) {
    const cleanup = cleanups.pop();
    if (cleanup) {
      cleanup();
    }
  }
  vi.restoreAllMocks();
});
describe("useAnalysis", () => {
it("starts in the idle state", () => {
const { container } = render(<AnalysisHarness request={baseRequest} />);
expect(getByTestId(container, "status").textContent).toBe("idle");
expect(getByTestId(container, "current-test").textContent).toBe("");
expect(getByTestId(container, "error").textContent).toBe("");
});
it("submits analysis and handles SSE progress + result", async () => {
const streamSpy = vi.spyOn(apiClient, "stream").mockImplementation(
async (_path, options) => {
options.onEvent({ event: "progress", data: progressEvent, raw: "" });
options.onEvent({ event: "result", data: completeReport, raw: "" });
},
);
const statuses: string[] = [];
const { container } = render(
<AnalysisHarness request={baseRequest} onStatusChange={(status) => statuses.push(status)} />,
);
act(() => {
getByTestId(container, "submit").dispatchEvent(
new MouseEvent("click", { bubbles: true }),
);
});
await act(async () => {
await flushPromises();
});
expect(streamSpy).toHaveBeenCalledWith(
"/api/analyse",
expect.objectContaining({
body: baseRequest,
onEvent: expect.any(Function),
signal: expect.any(AbortSignal),
}),
);
expect(statuses).toEqual(["idle", "submitting", "analysing", "complete"]);
expect(getByTestId(container, "current-test").textContent).toMatch(
/SpamAssassin Rule Hits/,
);
expect(getByTestId(container, "percentage").textContent).toBe("33");
expect(getByTestId(container, "result-total").textContent).toBe("3");
});
it("marks the analysis as timed out when the report indicates timeout", async () => {
vi.spyOn(apiClient, "stream").mockImplementation(async (_path, options) => {
options.onEvent({ event: "result", data: timeoutReport, raw: "" });
});
const statuses: string[] = [];
const { container } = render(
<AnalysisHarness request={baseRequest} onStatusChange={(status) => statuses.push(status)} />,
);
act(() => {
getByTestId(container, "submit").dispatchEvent(
new MouseEvent("click", { bubbles: true }),
);
});
await act(async () => {
await flushPromises();
});
expect(statuses).toEqual(["idle", "submitting", "timeout"]);
expect(getByTestId(container, "result-total").textContent).toBe("5");
});
it("surfaces stream errors and moves to error state", async () => {
vi.spyOn(apiClient, "stream").mockRejectedValue(new Error("Network error"));
const statuses: string[] = [];
const { container } = render(
<AnalysisHarness request={baseRequest} onStatusChange={(status) => statuses.push(status)} />,
);
act(() => {
getByTestId(container, "submit").dispatchEvent(
new MouseEvent("click", { bubbles: true }),
);
});
await act(async () => {
await flushPromises();
});
expect(statuses).toEqual(["idle", "submitting", "error"]);
expect(getByTestId(container, "error").textContent ?? "").toMatch(/Network error/);
});
it("cancels an in-flight analysis request", async () => {
let abortSignal: AbortSignal | undefined;
vi.spyOn(apiClient, "stream").mockImplementation(async (_path, options) => {
abortSignal = options.signal;
await new Promise<void>((resolve) => {
if (abortSignal?.aborted) {
resolve();
return;
}
abortSignal?.addEventListener("abort", () => resolve(), { once: true });
});
});
const statuses: string[] = [];
const { container } = render(
<AnalysisHarness request={baseRequest} onStatusChange={(status) => statuses.push(status)} />,
);
act(() => {
getByTestId(container, "submit").dispatchEvent(
new MouseEvent("click", { bubbles: true }),
);
});
await act(async () => {
await flushPromises();
});
act(() => {
getByTestId(container, "cancel").dispatchEvent(
new MouseEvent("click", { bubbles: true }),
);
});
await act(async () => {
await flushPromises();
});
expect(abortSignal?.aborted).toBe(true);
expect(statuses).toContain("idle");
});
});
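T028 is likewise open, so these hook tests are the failing half of the TDD pair. The asserted status sequences imply a small state machine: `submitting` on `submit()`, `analysing` on the first progress event, `complete` or `timeout` depending on the final report's `metadata.timedOut`, `error` when the stream rejects, and back to `idle` on `cancel()`. A hedged sketch of that shape follows; `apiClient.stream`'s signature is inferred from the mocks above, and React 18's update batching can collapse back-to-back status changes into one render, which a real implementation may need to account for.

```tsx
// Hypothetical shape for the hook under test (T028 is unimplemented);
// derived from the assertions above, not from existing source.
import { useCallback, useRef, useState } from "react";
import { apiClient } from "../lib/api-client";
import type { AnalysisProgress, AnalysisReport } from "../types/analysis";

type Status = "idle" | "submitting" | "analysing" | "complete" | "timeout" | "error";

export default function useAnalysis() {
  const [status, setStatus] = useState<Status>("idle");
  const [progress, setProgress] = useState<AnalysisProgress | null>(null);
  const [result, setResult] = useState<AnalysisReport | null>(null);
  const [error, setError] = useState<string | null>(null);
  const controllerRef = useRef<AbortController | null>(null);

  const submit = useCallback(async (request: unknown) => {
    const controller = new AbortController();
    controllerRef.current = controller;
    setStatus("submitting");
    setError(null);
    try {
      await apiClient.stream("/api/analyse", {
        body: request,
        signal: controller.signal,
        onEvent: ({ event, data }: { event: string; data: unknown }) => {
          if (event === "progress") {
            setProgress(data as AnalysisProgress);
            setStatus("analysing");
          } else if (event === "result") {
            const report = data as AnalysisReport;
            setResult(report);
            // The final report carries the timeout verdict (NFR-13/NFR-14).
            setStatus(report.metadata.timedOut ? "timeout" : "complete");
          }
        },
      });
    } catch (err) {
      // An aborted request is a cancellation, not an error.
      if (!controller.signal.aborted) {
        setError(err instanceof Error ? err.message : String(err));
        setStatus("error");
      }
    }
  }, []);

  const cancel = useCallback(() => {
    controllerRef.current?.abort();
    setStatus("idle");
  }, []);

  return { status, progress, result, error, submit, cancel };
}
```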