MAESTRO: add analysis router SSE tests
# Phase 05: US3 — Analysis Execution & Progress

This phase implements the core analysis pipeline: the backend `POST /api/analyse` endpoint with SSE progress streaming, and the frontend `useAnalysis` hook and progress indicator. After this phase, users can submit headers, see real-time progress with a countdown timer, and receive analysis results. Partial failures and 30-second timeouts are handled gracefully, following the TDD Red-Green approach throughout.

## Spec Kit Context

- **Feature:** 1-web-header-analyzer
- **Specification:** .specify/specs/1-web-header-analyzer/spec.md (FR-06, FR-10, FR-16, FR-22, FR-25, NFR-09, NFR-10, NFR-13, NFR-14)
- **Plan:** .specify/specs/1-web-header-analyzer/plan.md (SSE streaming section)
- **Tasks:** .specify/specs/1-web-header-analyzer/tasks.md
- **API Contract:** .specify/specs/1-web-header-analyzer/contracts/api.yaml (`POST /api/analyse`)
- **User Story:** US3 — Analysis Execution & Progress (Scenario 1, steps 5–6)
- **Constitution:** .specify/memory/constitution.md (TDD: P6, UX: P7)

## Dependencies

- **Requires Phase 02** completed (engine: HeaderAnalyzer, ScannerRegistry, HeaderParser)
- **Requires Phase 03** completed (HeaderInput, AnalyseButton for triggering)
- **Requires Phase 04** completed (AnalysisControls for config)

## SSE Streaming Design

The `POST /api/analyse` endpoint uses Server-Sent Events:

1. Client sends headers + config via POST
2. Server validates input, starts analysis
3. Server streams `event: progress` messages: `{currentIndex, totalTests, currentTest, elapsedMs, percentage}`
4. Server sends `event: result` with the final `AnalysisResult`
5. Server closes the connection
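
For reference, a single progress frame followed by the final result frame looks like this on the wire (values illustrative; field names match the router tests in this commit):

```text
event: progress
data: {"currentIndex": 0, "totalTests": 2, "currentTest": "Test A", "elapsedMs": 12, "percentage": 0}

event: result
data: {"results": [...], "metadata": {"totalTests": 2, "timedOut": false}}
```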

The frontend uses `fetch` with a `ReadableStream` reader rather than the native `EventSource`, which only supports GET and therefore cannot carry the POST body.

## Tasks

- [x] T025 [US3] Write failing tests (TDD Red) in `backend/tests/api/test_analysis_router.py` — happy path (valid headers → 200 with SSE progress + result), error path (empty → 400), oversized (>1MB → 413), partial failure (some tests error → mixed results per FR-25), timeout (30s limit per NFR-13, partial results per NFR-14)
- [ ] T026 [US3] Create `backend/app/schemas/analysis.py` (request/response schemas) and `backend/app/routers/analysis.py` — FastAPI router with `POST /api/analyse` using SSE for progress streaming (see the sketch after this list). Accepts headers string + config (test IDs, resolve, decode-all). Invokes `HeaderAnalyzer` with 30s timeout (NFR-13). Streams progress events then final result. Sanitises input (NFR-09), validates size ≤1MB (NFR-10). Stateless — no job_id, no in-memory state (Assumption 3). Register router in `backend/app/main.py`. Verify `test_analysis_router.py` passes (TDD Green)
- [ ] T027 [US3] Write failing tests (TDD Red) in `frontend/src/__tests__/ProgressIndicator.test.tsx` (render at various states, timeout display) and `frontend/src/__tests__/useAnalysis.test.ts` (hook state transitions, SSE handling)
- [ ] T028 [P] [US3] Create `frontend/src/hooks/useAnalysis.ts` — custom hook managing the analysis lifecycle. Submits to `POST /api/analyse` via the API client, consumes the SSE stream for real-time progress (no polling). States: idle, submitting, analysing (with progress), complete, error, timeout. Returns: `submit()`, `cancel()`, `progress`, `result`, `error`, `status`. Verify `useAnalysis.test.ts` passes (TDD Green)
- [ ] T029 [P] [US3] Create `frontend/src/components/ProgressIndicator.tsx` — progress bar with percentage, current test name (FR-22), countdown timer from 30s (NFR-13), elapsed time. Colour-coded: green while progressing, amber near timeout, red on timeout. FontAwesome spinner. Timeout notification listing incomplete tests (NFR-14). Verify `ProgressIndicator.test.tsx` passes (TDD Green)
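
A minimal sketch of the router shape T026 describes, assuming hypothetical schema names (`AnalyseRequest`, `AnalyseConfig`) and omitting the real `HeaderAnalyzer` wiring — the committed implementation may differ:

```python
# Sketch only: schema and field names below are assumptions, not the final API.
import json
import time
from typing import Any, Iterator

from fastapi import APIRouter, HTTPException
from fastapi.responses import StreamingResponse
from pydantic import BaseModel

MAX_BYTES = 1_048_576  # NFR-10: reject payloads over 1 MB

router = APIRouter()


class AnalyseConfig(BaseModel):
    testIds: list[int]
    resolve: bool = False
    decodeAll: bool = False


class AnalyseRequest(BaseModel):
    headers: str
    config: AnalyseConfig


def _sse(event: str, data: dict[str, Any]) -> str:
    # One SSE frame: event name, one JSON data line, blank-line terminator.
    return f"event: {event}\ndata: {json.dumps(data)}\n\n"


@router.post("/api/analyse")
async def analyse(request: AnalyseRequest) -> StreamingResponse:
    if not request.headers.strip():
        raise HTTPException(status_code=400, detail="headers must not be empty")
    if len(request.headers.encode("utf-8")) > MAX_BYTES:
        raise HTTPException(status_code=413, detail="headers exceed the 1MB limit")

    def event_stream() -> Iterator[str]:
        started = time.monotonic()
        total = len(request.config.testIds)
        for index, test_id in enumerate(request.config.testIds):
            # Real implementation: run scanner `test_id` here, honouring the
            # 30s budget (NFR-13) and collecting per-test errors (FR-25).
            yield _sse("progress", {
                "currentIndex": index,
                "totalTests": total,
                "currentTest": f"test-{test_id}",
                "elapsedMs": (time.monotonic() - started) * 1000.0,
                "percentage": round(100 * index / total) if total else 0,
            })
        yield _sse("result", {"results": [], "metadata": {"totalTests": total}})

    return StreamingResponse(event_stream(), media_type="text/event-stream")
```

The real router must additionally translate a `TimeoutError` from `HeaderAnalyzer` into a final `result` event with `timedOut: true` and the incomplete test names, as the timeout test below expects.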

## Completion

- [ ] `pytest backend/tests/api/test_analysis_router.py` passes (all paths: happy, error, oversized, partial failure, timeout)
- [ ] All vitest tests pass: `npx vitest run src/__tests__/ProgressIndicator.test.tsx src/__tests__/useAnalysis.test.ts`
- [ ] Submitting headers triggers backend analysis with SSE streaming
- [ ] Progress bar updates in real time, showing the current test name and percentage
- [ ] Countdown timer counts down from 30 seconds
- [ ] Partial failures show inline error indicators per FR-25
- [ ] Timeout at 30s displays partial results with a notification listing incomplete tests
- [ ] Empty input returns 400; oversized input (>1MB) returns 413
- [ ] Linting passes on both backend and frontend
- [ ] Run `/speckit.analyze` to verify consistency

backend/tests/api/test_analysis_router.py (new file, 232 lines)

from __future__ import annotations

import json
from pathlib import Path
from typing import Any

import pytest
from httpx import ASGITransport, AsyncClient

from app.engine.analyzer import HeaderAnalyzer
from app.engine.models import AnalysisResult, ReportMetadata, Severity, TestResult, TestStatus
from app.main import app

FIXTURES_DIR = Path(__file__).resolve().parents[1] / "fixtures"


def _parse_sse_events(raw: str) -> list[dict[str, Any]]:
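    """Parse raw SSE text into {event, data, raw} dicts, JSON-decoding data lines when possible."""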
    normalized = raw.replace("\r\n", "\n").strip()
    if not normalized:
        return []

    events: list[dict[str, Any]] = []
    for block in normalized.split("\n\n"):
        if not block.strip():
            continue
        event = "message"
        data_lines: list[str] = []
        for line in block.split("\n"):
            if not line or line.startswith(":"):
                continue
            if line.startswith("event:"):
                event = line.split(":", 1)[1].strip() or "message"
                continue
            if line.startswith("data:"):
                data_lines.append(line.split(":", 1)[1].lstrip())
        if not data_lines:
            continue
        raw_data = "\n".join(data_lines)
        try:
            data = json.loads(raw_data)
        except json.JSONDecodeError:
            data = raw_data
        events.append({"event": event, "data": data, "raw": raw_data})
    return events


async def _collect_stream_events(client: AsyncClient, payload: dict[str, Any]) -> list[dict[str, Any]]:
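    """POST the payload, assert an event-stream response, and return the parsed SSE events."""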
    async with client.stream(
        "POST",
        "/api/analyse",
        json=payload,
        headers={"Accept": "text/event-stream"},
    ) as response:
        assert response.status_code == 200
        content_type = response.headers.get("content-type", "")
        assert content_type.startswith("text/event-stream")
        chunks: list[str] = []
        async for chunk in response.aiter_text():
            chunks.append(chunk)
    return _parse_sse_events("".join(chunks))


@pytest.mark.anyio
async def test_analyse_streams_progress_and_result() -> None:
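    """Happy path: valid headers yield progress events followed by exactly one result event."""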
    raw_headers = (FIXTURES_DIR / "sample_headers.txt").read_text(encoding="utf-8")
    payload = {
        "headers": raw_headers,
        "config": {"testIds": [12, 13], "resolve": False, "decodeAll": False},
    }

    async with AsyncClient(
        transport=ASGITransport(app=app),
        base_url="http://test",
    ) as client:
        events = await _collect_stream_events(client, payload)

    progress_events = [event for event in events if event["event"] == "progress"]
    result_events = [event for event in events if event["event"] == "result"]

    assert progress_events
    assert len(result_events) == 1

    progress_payload = progress_events[0]["data"]
    assert progress_payload["currentIndex"] == 0
    assert progress_payload["totalTests"] == 2
    assert progress_payload["currentTest"]
    assert progress_payload["elapsedMs"] >= 0
    assert progress_payload["percentage"] >= 0

    result_payload = result_events[0]["data"]
    assert isinstance(result_payload["results"], list)
    assert result_payload["metadata"]["totalTests"] == 2
    assert (
        result_payload["metadata"]["passedTests"]
        + result_payload["metadata"]["failedTests"]
        + result_payload["metadata"]["skippedTests"]
    ) == 2


@pytest.mark.anyio
async def test_analyse_rejects_empty_headers() -> None:
    payload = {"headers": "", "config": {"testIds": [], "resolve": False, "decodeAll": False}}

    async with AsyncClient(
        transport=ASGITransport(app=app),
        base_url="http://test",
    ) as client:
        response = await client.post("/api/analyse", json=payload)

    assert response.status_code == 400
    body = response.json()
    assert "error" in body or "detail" in body


@pytest.mark.anyio
async def test_analyse_rejects_oversized_headers() -> None:
    payload = {
        "headers": "a" * 1_048_577,
        "config": {"testIds": [], "resolve": False, "decodeAll": False},
    }

    async with AsyncClient(
        transport=ASGITransport(app=app),
        base_url="http://test",
    ) as client:
        response = await client.post("/api/analyse", json=payload)

    assert response.status_code == 413
    body = response.json()
    assert "error" in body or "detail" in body


@pytest.mark.anyio
async def test_analyse_stream_includes_partial_failures(monkeypatch: pytest.MonkeyPatch) -> None:
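    """FR-25: a failing scanner surfaces as a per-test error entry, not a failed request."""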
    raw_headers = (FIXTURES_DIR / "sample_headers.txt").read_text(encoding="utf-8")

    def fake_analyze(
        self: HeaderAnalyzer,
        request: Any,
        progress_callback: Any | None = None,
    ) -> AnalysisResult:
        if progress_callback:
            progress_callback(0, 2, "Test A")
            progress_callback(1, 2, "Test B")
        return AnalysisResult(
            results=[
                TestResult(
                    test_id=12,
                    test_name="Test A",
                    header_name="X-Test",
                    header_value="value",
                    analysis="ok",
                    description="",
                    severity=Severity.clean,
                    status=TestStatus.success,
                    error=None,
                ),
                TestResult(
                    test_id=13,
                    test_name="Test B",
                    header_name="X-Test",
                    header_value="value",
                    analysis="boom",
                    description="",
                    severity=Severity.info,
                    status=TestStatus.error,
                    error="Scanner failed",
                ),
            ],
            metadata=ReportMetadata(
                total_tests=2,
                passed_tests=1,
                failed_tests=1,
                skipped_tests=0,
                elapsed_ms=5.0,
                timed_out=False,
                incomplete_tests=["Test B"],
            ),
        )

    monkeypatch.setattr(HeaderAnalyzer, "analyze", fake_analyze)

    payload = {
        "headers": raw_headers,
        "config": {"testIds": [12, 13], "resolve": False, "decodeAll": False},
    }

    async with AsyncClient(
        transport=ASGITransport(app=app),
        base_url="http://test",
    ) as client:
        events = await _collect_stream_events(client, payload)

    result_payload = next(event["data"] for event in events if event["event"] == "result")
    statuses = [item["status"] for item in result_payload["results"]]
    assert "error" in statuses
    error_entries = [item for item in result_payload["results"] if item["status"] == "error"]
    assert error_entries[0]["error"]
    assert result_payload["metadata"]["failedTests"] == 1
    assert result_payload["metadata"]["incompleteTests"] == ["Test B"]


@pytest.mark.anyio
async def test_analyse_times_out_with_partial_results(monkeypatch: pytest.MonkeyPatch) -> None:
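    """NFR-13/NFR-14: a 30s timeout must still yield a result event flagged as timed out."""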
    raw_headers = (FIXTURES_DIR / "sample_headers.txt").read_text(encoding="utf-8")

    def fake_analyze(
        self: HeaderAnalyzer,
        request: Any,
        progress_callback: Any | None = None,
    ) -> AnalysisResult:
        if progress_callback:
            progress_callback(0, 3, "Test A")
            progress_callback(1, 3, "Test B")
        raise TimeoutError("Analysis timed out")

    monkeypatch.setattr(HeaderAnalyzer, "analyze", fake_analyze)

    payload = {
        "headers": raw_headers,
        "config": {"testIds": [12, 13, 14], "resolve": False, "decodeAll": False},
    }

    async with AsyncClient(
        transport=ASGITransport(app=app),
        base_url="http://test",
    ) as client:
        events = await _collect_stream_events(client, payload)

    result_payload = next(event["data"] for event in events if event["event"] == "result")
    assert result_payload["metadata"]["timedOut"] is True
    assert result_payload["metadata"]["incompleteTests"]