Mirror of https://github.com/mgeeky/decode-spam-headers.git (synced 2026-02-22 13:33:30 +01:00)

MAESTRO: fix linting issues
@@ -47,5 +47,5 @@ Frontend uses `fetch` with `ReadableStream` reader (not native `EventSource`, wh
- [x] Partial failures show inline error indicators per FR-25 (Added AnalysisResults rendering with inline error badges.)
- [x] Timeout at 30s displays partial results with notification listing incomplete tests
- [x] Empty input returns 400, oversized >1MB returns 413
- [ ] Linting passes on both sides
- [x] Linting passes on both sides
- [ ] Run `/speckit.analyze` to verify consistency

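The hunk context above records that the frontend reads the analysis stream with `fetch` plus a `ReadableStream` reader rather than the native `EventSource` (presumably because `EventSource` only issues GET requests and cannot carry a request body). A minimal sketch of that pattern, assuming the `/api/analyse` endpoint and payload shape seen in the tests below and standard `event:`/`data:` SSE framing; the callback and error handling are illustrative, not the project's actual client code:

```typescript
// Minimal sketch of the fetch + ReadableStream SSE client described above.
// Endpoint and payload shape mirror the tests below; onEvent is a hypothetical
// callback, not the project's real API.
async function streamAnalysis(
  headers: string,
  onEvent: (event: string, data: unknown) => void,
): Promise<void> {
  const response = await fetch("/api/analyse", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      headers,
      config: { testIds: [], resolve: false, decodeAll: false },
    }),
  });
  if (!response.ok || !response.body) {
    throw new Error(`Analysis request failed: ${response.status}`);
  }

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";

  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // SSE frames are separated by a blank line; parse each complete frame.
    let boundary = buffer.indexOf("\n\n");
    while (boundary !== -1) {
      const frame = buffer.slice(0, boundary);
      buffer = buffer.slice(boundary + 2);
      const lines = frame.split("\n");
      const eventName = lines.find((l) => l.startsWith("event:"))?.slice(6).trim() ?? "message";
      const dataLine = lines.find((l) => l.startsWith("data:"));
      if (dataLine) {
        onEvent(eventName, JSON.parse(dataLine.slice(5)));
      }
      boundary = buffer.indexOf("\n\n");
    }
  }
}
```
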
@@ -4,7 +4,11 @@ from pydantic import BaseModel, ConfigDict, Field

from app.engine.models import (
    AnalysisConfig,
)
from app.engine.models import (
    AnalysisRequest as EngineAnalysisRequest,
)
from app.engine.models import (
    AnalysisResult as EngineAnalysisResult,
)

@@ -8,7 +8,13 @@ import pytest
from httpx import ASGITransport, AsyncClient

from app.engine.analyzer import HeaderAnalyzer
from app.engine.models import AnalysisResult, ReportMetadata, Severity, TestResult, TestStatus
from app.engine.models import (
    AnalysisResult,
    ReportMetadata,
    Severity,
    TestResult,
    TestStatus,
)
from app.main import app

FIXTURES_DIR = Path(__file__).resolve().parents[1] / "fixtures"

@@ -44,7 +50,10 @@ def _parse_sse_events(raw: str) -> list[dict[str, Any]]:
    return events


async def _collect_stream_events(client: AsyncClient, payload: dict[str, Any]) -> list[dict[str, Any]]:
async def _collect_stream_events(
    client: AsyncClient,
    payload: dict[str, Any],
) -> list[dict[str, Any]]:
    async with client.stream(
        "POST",
        "/api/analyse",

@@ -99,7 +108,10 @@ async def test_analyse_streams_progress_and_result() -> None:

@pytest.mark.anyio
async def test_analyse_rejects_empty_headers() -> None:
    payload = {"headers": "", "config": {"testIds": [], "resolve": False, "decodeAll": False}}
    payload = {
        "headers": "",
        "config": {"testIds": [], "resolve": False, "decodeAll": False},
    }

    async with AsyncClient(
        transport=ASGITransport(app=app),

@@ -131,7 +143,9 @@ async def test_analyse_rejects_oversized_headers() -> None:


@pytest.mark.anyio
async def test_analyse_stream_includes_partial_failures(monkeypatch: pytest.MonkeyPatch) -> None:
async def test_analyse_stream_includes_partial_failures(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    raw_headers = (FIXTURES_DIR / "sample_headers.txt").read_text(encoding="utf-8")

    def fake_analyze(

@@ -191,17 +205,23 @@ async def test_analyse_stream_includes_partial_failures(monkeypatch: pytest.Monk
    ) as client:
        events = await _collect_stream_events(client, payload)

    result_payload = next(event["data"] for event in events if event["event"] == "result")
    result_payload = next(
        event["data"] for event in events if event["event"] == "result"
    )
    statuses = [item["status"] for item in result_payload["results"]]
    assert "error" in statuses
    error_entries = [item for item in result_payload["results"] if item["status"] == "error"]
    error_entries = [
        item for item in result_payload["results"] if item["status"] == "error"
    ]
    assert error_entries[0]["error"]
    assert result_payload["metadata"]["failedTests"] == 1
    assert result_payload["metadata"]["incompleteTests"] == ["Test B"]


@pytest.mark.anyio
async def test_analyse_times_out_with_partial_results(monkeypatch: pytest.MonkeyPatch) -> None:
async def test_analyse_times_out_with_partial_results(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    raw_headers = (FIXTURES_DIR / "sample_headers.txt").read_text(encoding="utf-8")

    def fake_analyze(

@@ -227,6 +247,8 @@ async def test_analyse_times_out_with_partial_results(monkeypatch: pytest.Monkey
    ) as client:
        events = await _collect_stream_events(client, payload)

    result_payload = next(event["data"] for event in events if event["event"] == "result")
    result_payload = next(
        event["data"] for event in events if event["event"] == "result"
    )
    assert result_payload["metadata"]["timedOut"] is True
    assert result_payload["metadata"]["incompleteTests"]

@@ -1,4 +1,6 @@
import coreWebVitals from "eslint-config-next/core-web-vitals";
import typescript from "eslint-config-next/typescript";

export default [...coreWebVitals, ...typescript];
const config = [...coreWebVitals, ...typescript];

export default config;

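One plausible reading of the `eslint.config.mjs` change above (an assumption, since only the hunk is visible): assigning the flat config array to a named binding before exporting avoids anonymous-default-export warnings such as `import/no-anonymous-default-export`. The resulting file would then look roughly like:

```typescript
// Probable final eslint.config.mjs after the change above, reconstructed from
// the hunk; the real file may contain more than this.
import coreWebVitals from "eslint-config-next/core-web-vitals";
import typescript from "eslint-config-next/typescript";

const config = [...coreWebVitals, ...typescript];

export default config;
```
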
@@ -63,24 +63,15 @@ export default function ProgressIndicator({
  timeoutSeconds,
  incompleteTests = [],
}: ProgressIndicatorProps) {
  const [nowMs, setNowMs] = useState(() => Date.now());
  const [elapsedMs, setElapsedMs] = useState(() => progress?.elapsedMs ?? 0);
  const progressRef = useRef<AnalysisProgress | null>(progress);
  const statusRef = useRef<AnalysisStatus>(status);
  const anchorRef = useRef<{ elapsedMs: number; timestamp: number } | null>(null);

  useEffect(() => {
    if (status !== "analysing") {
      return;
    }
    progressRef.current = progress;
    statusRef.current = status;

    const interval = window.setInterval(() => {
      setNowMs(Date.now());
    }, 1000);

    return () => {
      window.clearInterval(interval);
    };
  }, [status]);

  useEffect(() => {
    if (!progress || status !== "analysing") {
      anchorRef.current = null;
      return;

@@ -90,14 +81,28 @@ export default function ProgressIndicator({
      elapsedMs: progress.elapsedMs,
      timestamp: Date.now(),
    };
  }, [progress?.elapsedMs, status]);
  }, [progress, status]);

  const baseElapsedMs = progress?.elapsedMs ?? 0;
  useEffect(() => {
    const interval = window.setInterval(() => {
      const currentStatus = statusRef.current;
      const currentProgress = progressRef.current;
      const anchor = anchorRef.current;
      const elapsedMs =
        status === "analysing" && progress && anchor
          ? anchor.elapsedMs + Math.max(0, nowMs - anchor.timestamp)
      const baseElapsedMs = currentProgress?.elapsedMs ?? 0;
      const nextElapsedMs =
        currentStatus === "analysing" && currentProgress && anchor
          ? anchor.elapsedMs + Math.max(0, Date.now() - anchor.timestamp)
          : baseElapsedMs;

      setElapsedMs((previous) =>
        previous === nextElapsedMs ? previous : nextElapsedMs,
      );
    }, 1000);

    return () => {
      window.clearInterval(interval);
    };
  }, []);
  const elapsedSeconds = Math.floor(elapsedMs / 1000);
  const remainingSeconds = Math.max(0, timeoutSeconds - elapsedSeconds);
  const percentage = progress ? Math.round(progress.percentage) : 0;

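Read together, the two ProgressIndicator hunks appear to replace the per-second `nowMs` state (and its `[status]`-scoped interval) with refs plus a single interval created on mount. A sketch of how the added fragments likely assemble inside the component body; the identifiers and types come from the diff, the arrangement is inferred rather than the component's exact code:

```typescript
// Sketch assembled from the added lines above (inside the component body;
// assumes `import { useEffect, useRef, useState } from "react";` at the top).
// AnalysisProgress.elapsedMs and the "analysing" status are as shown in the
// diff; everything else is inferred.
const [elapsedMs, setElapsedMs] = useState(() => progress?.elapsedMs ?? 0);
const progressRef = useRef<AnalysisProgress | null>(progress);
const statusRef = useRef<AnalysisStatus>(status);
const anchorRef = useRef<{ elapsedMs: number; timestamp: number } | null>(null);

// Keep the latest progress/status visible to the long-lived interval below
// without recreating it on every render.
useEffect(() => {
  progressRef.current = progress;
  statusRef.current = status;
}, [progress, status]);

// One interval for the lifetime of the component: each second, extrapolate the
// elapsed time from the last server-reported anchor while an analysis is running.
useEffect(() => {
  const interval = window.setInterval(() => {
    const currentStatus = statusRef.current;
    const currentProgress = progressRef.current;
    const anchor = anchorRef.current;
    const baseElapsedMs = currentProgress?.elapsedMs ?? 0;
    const nextElapsedMs =
      currentStatus === "analysing" && currentProgress && anchor
        ? anchor.elapsedMs + Math.max(0, Date.now() - anchor.timestamp)
        : baseElapsedMs;
    setElapsedMs((previous) => (previous === nextElapsedMs ? previous : nextElapsedMs));
  }, 1000);

  return () => {
    window.clearInterval(interval);
  };
}, []);

const elapsedSeconds = Math.floor(elapsedMs / 1000);
const remainingSeconds = Math.max(0, timeoutSeconds - elapsedSeconds);
```
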