fix: remove event-loop-blocking cold start; unify profiler to cursor-based incremental

Cold start fetched all logs in one bulk query, then processed them in a tight
synchronous loop that never yielded control back to the event loop, blocking
asyncio for seconds on datasets of 30K+ rows. This stalled every concurrent
await — including the
SSE stream generator's initial DB calls — causing the dashboard to show
INITIALIZING SENSORS indefinitely.

Changes:
- Drop _cold_start() and get_all_logs_raw(); uninitialized state now runs the
  same cursor loop as incremental, starting from last_log_id=0
- Yield to the event loop after every _BATCH_SIZE rows (asyncio.sleep(0))
- Add SSE keepalive comment as first yield so the connection flushes before
  any DB work begins
- Add Cache-Control/X-Accel-Buffering headers to StreamingResponse
This commit is contained in:
2026-04-15 13:46:42 -04:00
parent 12aa98a83c
commit 314e6c6388
7 changed files with 32 additions and 74 deletions

View File

@@ -111,11 +111,6 @@ class BaseRepository(ABC):
"""Store a specific state entry by key."""
pass
@abstractmethod
async def get_all_logs_raw(self) -> list[dict[str, Any]]:
    """Retrieve all log rows with fields needed by the attacker profile worker.

    Returns:
        list[dict[str, Any]]: one dict per log row. The exact key set is
        defined by the concrete repository implementation (presumably
        id/raw_line/attacker_ip/service/event_type/decky/timestamp/fields,
        matching the SQLModel implementation — confirm against callers).
    """
    pass
@abstractmethod
async def get_max_log_id(self) -> int:
"""Return the highest log ID, or 0 if the table is empty."""

View File

@@ -413,34 +413,6 @@ class SQLModelRepository(BaseRepository):
# ----------------------------------------------------------- attackers
async def get_all_logs_raw(self) -> List[dict[str, Any]]:
    """Fetch every log row as a plain dict for the attacker profile worker.

    Returns:
        List[dict[str, Any]]: one dict per row, keyed by the selected
        column names in select order.
    """
    # Keep the column list and the key tuple in the same order: rows are
    # unpacked positionally below, so the pairing relies on this ordering.
    keys = (
        "id",
        "raw_line",
        "attacker_ip",
        "service",
        "event_type",
        "decky",
        "timestamp",
        "fields",
    )
    stmt = select(
        Log.id,
        Log.raw_line,
        Log.attacker_ip,
        Log.service,
        Log.event_type,
        Log.decky,
        Log.timestamp,
        Log.fields,
    )
    async with self.session_factory() as session:
        result = await session.execute(stmt)
        # Row objects iterate positionally in select order, so zip pairs
        # each value with its column name.
        return [dict(zip(keys, row)) for row in result.all()]
async def get_all_bounties_by_ip(self) -> dict[str, List[dict[str, Any]]]:
from collections import defaultdict
async with self.session_factory() as session:

View File

@@ -40,6 +40,8 @@ async def stream_events(
loops_since_stats = 0
emitted_chunks = 0
try:
yield ": keepalive\n\n" # flush headers immediately; helps diagnose pre-yield hangs
if last_id == 0:
last_id = await repo.get_max_log_id()
@@ -90,4 +92,11 @@ async def stream_events(
log.exception("SSE stream error for user %s", last_event_id)
yield f"event: error\ndata: {json.dumps({'type': 'error', 'message': 'Stream interrupted'})}\n\n"
return StreamingResponse(event_generator(), media_type="text/event-stream")
return StreamingResponse(
event_generator(),
media_type="text/event-stream",
headers={
"Cache-Control": "no-cache",
"X-Accel-Buffering": "no",
},
)