Files
DECNET/decnet/web/router/logs/api_get_logs.py
anti 6301504c0e perf(api): TTL-cache /stats + unfiltered pagination counts
Every /stats call ran SELECT count(*) FROM logs + SELECT count(DISTINCT
attacker_ip) FROM logs; every /logs and /attackers call ran an
unfiltered count for the paginator. At 500 concurrent users these
serialize through aiosqlite's worker threads and dominate wall time.

Cache at the router layer (repo stays dialect-agnostic):
  - /stats response: 5s TTL
  - /logs total (only when no filters): 2s TTL
  - /attackers total (only when no filters): 2s TTL

Filtered paths bypass the cache. Pattern reused from api_get_config
and api_get_health (asyncio.Lock + time.monotonic window + lazy lock).
2026-04-17 19:09:15 -04:00

76 lines
2.5 KiB
Python

import asyncio
import time
from typing import Any, Optional
from fastapi import APIRouter, Depends, Query
from decnet.telemetry import traced as _traced
from decnet.web.dependencies import require_viewer, repo
from decnet.web.db.models import LogsResponse
# Module-level router; the endpoint decorators below register onto it.
router = APIRouter()
# Cache the unfiltered total-logs count. Filtered counts bypass the cache
# (rare, freshness matters for search). SELECT count(*) FROM logs is a
# full scan and gets hammered by paginating clients.
_TOTAL_TTL = 2.0  # seconds a cached count stays fresh
# (count, monotonic timestamp of refresh); (None, 0.0) means "never populated".
_total_cache: tuple[Optional[int], float] = (None, 0.0)
# Created lazily on first use; _reset_total_cache() drops it back to None.
_total_lock: Optional[asyncio.Lock] = None
def _reset_total_cache() -> None:
global _total_cache, _total_lock
_total_cache = (None, 0.0)
_total_lock = None
async def _get_total_logs_cached() -> int:
    """Return the unfiltered total-logs count, cached for ``_TOTAL_TTL`` seconds.

    Fast path reads the cache without locking. On a miss or expiry, one
    coroutine refreshes under the lock while concurrent callers wait and
    then re-check (asyncio double-checked locking), so at most one
    ``repo.get_total_logs()`` call runs per TTL window.
    """
    global _total_cache, _total_lock
    cached, stamp = _total_cache
    if cached is not None and (time.monotonic() - stamp) < _TOTAL_TTL:
        return cached
    if _total_lock is None:
        # No await between the check and the assignment, so this lazy
        # creation cannot race under a single-threaded event loop.
        _total_lock = asyncio.Lock()
    async with _total_lock:
        # Another coroutine may have refreshed while we waited on the lock.
        cached, stamp = _total_cache
        if cached is not None and (time.monotonic() - stamp) < _TOTAL_TTL:
            return cached
        fresh = await repo.get_total_logs()
        _total_cache = (fresh, time.monotonic())
        return fresh
@router.get("/logs", response_model=LogsResponse, tags=["Logs"],
responses={401: {"description": "Could not validate credentials"}, 403: {"description": "Insufficient permissions"}, 422: {"description": "Validation error"}})
@_traced("api.get_logs")
async def get_logs(
limit: int = Query(50, ge=1, le=1000),
offset: int = Query(0, ge=0, le=2147483647),
search: Optional[str] = Query(None, max_length=512),
start_time: Optional[str] = Query(None),
end_time: Optional[str] = Query(None),
user: dict = Depends(require_viewer)
) -> dict[str, Any]:
    """Paginated log listing with optional search / time-window filters.

    Sentinel strings some clients send for "no value" are normalized to
    None before querying. When no filter is active, the paginator total
    comes from the TTL cache; filtered requests always hit the repo so
    search results stay fresh.
    """
    def _norm(v: Optional[str]) -> Optional[str]:
        # Clients may send literal "null"/"undefined"/"" for empty params.
        return None if v in (None, "null", "NULL", "undefined", "") else v

    term = _norm(search)
    since = _norm(start_time)
    until = _norm(end_time)
    rows: list[dict[str, Any]] = await repo.get_logs(
        limit=limit, offset=offset, search=term, start_time=since, end_time=until
    )
    unfiltered = term is None and since is None and until is None
    total: int = (
        await _get_total_logs_cached()
        if unfiltered
        else await repo.get_total_logs(search=term, start_time=since, end_time=until)
    )
    return {
        "total": total,
        "limit": limit,
        "offset": offset,
        "data": rows,
    }