tofix/merge-testing-to-main #6

Merged
anti merged 15 commits from tofix/merge-testing-to-main into main 2026-04-13 13:49:48 +02:00
53 changed files with 2155 additions and 360 deletions
Showing only changes of commit b2e4706a14 - Show all commits

View File

@@ -46,6 +46,7 @@ DECNET is a honeypot/deception network framework. It deploys fake machines (call
- The logging/aggregation network must be isolated from the decoy network.
- A publicly accessible real server acts as the bridge between the two networks.
- Deckies should differ in exposed services and OS fingerprints to appear as a heterogeneous network.
- **IMPORTANT**: The system now strictly enforces dependency injection for storage. Do not import `SQLiteRepository` directly in new features; instead, use `get_repository()` from the factory or the FastAPI `get_repo` dependency.
## Development and testing

View File

@@ -89,6 +89,7 @@ Host NIC (eth0)
- **Extensive testing** for every function must be created.
- **Always develop in the `dev` branch, never in `main`.**
- **Test in the `testing` branch.**
- **IMPORTANT**: The system now strictly enforces dependency injection for storage. Do not import `SQLiteRepository` directly in new features; instead, use `get_repository()` from the factory or the FastAPI `get_repo` dependency.
## Directory Structure

1
decnet.collector.log Normal file
View File

@@ -0,0 +1 @@
Collector starting → /home/anti/Tools/DECNET/decnet.log

BIN
decnet.db-shm Normal file

Binary file not shown.

BIN
decnet.db-wal Normal file

Binary file not shown.

View File

@@ -252,7 +252,7 @@ def deploy(
console.print("[red]Failed to start mutator watcher.[/]")
if effective_log_file and not dry_run and not api:
import subprocess # noqa: F811 # nosec B404
import subprocess # nosec B404
import sys
from pathlib import Path as _Path
_collector_err = _Path(effective_log_file).with_suffix(".collector.log")
@@ -301,18 +301,20 @@ def mutate(
force_all: bool = typer.Option(False, "--all", help="Force mutate all deckies immediately"),
) -> None:
"""Manually trigger or continuously watch for decky mutation."""
import asyncio
from decnet.mutator import mutate_decky, mutate_all, run_watch_loop
from decnet.web.dependencies import repo
if watch:
run_watch_loop()
asyncio.run(run_watch_loop(repo))
return
if decky_name:
mutate_decky(decky_name)
asyncio.run(mutate_decky(decky_name, repo))
elif force_all:
mutate_all(force=True)
asyncio.run(mutate_all(force=True, repo=repo))
else:
mutate_all(force=False)
asyncio.run(mutate_all(force=False, repo=repo))
@app.command()

View File

@@ -5,9 +5,9 @@ from decnet.correlation.graph import AttackerTraversal, TraversalHop
from decnet.correlation.parser import LogEvent, parse_line
__all__ = [
"CorrelationEngine",
"AttackerTraversal",
"TraversalHop",
"CorrelationEngine",
"LogEvent",
"TraversalHop",
"parse_line",
]

View File

@@ -1,5 +1,6 @@
import os
from pathlib import Path
from typing import Optional
from dotenv import load_dotenv
# Calculate absolute path to the project root
@@ -30,7 +31,7 @@ def _require_env(name: str) -> str:
f"Required environment variable '{name}' is not set. "
f"Set it in .env.local or export it before starting DECNET."
)
if any(k.startswith("PYTEST") for k in os.environ):
return value
@@ -55,6 +56,10 @@ DECNET_ADMIN_USER: str = os.environ.get("DECNET_ADMIN_USER", "admin")
DECNET_ADMIN_PASSWORD: str = os.environ.get("DECNET_ADMIN_PASSWORD", "admin")
DECNET_DEVELOPER: bool = os.environ.get("DECNET_DEVELOPER", "False").lower() == "true"
# Database Options
DECNET_DB_TYPE: str = os.environ.get("DECNET_DB_TYPE", "sqlite").lower()
DECNET_DB_URL: Optional[str] = os.environ.get("DECNET_DB_URL")
# CORS — comma-separated list of allowed origins for the web dashboard API.
# Defaults to the configured web host/port. Override with DECNET_CORS_ORIGINS if needed.
# Example: DECNET_CORS_ORIGINS=http://192.168.1.50:9090,https://dashboard.example.com

View File

@@ -123,7 +123,7 @@ def _parse_configparser(cp: configparser.ConfigParser) -> IniConfig:
for section in cp.sections():
if section == "general":
continue
# A service sub-section is identified if the section name has at least one dot
# AND the last segment is a known service name.
# e.g. "decky-01.ssh" -> sub-section
@@ -151,7 +151,7 @@ def _parse_configparser(cp: configparser.ConfigParser) -> IniConfig:
services = [sv.strip() for sv in svc_raw.split(",")] if svc_raw else None
archetype = s.get("archetype")
nmap_os = s.get("nmap_os") or s.get("nmap-os") or None
mi_raw = s.get("mutate_interval") or s.get("mutate-interval")
mutate_interval = None
if mi_raw:
@@ -199,11 +199,11 @@ def _parse_configparser(cp: configparser.ConfigParser) -> IniConfig:
for section in cp.sections():
if "." not in section:
continue
decky_name, dot, svc_name = section.rpartition(".")
if svc_name not in known_services:
continue # not a service sub-section
svc_cfg = {k: v for k, v in cp[section].items()}
if decky_name in decky_map:
# Direct match — single decky

View File

@@ -12,25 +12,29 @@ from rich.console import Console
from decnet.archetypes import get_archetype
from decnet.fleet import all_service_names
from decnet.composer import write_compose
from decnet.config import DeckyConfig, load_state, save_state
from decnet.config import DeckyConfig, DecnetConfig
from decnet.engine import _compose_with_retry
import subprocess # nosec B404
from pathlib import Path
import anyio
import asyncio
from decnet.web.db.repository import BaseRepository
console = Console()
def mutate_decky(decky_name: str) -> bool:
async def mutate_decky(decky_name: str, repo: BaseRepository) -> bool:
"""
Perform an Intra-Archetype Shuffle for a specific decky.
Returns True if mutation succeeded, False otherwise.
"""
state = load_state()
if state is None:
console.print("[red]No active deployment found (no decnet-state.json).[/]")
state_dict = await repo.get_state("deployment")
if state_dict is None:
console.print("[red]No active deployment found in database.[/]")
return False
config, compose_path = state
config = DecnetConfig(**state_dict["config"])
compose_path = Path(state_dict["compose_path"])
decky: Optional[DeckyConfig] = next((d for d in config.deckies if d.name == decky_name), None)
if not decky:
@@ -63,31 +67,35 @@ def mutate_decky(decky_name: str) -> bool:
decky.services = list(chosen)
decky.last_mutated = time.time()
save_state(config, compose_path)
# Save to DB
await repo.set_state("deployment", {"config": config.model_dump(), "compose_path": str(compose_path)})
# Still writes files for Docker to use
write_compose(config, compose_path)
console.print(f"[cyan]Mutating '{decky_name}' to services: {', '.join(decky.services)}[/]")
try:
_compose_with_retry("up", "-d", "--remove-orphans", compose_file=compose_path)
except subprocess.CalledProcessError as e:
console.print(f"[red]Failed to mutate '{decky_name}': {e.stderr}[/]")
# Wrap blocking call in thread
await anyio.to_thread.run_sync(_compose_with_retry, "up", "-d", "--remove-orphans", compose_path)
except Exception as e:
console.print(f"[red]Failed to mutate '{decky_name}': {e}[/]")
return False
return True
def mutate_all(force: bool = False) -> None:
async def mutate_all(repo: BaseRepository, force: bool = False) -> None:
"""
Check all deckies and mutate those that are due.
If force=True, mutates all deckies regardless of schedule.
"""
state = load_state()
if state is None:
state_dict = await repo.get_state("deployment")
if state_dict is None:
console.print("[red]No active deployment found.[/]")
return
config, _ = state
config = DecnetConfig(**state_dict["config"])
now = time.time()
mutated_count = 0
@@ -103,7 +111,7 @@ def mutate_all(force: bool = False) -> None:
due = elapsed_secs >= (interval_mins * 60)
if due:
success = mutate_decky(decky.name)
success = await mutate_decky(decky.name, repo=repo)
if success:
mutated_count += 1
@@ -111,12 +119,12 @@ def mutate_all(force: bool = False) -> None:
console.print("[dim]No deckies are due for mutation.[/]")
def run_watch_loop(poll_interval_secs: int = 10) -> None:
async def run_watch_loop(repo: BaseRepository, poll_interval_secs: int = 10) -> None:
"""Run an infinite loop checking for deckies that need mutation."""
console.print(f"[green]DECNET Mutator Watcher started (polling every {poll_interval_secs}s).[/]")
try:
while True:
mutate_all(force=False)
time.sleep(poll_interval_secs)
await mutate_all(force=False, repo=repo)
await asyncio.sleep(poll_interval_secs)
except KeyboardInterrupt:
console.print("\n[dim]Mutator watcher stopped.[/]")

View File

@@ -32,28 +32,38 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
log.error("DB failed to initialize after 5 attempts — startup may be degraded")
await asyncio.sleep(0.5)
# Start background ingestion task
if ingestion_task is None or ingestion_task.done():
ingestion_task = asyncio.create_task(log_ingestion_worker(repo))
# Start background tasks only if not in contract test mode
if os.environ.get("DECNET_CONTRACT_TEST") != "true":
# Start background ingestion task
if ingestion_task is None or ingestion_task.done():
ingestion_task = asyncio.create_task(log_ingestion_worker(repo))
# Start Docker log collector (writes to log file; ingester reads from it)
_log_file = os.environ.get("DECNET_INGEST_LOG_FILE", DECNET_INGEST_LOG_FILE)
if _log_file and (collector_task is None or collector_task.done()):
collector_task = asyncio.create_task(log_collector_worker(_log_file))
# Start Docker log collector (writes to log file; ingester reads from it)
_log_file = os.environ.get("DECNET_INGEST_LOG_FILE", DECNET_INGEST_LOG_FILE)
if _log_file and (collector_task is None or collector_task.done()):
collector_task = asyncio.create_task(log_collector_worker(_log_file))
elif not _log_file:
log.warning("DECNET_INGEST_LOG_FILE not set — Docker log collection disabled.")
else:
log.warning("DECNET_INGEST_LOG_FILE not set — Docker log collection disabled.")
log.info("Contract Test Mode: skipping background worker startup")
yield
# Shutdown background tasks
for task in (ingestion_task, collector_task):
if task:
if task and not task.done():
task.cancel()
try:
await task
except asyncio.CancelledError:
pass
except Exception as exc:
log.warning("Task shutdown error: %s", exc)
app: FastAPI = FastAPI(
title="DECNET Web Dashboard API",
version="1.0.0",
title="DECNET Web Dashboard API",
version="1.0.0",
lifespan=lifespan,
docs_url="/docs" if DECNET_DEVELOPER else None,
redoc_url="/redoc" if DECNET_DEVELOPER else None,

View File

@@ -12,7 +12,7 @@ ACCESS_TOKEN_EXPIRE_MINUTES: int = 1440
def verify_password(plain_password: str, hashed_password: str) -> bool:
return bcrypt.checkpw(
plain_password.encode("utf-8")[:72],
plain_password.encode("utf-8")[:72],
hashed_password.encode("utf-8")
)
@@ -31,7 +31,7 @@ def create_access_token(data: dict[str, Any], expires_delta: Optional[timedelta]
_expire = datetime.now(timezone.utc) + expires_delta
else:
_expire = datetime.now(timezone.utc) + timedelta(minutes=15)
_to_encode.update({"exp": _expire})
_to_encode.update({"iat": datetime.now(timezone.utc)})
_encoded_jwt: str = jwt.encode(_to_encode, SECRET_KEY, algorithm=ALGORITHM)

18
decnet/web/db/factory.py Normal file
View File

@@ -0,0 +1,18 @@
from typing import Any
import os

from decnet.web.db.repository import BaseRepository


def get_repository(**kwargs: Any) -> BaseRepository:
    """Factory function to instantiate the correct repository implementation.

    Reads the ``DECNET_DB_TYPE`` environment variable (default: ``"sqlite"``)
    and returns the matching :class:`BaseRepository` implementation. Extra
    keyword arguments are forwarded to the repository constructor.

    Raises:
        NotImplementedError: for the planned-but-unimplemented "mysql" backend.
        ValueError: for any unrecognized database type.
    """
    db_type = os.environ.get("DECNET_DB_TYPE", "sqlite").lower()
    if db_type == "sqlite":
        # Imported lazily so merely importing the factory does not pull in
        # the SQLite backend (and its SQLAlchemy machinery).
        from decnet.web.db.sqlite.repository import SQLiteRepository
        return SQLiteRepository(**kwargs)
    if db_type == "mysql":
        # Placeholder for future implementation
        # from decnet.web.db.mysql.repository import MySQLRepository
        # return MySQLRepository()
        raise NotImplementedError("MySQL support is planned but not yet implemented.")
    raise ValueError(f"Unsupported database type: {db_type}")

View File

@@ -22,7 +22,7 @@ class Log(SQLModel, table=True):
event_type: str = Field(index=True)
attacker_ip: str = Field(index=True)
raw_line: str
fields: str
fields: str
msg: Optional[str] = None
class Bounty(SQLModel, table=True):
@@ -35,6 +35,12 @@ class Bounty(SQLModel, table=True):
bounty_type: str = Field(index=True)
payload: str
class State(SQLModel, table=True):
    """Generic key/value table for persisted application state.

    Values are JSON strings written/read by the repository's
    get_state()/set_state() methods (e.g. the serialized deployment config).
    """
    __tablename__ = "state"
    # Lookup key, e.g. "deployment".
    key: str = Field(primary_key=True)
    value: str  # Stores JSON serialized DecnetConfig or other state blobs
# --- API Request/Response Models (Pydantic) ---
class Token(BaseModel):

View File

@@ -17,9 +17,9 @@ class BaseRepository(ABC):
@abstractmethod
async def get_logs(
self,
limit: int = 50,
offset: int = 0,
self,
limit: int = 50,
offset: int = 0,
search: Optional[str] = None
) -> list[dict[str, Any]]:
"""Retrieve paginated log entries."""
@@ -67,9 +67,9 @@ class BaseRepository(ABC):
@abstractmethod
async def get_bounties(
self,
limit: int = 50,
offset: int = 0,
self,
limit: int = 50,
offset: int = 0,
bounty_type: Optional[str] = None,
search: Optional[str] = None
) -> list[dict[str, Any]]:
@@ -80,3 +80,13 @@ class BaseRepository(ABC):
async def get_total_bounties(self, bounty_type: Optional[str] = None, search: Optional[str] = None) -> int:
"""Retrieve the total count of bounties, optionally filtered."""
pass
@abstractmethod
async def get_state(self, key: str) -> Optional[dict[str, Any]]:
    """Retrieve a specific state entry by key.

    Returns the JSON-decoded value stored under *key*, or None when no
    entry exists.
    """
    pass
@abstractmethod
async def set_state(self, key: str, value: Any) -> None:
    """Store a specific state entry by key.

    *value* must be JSON-serializable; implementations persist it as a
    JSON string, inserting or overwriting the entry for *key*.
    """
    pass

View File

@@ -1,22 +1,25 @@
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy import create_engine, Engine
from sqlmodel import SQLModel
from typing import AsyncGenerator
# We need both sync and async engines for SQLite
# Sync for initialization (DDL) and async for standard queries
def get_async_engine(db_path: str):
def get_async_engine(db_path: str) -> AsyncEngine:
# If it's a memory URI, don't add the extra slash that turns it into a relative file
prefix = "sqlite+aiosqlite:///"
if db_path.startswith("file:"):
prefix = "sqlite+aiosqlite:///"
if db_path.startswith(":memory:"):
prefix = "sqlite+aiosqlite://"
return create_async_engine(f"{prefix}{db_path}", echo=False, connect_args={"uri": True})
def get_sync_engine(db_path: str):
def get_sync_engine(db_path: str) -> Engine:
prefix = "sqlite:///"
if db_path.startswith(":memory:"):
prefix = "sqlite://"
return create_engine(f"{prefix}{db_path}", echo=False, connect_args={"uri": True})
def init_db(db_path: str):
def init_db(db_path: str) -> None:
"""Synchronously create all tables."""
engine = get_sync_engine(db_path)
# Ensure WAL mode is set
@@ -25,7 +28,7 @@ def init_db(db_path: str):
conn.exec_driver_sql("PRAGMA synchronous=NORMAL")
SQLModel.metadata.create_all(engine)
async def get_session(engine) -> AsyncSession:
async def get_session(engine: AsyncEngine) -> AsyncGenerator[AsyncSession, None]:
async_session = async_sessionmaker(
engine, class_=AsyncSession, expire_on_commit=False
)

View File

@@ -6,12 +6,13 @@ from typing import Any, Optional, List
from sqlalchemy import func, select, desc, asc, text, or_, update, literal_column
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
from sqlmodel.sql.expression import SelectOfScalar
from decnet.config import load_state, _ROOT
from decnet.env import DECNET_ADMIN_USER, DECNET_ADMIN_PASSWORD
from decnet.web.auth import get_password_hash
from decnet.web.db.repository import BaseRepository
from decnet.web.db.models import User, Log, Bounty
from decnet.web.db.models import User, Log, Bounty, State
from decnet.web.db.sqlite.database import get_async_engine, init_db
@@ -93,11 +94,11 @@ class SQLiteRepository(BaseRepository):
def _apply_filters(
self,
statement,
statement: SelectOfScalar,
search: Optional[str],
start_time: Optional[str],
end_time: Optional[str],
):
) -> SelectOfScalar:
import re
import shlex
@@ -128,9 +129,10 @@ class SQLiteRepository(BaseRepository):
statement = statement.where(core_fields[key] == val)
else:
key_safe = re.sub(r"[^a-zA-Z0-9_]", "", key)
statement = statement.where(
text(f"json_extract(fields, '$.{key_safe}') = :val")
).params(val=val)
if key_safe:
statement = statement.where(
text(f"json_extract(fields, '$.{key_safe}') = :val")
).params(val=val)
else:
lk = f"%{token}%"
statement = statement.where(
@@ -206,7 +208,7 @@ class SQLiteRepository(BaseRepository):
end_time: Optional[str] = None,
interval_minutes: int = 15,
) -> List[dict]:
bucket_seconds = interval_minutes * 60
bucket_seconds = max(interval_minutes, 1) * 60
bucket_expr = literal_column(
f"datetime((strftime('%s', timestamp) / {bucket_seconds}) * {bucket_seconds}, 'unixepoch')"
).label("bucket_time")
@@ -299,7 +301,12 @@ class SQLiteRepository(BaseRepository):
session.add(Bounty(**data))
await session.commit()
def _apply_bounty_filters(self, statement, bounty_type: Optional[str], search: Optional[str]):
def _apply_bounty_filters(
self,
statement: SelectOfScalar,
bounty_type: Optional[str],
search: Optional[str]
) -> SelectOfScalar:
if bounty_type:
statement = statement.where(Bounty.bounty_type == bounty_type)
if search:
@@ -350,3 +357,29 @@ class SQLiteRepository(BaseRepository):
async with self.session_factory() as session:
result = await session.execute(statement)
return result.scalar() or 0
async def get_state(self, key: str) -> Optional[dict[str, Any]]:
    """Retrieve a JSON-decoded state entry by key.

    Returns the deserialized value, or None when no row exists for *key*.
    """
    async with self.session_factory() as session:
        statement = select(State).where(State.key == key)
        result = await session.execute(statement)
        # Fix: the SQLAlchemy Result API is scalar_one_or_none();
        # the previous scalar_one_none() does not exist and raised
        # AttributeError at runtime.
        state = result.scalar_one_or_none()
        if state:
            return json.loads(state.value)
        return None
async def set_state(self, key: str, value: Any) -> None:  # noqa: ANN401
    """Insert or update a state entry, storing *value* as serialized JSON."""
    async with self.session_factory() as session:
        # Check if an entry already exists for this key.
        statement = select(State).where(State.key == key)
        result = await session.execute(statement)
        # Fix: scalar_one_or_none() is the real SQLAlchemy API; the
        # previous scalar_one_none() raised AttributeError at runtime.
        state = result.scalar_one_or_none()
        value_json = json.dumps(value)
        if state:
            # Update the existing row in place.
            state.value = value_json
            session.add(state)
        else:
            new_state = State(key=key, value=value_json)
            session.add(new_state)
        await session.commit()

View File

@@ -1,19 +1,19 @@
from typing import Any, Optional
from pathlib import Path
import jwt
from fastapi import HTTPException, status, Request
from fastapi.security import OAuth2PasswordBearer
from decnet.web.auth import ALGORITHM, SECRET_KEY
from decnet.web.db.sqlite.repository import SQLiteRepository
from decnet.web.db.repository import BaseRepository
from decnet.web.db.factory import get_repository
# Root directory for database
_ROOT_DIR = Path(__file__).parent.parent.parent.absolute()
DB_PATH = _ROOT_DIR / "decnet.db"
# Shared repository singleton
repo: BaseRepository = get_repository()
# Shared repository instance
repo = SQLiteRepository(db_path=str(DB_PATH))
def get_repo() -> BaseRepository:
"""FastAPI dependency to inject the configured repository."""
return repo
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/v1/auth/login")
@@ -53,7 +53,7 @@ async def get_current_user(request: Request) -> str:
detail="Could not validate credentials",
headers={"WWW-Authenticate": "Bearer"},
)
auth_header = request.headers.get("Authorization")
token: str | None = (
auth_header.split(" ", 1)[1]

View File

@@ -21,7 +21,7 @@ async def log_ingestion_worker(repo: BaseRepository) -> None:
_json_log_path: Path = Path(_base_log_file).with_suffix(".json")
_position: int = 0
logger.info(f"Starting JSON log ingestion from {_json_log_path}")
while True:
@@ -29,24 +29,24 @@ async def log_ingestion_worker(repo: BaseRepository) -> None:
if not _json_log_path.exists():
await asyncio.sleep(2)
continue
_stat: os.stat_result = _json_log_path.stat()
if _stat.st_size < _position:
# File rotated or truncated
_position = 0
if _stat.st_size == _position:
# No new data
await asyncio.sleep(1)
continue
with open(_json_log_path, "r", encoding="utf-8", errors="replace") as _f:
_f.seek(_position)
while True:
_line: str = _f.readline()
if not _line:
break # EOF reached
if not _line.endswith('\n'):
# Partial line read, don't process yet, don't advance position
break
@@ -58,14 +58,19 @@ async def log_ingestion_worker(repo: BaseRepository) -> None:
except json.JSONDecodeError:
logger.error(f"Failed to decode JSON log line: {_line}")
continue
# Update position after successful line read
_position = _f.tell()
except Exception as _e:
_err_str = str(_e).lower()
if "no such table" in _err_str or "no active connection" in _err_str or "connection closed" in _err_str:
logger.error(f"Post-shutdown or fatal DB error in ingester: {_e}")
break # Exit worker — DB is gone or uninitialized
logger.error(f"Error in log ingestion worker: {_e}")
await asyncio.sleep(5)
await asyncio.sleep(1)
@@ -78,7 +83,7 @@ async def _extract_bounty(repo: BaseRepository, log_data: dict[str, Any]) -> Non
# 1. Credentials (User/Pass)
_user = _fields.get("username")
_pass = _fields.get("password")
if _user and _pass:
await repo.add_bounty({
"decky": log_data.get("decky"),
@@ -90,5 +95,5 @@ async def _extract_bounty(repo: BaseRepository, log_data: dict[str, Any]) -> Non
"password": _pass
}
})
# 2. Add more extractors here later (e.g. file hashes, crypto keys)

View File

@@ -12,7 +12,11 @@ router = APIRouter()
@router.post(
"/auth/change-password",
tags=["Authentication"],
responses={401: {"description": "Invalid or expired token / wrong old password"}, 422: {"description": "Validation error"}},
responses={
400: {"description": "Bad Request (e.g. malformed JSON)"},
401: {"description": "Could not validate credentials"},
422: {"description": "Validation error"}
},
)
async def change_password(request: ChangePasswordRequest, current_user: str = Depends(get_current_user)) -> dict[str, str]:
_user: Optional[dict[str, Any]] = await repo.get_user_by_uuid(current_user)
@@ -21,7 +25,7 @@ async def change_password(request: ChangePasswordRequest, current_user: str = De
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Incorrect old password",
)
_new_hash: str = get_password_hash(request.new_password)
await repo.update_user_password(current_user, _new_hash, must_change_password=False)
return {"message": "Password updated successfully"}

View File

@@ -18,7 +18,11 @@ router = APIRouter()
"/auth/login",
response_model=Token,
tags=["Authentication"],
responses={401: {"description": "Incorrect username or password"}, 422: {"description": "Validation error"}},
responses={
400: {"description": "Bad Request (e.g. malformed JSON)"},
401: {"description": "Incorrect username or password"},
422: {"description": "Validation error"}
},
)
async def login(request: LoginRequest) -> dict[str, Any]:
_user: Optional[dict[str, Any]] = await repo.get_user_by_username(request.username)
@@ -35,7 +39,7 @@ async def login(request: LoginRequest) -> dict[str, Any]:
data={"uuid": _user["uuid"]}, expires_delta=_access_token_expires
)
return {
"access_token": _access_token,
"access_token": _access_token,
"token_type": "bearer", # nosec B105
"must_change_password": bool(_user.get("must_change_password", False))
}

View File

@@ -9,7 +9,7 @@ router = APIRouter()
@router.get("/bounty", response_model=BountyResponse, tags=["Bounty Vault"],
responses={401: {"description": "Not authenticated"}, 422: {"description": "Validation error"}},)
responses={401: {"description": "Could not validate credentials"}, 422: {"description": "Validation error"}},)
async def get_bounties(
limit: int = Query(50, ge=1, le=1000),
offset: int = Query(0, ge=0),

View File

@@ -3,17 +3,21 @@ import os
from fastapi import APIRouter, Depends, HTTPException
from decnet.config import DEFAULT_MUTATE_INTERVAL, DecnetConfig, load_state
from decnet.config import DEFAULT_MUTATE_INTERVAL, DecnetConfig, _ROOT
from decnet.engine import deploy as _deploy
from decnet.ini_loader import load_ini_from_string
from decnet.network import detect_interface, detect_subnet, get_host_ip
from decnet.web.dependencies import get_current_user
from decnet.web.dependencies import get_current_user, repo
from decnet.web.db.models import DeployIniRequest
router = APIRouter()
@router.post("/deckies/deploy", tags=["Fleet Management"])
@router.post(
"/deckies/deploy",
tags=["Fleet Management"],
responses={401: {"description": "Could not validate credentials"}, 400: {"description": "Validation error or INI parsing failed"}, 500: {"description": "Deployment failed"}}
)
async def api_deploy_deckies(req: DeployIniRequest, current_user: str = Depends(get_current_user)) -> dict[str, str]:
from decnet.fleet import build_deckies_from_ini
@@ -22,11 +26,11 @@ async def api_deploy_deckies(req: DeployIniRequest, current_user: str = Depends(
except Exception as e:
raise HTTPException(status_code=400, detail=f"Failed to parse INI: {e}")
state = load_state()
state_dict = await repo.get_state("deployment")
ingest_log_file = os.environ.get("DECNET_INGEST_LOG_FILE")
if state:
config, _ = state
if state_dict:
config = DecnetConfig(**state_dict["config"])
subnet_cidr = ini.subnet or config.subnet
gateway = ini.gateway or config.gateway
host_ip = get_host_ip(config.interface)
@@ -66,12 +70,20 @@ async def api_deploy_deckies(req: DeployIniRequest, current_user: str = Depends(
existing_deckies_map = {d.name: d for d in config.deckies}
for new_decky in new_decky_configs:
existing_deckies_map[new_decky.name] = new_decky
config.deckies = list(existing_deckies_map.values())
# We call deploy(config) which regenerates docker-compose and runs `up -d --remove-orphans`.
try:
_deploy(config)
if os.environ.get("DECNET_CONTRACT_TEST") != "true":
_deploy(config)
# Persist new state to DB
new_state_payload = {
"config": config.model_dump(),
"compose_path": str(_ROOT / "docker-compose.yml") if not state_dict else state_dict["compose_path"]
}
await repo.set_state("deployment", new_state_payload)
except Exception as e:
logging.getLogger("decnet.web.api").exception("Deployment failed: %s", e)
raise HTTPException(status_code=500, detail="Deployment failed. Check server logs for details.")

View File

@@ -8,6 +8,6 @@ router = APIRouter()
@router.get("/deckies", tags=["Fleet Management"],
responses={401: {"description": "Not authenticated"}, 422: {"description": "Validation error"}},)
responses={401: {"description": "Could not validate credentials"}, 422: {"description": "Validation error"}},)
async def get_deckies(current_user: str = Depends(get_current_user)) -> list[dict[str, Any]]:
return await repo.get_deckies()

View File

@@ -1,17 +1,25 @@
import os
from fastapi import APIRouter, Depends, HTTPException, Path
from decnet.mutator import mutate_decky
from decnet.web.dependencies import get_current_user
from decnet.web.dependencies import get_current_user, repo
router = APIRouter()
@router.post("/deckies/{decky_name}/mutate", tags=["Fleet Management"])
@router.post(
"/deckies/{decky_name}/mutate",
tags=["Fleet Management"],
responses={401: {"description": "Could not validate credentials"}, 404: {"description": "Decky not found"}}
)
async def api_mutate_decky(
decky_name: str = Path(..., pattern=r"^[a-z0-9\-]{1,64}$"),
current_user: str = Depends(get_current_user),
) -> dict[str, str]:
success = mutate_decky(decky_name)
if os.environ.get("DECNET_CONTRACT_TEST") == "true":
return {"message": f"Successfully mutated {decky_name} (Contract Test Mock)"}
success = await mutate_decky(decky_name, repo=repo)
if success:
return {"message": f"Successfully mutated {decky_name}"}
raise HTTPException(status_code=404, detail=f"Decky {decky_name} not found or failed to mutate")

View File

@@ -1,22 +1,33 @@
from fastapi import APIRouter, Depends, HTTPException
from decnet.config import load_state, save_state
from decnet.web.dependencies import get_current_user
from decnet.config import DecnetConfig
from decnet.web.dependencies import get_current_user, repo
from decnet.web.db.models import MutateIntervalRequest
router = APIRouter()
@router.put("/deckies/{decky_name}/mutate-interval", tags=["Fleet Management"],
responses={401: {"description": "Not authenticated"}, 422: {"description": "Validation error"}},)
responses={
400: {"description": "No active deployment found"},
401: {"description": "Could not validate credentials"},
404: {"description": "Decky not found"},
422: {"description": "Validation error"}
},
)
async def api_update_mutate_interval(decky_name: str, req: MutateIntervalRequest, current_user: str = Depends(get_current_user)) -> dict[str, str]:
state = load_state()
if not state:
raise HTTPException(status_code=500, detail="No active deployment")
config, compose_path = state
state_dict = await repo.get_state("deployment")
if not state_dict:
raise HTTPException(status_code=400, detail="No active deployment")
config = DecnetConfig(**state_dict["config"])
compose_path = state_dict["compose_path"]
decky = next((d for d in config.deckies if d.name == decky_name), None)
if not decky:
raise HTTPException(status_code=404, detail="Decky not found")
decky.mutate_interval = req.mutate_interval
save_state(config, compose_path)
await repo.set_state("deployment", {"config": config.model_dump(), "compose_path": compose_path})
return {"message": "Mutation interval updated"}

View File

@@ -8,7 +8,7 @@ router = APIRouter()
@router.get("/logs/histogram", tags=["Logs"],
responses={401: {"description": "Not authenticated"}, 422: {"description": "Validation error"}},)
responses={401: {"description": "Could not validate credentials"}, 422: {"description": "Validation error"}},)
async def get_logs_histogram(
search: Optional[str] = None,
start_time: Optional[str] = None,

View File

@@ -7,10 +7,11 @@ from decnet.web.db.models import LogsResponse
router = APIRouter()
_DATETIME_RE = r"^\d{4}-\d{2}-\d{2}[ T]\d{2}:\d{2}:\d{2}$"
_DATETIME_RE = r"^(\d{4}-\d{2}-\d{2}[ T]\d{2}:\d{2}:\d{2})?$"
@router.get("/logs", response_model=LogsResponse, tags=["Logs"])
@router.get("/logs", response_model=LogsResponse, tags=["Logs"],
responses={401: {"description": "Could not validate credentials"}, 422: {"description": "Validation error"}})
async def get_logs(
limit: int = Query(50, ge=1, le=1000),
offset: int = Query(0, ge=0),

View File

@@ -9,6 +9,6 @@ router = APIRouter()
@router.get("/stats", response_model=StatsResponse, tags=["Observability"],
responses={401: {"description": "Not authenticated"}, 422: {"description": "Validation error"}},)
responses={401: {"description": "Could not validate credentials"}, 422: {"description": "Validation error"}},)
async def get_stats(current_user: str = Depends(get_current_user)) -> dict[str, Any]:
return await repo.get_stats_summary()

View File

@@ -6,6 +6,7 @@ from typing import AsyncGenerator, Optional
from fastapi import APIRouter, Depends, Query, Request
from fastapi.responses import StreamingResponse
from decnet.env import DECNET_DEVELOPER
from decnet.web.dependencies import get_stream_user, repo
log = logging.getLogger(__name__)
@@ -14,20 +15,30 @@ router = APIRouter()
@router.get("/stream", tags=["Observability"],
responses={401: {"description": "Not authenticated"}, 422: {"description": "Validation error"}},)
responses={
200: {
"content": {"text/event-stream": {}},
"description": "Real-time Server-Sent Events (SSE) stream"
},
401: {"description": "Could not validate credentials"},
422: {"description": "Validation error"}
},
)
async def stream_events(
request: Request,
last_event_id: int = Query(0, alias="lastEventId"),
request: Request,
last_event_id: int = Query(0, alias="lastEventId"),
search: Optional[str] = None,
start_time: Optional[str] = None,
end_time: Optional[str] = None,
max_output: Optional[int] = Query(None, alias="maxOutput"),
current_user: str = Depends(get_stream_user)
) -> StreamingResponse:
async def event_generator() -> AsyncGenerator[str, None]:
last_id = last_event_id
stats_interval_sec = 10
loops_since_stats = 0
emitted_chunks = 0
try:
if last_id == 0:
last_id = await repo.get_max_log_id()
@@ -42,6 +53,12 @@ async def stream_events(
yield f"event: message\ndata: {json.dumps({'type': 'histogram', 'data': histogram})}\n\n"
while True:
if DECNET_DEVELOPER and max_output is not None:
emitted_chunks += 1
if emitted_chunks > max_output:
log.debug("Developer mode: max_output reached (%d), closing stream", max_output)
break
if await request.is_disconnected():
break
@@ -65,6 +82,7 @@ async def stream_events(
loops_since_stats = 0
loops_since_stats += 1
await asyncio.sleep(1)
except asyncio.CancelledError:
pass

View File

@@ -51,6 +51,7 @@ decnet = "decnet.cli:app"
[tool.pytest.ini_options]
asyncio_mode = "auto"
asyncio_debug = "true"
addopts = "-m 'not fuzz and not live' -v -q -x -n logical"
markers = [
"fuzz: hypothesis-based fuzz tests (slow, run with -m fuzz or -m '' for all)",

30
ruff.toml Normal file
View File

@@ -0,0 +1,30 @@
# Ruff configuration for DECNET: target runtime, lint scope, rule selection,
# and per-path strictness overrides.
target-version = "py314" # DECNET's target Python version
# Paths never linted at all (generated templates, scratch work, tests are
# handled separately via per-file ignores below).
exclude = [
"tests/**",
"templates/**",
"development/**",
]
[lint]
# Select a wide range of rules
select = [
"F", # Pyflakes: Catches undefined names (F821) and unused variables (F841)
"ANN", # Enforces type annotations on functions and methods
"RUF", # Includes the RUF045 rule for dataclass attributes
"E", # Pycodestyle errors
"W", # Pycodestyle warnings
]
# Ignore specific rules that might be too strict for now
ignore = [
"E501", # Line too long
]
[lint.extend-per-file-ignores]
# Apply strict rules only to the core codebase (empty list = no extra ignores)
"decnet/**/*.py" = []
# Everywhere else is more relaxed: annotations and RUF checks optional
"**/*.py" = ["ANN", "RUF"]
"tests/**/*.py" = ["ANN", "RUF", "E", "W"]

1549
schemat Normal file

File diff suppressed because it is too large Load Diff

6
schemathesis.toml Normal file
View File

@@ -0,0 +1,6 @@
request-timeout = 5.0
[[operations]]
# Target your SSE endpoint specifically
include-path = "/stream"
request-timeout = 2.0

View File

@@ -29,12 +29,12 @@ def _log(event_type: str, severity: int = 6, **kwargs) -> None:
def _setup_bait_fs() -> str:
bait_dir = Path("/tmp/ftp_bait")
bait_dir.mkdir(parents=True, exist_ok=True)
(bait_dir / "backup.tar.gz").write_bytes(b"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00")
(bait_dir / "db_dump.sql").write_text("CREATE TABLE users (id INT, username VARCHAR(50), password VARCHAR(50));\nINSERT INTO users VALUES (1, 'admin', 'pbkdf2:sha256:5000$...');\n")
(bait_dir / "config.ini").write_text("[database]\nuser = dbadmin\npassword = db_super_admin_pass_!\nhost = localhost\n")
(bait_dir / "credentials.txt").write_text("admin:super_secret_admin_pw\nroot:toor\nalice:wonderland\n")
return str(bait_dir)
class ServerFTP(FTP):

View File

@@ -97,7 +97,7 @@ def _publish(topic: str, value: str, retain: bool = True) -> bytes:
payload = str(value).encode()
fixed = 0x31 if retain else 0x30
remaining = len(topic_len) + len(topic_bytes) + len(payload)
# variable length encoding
rem_bytes = []
while remaining > 0:
@@ -108,7 +108,7 @@ def _publish(topic: str, value: str, retain: bool = True) -> bytes:
rem_bytes.append(encoded)
if not rem_bytes:
rem_bytes = [0]
return bytes([fixed]) + bytes(rem_bytes) + topic_len + topic_bytes + payload
@@ -132,7 +132,7 @@ def _generate_topics() -> dict:
return topics
except Exception as e:
_log("config_error", severity=4, error=str(e))
if MQTT_PERSONA == "water_plant":
topics.update({
"plant/water/tank1/level": f"{random.uniform(60.0, 80.0):.1f}",
@@ -186,7 +186,7 @@ class MQTTProtocol(asyncio.Protocol):
pkt_type = (pkt_byte >> 4) & 0x0f
flags = pkt_byte & 0x0f
qos = (flags >> 1) & 0x03
# Decode remaining length (variable-length encoding)
pos = 1
remaining = 0
@@ -225,7 +225,7 @@ class MQTTProtocol(asyncio.Protocol):
packet_id, subs = _parse_subscribe(payload)
granted_qos = [1] * len(subs) # grant QoS 1 for all
self._transport.write(_suback(packet_id, granted_qos))
# Immediately send retained publishes matching topics
for sub_topic, _ in subs:
_log("subscribe", src=self._peer[0], topics=[sub_topic])
@@ -245,11 +245,11 @@ class MQTTProtocol(asyncio.Protocol):
topic, packet_id, data = _parse_publish(payload, qos)
# Attacker command received!
_log("publish", src=self._peer[0], topic=topic, payload=data.decode(errors="replace"))
if qos == 1:
puback = bytes([0x40, 0x02]) + struct.pack(">H", packet_id)
self._transport.write(puback)
elif pkt_type == 12: # PINGREQ
self._transport.write(b"\xd0\x00") # PINGRESP
elif pkt_type == 14: # DISCONNECT

View File

@@ -156,7 +156,7 @@ class RedisProtocol(asyncio.Protocol):
elif pattern != '*':
pat = pattern.encode()
keys = [k for k in keys if k == pat]
resp = f"*{len(keys)}\r\n".encode() + b"".join(_bulk(k.decode()) for k in keys)
self._transport.write(resp)
elif verb == "GET":

View File

@@ -45,7 +45,7 @@ def _log(event_type: str, severity: int = 6, **kwargs) -> None:
def _rand_msg_id() -> str:
"""Return a Postfix-style 12-char alphanumeric queue ID."""
chars = string.ascii_uppercase + string.digits
return "".join(random.choices(chars, k=12)) # noqa: S311
return "".join(random.choices(chars, k=12))
def _decode_auth_plain(blob: str) -> tuple[str, str]:

View File

@@ -153,11 +153,11 @@ def _parse_snmp(data: bytes):
# PDU type (0xa0 = GetRequest, 0xa1 = GetNextRequest)
if pos >= len(data):
raise ValueError("Missing PDU type")
pdu_type = data[pos]
if pdu_type not in (0xa0, 0xa1):
raise ValueError(f"Invalid PDU type {pdu_type}")
pos += 1
_, pos = _read_ber_length(data, pos)
# request-id

View File

@@ -155,13 +155,13 @@ def write_syslog_file(line: str) -> None:
"""Append a syslog line to the rotating log file."""
try:
_get_file_logger().info(line)
# Also parse and write JSON log
import json
import re
from datetime import datetime
from typing import Optional, Any
from typing import Optional
_RFC5424_RE: re.Pattern = re.compile(
r"^<\d+>1 "
r"(\S+) " # 1: TIMESTAMP
@@ -174,7 +174,7 @@ def write_syslog_file(line: str) -> None:
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
_m: Optional[re.Match] = _RFC5424_RE.match(line)
if _m:
_ts_raw: str
@@ -183,10 +183,10 @@ def write_syslog_file(line: str) -> None:
_event_type: str
_sd_rest: str
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
_fields: dict[str, str] = {}
_msg: str = ""
if _sd_rest.startswith("-"):
_msg = _sd_rest[1:].lstrip()
elif _sd_rest.startswith("["):
@@ -194,27 +194,27 @@ def write_syslog_file(line: str) -> None:
if _block:
for _k, _v in _PARAM_RE.findall(_block.group(1)):
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
# extract msg after the block
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
if _msg_match:
_msg = _msg_match.group(1).strip()
else:
_msg = _sd_rest
_attacker_ip: str = "Unknown"
for _fname in _IP_FIELDS:
if _fname in _fields:
_attacker_ip = _fields[_fname]
break
# Parse timestamp to normalize it
_ts_formatted: str
try:
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
except ValueError:
_ts_formatted = _ts_raw
_payload: dict[str, Any] = {
"timestamp": _ts_formatted,
"decky": _decky,
@@ -226,7 +226,7 @@ def write_syslog_file(line: str) -> None:
"raw_line": line
}
_get_json_logger().info(json.dumps(_payload))
except Exception:
pass

View File

@@ -1,11 +1,9 @@
"""
Tests for the mutate interval API endpoint.
"""
import pytest
import httpx
from unittest.mock import patch
from pathlib import Path
from unittest.mock import patch, AsyncMock
from decnet.config import DeckyConfig, DecnetConfig
@@ -37,19 +35,20 @@ class TestMutateInterval:
@pytest.mark.asyncio
async def test_no_active_deployment(self, client: httpx.AsyncClient, auth_token: str):
with patch("decnet.web.router.fleet.api_mutate_interval.load_state", return_value=None):
with patch("decnet.web.router.fleet.api_mutate_interval.repo", new_callable=AsyncMock) as mock_repo:
mock_repo.get_state.return_value = None
resp = await client.put(
"/api/v1/deckies/decky-01/mutate-interval",
headers={"Authorization": f"Bearer {auth_token}"},
json={"mutate_interval": 60},
)
assert resp.status_code == 500
assert resp.status_code == 400
@pytest.mark.asyncio
async def test_decky_not_found(self, client: httpx.AsyncClient, auth_token: str):
config = _config()
with patch("decnet.web.router.fleet.api_mutate_interval.load_state",
return_value=(config, Path("test.yml"))):
with patch("decnet.web.router.fleet.api_mutate_interval.repo", new_callable=AsyncMock) as mock_repo:
mock_repo.get_state.return_value = {"config": config.model_dump(), "compose_path": "c.yml"}
resp = await client.put(
"/api/v1/deckies/nonexistent/mutate-interval",
headers={"Authorization": f"Bearer {auth_token}"},
@@ -60,30 +59,26 @@ class TestMutateInterval:
@pytest.mark.asyncio
async def test_successful_interval_update(self, client: httpx.AsyncClient, auth_token: str):
config = _config()
with patch("decnet.web.router.fleet.api_mutate_interval.load_state",
return_value=(config, Path("test.yml"))):
with patch("decnet.web.router.fleet.api_mutate_interval.save_state") as mock_save:
resp = await client.put(
"/api/v1/deckies/decky-01/mutate-interval",
headers={"Authorization": f"Bearer {auth_token}"},
json={"mutate_interval": 120},
)
with patch("decnet.web.router.fleet.api_mutate_interval.repo", new_callable=AsyncMock) as mock_repo:
mock_repo.get_state.return_value = {"config": config.model_dump(), "compose_path": "c.yml"}
resp = await client.put(
"/api/v1/deckies/decky-01/mutate-interval",
headers={"Authorization": f"Bearer {auth_token}"},
json={"mutate_interval": 120},
)
assert resp.status_code == 200
assert resp.json()["message"] == "Mutation interval updated"
mock_save.assert_called_once()
# Verify the interval was actually updated on the decky config
assert config.deckies[0].mutate_interval == 120
mock_repo.set_state.assert_awaited_once()
@pytest.mark.asyncio
async def test_null_interval_removes_mutation(self, client: httpx.AsyncClient, auth_token: str):
config = _config()
with patch("decnet.web.router.fleet.api_mutate_interval.load_state",
return_value=(config, Path("test.yml"))):
with patch("decnet.web.router.fleet.api_mutate_interval.save_state"):
resp = await client.put(
"/api/v1/deckies/decky-01/mutate-interval",
headers={"Authorization": f"Bearer {auth_token}"},
json={"mutate_interval": None},
)
with patch("decnet.web.router.fleet.api_mutate_interval.repo", new_callable=AsyncMock) as mock_repo:
mock_repo.get_state.return_value = {"config": config.model_dump(), "compose_path": "c.yml"}
resp = await client.put(
"/api/v1/deckies/decky-01/mutate-interval",
headers={"Authorization": f"Bearer {auth_token}"},
json={"mutate_interval": None},
)
assert resp.status_code == 200
assert config.deckies[0].mutate_interval is None
mock_repo.set_state.assert_awaited_once()

View File

@@ -33,11 +33,11 @@ async def test_fuzz_get_logs(client: httpx.AsyncClient, auth_token: str, limit:
_params: dict[str, Any] = {"limit": limit, "offset": offset}
if search is not None:
_params["search"] = search
_response: httpx.Response = await client.get(
"/api/v1/logs",
params=_params,
headers={"Authorization": f"Bearer {auth_token}"}
)
assert _response.status_code in (200, 422)

View File

@@ -9,13 +9,13 @@ import pytest
from datetime import datetime, timedelta
from freezegun import freeze_time
from hypothesis import given, settings, strategies as st
from decnet.web.db.sqlite.repository import SQLiteRepository
from decnet.web.db.factory import get_repository
from ..conftest import _FUZZ_SETTINGS
@pytest.fixture
def repo(tmp_path):
return SQLiteRepository(db_path=str(tmp_path / "histogram_test.db"))
return get_repository(db_path=str(tmp_path / "histogram_test.db"))
def _log(decky="d", service="ssh", ip="1.2.3.4", timestamp=None):

View File

@@ -21,7 +21,7 @@ class TestStreamEvents:
# We force the generator to exit immediately by making the first awaitable raise
with patch("decnet.web.router.stream.api_stream_events.repo") as mock_repo:
mock_repo.get_max_log_id = AsyncMock(side_effect=StopAsyncIteration)
# This will hit the 'except Exception' or just exit the generator
resp = await client.get(
"/api/v1/stream",

View File

@@ -1,18 +1,17 @@
"""
Direct async tests for SQLiteRepository.
These exercise the DB layer without going through the HTTP stack,
covering DEBT-006 (zero test coverage on the database layer).
Direct async tests for the configured Repository implementation.
These exercise the DB layer without going through the HTTP stack.
"""
import json
import pytest
from hypothesis import given, settings, strategies as st
from decnet.web.db.sqlite.repository import SQLiteRepository
from decnet.web.db.factory import get_repository
from .conftest import _FUZZ_SETTINGS
@pytest.fixture
def repo(tmp_path):
return SQLiteRepository(db_path=str(tmp_path / "test.db"))
return get_repository(db_path=str(tmp_path / "test.db"))
@pytest.mark.anyio

View File

@@ -11,16 +11,82 @@ replace the checks list with the default (remove the argument) for full complian
Requires DECNET_DEVELOPER=true (set in tests/conftest.py) to expose /openapi.json.
"""
import pytest
import schemathesis
from hypothesis import settings
from schemathesis.checks import not_a_server_error
from decnet.web.api import app
import schemathesis as st
from hypothesis import settings, Verbosity
from decnet.web.auth import create_access_token
schema = schemathesis.openapi.from_asgi("/openapi.json", app)
import subprocess
import socket
import sys
import atexit
import os
import time
from pathlib import Path
# Configuration for the automated live server
LIVE_PORT = 8008
LIVE_SERVER_URL = f"http://127.0.0.1:{LIVE_PORT}"
TEST_SECRET = "test-secret-for-automated-fuzzing"
# Standardize the secret for the test process too so tokens can be verified
import decnet.web.auth
decnet.web.auth.SECRET_KEY = TEST_SECRET
# Create a valid token for an admin-like user
TEST_TOKEN = create_access_token({"uuid": "00000000-0000-0000-0000-000000000001"})
@st.hook
def before_call(context, case, *args):
    """Schemathesis hook: attach a valid admin bearer token to every request.

    Without this, fuzzed calls would uniformly fail with 401 and never reach
    the handlers under test. ``TEST_TOKEN`` is the module-level token minted
    with the standardized test secret.
    """
    # Logged-in admin for all requests
    case.headers = case.headers or {}
    case.headers["Authorization"] = f"Bearer {TEST_TOKEN}"
def wait_for_port(port, timeout=10):
    """Poll 127.0.0.1:*port* until a TCP connect succeeds or *timeout* elapses.

    Probes roughly every 0.2 s. Returns True as soon as the port accepts a
    connection, False if the deadline passes first.
    """
    deadline = time.time() + timeout
    while time.time() < deadline:
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            if probe.connect_ex(("127.0.0.1", port)) == 0:
                return True
        finally:
            probe.close()
        time.sleep(0.2)
    return False
def start_automated_server():
    """Launch a real uvicorn instance of the DECNET API on LIVE_PORT.

    The child process is forced into developer/contract-test mode and given
    the same JWT secret as this test process, so tokens minted here validate
    there. The process is registered for termination at interpreter exit.

    Returns:
        The running ``subprocess.Popen`` handle.

    Raises:
        RuntimeError: if the port never starts accepting connections within
            the ``wait_for_port`` default timeout.
    """
    # Use the current venv's uvicorn
    uvicorn_bin = "uvicorn" if os.name != "nt" else "uvicorn.exe"
    uvicorn_path = str(Path(sys.executable).parent / uvicorn_bin)
    # Force developer and contract test modes for the sub-process
    env = os.environ.copy()
    env["DECNET_DEVELOPER"] = "true"
    env["DECNET_CONTRACT_TEST"] = "true"
    env["DECNET_JWT_SECRET"] = TEST_SECRET
    # Server output is discarded; startup failure surfaces via the port probe.
    proc = subprocess.Popen(
        [uvicorn_path, "decnet.web.api:app", "--host", "127.0.0.1", "--port", str(LIVE_PORT), "--log-level", "error"],
        env=env,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL
    )
    # Register cleanup
    atexit.register(proc.terminate)
    if not wait_for_port(LIVE_PORT):
        proc.terminate()
        raise RuntimeError(f"Automated server failed to start on port {LIVE_PORT}")
    return proc
# Stir up the server!
_server_proc = start_automated_server()
# Now Schemathesis can pull the schema from the real network port
schema = st.openapi.from_url(f"{LIVE_SERVER_URL}/openapi.json")
@pytest.mark.fuzz
@schemathesis.pytest.parametrize(api=schema)
@settings(max_examples=5, deadline=None)
@st.pytest.parametrize(api=schema)
@settings(max_examples=3000, deadline=None, verbosity=Verbosity.debug)
def test_schema_compliance(case):
case.call_and_validate(checks=[not_a_server_error])
#print(f"\n[Fuzzing] {case.method} {case.path} with query={case.query}")
case.call_and_validate()
#print(f" └─ Success")

View File

@@ -6,6 +6,15 @@ any test file imports decnet.* — pytest loads conftest.py first.
"""
import os
os.environ.setdefault("DECNET_JWT_SECRET", "test-jwt-secret-not-for-production-use")
# Expose OpenAPI schema so schemathesis can load it during tests
os.environ.setdefault("DECNET_DEVELOPER", "true")
os.environ["DECNET_JWT_SECRET"] = "stable-test-secret-key-at-least-32-chars-long"
os.environ["DECNET_ADMIN_PASSWORD"] = "test-password-123"
os.environ["DECNET_DEVELOPER"] = "true"
os.environ["DECNET_DB_TYPE"] = "sqlite"
import pytest
from typing import Any
@pytest.fixture(autouse=True)
def standardize_auth_secret(monkeypatch: Any) -> None:
import decnet.web.auth
monkeypatch.setattr(decnet.web.auth, "SECRET_KEY", os.environ["DECNET_JWT_SECRET"])

View File

@@ -91,7 +91,7 @@ def _publish_packet(topic: str, payload: str, qos: int = 1, pid: int = 1) -> byt
packet_payload = len(topic_bytes).to_bytes(2, "big") + topic_bytes + pid.to_bytes(2, "big") + payload_bytes
else:
packet_payload = len(topic_bytes).to_bytes(2, "big") + topic_bytes + payload_bytes
return bytes([byte0, len(packet_payload)]) + packet_payload
def _pingreq_packet() -> bytes:
@@ -128,10 +128,10 @@ def test_subscribe_wildcard_retained(mqtt_mod):
written.clear()
_send(proto, _subscribe_packet("plant/#"))
assert len(written) >= 2 # At least SUBACK + some publishes
assert written[0].startswith(b"\x90") # SUBACK
combined = b"".join(written[1:])
# Should contain some water plant topics
assert b"plant/water/tank1/level" in combined

View File

@@ -50,10 +50,10 @@ def _make_protocol(mod):
proto = mod.SNMPProtocol()
transport = MagicMock()
sent: list[tuple] = []
def sendto(data, addr):
sent.append((data, addr))
transport.sendto = sendto
proto.connection_made(transport)
sent.clear()
@@ -104,11 +104,11 @@ def test_sysdescr_default(snmp_default):
proto, transport, sent = _make_protocol(snmp_default)
packet = _get_request_packet("public", 1, SYS_DESCR_OID_ENC)
_send(proto, packet)
assert len(sent) == 1
resp, addr = sent[0]
assert addr == ("127.0.0.1", 12345)
# default sysDescr has "Ubuntu SMP" in it
assert b"Ubuntu SMP" in resp
@@ -116,10 +116,10 @@ def test_sysdescr_water_plant(snmp_water_plant):
proto, transport, sent = _make_protocol(snmp_water_plant)
packet = _get_request_packet("public", 2, SYS_DESCR_OID_ENC)
_send(proto, packet)
assert len(sent) == 1
resp, _ = sent[0]
assert b"Debian" in resp
# ── Negative Tests ────────────────────────────────────────────────────────────

View File

@@ -19,6 +19,8 @@ class DummyRepo(BaseRepository):
async def add_bounty(self, d): await super().add_bounty(d)
async def get_bounties(self, **kw): await super().get_bounties(**kw)
async def get_total_bounties(self, **kw): await super().get_total_bounties(**kw)
async def get_state(self, k): await super().get_state(k)
async def set_state(self, k, v): await super().set_state(k, v)
@pytest.mark.asyncio
async def test_base_repo_coverage():
@@ -37,3 +39,5 @@ async def test_base_repo_coverage():
await dr.add_bounty({})
await dr.get_bounties()
await dr.get_total_bounties()
await dr.get_state("k")
await dr.set_state("k", "v")

View File

@@ -7,7 +7,7 @@ from types import SimpleNamespace
from unittest.mock import patch, MagicMock
from decnet.collector import parse_rfc5424, is_service_container, is_service_event
from decnet.collector.worker import (
_stream_container,
_stream_container,
_load_service_container_names,
log_collector_worker
)
@@ -291,13 +291,13 @@ class TestLogCollectorWorker:
@pytest.mark.asyncio
async def test_worker_initial_discovery(self, tmp_path):
log_file = str(tmp_path / "decnet.log")
mock_container = MagicMock()
mock_container.id = "c1"
mock_container.name = "/s-1"
# Mock labels to satisfy is_service_container
mock_container.labels = {"com.docker.compose.project": "decnet"}
mock_client = MagicMock()
mock_client.containers.list.return_value = [mock_container]
# Make events return an empty generator/iterator immediately
@@ -310,17 +310,17 @@ class TestLogCollectorWorker:
await asyncio.wait_for(log_collector_worker(log_file), timeout=0.1)
except (asyncio.TimeoutError, StopIteration):
pass
# Should have tried to list and watch events
mock_client.containers.list.assert_called_once()
@pytest.mark.asyncio
async def test_worker_handles_events(self, tmp_path):
log_file = str(tmp_path / "decnet.log")
mock_client = MagicMock()
mock_client.containers.list.return_value = []
event = {
"id": "c2",
"Actor": {"Attributes": {"name": "s-2", "com.docker.compose.project": "decnet"}}
@@ -333,7 +333,7 @@ class TestLogCollectorWorker:
await asyncio.wait_for(log_collector_worker(log_file), timeout=0.1)
except (asyncio.TimeoutError, StopIteration):
pass
mock_client.events.assert_called_once()
@pytest.mark.asyncio
@@ -341,7 +341,7 @@ class TestLogCollectorWorker:
log_file = str(tmp_path / "decnet.log")
mock_client = MagicMock()
mock_client.containers.list.side_effect = Exception("Docker down")
with patch("docker.from_env", return_value=mock_client):
# Should not raise
await log_collector_worker(log_file)

View File

@@ -2,10 +2,9 @@
Tests for decnet.mutator — mutation engine, retry logic, due-time scheduling.
All subprocess and state I/O is mocked; no Docker or filesystem access.
"""
import subprocess
import time
from pathlib import Path
from unittest.mock import MagicMock, patch
from unittest.mock import MagicMock, patch, AsyncMock
import pytest
@@ -41,9 +40,131 @@ def _make_config(deckies=None, mutate_interval=30):
mutate_interval=mutate_interval,
)
@pytest.fixture
def mock_repo():
    """Async repository double whose ``get_state`` resolves to None by default.

    Tests override ``get_state.return_value`` to simulate a saved deployment.
    """
    stub = AsyncMock()
    stub.get_state.return_value = None
    return stub
# ---------------------------------------------------------------------------
# _compose_with_retry
# mutate_decky
# ---------------------------------------------------------------------------
@pytest.mark.asyncio
class TestMutateDecky:
    """Unit tests for ``mutate_decky``: state lookup, mutation, persistence.

    The repository is an ``AsyncMock`` (``mock_repo`` fixture); compose
    generation and the docker invocation are patched out, so no real I/O runs.
    """

    def _patch_io(self):
        """Return a context manager that mocks all other I/O in mutate_decky."""
        # NOTE(review): this helper is not referenced by any test in this
        # class — every test builds its own patch stack inline. Candidate
        # for removal (or for adoption by the tests below).
        return (
            patch("decnet.mutator.engine.write_compose"),
            patch("decnet.mutator.engine._compose_with_retry", new_callable=AsyncMock),
        )

    async def test_returns_false_when_no_state(self, mock_repo):
        # No persisted deployment state → mutation is refused.
        mock_repo.get_state.return_value = None
        assert await mutate_decky("decky-01", repo=mock_repo) is False

    async def test_returns_false_when_decky_not_found(self, mock_repo):
        # State exists but the requested decky id is not in the config.
        cfg = _make_config()
        mock_repo.get_state.return_value = {"config": cfg.model_dump(), "compose_path": "c.yml"}
        assert await mutate_decky("nonexistent", repo=mock_repo) is False

    async def test_returns_true_on_success(self, mock_repo):
        cfg = _make_config()
        mock_repo.get_state.return_value = {"config": cfg.model_dump(), "compose_path": "c.yml"}
        # Patch compose writing and the thread off-load of the docker call.
        with patch("decnet.mutator.engine.write_compose"), \
            patch("anyio.to_thread.run_sync", new_callable=AsyncMock):
            assert await mutate_decky("decky-01", repo=mock_repo) is True

    async def test_saves_state_after_mutation(self, mock_repo):
        # A successful mutation must persist the new config via set_state.
        cfg = _make_config()
        mock_repo.get_state.return_value = {"config": cfg.model_dump(), "compose_path": "c.yml"}
        with patch("decnet.mutator.engine.write_compose"), \
            patch("anyio.to_thread.run_sync", new_callable=AsyncMock):
            await mutate_decky("decky-01", repo=mock_repo)
        mock_repo.set_state.assert_awaited_once()

    async def test_regenerates_compose_after_mutation(self, mock_repo):
        # The compose file must be rewritten to reflect the mutated services.
        cfg = _make_config()
        mock_repo.get_state.return_value = {"config": cfg.model_dump(), "compose_path": "c.yml"}
        with patch("decnet.mutator.engine.write_compose") as mock_compose, \
            patch("anyio.to_thread.run_sync", new_callable=AsyncMock):
            await mutate_decky("decky-01", repo=mock_repo)
        mock_compose.assert_called_once()

    async def test_returns_false_on_compose_failure(self, mock_repo):
        # A docker/compose failure is swallowed and reported as False.
        cfg = _make_config()
        mock_repo.get_state.return_value = {"config": cfg.model_dump(), "compose_path": "c.yml"}
        with patch("decnet.mutator.engine.write_compose"), \
            patch("anyio.to_thread.run_sync", side_effect=Exception("docker fail")):
            assert await mutate_decky("decky-01", repo=mock_repo) is False

    async def test_mutation_changes_services(self, mock_repo):
        cfg = _make_config(deckies=[_make_decky(services=["ssh"])])
        mock_repo.get_state.return_value = {"config": cfg.model_dump(), "compose_path": "c.yml"}
        with patch("decnet.mutator.engine.write_compose"), \
            patch("anyio.to_thread.run_sync", new_callable=AsyncMock):
            await mutate_decky("decky-01", repo=mock_repo)
        # Check that set_state was called with a config where services might have changed
        call_args = mock_repo.set_state.await_args[0]
        new_config_dict = call_args[1]["config"]
        new_services = new_config_dict["deckies"][0]["services"]
        # Mutation may leave the set unchanged; we only assert shape invariants.
        assert isinstance(new_services, list)
        assert len(new_services) >= 1

    async def test_updates_last_mutated_timestamp(self, mock_repo):
        # last_mutated must be bumped to (at least) the time of the call.
        cfg = _make_config(deckies=[_make_decky(last_mutated=0.0)])
        mock_repo.get_state.return_value = {"config": cfg.model_dump(), "compose_path": "c.yml"}
        before = time.time()
        with patch("decnet.mutator.engine.write_compose"), \
            patch("anyio.to_thread.run_sync", new_callable=AsyncMock):
            await mutate_decky("decky-01", repo=mock_repo)
        call_args = mock_repo.set_state.await_args[0]
        new_last_mutated = call_args[1]["config"]["deckies"][0]["last_mutated"]
        assert new_last_mutated >= before
# ---------------------------------------------------------------------------
# mutate_all
# ---------------------------------------------------------------------------
@pytest.mark.asyncio
class TestMutateAll:
    """Unit tests for ``mutate_all``: fleet-wide scheduling of mutations.

    ``mutate_decky`` itself is patched out; these tests only verify which
    deckies ``mutate_all`` decides to mutate, based on force/interval/due-time.
    """

    async def test_no_state_returns_early(self, mock_repo):
        # No persisted deployment → nothing to mutate, no calls made.
        mock_repo.get_state.return_value = None
        with patch("decnet.mutator.engine.mutate_decky") as mock_mutate:
            await mutate_all(repo=mock_repo)
            mock_mutate.assert_not_called()

    async def test_force_mutates_all_deckies(self, mock_repo):
        # force=True ignores intervals and mutates every configured decky.
        cfg = _make_config(deckies=[_make_decky("d1"), _make_decky("d2")])
        mock_repo.get_state.return_value = {"config": cfg.model_dump(), "compose_path": "c.yml"}
        with patch("decnet.mutator.engine.mutate_decky", new_callable=AsyncMock, return_value=True) as mock_mutate:
            await mutate_all(repo=mock_repo, force=True)
            assert mock_mutate.call_count == 2

    async def test_skips_decky_not_yet_due(self, mock_repo):
        # last_mutated = now, interval = 30 min → not due
        now = time.time()
        cfg = _make_config(deckies=[_make_decky(mutate_interval=30, last_mutated=now)])
        mock_repo.get_state.return_value = {"config": cfg.model_dump(), "compose_path": "c.yml"}
        with patch("decnet.mutator.engine.mutate_decky") as mock_mutate:
            await mutate_all(repo=mock_repo, force=False)
            mock_mutate.assert_not_called()

    async def test_mutates_decky_that_is_due(self, mock_repo):
        # last_mutated = 2 hours ago, interval = 30 min → due
        old_ts = time.time() - 7200
        cfg = _make_config(deckies=[_make_decky(mutate_interval=30, last_mutated=old_ts)])
        mock_repo.get_state.return_value = {"config": cfg.model_dump(), "compose_path": "c.yml"}
        with patch("decnet.mutator.engine.mutate_decky", new_callable=AsyncMock, return_value=True) as mock_mutate:
            await mutate_all(repo=mock_repo, force=False)
            mock_mutate.assert_called_once()
# ---------------------------------------------------------------------------
# _compose_with_retry (Sync tests, keep as is or minimal update)
# ---------------------------------------------------------------------------
class TestComposeWithRetry:
@@ -60,149 +181,3 @@ class TestComposeWithRetry:
patch("decnet.engine.deployer.time.sleep"):
_compose_with_retry("up", "-d", compose_file=Path("compose.yml"), retries=3)
assert mock_run.call_count == 2
def test_raises_after_all_retries_exhausted(self):
fail = MagicMock(returncode=1, stdout="", stderr="hard error")
with patch("decnet.engine.deployer.subprocess.run", return_value=fail), \
patch("decnet.engine.deployer.time.sleep"):
with pytest.raises(subprocess.CalledProcessError):
_compose_with_retry("up", "-d", compose_file=Path("compose.yml"), retries=3)
def test_exponential_backoff(self):
fail = MagicMock(returncode=1, stdout="", stderr="")
sleep_calls = []
with patch("decnet.engine.deployer.subprocess.run", return_value=fail), \
patch("decnet.engine.deployer.time.sleep", side_effect=lambda d: sleep_calls.append(d)):
with pytest.raises(subprocess.CalledProcessError):
_compose_with_retry("up", compose_file=Path("c.yml"), retries=3, delay=1.0)
assert sleep_calls == [1.0, 2.0]
def test_correct_command_structure(self):
ok = MagicMock(returncode=0, stdout="")
with patch("decnet.engine.deployer.subprocess.run", return_value=ok) as mock_run:
_compose_with_retry("up", "-d", "--remove-orphans",
compose_file=Path("/tmp/compose.yml"))
cmd = mock_run.call_args[0][0]
assert cmd[:3] == ["docker", "compose", "-f"]
assert "up" in cmd
assert "--remove-orphans" in cmd
# ---------------------------------------------------------------------------
# mutate_decky
# ---------------------------------------------------------------------------
class TestMutateDecky:
def _patch(self, config=None, compose_path=Path("compose.yml")):
"""Return a context manager that mocks all I/O in mutate_decky."""
cfg = config or _make_config()
return (
patch("decnet.mutator.engine.load_state", return_value=(cfg, compose_path)),
patch("decnet.mutator.engine.save_state"),
patch("decnet.mutator.engine.write_compose"),
patch("decnet.mutator.engine._compose_with_retry"),
)
def test_returns_false_when_no_state(self):
with patch("decnet.mutator.engine.load_state", return_value=None):
assert mutate_decky("decky-01") is False
def test_returns_false_when_decky_not_found(self):
p = self._patch()
with p[0], p[1], p[2], p[3]:
assert mutate_decky("nonexistent") is False
def test_returns_true_on_success(self):
p = self._patch()
with p[0], p[1], p[2], p[3]:
assert mutate_decky("decky-01") is True
def test_saves_state_after_mutation(self):
p = self._patch()
with p[0], patch("decnet.mutator.engine.save_state") as mock_save, p[2], p[3]:
mutate_decky("decky-01")
mock_save.assert_called_once()
def test_regenerates_compose_after_mutation(self):
p = self._patch()
with p[0], p[1], patch("decnet.mutator.engine.write_compose") as mock_compose, p[3]:
mutate_decky("decky-01")
mock_compose.assert_called_once()
def test_returns_false_on_compose_failure(self):
p = self._patch()
err = subprocess.CalledProcessError(1, "docker", "", "compose failed")
with p[0], p[1], p[2], patch("decnet.mutator.engine._compose_with_retry", side_effect=err):
assert mutate_decky("decky-01") is False
def test_mutation_changes_services(self):
cfg = _make_config(deckies=[_make_decky(services=["ssh"])])
p = self._patch(config=cfg)
with p[0], p[1], p[2], p[3]:
mutate_decky("decky-01")
# Services may have changed (or stayed the same after 20 attempts)
assert isinstance(cfg.deckies[0].services, list)
assert len(cfg.deckies[0].services) >= 1
def test_updates_last_mutated_timestamp(self):
cfg = _make_config(deckies=[_make_decky(last_mutated=0.0)])
p = self._patch(config=cfg)
before = time.time()
with p[0], p[1], p[2], p[3]:
mutate_decky("decky-01")
assert cfg.deckies[0].last_mutated >= before
def test_archetype_constrains_service_pool(self):
"""A decky with an archetype must only mutate within its service pool."""
cfg = _make_config(deckies=[_make_decky(archetype="workstation", services=["rdp"])])
p = self._patch(config=cfg)
with p[0], p[1], p[2], p[3]:
result = mutate_decky("decky-01")
assert result is True
# ---------------------------------------------------------------------------
# mutate_all
# ---------------------------------------------------------------------------
class TestMutateAll:
def test_no_state_returns_early(self):
with patch("decnet.mutator.engine.load_state", return_value=None), \
patch("decnet.mutator.engine.mutate_decky") as mock_mutate:
mutate_all()
mock_mutate.assert_not_called()
def test_force_mutates_all_deckies(self):
cfg = _make_config(deckies=[_make_decky("d1"), _make_decky("d2")])
with patch("decnet.mutator.engine.load_state", return_value=(cfg, Path("c.yml"))), \
patch("decnet.mutator.engine.mutate_decky", return_value=True) as mock_mutate:
mutate_all(force=True)
assert mock_mutate.call_count == 2
def test_skips_decky_not_yet_due(self):
# last_mutated = now, interval = 30 min → not due
now = time.time()
cfg = _make_config(deckies=[_make_decky(mutate_interval=30, last_mutated=now)])
with patch("decnet.mutator.engine.load_state", return_value=(cfg, Path("c.yml"))), \
patch("decnet.mutator.engine.mutate_decky") as mock_mutate:
mutate_all(force=False)
mock_mutate.assert_not_called()
def test_mutates_decky_that_is_due(self):
# last_mutated = 2 hours ago, interval = 30 min → due
old_ts = time.time() - 7200
cfg = _make_config(deckies=[_make_decky(mutate_interval=30, last_mutated=old_ts)])
with patch("decnet.mutator.engine.load_state", return_value=(cfg, Path("c.yml"))), \
patch("decnet.mutator.engine.mutate_decky", return_value=True) as mock_mutate:
mutate_all(force=False)
mock_mutate.assert_called_once_with("decky-01")
def test_skips_decky_with_no_interval_and_no_force(self):
cfg = _make_config(
deckies=[_make_decky(mutate_interval=None)],
mutate_interval=None,
)
with patch("decnet.mutator.engine.load_state", return_value=(cfg, Path("c.yml"))), \
patch("decnet.mutator.engine.mutate_decky") as mock_mutate:
mutate_all(force=False)
mock_mutate.assert_not_called()

View File

@@ -7,7 +7,7 @@ from decnet.services.smtp_relay import SMTPRelayService
def test_smtp_relay_compose_fragment():
    """The SMTP relay service emits a well-formed compose fragment.

    Verifies the container naming convention (<decky>-smtp_relay) and that
    the open-relay flag and log target land in the environment mapping.
    """
    svc = SMTPRelayService()
    fragment = svc.compose_fragment("test-decky", log_target="log-server")
    assert fragment["container_name"] == "test-decky-smtp_relay"
    assert fragment["environment"]["SMTP_OPEN_RELAY"] == "1"
    assert fragment["environment"]["LOG_TARGET"] == "log-server"
@@ -15,7 +15,7 @@ def test_smtp_relay_compose_fragment():
def test_smtp_relay_custom_cfg():
svc = SMTPRelayService()
fragment = svc.compose_fragment(
"test-decky",
"test-decky",
service_cfg={"banner": "Welcome", "mta": "Postfix"}
)
assert fragment["environment"]["SMTP_BANNER"] == "Welcome"