Compare commits
45 Commits
v0.1.0
...
fe6b349e5e
| Author | SHA1 | Date | |
|---|---|---|---|
| fe6b349e5e | |||
| 65b220fdbe | |||
| 6f10e7556f | |||
| fc99375c62 | |||
| 6bdb5922fa | |||
| 32b06afef6 | |||
| 31e0c5151b | |||
| cc3d434c02 | |||
| 1b5d366b38 | |||
| 168ecf14ab | |||
| db9a2699b9 | |||
| d139729fa2 | |||
| dd363629ab | |||
| c544964f57 | |||
| 6e19848723 | |||
| e24da92e0f | |||
| 47f0e6da8f | |||
| 18de381a43 | |||
| 1f5c6604d6 | |||
| a9c7ddec2b | |||
| eb4be44c9a | |||
| 1a2ad27eca | |||
| b1f09b9c6a | |||
| 3656a89d60 | |||
| ba2faba5d5 | |||
| 950280a97b | |||
| 7bc8d75242 | |||
| 5f637b5272 | |||
| 6ed92d080f | |||
| 1b593920cd | |||
| bad90dfb75 | |||
| 05e71f6d2e | |||
| 52c26a2891 | |||
| 81135cb861 | |||
| 50e53120df | |||
| 697929a127 | |||
| b46934db46 | |||
| 5b990743db | |||
| fbb16a960c | |||
| c32ad82d0a | |||
| 850a6f2ad7 | |||
| d344e4c8bb | |||
| f8a9f8fc64 | |||
| a428410c8e | |||
| e5a6c2d9a7 |
7
.claude/settings.local.json
Normal file
7
.claude/settings.local.json
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"permissions": {
|
||||||
|
"allow": [
|
||||||
|
"mcp__plugin_context-mode_context-mode__ctx_batch_execute"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
11
.env.example
Normal file
11
.env.example
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
# API Options
|
||||||
|
DECNET_API_HOST=0.0.0.0
|
||||||
|
DECNET_API_PORT=8000
|
||||||
|
DECNET_JWT_SECRET=supersecretkey12345
|
||||||
|
DECNET_INGEST_LOG_FILE=/var/log/decnet/decnet.log
|
||||||
|
|
||||||
|
# Web Dashboard Options
|
||||||
|
DECNET_WEB_HOST=0.0.0.0
|
||||||
|
DECNET_WEB_PORT=8080
|
||||||
|
DECNET_ADMIN_USER=admin
|
||||||
|
DECNET_ADMIN_PASSWORD=admin
|
||||||
@@ -3,6 +3,9 @@ name: CI
|
|||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [dev, testing]
|
branches: [dev, testing]
|
||||||
|
paths-ignore:
|
||||||
|
- "**/*.md"
|
||||||
|
- "docs/**"
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
lint:
|
lint:
|
||||||
@@ -27,7 +30,7 @@ jobs:
|
|||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
- run: pip install -e .
|
- run: pip install -e .[dev]
|
||||||
- run: pytest tests/ -v --tb=short
|
- run: pytest tests/ -v --tb=short
|
||||||
|
|
||||||
bandit:
|
bandit:
|
||||||
@@ -50,7 +53,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
python-version: "3.11"
|
python-version: "3.11"
|
||||||
- run: pip install pip-audit
|
- run: pip install pip-audit
|
||||||
- run: pip install -e .
|
- run: pip install -e .[dev]
|
||||||
- run: pip-audit --skip-editable
|
- run: pip-audit --skip-editable
|
||||||
|
|
||||||
open-pr:
|
open-pr:
|
||||||
|
|||||||
@@ -3,6 +3,9 @@ name: PR Gate
|
|||||||
on:
|
on:
|
||||||
pull_request:
|
pull_request:
|
||||||
branches: [main]
|
branches: [main]
|
||||||
|
paths-ignore:
|
||||||
|
- "**/*.md"
|
||||||
|
- "docs/**"
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
lint:
|
lint:
|
||||||
|
|||||||
@@ -3,6 +3,9 @@ name: Release
|
|||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [main]
|
branches: [main]
|
||||||
|
paths-ignore:
|
||||||
|
- "**/*.md"
|
||||||
|
- "docs/**"
|
||||||
|
|
||||||
env:
|
env:
|
||||||
REGISTRY: git.resacachile.cl
|
REGISTRY: git.resacachile.cl
|
||||||
|
|||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -13,6 +13,9 @@ decnet.log*
|
|||||||
*.loggy
|
*.loggy
|
||||||
*.nmap
|
*.nmap
|
||||||
linterfails.log
|
linterfails.log
|
||||||
test-scan
|
|
||||||
webmail
|
webmail
|
||||||
windows1
|
windows1
|
||||||
|
*.db
|
||||||
|
decnet.json
|
||||||
|
.env
|
||||||
|
.env.local
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
CI/CD TEST 2
|
|
||||||
103
GEMINI.md
Normal file
103
GEMINI.md
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
# DECNET (Deception Network) Project Context
|
||||||
|
|
||||||
|
DECNET is a high-fidelity honeypot framework designed to deploy heterogeneous fleets of fake machines (called **deckies**) that appear as real hosts on a local network.
|
||||||
|
|
||||||
|
## Project Overview
|
||||||
|
|
||||||
|
- **Core Purpose:** To lure, profile, and log attacker interactions within a controlled, deceptive environment.
|
||||||
|
- **Key Technology:** Linux-native container networking (MACVLAN/IPvlan) combined with Docker to give each decoy its own MAC address, IP, and realistic TCP/IP stack behavior.
|
||||||
|
- **Main Components:**
|
||||||
|
- **Deckies:** Group of containers sharing a network namespace (one base container + multiple service containers).
|
||||||
|
- **Archetypes:** Pre-defined machine profiles (e.g., `windows-workstation`, `linux-server`) that bundle services and OS fingerprints.
|
||||||
|
- **Services:** Modular honeypot plugins (SSH, SMB, RDP, etc.) built as `BaseService` subclasses.
|
||||||
|
- **OS Fingerprinting:** Sysctl-based TCP/IP stack tuning to spoof OS detection (nmap).
|
||||||
|
- **Logging Pipeline:** RFC 5424 syslog forwarding to an isolated SIEM/ELK stack.
|
||||||
|
|
||||||
|
## Technical Stack
|
||||||
|
|
||||||
|
- **Language:** Python 3.11+
|
||||||
|
- **CLI Framework:** [Typer](https://typer.tiangolo.com/)
|
||||||
|
- **Data Validation:** [Pydantic v2](https://docs.pydantic.dev/)
|
||||||
|
- **Orchestration:** Docker Engine 24+ (via Docker SDK for Python)
|
||||||
|
- **Networking:** MACVLAN (default) or IPvlan L2 (for WiFi/restricted environments).
|
||||||
|
- **Testing:** Pytest (100% pass requirement).
|
||||||
|
- **Formatting/Linting:** Ruff, Bandit (SAST), pip-audit.
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
```text
|
||||||
|
Host NIC (eth0)
|
||||||
|
└── MACVLAN Bridge
|
||||||
|
├── Decky-01 (192.168.1.10) -> [Base] + [SSH] + [HTTP]
|
||||||
|
├── Decky-02 (192.168.1.11) -> [Base] + [SMB] + [RDP]
|
||||||
|
└── ...
|
||||||
|
```
|
||||||
|
|
||||||
|
- **Base Container:** Owns the IP/MAC, sets `sysctls` for OS spoofing, and runs `sleep infinity`.
|
||||||
|
- **Service Containers:** Use `network_mode: service:<base>` to share the identity and networking of the base container.
|
||||||
|
- **Isolation:** Decoy traffic is strictly separated from the logging network.
|
||||||
|
|
||||||
|
## Key Commands
|
||||||
|
|
||||||
|
### Development & Maintenance
|
||||||
|
- **Install (Dev):**
|
||||||
|
- `rm .venv -rf`
|
||||||
|
- `python3 -m venv .venv`
|
||||||
|
- `source .venv/bin/activate`
|
||||||
|
- `pip install -e .`
|
||||||
|
- **Run Tests:** `pytest` (Run before any commit)
|
||||||
|
- **Linting:** `ruff check .`
|
||||||
|
- **Security Scan:** `bandit -r decnet/`
|
||||||
|
- **Web Git:** git.resacachile.cl (Gitea)
|
||||||
|
|
||||||
|
### CLI Usage
|
||||||
|
- **List Services:** `decnet services`
|
||||||
|
- **List Archetypes:** `decnet archetypes`
|
||||||
|
- **Dry Run (Compose Gen):** `decnet deploy --deckies 3 --randomize-services --dry-run`
|
||||||
|
- **Deploy (Full):** `sudo .venv/bin/decnet deploy --interface eth0 --deckies 5 --randomize-services`
|
||||||
|
- **Status:** `decnet status`
|
||||||
|
- **Teardown:** `sudo .venv/bin/decnet teardown --all`
|
||||||
|
|
||||||
|
## Development Conventions
|
||||||
|
|
||||||
|
- **Code Style:**
|
||||||
|
- Strict adherence to Ruff/PEP8.
|
||||||
|
- **Always use typed variables**. If any non-types variables are found, they must be corrected.
|
||||||
|
- The correct way is `x: int = 1`, never `x : int = 1`.
|
||||||
|
- If assignment is present, always use a space between the type and the equal sign `x: int = 1`.
|
||||||
|
- **Never** use lowercase L (l), uppercase o (O) or uppercase i (I) in single-character names.
|
||||||
|
- **Internal vars are to be declared with an underscore** (_internal_variable_name).
|
||||||
|
- **Internal to internal vars are to be declared with double underscore** (__internal_variable_name).
|
||||||
|
- Always use snake_case for code.
|
||||||
|
- Always use PascalCase for classes and generics.
|
||||||
|
- **Testing:** New features MUST include a `pytest` case. 100% test pass rate is mandatory before merging.
|
||||||
|
- **Plugin System:**
|
||||||
|
- New services go in `decnet/services/<name>.py`.
|
||||||
|
- Subclass `decnet.services.base.BaseService`.
|
||||||
|
- The registry uses auto-discovery; no manual registration required.
|
||||||
|
- **Configuration:**
|
||||||
|
- Use Pydantic models in `decnet/config.py` for any new settings.
|
||||||
|
- INI file parsing is handled in `decnet/ini_loader.py`.
|
||||||
|
- **State Management:**
|
||||||
|
- Runtime state is persisted in `decnet-state.json`.
|
||||||
|
- Do not modify this file manually.
|
||||||
|
- **General Development Guidelines**:
|
||||||
|
- **Never** commit broken code.
|
||||||
|
- **No matter how small** the changes, they must be committed.
|
||||||
|
- **If new features are added**, new tests must be added, too.
|
||||||
|
- **Never present broken code to the user**. Test, validate, then present.
|
||||||
|
- **Extensive testing** for every function must be created.
|
||||||
|
- **Always develop in the `dev` branch, never in `main`.**
|
||||||
|
- **Test in the `testing` branch.**
|
||||||
|
|
||||||
|
## Directory Structure
|
||||||
|
|
||||||
|
- `decnet/`: Main source code.
|
||||||
|
- `services/`: Honeypot service implementations.
|
||||||
|
- `logging/`: Syslog formatting and forwarding logic.
|
||||||
|
- `correlation/`: (In Progress) Logic for grouping attacker events.
|
||||||
|
- `templates/`: Dockerfiles and entrypoint scripts for services.
|
||||||
|
- `tests/`: Pytest suite.
|
||||||
|
- `pyproject.toml`: Dependency and entry point definitions.
|
||||||
|
- `CLAUDE.md`: Claude-specific environment guidance.
|
||||||
|
- `DEVELOPMENT.md`: Roadmap and TODOs.
|
||||||
113
NOTES.md
113
NOTES.md
@@ -1,113 +0,0 @@
|
|||||||
# Initial steps
|
|
||||||
|
|
||||||
# Architecture
|
|
||||||
|
|
||||||
## DECNET-UNIHOST model
|
|
||||||
|
|
||||||
The unihost model is a mode in which DECNET deploys an _n_ amount of machines from a single one. This execution model lives in a decoy network which is accessible to an attacker from the outside.
|
|
||||||
|
|
||||||
Each decky (the son of the DECNET unihost) should have different services (RDP, SMB, SSH, FTP, etc) and all of them should communicate with an external, isolated network, which aggregates data and allows
|
|
||||||
visualizations to be made. Think of the ELK stack. That data is then passed back via Logstash or other methods to a SIEM device or something else that may benefit from this collected data.
|
|
||||||
|
|
||||||
## DECNET-MULTIHOST (SWARM) model
|
|
||||||
|
|
||||||
The SWARM model is similar to the UNIHOST model, but the difference is that instead of one real machine, we have n>1 machines. Same thought process really, but deployment may be different.
|
|
||||||
A low cost option and fairly automatable one is the usage of Ansible, sshpass, or other tools.
|
|
||||||
|
|
||||||
# Modus operandi
|
|
||||||
|
|
||||||
## Docker-Compose
|
|
||||||
|
|
||||||
I will use Docker Compose extensively for this project. The reasons are:
|
|
||||||
- Easily managed.
|
|
||||||
- Easily extensible.
|
|
||||||
- Less overhead.
|
|
||||||
|
|
||||||
To be completely transparent: I asked Deepseek to write the initial `docker-compose.yml` file. It was mostly boilerplate, and most of it was later modified or deleted. It doesn't exist anymore.
|
|
||||||
|
|
||||||
## Distro to use.
|
|
||||||
|
|
||||||
I will be using the `debian:bookworm-slim` image for all the containers. I might think about mixing in there some Ubuntu or a Centos, but for now, Debian will do just fine.
|
|
||||||
|
|
||||||
The distro I'm running is WSL Kali Linux. Let's hope this doesn't cause any problems down the road.
|
|
||||||
|
|
||||||
## Networking
|
|
||||||
|
|
||||||
It was a hassle, but I think MACVLAN or IPVLAN (thanks @Deepseek!) might work. The reasoning behind picking this networking driver is that for the project to work, it requires having the entire container accessible from the network. This is to attempt to masquerade them as real, live machines.
|
|
||||||
|
|
||||||
Now, we will need a publicly accessible, real server that has access to this "internal" network. I'll try MACVLAN first.
|
|
||||||
|
|
||||||
### MACVLAN Tests
|
|
||||||
|
|
||||||
I will first use the default network to see what happens.
|
|
||||||
|
|
||||||
```
|
|
||||||
docker network create -d macvlan \
|
|
||||||
--subnet=192.168.1.0/24 \
|
|
||||||
--gateway=192.168.1.1 \
|
|
||||||
-o parent=eth0 localnet
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Issues
|
|
||||||
|
|
||||||
This initial test doesn't seem to be working. Might be that I'm using WSL, so I downloaded a Ubuntu 22.04 Server ISO. I'll try the MACVLAN network on it. Now, if that doesn't work, I don't see how the 802.1q would work, at least on _my network_. Perhaps if I had a switch I could make it work, but currently I don't have one :c
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# TODO
|
|
||||||
|
|
||||||
## Core / Hardening
|
|
||||||
|
|
||||||
- [ ] **Attacker fingerprinting** — Beyond IP logging: capture TLS JA3/JA4 hashes, TCP window sizes, User-Agent strings, SSH client banners, and tool signatures (nmap, masscan, Metasploit, Cobalt Strike). Build attacker profiles across sessions.
|
|
||||||
- [ ] **Canary tokens** — Embed canary URLs, fake AWS keys, fake API tokens, and honeydocs (PDF/DOCX with phone-home URLs) into decky filesystems. Fire an alert the moment one is used.
|
|
||||||
- [ ] **Tarpit mode** — Slow down attackers by making services respond extremely slowly (e.g., SSH that takes 60s to reject, HTTP that drip-feeds bytes). Wastes attacker time and resources.
|
|
||||||
- [ ] **Dynamic decky mutation** — Deckies that change their exposed services or OS fingerprint over time to confuse port-scan caching and appear more "alive."
|
|
||||||
- [ ] **Credential harvesting DB** — Every username/password attempt across all services lands in a queryable database. Expose via CLI (`decnet creds`) and flag reuse across deckies.
|
|
||||||
- [ ] **Session recording** — Full session capture for SSH/Telnet (keystroke logs, commands run, files downloaded). Cowrie already does this — surface it better in the CLI and correlation engine.
|
|
||||||
- [ ] **Payload capture** — Store every file uploaded or command executed by an attacker. Hash and auto-submit to VirusTotal or a local sandbox.
|
|
||||||
|
|
||||||
## Detection & Intelligence
|
|
||||||
|
|
||||||
- [ ] **Real-time alerting** — Webhook/Slack/Telegram notifications when an attacker hits a decky for the first time, crosses N deckies (lateral movement), or uses a known bad IP.
|
|
||||||
- [ ] **Threat intel enrichment** — Auto-lookup attacker IPs against AbuseIPDB, Shodan, GreyNoise, and AlienVault OTX. Tag known scanners vs. targeted attackers.
|
|
||||||
- [ ] **Attack campaign clustering** — Group attacker sessions by tooling signatures, timing patterns, and credential sets. Identify coordinated campaigns hitting multiple deckies.
|
|
||||||
- [ ] **GeoIP mapping** — Attacker origin on a world map. Correlate with ASN data to identify cloud exit nodes, VPNs, and Tor exits.
|
|
||||||
- [ ] **TTPs tagging** — Map observed attacker behaviors to MITRE ATT&CK techniques automatically. Tag events in the correlation engine.
|
|
||||||
- [ ] **Honeypot interaction scoring** — Score attackers on a scale: casual scanner vs. persistent targeted attacker, based on depth of interaction and commands run.
|
|
||||||
|
|
||||||
## Dashboard & Visibility
|
|
||||||
|
|
||||||
- [ ] **Web dashboard** — Real-time web UI showing live decky status, attacker activity, traversal graphs, and credential stats. Could be a simple FastAPI + HTMX or a full React app.
|
|
||||||
- [ ] **Pre-built Kibana/Grafana dashboards** — Ship dashboard JSON exports out of the box so ELK/Grafana deployments are plug-and-play.
|
|
||||||
- [ ] **CLI live feed** — `decnet watch` command: tail all decky logs in a unified, colored terminal stream (like `docker-compose logs -f` but prettier).
|
|
||||||
- [ ] **Traversal graph export** — Export attacker traversal graphs as DOT/Graphviz or JSON for visualization in external tools.
|
|
||||||
- [ ] **Daily digest** — Automated daily summary email/report: new attackers, top credentials tried, most-hit services.
|
|
||||||
|
|
||||||
## Deployment & Infrastructure
|
|
||||||
|
|
||||||
- [ ] **SWARM / multihost mode** — Full Ansible-based orchestration for deploying deckies across N real hosts.
|
|
||||||
- [ ] **Terraform/Pulumi provider** — Spin up cloud-hosted deckies on AWS/GCP/Azure with one command. Useful for internet-facing honeynets.
|
|
||||||
- [ ] **Auto-scaling** — When attack traffic increases, automatically spawn more deckies to absorb and log more activity.
|
|
||||||
- [ ] **Kubernetes deployment mode** — Run deckies as Kubernetes pods for environments already running k8s.
|
|
||||||
- [ ] **Proxmox/libvirt backend** — Full VM-based deckies instead of containers, for even more realistic OS fingerprints and behavior. Docker for speed; VMs for realism.
|
|
||||||
- [ ] **Raspberry Pi / ARM support** — Low-cost physical honeynets using RPis. Validate ARM image builds.
|
|
||||||
- [ ] **Decky health monitoring** — Watchdog that auto-restarts crashed deckies and alerts if a service goes dark.
|
|
||||||
|
|
||||||
## Services & Realism
|
|
||||||
|
|
||||||
- [ ] **HTTPS/TLS support** — HTTP honeypot with a self-signed or Let's Encrypt cert. Many real-world services use HTTPS; plain HTTP stands out.
|
|
||||||
- [ ] **Fake Active Directory** — A convincing fake AD/LDAP with fake users, groups, and GPOs. Attacker tools like BloodHound should get juicy (fake) data.
|
|
||||||
- [ ] **Fake file shares** — SMB/NFS shares pre-populated with enticing but fake files: "passwords.xlsx", "vpn_config.ovpn", "backup_keys.tar.gz". All instrumented to detect access.
|
|
||||||
- [ ] **Realistic web apps** — HTTP honeypot serving convincing fake apps: a fake WordPress, a fake phpMyAdmin, a fake Grafana login — all logging every interaction.
|
|
||||||
- [ ] **OT/ICS profiles** — Expand Conpot support: Modbus, DNP3, BACnet, EtherNet/IP. Convincing industrial control system decoys.
|
|
||||||
- [ ] **Printer/IoT archetypes** — Expand existing printer/camera archetypes with actual service emulation (IPP, ONVIF, WS-Discovery).
|
|
||||||
- [ ] **Service interaction depth** — Some services currently just log the connection. Deepen interaction: fake MySQL that accepts queries and returns realistic fake data, fake Redis that stores and retrieves dummy keys.
|
|
||||||
|
|
||||||
## Developer Experience
|
|
||||||
|
|
||||||
- [ ] **Plugin SDK docs** — Full documentation and an example plugin for adding custom services. Lower the barrier for community contributions.
|
|
||||||
- [ ] **Integration tests** — Full deploy/teardown cycle tests against a real Docker daemon (not just unit tests).
|
|
||||||
- [ ] **Per-service tests** — Each of the 29 service implementations deserves its own test coverage.
|
|
||||||
- [ ] **CI/CD pipeline** — GitHub/Gitea Actions: run tests on push, lint, build Docker images, publish releases.
|
|
||||||
- [ ] **Config validation CLI** — `decnet validate my.ini` to dry-check an INI config before deploying.
|
|
||||||
- [ ] **Config generator wizard** — `decnet wizard` interactive prompt to generate an INI config without writing one by hand.
|
|
||||||
52
README.md
52
README.md
@@ -69,7 +69,7 @@ From the outside a decky looks identical to a real machine: it has its own MAC a
|
|||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git clone <repo-url> DECNET
|
git clone https://git.resacachile.cl/anti/DECNET
|
||||||
cd DECNET
|
cd DECNET
|
||||||
pip install -e .
|
pip install -e .
|
||||||
```
|
```
|
||||||
@@ -207,6 +207,26 @@ sudo decnet deploy --deckies 4 --archetype windows-workstation
|
|||||||
[corp-workstations]
|
[corp-workstations]
|
||||||
archetype = windows-workstation
|
archetype = windows-workstation
|
||||||
amount = 4
|
amount = 4
|
||||||
|
|
||||||
|
[win-fileserver]
|
||||||
|
services = ftp
|
||||||
|
nmap_os = windows
|
||||||
|
os_version = Windows Server 2019
|
||||||
|
|
||||||
|
[dbsrv01]
|
||||||
|
ip = 192.168.1.112
|
||||||
|
services = mysql, http
|
||||||
|
nmap_os = linux
|
||||||
|
|
||||||
|
[dbsrv01.http]
|
||||||
|
server_header = Apache/2.4.54 (Debian)
|
||||||
|
response_code = 200
|
||||||
|
fake_app = wordpress
|
||||||
|
|
||||||
|
[dbsrv01.mysql]
|
||||||
|
mysql_version = 5.7.38-log
|
||||||
|
mysql_banner = MySQL Community Server
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@@ -470,6 +490,30 @@ See [`test-full.ini`](test-full.ini) — covers all 25 services across 10 role-t
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## Environment Configuration (.env)
|
||||||
|
|
||||||
|
DECNET supports loading configuration from `.env.local` and `.env` files located in the project root. This is useful for securing secrets like the JWT key and configuring default ports without passing flags every time.
|
||||||
|
|
||||||
|
An example `.env.example` is provided:
|
||||||
|
|
||||||
|
```ini
|
||||||
|
# API Options
|
||||||
|
DECNET_API_HOST=0.0.0.0
|
||||||
|
DECNET_API_PORT=8000
|
||||||
|
DECNET_JWT_SECRET=supersecretkey12345
|
||||||
|
DECNET_INGEST_LOG_FILE=/var/log/decnet/decnet.log
|
||||||
|
|
||||||
|
# Web Dashboard Options
|
||||||
|
DECNET_WEB_HOST=0.0.0.0
|
||||||
|
DECNET_WEB_PORT=8080
|
||||||
|
DECNET_ADMIN_USER=admin
|
||||||
|
DECNET_ADMIN_PASSWORD=admin
|
||||||
|
```
|
||||||
|
|
||||||
|
Copy `.env.example` to `.env.local` and modify it to suit your environment.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
## Logging
|
## Logging
|
||||||
|
|
||||||
All attacker interactions are forwarded off the decoy network to an isolated logging sink. The log pipeline lives on a separate internal Docker bridge (`decnet_logs`) that is not reachable from the fake LAN.
|
All attacker interactions are forwarded off the decoy network to an isolated logging sink. The log pipeline lives on a separate internal Docker bridge (`decnet_logs`) that is not reachable from the fake LAN.
|
||||||
@@ -631,3 +675,9 @@ The test suite covers:
|
|||||||
| `test_cli_service_pool.py` | CLI service resolution |
|
| `test_cli_service_pool.py` | CLI service resolution |
|
||||||
|
|
||||||
Every new feature requires passing tests before merging.
|
Every new feature requires passing tests before merging.
|
||||||
|
|
||||||
|
# AI Disclosure
|
||||||
|
|
||||||
|
This project has been made with lots, and I mean lots of help from AIs. While most of the design was made by me, most of the coding was done by AI models.
|
||||||
|
|
||||||
|
Nevertheless, this project will be kept under high scrutiny by humans.
|
||||||
|
|||||||
174
decnet/cli.py
174
decnet/cli.py
@@ -15,6 +15,13 @@ import typer
|
|||||||
from rich.console import Console
|
from rich.console import Console
|
||||||
from rich.table import Table
|
from rich.table import Table
|
||||||
|
|
||||||
|
from decnet.env import (
|
||||||
|
DECNET_API_HOST,
|
||||||
|
DECNET_API_PORT,
|
||||||
|
DECNET_INGEST_LOG_FILE,
|
||||||
|
DECNET_WEB_HOST,
|
||||||
|
DECNET_WEB_PORT,
|
||||||
|
)
|
||||||
from decnet.archetypes import Archetype, all_archetypes, get_archetype
|
from decnet.archetypes import Archetype, all_archetypes, get_archetype
|
||||||
from decnet.config import (
|
from decnet.config import (
|
||||||
DeckyConfig,
|
DeckyConfig,
|
||||||
@@ -116,9 +123,12 @@ def _build_deckies_from_ini(
|
|||||||
gateway: str,
|
gateway: str,
|
||||||
host_ip: str,
|
host_ip: str,
|
||||||
randomize: bool,
|
randomize: bool,
|
||||||
|
cli_mutate_interval: int | None = None,
|
||||||
) -> list[DeckyConfig]:
|
) -> list[DeckyConfig]:
|
||||||
"""Build DeckyConfig list from an IniConfig, auto-allocating missing IPs."""
|
"""Build DeckyConfig list from an IniConfig, auto-allocating missing IPs."""
|
||||||
from ipaddress import IPv4Address, IPv4Network
|
from ipaddress import IPv4Address, IPv4Network
|
||||||
|
import time
|
||||||
|
now = time.time()
|
||||||
|
|
||||||
explicit_ips: set[IPv4Address] = {
|
explicit_ips: set[IPv4Address] = {
|
||||||
IPv4Address(s.ip) for s in ini.deckies if s.ip
|
IPv4Address(s.ip) for s in ini.deckies if s.ip
|
||||||
@@ -139,11 +149,7 @@ def _build_deckies_from_ini(
|
|||||||
# Resolve archetype (if any) — explicit services/distro override it
|
# Resolve archetype (if any) — explicit services/distro override it
|
||||||
arch: Archetype | None = None
|
arch: Archetype | None = None
|
||||||
if spec.archetype:
|
if spec.archetype:
|
||||||
try:
|
|
||||||
arch = get_archetype(spec.archetype)
|
arch = get_archetype(spec.archetype)
|
||||||
except ValueError as e:
|
|
||||||
console.print(f"[red]{e}[/]")
|
|
||||||
raise typer.Exit(1)
|
|
||||||
|
|
||||||
# Distro: archetype preferred list → random → global cycle
|
# Distro: archetype preferred list → random → global cycle
|
||||||
distro_pool = arch.preferred_distros if arch else list(all_distros().keys())
|
distro_pool = arch.preferred_distros if arch else list(all_distros().keys())
|
||||||
@@ -152,19 +158,16 @@ def _build_deckies_from_ini(
|
|||||||
|
|
||||||
ip = spec.ip or next(auto_pool, None)
|
ip = spec.ip or next(auto_pool, None)
|
||||||
if ip is None:
|
if ip is None:
|
||||||
raise RuntimeError(
|
raise ValueError(f"Not enough free IPs in {subnet_cidr} while assigning IP for '{spec.name}'.")
|
||||||
f"Not enough free IPs in {subnet_cidr} while assigning IP for '{spec.name}'."
|
|
||||||
)
|
|
||||||
|
|
||||||
if spec.services:
|
if spec.services:
|
||||||
known = set(_all_service_names())
|
known = set(_all_service_names())
|
||||||
unknown = [s for s in spec.services if s not in known]
|
unknown = [s for s in spec.services if s not in known]
|
||||||
if unknown:
|
if unknown:
|
||||||
console.print(
|
raise ValueError(
|
||||||
f"[red]Unknown service(s) in [{spec.name}]: {unknown}. "
|
f"Unknown service(s) in [{spec.name}]: {unknown}. "
|
||||||
f"Available: {_all_service_names()}[/]"
|
f"Available: {_all_service_names()}"
|
||||||
)
|
)
|
||||||
raise typer.Exit(1)
|
|
||||||
svc_list = spec.services
|
svc_list = spec.services
|
||||||
elif arch:
|
elif arch:
|
||||||
svc_list = list(arch.services)
|
svc_list = list(arch.services)
|
||||||
@@ -173,14 +176,19 @@ def _build_deckies_from_ini(
|
|||||||
count = random.randint(1, min(3, len(svc_pool)))
|
count = random.randint(1, min(3, len(svc_pool)))
|
||||||
svc_list = random.sample(svc_pool, count)
|
svc_list = random.sample(svc_pool, count)
|
||||||
else:
|
else:
|
||||||
console.print(
|
raise ValueError(
|
||||||
f"[red]Decky '[{spec.name}]' has no services= in config. "
|
f"Decky '[{spec.name}]' has no services= in config. "
|
||||||
"Add services=, archetype=, or use --randomize-services.[/]"
|
"Add services=, archetype=, or use --randomize-services."
|
||||||
)
|
)
|
||||||
raise typer.Exit(1)
|
|
||||||
|
|
||||||
# nmap_os priority: explicit INI key > archetype default > "linux"
|
# nmap_os priority: explicit INI key > archetype default > "linux"
|
||||||
resolved_nmap_os = spec.nmap_os or (arch.nmap_os if arch else "linux")
|
resolved_nmap_os = spec.nmap_os or (arch.nmap_os if arch else "linux")
|
||||||
|
|
||||||
|
# mutation interval priority: CLI > per-decky INI > global INI
|
||||||
|
decky_mutate_interval = cli_mutate_interval
|
||||||
|
if decky_mutate_interval is None:
|
||||||
|
decky_mutate_interval = spec.mutate_interval if spec.mutate_interval is not None else ini.mutate_interval
|
||||||
|
|
||||||
deckies.append(DeckyConfig(
|
deckies.append(DeckyConfig(
|
||||||
name=spec.name,
|
name=spec.name,
|
||||||
ip=ip,
|
ip=ip,
|
||||||
@@ -192,10 +200,38 @@ def _build_deckies_from_ini(
|
|||||||
archetype=arch.slug if arch else None,
|
archetype=arch.slug if arch else None,
|
||||||
service_config=spec.service_config,
|
service_config=spec.service_config,
|
||||||
nmap_os=resolved_nmap_os,
|
nmap_os=resolved_nmap_os,
|
||||||
|
mutate_interval=decky_mutate_interval,
|
||||||
|
last_mutated=now,
|
||||||
))
|
))
|
||||||
return deckies
|
return deckies
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
|
||||||
|
def api(
|
||||||
|
port: int = typer.Option(DECNET_API_PORT, "--port", help="Port for the backend API"),
|
||||||
|
host: str = typer.Option(DECNET_API_HOST, "--host", help="Host IP for the backend API"),
|
||||||
|
log_file: str = typer.Option(DECNET_INGEST_LOG_FILE, "--log-file", help="Path to the DECNET log file to monitor"),
|
||||||
|
) -> None:
|
||||||
|
"""Run the DECNET API and Web Dashboard in standalone mode."""
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
console.print(f"[green]Starting DECNET API on {host}:{port}...[/]")
|
||||||
|
_env: dict[str, str] = os.environ.copy()
|
||||||
|
_env["DECNET_INGEST_LOG_FILE"] = str(log_file)
|
||||||
|
try:
|
||||||
|
subprocess.run(
|
||||||
|
[sys.executable, "-m", "uvicorn", "decnet.web.api:app", "--host", host, "--port", str(port)],
|
||||||
|
env=_env
|
||||||
|
)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
pass
|
||||||
|
except (FileNotFoundError, subprocess.SubprocessError):
|
||||||
|
console.print("[red]Failed to start API. Ensure 'uvicorn' is installed in the current environment.[/]")
|
||||||
|
|
||||||
|
|
||||||
@app.command()
|
@app.command()
|
||||||
def deploy(
|
def deploy(
|
||||||
mode: str = typer.Option("unihost", "--mode", "-m", help="Deployment mode: unihost | swarm"),
|
mode: str = typer.Option("unihost", "--mode", "-m", help="Deployment mode: unihost | swarm"),
|
||||||
@@ -208,14 +244,18 @@ def deploy(
|
|||||||
distro: Optional[str] = typer.Option(None, "--distro", help="Comma-separated distro slugs, e.g. debian,ubuntu22,rocky9"),
|
distro: Optional[str] = typer.Option(None, "--distro", help="Comma-separated distro slugs, e.g. debian,ubuntu22,rocky9"),
|
||||||
randomize_distros: bool = typer.Option(False, "--randomize-distros", help="Assign a random distro to each decky"),
|
randomize_distros: bool = typer.Option(False, "--randomize-distros", help="Assign a random distro to each decky"),
|
||||||
log_target: Optional[str] = typer.Option(None, "--log-target", help="Forward logs to ip:port (e.g. 192.168.1.5:5140)"),
|
log_target: Optional[str] = typer.Option(None, "--log-target", help="Forward logs to ip:port (e.g. 192.168.1.5:5140)"),
|
||||||
log_file: Optional[str] = typer.Option(None, "--log-file", help="Write RFC 5424 syslog to this path inside containers (e.g. /var/log/decnet/decnet.log)"),
|
log_file: Optional[str] = typer.Option(DECNET_INGEST_LOG_FILE, "--log-file", help="Write RFC 5424 syslog to this path inside containers (e.g. /var/log/decnet/decnet.log)"),
|
||||||
archetype_name: Optional[str] = typer.Option(None, "--archetype", "-a", help="Machine archetype slug (e.g. linux-server, windows-workstation)"),
|
archetype_name: Optional[str] = typer.Option(None, "--archetype", "-a", help="Machine archetype slug (e.g. linux-server, windows-workstation)"),
|
||||||
|
mutate_interval: Optional[int] = typer.Option(30, "--mutate-interval", help="Automatically rotate services every N minutes"),
|
||||||
dry_run: bool = typer.Option(False, "--dry-run", help="Generate compose file without starting containers"),
|
dry_run: bool = typer.Option(False, "--dry-run", help="Generate compose file without starting containers"),
|
||||||
no_cache: bool = typer.Option(False, "--no-cache", help="Force rebuild all images, ignoring Docker layer cache"),
|
no_cache: bool = typer.Option(False, "--no-cache", help="Force rebuild all images, ignoring Docker layer cache"),
|
||||||
ipvlan: bool = typer.Option(False, "--ipvlan", help="Use IPvlan L2 instead of MACVLAN (required on WiFi interfaces)"),
|
ipvlan: bool = typer.Option(False, "--ipvlan", help="Use IPvlan L2 instead of MACVLAN (required on WiFi interfaces)"),
|
||||||
config_file: Optional[str] = typer.Option(None, "--config", "-c", help="Path to INI config file"),
|
config_file: Optional[str] = typer.Option(None, "--config", "-c", help="Path to INI config file"),
|
||||||
|
api: bool = typer.Option(False, "--api", help="Start the FastAPI backend to ingest and serve logs"),
|
||||||
|
api_port: int = typer.Option(8000, "--api-port", help="Port for the backend API"),
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Deploy deckies to the LAN."""
|
"""Deploy deckies to the LAN."""
|
||||||
|
import os
|
||||||
if mode not in ("unihost", "swarm"):
|
if mode not in ("unihost", "swarm"):
|
||||||
console.print("[red]--mode must be 'unihost' or 'swarm'[/]")
|
console.print("[red]--mode must be 'unihost' or 'swarm'[/]")
|
||||||
raise typer.Exit(1)
|
raise typer.Exit(1)
|
||||||
@@ -260,9 +300,13 @@ def deploy(
|
|||||||
|
|
||||||
effective_log_target = log_target or ini.log_target
|
effective_log_target = log_target or ini.log_target
|
||||||
effective_log_file = log_file
|
effective_log_file = log_file
|
||||||
|
try:
|
||||||
decky_configs = _build_deckies_from_ini(
|
decky_configs = _build_deckies_from_ini(
|
||||||
ini, subnet_cidr, effective_gateway, host_ip, randomize_services
|
ini, subnet_cidr, effective_gateway, host_ip, randomize_services, cli_mutate_interval=mutate_interval
|
||||||
)
|
)
|
||||||
|
except ValueError as e:
|
||||||
|
console.print(f"[red]{e}[/]")
|
||||||
|
raise typer.Exit(1)
|
||||||
# ------------------------------------------------------------------ #
|
# ------------------------------------------------------------------ #
|
||||||
# Classic CLI path #
|
# Classic CLI path #
|
||||||
# ------------------------------------------------------------------ #
|
# ------------------------------------------------------------------ #
|
||||||
@@ -316,11 +360,16 @@ def deploy(
|
|||||||
decky_configs = _build_deckies(
|
decky_configs = _build_deckies(
|
||||||
deckies, ips, services_list, randomize_services,
|
deckies, ips, services_list, randomize_services,
|
||||||
distros_explicit=distros_list, randomize_distros=randomize_distros,
|
distros_explicit=distros_list, randomize_distros=randomize_distros,
|
||||||
archetype=arch,
|
archetype=arch, mutate_interval=mutate_interval,
|
||||||
)
|
)
|
||||||
effective_log_target = log_target
|
effective_log_target = log_target
|
||||||
effective_log_file = log_file
|
effective_log_file = log_file
|
||||||
|
|
||||||
|
# Handle automatic log file for API
|
||||||
|
if api and not effective_log_file:
|
||||||
|
effective_log_file = os.path.join(os.getcwd(), "decnet.log")
|
||||||
|
console.print(f"[cyan]API mode enabled: defaulting log-file to {effective_log_file}[/]")
|
||||||
|
|
||||||
config = DecnetConfig(
|
config = DecnetConfig(
|
||||||
mode=mode,
|
mode=mode,
|
||||||
interface=iface,
|
interface=iface,
|
||||||
@@ -330,6 +379,7 @@ def deploy(
|
|||||||
log_target=effective_log_target,
|
log_target=effective_log_target,
|
||||||
log_file=effective_log_file,
|
log_file=effective_log_file,
|
||||||
ipvlan=ipvlan,
|
ipvlan=ipvlan,
|
||||||
|
mutate_interval=mutate_interval,
|
||||||
)
|
)
|
||||||
|
|
||||||
if effective_log_target and not dry_run:
|
if effective_log_target and not dry_run:
|
||||||
@@ -341,6 +391,57 @@ def deploy(
|
|||||||
from decnet.deployer import deploy as _deploy
|
from decnet.deployer import deploy as _deploy
|
||||||
_deploy(config, dry_run=dry_run, no_cache=no_cache)
|
_deploy(config, dry_run=dry_run, no_cache=no_cache)
|
||||||
|
|
||||||
|
if mutate_interval is not None and not dry_run:
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
console.print(f"[green]Starting DECNET Mutator watcher in the background (interval: {mutate_interval}m)...[/]")
|
||||||
|
try:
|
||||||
|
subprocess.Popen(
|
||||||
|
[sys.executable, "-m", "decnet.cli", "mutate", "--watch"],
|
||||||
|
stdout=subprocess.DEVNULL,
|
||||||
|
stderr=subprocess.STDOUT
|
||||||
|
)
|
||||||
|
except (FileNotFoundError, subprocess.SubprocessError):
|
||||||
|
console.print("[red]Failed to start mutator watcher.[/]")
|
||||||
|
|
||||||
|
if api and not dry_run:
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
console.print(f"[green]Starting DECNET API on port {api_port}...[/]")
|
||||||
|
_env: dict[str, str] = os.environ.copy()
|
||||||
|
_env["DECNET_INGEST_LOG_FILE"] = str(effective_log_file)
|
||||||
|
try:
|
||||||
|
subprocess.Popen(
|
||||||
|
[sys.executable, "-m", "uvicorn", "decnet.web.api:app", "--host", "0.0.0.0", "--port", str(api_port)],
|
||||||
|
env=_env,
|
||||||
|
stdout=subprocess.DEVNULL,
|
||||||
|
stderr=subprocess.STDOUT
|
||||||
|
)
|
||||||
|
console.print(f"[dim]API running at http://0.0.0.0:{api_port}[/]")
|
||||||
|
except (FileNotFoundError, subprocess.SubprocessError):
|
||||||
|
console.print("[red]Failed to start API. Ensure 'uvicorn' is installed in the current environment.[/]")
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
|
||||||
|
def mutate(
|
||||||
|
watch: bool = typer.Option(False, "--watch", "-w", help="Run continuously and mutate deckies according to their interval"),
|
||||||
|
decky_name: Optional[str] = typer.Option(None, "--decky", "-d", help="Force mutate a specific decky immediately"),
|
||||||
|
force_all: bool = typer.Option(False, "--all", help="Force mutate all deckies immediately"),
|
||||||
|
) -> None:
|
||||||
|
"""Manually trigger or continuously watch for decky mutation."""
|
||||||
|
from decnet.mutator import mutate_decky, mutate_all, run_watch_loop
|
||||||
|
|
||||||
|
if watch:
|
||||||
|
run_watch_loop()
|
||||||
|
return
|
||||||
|
|
||||||
|
if decky_name:
|
||||||
|
mutate_decky(decky_name)
|
||||||
|
elif force_all:
|
||||||
|
mutate_all(force=True)
|
||||||
|
else:
|
||||||
|
mutate_all(force=False)
|
||||||
|
|
||||||
|
|
||||||
@app.command()
|
@app.command()
|
||||||
def status() -> None:
|
def status() -> None:
|
||||||
@@ -459,3 +560,40 @@ def list_archetypes() -> None:
|
|||||||
arch.description,
|
arch.description,
|
||||||
)
|
)
|
||||||
console.print(table)
|
console.print(table)
|
||||||
|
|
||||||
|
|
||||||
|
@app.command(name="web")
|
||||||
|
def serve_web(
|
||||||
|
web_port: int = typer.Option(DECNET_WEB_PORT, "--web-port", help="Port to serve the DECNET Web Dashboard"),
|
||||||
|
host: str = typer.Option(DECNET_WEB_HOST, "--host", help="Host IP to serve the Web Dashboard"),
|
||||||
|
) -> None:
|
||||||
|
"""Serve the DECNET Web Dashboard frontend."""
|
||||||
|
import http.server
|
||||||
|
import socketserver
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Assuming decnet_web/dist is relative to the project root
|
||||||
|
dist_dir = Path(__file__).parent.parent / "decnet_web" / "dist"
|
||||||
|
|
||||||
|
if not dist_dir.exists():
|
||||||
|
console.print(f"[red]Frontend build not found at {dist_dir}. Make sure you run 'npm run build' inside 'decnet_web'.[/]")
|
||||||
|
raise typer.Exit(1)
|
||||||
|
|
||||||
|
class SPAHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
|
||||||
|
def do_GET(self):
|
||||||
|
# Try to serve the requested file
|
||||||
|
path = self.translate_path(self.path)
|
||||||
|
if not Path(path).exists() or Path(path).is_dir():
|
||||||
|
# If not found or is a directory, serve index.html (for React Router)
|
||||||
|
self.path = "/index.html"
|
||||||
|
return super().do_GET()
|
||||||
|
|
||||||
|
import os
|
||||||
|
os.chdir(dist_dir)
|
||||||
|
|
||||||
|
with socketserver.TCPServer((host, web_port), SPAHTTPRequestHandler) as httpd:
|
||||||
|
console.print(f"[green]Serving DECNET Web Dashboard on http://{host}:{web_port}[/]")
|
||||||
|
try:
|
||||||
|
httpd.serve_forever()
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
console.print("\n[dim]Shutting down dashboard server.[/]")
|
||||||
|
|||||||
@@ -11,7 +11,10 @@ from pydantic import BaseModel, field_validator
|
|||||||
|
|
||||||
from decnet.distros import random_hostname as _random_hostname
|
from decnet.distros import random_hostname as _random_hostname
|
||||||
|
|
||||||
STATE_FILE = Path("decnet-state.json")
|
# Calculate absolute path to the project root (where the config file resides)
|
||||||
|
_ROOT: Path = Path(__file__).parent.parent.absolute()
|
||||||
|
STATE_FILE: Path = _ROOT / "decnet-state.json"
|
||||||
|
DEFAULT_MUTATE_INTERVAL: int = 30 # default rotation interval in minutes
|
||||||
|
|
||||||
|
|
||||||
def random_hostname(distro_slug: str = "debian") -> str:
|
def random_hostname(distro_slug: str = "debian") -> str:
|
||||||
@@ -29,6 +32,8 @@ class DeckyConfig(BaseModel):
|
|||||||
archetype: str | None = None # archetype slug if spawned from an archetype profile
|
archetype: str | None = None # archetype slug if spawned from an archetype profile
|
||||||
service_config: dict[str, dict] = {} # optional per-service persona config
|
service_config: dict[str, dict] = {} # optional per-service persona config
|
||||||
nmap_os: str = "linux" # OS family for TCP/IP stack spoofing (see os_fingerprint.py)
|
nmap_os: str = "linux" # OS family for TCP/IP stack spoofing (see os_fingerprint.py)
|
||||||
|
mutate_interval: int | None = None # automatic rotation interval in minutes
|
||||||
|
last_mutated: float = 0.0 # timestamp of last mutation
|
||||||
|
|
||||||
@field_validator("services")
|
@field_validator("services")
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -47,6 +52,7 @@ class DecnetConfig(BaseModel):
|
|||||||
log_target: str | None = None # "ip:port" or None
|
log_target: str | None = None # "ip:port" or None
|
||||||
log_file: str | None = None # path for RFC 5424 syslog file output
|
log_file: str | None = None # path for RFC 5424 syslog file output
|
||||||
ipvlan: bool = False # use IPvlan L2 instead of MACVLAN (WiFi-friendly)
|
ipvlan: bool = False # use IPvlan L2 instead of MACVLAN (WiFi-friendly)
|
||||||
|
mutate_interval: int | None = DEFAULT_MUTATE_INTERVAL # global automatic rotation interval in minutes
|
||||||
|
|
||||||
@field_validator("log_target")
|
@field_validator("log_target")
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|||||||
@@ -131,6 +131,33 @@ def deploy(config: DecnetConfig, dry_run: bool = False, no_cache: bool = False)
|
|||||||
_print_status(config)
|
_print_status(config)
|
||||||
|
|
||||||
|
|
||||||
|
def _kill_api() -> None:
|
||||||
|
"""Find and kill any running DECNET API (uvicorn) or mutator processes."""
|
||||||
|
import psutil
|
||||||
|
import signal
|
||||||
|
import os
|
||||||
|
|
||||||
|
_killed: bool = False
|
||||||
|
for _proc in psutil.process_iter(['pid', 'name', 'cmdline']):
|
||||||
|
try:
|
||||||
|
_cmd = _proc.info['cmdline']
|
||||||
|
if not _cmd:
|
||||||
|
continue
|
||||||
|
if "uvicorn" in _cmd and "decnet.web.api:app" in _cmd:
|
||||||
|
console.print(f"[yellow]Stopping DECNET API (PID {_proc.info['pid']})...[/]")
|
||||||
|
os.kill(_proc.info['pid'], signal.SIGTERM)
|
||||||
|
_killed = True
|
||||||
|
elif "decnet.cli" in _cmd and "mutate" in _cmd and "--watch" in _cmd:
|
||||||
|
console.print(f"[yellow]Stopping DECNET Mutator Watcher (PID {_proc.info['pid']})...[/]")
|
||||||
|
os.kill(_proc.info['pid'], signal.SIGTERM)
|
||||||
|
_killed = True
|
||||||
|
except (psutil.NoSuchProcess, psutil.AccessDenied):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if _killed:
|
||||||
|
console.print("[green]Background processes stopped.[/]")
|
||||||
|
|
||||||
|
|
||||||
def teardown(decky_id: str | None = None) -> None:
|
def teardown(decky_id: str | None = None) -> None:
|
||||||
state = load_state()
|
state = load_state()
|
||||||
if state is None:
|
if state is None:
|
||||||
@@ -159,6 +186,10 @@ def teardown(decky_id: str | None = None) -> None:
|
|||||||
teardown_host_macvlan(decky_range)
|
teardown_host_macvlan(decky_range)
|
||||||
remove_macvlan_network(client)
|
remove_macvlan_network(client)
|
||||||
clear_state()
|
clear_state()
|
||||||
|
|
||||||
|
# Kill API when doing full teardown
|
||||||
|
_kill_api()
|
||||||
|
|
||||||
net_driver = "IPvlan" if config.ipvlan else "MACVLAN"
|
net_driver = "IPvlan" if config.ipvlan else "MACVLAN"
|
||||||
console.print(f"[green]All deckies torn down. {net_driver} network removed.[/]")
|
console.print(f"[green]All deckies torn down. {net_driver} network removed.[/]")
|
||||||
|
|
||||||
|
|||||||
22
decnet/env.py
Normal file
22
decnet/env.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
|
# Calculate absolute path to the project root
|
||||||
|
_ROOT: Path = Path(__file__).parent.parent.absolute()
|
||||||
|
|
||||||
|
# Load .env.local first, then fallback to .env
|
||||||
|
load_dotenv(_ROOT / ".env.local")
|
||||||
|
load_dotenv(_ROOT / ".env")
|
||||||
|
|
||||||
|
# API Options
|
||||||
|
DECNET_API_HOST: str = os.environ.get("DECNET_API_HOST", "0.0.0.0")
|
||||||
|
DECNET_API_PORT: int = int(os.environ.get("DECNET_API_PORT", "8000"))
|
||||||
|
DECNET_JWT_SECRET: str = os.environ.get("DECNET_JWT_SECRET", "fallback-secret-key-change-me")
|
||||||
|
DECNET_INGEST_LOG_FILE: str | None = os.environ.get("DECNET_INGEST_LOG_FILE", "/var/log/decnet/decnet.log")
|
||||||
|
|
||||||
|
# Web Dashboard Options
|
||||||
|
DECNET_WEB_HOST: str = os.environ.get("DECNET_WEB_HOST", "0.0.0.0")
|
||||||
|
DECNET_WEB_PORT: int = int(os.environ.get("DECNET_WEB_PORT", "8080"))
|
||||||
|
DECNET_ADMIN_USER: str = os.environ.get("DECNET_ADMIN_USER", "admin")
|
||||||
|
DECNET_ADMIN_PASSWORD: str = os.environ.get("DECNET_ADMIN_PASSWORD", "admin")
|
||||||
@@ -54,6 +54,7 @@ class DeckySpec:
|
|||||||
archetype: str | None = None
|
archetype: str | None = None
|
||||||
service_config: dict[str, dict] = field(default_factory=dict)
|
service_config: dict[str, dict] = field(default_factory=dict)
|
||||||
nmap_os: str | None = None # explicit OS family override (linux/windows/bsd/embedded/cisco)
|
nmap_os: str | None = None # explicit OS family override (linux/windows/bsd/embedded/cisco)
|
||||||
|
mutate_interval: int | None = None
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@@ -71,6 +72,7 @@ class IniConfig:
|
|||||||
gateway: str | None = None
|
gateway: str | None = None
|
||||||
interface: str | None = None
|
interface: str | None = None
|
||||||
log_target: str | None = None
|
log_target: str | None = None
|
||||||
|
mutate_interval: int | None = None
|
||||||
deckies: list[DeckySpec] = field(default_factory=list)
|
deckies: list[DeckySpec] = field(default_factory=list)
|
||||||
custom_services: list[CustomServiceSpec] = field(default_factory=list)
|
custom_services: list[CustomServiceSpec] = field(default_factory=list)
|
||||||
|
|
||||||
@@ -81,7 +83,33 @@ def load_ini(path: str | Path) -> IniConfig:
|
|||||||
read = cp.read(str(path))
|
read = cp.read(str(path))
|
||||||
if not read:
|
if not read:
|
||||||
raise FileNotFoundError(f"Config file not found: {path}")
|
raise FileNotFoundError(f"Config file not found: {path}")
|
||||||
|
return _parse_configparser(cp)
|
||||||
|
|
||||||
|
|
||||||
|
def load_ini_from_string(content: str) -> IniConfig:
|
||||||
|
"""Parse a DECNET INI string and return an IniConfig."""
|
||||||
|
validate_ini_string(content)
|
||||||
|
cp = configparser.ConfigParser()
|
||||||
|
cp.read_string(content)
|
||||||
|
return _parse_configparser(cp)
|
||||||
|
|
||||||
|
|
||||||
|
def validate_ini_string(content: str) -> None:
|
||||||
|
"""Perform safety and sanity checks on raw INI content string."""
|
||||||
|
# 1. Size limit (e.g. 512KB)
|
||||||
|
if len(content) > 512 * 1024:
|
||||||
|
raise ValueError("INI content too large (max 512KB).")
|
||||||
|
|
||||||
|
# 2. Ensure it's not empty
|
||||||
|
if not content.strip():
|
||||||
|
raise ValueError("INI content is empty.")
|
||||||
|
|
||||||
|
# 3. Basic structure check (must contain at least one section header)
|
||||||
|
if "[" not in content or "]" not in content:
|
||||||
|
raise ValueError("Invalid INI format: no sections found.")
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_configparser(cp: configparser.ConfigParser) -> IniConfig:
|
||||||
cfg = IniConfig()
|
cfg = IniConfig()
|
||||||
|
|
||||||
if cp.has_section("general"):
|
if cp.has_section("general"):
|
||||||
@@ -91,12 +119,23 @@ def load_ini(path: str | Path) -> IniConfig:
|
|||||||
cfg.interface = g.get("interface")
|
cfg.interface = g.get("interface")
|
||||||
cfg.log_target = g.get("log_target") or g.get("log-target")
|
cfg.log_target = g.get("log_target") or g.get("log-target")
|
||||||
|
|
||||||
|
from decnet.services.registry import all_services
|
||||||
|
known_services = set(all_services().keys())
|
||||||
|
|
||||||
# First pass: collect decky sections and custom service definitions
|
# First pass: collect decky sections and custom service definitions
|
||||||
for section in cp.sections():
|
for section in cp.sections():
|
||||||
if section == "general":
|
if section == "general":
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
# A service sub-section is identified if the section name has at least one dot
|
||||||
|
# AND the last segment is a known service name.
|
||||||
|
# e.g. "decky-01.ssh" -> sub-section
|
||||||
|
# e.g. "decky.webmail" -> decky section (if "webmail" is not a service)
|
||||||
if "." in section:
|
if "." in section:
|
||||||
continue # subsections handled in second pass
|
_, _, last_segment = section.rpartition(".")
|
||||||
|
if last_segment in known_services:
|
||||||
|
continue # sub-section handled in second pass
|
||||||
|
|
||||||
if section.startswith("custom-"):
|
if section.startswith("custom-"):
|
||||||
# Bring-your-own service definition
|
# Bring-your-own service definition
|
||||||
s = cp[section]
|
s = cp[section]
|
||||||
@@ -115,17 +154,30 @@ def load_ini(path: str | Path) -> IniConfig:
|
|||||||
services = [sv.strip() for sv in svc_raw.split(",")] if svc_raw else None
|
services = [sv.strip() for sv in svc_raw.split(",")] if svc_raw else None
|
||||||
archetype = s.get("archetype")
|
archetype = s.get("archetype")
|
||||||
nmap_os = s.get("nmap_os") or s.get("nmap-os") or None
|
nmap_os = s.get("nmap_os") or s.get("nmap-os") or None
|
||||||
|
|
||||||
|
mi_raw = s.get("mutate_interval") or s.get("mutate-interval")
|
||||||
|
mutate_interval = None
|
||||||
|
if mi_raw:
|
||||||
|
try:
|
||||||
|
mutate_interval = int(mi_raw)
|
||||||
|
except ValueError:
|
||||||
|
raise ValueError(f"[{section}] mutate_interval= must be an integer, got '{mi_raw}'")
|
||||||
|
|
||||||
amount_raw = s.get("amount", "1")
|
amount_raw = s.get("amount", "1")
|
||||||
try:
|
try:
|
||||||
amount = int(amount_raw)
|
amount = int(amount_raw)
|
||||||
if amount < 1:
|
if amount < 1:
|
||||||
raise ValueError
|
raise ValueError
|
||||||
except ValueError:
|
if amount > 100:
|
||||||
|
raise ValueError(f"[{section}] amount={amount} exceeds maximum allowed (100).")
|
||||||
|
except ValueError as e:
|
||||||
|
if "exceeds maximum" in str(e):
|
||||||
|
raise e
|
||||||
raise ValueError(f"[{section}] amount= must be a positive integer, got '{amount_raw}'")
|
raise ValueError(f"[{section}] amount= must be a positive integer, got '{amount_raw}'")
|
||||||
|
|
||||||
if amount == 1:
|
if amount == 1:
|
||||||
cfg.deckies.append(DeckySpec(
|
cfg.deckies.append(DeckySpec(
|
||||||
name=section, ip=ip, services=services, archetype=archetype, nmap_os=nmap_os,
|
name=section, ip=ip, services=services, archetype=archetype, nmap_os=nmap_os, mutate_interval=mutate_interval,
|
||||||
))
|
))
|
||||||
else:
|
else:
|
||||||
# Expand into N deckies; explicit ip is ignored (can't share one IP)
|
# Expand into N deckies; explicit ip is ignored (can't share one IP)
|
||||||
@@ -141,6 +193,7 @@ def load_ini(path: str | Path) -> IniConfig:
|
|||||||
services=services,
|
services=services,
|
||||||
archetype=archetype,
|
archetype=archetype,
|
||||||
nmap_os=nmap_os,
|
nmap_os=nmap_os,
|
||||||
|
mutate_interval=mutate_interval,
|
||||||
))
|
))
|
||||||
|
|
||||||
# Second pass: collect per-service subsections [decky-name.service]
|
# Second pass: collect per-service subsections [decky-name.service]
|
||||||
@@ -149,7 +202,11 @@ def load_ini(path: str | Path) -> IniConfig:
|
|||||||
for section in cp.sections():
|
for section in cp.sections():
|
||||||
if "." not in section:
|
if "." not in section:
|
||||||
continue
|
continue
|
||||||
decky_name, _, svc_name = section.partition(".")
|
|
||||||
|
decky_name, dot, svc_name = section.rpartition(".")
|
||||||
|
if svc_name not in known_services:
|
||||||
|
continue # not a service sub-section
|
||||||
|
|
||||||
svc_cfg = {k: v for k, v in cp[section].items()}
|
svc_cfg = {k: v for k, v in cp[section].items()}
|
||||||
if decky_name in decky_map:
|
if decky_name in decky_map:
|
||||||
# Direct match — single decky
|
# Direct match — single decky
|
||||||
|
|||||||
152
decnet/mutator.py
Normal file
152
decnet/mutator.py
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
"""
|
||||||
|
Mutation Engine for DECNET.
|
||||||
|
Handles dynamic rotation of exposed honeypot services over time.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import random
|
||||||
|
import subprocess
|
||||||
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from rich.console import Console
|
||||||
|
|
||||||
|
from decnet.archetypes import get_archetype
|
||||||
|
from decnet.cli import _all_service_names
|
||||||
|
from decnet.composer import write_compose
|
||||||
|
from decnet.config import DeckyConfig, load_state, save_state
|
||||||
|
from decnet.deployer import COMPOSE_FILE
|
||||||
|
|
||||||
|
console = Console()
|
||||||
|
|
||||||
|
def _compose_with_retry(
|
||||||
|
*args: str,
|
||||||
|
compose_file: Path = COMPOSE_FILE,
|
||||||
|
retries: int = 3,
|
||||||
|
delay: float = 5.0,
|
||||||
|
) -> None:
|
||||||
|
"""Run a docker compose command, retrying on transient failures."""
|
||||||
|
last_exc: subprocess.CalledProcessError | None = None
|
||||||
|
cmd = ["docker", "compose", "-f", str(compose_file), *args]
|
||||||
|
for attempt in range(1, retries + 1):
|
||||||
|
result = subprocess.run(cmd, capture_output=True, text=True)
|
||||||
|
if result.returncode == 0:
|
||||||
|
if result.stdout:
|
||||||
|
print(result.stdout, end="")
|
||||||
|
return
|
||||||
|
last_exc = subprocess.CalledProcessError(
|
||||||
|
result.returncode, cmd, result.stdout, result.stderr
|
||||||
|
)
|
||||||
|
if attempt < retries:
|
||||||
|
time.sleep(delay)
|
||||||
|
delay *= 2
|
||||||
|
raise last_exc
|
||||||
|
|
||||||
|
def mutate_decky(decky_name: str) -> bool:
|
||||||
|
"""
|
||||||
|
Perform an Intra-Archetype Shuffle for a specific decky.
|
||||||
|
Returns True if mutation succeeded, False otherwise.
|
||||||
|
"""
|
||||||
|
state = load_state()
|
||||||
|
if state is None:
|
||||||
|
console.print("[red]No active deployment found (no decnet-state.json).[/]")
|
||||||
|
return False
|
||||||
|
|
||||||
|
config, compose_path = state
|
||||||
|
decky: Optional[DeckyConfig] = next((d for d in config.deckies if d.name == decky_name), None)
|
||||||
|
|
||||||
|
if not decky:
|
||||||
|
console.print(f"[red]Decky '{decky_name}' not found in state.[/]")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Determine allowed services pool
|
||||||
|
if decky.archetype:
|
||||||
|
try:
|
||||||
|
arch = get_archetype(decky.archetype)
|
||||||
|
svc_pool = list(arch.services)
|
||||||
|
except ValueError:
|
||||||
|
svc_pool = _all_service_names()
|
||||||
|
else:
|
||||||
|
svc_pool = _all_service_names()
|
||||||
|
|
||||||
|
if not svc_pool:
|
||||||
|
console.print(f"[yellow]No services available for mutating '{decky_name}'.[/]")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Prevent mutating to the exact same set if possible
|
||||||
|
current_services = set(decky.services)
|
||||||
|
|
||||||
|
attempts = 0
|
||||||
|
while True:
|
||||||
|
count = random.randint(1, min(3, len(svc_pool)))
|
||||||
|
chosen = set(random.sample(svc_pool, count))
|
||||||
|
attempts += 1
|
||||||
|
if chosen != current_services or attempts > 20:
|
||||||
|
break
|
||||||
|
|
||||||
|
decky.services = list(chosen)
|
||||||
|
decky.last_mutated = time.time()
|
||||||
|
|
||||||
|
# Save new state
|
||||||
|
save_state(config, compose_path)
|
||||||
|
|
||||||
|
# Regenerate compose file
|
||||||
|
write_compose(config, compose_path)
|
||||||
|
|
||||||
|
console.print(f"[cyan]Mutating '{decky_name}' to services: {', '.join(decky.services)}[/]")
|
||||||
|
|
||||||
|
# Bring up the new services and remove old orphans
|
||||||
|
try:
|
||||||
|
_compose_with_retry("up", "-d", "--remove-orphans", compose_file=compose_path)
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
console.print(f"[red]Failed to mutate '{decky_name}': {e.stderr}[/]")
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def mutate_all(force: bool = False) -> None:
|
||||||
|
"""
|
||||||
|
Check all deckies and mutate those that are due.
|
||||||
|
If force=True, mutates all deckies regardless of schedule.
|
||||||
|
"""
|
||||||
|
state = load_state()
|
||||||
|
if state is None:
|
||||||
|
console.print("[red]No active deployment found.[/]")
|
||||||
|
return
|
||||||
|
|
||||||
|
config, _ = state
|
||||||
|
now = time.time()
|
||||||
|
|
||||||
|
mutated_count = 0
|
||||||
|
for decky in config.deckies:
|
||||||
|
interval_mins = decky.mutate_interval or config.mutate_interval
|
||||||
|
if interval_mins is None and not force:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if force:
|
||||||
|
due = True
|
||||||
|
else:
|
||||||
|
elapsed_secs = now - decky.last_mutated
|
||||||
|
due = elapsed_secs >= (interval_mins * 60)
|
||||||
|
|
||||||
|
if due:
|
||||||
|
success = mutate_decky(decky.name)
|
||||||
|
if success:
|
||||||
|
mutated_count += 1
|
||||||
|
# Re-load state for next decky just in case, but mutate_decky saves it.
|
||||||
|
# However, mutate_decky operates on its own loaded state.
|
||||||
|
# Since mutate_decky loads and saves the state, our loop over `config.deckies`
|
||||||
|
# has an outdated `last_mutated` if we don't reload. It's fine because we process one by one.
|
||||||
|
|
||||||
|
if mutated_count == 0 and not force:
|
||||||
|
console.print("[dim]No deckies are due for mutation.[/]")
|
||||||
|
|
||||||
|
def run_watch_loop(poll_interval_secs: int = 10) -> None:
|
||||||
|
"""Run an infinite loop checking for deckies that need mutation."""
|
||||||
|
console.print(f"[green]DECNET Mutator Watcher started (polling every {poll_interval_secs}s).[/]")
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
mutate_all(force=False)
|
||||||
|
time.sleep(poll_interval_secs)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
console.print("\n[dim]Mutator watcher stopped.[/]")
|
||||||
344
decnet/web/api.py
Normal file
344
decnet/web/api.py
Normal file
@@ -0,0 +1,344 @@
|
|||||||
|
import uuid
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
|
from datetime import timedelta
|
||||||
|
from typing import Any, AsyncGenerator, Optional
|
||||||
|
|
||||||
|
import jwt
|
||||||
|
from fastapi import Depends, FastAPI, HTTPException, Query, status, Request
|
||||||
|
from fastapi.responses import StreamingResponse
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from fastapi.security import OAuth2PasswordBearer
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from decnet.web.auth import (
|
||||||
|
ACCESS_TOKEN_EXPIRE_MINUTES,
|
||||||
|
ALGORITHM,
|
||||||
|
SECRET_KEY,
|
||||||
|
create_access_token,
|
||||||
|
get_password_hash,
|
||||||
|
verify_password,
|
||||||
|
)
|
||||||
|
from decnet.web.sqlite_repository import SQLiteRepository
|
||||||
|
from decnet.web.ingester import log_ingestion_worker
|
||||||
|
from decnet.env import DECNET_ADMIN_USER, DECNET_ADMIN_PASSWORD
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
repo: SQLiteRepository = SQLiteRepository()
|
||||||
|
ingestion_task: Optional[asyncio.Task[Any]] = None
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
|
||||||
|
global ingestion_task
|
||||||
|
await repo.initialize()
|
||||||
|
# Create default admin if no users exist
|
||||||
|
_admin_user: Optional[dict[str, Any]] = await repo.get_user_by_username(DECNET_ADMIN_USER)
|
||||||
|
if not _admin_user:
|
||||||
|
await repo.create_user(
|
||||||
|
{
|
||||||
|
"uuid": str(uuid.uuid4()),
|
||||||
|
"username": DECNET_ADMIN_USER,
|
||||||
|
"password_hash": get_password_hash(DECNET_ADMIN_PASSWORD),
|
||||||
|
"role": "admin",
|
||||||
|
"must_change_password": True
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Start background ingestion task
|
||||||
|
ingestion_task = asyncio.create_task(log_ingestion_worker(repo))
|
||||||
|
|
||||||
|
yield
|
||||||
|
|
||||||
|
# Shutdown ingestion task
|
||||||
|
if ingestion_task:
|
||||||
|
ingestion_task.cancel()
|
||||||
|
|
||||||
|
|
||||||
|
app: FastAPI = FastAPI(
|
||||||
|
title="DECNET Web Dashboard API",
|
||||||
|
version="1.0.0",
|
||||||
|
lifespan=lifespan
|
||||||
|
)
|
||||||
|
|
||||||
|
app.add_middleware(
|
||||||
|
CORSMiddleware,
|
||||||
|
allow_origins=["*"],
|
||||||
|
allow_credentials=True,
|
||||||
|
allow_methods=["*"],
|
||||||
|
allow_headers=["*"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
oauth2_scheme: OAuth2PasswordBearer = OAuth2PasswordBearer(tokenUrl="/api/v1/auth/login")
|
||||||
|
|
||||||
|
|
||||||
|
async def get_current_user(request: Request) -> str:
|
||||||
|
_credentials_exception: HTTPException = HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
detail="Could not validate credentials",
|
||||||
|
headers={"WWW-Authenticate": "Bearer"},
|
||||||
|
)
|
||||||
|
|
||||||
|
# Extract token from header or query param
|
||||||
|
token: str | None = None
|
||||||
|
auth_header = request.headers.get("Authorization")
|
||||||
|
if auth_header and auth_header.startswith("Bearer "):
|
||||||
|
token = auth_header.split(" ")[1]
|
||||||
|
elif request.query_params.get("token"):
|
||||||
|
token = request.query_params.get("token")
|
||||||
|
|
||||||
|
if not token:
|
||||||
|
raise _credentials_exception
|
||||||
|
|
||||||
|
try:
|
||||||
|
_payload: dict[str, Any] = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
||||||
|
_user_uuid: Optional[str] = _payload.get("uuid")
|
||||||
|
if _user_uuid is None:
|
||||||
|
raise _credentials_exception
|
||||||
|
return _user_uuid
|
||||||
|
except jwt.PyJWTError:
|
||||||
|
raise _credentials_exception
|
||||||
|
|
||||||
|
|
||||||
|
class Token(BaseModel):
|
||||||
|
access_token: str
|
||||||
|
token_type: str
|
||||||
|
must_change_password: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class LoginRequest(BaseModel):
|
||||||
|
username: str
|
||||||
|
password: str
|
||||||
|
|
||||||
|
|
||||||
|
class ChangePasswordRequest(BaseModel):
|
||||||
|
old_password: str
|
||||||
|
new_password: str
|
||||||
|
|
||||||
|
|
||||||
|
class LogsResponse(BaseModel):
|
||||||
|
total: int
|
||||||
|
limit: int
|
||||||
|
offset: int
|
||||||
|
data: list[dict[str, Any]]
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/api/v1/auth/login", response_model=Token)
|
||||||
|
async def login(request: LoginRequest) -> dict[str, Any]:
|
||||||
|
_user: Optional[dict[str, Any]] = await repo.get_user_by_username(request.username)
|
||||||
|
if not _user or not verify_password(request.password, _user["password_hash"]):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
detail="Incorrect username or password",
|
||||||
|
headers={"WWW-Authenticate": "Bearer"},
|
||||||
|
)
|
||||||
|
|
||||||
|
_access_token_expires: timedelta = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
|
||||||
|
# Token uses uuid instead of sub
|
||||||
|
_access_token: str = create_access_token(
|
||||||
|
data={"uuid": _user["uuid"]}, expires_delta=_access_token_expires
|
||||||
|
)
|
||||||
|
return {
|
||||||
|
"access_token": _access_token,
|
||||||
|
"token_type": "bearer",
|
||||||
|
"must_change_password": bool(_user.get("must_change_password", False))
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/api/v1/auth/change-password")
async def change_password(request: ChangePasswordRequest, current_user: str = Depends(get_current_user)) -> dict[str, str]:
    """Rotate the authenticated user's password after re-verifying the old one.

    Raises 401 when the user record is missing or the old password is wrong.
    """
    user = await repo.get_user_by_uuid(current_user)
    if not user or not verify_password(request.old_password, user["password_hash"]):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect old password",
        )

    # Writing the new hash also clears the must_change_password flag.
    fresh_hash = get_password_hash(request.new_password)
    await repo.update_user_password(current_user, fresh_hash, must_change_password=False)
    return {"message": "Password updated successfully"}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/api/v1/logs", response_model=LogsResponse)
async def get_logs(
    limit: int = Query(50, ge=1, le=1000),
    offset: int = Query(0, ge=0),
    search: Optional[str] = None,
    current_user: str = Depends(get_current_user)
) -> dict[str, Any]:
    """Return one page of captured logs plus the total count for the filter."""
    rows = await repo.get_logs(limit=limit, offset=offset, search=search)
    matching = await repo.get_total_logs(search=search)
    return {"total": matching, "limit": limit, "offset": offset, "data": rows}
|
||||||
|
|
||||||
|
|
||||||
|
class StatsResponse(BaseModel):
    """Headline metrics returned by GET /api/v1/stats."""
    total_logs: int  # row count of the logs table
    unique_attackers: int  # COUNT(DISTINCT attacker_ip) over the logs
    active_deckies: int  # deckies that have at least one log entry
    deployed_deckies: int  # deckies present in the deployment state file
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/api/v1/stats", response_model=StatsResponse)
async def get_stats(current_user: str = Depends(get_current_user)) -> dict[str, Any]:
    """Return the dashboard's headline metrics (see StatsResponse)."""
    summary = await repo.get_stats_summary()
    return summary
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/api/v1/deckies")
async def get_deckies(current_user: str = Depends(get_current_user)) -> list[dict[str, Any]]:
    """List the deckies of the current deployment, straight from the repository."""
    deckies = await repo.get_deckies()
    return deckies
|
||||||
|
|
||||||
|
|
||||||
|
class MutateIntervalRequest(BaseModel):
    """Request body for PUT /api/v1/deckies/{name}/mutate-interval."""
    # NOTE(review): presumably seconds between automatic mutations, with None
    # disabling the schedule — confirm against decnet.mutator's consumer.
    mutate_interval: int | None
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/api/v1/deckies/{decky_name}/mutate")
async def api_mutate_decky(decky_name: str, current_user: str = Depends(get_current_user)) -> dict[str, str]:
    """Trigger an immediate mutation of one decky; 404 when it is unknown or the mutation fails."""
    from decnet.mutator import mutate_decky

    if not mutate_decky(decky_name):
        raise HTTPException(status_code=404, detail=f"Decky {decky_name} not found or failed to mutate")
    return {"message": f"Successfully mutated {decky_name}"}
|
||||||
|
|
||||||
|
|
||||||
|
@app.put("/api/v1/deckies/{decky_name}/mutate-interval")
async def api_update_mutate_interval(decky_name: str, req: MutateIntervalRequest, current_user: str = Depends(get_current_user)) -> dict[str, str]:
    """Persist a new mutation interval for one decky in the deployment state.

    500 when no deployment state exists; 404 when the decky is unknown.
    """
    from decnet.config import load_state, save_state

    state = load_state()
    if not state:
        raise HTTPException(status_code=500, detail="No active deployment")
    config, compose_path = state

    target = None
    for candidate in config.deckies:
        if candidate.name == decky_name:
            target = candidate
            break
    if target is None:
        raise HTTPException(status_code=404, detail="Decky not found")

    target.mutate_interval = req.mutate_interval
    save_state(config, compose_path)
    return {"message": "Mutation interval updated"}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/api/v1/stream")
async def stream_events(
    request: Request,
    last_event_id: int = Query(0, alias="lastEventId"),
    search: Optional[str] = None,
    current_user: str = Depends(get_current_user)
) -> StreamingResponse:
    """Server-sent-events stream of new log rows plus periodic stats refreshes.

    Polls the repository once per second. Clients may resume from a known row
    id via ?lastEventId=N; 0 means "start at the current tail".
    """
    import json
    import asyncio

    async def event_generator() -> AsyncGenerator[str, None]:
        # Resume from the supplied id, or from the current max when it is 0.
        cursor = last_event_id
        if cursor == 0:
            cursor = await repo.get_max_log_id()

        stats_interval_sec = 10  # one poll per second => stats roughly every 10 s
        ticks = 0

        while not await request.is_disconnected():
            # Poll for rows newer than the cursor.
            batch = await repo.get_logs_after_id(cursor, limit=50, search=search)
            if batch:
                cursor = max(row["id"] for row in batch)
                payload = json.dumps({"type": "logs", "data": batch})
                yield f"event: message\ndata: {payload}\n\n"

                # New rows imply the aggregates moved too — push stats this tick.
                ticks = stats_interval_sec

            if ticks >= stats_interval_sec:
                stats_payload = json.dumps({"type": "stats", "data": await repo.get_stats_summary()})
                yield f"event: message\ndata: {stats_payload}\n\n"
                ticks = 0

            ticks += 1
            await asyncio.sleep(1)

    return StreamingResponse(event_generator(), media_type="text/event-stream")
|
||||||
|
|
||||||
|
|
||||||
|
class DeployIniRequest(BaseModel):
    """Request body for POST /api/v1/deckies/deploy: raw INI text (5 B – 512 KiB)."""
    ini_content: str = Field(..., min_length=5, max_length=512 * 1024)
|
||||||
|
|
||||||
|
@app.post("/api/v1/deckies/deploy")
async def api_deploy_deckies(req: DeployIniRequest, current_user: str = Depends(get_current_user)) -> dict[str, str]:
    """Parse an uploaded INI, merge its deckies into the active config, redeploy.

    Reuses the existing deployment state when present (keeping its network
    settings); otherwise builds a fresh DecnetConfig with detected network
    details. Raises 400 on parse/validation errors and 500 when the deploy
    itself fails.
    """
    # Imported lazily so the API module does not pull in the whole deploy
    # stack (docker, network probing) at import time.
    from decnet.ini_loader import load_ini_from_string
    from decnet.cli import _build_deckies_from_ini
    from decnet.config import load_state, DecnetConfig, DEFAULT_MUTATE_INTERVAL
    from decnet.network import detect_interface, detect_subnet, get_host_ip
    from decnet.deployer import deploy as _deploy
    import logging
    import os

    try:
        ini = load_ini_from_string(req.ini_content)
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Failed to parse INI: {e}")

    state = load_state()
    ingest_log_file = os.environ.get("DECNET_INGEST_LOG_FILE")

    if state:
        # Existing deployment: INI values win over the stored config where set.
        config, _ = state
        subnet_cidr = ini.subnet or config.subnet
        gateway = ini.gateway or config.gateway
        host_ip = get_host_ip(config.interface)
        randomize_services = False
        # Always sync config log_file with current API ingestion target
        if ingest_log_file:
            config.log_file = ingest_log_file
    else:
        # If no state exists, we need to infer network details
        iface = ini.interface or detect_interface()
        subnet_cidr, gateway = ini.subnet, ini.gateway
        if not subnet_cidr or not gateway:
            detected_subnet, detected_gateway = detect_subnet(iface)
            subnet_cidr = subnet_cidr or detected_subnet
            gateway = gateway or detected_gateway
        host_ip = get_host_ip(iface)
        randomize_services = False
        config = DecnetConfig(
            mode="unihost",
            interface=iface,
            subnet=subnet_cidr,
            gateway=gateway,
            deckies=[],
            log_target=ini.log_target,
            log_file=ingest_log_file,
            ipvlan=False,
            mutate_interval=ini.mutate_interval or DEFAULT_MUTATE_INTERVAL
        )

    try:
        new_decky_configs = _build_deckies_from_ini(
            ini, subnet_cidr, gateway, host_ip, randomize_services, cli_mutate_interval=None
        )
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))

    # Merge deckies: same-named entries from the INI replace existing ones.
    existing_deckies_map = {d.name: d for d in config.deckies}
    for new_decky in new_decky_configs:
        existing_deckies_map[new_decky.name] = new_decky

    config.deckies = list(existing_deckies_map.values())

    # We call deploy(config) which regenerates docker-compose and runs `up -d --remove-orphans`.
    try:
        _deploy(config)
    except Exception as e:
        logging.getLogger("decnet.web.api").error(f"Deployment failed: {e}")
        raise HTTPException(status_code=500, detail=f"Deployment failed: {e}")

    return {"message": "Deckies deployed successfully"}
|
||||||
38
decnet/web/auth.py
Normal file
38
decnet/web/auth.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from typing import Optional, Any
|
||||||
|
import jwt
|
||||||
|
import bcrypt
|
||||||
|
|
||||||
|
from decnet.env import DECNET_JWT_SECRET
|
||||||
|
|
||||||
|
SECRET_KEY: str = DECNET_JWT_SECRET
|
||||||
|
ALGORITHM: str = "HS256"
|
||||||
|
ACCESS_TOKEN_EXPIRE_MINUTES: int = 1440
|
||||||
|
|
||||||
|
|
||||||
|
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Return True when *plain_password* matches the stored bcrypt hash."""
    candidate = plain_password.encode("utf-8")
    stored = hashed_password.encode("utf-8")
    return bcrypt.checkpw(candidate, stored)
|
||||||
|
|
||||||
|
|
||||||
|
def get_password_hash(password: str) -> str:
    """Hash *password* with bcrypt and return the digest as a UTF-8 string."""
    # Cost factor 12 matches the passlib/bcrypt default.
    digest = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt(rounds=12))
    return digest.decode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def create_access_token(data: dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
    """Build a signed HS256 JWT from *data* plus `exp`/`iat` claims.

    Falls back to a 15-minute lifetime when *expires_delta* is not given.
    """
    now = datetime.now(timezone.utc)
    lifetime = expires_delta if expires_delta else timedelta(minutes=15)
    claims = {**data, "exp": now + lifetime, "iat": now}
    return jwt.encode(claims, SECRET_KEY, algorithm=ALGORITHM)
|
||||||
68
decnet/web/ingester.py
Normal file
68
decnet/web/ingester.py
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
from typing import Any
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from decnet.web.repository import BaseRepository
|
||||||
|
|
||||||
|
logger: logging.Logger = logging.getLogger("decnet.web.ingester")
|
||||||
|
|
||||||
|
async def log_ingestion_worker(repo: BaseRepository) -> None:
    """
    Background task that tails the JSON sibling of DECNET_INGEST_LOG_FILE
    (same path with a .json suffix) and inserts each structured JSON line
    into *repo*.

    Runs forever: polls roughly once per second, resets its byte offset when
    the file shrinks (rotation/truncation), never processes a line that does
    not yet end in a newline, and backs off 5 s on unexpected errors.
    Returns immediately when DECNET_INGEST_LOG_FILE is unset.
    """
    _base_log_file: str | None = os.environ.get("DECNET_INGEST_LOG_FILE")
    if not _base_log_file:
        logger.warning("DECNET_INGEST_LOG_FILE not set. Log ingestion disabled.")
        return

    # The plain-text log has a structured twin at the same path with .json.
    _json_log_path: Path = Path(_base_log_file).with_suffix(".json")
    # Byte offset just past the last fully-processed line.
    _position: int = 0

    logger.info(f"Starting JSON log ingestion from {_json_log_path}")

    while True:
        try:
            if not _json_log_path.exists():
                await asyncio.sleep(2)
                continue

            _stat: os.stat_result = _json_log_path.stat()
            if _stat.st_size < _position:
                # File rotated or truncated
                _position = 0

            if _stat.st_size == _position:
                # No new data
                await asyncio.sleep(1)
                continue

            with open(_json_log_path, "r", encoding="utf-8", errors="replace") as _f:
                _f.seek(_position)
                while True:
                    _line: str = _f.readline()
                    if not _line:
                        break  # EOF reached

                    if not _line.endswith('\n'):
                        # Partial line read, don't process yet, don't advance position
                        break

                    try:
                        _log_data: dict[str, Any] = json.loads(_line.strip())
                        await repo.add_log(_log_data)
                    except json.JSONDecodeError:
                        # NOTE(review): `continue` skips the tell() below, so a
                        # malformed final line is re-read (and re-logged) on the
                        # next pass until more data arrives — confirm intended.
                        logger.error(f"Failed to decode JSON log line: {_line}")
                        continue

                    # Update position after successful line read
                    _position = _f.tell()

        except Exception as _e:
            logger.error(f"Error in log ingestion worker: {_e}")
            await asyncio.sleep(5)

        await asyncio.sleep(1)
|
||||||
61
decnet/web/repository.py
Normal file
61
decnet/web/repository.py
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
|
||||||
|
class BaseRepository(ABC):
    """Storage interface for the DECNET web dashboard.

    Concrete backends (e.g. the SQLite implementation) must provide log
    persistence, aggregate statistics, decky listings and user management.
    """

    @abstractmethod
    async def initialize(self) -> None:
        """Create or upgrade the backing schema; called once at startup."""

    @abstractmethod
    async def add_log(self, log_data: dict[str, Any]) -> None:
        """Persist one structured log record."""

    @abstractmethod
    async def get_logs(
        self,
        limit: int = 50,
        offset: int = 0,
        search: Optional[str] = None
    ) -> list[dict[str, Any]]:
        """Return one page of log rows, optionally filtered by *search*."""

    @abstractmethod
    async def get_total_logs(self, search: Optional[str] = None) -> int:
        """Count log rows matching *search* (all rows when None)."""

    @abstractmethod
    async def get_stats_summary(self) -> dict[str, Any]:
        """Return the dashboard's headline metrics."""

    @abstractmethod
    async def get_deckies(self) -> list[dict[str, Any]]:
        """List the deckies of the current deployment."""

    @abstractmethod
    async def get_user_by_username(self, username: str) -> Optional[dict[str, Any]]:
        """Look up a user record by username, or None when absent."""

    @abstractmethod
    async def get_user_by_uuid(self, uuid: str) -> Optional[dict[str, Any]]:
        """Look up a user record by UUID, or None when absent."""

    @abstractmethod
    async def create_user(self, user_data: dict[str, Any]) -> None:
        """Insert a new dashboard user."""

    @abstractmethod
    async def update_user_password(self, uuid: str, password_hash: str, must_change_password: bool = False) -> None:
        """Store a new password hash and set the must-change flag."""
|
||||||
222
decnet/web/sqlite_repository.py
Normal file
222
decnet/web/sqlite_repository.py
Normal file
@@ -0,0 +1,222 @@
|
|||||||
|
import aiosqlite
|
||||||
|
from typing import Any, Optional
|
||||||
|
from decnet.web.repository import BaseRepository
|
||||||
|
from decnet.config import load_state, _ROOT
|
||||||
|
|
||||||
|
|
||||||
|
class SQLiteRepository(BaseRepository):
    """SQLite implementation of the DECNET web repository.

    Every call opens a short-lived aiosqlite connection; there is no pooling.
    Fix over the previous version: `add_log` no longer duplicates the whole
    INSERT statement across two branches, and the free-text LIKE filter is
    built in one place instead of being copy-pasted into three queries.
    """

    # Columns searched by the dashboard's free-text filter.
    _SEARCH_COLUMNS: tuple[str, ...] = ("raw_line", "decky", "service", "attacker_ip")

    def __init__(self, db_path: str = str(_ROOT / "decnet.db")) -> None:
        # Defaults to decnet.db next to the project root (_ROOT).
        self.db_path: str = db_path

    @classmethod
    def _search_filter(cls, search: Optional[str]) -> tuple[str, list[Any]]:
        """Return (SQL condition, bind params) for *search*, or ("", []) when unset."""
        if not search:
            return "", []
        condition = " OR ".join(f"{col} LIKE ?" for col in cls._SEARCH_COLUMNS)
        like_val = f"%{search}%"
        return condition, [like_val] * len(cls._SEARCH_COLUMNS)

    async def _fetch_all(self, query: str, params: list[Any]) -> list[dict[str, Any]]:
        """Run *query* and return every row as a plain dict."""
        async with aiosqlite.connect(self.db_path) as db:
            db.row_factory = aiosqlite.Row
            async with db.execute(query, params) as cursor:
                rows = await cursor.fetchall()
                return [dict(row) for row in rows]

    async def initialize(self) -> None:
        """Create the logs/users tables and apply ad-hoc column migrations."""
        async with aiosqlite.connect(self.db_path) as db:
            # Logs table
            await db.execute("""
                CREATE TABLE IF NOT EXISTS logs (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
                    decky TEXT,
                    service TEXT,
                    event_type TEXT,
                    attacker_ip TEXT,
                    raw_line TEXT,
                    fields TEXT,
                    msg TEXT
                )
            """)
            # Poor-man's migrations: the ALTER fails harmlessly when the
            # column already exists on an older database file.
            for ddl in (
                "ALTER TABLE logs ADD COLUMN fields TEXT",
                "ALTER TABLE logs ADD COLUMN msg TEXT",
                "ALTER TABLE users ADD COLUMN must_change_password BOOLEAN DEFAULT 0",
            ):
                try:
                    await db.execute(ddl)
                except aiosqlite.OperationalError:
                    pass  # Column already exists (or table created below with it)
            # Users table (internal RBAC)
            await db.execute("""
                CREATE TABLE IF NOT EXISTS users (
                    uuid TEXT PRIMARY KEY,
                    username TEXT UNIQUE,
                    password_hash TEXT,
                    role TEXT DEFAULT 'viewer',
                    must_change_password BOOLEAN DEFAULT 0
                )
            """)
            await db.commit()

    async def add_log(self, log_data: dict[str, Any]) -> None:
        """Insert one log record.

        The timestamp column is only written when the record carries a truthy
        "timestamp" value; otherwise SQLite's CURRENT_TIMESTAMP default applies.
        """
        columns = ["decky", "service", "event_type", "attacker_ip", "raw_line", "fields", "msg"]
        if log_data.get("timestamp"):
            columns.insert(0, "timestamp")
        placeholders = ", ".join("?" for _ in columns)
        sql = f"INSERT INTO logs ({', '.join(columns)}) VALUES ({placeholders})"
        params = tuple(log_data.get(col) for col in columns)
        async with aiosqlite.connect(self.db_path) as db:
            await db.execute(sql, params)
            await db.commit()

    async def get_logs(
        self,
        limit: int = 50,
        offset: int = 0,
        search: Optional[str] = None
    ) -> list[dict[str, Any]]:
        """Return one page of logs, newest first, optionally filtered by *search*."""
        condition, params = self._search_filter(search)
        query = "SELECT * FROM logs"
        if condition:
            query += f" WHERE {condition}"
        query += " ORDER BY timestamp DESC LIMIT ? OFFSET ?"
        params.extend([limit, offset])
        return await self._fetch_all(query, params)

    async def get_max_log_id(self) -> int:
        """Highest log row id, or 0 when the table is empty."""
        async with aiosqlite.connect(self.db_path) as db:
            db.row_factory = aiosqlite.Row
            async with db.execute("SELECT MAX(id) as max_id FROM logs") as cursor:
                row = await cursor.fetchone()
                return row["max_id"] if row and row["max_id"] is not None else 0

    async def get_logs_after_id(self, last_id: int, limit: int = 50, search: Optional[str] = None) -> list[dict[str, Any]]:
        """Rows with id > *last_id* in ascending id order (feeds the SSE stream)."""
        condition, search_params = self._search_filter(search)
        query = "SELECT * FROM logs WHERE id > ?"
        params: list[Any] = [last_id]
        if condition:
            query += f" AND ({condition})"
            params.extend(search_params)
        query += " ORDER BY id ASC LIMIT ?"
        params.append(limit)
        return await self._fetch_all(query, params)

    async def get_total_logs(self, search: Optional[str] = None) -> int:
        """Count log rows, optionally restricted to the free-text filter."""
        condition, params = self._search_filter(search)
        query = "SELECT COUNT(*) as total FROM logs"
        if condition:
            query += f" WHERE {condition}"
        async with aiosqlite.connect(self.db_path) as db:
            db.row_factory = aiosqlite.Row
            async with db.execute(query, params) as cursor:
                row = await cursor.fetchone()
                return row["total"] if row else 0

    async def get_stats_summary(self) -> dict[str, Any]:
        """Aggregate headline metrics for the dashboard."""
        async with aiosqlite.connect(self.db_path) as db:
            db.row_factory = aiosqlite.Row
            async with db.execute("SELECT COUNT(*) as total_logs FROM logs") as cursor:
                row = await cursor.fetchone()
                total_logs: int = row["total_logs"] if row else 0

            async with db.execute("SELECT COUNT(DISTINCT attacker_ip) as unique_attackers FROM logs") as cursor:
                row = await cursor.fetchone()
                unique_attackers: int = row["unique_attackers"] if row else 0

            # Active deckies are those that HAVE interaction logs
            async with db.execute("SELECT COUNT(DISTINCT decky) as active_deckies FROM logs") as cursor:
                row = await cursor.fetchone()
                active_deckies: int = row["active_deckies"] if row else 0

        # Deployed deckies are all those in the state file
        state = load_state()
        deployed_deckies: int = len(state[0].deckies) if state else 0

        return {
            "total_logs": total_logs,
            "unique_attackers": unique_attackers,
            "active_deckies": active_deckies,
            "deployed_deckies": deployed_deckies
        }

    async def get_deckies(self) -> list[dict[str, Any]]:
        """Serialize the deckies of the current deployment state ([] when none)."""
        state = load_state()
        if not state:
            return []
        # Could later be enriched with interaction counts / last-seen from the DB.
        return [decky.model_dump() for decky in state[0].deckies]

    async def get_user_by_username(self, username: str) -> Optional[dict[str, Any]]:
        """Fetch one user row by username, or None."""
        async with aiosqlite.connect(self.db_path) as db:
            db.row_factory = aiosqlite.Row
            async with db.execute("SELECT * FROM users WHERE username = ?", (username,)) as cursor:
                row = await cursor.fetchone()
                return dict(row) if row else None

    async def get_user_by_uuid(self, uuid: str) -> Optional[dict[str, Any]]:
        """Fetch one user row by UUID, or None."""
        async with aiosqlite.connect(self.db_path) as db:
            db.row_factory = aiosqlite.Row
            async with db.execute("SELECT * FROM users WHERE uuid = ?", (uuid,)) as cursor:
                row = await cursor.fetchone()
                return dict(row) if row else None

    async def create_user(self, user_data: dict[str, Any]) -> None:
        """Insert a new dashboard user (must_change_password defaults to False)."""
        async with aiosqlite.connect(self.db_path) as db:
            await db.execute(
                "INSERT INTO users (uuid, username, password_hash, role, must_change_password) VALUES (?, ?, ?, ?, ?)",
                (
                    user_data["uuid"],
                    user_data["username"],
                    user_data["password_hash"],
                    user_data["role"],
                    user_data.get("must_change_password", False)
                )
            )
            await db.commit()

    async def update_user_password(self, uuid: str, password_hash: str, must_change_password: bool = False) -> None:
        """Store a new password hash and set the must-change flag for *uuid*."""
        async with aiosqlite.connect(self.db_path) as db:
            await db.execute(
                "UPDATE users SET password_hash = ?, must_change_password = ? WHERE uuid = ?",
                (password_hash, must_change_password, uuid)
            )
            await db.commit()
|
||||||
26
decnet_web/.gitignore
vendored
Normal file
26
decnet_web/.gitignore
vendored
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
# Logs
|
||||||
|
logs
|
||||||
|
*.log
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
||||||
|
pnpm-debug.log*
|
||||||
|
lerna-debug.log*
|
||||||
|
|
||||||
|
node_modules
|
||||||
|
dist
|
||||||
|
dist-ssr
|
||||||
|
*.local
|
||||||
|
|
||||||
|
# Editor directories and files
|
||||||
|
.vscode/*
|
||||||
|
!.vscode/extensions.json
|
||||||
|
.idea
|
||||||
|
.DS_Store
|
||||||
|
*.suo
|
||||||
|
*.ntvs*
|
||||||
|
*.njsproj
|
||||||
|
*.sln
|
||||||
|
*.sw?
|
||||||
|
.env
|
||||||
|
.env.local
|
||||||
73
decnet_web/README.md
Normal file
73
decnet_web/README.md
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
# React + TypeScript + Vite
|
||||||
|
|
||||||
|
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
|
||||||
|
|
||||||
|
Currently, two official plugins are available:
|
||||||
|
|
||||||
|
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Oxc](https://oxc.rs)
|
||||||
|
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/)
|
||||||
|
|
||||||
|
## React Compiler
|
||||||
|
|
||||||
|
The React Compiler is not enabled on this template because of its impact on dev and build performance. To add it, see [this documentation](https://react.dev/learn/react-compiler/installation).
|
||||||
|
|
||||||
|
## Expanding the ESLint configuration
|
||||||
|
|
||||||
|
If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
|
||||||
|
|
||||||
|
```js
|
||||||
|
export default defineConfig([
|
||||||
|
globalIgnores(['dist']),
|
||||||
|
{
|
||||||
|
files: ['**/*.{ts,tsx}'],
|
||||||
|
extends: [
|
||||||
|
// Other configs...
|
||||||
|
|
||||||
|
// Remove tseslint.configs.recommended and replace with this
|
||||||
|
tseslint.configs.recommendedTypeChecked,
|
||||||
|
// Alternatively, use this for stricter rules
|
||||||
|
tseslint.configs.strictTypeChecked,
|
||||||
|
// Optionally, add this for stylistic rules
|
||||||
|
tseslint.configs.stylisticTypeChecked,
|
||||||
|
|
||||||
|
// Other configs...
|
||||||
|
],
|
||||||
|
languageOptions: {
|
||||||
|
parserOptions: {
|
||||||
|
project: ['./tsconfig.node.json', './tsconfig.app.json'],
|
||||||
|
tsconfigRootDir: import.meta.dirname,
|
||||||
|
},
|
||||||
|
// other options...
|
||||||
|
},
|
||||||
|
},
|
||||||
|
])
|
||||||
|
```
|
||||||
|
|
||||||
|
You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
|
||||||
|
|
||||||
|
```js
|
||||||
|
// eslint.config.js
|
||||||
|
import reactX from 'eslint-plugin-react-x'
|
||||||
|
import reactDom from 'eslint-plugin-react-dom'
|
||||||
|
|
||||||
|
export default defineConfig([
|
||||||
|
globalIgnores(['dist']),
|
||||||
|
{
|
||||||
|
files: ['**/*.{ts,tsx}'],
|
||||||
|
extends: [
|
||||||
|
// Other configs...
|
||||||
|
// Enable lint rules for React
|
||||||
|
reactX.configs['recommended-typescript'],
|
||||||
|
// Enable lint rules for React DOM
|
||||||
|
reactDom.configs.recommended,
|
||||||
|
],
|
||||||
|
languageOptions: {
|
||||||
|
parserOptions: {
|
||||||
|
project: ['./tsconfig.node.json', './tsconfig.app.json'],
|
||||||
|
tsconfigRootDir: import.meta.dirname,
|
||||||
|
},
|
||||||
|
// other options...
|
||||||
|
},
|
||||||
|
},
|
||||||
|
])
|
||||||
|
```
|
||||||
23
decnet_web/eslint.config.js
Normal file
23
decnet_web/eslint.config.js
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
// ESLint flat config for the decnet_web React + TypeScript frontend.
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import tseslint from 'typescript-eslint'
import { defineConfig, globalIgnores } from 'eslint/config'

export default defineConfig([
  // Never lint the build output.
  globalIgnores(['dist']),
  {
    files: ['**/*.{ts,tsx}'],
    extends: [
      js.configs.recommended,
      tseslint.configs.recommended,
      reactHooks.configs.flat.recommended,
      reactRefresh.configs.vite,
    ],
    languageOptions: {
      ecmaVersion: 2020,
      globals: globals.browser, // browser globals: window, document, etc.
    },
  },
])
|
||||||
13
decnet_web/index.html
Normal file
13
decnet_web/index.html
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
<!doctype html>
<!-- Vite entry page for the decnet_web dashboard; the React app mounts into #root. -->
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <link rel="icon" type="image/svg+xml" href="/favicon.svg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>decnet_web</title>
  </head>
  <body>
    <div id="root"></div>
    <script type="module" src="/src/main.tsx"></script>
  </body>
</html>
|
||||||
3320
decnet_web/package-lock.json
generated
Normal file
3320
decnet_web/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
33
decnet_web/package.json
Normal file
33
decnet_web/package.json
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
{
|
||||||
|
"name": "decnet_web",
|
||||||
|
"private": true,
|
||||||
|
"version": "0.0.0",
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"dev": "vite",
|
||||||
|
"build": "tsc -b && vite build",
|
||||||
|
"lint": "eslint .",
|
||||||
|
"preview": "vite preview"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"axios": "^1.14.0",
|
||||||
|
"lucide-react": "^1.7.0",
|
||||||
|
"react": "^19.2.4",
|
||||||
|
"react-dom": "^19.2.4",
|
||||||
|
"react-router-dom": "^7.14.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@eslint/js": "^9.39.4",
|
||||||
|
"@types/node": "^24.12.2",
|
||||||
|
"@types/react": "^19.2.14",
|
||||||
|
"@types/react-dom": "^19.2.3",
|
||||||
|
"@vitejs/plugin-react": "^6.0.1",
|
||||||
|
"eslint": "^9.39.4",
|
||||||
|
"eslint-plugin-react-hooks": "^7.0.1",
|
||||||
|
"eslint-plugin-react-refresh": "^0.5.2",
|
||||||
|
"globals": "^17.4.0",
|
||||||
|
"typescript": "~6.0.2",
|
||||||
|
"typescript-eslint": "^8.58.0",
|
||||||
|
"vite": "^8.0.4"
|
||||||
|
}
|
||||||
|
}
|
||||||
1
decnet_web/public/favicon.svg
Normal file
1
decnet_web/public/favicon.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 9.3 KiB |
24
decnet_web/public/icons.svg
Normal file
24
decnet_web/public/icons.svg
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
<svg xmlns="http://www.w3.org/2000/svg">
|
||||||
|
<symbol id="bluesky-icon" viewBox="0 0 16 17">
|
||||||
|
<g clip-path="url(#bluesky-clip)"><path fill="#08060d" d="M7.75 7.735c-.693-1.348-2.58-3.86-4.334-5.097-1.68-1.187-2.32-.981-2.74-.79C.188 2.065.1 2.812.1 3.251s.241 3.602.398 4.13c.52 1.744 2.367 2.333 4.07 2.145-2.495.37-4.71 1.278-1.805 4.512 3.196 3.309 4.38-.71 4.987-2.746.608 2.036 1.307 5.91 4.93 2.746 2.72-2.746.747-4.143-1.747-4.512 1.702.189 3.55-.4 4.07-2.145.156-.528.397-3.691.397-4.13s-.088-1.186-.575-1.406c-.42-.19-1.06-.395-2.741.79-1.755 1.24-3.64 3.752-4.334 5.099"/></g>
|
||||||
|
<defs><clipPath id="bluesky-clip"><path fill="#fff" d="M.1.85h15.3v15.3H.1z"/></clipPath></defs>
|
||||||
|
</symbol>
|
||||||
|
<symbol id="discord-icon" viewBox="0 0 20 19">
|
||||||
|
<path fill="#08060d" d="M16.224 3.768a14.5 14.5 0 0 0-3.67-1.153c-.158.286-.343.67-.47.976a13.5 13.5 0 0 0-4.067 0c-.128-.306-.317-.69-.476-.976A14.4 14.4 0 0 0 3.868 3.77C1.546 7.28.916 10.703 1.231 14.077a14.7 14.7 0 0 0 4.5 2.306q.545-.748.965-1.587a9.5 9.5 0 0 1-1.518-.74q.191-.14.372-.293c2.927 1.369 6.107 1.369 8.999 0q.183.152.372.294-.723.437-1.52.74.418.838.963 1.588a14.6 14.6 0 0 0 4.504-2.308c.37-3.911-.63-7.302-2.644-10.309m-9.13 8.234c-.878 0-1.599-.82-1.599-1.82 0-.998.705-1.82 1.6-1.82.894 0 1.614.82 1.599 1.82.001 1-.705 1.82-1.6 1.82m5.91 0c-.878 0-1.599-.82-1.599-1.82 0-.998.705-1.82 1.6-1.82.893 0 1.614.82 1.599 1.82 0 1-.706 1.82-1.6 1.82"/>
|
||||||
|
</symbol>
|
||||||
|
<symbol id="documentation-icon" viewBox="0 0 21 20">
|
||||||
|
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="m15.5 13.333 1.533 1.322c.645.555.967.833.967 1.178s-.322.623-.967 1.179L15.5 18.333m-3.333-5-1.534 1.322c-.644.555-.966.833-.966 1.178s.322.623.966 1.179l1.534 1.321"/>
|
||||||
|
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M17.167 10.836v-4.32c0-1.41 0-2.117-.224-2.68-.359-.906-1.118-1.621-2.08-1.96-.599-.21-1.349-.21-2.848-.21-2.623 0-3.935 0-4.983.369-1.684.591-3.013 1.842-3.641 3.428C3 6.449 3 7.684 3 10.154v2.122c0 2.558 0 3.838.706 4.726q.306.383.713.671c.76.536 1.79.64 3.581.66"/>
|
||||||
|
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M3 10a2.78 2.78 0 0 1 2.778-2.778c.555 0 1.209.097 1.748-.047.48-.129.854-.503.982-.982.145-.54.048-1.194.048-1.749a2.78 2.78 0 0 1 2.777-2.777"/>
|
||||||
|
</symbol>
|
||||||
|
<symbol id="github-icon" viewBox="0 0 19 19">
|
||||||
|
<path fill="#08060d" fill-rule="evenodd" d="M9.356 1.85C5.05 1.85 1.57 5.356 1.57 9.694a7.84 7.84 0 0 0 5.324 7.44c.387.079.528-.168.528-.376 0-.182-.013-.805-.013-1.454-2.165.467-2.616-.935-2.616-.935-.349-.91-.864-1.143-.864-1.143-.71-.48.051-.48.051-.48.787.051 1.2.805 1.2.805.695 1.194 1.817.857 2.268.649.064-.507.27-.857.49-1.052-1.728-.182-3.545-.857-3.545-3.87 0-.857.31-1.558.8-2.104-.078-.195-.349-1 .077-2.078 0 0 .657-.208 2.14.805a7.5 7.5 0 0 1 1.946-.26c.657 0 1.328.092 1.946.26 1.483-1.013 2.14-.805 2.14-.805.426 1.078.155 1.883.078 2.078.502.546.799 1.247.799 2.104 0 3.013-1.818 3.675-3.558 3.87.284.247.528.714.528 1.454 0 1.052-.012 1.896-.012 2.156 0 .208.142.455.528.377a7.84 7.84 0 0 0 5.324-7.441c.013-4.338-3.48-7.844-7.773-7.844" clip-rule="evenodd"/>
|
||||||
|
</symbol>
|
||||||
|
<symbol id="social-icon" viewBox="0 0 20 20">
|
||||||
|
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M12.5 6.667a4.167 4.167 0 1 0-8.334 0 4.167 4.167 0 0 0 8.334 0"/>
|
||||||
|
<path fill="none" stroke="#aa3bff" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.35" d="M2.5 16.667a5.833 5.833 0 0 1 8.75-5.053m3.837.474.513 1.035c.07.144.257.282.414.309l.93.155c.596.1.736.536.307.965l-.723.73a.64.64 0 0 0-.152.531l.207.903c.164.715-.213.991-.84.618l-.872-.52a.63.63 0 0 0-.577 0l-.872.52c-.624.373-1.003.094-.84-.618l.207-.903a.64.64 0 0 0-.152-.532l-.723-.729c-.426-.43-.289-.864.306-.964l.93-.156a.64.64 0 0 0 .412-.31l.513-1.034c.28-.562.735-.562 1.012 0"/>
|
||||||
|
</symbol>
|
||||||
|
<symbol id="x-icon" viewBox="0 0 19 19">
|
||||||
|
<path fill="#08060d" fill-rule="evenodd" d="M1.893 1.98c.052.072 1.245 1.769 2.653 3.77l2.892 4.114c.183.261.333.48.333.486s-.068.089-.152.183l-.522.593-.765.867-3.597 4.087c-.375.426-.734.834-.798.905a1 1 0 0 0-.118.148c0 .01.236.017.664.017h.663l.729-.83c.4-.457.796-.906.879-.999a692 692 0 0 0 1.794-2.038c.034-.037.301-.34.594-.675l.551-.624.345-.392a7 7 0 0 1 .34-.374c.006 0 .93 1.306 2.052 2.903l2.084 2.965.045.063h2.275c1.87 0 2.273-.003 2.266-.021-.008-.02-1.098-1.572-3.894-5.547-2.013-2.862-2.28-3.246-2.273-3.266.008-.019.282-.332 2.085-2.38l2-2.274 1.567-1.782c.022-.028-.016-.03-.65-.03h-.674l-.3.342a871 871 0 0 1-1.782 2.025c-.067.075-.405.458-.75.852a100 100 0 0 1-.803.91c-.148.172-.299.344-.99 1.127-.304.343-.32.358-.345.327-.015-.019-.904-1.282-1.976-2.808L6.365 1.85H1.8zm1.782.91 8.078 11.294c.772 1.08 1.413 1.973 1.425 1.984.016.017.241.02 1.05.017l1.03-.004-2.694-3.766L7.796 5.75 5.722 2.852l-1.039-.004-1.039-.004z" clip-rule="evenodd"/>
|
||||||
|
</symbol>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 4.9 KiB |
184
decnet_web/src/App.css
Normal file
184
decnet_web/src/App.css
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
.counter {
|
||||||
|
font-size: 16px;
|
||||||
|
padding: 5px 10px;
|
||||||
|
border-radius: 5px;
|
||||||
|
color: var(--accent);
|
||||||
|
background: var(--accent-bg);
|
||||||
|
border: 2px solid transparent;
|
||||||
|
transition: border-color 0.3s;
|
||||||
|
margin-bottom: 24px;
|
||||||
|
|
||||||
|
&:hover {
|
||||||
|
border-color: var(--accent-border);
|
||||||
|
}
|
||||||
|
&:focus-visible {
|
||||||
|
outline: 2px solid var(--accent);
|
||||||
|
outline-offset: 2px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.hero {
|
||||||
|
position: relative;
|
||||||
|
|
||||||
|
.base,
|
||||||
|
.framework,
|
||||||
|
.vite {
|
||||||
|
inset-inline: 0;
|
||||||
|
margin: 0 auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.base {
|
||||||
|
width: 170px;
|
||||||
|
position: relative;
|
||||||
|
z-index: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.framework,
|
||||||
|
.vite {
|
||||||
|
position: absolute;
|
||||||
|
}
|
||||||
|
|
||||||
|
.framework {
|
||||||
|
z-index: 1;
|
||||||
|
top: 34px;
|
||||||
|
height: 28px;
|
||||||
|
transform: perspective(2000px) rotateZ(300deg) rotateX(44deg) rotateY(39deg)
|
||||||
|
scale(1.4);
|
||||||
|
}
|
||||||
|
|
||||||
|
.vite {
|
||||||
|
z-index: 0;
|
||||||
|
top: 107px;
|
||||||
|
height: 26px;
|
||||||
|
width: auto;
|
||||||
|
transform: perspective(2000px) rotateZ(300deg) rotateX(40deg) rotateY(39deg)
|
||||||
|
scale(0.8);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#center {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 25px;
|
||||||
|
place-content: center;
|
||||||
|
place-items: center;
|
||||||
|
flex-grow: 1;
|
||||||
|
|
||||||
|
@media (max-width: 1024px) {
|
||||||
|
padding: 32px 20px 24px;
|
||||||
|
gap: 18px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#next-steps {
|
||||||
|
display: flex;
|
||||||
|
border-top: 1px solid var(--border);
|
||||||
|
text-align: left;
|
||||||
|
|
||||||
|
& > div {
|
||||||
|
flex: 1 1 0;
|
||||||
|
padding: 32px;
|
||||||
|
@media (max-width: 1024px) {
|
||||||
|
padding: 24px 20px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.icon {
|
||||||
|
margin-bottom: 16px;
|
||||||
|
width: 22px;
|
||||||
|
height: 22px;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (max-width: 1024px) {
|
||||||
|
flex-direction: column;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#docs {
|
||||||
|
border-right: 1px solid var(--border);
|
||||||
|
|
||||||
|
@media (max-width: 1024px) {
|
||||||
|
border-right: none;
|
||||||
|
border-bottom: 1px solid var(--border);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#next-steps ul {
|
||||||
|
list-style: none;
|
||||||
|
padding: 0;
|
||||||
|
display: flex;
|
||||||
|
gap: 8px;
|
||||||
|
margin: 32px 0 0;
|
||||||
|
|
||||||
|
.logo {
|
||||||
|
height: 18px;
|
||||||
|
}
|
||||||
|
|
||||||
|
a {
|
||||||
|
color: var(--text-h);
|
||||||
|
font-size: 16px;
|
||||||
|
border-radius: 6px;
|
||||||
|
background: var(--social-bg);
|
||||||
|
display: flex;
|
||||||
|
padding: 6px 12px;
|
||||||
|
align-items: center;
|
||||||
|
gap: 8px;
|
||||||
|
text-decoration: none;
|
||||||
|
transition: box-shadow 0.3s;
|
||||||
|
|
||||||
|
&:hover {
|
||||||
|
box-shadow: var(--shadow);
|
||||||
|
}
|
||||||
|
.button-icon {
|
||||||
|
height: 18px;
|
||||||
|
width: 18px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (max-width: 1024px) {
|
||||||
|
margin-top: 20px;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
justify-content: center;
|
||||||
|
|
||||||
|
li {
|
||||||
|
flex: 1 1 calc(50% - 8px);
|
||||||
|
}
|
||||||
|
|
||||||
|
a {
|
||||||
|
width: 100%;
|
||||||
|
justify-content: center;
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#spacer {
|
||||||
|
height: 88px;
|
||||||
|
border-top: 1px solid var(--border);
|
||||||
|
@media (max-width: 1024px) {
|
||||||
|
height: 48px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.ticks {
|
||||||
|
position: relative;
|
||||||
|
width: 100%;
|
||||||
|
|
||||||
|
&::before,
|
||||||
|
&::after {
|
||||||
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
top: -4.5px;
|
||||||
|
border: 5px solid transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
&::before {
|
||||||
|
left: 0;
|
||||||
|
border-left-color: var(--border);
|
||||||
|
}
|
||||||
|
&::after {
|
||||||
|
right: 0;
|
||||||
|
border-right-color: var(--border);
|
||||||
|
}
|
||||||
|
}
|
||||||
55
decnet_web/src/App.tsx
Normal file
55
decnet_web/src/App.tsx
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
import { useState, useEffect } from 'react';
|
||||||
|
import { BrowserRouter as Router, Routes, Route, Navigate } from 'react-router-dom';
|
||||||
|
import Login from './components/Login';
|
||||||
|
import Layout from './components/Layout';
|
||||||
|
import Dashboard from './components/Dashboard';
|
||||||
|
import DeckyFleet from './components/DeckyFleet';
|
||||||
|
import LiveLogs from './components/LiveLogs';
|
||||||
|
import Attackers from './components/Attackers';
|
||||||
|
import Config from './components/Config';
|
||||||
|
|
||||||
|
function App() {
|
||||||
|
const [token, setToken] = useState<string | null>(localStorage.getItem('token'));
|
||||||
|
const [searchQuery, setSearchQuery] = useState('');
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const savedToken = localStorage.getItem('token');
|
||||||
|
if (savedToken) {
|
||||||
|
setToken(savedToken);
|
||||||
|
}
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const handleLogin = (newToken: string) => {
|
||||||
|
setToken(newToken);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleLogout = () => {
|
||||||
|
localStorage.removeItem('token');
|
||||||
|
setToken(null);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleSearch = (query: string) => {
|
||||||
|
setSearchQuery(query);
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!token) {
|
||||||
|
return <Login onLogin={handleLogin} />;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Router>
|
||||||
|
<Layout onLogout={handleLogout} onSearch={handleSearch}>
|
||||||
|
<Routes>
|
||||||
|
<Route path="/" element={<Dashboard searchQuery={searchQuery} />} />
|
||||||
|
<Route path="/fleet" element={<DeckyFleet />} />
|
||||||
|
<Route path="/live-logs" element={<LiveLogs />} />
|
||||||
|
<Route path="/attackers" element={<Attackers />} />
|
||||||
|
<Route path="/config" element={<Config />} />
|
||||||
|
<Route path="*" element={<Navigate to="/" replace />} />
|
||||||
|
</Routes>
|
||||||
|
</Layout>
|
||||||
|
</Router>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default App;
|
||||||
BIN
decnet_web/src/assets/hero.png
Normal file
BIN
decnet_web/src/assets/hero.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 44 KiB |
1
decnet_web/src/assets/react.svg
Normal file
1
decnet_web/src/assets/react.svg
Normal file
@@ -0,0 +1 @@
|
|||||||
|
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>
|
||||||
|
After Width: | Height: | Size: 4.0 KiB |
1
decnet_web/src/assets/vite.svg
Normal file
1
decnet_web/src/assets/vite.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 8.5 KiB |
20
decnet_web/src/components/Attackers.tsx
Normal file
20
decnet_web/src/components/Attackers.tsx
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import React from 'react';
|
||||||
|
import { Activity } from 'lucide-react';
|
||||||
|
import './Dashboard.css';
|
||||||
|
|
||||||
|
const Attackers: React.FC = () => {
|
||||||
|
return (
|
||||||
|
<div className="logs-section">
|
||||||
|
<div className="section-header">
|
||||||
|
<Activity size={20} />
|
||||||
|
<h2>ATTACKER PROFILES</h2>
|
||||||
|
</div>
|
||||||
|
<div style={{ padding: '40px', textAlign: 'center', opacity: 0.5 }}>
|
||||||
|
<p>NO ACTIVE THREATS PROFILED YET.</p>
|
||||||
|
<p style={{ marginTop: '10px', fontSize: '0.8rem' }}>(Attackers view placeholder)</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default Attackers;
|
||||||
20
decnet_web/src/components/Config.tsx
Normal file
20
decnet_web/src/components/Config.tsx
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import React from 'react';
|
||||||
|
import { Settings } from 'lucide-react';
|
||||||
|
import './Dashboard.css';
|
||||||
|
|
||||||
|
const Config: React.FC = () => {
|
||||||
|
return (
|
||||||
|
<div className="logs-section">
|
||||||
|
<div className="section-header">
|
||||||
|
<Settings size={20} />
|
||||||
|
<h2>SYSTEM CONFIGURATION</h2>
|
||||||
|
</div>
|
||||||
|
<div style={{ padding: '40px', textAlign: 'center', opacity: 0.5 }}>
|
||||||
|
<p>CONFIGURATION READ-ONLY MODE ACTIVE.</p>
|
||||||
|
<p style={{ marginTop: '10px', fontSize: '0.8rem' }}>(Config view placeholder)</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default Config;
|
||||||
129
decnet_web/src/components/Dashboard.css
Normal file
129
decnet_web/src/components/Dashboard.css
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
.dashboard {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 32px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stats-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(3, 1fr);
|
||||||
|
gap: 24px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-card {
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
padding: 24px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 20px;
|
||||||
|
transition: all 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-card:hover {
|
||||||
|
border-color: var(--text-color);
|
||||||
|
box-shadow: var(--matrix-green-glow);
|
||||||
|
transform: translateY(-2px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-icon {
|
||||||
|
color: var(--accent-color);
|
||||||
|
filter: drop-shadow(var(--violet-glow));
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-content {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-label {
|
||||||
|
font-size: 0.7rem;
|
||||||
|
opacity: 0.6;
|
||||||
|
letter-spacing: 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-value {
|
||||||
|
font-size: 1.8rem;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
.logs-section {
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.section-header {
|
||||||
|
padding: 16px 24px;
|
||||||
|
border-bottom: 1px solid var(--border-color);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.section-header h2 {
|
||||||
|
font-size: 0.9rem;
|
||||||
|
letter-spacing: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.logs-table-container {
|
||||||
|
overflow-x: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.logs-table {
|
||||||
|
width: 100%;
|
||||||
|
border-collapse: collapse;
|
||||||
|
font-size: 0.8rem;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.logs-table th {
|
||||||
|
padding: 12px 24px;
|
||||||
|
border-bottom: 1px solid var(--border-color);
|
||||||
|
opacity: 0.5;
|
||||||
|
font-weight: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
.logs-table td {
|
||||||
|
padding: 12px 24px;
|
||||||
|
border-bottom: 1px solid rgba(48, 54, 61, 0.5);
|
||||||
|
}
|
||||||
|
|
||||||
|
.logs-table tr:hover {
|
||||||
|
background-color: rgba(0, 255, 65, 0.03);
|
||||||
|
}
|
||||||
|
|
||||||
|
.raw-line {
|
||||||
|
max-width: 400px;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dim {
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.loader {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
height: 200px;
|
||||||
|
letter-spacing: 4px;
|
||||||
|
animation: pulse 1s infinite alternate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes pulse {
|
||||||
|
from { opacity: 0.5; }
|
||||||
|
to { opacity: 1; }
|
||||||
|
}
|
||||||
|
|
||||||
|
.spin {
|
||||||
|
animation: spin 1.5s linear infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes spin {
|
||||||
|
from { transform: rotate(0deg); }
|
||||||
|
to { transform: rotate(360deg); }
|
||||||
|
}
|
||||||
198
decnet_web/src/components/Dashboard.tsx
Normal file
198
decnet_web/src/components/Dashboard.tsx
Normal file
@@ -0,0 +1,198 @@
|
|||||||
|
import React, { useEffect, useState } from 'react';
|
||||||
|
import api from '../utils/api';
|
||||||
|
import './Dashboard.css';
|
||||||
|
import { Shield, Users, Activity, Clock } from 'lucide-react';
|
||||||
|
|
||||||
|
interface Stats {
|
||||||
|
total_logs: number;
|
||||||
|
unique_attackers: number;
|
||||||
|
active_deckies: number;
|
||||||
|
deployed_deckies: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface LogEntry {
|
||||||
|
id: number;
|
||||||
|
timestamp: string;
|
||||||
|
decky: string;
|
||||||
|
service: string;
|
||||||
|
event_type: string | null;
|
||||||
|
attacker_ip: string;
|
||||||
|
raw_line: string;
|
||||||
|
fields: string | null;
|
||||||
|
msg: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface DashboardProps {
|
||||||
|
searchQuery: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const Dashboard: React.FC<DashboardProps> = ({ searchQuery }) => {
|
||||||
|
const [stats, setStats] = useState<Stats | null>(null);
|
||||||
|
const [logs, setLogs] = useState<LogEntry[]>([]);
|
||||||
|
const [loading, setLoading] = useState(true);
|
||||||
|
|
||||||
|
const fetchData = async () => {
|
||||||
|
try {
|
||||||
|
const [statsRes, logsRes] = await Promise.all([
|
||||||
|
api.get('/stats'),
|
||||||
|
api.get('/logs', { params: { limit: 50, search: searchQuery } })
|
||||||
|
]);
|
||||||
|
setStats(statsRes.data);
|
||||||
|
setLogs(logsRes.data.data);
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch dashboard data', err);
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
// Initial fetch to populate UI immediately
|
||||||
|
fetchData();
|
||||||
|
|
||||||
|
// Setup SSE connection
|
||||||
|
const token = localStorage.getItem('token');
|
||||||
|
const baseUrl = import.meta.env.VITE_API_URL || 'http://localhost:8000/api/v1';
|
||||||
|
let url = `${baseUrl}/stream?token=${token}`;
|
||||||
|
if (searchQuery) {
|
||||||
|
url += `&search=${encodeURIComponent(searchQuery)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const eventSource = new EventSource(url);
|
||||||
|
|
||||||
|
eventSource.onmessage = (event) => {
|
||||||
|
try {
|
||||||
|
const payload = JSON.parse(event.data);
|
||||||
|
if (payload.type === 'logs') {
|
||||||
|
setLogs(prev => {
|
||||||
|
const newLogs = payload.data;
|
||||||
|
// Prepend new logs, keep up to 100 in UI to prevent infinite DOM growth
|
||||||
|
return [...newLogs, ...prev].slice(0, 100);
|
||||||
|
});
|
||||||
|
} else if (payload.type === 'stats') {
|
||||||
|
setStats(payload.data);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to parse SSE payload', err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
eventSource.onerror = (err) => {
|
||||||
|
console.error('SSE connection error, attempting to reconnect...', err);
|
||||||
|
};
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
eventSource.close();
|
||||||
|
};
|
||||||
|
}, [searchQuery]);
|
||||||
|
|
||||||
|
if (loading && !stats) return <div className="loader">INITIALIZING SENSORS...</div>;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="dashboard">
|
||||||
|
<div className="stats-grid">
|
||||||
|
<StatCard
|
||||||
|
icon={<Activity size={32} />}
|
||||||
|
label="TOTAL INTERACTIONS"
|
||||||
|
value={stats?.total_logs || 0}
|
||||||
|
/>
|
||||||
|
<StatCard
|
||||||
|
icon={<Users size={32} />}
|
||||||
|
label="UNIQUE ATTACKERS"
|
||||||
|
value={stats?.unique_attackers || 0}
|
||||||
|
/>
|
||||||
|
<StatCard
|
||||||
|
icon={<Shield size={32} />}
|
||||||
|
label="ACTIVE DECKIES"
|
||||||
|
value={`${stats?.active_deckies || 0} / ${stats?.deployed_deckies || 0}`}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="logs-section">
|
||||||
|
<div className="section-header">
|
||||||
|
<Clock size={20} />
|
||||||
|
<h2>LIVE INTERACTION LOG</h2>
|
||||||
|
</div>
|
||||||
|
<div className="logs-table-container">
|
||||||
|
<table className="logs-table">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>TIMESTAMP</th>
|
||||||
|
<th>DECKY</th>
|
||||||
|
<th>SERVICE</th>
|
||||||
|
<th>ATTACKER IP</th>
|
||||||
|
<th>EVENT</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{logs.length > 0 ? logs.map(log => {
|
||||||
|
let parsedFields: Record<string, string> = {};
|
||||||
|
if (log.fields) {
|
||||||
|
try {
|
||||||
|
parsedFields = JSON.parse(log.fields);
|
||||||
|
} catch (e) {
|
||||||
|
// Ignore parsing errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<tr key={log.id}>
|
||||||
|
<td className="dim">{new Date(log.timestamp).toLocaleString()}</td>
|
||||||
|
<td className="violet-accent">{log.decky}</td>
|
||||||
|
<td className="matrix-text">{log.service}</td>
|
||||||
|
<td>{log.attacker_ip}</td>
|
||||||
|
<td>
|
||||||
|
<div style={{ display: 'flex', flexDirection: 'column', gap: '8px' }}>
|
||||||
|
<div style={{ fontWeight: 'bold', color: 'var(--text-color)' }}>
|
||||||
|
{log.event_type} {log.msg && log.msg !== '-' && <span style={{ fontWeight: 'normal', opacity: 0.8 }}>— {log.msg}</span>}
|
||||||
|
</div>
|
||||||
|
{Object.keys(parsedFields).length > 0 && (
|
||||||
|
<div style={{ display: 'flex', gap: '8px', flexWrap: 'wrap' }}>
|
||||||
|
{Object.entries(parsedFields).map(([k, v]) => (
|
||||||
|
<span key={k} style={{
|
||||||
|
fontSize: '0.7rem',
|
||||||
|
backgroundColor: 'rgba(0, 255, 65, 0.1)',
|
||||||
|
padding: '2px 8px',
|
||||||
|
borderRadius: '4px',
|
||||||
|
border: '1px solid rgba(0, 255, 65, 0.3)',
|
||||||
|
wordBreak: 'break-all'
|
||||||
|
}}>
|
||||||
|
<span style={{ opacity: 0.6 }}>{k}:</span> {v}
|
||||||
|
</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
);
|
||||||
|
}) : (
|
||||||
|
<tr>
|
||||||
|
<td colSpan={5} style={{textAlign: 'center', padding: '40px'}}>NO INTERACTION DETECTED</td>
|
||||||
|
</tr>
|
||||||
|
)}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
interface StatCardProps {
|
||||||
|
icon: React.ReactNode;
|
||||||
|
label: string;
|
||||||
|
value: string | number;
|
||||||
|
}
|
||||||
|
|
||||||
|
const StatCard: React.FC<StatCardProps> = ({ icon, label, value }) => (
|
||||||
|
<div className="stat-card">
|
||||||
|
<div className="stat-icon">{icon}</div>
|
||||||
|
<div className="stat-content">
|
||||||
|
<span className="stat-label">{label}</span>
|
||||||
|
<span className="stat-value">{value.toLocaleString()}</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
|
||||||
|
export default Dashboard;
|
||||||
278
decnet_web/src/components/DeckyFleet.tsx
Normal file
278
decnet_web/src/components/DeckyFleet.tsx
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
import React, { useEffect, useState } from 'react';
|
||||||
|
import api from '../utils/api';
|
||||||
|
import './Dashboard.css'; // Re-use common dashboard styles
|
||||||
|
import { Server, Cpu, Globe, Database, Clock, RefreshCw, Upload } from 'lucide-react';
|
||||||
|
|
||||||
|
interface Decky {
|
||||||
|
name: string;
|
||||||
|
ip: string;
|
||||||
|
services: string[];
|
||||||
|
distro: string;
|
||||||
|
hostname: string;
|
||||||
|
archetype: string | null;
|
||||||
|
service_config: Record<string, Record<string, any>>;
|
||||||
|
mutate_interval: number | null;
|
||||||
|
last_mutated: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
const DeckyFleet: React.FC = () => {
|
||||||
|
const [deckies, setDeckies] = useState<Decky[]>([]);
|
||||||
|
const [loading, setLoading] = useState(true);
|
||||||
|
const [mutating, setMutating] = useState<string | null>(null);
|
||||||
|
const [showDeploy, setShowDeploy] = useState(false);
|
||||||
|
const [iniContent, setIniContent] = useState('');
|
||||||
|
const [deploying, setDeploying] = useState(false);
|
||||||
|
|
||||||
|
const fetchDeckies = async () => {
|
||||||
|
try {
|
||||||
|
const _res = await api.get('/deckies');
|
||||||
|
setDeckies(_res.data);
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to fetch decky fleet', err);
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleMutate = async (name: string) => {
|
||||||
|
setMutating(name);
|
||||||
|
try {
|
||||||
|
await api.post(`/deckies/${name}/mutate`, {}, { timeout: 120000 });
|
||||||
|
await fetchDeckies();
|
||||||
|
} catch (err: any) {
|
||||||
|
console.error('Failed to mutate', err);
|
||||||
|
if (err.code === 'ECONNABORTED') {
|
||||||
|
alert('Mutation is still running in the background but the UI timed out.');
|
||||||
|
} else {
|
||||||
|
alert('Mutation failed');
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
setMutating(null);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleIntervalChange = async (name: string, current: number | null) => {
|
||||||
|
const _val = prompt(`Enter new mutation interval in minutes for ${name} (leave empty to disable):`, current?.toString() || '');
|
||||||
|
if (_val === null) return;
|
||||||
|
const mutate_interval = _val.trim() === '' ? null : parseInt(_val);
|
||||||
|
try {
|
||||||
|
await api.put(`/deckies/${name}/mutate-interval`, { mutate_interval });
|
||||||
|
fetchDeckies();
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to update interval', err);
|
||||||
|
alert('Update failed');
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleDeploy = async () => {
|
||||||
|
if (!iniContent.trim()) return;
|
||||||
|
setDeploying(true);
|
||||||
|
try {
|
||||||
|
await api.post('/deckies/deploy', { ini_content: iniContent }, { timeout: 120000 });
|
||||||
|
setIniContent('');
|
||||||
|
setShowDeploy(false);
|
||||||
|
fetchDeckies();
|
||||||
|
} catch (err: any) {
|
||||||
|
console.error('Deploy failed', err);
|
||||||
|
alert(`Deploy failed: ${err.response?.data?.detail || err.message}`);
|
||||||
|
} finally {
|
||||||
|
setDeploying(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleFileUpload = (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||||
|
const file = e.target.files?.[0];
|
||||||
|
if (!file) return;
|
||||||
|
|
||||||
|
const reader = new FileReader();
|
||||||
|
reader.onload = (event) => {
|
||||||
|
const content = event.target?.result as string;
|
||||||
|
setIniContent(content);
|
||||||
|
};
|
||||||
|
reader.readAsText(file);
|
||||||
|
};
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
fetchDeckies();
|
||||||
|
const _interval = setInterval(fetchDeckies, 10000); // Fleet state updates less frequently than logs
|
||||||
|
return () => clearInterval(_interval);
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
if (loading) return <div className="loader">SCANNING NETWORK FOR DECOYS...</div>;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="dashboard">
|
||||||
|
<div className="section-header" style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center', border: '1px solid var(--border-color)', backgroundColor: 'var(--secondary-color)', marginBottom: '24px' }}>
|
||||||
|
<div style={{ display: 'flex', alignItems: 'center', gap: '12px' }}>
|
||||||
|
<Server size={20} />
|
||||||
|
<h2 style={{ margin: 0 }}>DECOY FLEET ASSET INVENTORY</h2>
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
onClick={() => setShowDeploy(!showDeploy)}
|
||||||
|
style={{ display: 'flex', alignItems: 'center', gap: '8px', border: '1px solid var(--accent-color)', color: 'var(--accent-color)' }}
|
||||||
|
>
|
||||||
|
+ DEPLOY DECKIES
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{showDeploy && (
|
||||||
|
<div style={{ marginBottom: '24px', padding: '24px', backgroundColor: 'var(--secondary-color)', border: '1px solid var(--accent-color)', display: 'flex', flexDirection: 'column', gap: '16px' }}>
|
||||||
|
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||||
|
<h3 style={{ fontSize: '1rem', color: 'var(--text-color)' }}>Deploy via INI Configuration</h3>
|
||||||
|
<div>
|
||||||
|
<input
|
||||||
|
type="file"
|
||||||
|
id="ini-upload"
|
||||||
|
accept=".ini"
|
||||||
|
onChange={handleFileUpload}
|
||||||
|
style={{ display: 'none' }}
|
||||||
|
/>
|
||||||
|
<label
|
||||||
|
htmlFor="ini-upload"
|
||||||
|
style={{
|
||||||
|
cursor: 'pointer',
|
||||||
|
display: 'flex',
|
||||||
|
alignItems: 'center',
|
||||||
|
gap: '8px',
|
||||||
|
fontSize: '0.8rem',
|
||||||
|
color: 'var(--accent-color)',
|
||||||
|
border: '1px solid var(--accent-color)',
|
||||||
|
padding: '4px 12px'
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<Upload size={14} /> UPLOAD FILE
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<textarea
|
||||||
|
value={iniContent}
|
||||||
|
onChange={(e) => setIniContent(e.target.value)}
|
||||||
|
placeholder="[decky-01] archetype=linux-server services=ssh,http"
|
||||||
|
style={{ width: '100%', height: '200px', backgroundColor: '#000', color: 'var(--text-color)', border: '1px solid var(--border-color)', padding: '12px', fontFamily: 'monospace' }}
|
||||||
|
/>
|
||||||
|
<div style={{ display: 'flex', justifyContent: 'flex-end', gap: '12px' }}>
|
||||||
|
<button onClick={() => setShowDeploy(false)} style={{ border: '1px solid var(--border-color)', color: 'var(--dim-color)' }}>CANCEL</button>
|
||||||
|
<button onClick={handleDeploy} disabled={deploying} style={{ background: 'var(--accent-color)', color: '#000', border: 'none', display: 'flex', alignItems: 'center', gap: '8px' }}>
|
||||||
|
{deploying && <RefreshCw size={14} className="spin" />}
|
||||||
|
{deploying ? 'DEPLOYING...' : 'DEPLOY'}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<div className="deckies-grid" style={{ display: 'grid', gridTemplateColumns: 'repeat(auto-fill, minmax(350px, 1fr))', gap: '24px' }}>
|
||||||
|
{deckies.length > 0 ? deckies.map(decky => (
|
||||||
|
<div key={decky.name} className="stat-card" style={{ flexDirection: 'column', alignItems: 'flex-start', gap: '16px', padding: '24px' }}>
|
||||||
|
<div style={{ width: '100%', display: 'flex', justifyContent: 'space-between', alignItems: 'center', borderBottom: '1px solid var(--border-color)', paddingBottom: '12px' }}>
|
||||||
|
<span className="matrix-text" style={{ fontSize: '1.2rem', fontWeight: 'bold' }}>{decky.name}</span>
|
||||||
|
<span className="dim" style={{ fontSize: '0.8rem', backgroundColor: 'rgba(0, 255, 65, 0.1)', padding: '2px 8px', borderRadius: '4px' }}>{decky.ip}</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div style={{ display: 'flex', flexDirection: 'column', gap: '8px', width: '100%' }}>
|
||||||
|
<div style={{ display: 'flex', alignItems: 'center', gap: '8px', fontSize: '0.85rem' }}>
|
||||||
|
<Cpu size={14} className="dim" />
|
||||||
|
<span className="dim">HOSTNAME:</span> {decky.hostname}
|
||||||
|
</div>
|
||||||
|
<div style={{ display: 'flex', alignItems: 'center', gap: '8px', fontSize: '0.85rem' }}>
|
||||||
|
<Globe size={14} className="dim" />
|
||||||
|
<span className="dim">DISTRO:</span> {decky.distro}
|
||||||
|
</div>
|
||||||
|
{decky.archetype && (
|
||||||
|
<div style={{ display: 'flex', alignItems: 'center', gap: '8px', fontSize: '0.85rem' }}>
|
||||||
|
<Database size={14} className="dim" />
|
||||||
|
<span className="dim">ARCHETYPE:</span> <span style={{ color: 'var(--highlight-color)' }}>{decky.archetype}</span>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
<div style={{ display: 'flex', alignItems: 'center', gap: '8px', fontSize: '0.85rem', marginTop: '8px' }}>
|
||||||
|
<Clock size={14} className="dim" />
|
||||||
|
<span className="dim">MUTATION:</span>
|
||||||
|
<span
|
||||||
|
style={{ color: 'var(--accent-color)', cursor: 'pointer', textDecoration: 'underline' }}
|
||||||
|
onClick={() => handleIntervalChange(decky.name, decky.mutate_interval)}
|
||||||
|
>
|
||||||
|
{decky.mutate_interval ? `EVERY ${decky.mutate_interval}m` : 'DISABLED'}
|
||||||
|
</span>
|
||||||
|
<button
|
||||||
|
onClick={() => handleMutate(decky.name)}
|
||||||
|
disabled={!!mutating}
|
||||||
|
style={{
|
||||||
|
background: 'transparent', border: '1px solid var(--accent-color)',
|
||||||
|
color: 'var(--accent-color)', padding: '2px 8px', fontSize: '0.7rem',
|
||||||
|
cursor: mutating ? 'not-allowed' : 'pointer', display: 'flex', alignItems: 'center', gap: '4px', marginLeft: 'auto',
|
||||||
|
opacity: mutating ? 0.5 : 1
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<RefreshCw size={10} className={mutating === decky.name ? "spin" : ""} /> {mutating === decky.name ? 'MUTATING...' : 'FORCE'}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
{decky.last_mutated > 0 && (
|
||||||
|
<div style={{ fontSize: '0.7rem', color: 'var(--dim-color)', fontStyle: 'italic', marginTop: '4px' }}>
|
||||||
|
Last mutated: {new Date(decky.last_mutated * 1000).toLocaleString()}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div style={{ width: '100%' }}>
|
||||||
|
<div className="dim" style={{ fontSize: '0.7rem', marginBottom: '8px', letterSpacing: '1px' }}>EXPOSED SERVICES:</div>
|
||||||
|
<div style={{ display: 'flex', flexWrap: 'wrap', gap: '8px' }}>
|
||||||
|
{decky.services.map(svc => {
|
||||||
|
const _config = decky.service_config[svc];
|
||||||
|
return (
|
||||||
|
<div key={svc} className="service-tag-container" style={{ position: 'relative' }}>
|
||||||
|
<span className="service-tag" style={{
|
||||||
|
display: 'inline-block',
|
||||||
|
padding: '4px 10px',
|
||||||
|
fontSize: '0.75rem',
|
||||||
|
backgroundColor: 'var(--bg-color)',
|
||||||
|
border: '1px solid var(--accent-color)',
|
||||||
|
color: 'var(--accent-color)',
|
||||||
|
borderRadius: '2px',
|
||||||
|
cursor: 'help'
|
||||||
|
}}>
|
||||||
|
{svc}
|
||||||
|
</span>
|
||||||
|
{_config && Object.keys(_config).length > 0 && (
|
||||||
|
<div className="service-config-tooltip" style={{
|
||||||
|
display: 'none',
|
||||||
|
position: 'absolute',
|
||||||
|
bottom: '100%',
|
||||||
|
left: '0',
|
||||||
|
backgroundColor: 'rgba(10, 10, 10, 0.95)',
|
||||||
|
border: '1px solid var(--accent-color)',
|
||||||
|
padding: '12px',
|
||||||
|
zIndex: 100,
|
||||||
|
minWidth: '200px',
|
||||||
|
boxShadow: '0 0 15px rgba(0, 255, 65, 0.2)',
|
||||||
|
marginBottom: '8px'
|
||||||
|
}}>
|
||||||
|
{Object.entries(_config).map(([k, v]) => (
|
||||||
|
<div key={k} style={{ fontSize: '0.7rem', marginBottom: '4px' }}>
|
||||||
|
<span style={{ color: 'var(--highlight-color)', fontWeight: 'bold' }}>{k}:</span>
|
||||||
|
<span style={{ marginLeft: '6px', opacity: 0.9 }}>{String(v)}</span>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)) : (
|
||||||
|
<div className="stat-card" style={{ gridColumn: '1 / -1', justifyContent: 'center', padding: '60px' }}>
|
||||||
|
<span className="dim">NO DECOYS CURRENTLY DEPLOYED IN THIS SECTOR</span>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<style dangerouslySetInnerHTML={{ __html: `
|
||||||
|
.service-tag-container:hover .service-config-tooltip {
|
||||||
|
display: block !important;
|
||||||
|
}
|
||||||
|
`}} />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default DeckyFleet;
|
||||||
179
decnet_web/src/components/Layout.css
Normal file
179
decnet_web/src/components/Layout.css
Normal file
@@ -0,0 +1,179 @@
|
|||||||
|
.layout-container {
|
||||||
|
display: flex;
|
||||||
|
height: 100vh;
|
||||||
|
width: 100vw;
|
||||||
|
background-color: var(--background-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Sidebar Styling */
|
||||||
|
.sidebar {
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
border-right: 1px solid var(--border-color);
|
||||||
|
height: 100%;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
transition: width 0.3s cubic-bezier(0.4, 0, 0.2, 1);
|
||||||
|
overflow: hidden;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar.open {
|
||||||
|
width: 240px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar.closed {
|
||||||
|
width: 70px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar-header {
|
||||||
|
padding: 20px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
border-bottom: 1px solid var(--border-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.logo-text {
|
||||||
|
font-weight: bold;
|
||||||
|
font-size: 1.2rem;
|
||||||
|
margin-left: 10px;
|
||||||
|
letter-spacing: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.toggle-btn {
|
||||||
|
background: transparent;
|
||||||
|
border: none;
|
||||||
|
color: var(--text-color);
|
||||||
|
padding: 0;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.toggle-btn:hover {
|
||||||
|
box-shadow: none;
|
||||||
|
color: var(--accent-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar-nav {
|
||||||
|
flex-grow: 1;
|
||||||
|
padding: 20px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-item {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
padding: 12px 24px;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s ease;
|
||||||
|
color: var(--text-color);
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-item:hover, .nav-item.active {
|
||||||
|
background-color: rgba(0, 255, 65, 0.1);
|
||||||
|
opacity: 1;
|
||||||
|
color: var(--text-color);
|
||||||
|
border-left: 3px solid var(--text-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-label {
|
||||||
|
margin-left: 12px;
|
||||||
|
font-size: 0.9rem;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sidebar-footer {
|
||||||
|
padding: 20px;
|
||||||
|
border-top: 1px solid var(--border-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.logout-btn {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 12px;
|
||||||
|
width: 100%;
|
||||||
|
padding: 10px;
|
||||||
|
border: 1px solid transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
.logout-btn:hover {
|
||||||
|
border: 1px solid var(--accent-color);
|
||||||
|
color: var(--accent-color);
|
||||||
|
background: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Main Content Area */
|
||||||
|
.main-content {
|
||||||
|
flex-grow: 1;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
overflow: hidden;
|
||||||
|
min-width: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Topbar Styling */
|
||||||
|
.topbar {
|
||||||
|
height: 64px;
|
||||||
|
border-bottom: 1px solid var(--border-color);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
padding: 0 32px;
|
||||||
|
background-color: var(--background-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.search-container {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
padding: 4px 12px;
|
||||||
|
max-width: 400px;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.search-icon {
|
||||||
|
margin-right: 8px;
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.search-container input {
|
||||||
|
background: transparent;
|
||||||
|
border: none;
|
||||||
|
width: 100%;
|
||||||
|
padding: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.search-container input:focus {
|
||||||
|
box-shadow: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.topbar-status {
|
||||||
|
font-size: 0.8rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.neon-blink {
|
||||||
|
animation: blink 2s infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes blink {
|
||||||
|
0%, 100% { opacity: 1; text-shadow: var(--matrix-green-glow); }
|
||||||
|
50% { opacity: 0.5; }
|
||||||
|
}
|
||||||
|
|
||||||
|
.violet-accent {
|
||||||
|
color: var(--accent-color);
|
||||||
|
filter: drop-shadow(var(--violet-glow));
|
||||||
|
}
|
||||||
|
|
||||||
|
.matrix-text {
|
||||||
|
color: var(--text-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Viewport for dynamic content */
|
||||||
|
.content-viewport {
|
||||||
|
flex-grow: 1;
|
||||||
|
padding: 32px;
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
90
decnet_web/src/components/Layout.tsx
Normal file
90
decnet_web/src/components/Layout.tsx
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
import React, { useState } from 'react';
|
||||||
|
import { NavLink } from 'react-router-dom';
|
||||||
|
import { Menu, X, Search, Activity, LayoutDashboard, Terminal, Settings, LogOut, Server } from 'lucide-react';
|
||||||
|
import './Layout.css';
|
||||||
|
|
||||||
|
interface LayoutProps {
|
||||||
|
children: React.ReactNode;
|
||||||
|
onLogout: () => void;
|
||||||
|
onSearch: (q: string) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Application shell: collapsible sidebar navigation, a topbar with a search
 * form, and a scrollable viewport for the routed page content.
 */
const Layout: React.FC<LayoutProps> = ({ children, onLogout, onSearch }) => {
  const [expanded, setExpanded] = useState(true);
  const [query, setQuery] = useState('');

  // Forward the current query to the parent instead of navigating anywhere.
  const submitSearch = (e: React.FormEvent) => {
    e.preventDefault();
    onSearch(query);
  };

  const sidebarClass = `sidebar ${expanded ? 'open' : 'closed'}`;

  return (
    <div className="layout-container">
      {/* Sidebar */}
      <aside className={sidebarClass}>
        <div className="sidebar-header">
          <Activity size={24} className="violet-accent" />
          {expanded && <span className="logo-text">DECNET</span>}
          <button className="toggle-btn" onClick={() => setExpanded(!expanded)}>
            {expanded ? <X size={20} /> : <Menu size={20} />}
          </button>
        </div>

        <nav className="sidebar-nav">
          <NavItem to="/" icon={<LayoutDashboard size={20} />} label="Dashboard" open={expanded} />
          <NavItem to="/fleet" icon={<Server size={20} />} label="Decoy Fleet" open={expanded} />
          <NavItem to="/live-logs" icon={<Terminal size={20} />} label="Live Logs" open={expanded} />
          <NavItem to="/attackers" icon={<Activity size={20} />} label="Attackers" open={expanded} />
          <NavItem to="/config" icon={<Settings size={20} />} label="Config" open={expanded} />
        </nav>

        <div className="sidebar-footer">
          <button className="logout-btn" onClick={onLogout}>
            <LogOut size={20} />
            {expanded && <span>Logout</span>}
          </button>
        </div>
      </aside>

      {/* Main Content Area */}
      <main className="main-content">
        {/* Topbar */}
        <header className="topbar">
          <form onSubmit={submitSearch} className="search-container">
            <Search size={18} className="search-icon" />
            <input
              type="text"
              placeholder="Search logs, deckies, IPs..."
              value={query}
              onChange={(e) => setQuery(e.target.value)}
            />
          </form>
          <div className="topbar-status">
            <span className="matrix-text neon-blink">SYSTEM: ACTIVE</span>
          </div>
        </header>

        {/* Routed page content */}
        <div className="content-viewport">{children}</div>
      </main>
    </div>
  );
};
|
||||||
|
|
||||||
|
interface NavItemProps {
|
||||||
|
to: string;
|
||||||
|
icon: React.ReactNode;
|
||||||
|
label: string;
|
||||||
|
open: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
const NavItem: React.FC<NavItemProps> = ({ to, icon, label, open }) => (
|
||||||
|
<NavLink to={to} className={({ isActive }) => `nav-item ${isActive ? 'active' : ''}`} end={to === '/'}>
|
||||||
|
{icon}
|
||||||
|
{open && <span className="nav-label">{label}</span>}
|
||||||
|
</NavLink>
|
||||||
|
);
|
||||||
|
|
||||||
|
export default Layout;
|
||||||
20
decnet_web/src/components/LiveLogs.tsx
Normal file
20
decnet_web/src/components/LiveLogs.tsx
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import React from 'react';
|
||||||
|
import { Terminal } from 'lucide-react';
|
||||||
|
import './Dashboard.css';
|
||||||
|
|
||||||
|
const LiveLogs: React.FC = () => {
|
||||||
|
return (
|
||||||
|
<div className="logs-section">
|
||||||
|
<div className="section-header">
|
||||||
|
<Terminal size={20} />
|
||||||
|
<h2>FULL LIVE LOG STREAM</h2>
|
||||||
|
</div>
|
||||||
|
<div style={{ padding: '40px', textAlign: 'center', opacity: 0.5 }}>
|
||||||
|
<p>STREAM ESTABLISHED. WAITING FOR INCOMING DATA...</p>
|
||||||
|
<p style={{ marginTop: '10px', fontSize: '0.8rem' }}>(Dedicated Live Logs view placeholder)</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default LiveLogs;
|
||||||
90
decnet_web/src/components/Login.css
Normal file
90
decnet_web/src/components/Login.css
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
.login-container {
|
||||||
|
height: 100vh;
|
||||||
|
width: 100vw;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background-color: var(--background-color);
|
||||||
|
background-image:
|
||||||
|
linear-gradient(rgba(0, 255, 65, 0.05) 1px, transparent 1px),
|
||||||
|
linear-gradient(90deg, rgba(0, 255, 65, 0.05) 1px, transparent 1px);
|
||||||
|
background-size: 20px 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.login-box {
|
||||||
|
width: 100%;
|
||||||
|
max-width: 400px;
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
padding: 40px;
|
||||||
|
box-shadow: 0 0 20px rgba(0, 0, 0, 0.5);
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 32px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.login-header {
|
||||||
|
text-align: center;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
gap: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.login-header h1 {
|
||||||
|
font-size: 2.5rem;
|
||||||
|
letter-spacing: 10px;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
.login-header p {
|
||||||
|
font-size: 0.7rem;
|
||||||
|
letter-spacing: 2px;
|
||||||
|
opacity: 0.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
.login-form {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 24px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.form-group {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.form-group label {
|
||||||
|
font-size: 0.7rem;
|
||||||
|
opacity: 0.8;
|
||||||
|
letter-spacing: 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.login-form input {
|
||||||
|
width: 100%;
|
||||||
|
background-color: rgba(0, 0, 0, 0.5);
|
||||||
|
}
|
||||||
|
|
||||||
|
.error-msg {
|
||||||
|
color: #ff4141;
|
||||||
|
font-size: 0.8rem;
|
||||||
|
text-align: center;
|
||||||
|
padding: 8px;
|
||||||
|
border: 1px solid #ff4141;
|
||||||
|
background-color: rgba(255, 65, 65, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.login-form button {
|
||||||
|
padding: 12px;
|
||||||
|
margin-top: 8px;
|
||||||
|
font-weight: bold;
|
||||||
|
letter-spacing: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.login-footer {
|
||||||
|
text-align: center;
|
||||||
|
font-size: 0.6rem;
|
||||||
|
opacity: 0.4;
|
||||||
|
letter-spacing: 1px;
|
||||||
|
}
|
||||||
154
decnet_web/src/components/Login.tsx
Normal file
154
decnet_web/src/components/Login.tsx
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
import React, { useState } from 'react';
|
||||||
|
import api from '../utils/api';
|
||||||
|
import './Login.css';
|
||||||
|
import { Activity } from 'lucide-react';
|
||||||
|
|
||||||
|
interface LoginProps {
|
||||||
|
onLogin: (token: string) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
const Login: React.FC<LoginProps> = ({ onLogin }) => {
|
||||||
|
const [username, setUsername] = useState('');
|
||||||
|
const [password, setPassword] = useState('');
|
||||||
|
const [error, setError] = useState('');
|
||||||
|
const [loading, setLoading] = useState(false);
|
||||||
|
const [needsPasswordChange, setNeedsPasswordChange] = useState(false);
|
||||||
|
const [newPassword, setNewPassword] = useState('');
|
||||||
|
const [confirmPassword, setConfirmPassword] = useState('');
|
||||||
|
const [tempToken, setTempToken] = useState('');
|
||||||
|
|
||||||
|
const handleLoginSubmit = async (e: React.FormEvent) => {
|
||||||
|
e.preventDefault();
|
||||||
|
setLoading(true);
|
||||||
|
setError('');
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await api.post('/auth/login', { username, password });
|
||||||
|
const { access_token, must_change_password } = response.data;
|
||||||
|
|
||||||
|
if (must_change_password) {
|
||||||
|
setTempToken(access_token);
|
||||||
|
setNeedsPasswordChange(true);
|
||||||
|
} else {
|
||||||
|
localStorage.setItem('token', access_token);
|
||||||
|
onLogin(access_token);
|
||||||
|
}
|
||||||
|
} catch (err: any) {
|
||||||
|
setError(err.response?.data?.detail || 'Authentication failed');
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleChangePasswordSubmit = async (e: React.FormEvent) => {
|
||||||
|
e.preventDefault();
|
||||||
|
if (newPassword !== confirmPassword) {
|
||||||
|
setError('Passwords do not match');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
setLoading(true);
|
||||||
|
setError('');
|
||||||
|
|
||||||
|
try {
|
||||||
|
await api.post('/auth/change-password',
|
||||||
|
{ old_password: password, new_password: newPassword },
|
||||||
|
{ headers: { Authorization: `Bearer ${tempToken}` } }
|
||||||
|
);
|
||||||
|
|
||||||
|
// Re-authenticate to get a fresh token with must_change_password=false
|
||||||
|
const response = await api.post('/auth/login', { username, password: newPassword });
|
||||||
|
const { access_token } = response.data;
|
||||||
|
|
||||||
|
localStorage.setItem('token', access_token);
|
||||||
|
onLogin(access_token);
|
||||||
|
} catch (err: any) {
|
||||||
|
setError(err.response?.data?.detail || 'Password change failed');
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="login-container">
|
||||||
|
<div className="login-box">
|
||||||
|
<div className="login-header">
|
||||||
|
<Activity size={48} className="violet-accent neon-blink" />
|
||||||
|
<h1>DECNET</h1>
|
||||||
|
<p>AUTHORIZED PERSONNEL ONLY</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{!needsPasswordChange ? (
|
||||||
|
<form onSubmit={handleLoginSubmit} className="login-form">
|
||||||
|
<div className="form-group">
|
||||||
|
<label>IDENTIFIER</label>
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
value={username}
|
||||||
|
onChange={(e) => setUsername(e.target.value)}
|
||||||
|
required
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="form-group">
|
||||||
|
<label>ACCESS KEY</label>
|
||||||
|
<input
|
||||||
|
type="password"
|
||||||
|
value={password}
|
||||||
|
onChange={(e) => setPassword(e.target.value)}
|
||||||
|
required
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{error && <div className="error-msg">{error}</div>}
|
||||||
|
|
||||||
|
<button type="submit" disabled={loading}>
|
||||||
|
{loading ? 'VERIFYING...' : 'ESTABLISH CONNECTION'}
|
||||||
|
</button>
|
||||||
|
</form>
|
||||||
|
) : (
|
||||||
|
<form onSubmit={handleChangePasswordSubmit} className="login-form">
|
||||||
|
<div className="form-group" style={{ textAlign: 'center', marginBottom: '10px' }}>
|
||||||
|
<p className="violet-accent">MANDATORY SECURITY UPDATE</p>
|
||||||
|
<p style={{ fontSize: '0.8rem', opacity: 0.7 }}>Please establish a new access key</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="form-group">
|
||||||
|
<label>NEW ACCESS KEY</label>
|
||||||
|
<input
|
||||||
|
type="password"
|
||||||
|
value={newPassword}
|
||||||
|
onChange={(e) => setNewPassword(e.target.value)}
|
||||||
|
required
|
||||||
|
minLength={8}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="form-group">
|
||||||
|
<label>CONFIRM KEY</label>
|
||||||
|
<input
|
||||||
|
type="password"
|
||||||
|
value={confirmPassword}
|
||||||
|
onChange={(e) => setConfirmPassword(e.target.value)}
|
||||||
|
required
|
||||||
|
minLength={8}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{error && <div className="error-msg">{error}</div>}
|
||||||
|
|
||||||
|
<button type="submit" disabled={loading}>
|
||||||
|
{loading ? 'UPDATING...' : 'UPDATE SECURE KEY'}
|
||||||
|
</button>
|
||||||
|
</form>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<div className="login-footer">
|
||||||
|
<span>SECURE PROTOCOL v1.0</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default Login;
|
||||||
69
decnet_web/src/index.css
Normal file
69
decnet_web/src/index.css
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
:root {
|
||||||
|
--background-color: #000000;
|
||||||
|
--text-color: #00ff41;
|
||||||
|
--accent-color: #ee82ee;
|
||||||
|
--secondary-color: #0d1117;
|
||||||
|
--border-color: #30363d;
|
||||||
|
--matrix-green-glow: 0 0 10px rgba(0, 255, 65, 0.5);
|
||||||
|
--violet-glow: 0 0 10px rgba(238, 130, 238, 0.5);
|
||||||
|
}
|
||||||
|
|
||||||
|
* {
|
||||||
|
box-sizing: border-box;
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: 'Courier New', Courier, monospace;
|
||||||
|
background-color: var(--background-color);
|
||||||
|
color: var(--text-color);
|
||||||
|
line-height: 1.5;
|
||||||
|
overflow-x: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
button {
|
||||||
|
cursor: pointer;
|
||||||
|
background: transparent;
|
||||||
|
border: 1px solid var(--text-color);
|
||||||
|
color: var(--text-color);
|
||||||
|
padding: 8px 16px;
|
||||||
|
transition: all 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
button:hover {
|
||||||
|
background: var(--text-color);
|
||||||
|
color: var(--background-color);
|
||||||
|
box-shadow: var(--matrix-green-glow);
|
||||||
|
}
|
||||||
|
|
||||||
|
input {
|
||||||
|
background: #0d1117;
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
color: var(--text-color);
|
||||||
|
padding: 8px 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
input:focus {
|
||||||
|
outline: none;
|
||||||
|
border-color: var(--text-color);
|
||||||
|
box-shadow: var(--matrix-green-glow);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Custom scrollbar */
|
||||||
|
::-webkit-scrollbar {
|
||||||
|
width: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-track {
|
||||||
|
background: var(--background-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-thumb {
|
||||||
|
background: var(--secondary-color);
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-thumb:hover {
|
||||||
|
background: var(--border-color);
|
||||||
|
}
|
||||||
10
decnet_web/src/main.tsx
Normal file
10
decnet_web/src/main.tsx
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { StrictMode } from 'react'
|
||||||
|
import { createRoot } from 'react-dom/client'
|
||||||
|
import './index.css'
|
||||||
|
import App from './App.tsx'
|
||||||
|
|
||||||
|
createRoot(document.getElementById('root')!).render(
|
||||||
|
<StrictMode>
|
||||||
|
<App />
|
||||||
|
</StrictMode>,
|
||||||
|
)
|
||||||
15
decnet_web/src/utils/api.ts
Normal file
15
decnet_web/src/utils/api.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import axios from 'axios';
|
||||||
|
|
||||||
|
const api = axios.create({
|
||||||
|
baseURL: import.meta.env.VITE_API_URL || 'http://localhost:8000/api/v1',
|
||||||
|
});
|
||||||
|
|
||||||
|
api.interceptors.request.use((config) => {
|
||||||
|
const token = localStorage.getItem('token');
|
||||||
|
if (token) {
|
||||||
|
config.headers.Authorization = `Bearer ${token}`;
|
||||||
|
}
|
||||||
|
return config;
|
||||||
|
});
|
||||||
|
|
||||||
|
export default api;
|
||||||
25
decnet_web/tsconfig.app.json
Normal file
25
decnet_web/tsconfig.app.json
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
|
||||||
|
"target": "es2023",
|
||||||
|
"lib": ["ES2023", "DOM", "DOM.Iterable"],
|
||||||
|
"module": "esnext",
|
||||||
|
"types": ["vite/client"],
|
||||||
|
"skipLibCheck": true,
|
||||||
|
|
||||||
|
/* Bundler mode */
|
||||||
|
"moduleResolution": "bundler",
|
||||||
|
"allowImportingTsExtensions": true,
|
||||||
|
"verbatimModuleSyntax": true,
|
||||||
|
"moduleDetection": "force",
|
||||||
|
"noEmit": true,
|
||||||
|
"jsx": "react-jsx",
|
||||||
|
|
||||||
|
/* Linting */
|
||||||
|
"noUnusedLocals": true,
|
||||||
|
"noUnusedParameters": true,
|
||||||
|
"erasableSyntaxOnly": true,
|
||||||
|
"noFallthroughCasesInSwitch": true
|
||||||
|
},
|
||||||
|
"include": ["src"]
|
||||||
|
}
|
||||||
7
decnet_web/tsconfig.json
Normal file
7
decnet_web/tsconfig.json
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"files": [],
|
||||||
|
"references": [
|
||||||
|
{ "path": "./tsconfig.app.json" },
|
||||||
|
{ "path": "./tsconfig.node.json" }
|
||||||
|
]
|
||||||
|
}
|
||||||
24
decnet_web/tsconfig.node.json
Normal file
24
decnet_web/tsconfig.node.json
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
|
||||||
|
"target": "es2023",
|
||||||
|
"lib": ["ES2023"],
|
||||||
|
"module": "esnext",
|
||||||
|
"types": ["node"],
|
||||||
|
"skipLibCheck": true,
|
||||||
|
|
||||||
|
/* Bundler mode */
|
||||||
|
"moduleResolution": "bundler",
|
||||||
|
"allowImportingTsExtensions": true,
|
||||||
|
"verbatimModuleSyntax": true,
|
||||||
|
"moduleDetection": "force",
|
||||||
|
"noEmit": true,
|
||||||
|
|
||||||
|
/* Linting */
|
||||||
|
"noUnusedLocals": true,
|
||||||
|
"noUnusedParameters": true,
|
||||||
|
"erasableSyntaxOnly": true,
|
||||||
|
"noFallthroughCasesInSwitch": true
|
||||||
|
},
|
||||||
|
"include": ["vite.config.ts"]
|
||||||
|
}
|
||||||
7
decnet_web/vite.config.ts
Normal file
7
decnet_web/vite.config.ts
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
import { defineConfig } from 'vite'
|
||||||
|
import react from '@vitejs/plugin-react'
|
||||||
|
|
||||||
|
// https://vite.dev/config/
|
||||||
|
export default defineConfig({
|
||||||
|
plugins: [react()],
|
||||||
|
})
|
||||||
29
deploy/decnet-api.service
Normal file
29
deploy/decnet-api.service
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=DECNET API Service
|
||||||
|
After=network.target docker.service
|
||||||
|
Requires=docker.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=decnet
|
||||||
|
Group=decnet
|
||||||
|
WorkingDirectory=/path/to/DECNET
|
||||||
|
# Ensure environment is loaded from the .env file
|
||||||
|
EnvironmentFile=/path/to/DECNET/.env
|
||||||
|
# Use the virtualenv python to run the decnet api command
|
||||||
|
ExecStart=/path/to/DECNET/.venv/bin/decnet api
|
||||||
|
|
||||||
|
# Capabilities required to manage MACVLAN interfaces and network links without root
|
||||||
|
CapabilityBoundingSet=CAP_NET_ADMIN CAP_NET_RAW
|
||||||
|
AmbientCapabilities=CAP_NET_ADMIN CAP_NET_RAW
|
||||||
|
|
||||||
|
# Security Hardening
|
||||||
|
NoNewPrivileges=yes
|
||||||
|
ProtectSystem=full
|
||||||
|
ProtectHome=read-only
|
||||||
|
|
||||||
|
Restart=on-failure
|
||||||
|
RestartSec=5
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
30
deploy/decnet-web.service
Normal file
30
deploy/decnet-web.service
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=DECNET Web Dashboard Service
|
||||||
|
After=network.target decnet-api.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=decnet
|
||||||
|
Group=decnet
|
||||||
|
WorkingDirectory=/path/to/DECNET
|
||||||
|
# Ensure environment is loaded from the .env file
|
||||||
|
EnvironmentFile=/path/to/DECNET/.env
|
||||||
|
# Use the virtualenv python to run the decnet web command
|
||||||
|
ExecStart=/path/to/DECNET/.venv/bin/decnet web
|
||||||
|
|
||||||
|
# The Web Dashboard service does not require network administration privileges.
|
||||||
|
# Enable the following lines if you wish to bind the Dashboard to a privileged port (e.g., 80 or 443)
|
||||||
|
# while still running as a non-root user.
|
||||||
|
# CapabilityBoundingSet=CAP_NET_BIND_SERVICE
|
||||||
|
# AmbientCapabilities=CAP_NET_BIND_SERVICE
|
||||||
|
|
||||||
|
# Security Hardening
|
||||||
|
NoNewPrivileges=yes
|
||||||
|
ProtectSystem=full
|
||||||
|
ProtectHome=read-only
|
||||||
|
|
||||||
|
Restart=on-failure
|
||||||
|
RestartSec=5
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
50
development/DEVELOPMENT.md
Normal file
50
development/DEVELOPMENT.md
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
# DECNET Development Roadmap
|
||||||
|
|
||||||
|
## Core / Hardening
|
||||||
|
|
||||||
|
- [ ] **Attacker fingerprinting** — Capture TLS JA3/JA4 hashes, TCP window sizes, User-Agent strings, and SSH client banners.
|
||||||
|
- [ ] **Canary tokens** — Embed fake AWS keys and honeydocs into decky filesystems.
|
||||||
|
- [ ] **Tarpit mode** — Slow down attackers by drip-feeding bytes or delaying responses.
|
||||||
|
- [x] **Dynamic decky mutation** — Rotate exposed services or OS fingerprints over time.
|
||||||
|
- [ ] **Credential harvesting DB** — Centralized database for all username/password attempts.
|
||||||
|
- [ ] **Session recording** — Full capture for SSH/Telnet sessions.
|
||||||
|
- [ ] **Payload capture** — Store and hash files uploaded by attackers.
|
||||||
|
|
||||||
|
## Detection & Intelligence
|
||||||
|
|
||||||
|
- [ ] **Real-time alerting** — Webhook/Slack/Telegram notifications for first-hits.
|
||||||
|
- [ ] **Threat intel enrichment** — Auto-lookup IPs against AbuseIPDB, Shodan, and GreyNoise.
|
||||||
|
- [ ] **Attack campaign clustering** — Group sessions by signatures and timing patterns.
|
||||||
|
- [ ] **GeoIP mapping** — Visualize attacker origin and ASN data on a map.
|
||||||
|
- [ ] **TTPs tagging** — Map observed behaviors to MITRE ATT&CK techniques.
|
||||||
|
|
||||||
|
## Dashboard & Visibility
|
||||||
|
|
||||||
|
- [x] **Web dashboard** — Real-time React SPA + FastAPI backend for logs and fleet status.
|
||||||
|
- [x] **Decky Inventory** — Dedicated "Decoy Fleet" page showing all deployed assets.
|
||||||
|
- [ ] **Pre-built Kibana/Grafana dashboards** — Ship JSON exports for ELK/Grafana.
|
||||||
|
- [ ] **CLI live feed** — `decnet watch` command for a unified, colored terminal stream.
|
||||||
|
- [ ] **Traversal graph export** — Export attacker movement as DOT or JSON.
|
||||||
|
|
||||||
|
## Deployment & Infrastructure
|
||||||
|
|
||||||
|
- [ ] **SWARM / multihost mode** — Ansible-based orchestration for multi-node deployments.
|
||||||
|
- [ ] **Terraform/Pulumi provider** — Cloud-hosted decky deployment.
|
||||||
|
- [ ] **Kubernetes deployment mode** — Run deckies as K8s pods.
|
||||||
|
- [x] **Lifecycle Management** — Automatic API process termination on `teardown`.
|
||||||
|
- [x] **Health monitoring** — Active vs. Deployed decky tracking in the dashboard.
|
||||||
|
|
||||||
|
## Services & Realism
|
||||||
|
|
||||||
|
- [ ] **HTTPS/TLS support** — Honeypots with SSL certificates.
|
||||||
|
- [ ] **Fake Active Directory** — Convincing AD/LDAP emulation.
|
||||||
|
- [ ] **Realistic web apps** — Fake WordPress, Grafana, and phpMyAdmin templates.
|
||||||
|
- [ ] **OT/ICS profiles** — Expanded Modbus, DNP3, and BACnet support.
|
||||||
|
|
||||||
|
## Developer Experience
|
||||||
|
|
||||||
|
- [x] **API Fuzzing** — Property-based testing for all web endpoints.
|
||||||
|
- [x] **CI/CD pipeline** — Automated testing and linting via Gitea Actions.
|
||||||
|
- [x] **Strict Typing** — Project-wide enforcement of PEP 484 type hints.
|
||||||
|
- [ ] **Plugin SDK docs** — Documentation for adding custom services.
|
||||||
|
- [ ] **Config generator wizard** — `decnet wizard` for interactive setup.
|
||||||
190
development/EVENTS.md
Normal file
190
development/EVENTS.md
Normal file
@@ -0,0 +1,190 @@
|
|||||||
|
# DECNET Honeypot Events
|
||||||
|
|
||||||
|
This document details the events generated by each DECNET honeypot service, as found in their respective `server.py` files.
|
||||||
|
|
||||||
|
## Service: `docker_api`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `request` | `method`, `path`, `remote_addr`, `body` |
|
||||||
|
| `startup` | *None* |
|
||||||
|
|
||||||
|
## Service: `elasticsearch`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `post_request` | `src`, `method`, `path`, `body_preview`, `user_agent` |
|
||||||
|
| `put_request` | `src`, `method`, `path`, `body_preview` |
|
||||||
|
| `delete_request` | `src`, `method`, `path` |
|
||||||
|
| `head_request` | `src`, `method`, `path` |
|
||||||
|
| `root_probe` | `src`, `method`, `path` |
|
||||||
|
| `cat_api` | `src`, `method`, `path` |
|
||||||
|
| `cluster_recon` | `src`, `method`, `path` |
|
||||||
|
| `nodes_recon` | `src`, `method`, `path` |
|
||||||
|
| `security_probe` | `src`, `method`, `path` |
|
||||||
|
| `request` | `src`, `method`, `path` |
|
||||||
|
|
||||||
|
## Service: `ftp`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connection` | `src_ip`, `src_port` |
|
||||||
|
| `user` | `username` |
|
||||||
|
| `auth_attempt` | `username`, `password` |
|
||||||
|
| `download_attempt` | `path` |
|
||||||
|
| `disconnect` | `src_ip`, `src_port` |
|
||||||
|
|
||||||
|
## Service: `http`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `request` | `method`, `path`, `remote_addr`, `headers`, `body` |
|
||||||
|
| `startup` | *None* |
|
||||||
|
|
||||||
|
## Service: `imap`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connect` | `src`, `src_port` |
|
||||||
|
| `disconnect` | `src` |
|
||||||
|
| `auth` | `src`, `username`, `password` |
|
||||||
|
| `command` | `src`, `cmd` |
|
||||||
|
|
||||||
|
## Service: `k8s`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `request` | `method`, `path`, `remote_addr`, `auth`, `body` |
|
||||||
|
| `startup` | *None* |
|
||||||
|
|
||||||
|
## Service: `ldap`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connect` | `src`, `src_port` |
|
||||||
|
| `bind` | `src`, `dn`, `password` |
|
||||||
|
| `disconnect` | `src` |
|
||||||
|
|
||||||
|
## Service: `llmnr`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `query` | `proto`, `src`, `src_port`, `name`, `qtype` |
|
||||||
|
| `raw_packet` | `proto`, `src`, `data`, `error` |
|
||||||
|
|
||||||
|
## Service: `mongodb`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connect` | `src`, `src_port` |
|
||||||
|
| `message` | `src`, `opcode`, `length` |
|
||||||
|
| `disconnect` | `src` |
|
||||||
|
|
||||||
|
## Service: `mqtt`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connect` | `src`, `src_port` |
|
||||||
|
| `disconnect` | `src` |
|
||||||
|
| `auth` | `src` |
|
||||||
|
| `packet` | `src`, `pkt_type` |
|
||||||
|
|
||||||
|
## Service: `mssql`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connect` | `src`, `src_port` |
|
||||||
|
| `disconnect` | `src` |
|
||||||
|
| `auth` | `src`, `username` |
|
||||||
|
| `unknown_packet` | `src`, `pkt_type` |
|
||||||
|
|
||||||
|
## Service: `mysql`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connect` | `src`, `src_port` |
|
||||||
|
| `disconnect` | `src` |
|
||||||
|
| `auth` | `src`, `username` |
|
||||||
|
|
||||||
|
## Service: `pop3`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connect` | `src`, `src_port` |
|
||||||
|
| `disconnect` | `src` |
|
||||||
|
| `user` | `src`, `username` |
|
||||||
|
| `auth` | `src`, `username`, `password` |
|
||||||
|
| `command` | `src`, `cmd` |
|
||||||
|
|
||||||
|
## Service: `postgres`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connect` | `src`, `src_port` |
|
||||||
|
| `startup` | `src`, `username`, `database` |
|
||||||
|
| `auth` | `src`, `pw_hash` |
|
||||||
|
| `disconnect` | `src` |
|
||||||
|
|
||||||
|
## Service: `rdp`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connection` | `src_ip`, `src_port` |
|
||||||
|
| `data` | `src_ip`, `src_port`, `bytes`, `hex` |
|
||||||
|
| `disconnect` | `src_ip`, `src_port` |
|
||||||
|
|
||||||
|
## Service: `redis`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connect` | `src`, `src_port` |
|
||||||
|
| `command` | `src`, `cmd`, `args` |
|
||||||
|
| `disconnect` | `src` |
|
||||||
|
| `auth` | `src`, `password` |
|
||||||
|
|
||||||
|
## Service: `sip`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `request` | `src`, `src_port`, `method`, `from_`, `to`, `username`, `auth` |
|
||||||
|
| `startup` | *None* |
|
||||||
|
|
||||||
|
## Service: `smb`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `shutdown` | *None* |
|
||||||
|
|
||||||
|
## Service: `smtp`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connect` | `src`, `src_port` |
|
||||||
|
| `disconnect` | `src` |
|
||||||
|
| `ehlo` | `src`, `domain` |
|
||||||
|
| `auth_attempt` | `src`, `command` |
|
||||||
|
| `mail_from` | `src`, `value` |
|
||||||
|
| `rcpt_to` | `src`, `value` |
|
||||||
|
| `vrfy` | `src`, `value` |
|
||||||
|
| `unknown_command` | `src`, `command` |
|
||||||
|
|
||||||
|
## Service: `snmp`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `get_request` | `src`, `src_port`, `version`, `community`, `oids` |
|
||||||
|
| `parse_error` | `src`, `error`, `data` |
|
||||||
|
|
||||||
|
## Service: `tftp`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `request` | `src`, `src_port`, `op`, `filename`, `mode` |
|
||||||
|
| `unknown_opcode` | `src`, `opcode`, `data` |
|
||||||
|
|
||||||
|
## Service: `vnc`
|
||||||
|
| Event Type | Included Fields |
|
||||||
|
| --- | --- |
|
||||||
|
| `startup` | *None* |
|
||||||
|
| `connect` | `src`, `src_port` |
|
||||||
|
| `disconnect` | `src` |
|
||||||
|
| `version` | `src`, `client_version` |
|
||||||
|
| `security_choice` | `src`, `type` |
|
||||||
|
| `auth_response` | `src`, `response` |
|
||||||
|
|
||||||
57
development/NOTES.md
Normal file
57
development/NOTES.md
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
# Initial steps
|
||||||
|
|
||||||
|
# Architecture
|
||||||
|
|
||||||
|
## DECNET-UNIHOST model
|
||||||
|
|
||||||
|
The unihost model is a mode in which DECNET deploys an _n_ amount of machines from a single one. This execution model lives in a decoy network which is accessible to an attacker from the outside.
|
||||||
|
|
||||||
|
Each decky (the son of the DECNET unihost) should have different services (RDP, SMB, SSH, FTP, etc) and all of them should communicate with an external, isolated network, which aggregates data and allows
|
||||||
|
visualizations to be made. Think of the ELK stack. That data is then passed back via Logstash or other methods to a SIEM device or something else that may be beneficiated by this collected data.
|
||||||
|
|
||||||
|
## DECNET-MULTIHOST (SWARM) model
|
||||||
|
|
||||||
|
The SWARM model is similar to the UNIHOST model, but the difference is that instead of one real machine, we have n>1 machines. Same thought process really, but deployment may be different.
|
||||||
|
A low cost option and fairly automatable one is the usage of Ansible, sshpass, or other tools.
|
||||||
|
|
||||||
|
# Modus operandi
|
||||||
|
|
||||||
|
## Docker-Compose
|
||||||
|
|
||||||
|
I will use Docker Compose extensively for this project. The reasons are:
|
||||||
|
- Easily managed.
|
||||||
|
- Easily extensible.
|
||||||
|
- Less overhead.
|
||||||
|
|
||||||
|
To be completely transparent: I asked Deepseek to write the initial `docker-compose.yml` file. It was mostly boilerplate, and most of it mainly modified or deleted. It doesn't exist anymore.
|
||||||
|
|
||||||
|
## Distro to use.
|
||||||
|
|
||||||
|
I will be using the `debian:bookworm-slim` image for all the containers. I might think about mixing in there some Ubuntu or a Centos, but for now, Debian will do just fine.
|
||||||
|
|
||||||
|
The distro I'm running is WSL Kali Linux. Let's hope this doesn't cause any problems down the road.
|
||||||
|
|
||||||
|
## Networking
|
||||||
|
|
||||||
|
It was a hussle, but I think MACVLAN or IPVLAN (thanks @Deepseek!) might work. The reasoning behind picking this networking driver is that for the project to work, it requires having containers the entire container accessible from the network. This is to attempt to masquarede them as real, live machines.
|
||||||
|
|
||||||
|
Now, we will need a publicly accesible, real server that has access to this "internal" network. I'll try MACVLAN first.
|
||||||
|
|
||||||
|
### MACVLAN Tests
|
||||||
|
|
||||||
|
I will first use the default network to see what happens.
|
||||||
|
|
||||||
|
```
|
||||||
|
docker network create -d macvlan \
|
||||||
|
--subnet=192.168.1.0/24 \
|
||||||
|
--gateway=192.168.1.1 \
|
||||||
|
-o parent=eth0 localnet
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Issues
|
||||||
|
|
||||||
|
This initial test doesn't seem to be working. Might be that I'm using WSL, so I downloaded a Ubuntu 22.04 Server ISO. I'll try the MACVLAN network on it. Now, if that doesn't work, I don't see how the 802.1q would work, at least on _my network_. Perhaps if I had a switch I could make it work, but currently I don't have one :c
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# End of Notes
|
||||||
@@ -13,16 +13,28 @@ dependencies = [
|
|||||||
"docker>=7.0",
|
"docker>=7.0",
|
||||||
"pyyaml>=6.0",
|
"pyyaml>=6.0",
|
||||||
"jinja2>=3.1",
|
"jinja2>=3.1",
|
||||||
|
"fastapi>=0.110.0",
|
||||||
|
"uvicorn>=0.29.0",
|
||||||
|
"aiosqlite>=0.20.0",
|
||||||
|
"PyJWT>=2.8.0",
|
||||||
|
"bcrypt>=4.1.0",
|
||||||
|
"psutil>=5.9.0",
|
||||||
|
"python-dotenv>=1.0.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
dev = [
|
||||||
"pytest>=8.0",
|
"pytest>=8.0",
|
||||||
"ruff>=0.4",
|
"ruff>=0.4",
|
||||||
"bandit>=1.7",
|
"bandit>=1.7",
|
||||||
"pip-audit>=2.0",
|
"pip-audit>=2.0",
|
||||||
|
"httpx>=0.27.0",
|
||||||
|
"hypothesis>=6.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.scripts]
|
[project.scripts]
|
||||||
decnet = "decnet.cli:app"
|
decnet = "decnet.cli:app"
|
||||||
|
|
||||||
|
|
||||||
[tool.setuptools.packages.find]
|
[tool.setuptools.packages.find]
|
||||||
where = ["."]
|
where = ["."]
|
||||||
include = ["decnet*"]
|
include = ["decnet*"]
|
||||||
|
|||||||
@@ -103,10 +103,8 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
log_path = Path(os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE))
|
log_path = Path(os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE))
|
||||||
try:
|
try:
|
||||||
log_path.parent.mkdir(parents=True, exist_ok=True)
|
log_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
handler = logging.handlers.RotatingFileHandler(
|
handler = logging.FileHandler(
|
||||||
log_path,
|
log_path,
|
||||||
maxBytes=_MAX_BYTES,
|
|
||||||
backupCount=_BACKUP_COUNT,
|
|
||||||
encoding="utf-8",
|
encoding="utf-8",
|
||||||
)
|
)
|
||||||
except OSError:
|
except OSError:
|
||||||
@@ -120,10 +118,111 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.FileHandler(
|
||||||
|
json_path,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -120,10 +120,113 @@ def _get_file_logger() -> logging.Logger:
|
|||||||
return _file_logger
|
return _file_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
_json_logger: logging.Logger | None = None
|
||||||
|
|
||||||
|
def _get_json_logger() -> logging.Logger:
|
||||||
|
global _json_logger
|
||||||
|
if _json_logger is not None:
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||||
|
json_path = Path(log_path_str).with_suffix(".json")
|
||||||
|
try:
|
||||||
|
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
|
json_path,
|
||||||
|
maxBytes=_MAX_BYTES,
|
||||||
|
backupCount=_BACKUP_COUNT,
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
except OSError:
|
||||||
|
handler = logging.StreamHandler()
|
||||||
|
|
||||||
|
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||||
|
_json_logger = logging.getLogger("decnet.json")
|
||||||
|
_json_logger.setLevel(logging.DEBUG)
|
||||||
|
_json_logger.propagate = False
|
||||||
|
_json_logger.addHandler(handler)
|
||||||
|
return _json_logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_syslog_file(line: str) -> None:
|
def write_syslog_file(line: str) -> None:
|
||||||
"""Append a syslog line to the rotating log file."""
|
"""Append a syslog line to the rotating log file."""
|
||||||
try:
|
try:
|
||||||
_get_file_logger().info(line)
|
_get_file_logger().info(line)
|
||||||
|
|
||||||
|
# Also parse and write JSON log
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
_RFC5424_RE: re.Pattern = re.compile(
|
||||||
|
r"^<\d+>1 "
|
||||||
|
r"(\S+) " # 1: TIMESTAMP
|
||||||
|
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||||
|
r"(\S+) " # 3: APP-NAME (service)
|
||||||
|
r"- " # PROCID always NILVALUE
|
||||||
|
r"(\S+) " # 4: MSGID (event_type)
|
||||||
|
r"(.+)$", # 5: SD element + optional MSG
|
||||||
|
)
|
||||||
|
_SD_BLOCK_RE: re.Pattern = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||||
|
_PARAM_RE: re.Pattern = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||||
|
_IP_FIELDS: tuple[str, ...] = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||||
|
|
||||||
|
_m: Optional[re.Match] = _RFC5424_RE.match(line)
|
||||||
|
if _m:
|
||||||
|
_ts_raw: str
|
||||||
|
_decky: str
|
||||||
|
_service: str
|
||||||
|
_event_type: str
|
||||||
|
_sd_rest: str
|
||||||
|
_ts_raw, _decky, _service, _event_type, _sd_rest = _m.groups()
|
||||||
|
|
||||||
|
_fields: dict[str, str] = {}
|
||||||
|
_msg: str = ""
|
||||||
|
|
||||||
|
if _sd_rest.startswith("-"):
|
||||||
|
_msg = _sd_rest[1:].lstrip()
|
||||||
|
elif _sd_rest.startswith("["):
|
||||||
|
_block: Optional[re.Match] = _SD_BLOCK_RE.search(_sd_rest)
|
||||||
|
if _block:
|
||||||
|
for _k, _v in _PARAM_RE.findall(_block.group(1)):
|
||||||
|
_fields[_k] = _v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||||
|
|
||||||
|
# extract msg after the block
|
||||||
|
_msg_match: Optional[re.Match] = re.search(r'\]\s+(.+)$', _sd_rest)
|
||||||
|
if _msg_match:
|
||||||
|
_msg = _msg_match.group(1).strip()
|
||||||
|
else:
|
||||||
|
_msg = _sd_rest
|
||||||
|
|
||||||
|
_attacker_ip: str = "Unknown"
|
||||||
|
for _fname in _IP_FIELDS:
|
||||||
|
if _fname in _fields:
|
||||||
|
_attacker_ip = _fields[_fname]
|
||||||
|
break
|
||||||
|
|
||||||
|
# Parse timestamp to normalize it
|
||||||
|
_ts_formatted: str
|
||||||
|
try:
|
||||||
|
_ts_formatted = datetime.fromisoformat(_ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
except ValueError:
|
||||||
|
_ts_formatted = _ts_raw
|
||||||
|
|
||||||
|
_payload: dict[str, Any] = {
|
||||||
|
"timestamp": _ts_formatted,
|
||||||
|
"decky": _decky,
|
||||||
|
"service": _service,
|
||||||
|
"event_type": _event_type,
|
||||||
|
"attacker_ip": _attacker_ip,
|
||||||
|
"fields": json.dumps(_fields),
|
||||||
|
"msg": _msg,
|
||||||
|
"raw_line": line
|
||||||
|
}
|
||||||
|
_get_json_logger().info(json.dumps(_payload))
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
4
tests/.hypothesis/constants/19d5adc9efd5ec68
Normal file
4
tests/.hypothesis/constants/19d5adc9efd5ec68
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# file: /home/anti/Tools/DECNET/decnet/web/ingester.py
|
||||||
|
# hypothesis_version: 6.151.11
|
||||||
|
|
||||||
|
['.json', 'decnet.web.ingester', 'r', 'replace', 'utf-8']
|
||||||
4
tests/.hypothesis/constants/219a36e8b671f84b
Normal file
4
tests/.hypothesis/constants/219a36e8b671f84b
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# file: /home/anti/Tools/DECNET/decnet/web/repository.py
|
||||||
|
# hypothesis_version: 6.151.11
|
||||||
|
|
||||||
|
[]
|
||||||
4
tests/.hypothesis/constants/a3207e9522fed10c
Normal file
4
tests/.hypothesis/constants/a3207e9522fed10c
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# file: /home/anti/Tools/DECNET/decnet/web/api.py
|
||||||
|
# hypothesis_version: 6.151.11
|
||||||
|
|
||||||
|
[1000, '*', '/api/v1/auth/login', '/api/v1/logs', '/api/v1/stats', '1.0.0', 'Bearer', 'WWW-Authenticate', 'access_token', 'admin', 'bearer', 'data', 'limit', 'message', 'must_change_password', 'offset', 'password_hash', 'role', 'token_type', 'total', 'username', 'uuid']
|
||||||
4
tests/.hypothesis/constants/ceb1d0465029fa83
Normal file
4
tests/.hypothesis/constants/ceb1d0465029fa83
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# file: /home/anti/.local/bin/pytest
|
||||||
|
# hypothesis_version: 6.151.11
|
||||||
|
|
||||||
|
['__main__']
|
||||||
4
tests/.hypothesis/constants/da39a3ee5e6b4b0d
Normal file
4
tests/.hypothesis/constants/da39a3ee5e6b4b0d
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# file: /home/anti/Tools/DECNET/decnet/__init__.py
|
||||||
|
# hypothesis_version: 6.151.11
|
||||||
|
|
||||||
|
[]
|
||||||
4
tests/.hypothesis/constants/da43cd4d80a43169
Normal file
4
tests/.hypothesis/constants/da43cd4d80a43169
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# file: /home/anti/Tools/DECNET/decnet/web/sqlite_repository.py
|
||||||
|
# hypothesis_version: 6.151.11
|
||||||
|
|
||||||
|
['SELECT * FROM logs', 'active_deckies', 'attacker_ip', 'decky', 'decnet.db', 'event_type', 'fields', 'msg', 'must_change_password', 'password_hash', 'raw_line', 'role', 'service', 'timestamp', 'total', 'total_logs', 'unique_attackers', 'username', 'uuid']
|
||||||
4
tests/.hypothesis/constants/df40fa14165138c7
Normal file
4
tests/.hypothesis/constants/df40fa14165138c7
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# file: /home/anti/Tools/DECNET/decnet/web/auth.py
|
||||||
|
# hypothesis_version: 6.151.11
|
||||||
|
|
||||||
|
[1440, 'DECNET_SECRET_KEY', 'HS256', 'exp', 'iat', 'utf-8']
|
||||||
BIN
tests/.hypothesis/unicode_data/16.0.0/charmap.json.gz
Normal file
BIN
tests/.hypothesis/unicode_data/16.0.0/charmap.json.gz
Normal file
Binary file not shown.
BIN
tests/.hypothesis/unicode_data/16.0.0/codec-utf-8.json.gz
Normal file
BIN
tests/.hypothesis/unicode_data/16.0.0/codec-utf-8.json.gz
Normal file
Binary file not shown.
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
312
tests/test_archetypes.py
Normal file
312
tests/test_archetypes.py
Normal file
@@ -0,0 +1,312 @@
|
|||||||
|
"""
|
||||||
|
Tests for machine archetypes and the amount= expansion feature.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import textwrap
|
||||||
|
import tempfile
|
||||||
|
import os
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from decnet.archetypes import (
|
||||||
|
ARCHETYPES,
|
||||||
|
all_archetypes,
|
||||||
|
get_archetype,
|
||||||
|
random_archetype,
|
||||||
|
)
|
||||||
|
from decnet.ini_loader import load_ini
|
||||||
|
from decnet.distros import DISTROS
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Archetype registry
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def test_all_archetypes_returns_all():
|
||||||
|
result = all_archetypes()
|
||||||
|
assert isinstance(result, dict)
|
||||||
|
assert len(result) == len(ARCHETYPES)
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_archetype_known():
|
||||||
|
arch = get_archetype("linux-server")
|
||||||
|
assert arch.slug == "linux-server"
|
||||||
|
assert "ssh" in arch.services
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_archetype_unknown_raises():
|
||||||
|
with pytest.raises(ValueError, match="Unknown archetype"):
|
||||||
|
get_archetype("does-not-exist")
|
||||||
|
|
||||||
|
|
||||||
|
def test_random_archetype_returns_valid():
|
||||||
|
arch = random_archetype()
|
||||||
|
assert arch.slug in ARCHETYPES
|
||||||
|
|
||||||
|
|
||||||
|
def test_every_archetype_has_services():
|
||||||
|
for slug, arch in ARCHETYPES.items():
|
||||||
|
assert arch.services, f"Archetype '{slug}' has no services"
|
||||||
|
|
||||||
|
|
||||||
|
def test_every_archetype_has_preferred_distros():
|
||||||
|
for slug, arch in ARCHETYPES.items():
|
||||||
|
assert arch.preferred_distros, f"Archetype '{slug}' has no preferred_distros"
|
||||||
|
|
||||||
|
|
||||||
|
def test_every_archetype_preferred_distro_is_valid():
|
||||||
|
valid_slugs = set(DISTROS.keys())
|
||||||
|
for slug, arch in ARCHETYPES.items():
|
||||||
|
for d in arch.preferred_distros:
|
||||||
|
assert d in valid_slugs, (
|
||||||
|
f"Archetype '{slug}' references unknown distro '{d}'"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# INI loader — archetype= parsing
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _write_ini(content: str) -> str:
|
||||||
|
"""Write INI content to a temp file and return the path."""
|
||||||
|
content = textwrap.dedent(content)
|
||||||
|
fd, path = tempfile.mkstemp(suffix=".ini")
|
||||||
|
os.write(fd, content.encode())
|
||||||
|
os.close(fd)
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def test_ini_archetype_parsed():
|
||||||
|
path = _write_ini("""
|
||||||
|
[general]
|
||||||
|
net=10.0.0.0/24
|
||||||
|
gw=10.0.0.1
|
||||||
|
|
||||||
|
[my-server]
|
||||||
|
archetype=linux-server
|
||||||
|
""")
|
||||||
|
cfg = load_ini(path)
|
||||||
|
os.unlink(path)
|
||||||
|
assert len(cfg.deckies) == 1
|
||||||
|
assert cfg.deckies[0].archetype == "linux-server"
|
||||||
|
assert cfg.deckies[0].services is None # not overridden
|
||||||
|
|
||||||
|
|
||||||
|
def test_ini_archetype_with_explicit_services_override():
|
||||||
|
"""explicit services= must survive alongside archetype="""
|
||||||
|
path = _write_ini("""
|
||||||
|
[general]
|
||||||
|
net=10.0.0.0/24
|
||||||
|
gw=10.0.0.1
|
||||||
|
|
||||||
|
[my-server]
|
||||||
|
archetype=linux-server
|
||||||
|
services=ftp,smb
|
||||||
|
""")
|
||||||
|
cfg = load_ini(path)
|
||||||
|
os.unlink(path)
|
||||||
|
assert cfg.deckies[0].archetype == "linux-server"
|
||||||
|
assert cfg.deckies[0].services == ["ftp", "smb"]
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# INI loader — amount= expansion
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def test_ini_amount_one_keeps_section_name():
|
||||||
|
path = _write_ini("""
|
||||||
|
[general]
|
||||||
|
net=10.0.0.0/24
|
||||||
|
gw=10.0.0.1
|
||||||
|
|
||||||
|
[my-printer]
|
||||||
|
archetype=printer
|
||||||
|
amount=1
|
||||||
|
""")
|
||||||
|
cfg = load_ini(path)
|
||||||
|
os.unlink(path)
|
||||||
|
assert len(cfg.deckies) == 1
|
||||||
|
assert cfg.deckies[0].name == "my-printer"
|
||||||
|
|
||||||
|
|
||||||
|
def test_ini_amount_expands_deckies():
|
||||||
|
path = _write_ini("""
|
||||||
|
[general]
|
||||||
|
net=10.0.0.0/24
|
||||||
|
gw=10.0.0.1
|
||||||
|
|
||||||
|
[corp-ws]
|
||||||
|
archetype=windows-workstation
|
||||||
|
amount=5
|
||||||
|
""")
|
||||||
|
cfg = load_ini(path)
|
||||||
|
os.unlink(path)
|
||||||
|
assert len(cfg.deckies) == 5
|
||||||
|
for i, d in enumerate(cfg.deckies, start=1):
|
||||||
|
assert d.name == f"corp-ws-{i:02d}"
|
||||||
|
assert d.archetype == "windows-workstation"
|
||||||
|
assert d.ip is None # auto-allocated
|
||||||
|
|
||||||
|
|
||||||
|
def test_ini_amount_with_ip_raises():
|
||||||
|
path = _write_ini("""
|
||||||
|
[general]
|
||||||
|
net=10.0.0.0/24
|
||||||
|
gw=10.0.0.1
|
||||||
|
|
||||||
|
[bad-group]
|
||||||
|
services=ssh
|
||||||
|
ip=10.0.0.50
|
||||||
|
amount=3
|
||||||
|
""")
|
||||||
|
with pytest.raises(ValueError, match="Cannot combine ip="):
|
||||||
|
load_ini(path)
|
||||||
|
os.unlink(path)
|
||||||
|
|
||||||
|
|
||||||
|
def test_ini_amount_invalid_value_raises():
|
||||||
|
path = _write_ini("""
|
||||||
|
[general]
|
||||||
|
net=10.0.0.0/24
|
||||||
|
gw=10.0.0.1
|
||||||
|
|
||||||
|
[bad]
|
||||||
|
services=ssh
|
||||||
|
amount=potato
|
||||||
|
""")
|
||||||
|
with pytest.raises(ValueError, match="must be a positive integer"):
|
||||||
|
load_ini(path)
|
||||||
|
os.unlink(path)
|
||||||
|
|
||||||
|
|
||||||
|
def test_ini_amount_zero_raises():
|
||||||
|
path = _write_ini("""
|
||||||
|
[general]
|
||||||
|
net=10.0.0.0/24
|
||||||
|
gw=10.0.0.1
|
||||||
|
|
||||||
|
[bad]
|
||||||
|
services=ssh
|
||||||
|
amount=0
|
||||||
|
""")
|
||||||
|
with pytest.raises(ValueError, match="must be a positive integer"):
|
||||||
|
load_ini(path)
|
||||||
|
os.unlink(path)
|
||||||
|
|
||||||
|
|
||||||
|
def test_ini_amount_multiple_groups():
|
||||||
|
"""Two groups with different amounts expand independently."""
|
||||||
|
path = _write_ini("""
|
||||||
|
[general]
|
||||||
|
net=10.0.0.0/24
|
||||||
|
gw=10.0.0.1
|
||||||
|
|
||||||
|
[workers]
|
||||||
|
archetype=linux-server
|
||||||
|
amount=3
|
||||||
|
|
||||||
|
[printers]
|
||||||
|
archetype=printer
|
||||||
|
amount=2
|
||||||
|
""")
|
||||||
|
cfg = load_ini(path)
|
||||||
|
os.unlink(path)
|
||||||
|
assert len(cfg.deckies) == 5
|
||||||
|
names = [d.name for d in cfg.deckies]
|
||||||
|
assert names == ["workers-01", "workers-02", "workers-03", "printers-01", "printers-02"]
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# INI loader — per-service subsections propagate to expanded deckies
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def test_ini_subsection_propagates_to_expanded_deckies():
|
||||||
|
"""[group.ssh] must apply to group-01, group-02, ..."""
|
||||||
|
path = _write_ini("""
|
||||||
|
[general]
|
||||||
|
net=10.0.0.0/24
|
||||||
|
gw=10.0.0.1
|
||||||
|
|
||||||
|
[linux-hosts]
|
||||||
|
archetype=linux-server
|
||||||
|
amount=3
|
||||||
|
|
||||||
|
[linux-hosts.ssh]
|
||||||
|
kernel_version=5.15.0-76-generic
|
||||||
|
""")
|
||||||
|
cfg = load_ini(path)
|
||||||
|
os.unlink(path)
|
||||||
|
assert len(cfg.deckies) == 3
|
||||||
|
for d in cfg.deckies:
|
||||||
|
assert "ssh" in d.service_config
|
||||||
|
assert d.service_config["ssh"]["kernel_version"] == "5.15.0-76-generic"
|
||||||
|
|
||||||
|
|
||||||
|
def test_ini_subsection_direct_match_unaffected():
|
||||||
|
"""A direct [decky.svc] subsection must still work when amount=1."""
|
||||||
|
path = _write_ini("""
|
||||||
|
[general]
|
||||||
|
net=10.0.0.0/24
|
||||||
|
gw=10.0.0.1
|
||||||
|
|
||||||
|
[web-01]
|
||||||
|
services=http
|
||||||
|
|
||||||
|
[web-01.http]
|
||||||
|
server_header=Apache/2.4.51
|
||||||
|
""")
|
||||||
|
cfg = load_ini(path)
|
||||||
|
os.unlink(path)
|
||||||
|
assert cfg.deckies[0].service_config["http"]["server_header"] == "Apache/2.4.51"
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# _build_deckies — archetype applied via CLI path
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def test_build_deckies_archetype_sets_services():
|
||||||
|
from decnet.cli import _build_deckies
|
||||||
|
from decnet.archetypes import get_archetype
|
||||||
|
arch = get_archetype("mail-server")
|
||||||
|
result = _build_deckies(
|
||||||
|
n=2,
|
||||||
|
ips=["10.0.0.10", "10.0.0.11"],
|
||||||
|
services_explicit=None,
|
||||||
|
randomize_services=False,
|
||||||
|
archetype=arch,
|
||||||
|
)
|
||||||
|
assert len(result) == 2
|
||||||
|
for d in result:
|
||||||
|
assert set(d.services) == set(arch.services)
|
||||||
|
assert d.archetype == "mail-server"
|
||||||
|
|
||||||
|
|
||||||
|
def test_build_deckies_archetype_preferred_distros():
|
||||||
|
from decnet.cli import _build_deckies
|
||||||
|
from decnet.archetypes import get_archetype
|
||||||
|
arch = get_archetype("iot-device") # preferred_distros=["alpine"]
|
||||||
|
result = _build_deckies(
|
||||||
|
n=3,
|
||||||
|
ips=["10.0.0.10", "10.0.0.11", "10.0.0.12"],
|
||||||
|
services_explicit=None,
|
||||||
|
randomize_services=False,
|
||||||
|
archetype=arch,
|
||||||
|
)
|
||||||
|
for d in result:
|
||||||
|
assert d.distro == "alpine"
|
||||||
|
|
||||||
|
|
||||||
|
def test_build_deckies_explicit_services_override_archetype():
|
||||||
|
from decnet.cli import _build_deckies
|
||||||
|
from decnet.archetypes import get_archetype
|
||||||
|
arch = get_archetype("linux-server")
|
||||||
|
result = _build_deckies(
|
||||||
|
n=1,
|
||||||
|
ips=["10.0.0.10"],
|
||||||
|
services_explicit=["ftp"],
|
||||||
|
randomize_services=False,
|
||||||
|
archetype=arch,
|
||||||
|
)
|
||||||
|
assert result[0].services == ["ftp"]
|
||||||
|
assert result[0].archetype == "linux-server"
|
||||||
80
tests/test_cli_service_pool.py
Normal file
80
tests/test_cli_service_pool.py
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
"""
|
||||||
|
Tests for the CLI service pool — verifies that --randomize-services draws
|
||||||
|
from all registered services, not just the original hardcoded 5.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from decnet.cli import _all_service_names, _build_deckies
|
||||||
|
from decnet.services.registry import all_services
|
||||||
|
|
||||||
|
|
||||||
|
ORIGINAL_5 = {"ssh", "smb", "rdp", "http", "ftp"}
|
||||||
|
|
||||||
|
|
||||||
|
def test_all_service_names_covers_full_registry():
|
||||||
|
"""_all_service_names() must return every service in the registry."""
|
||||||
|
pool = set(_all_service_names())
|
||||||
|
registry = set(all_services().keys())
|
||||||
|
assert pool == registry
|
||||||
|
|
||||||
|
|
||||||
|
def test_all_service_names_is_sorted():
|
||||||
|
names = _all_service_names()
|
||||||
|
assert names == sorted(names)
|
||||||
|
|
||||||
|
|
||||||
|
def test_all_service_names_includes_at_least_25():
|
||||||
|
assert len(_all_service_names()) >= 25
|
||||||
|
|
||||||
|
|
||||||
|
def test_all_service_names_includes_all_original_5():
|
||||||
|
pool = set(_all_service_names())
|
||||||
|
assert ORIGINAL_5.issubset(pool)
|
||||||
|
|
||||||
|
|
||||||
|
def test_randomize_services_pool_exceeds_original_5():
|
||||||
|
"""
|
||||||
|
After enough random draws, at least one service outside the original 5 must appear.
|
||||||
|
With 25 services and picking 1-3 at a time, 200 draws makes this ~100% certain.
|
||||||
|
"""
|
||||||
|
all_drawn: set[str] = set()
|
||||||
|
for _ in range(200):
|
||||||
|
deckies = _build_deckies(
|
||||||
|
n=1,
|
||||||
|
ips=["10.0.0.10"],
|
||||||
|
services_explicit=None,
|
||||||
|
randomize_services=True,
|
||||||
|
)
|
||||||
|
all_drawn.update(deckies[0].services)
|
||||||
|
|
||||||
|
beyond_original = all_drawn - ORIGINAL_5
|
||||||
|
assert beyond_original, (
|
||||||
|
f"After 200 draws only saw the original 5 services. "
|
||||||
|
f"All drawn: {sorted(all_drawn)}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_build_deckies_randomize_services_valid():
|
||||||
|
"""All randomly chosen services must exist in the registry."""
|
||||||
|
registry = set(all_services().keys())
|
||||||
|
for _ in range(50):
|
||||||
|
deckies = _build_deckies(
|
||||||
|
n=3,
|
||||||
|
ips=["10.0.0.10", "10.0.0.11", "10.0.0.12"],
|
||||||
|
services_explicit=None,
|
||||||
|
randomize_services=True,
|
||||||
|
)
|
||||||
|
for decky in deckies:
|
||||||
|
unknown = set(decky.services) - registry
|
||||||
|
assert not unknown, f"Decky {decky.name} got unknown services: {unknown}"
|
||||||
|
|
||||||
|
|
||||||
|
def test_build_deckies_explicit_services_unchanged():
|
||||||
|
"""Explicit service list must pass through untouched."""
|
||||||
|
deckies = _build_deckies(
|
||||||
|
n=2,
|
||||||
|
ips=["10.0.0.10", "10.0.0.11"],
|
||||||
|
services_explicit=["ssh", "ftp"],
|
||||||
|
randomize_services=False,
|
||||||
|
)
|
||||||
|
for decky in deckies:
|
||||||
|
assert decky.services == ["ssh", "ftp"]
|
||||||
243
tests/test_composer.py
Normal file
243
tests/test_composer.py
Normal file
@@ -0,0 +1,243 @@
|
|||||||
|
"""
|
||||||
|
Tests for the composer — verifies BASE_IMAGE injection and distro heterogeneity.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from decnet.config import DeckyConfig, DecnetConfig
|
||||||
|
from decnet.composer import generate_compose
|
||||||
|
from decnet.distros import all_distros, DISTROS
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
APT_COMPATIBLE = {
|
||||||
|
"debian:bookworm-slim",
|
||||||
|
"ubuntu:22.04",
|
||||||
|
"ubuntu:20.04",
|
||||||
|
"kalilinux/kali-rolling",
|
||||||
|
}
|
||||||
|
|
||||||
|
BUILD_SERVICES = [
|
||||||
|
"ssh", "http", "rdp", "smb", "ftp", "smtp", "elasticsearch",
|
||||||
|
"pop3", "imap", "mysql", "mssql", "redis", "mongodb", "postgres",
|
||||||
|
"ldap", "vnc", "docker_api", "k8s", "sip",
|
||||||
|
"mqtt", "llmnr", "snmp", "tftp",
|
||||||
|
]
|
||||||
|
|
||||||
|
UPSTREAM_SERVICES = ["telnet", "conpot"]
|
||||||
|
|
||||||
|
|
||||||
|
def _make_config(services, distro="debian", base_image=None, build_base=None):
|
||||||
|
profile = DISTROS[distro]
|
||||||
|
decky = DeckyConfig(
|
||||||
|
name="decky-01",
|
||||||
|
ip="10.0.0.10",
|
||||||
|
services=services,
|
||||||
|
distro=distro,
|
||||||
|
base_image=base_image or profile.image,
|
||||||
|
build_base=build_base or profile.build_base,
|
||||||
|
hostname="test-host",
|
||||||
|
)
|
||||||
|
return DecnetConfig(
|
||||||
|
mode="unihost",
|
||||||
|
interface="eth0",
|
||||||
|
subnet="10.0.0.0/24",
|
||||||
|
gateway="10.0.0.1",
|
||||||
|
deckies=[decky],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# BASE_IMAGE injection — build services
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("svc", BUILD_SERVICES)
|
||||||
|
def test_build_service_gets_base_image_arg(svc):
|
||||||
|
"""Every build service must have BASE_IMAGE injected in compose args."""
|
||||||
|
config = _make_config([svc], distro="debian")
|
||||||
|
compose = generate_compose(config)
|
||||||
|
key = f"decky-01-{svc}"
|
||||||
|
fragment = compose["services"][key]
|
||||||
|
assert "build" in fragment, f"{svc}: missing 'build' key"
|
||||||
|
assert "args" in fragment["build"], f"{svc}: build section missing 'args'"
|
||||||
|
assert "BASE_IMAGE" in fragment["build"]["args"], f"{svc}: BASE_IMAGE not in args"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("distro,expected_build_base", [
|
||||||
|
("debian", "debian:bookworm-slim"),
|
||||||
|
("ubuntu22", "ubuntu:22.04"),
|
||||||
|
("ubuntu20", "ubuntu:20.04"),
|
||||||
|
("kali", "kalilinux/kali-rolling"),
|
||||||
|
("rocky9", "debian:bookworm-slim"),
|
||||||
|
("alpine", "debian:bookworm-slim"),
|
||||||
|
])
|
||||||
|
def test_build_service_base_image_matches_distro(distro, expected_build_base):
|
||||||
|
"""BASE_IMAGE arg must match the distro's build_base."""
|
||||||
|
config = _make_config(["http"], distro=distro)
|
||||||
|
compose = generate_compose(config)
|
||||||
|
fragment = compose["services"]["decky-01-http"]
|
||||||
|
assert fragment["build"]["args"]["BASE_IMAGE"] == expected_build_base
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# BASE_IMAGE NOT injected for upstream-image services
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("svc", UPSTREAM_SERVICES)
|
||||||
|
def test_upstream_service_has_no_build_section(svc):
|
||||||
|
"""Upstream-image services must not receive a build section or BASE_IMAGE."""
|
||||||
|
config = _make_config([svc])
|
||||||
|
compose = generate_compose(config)
|
||||||
|
fragment = compose["services"][f"decky-01-{svc}"]
|
||||||
|
assert "build" not in fragment
|
||||||
|
assert "image" in fragment
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# service_config propagation tests
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def test_service_config_http_server_header():
|
||||||
|
"""service_config for http must inject SERVER_HEADER into compose env."""
|
||||||
|
from decnet.config import DeckyConfig, DecnetConfig
|
||||||
|
from decnet.distros import DISTROS
|
||||||
|
profile = DISTROS["debian"]
|
||||||
|
decky = DeckyConfig(
|
||||||
|
name="decky-01", ip="10.0.0.10",
|
||||||
|
services=["http"], distro="debian",
|
||||||
|
base_image=profile.image, build_base=profile.build_base,
|
||||||
|
hostname="test-host",
|
||||||
|
service_config={"http": {"server_header": "nginx/1.18.0"}},
|
||||||
|
)
|
||||||
|
config = DecnetConfig(
|
||||||
|
mode="unihost", interface="eth0",
|
||||||
|
subnet="10.0.0.0/24", gateway="10.0.0.1",
|
||||||
|
deckies=[decky],
|
||||||
|
)
|
||||||
|
compose = generate_compose(config)
|
||||||
|
env = compose["services"]["decky-01-http"]["environment"]
|
||||||
|
assert env.get("SERVER_HEADER") == "nginx/1.18.0"
|
||||||
|
|
||||||
|
|
||||||
|
def test_service_config_ssh_kernel_version():
|
||||||
|
"""service_config for ssh must inject COWRIE_HONEYPOT_KERNEL_VERSION."""
|
||||||
|
from decnet.config import DeckyConfig, DecnetConfig
|
||||||
|
from decnet.distros import DISTROS
|
||||||
|
profile = DISTROS["debian"]
|
||||||
|
decky = DeckyConfig(
|
||||||
|
name="decky-01", ip="10.0.0.10",
|
||||||
|
services=["ssh"], distro="debian",
|
||||||
|
base_image=profile.image, build_base=profile.build_base,
|
||||||
|
hostname="test-host",
|
||||||
|
service_config={"ssh": {"kernel_version": "5.15.0-76-generic"}},
|
||||||
|
)
|
||||||
|
config = DecnetConfig(
|
||||||
|
mode="unihost", interface="eth0",
|
||||||
|
subnet="10.0.0.0/24", gateway="10.0.0.1",
|
||||||
|
deckies=[decky],
|
||||||
|
)
|
||||||
|
compose = generate_compose(config)
|
||||||
|
env = compose["services"]["decky-01-ssh"]["environment"]
|
||||||
|
assert env.get("COWRIE_HONEYPOT_KERNEL_VERSION") == "5.15.0-76-generic"
|
||||||
|
|
||||||
|
|
||||||
|
def test_service_config_for_one_service_does_not_affect_another():
|
||||||
|
"""service_config for http must not bleed into ftp fragment."""
|
||||||
|
from decnet.config import DeckyConfig, DecnetConfig
|
||||||
|
from decnet.distros import DISTROS
|
||||||
|
profile = DISTROS["debian"]
|
||||||
|
decky = DeckyConfig(
|
||||||
|
name="decky-01", ip="10.0.0.10",
|
||||||
|
services=["http", "ftp"], distro="debian",
|
||||||
|
base_image=profile.image, build_base=profile.build_base,
|
||||||
|
hostname="test-host",
|
||||||
|
service_config={"http": {"server_header": "nginx/1.18.0"}},
|
||||||
|
)
|
||||||
|
config = DecnetConfig(
|
||||||
|
mode="unihost", interface="eth0",
|
||||||
|
subnet="10.0.0.0/24", gateway="10.0.0.1",
|
||||||
|
deckies=[decky],
|
||||||
|
)
|
||||||
|
compose = generate_compose(config)
|
||||||
|
ftp_env = compose["services"]["decky-01-ftp"]["environment"]
|
||||||
|
assert "SERVER_HEADER" not in ftp_env
|
||||||
|
|
||||||
|
|
||||||
|
def test_no_service_config_produces_no_extra_env():
|
||||||
|
"""A decky with no service_config must not have new persona env vars."""
|
||||||
|
config = _make_config(["http", "mysql"])
|
||||||
|
compose = generate_compose(config)
|
||||||
|
for svc in ("http", "mysql"):
|
||||||
|
env = compose["services"][f"decky-01-{svc}"]["environment"]
|
||||||
|
assert "SERVER_HEADER" not in env
|
||||||
|
assert "MYSQL_VERSION" not in env
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Base container uses distro image, not build_base
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("distro", list(DISTROS.keys()))
|
||||||
|
def test_base_container_uses_full_distro_image(distro):
|
||||||
|
"""The IP-holder base container must use distro.image, not build_base."""
|
||||||
|
config = _make_config(["ssh"], distro=distro)
|
||||||
|
compose = generate_compose(config)
|
||||||
|
base = compose["services"]["decky-01"]
|
||||||
|
expected = DISTROS[distro].image
|
||||||
|
assert base["image"] == expected, (
|
||||||
|
f"distro={distro}: base container image '{base['image']}' != '{expected}'"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Distro profile — build_base is always apt-compatible
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def test_all_distros_have_build_base():
|
||||||
|
for slug, profile in all_distros().items():
|
||||||
|
assert profile.build_base, f"Distro '{slug}' has empty build_base"
|
||||||
|
|
||||||
|
|
||||||
|
def test_all_distro_build_bases_are_apt_compatible():
|
||||||
|
for slug, profile in all_distros().items():
|
||||||
|
assert profile.build_base in APT_COMPATIBLE, (
|
||||||
|
f"Distro '{slug}' build_base '{profile.build_base}' is not apt-compatible. "
|
||||||
|
f"Allowed: {APT_COMPATIBLE}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Heterogeneity — multiple deckies with different distros get different images
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def test_multiple_deckies_different_build_bases():
|
||||||
|
"""A multi-decky deployment with ubuntu22 and debian must differ in BASE_IMAGE."""
|
||||||
|
deckies = [
|
||||||
|
DeckyConfig(
|
||||||
|
name="decky-01", ip="10.0.0.10",
|
||||||
|
services=["http"], distro="debian",
|
||||||
|
base_image="debian:bookworm-slim", build_base="debian:bookworm-slim",
|
||||||
|
hostname="host-01",
|
||||||
|
),
|
||||||
|
DeckyConfig(
|
||||||
|
name="decky-02", ip="10.0.0.11",
|
||||||
|
services=["http"], distro="ubuntu22",
|
||||||
|
base_image="ubuntu:22.04", build_base="ubuntu:22.04",
|
||||||
|
hostname="host-02",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
config = DecnetConfig(
|
||||||
|
mode="unihost", interface="eth0",
|
||||||
|
subnet="10.0.0.0/24", gateway="10.0.0.1",
|
||||||
|
deckies=deckies,
|
||||||
|
)
|
||||||
|
compose = generate_compose(config)
|
||||||
|
|
||||||
|
base_img_01 = compose["services"]["decky-01-http"]["build"]["args"]["BASE_IMAGE"]
|
||||||
|
base_img_02 = compose["services"]["decky-02-http"]["build"]["args"]["BASE_IMAGE"]
|
||||||
|
|
||||||
|
assert base_img_01 == "debian:bookworm-slim"
|
||||||
|
assert base_img_02 == "ubuntu:22.04"
|
||||||
|
assert base_img_01 != base_img_02
|
||||||
418
tests/test_correlation.py
Normal file
418
tests/test_correlation.py
Normal file
@@ -0,0 +1,418 @@
|
|||||||
|
"""
|
||||||
|
Tests for the DECNET cross-decky correlation engine.
|
||||||
|
|
||||||
|
Covers:
|
||||||
|
- RFC 5424 line parsing (parser.py)
|
||||||
|
- Traversal graph data types (graph.py)
|
||||||
|
- CorrelationEngine ingestion, querying, and reporting (engine.py)
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
|
||||||
|
from decnet.correlation.parser import LogEvent, parse_line
|
||||||
|
from decnet.correlation.graph import AttackerTraversal, TraversalHop
|
||||||
|
from decnet.correlation.engine import CorrelationEngine, _fmt_duration
|
||||||
|
from decnet.logging.syslog_formatter import format_rfc5424, SEVERITY_INFO, SEVERITY_WARNING
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Fixtures & helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
_TS = "2026-04-04T10:00:00+00:00"
|
||||||
|
_TS2 = "2026-04-04T10:05:00+00:00"
|
||||||
|
_TS3 = "2026-04-04T10:10:00+00:00"
|
||||||
|
|
||||||
|
|
||||||
|
def _make_line(
|
||||||
|
service: str = "http",
|
||||||
|
hostname: str = "decky-01",
|
||||||
|
event_type: str = "connection",
|
||||||
|
src_ip: str = "1.2.3.4",
|
||||||
|
timestamp: str = _TS,
|
||||||
|
extra_fields: dict | None = None,
|
||||||
|
) -> str:
|
||||||
|
"""Build a real RFC 5424 DECNET syslog line via the formatter."""
|
||||||
|
fields = {}
|
||||||
|
if src_ip:
|
||||||
|
fields["src_ip"] = src_ip
|
||||||
|
if extra_fields:
|
||||||
|
fields.update(extra_fields)
|
||||||
|
return format_rfc5424(
|
||||||
|
service=service,
|
||||||
|
hostname=hostname,
|
||||||
|
event_type=event_type,
|
||||||
|
severity=SEVERITY_INFO,
|
||||||
|
timestamp=datetime.fromisoformat(timestamp),
|
||||||
|
**fields,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _make_line_src(hostname: str, src: str, timestamp: str = _TS) -> str:
|
||||||
|
"""Build a line that uses `src` instead of `src_ip` (mssql style)."""
|
||||||
|
return format_rfc5424(
|
||||||
|
service="mssql",
|
||||||
|
hostname=hostname,
|
||||||
|
event_type="unknown_packet",
|
||||||
|
severity=SEVERITY_INFO,
|
||||||
|
timestamp=datetime.fromisoformat(timestamp),
|
||||||
|
src=src,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# parser.py — parse_line
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestParserBasic:
|
||||||
|
def test_returns_none_for_blank(self):
|
||||||
|
assert parse_line("") is None
|
||||||
|
assert parse_line(" ") is None
|
||||||
|
|
||||||
|
def test_returns_none_for_non_rfc5424(self):
|
||||||
|
assert parse_line("this is not a syslog line") is None
|
||||||
|
assert parse_line("Jan 1 00:00:00 host sshd: blah") is None
|
||||||
|
|
||||||
|
def test_returns_log_event(self):
|
||||||
|
event = parse_line(_make_line())
|
||||||
|
assert isinstance(event, LogEvent)
|
||||||
|
|
||||||
|
def test_hostname_extracted(self):
|
||||||
|
event = parse_line(_make_line(hostname="decky-07"))
|
||||||
|
assert event.decky == "decky-07"
|
||||||
|
|
||||||
|
def test_service_extracted(self):
|
||||||
|
event = parse_line(_make_line(service="ftp"))
|
||||||
|
assert event.service == "ftp"
|
||||||
|
|
||||||
|
def test_event_type_extracted(self):
|
||||||
|
event = parse_line(_make_line(event_type="login_attempt"))
|
||||||
|
assert event.event_type == "login_attempt"
|
||||||
|
|
||||||
|
def test_timestamp_parsed(self):
|
||||||
|
event = parse_line(_make_line(timestamp=_TS))
|
||||||
|
assert event.timestamp == datetime.fromisoformat(_TS)
|
||||||
|
|
||||||
|
def test_raw_line_preserved(self):
|
||||||
|
line = _make_line()
|
||||||
|
event = parse_line(line)
|
||||||
|
assert event.raw == line.strip()
|
||||||
|
|
||||||
|
|
||||||
|
class TestParserAttackerIP:
|
||||||
|
def test_src_ip_field(self):
|
||||||
|
event = parse_line(_make_line(src_ip="10.0.0.1"))
|
||||||
|
assert event.attacker_ip == "10.0.0.1"
|
||||||
|
|
||||||
|
def test_src_field_fallback(self):
|
||||||
|
"""mssql logs use `src` instead of `src_ip`."""
|
||||||
|
event = parse_line(_make_line_src("decky-win", "192.168.1.5"))
|
||||||
|
assert event.attacker_ip == "192.168.1.5"
|
||||||
|
|
||||||
|
def test_no_ip_field_gives_none(self):
|
||||||
|
line = format_rfc5424("http", "decky-01", "startup", SEVERITY_INFO)
|
||||||
|
event = parse_line(line)
|
||||||
|
assert event is not None
|
||||||
|
assert event.attacker_ip is None
|
||||||
|
|
||||||
|
def test_extra_fields_in_dict(self):
|
||||||
|
event = parse_line(_make_line(extra_fields={"username": "root", "password": "admin"}))
|
||||||
|
assert event.fields["username"] == "root"
|
||||||
|
assert event.fields["password"] == "admin"
|
||||||
|
|
||||||
|
def test_src_ip_priority_over_src(self):
|
||||||
|
"""src_ip should win when both are present."""
|
||||||
|
line = format_rfc5424(
|
||||||
|
"mssql", "decky-01", "evt", SEVERITY_INFO,
|
||||||
|
timestamp=datetime.fromisoformat(_TS),
|
||||||
|
src_ip="1.1.1.1",
|
||||||
|
src="2.2.2.2",
|
||||||
|
)
|
||||||
|
event = parse_line(line)
|
||||||
|
assert event.attacker_ip == "1.1.1.1"
|
||||||
|
|
||||||
|
def test_sd_escape_chars_decoded(self):
|
||||||
|
"""Escaped characters in SD values should be unescaped."""
|
||||||
|
line = format_rfc5424(
|
||||||
|
"http", "decky-01", "evt", SEVERITY_INFO,
|
||||||
|
timestamp=datetime.fromisoformat(_TS),
|
||||||
|
src_ip="1.2.3.4",
|
||||||
|
path='/search?q=a"b',
|
||||||
|
)
|
||||||
|
event = parse_line(line)
|
||||||
|
assert '"' in event.fields["path"]
|
||||||
|
|
||||||
|
def test_nilvalue_hostname_skipped(self):
|
||||||
|
line = format_rfc5424("-", "decky-01", "evt", SEVERITY_INFO)
|
||||||
|
assert parse_line(line) is None
|
||||||
|
|
||||||
|
def test_nilvalue_service_skipped(self):
|
||||||
|
line = format_rfc5424("http", "-", "evt", SEVERITY_INFO)
|
||||||
|
assert parse_line(line) is None
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# graph.py — AttackerTraversal
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _make_traversal(ip: str, hops_spec: list[tuple]) -> AttackerTraversal:
|
||||||
|
"""hops_spec: list of (ts_str, decky, service, event_type)"""
|
||||||
|
hops = [
|
||||||
|
TraversalHop(
|
||||||
|
timestamp=datetime.fromisoformat(ts),
|
||||||
|
decky=decky,
|
||||||
|
service=svc,
|
||||||
|
event_type=evt,
|
||||||
|
)
|
||||||
|
for ts, decky, svc, evt in hops_spec
|
||||||
|
]
|
||||||
|
return AttackerTraversal(attacker_ip=ip, hops=hops)
|
||||||
|
|
||||||
|
|
||||||
|
class TestTraversalGraph:
|
||||||
|
def setup_method(self):
|
||||||
|
self.t = _make_traversal("5.6.7.8", [
|
||||||
|
(_TS, "decky-01", "ssh", "login_attempt"),
|
||||||
|
(_TS2, "decky-03", "http", "request"),
|
||||||
|
(_TS3, "decky-05", "ftp", "auth_attempt"),
|
||||||
|
])
|
||||||
|
|
||||||
|
def test_first_seen(self):
|
||||||
|
assert self.t.first_seen == datetime.fromisoformat(_TS)
|
||||||
|
|
||||||
|
def test_last_seen(self):
|
||||||
|
assert self.t.last_seen == datetime.fromisoformat(_TS3)
|
||||||
|
|
||||||
|
def test_duration_seconds(self):
|
||||||
|
assert self.t.duration_seconds == 600.0
|
||||||
|
|
||||||
|
def test_deckies_ordered(self):
|
||||||
|
assert self.t.deckies == ["decky-01", "decky-03", "decky-05"]
|
||||||
|
|
||||||
|
def test_decky_count(self):
|
||||||
|
assert self.t.decky_count == 3
|
||||||
|
|
||||||
|
def test_path_string(self):
|
||||||
|
assert self.t.path == "decky-01 → decky-03 → decky-05"
|
||||||
|
|
||||||
|
def test_to_dict_keys(self):
|
||||||
|
d = self.t.to_dict()
|
||||||
|
assert d["attacker_ip"] == "5.6.7.8"
|
||||||
|
assert d["decky_count"] == 3
|
||||||
|
assert d["hop_count"] == 3
|
||||||
|
assert len(d["hops"]) == 3
|
||||||
|
assert d["path"] == "decky-01 → decky-03 → decky-05"
|
||||||
|
|
||||||
|
def test_to_dict_hops_structure(self):
|
||||||
|
hop = self.t.to_dict()["hops"][0]
|
||||||
|
assert set(hop.keys()) == {"timestamp", "decky", "service", "event_type"}
|
||||||
|
|
||||||
|
def test_repeated_decky_not_double_counted_in_path(self):
|
||||||
|
t = _make_traversal("1.1.1.1", [
|
||||||
|
(_TS, "decky-01", "ssh", "conn"),
|
||||||
|
(_TS2, "decky-02", "ftp", "conn"),
|
||||||
|
(_TS3, "decky-01", "ssh", "conn"), # revisit
|
||||||
|
])
|
||||||
|
assert t.deckies == ["decky-01", "decky-02"]
|
||||||
|
assert t.decky_count == 2
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# engine.py — CorrelationEngine
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestEngineIngestion:
|
||||||
|
def test_ingest_returns_event(self):
|
||||||
|
engine = CorrelationEngine()
|
||||||
|
evt = engine.ingest(_make_line())
|
||||||
|
assert evt is not None
|
||||||
|
|
||||||
|
def test_ingest_blank_returns_none(self):
|
||||||
|
engine = CorrelationEngine()
|
||||||
|
assert engine.ingest("") is None
|
||||||
|
|
||||||
|
def test_lines_parsed_counter(self):
|
||||||
|
engine = CorrelationEngine()
|
||||||
|
engine.ingest(_make_line())
|
||||||
|
engine.ingest("garbage")
|
||||||
|
assert engine.lines_parsed == 2
|
||||||
|
|
||||||
|
def test_events_indexed_counter(self):
|
||||||
|
engine = CorrelationEngine()
|
||||||
|
engine.ingest(_make_line(src_ip="1.2.3.4"))
|
||||||
|
engine.ingest(_make_line(src_ip="")) # no IP
|
||||||
|
assert engine.events_indexed == 1
|
||||||
|
|
||||||
|
def test_ingest_file(self, tmp_path):
|
||||||
|
log = tmp_path / "decnet.log"
|
||||||
|
lines = [
|
||||||
|
_make_line("ssh", "decky-01", "conn", "10.0.0.1", _TS),
|
||||||
|
_make_line("http", "decky-02", "req", "10.0.0.1", _TS2),
|
||||||
|
_make_line("ftp", "decky-03", "auth", "10.0.0.1", _TS3),
|
||||||
|
]
|
||||||
|
log.write_text("\n".join(lines))
|
||||||
|
engine = CorrelationEngine()
|
||||||
|
count = engine.ingest_file(log)
|
||||||
|
assert count == 3
|
||||||
|
|
||||||
|
|
||||||
|
class TestEngineTraversals:
|
||||||
|
def _engine_with(self, specs: list[tuple]) -> CorrelationEngine:
|
||||||
|
"""specs: (service, decky, event_type, src_ip, timestamp)"""
|
||||||
|
engine = CorrelationEngine()
|
||||||
|
for svc, decky, evt, ip, ts in specs:
|
||||||
|
engine.ingest(_make_line(svc, decky, evt, ip, ts))
|
||||||
|
return engine
|
||||||
|
|
||||||
|
def test_single_decky_not_a_traversal(self):
|
||||||
|
engine = self._engine_with([
|
||||||
|
("ssh", "decky-01", "conn", "1.1.1.1", _TS),
|
||||||
|
("ssh", "decky-01", "conn", "1.1.1.1", _TS2),
|
||||||
|
])
|
||||||
|
assert engine.traversals() == []
|
||||||
|
|
||||||
|
def test_two_deckies_is_traversal(self):
|
||||||
|
engine = self._engine_with([
|
||||||
|
("ssh", "decky-01", "conn", "1.1.1.1", _TS),
|
||||||
|
("http", "decky-02", "req", "1.1.1.1", _TS2),
|
||||||
|
])
|
||||||
|
t = engine.traversals()
|
||||||
|
assert len(t) == 1
|
||||||
|
assert t[0].attacker_ip == "1.1.1.1"
|
||||||
|
assert t[0].decky_count == 2
|
||||||
|
|
||||||
|
def test_min_deckies_filter(self):
|
||||||
|
engine = self._engine_with([
|
||||||
|
("ssh", "decky-01", "conn", "1.1.1.1", _TS),
|
||||||
|
("http", "decky-02", "req", "1.1.1.1", _TS2),
|
||||||
|
("ftp", "decky-03", "auth", "1.1.1.1", _TS3),
|
||||||
|
])
|
||||||
|
assert len(engine.traversals(min_deckies=3)) == 1
|
||||||
|
assert len(engine.traversals(min_deckies=4)) == 0
|
||||||
|
|
||||||
|
def test_multiple_attackers_separate_traversals(self):
|
||||||
|
engine = self._engine_with([
|
||||||
|
("ssh", "decky-01", "conn", "1.1.1.1", _TS),
|
||||||
|
("http", "decky-02", "req", "1.1.1.1", _TS2),
|
||||||
|
("ssh", "decky-03", "conn", "9.9.9.9", _TS),
|
||||||
|
("ftp", "decky-04", "auth", "9.9.9.9", _TS2),
|
||||||
|
])
|
||||||
|
traversals = engine.traversals()
|
||||||
|
assert len(traversals) == 2
|
||||||
|
ips = {t.attacker_ip for t in traversals}
|
||||||
|
assert ips == {"1.1.1.1", "9.9.9.9"}
|
||||||
|
|
||||||
|
def test_traversals_sorted_by_first_seen(self):
|
||||||
|
engine = self._engine_with([
|
||||||
|
("ssh", "decky-01", "conn", "9.9.9.9", _TS2), # later
|
||||||
|
("ftp", "decky-02", "auth", "9.9.9.9", _TS3),
|
||||||
|
("http", "decky-03", "req", "1.1.1.1", _TS), # earlier
|
||||||
|
("smb", "decky-04", "auth", "1.1.1.1", _TS2),
|
||||||
|
])
|
||||||
|
traversals = engine.traversals()
|
||||||
|
assert traversals[0].attacker_ip == "1.1.1.1"
|
||||||
|
assert traversals[1].attacker_ip == "9.9.9.9"
|
||||||
|
|
||||||
|
def test_hops_ordered_chronologically(self):
|
||||||
|
engine = self._engine_with([
|
||||||
|
("ftp", "decky-02", "auth", "5.5.5.5", _TS2), # ingested first but later ts
|
||||||
|
("ssh", "decky-01", "conn", "5.5.5.5", _TS),
|
||||||
|
])
|
||||||
|
t = engine.traversals()[0]
|
||||||
|
assert t.hops[0].decky == "decky-01"
|
||||||
|
assert t.hops[1].decky == "decky-02"
|
||||||
|
|
||||||
|
def test_all_attackers(self):
|
||||||
|
engine = self._engine_with([
|
||||||
|
("ssh", "decky-01", "conn", "1.1.1.1", _TS),
|
||||||
|
("ssh", "decky-01", "conn", "1.1.1.1", _TS2),
|
||||||
|
("ssh", "decky-01", "conn", "2.2.2.2", _TS),
|
||||||
|
])
|
||||||
|
attackers = engine.all_attackers()
|
||||||
|
assert attackers["1.1.1.1"] == 2
|
||||||
|
assert attackers["2.2.2.2"] == 1
|
||||||
|
|
||||||
|
def test_mssql_src_field_correlated(self):
|
||||||
|
"""Verify that `src=` (mssql style) is picked up for cross-decky correlation."""
|
||||||
|
engine = CorrelationEngine()
|
||||||
|
engine.ingest(_make_line_src("decky-win1", "10.10.10.5", _TS))
|
||||||
|
engine.ingest(_make_line_src("decky-win2", "10.10.10.5", _TS2))
|
||||||
|
t = engine.traversals()
|
||||||
|
assert len(t) == 1
|
||||||
|
assert t[0].decky_count == 2
|
||||||
|
|
||||||
|
|
||||||
|
class TestEngineReporting:
|
||||||
|
def _two_decky_engine(self) -> CorrelationEngine:
|
||||||
|
engine = CorrelationEngine()
|
||||||
|
engine.ingest(_make_line("ssh", "decky-01", "conn", "3.3.3.3", _TS))
|
||||||
|
engine.ingest(_make_line("http", "decky-02", "req", "3.3.3.3", _TS2))
|
||||||
|
return engine
|
||||||
|
|
||||||
|
def test_report_json_structure(self):
|
||||||
|
engine = self._two_decky_engine()
|
||||||
|
report = engine.report_json()
|
||||||
|
assert "stats" in report
|
||||||
|
assert "traversals" in report
|
||||||
|
assert report["stats"]["traversals"] == 1
|
||||||
|
t = report["traversals"][0]
|
||||||
|
assert t["attacker_ip"] == "3.3.3.3"
|
||||||
|
assert t["decky_count"] == 2
|
||||||
|
|
||||||
|
def test_report_json_serialisable(self):
|
||||||
|
engine = self._two_decky_engine()
|
||||||
|
# Should not raise
|
||||||
|
json.dumps(engine.report_json())
|
||||||
|
|
||||||
|
def test_report_table_returns_rich_table(self):
|
||||||
|
from rich.table import Table
|
||||||
|
engine = self._two_decky_engine()
|
||||||
|
table = engine.report_table()
|
||||||
|
assert isinstance(table, Table)
|
||||||
|
|
||||||
|
def test_traversal_syslog_lines_count(self):
|
||||||
|
engine = self._two_decky_engine()
|
||||||
|
lines = engine.traversal_syslog_lines()
|
||||||
|
assert len(lines) == 1
|
||||||
|
|
||||||
|
def test_traversal_syslog_line_is_rfc5424(self):
|
||||||
|
engine = self._two_decky_engine()
|
||||||
|
line = engine.traversal_syslog_lines()[0]
|
||||||
|
# Must match RFC 5424 header
|
||||||
|
assert re.match(r"^<\d+>1 \S+ \S+ correlator - traversal_detected", line)
|
||||||
|
|
||||||
|
def test_traversal_syslog_contains_attacker_ip(self):
|
||||||
|
engine = self._two_decky_engine()
|
||||||
|
line = engine.traversal_syslog_lines()[0]
|
||||||
|
assert "3.3.3.3" in line
|
||||||
|
|
||||||
|
def test_traversal_syslog_severity_is_warning(self):
|
||||||
|
engine = self._two_decky_engine()
|
||||||
|
line = engine.traversal_syslog_lines()[0]
|
||||||
|
pri = int(re.match(r"^<(\d+)>", line).group(1))
|
||||||
|
assert pri == 16 * 8 + SEVERITY_WARNING # local0 + warning
|
||||||
|
|
||||||
|
def test_no_traversals_empty_json(self):
|
||||||
|
engine = CorrelationEngine()
|
||||||
|
engine.ingest(_make_line()) # single decky, no traversal
|
||||||
|
assert engine.report_json()["stats"]["traversals"] == 0
|
||||||
|
assert engine.traversal_syslog_lines() == []
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# _fmt_duration helper
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestFmtDuration:
    """_fmt_duration renders a second count with an s/m/h unit suffix."""

    def test_seconds(self):
        # 45 s stays expressed in whole seconds.
        assert _fmt_duration(45) == "45s"

    def test_minutes(self):
        # 90 s becomes fractional minutes with one decimal.
        assert _fmt_duration(90) == "1.5m"

    def test_hours(self):
        # 7200 s becomes fractional hours with one decimal.
        assert _fmt_duration(7200) == "2.0h"
|
||||||
71
tests/test_file_handler.py
Normal file
71
tests/test_file_handler.py
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
"""Tests for the syslog file handler."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
import decnet.logging.file_handler as fh
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
def reset_handler(tmp_path, monkeypatch):
    """Reset the module-level logger between tests.

    Clears the cached ``_handler``/``_logger`` singletons and points the
    log-file env var at a per-test temporary path so tests cannot observe
    each other's state.  ``monkeypatch`` undoes the attribute and env-var
    changes automatically after each test.
    """
    monkeypatch.setattr(fh, "_handler", None)
    monkeypatch.setattr(fh, "_logger", None)
    monkeypatch.setenv(fh._LOG_FILE_ENV, str(tmp_path / "test.log"))
    yield
    # Remove handlers to avoid file lock issues on next test
    if fh._logger is not None:
        for h in list(fh._logger.handlers):
            # Close first so the underlying file descriptor is released
            # before the handler is detached.
            h.close()
            fh._logger.removeHandler(h)
        fh._handler = None
        fh._logger = None
|
||||||
|
|
||||||
|
|
||||||
|
def test_write_creates_log_file(tmp_path):
    """write_syslog() must create the target file and persist the message."""
    target = tmp_path / "decnet.log"
    os.environ[fh._LOG_FILE_ENV] = str(target)

    fh.write_syslog("<134>1 2026-04-04T12:00:00+00:00 h svc - e - test message")

    assert target.exists()
    assert "test message" in target.read_text()
|
||||||
|
|
||||||
|
|
||||||
|
def test_write_appends_multiple_lines(tmp_path):
    """Consecutive writes append one syslog line per call, in order."""
    target = tmp_path / "decnet.log"
    os.environ[fh._LOG_FILE_ENV] = str(target)

    for i in range(3):
        fh.write_syslog(f"<134>1 ts host svc - event{i} -")

    written = target.read_text().splitlines()
    assert len(written) == 3
    assert "event0" in written[0]
    assert "event2" in written[2]
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_log_path_default(monkeypatch):
    # With the env var unset, get_log_path() falls back to the built-in default.
    monkeypatch.delenv(fh._LOG_FILE_ENV, raising=False)
    assert fh.get_log_path() == Path(fh._DEFAULT_LOG_FILE)
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_log_path_custom(monkeypatch, tmp_path):
    """The env var overrides the default log-file location."""
    override = str(tmp_path / "custom.log")
    monkeypatch.setenv(fh._LOG_FILE_ENV, override)
    assert fh.get_log_path() == Path(override)
|
||||||
|
|
||||||
|
|
||||||
|
def test_rotating_handler_configured(tmp_path):
    """The configured file handler must be a size-capped RotatingFileHandler.

    Imports ``logging.handlers`` explicitly: the submodule is only reachable
    as an attribute of ``logging`` when *something* has imported it, so
    relying on the module under test to do that transitively is fragile.
    """
    from logging.handlers import RotatingFileHandler

    log_path = tmp_path / "r.log"
    os.environ[fh._LOG_FILE_ENV] = str(log_path)

    logger = fh._get_logger()
    handler = logger.handlers[0]

    assert isinstance(handler, RotatingFileHandler)
    # Rotation limits must match the module-level configuration constants.
    assert handler.maxBytes == fh._MAX_BYTES
    assert handler.backupCount == fh._BACKUP_COUNT
|
||||||
|
|
||||||
|
|
||||||
|
def test_write_syslog_does_not_raise_on_bad_path(monkeypatch):
    # Point the log file at a directory that cannot exist.
    monkeypatch.setenv(fh._LOG_FILE_ENV, "/no/such/dir/that/exists/decnet.log")
    # Should not raise — falls back to StreamHandler
    fh.write_syslog("<134>1 ts h svc - e -")
|
||||||
86
tests/test_fleet_api.py
Normal file
86
tests/test_fleet_api.py
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
import json
|
||||||
|
import pytest
|
||||||
|
from fastapi.testclient import TestClient
|
||||||
|
from decnet.web.api import app
|
||||||
|
import decnet.config
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
TEST_STATE_FILE = Path("test-decnet-state.json")
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
def patch_state_file(monkeypatch):
    """Redirect the app's persisted state to a throwaway file for every test."""
    # Patch the global STATE_FILE variable in the config module
    monkeypatch.setattr(decnet.config, "STATE_FILE", TEST_STATE_FILE)
|
||||||
|
|
||||||
|
@pytest.fixture
def mock_state_file():
    """Write a two-decky deployment state to TEST_STATE_FILE and yield it.

    The state mirrors the on-disk schema the API reads: a ``config`` block
    with network settings and a ``deckies`` list, plus the compose path.
    The file is deleted again after the test (yield-fixture teardown runs
    even when the test fails).
    """
    # Create a dummy state file for testing
    _test_state = {
        "config": {
            "mode": "unihost",
            "interface": "eth0",
            "subnet": "192.168.1.0/24",
            "gateway": "192.168.1.1",
            "deckies": [
                {
                    "name": "test-decky-1",
                    "ip": "192.168.1.10",
                    "services": ["ssh"],
                    "distro": "debian",
                    "base_image": "debian",
                    "hostname": "test-host-1",
                    "service_config": {"ssh": {"banner": "SSH-2.0-OpenSSH_8.9"}},
                    "archetype": "deaddeck",
                    "nmap_os": "linux",
                    "build_base": "debian:bookworm-slim"
                },
                {
                    "name": "test-decky-2",
                    "ip": "192.168.1.11",
                    "services": ["http"],
                    "distro": "ubuntu",
                    "base_image": "ubuntu",
                    "hostname": "test-host-2",
                    "service_config": {},
                    "archetype": None,
                    "nmap_os": "linux",
                    "build_base": "debian:bookworm-slim"
                }
            ],
            "log_target": None,
            "log_file": "test.log",
            "ipvlan": False
        },
        "compose_path": "test-compose.yml"
    }
    TEST_STATE_FILE.write_text(json.dumps(_test_state))

    yield _test_state

    # Cleanup
    if TEST_STATE_FILE.exists():
        TEST_STATE_FILE.unlink()
|
||||||
|
|
||||||
|
def test_get_deckies_endpoint(mock_state_file):
    """GET /api/v1/deckies returns every decky from the persisted state."""
    with TestClient(app) as client:
        # All fleet endpoints require a bearer token, so authenticate first.
        login = client.post(
            "/api/v1/auth/login", json={"username": "admin", "password": "admin"}
        )
        token = login.json()["access_token"]

        resp = client.get(
            "/api/v1/deckies", headers={"Authorization": f"Bearer {token}"}
        )

        assert resp.status_code == 200
        deckies = resp.json()
        assert len(deckies) == 2
        assert deckies[0]["name"] == "test-decky-1"
        assert deckies[0]["service_config"]["ssh"]["banner"] == "SSH-2.0-OpenSSH_8.9"
|
||||||
|
|
||||||
|
def test_stats_includes_deployed_count(mock_state_file):
    """GET /api/v1/stats exposes the number of deployed deckies."""
    with TestClient(app) as client:
        login = client.post(
            "/api/v1/auth/login", json={"username": "admin", "password": "admin"}
        )
        token = login.json()["access_token"]

        resp = client.get(
            "/api/v1/stats", headers={"Authorization": f"Bearer {token}"}
        )

        assert resp.status_code == 200
        stats = resp.json()
        assert "deployed_deckies" in stats
        assert stats["deployed_deckies"] == 2
|
||||||
217
tests/test_ini_loader.py
Normal file
217
tests/test_ini_loader.py
Normal file
@@ -0,0 +1,217 @@
|
|||||||
|
"""
|
||||||
|
Tests for the INI loader — subsection parsing, custom service definitions,
|
||||||
|
and per-service config propagation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import textwrap
|
||||||
|
from pathlib import Path
|
||||||
|
from decnet.ini_loader import load_ini
|
||||||
|
|
||||||
|
|
||||||
|
def _write_ini(tmp_path: Path, content: str) -> Path:
|
||||||
|
f = tmp_path / "decnet.ini"
|
||||||
|
f.write_text(textwrap.dedent(content))
|
||||||
|
return f
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
# Basic decky parsing (regression)
# ---------------------------------------------------------------------------

def test_basic_decky_parsed(tmp_path):
    """A plain [decky-*] section yields one decky with its services split."""
    path = _write_ini(tmp_path, """
        [general]
        net = 192.168.1.0/24
        gw = 192.168.1.1

        [decky-01]
        ip = 192.168.1.101
        services = ssh, http
        """)
    config = load_ini(path)
    assert len(config.deckies) == 1
    decky = config.deckies[0]
    assert decky.name == "decky-01"
    assert decky.services == ["ssh", "http"]
    assert decky.service_config == {}
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
# Per-service subsection parsing
# ---------------------------------------------------------------------------

def test_subsection_parsed_into_service_config(tmp_path):
    """[decky.service] keys land in that decky's service_config dict."""
    path = _write_ini(tmp_path, """
        [decky-01]
        ip = 192.168.1.101
        services = ssh

        [decky-01.ssh]
        kernel_version = 5.15.0-76-generic
        hardware_platform = x86_64
        """)
    parsed = load_ini(path).deckies[0].service_config
    assert "ssh" in parsed
    assert parsed["ssh"]["kernel_version"] == "5.15.0-76-generic"
    assert parsed["ssh"]["hardware_platform"] == "x86_64"


def test_multiple_subsections_for_same_decky(tmp_path):
    """Several [decky.service] sections may target one decky."""
    path = _write_ini(tmp_path, """
        [decky-01]
        services = ssh, http

        [decky-01.ssh]
        users = root:toor

        [decky-01.http]
        server_header = nginx/1.18.0
        fake_app = wordpress
        """)
    parsed = load_ini(path).deckies[0].service_config
    assert parsed["ssh"]["users"] == "root:toor"
    assert parsed["http"]["server_header"] == "nginx/1.18.0"
    assert parsed["http"]["fake_app"] == "wordpress"


def test_subsection_for_unknown_decky_is_ignored(tmp_path):
    """A subsection whose parent decky does not exist is dropped."""
    path = _write_ini(tmp_path, """
        [decky-01]
        services = ssh

        [ghost.ssh]
        kernel_version = 5.15.0
        """)
    config = load_ini(path)
    # ghost.ssh must not create a new decky or error out
    assert len(config.deckies) == 1
    assert config.deckies[0].name == "decky-01"
    assert config.deckies[0].service_config == {}


def test_plain_decky_without_subsections_has_empty_service_config(tmp_path):
    """No subsections at all -> empty service_config."""
    path = _write_ini(tmp_path, """
        [decky-01]
        services = http
        """)
    assert load_ini(path).deckies[0].service_config == {}
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
# Bring-your-own service (BYOS) parsing
# ---------------------------------------------------------------------------

def test_custom_service_parsed(tmp_path):
    """[custom-*] sections become custom-service entries with image/exec/ports."""
    path = _write_ini(tmp_path, """
        [general]
        net = 10.0.0.0/24
        gw = 10.0.0.1

        [custom-myservice]
        binary = my-image:latest
        exec = /usr/bin/myapp -p 8080
        ports = 8080
        """)
    config = load_ini(path)
    assert len(config.custom_services) == 1
    service = config.custom_services[0]
    assert service.name == "myservice"
    assert service.image == "my-image:latest"
    assert service.exec_cmd == "/usr/bin/myapp -p 8080"
    assert service.ports == [8080]


def test_custom_service_without_ports(tmp_path):
    """ports= is optional and defaults to an empty list."""
    path = _write_ini(tmp_path, """
        [custom-scanner]
        binary = scanner:1.0
        exec = /usr/bin/scanner
        """)
    assert load_ini(path).custom_services[0].ports == []


def test_custom_service_not_added_to_deckies(tmp_path):
    """Custom services must not leak into the decky list."""
    path = _write_ini(tmp_path, """
        [decky-01]
        services = ssh

        [custom-myservice]
        binary = foo:bar
        exec = /bin/foo
        """)
    config = load_ini(path)
    assert len(config.deckies) == 1
    assert config.deckies[0].name == "decky-01"
    assert len(config.custom_services) == 1


def test_no_custom_services_gives_empty_list(tmp_path):
    """Absent [custom-*] sections -> empty custom_services list."""
    path = _write_ini(tmp_path, """
        [decky-01]
        services = http
        """)
    assert load_ini(path).custom_services == []
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
# nmap_os parsing
# ---------------------------------------------------------------------------

def test_nmap_os_parsed_from_ini(tmp_path):
    """nmap_os= is read into the decky's nmap_os field."""
    path = _write_ini(tmp_path, """
        [decky-win]
        ip = 192.168.1.101
        services = rdp, smb
        nmap_os = windows
        """)
    assert load_ini(path).deckies[0].nmap_os == "windows"


def test_nmap_os_defaults_to_none_when_absent(tmp_path):
    """Without nmap_os= the field stays None."""
    path = _write_ini(tmp_path, """
        [decky-01]
        services = ssh
        """)
    assert load_ini(path).deckies[0].nmap_os is None


@pytest.mark.parametrize("os_family", ["linux", "windows", "bsd", "embedded", "cisco"])
def test_nmap_os_all_families_accepted(tmp_path, os_family):
    """Every supported OS family value parses through unchanged."""
    path = _write_ini(tmp_path, f"""
        [decky-01]
        services = ssh
        nmap_os = {os_family}
        """)
    assert load_ini(path).deckies[0].nmap_os == os_family


def test_nmap_os_propagates_to_amount_expanded_deckies(tmp_path):
    """amount= expansion copies nmap_os onto every generated decky."""
    path = _write_ini(tmp_path, """
        [corp-printers]
        services = snmp
        nmap_os = embedded
        amount = 3
        """)
    config = load_ini(path)
    assert len(config.deckies) == 3
    assert all(d.nmap_os == "embedded" for d in config.deckies)


def test_nmap_os_hyphen_alias_accepted(tmp_path):
    """nmap-os= (hyphen) should work as an alias for nmap_os=."""
    path = _write_ini(tmp_path, """
        [decky-01]
        services = ssh
        nmap-os = bsd
        """)
    assert load_ini(path).deckies[0].nmap_os == "bsd"
|
||||||
41
tests/test_ini_validation.py
Normal file
41
tests/test_ini_validation.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import pytest
|
||||||
|
from decnet.ini_loader import load_ini_from_string, validate_ini_string
|
||||||
|
|
||||||
|
def test_validate_ini_string_too_large():
    """Content over the size cap is rejected."""
    oversized = "[" + "a" * (512 * 1024 + 1) + "]"
    with pytest.raises(ValueError, match="too large"):
        validate_ini_string(oversized)


def test_validate_ini_string_empty():
    """Empty and whitespace-only content are both rejected."""
    for blank in ("", " "):
        with pytest.raises(ValueError, match="is empty"):
            validate_ini_string(blank)


def test_validate_ini_string_no_sections():
    """Key/value pairs outside any [section] are invalid."""
    with pytest.raises(ValueError, match="no sections found"):
        validate_ini_string("key=value")


def test_load_ini_from_string_amount_limit():
    """amount= above the allowed maximum raises."""
    content = """
[general]
net=192.168.1.0/24

[decky-01]
amount=101
archetype=linux-server
"""
    with pytest.raises(ValueError, match="exceeds maximum allowed"):
        load_ini_from_string(content)


def test_load_ini_from_string_valid():
    """A valid config expands amount= into that many deckies."""
    content = """
[general]
net=192.168.1.0/24

[decky-01]
amount=5
archetype=linux-server
"""
    cfg = load_ini_from_string(content)
    assert len(cfg.deckies) == 5
||||||
96
tests/test_log_file_mount.py
Normal file
96
tests/test_log_file_mount.py
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
"""Tests for log_file volume mount in compose generation."""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
from decnet.composer import _CONTAINER_LOG_DIR, _resolve_log_file, generate_compose
|
||||||
|
from decnet.config import DeckyConfig, DecnetConfig
|
||||||
|
from decnet.distros import DISTROS
|
||||||
|
|
||||||
|
|
||||||
|
def _make_config(log_file: str | None = None) -> DecnetConfig:
    """Build a minimal one-decky unihost DecnetConfig.

    :param log_file: optional host-side log-file path, passed straight
        through to ``DecnetConfig.log_file`` (None disables log mounting).
    """
    profile = DISTROS["debian"]
    decky = DeckyConfig(
        name="decky-01",
        ip="10.0.0.10",
        services=["http"],
        distro="debian",
        base_image=profile.image,
        build_base=profile.build_base,
        hostname="test-host",
    )
    return DecnetConfig(
        mode="unihost",
        interface="eth0",
        subnet="10.0.0.0/24",
        gateway="10.0.0.1",
        deckies=[decky],
        log_file=log_file,
    )
|
||||||
|
|
||||||
|
|
||||||
|
class TestResolveLogFile:
    """_resolve_log_file splits a path into (host dir, in-container path)."""

    def test_absolute_path(self, tmp_path):
        host_dir, container_path = _resolve_log_file(str(tmp_path / "decnet.log"))
        assert host_dir == str(tmp_path)
        assert container_path == f"{_CONTAINER_LOG_DIR}/decnet.log"

    def test_relative_path_resolves_to_absolute(self):
        # A bare filename must still be anchored to an absolute host directory.
        host_dir, container_path = _resolve_log_file("decnet.log")
        assert Path(host_dir).is_absolute()
        assert container_path == f"{_CONTAINER_LOG_DIR}/decnet.log"

    def test_nested_filename_preserved(self, tmp_path):
        nested = str(tmp_path / "logs" / "honeypot.log")
        _, container_path = _resolve_log_file(nested)
        assert container_path.endswith("honeypot.log")
|
||||||
|
|
||||||
|
|
||||||
|
class TestComposeLogFileMount:
    """generate_compose() wiring of the optional log_file volume mount."""

    def test_no_log_file_no_volume(self):
        # Without log_file, neither the env var nor the mount may appear.
        config = _make_config(log_file=None)
        compose = generate_compose(config)
        fragment = compose["services"]["decky-01-http"]
        assert "DECNET_LOG_FILE" not in fragment.get("environment", {})
        volumes = fragment.get("volumes", [])
        assert not any(_CONTAINER_LOG_DIR in v for v in volumes)

    def test_log_file_sets_env_var(self, tmp_path):
        config = _make_config(log_file=str(tmp_path / "decnet.log"))
        compose = generate_compose(config)
        fragment = compose["services"]["decky-01-http"]
        env = fragment["environment"]
        assert "DECNET_LOG_FILE" in env
        # Container-side path keeps the original filename under the log dir.
        assert env["DECNET_LOG_FILE"].startswith(_CONTAINER_LOG_DIR)
        assert env["DECNET_LOG_FILE"].endswith("decnet.log")

    def test_log_file_adds_volume_mount(self, tmp_path):
        config = _make_config(log_file=str(tmp_path / "decnet.log"))
        compose = generate_compose(config)
        fragment = compose["services"]["decky-01-http"]
        volumes = fragment.get("volumes", [])
        assert any(_CONTAINER_LOG_DIR in v for v in volumes)

    def test_volume_mount_format(self, tmp_path):
        # Mount must be "<absolute host dir>:<container log dir>".
        config = _make_config(log_file=str(tmp_path / "decnet.log"))
        compose = generate_compose(config)
        fragment = compose["services"]["decky-01-http"]
        mount = next(v for v in fragment["volumes"] if _CONTAINER_LOG_DIR in v)
        host_part, container_part = mount.split(":")
        assert Path(host_part).is_absolute()
        assert container_part == _CONTAINER_LOG_DIR

    def test_host_log_dir_created(self, tmp_path):
        # generate_compose() creates the host-side log directory as a side effect.
        log_dir = tmp_path / "newdir"
        config = _make_config(log_file=str(log_dir / "decnet.log"))
        generate_compose(config)
        assert log_dir.exists()

    def test_volume_not_duplicated(self, tmp_path):
        """Same mount must not appear twice even if fragment already has volumes."""
        config = _make_config(log_file=str(tmp_path / "decnet.log"))
        compose = generate_compose(config)
        fragment = compose["services"]["decky-01-http"]
        log_mounts = [v for v in fragment["volumes"] if _CONTAINER_LOG_DIR in v]
        assert len(log_mounts) == 1
|
||||||
195
tests/test_network.py
Normal file
195
tests/test_network.py
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
"""
|
||||||
|
Tests for decnet.network utility functions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from unittest.mock import MagicMock, patch
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from decnet.network import (
|
||||||
|
HOST_IPVLAN_IFACE,
|
||||||
|
HOST_MACVLAN_IFACE,
|
||||||
|
MACVLAN_NETWORK_NAME,
|
||||||
|
create_ipvlan_network,
|
||||||
|
create_macvlan_network,
|
||||||
|
ips_to_range,
|
||||||
|
setup_host_ipvlan,
|
||||||
|
setup_host_macvlan,
|
||||||
|
teardown_host_ipvlan,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# ips_to_range
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestIpsToRange:
    """ips_to_range() must return one CIDR that covers every given address."""

    def test_single_ip(self):
        # One address -> host route (/32).
        assert ips_to_range(["192.168.1.100"]) == "192.168.1.100/32"

    def test_consecutive_small_range(self):
        # .97–.101: max^min = 4, bit_length=3, prefix=29 → .96/29
        result = ips_to_range([f"192.168.1.{i}" for i in range(97, 102)])
        from ipaddress import IPv4Network, IPv4Address
        net = IPv4Network(result)
        for i in range(97, 102):
            assert IPv4Address(f"192.168.1.{i}") in net

    def test_range_crossing_cidr_boundary(self):
        # .110–.119 crosses the /28 boundary (.96–.111 vs .112–.127)
        # Subtraction gives /28 (wrong), XOR gives /27 (correct)
        ips = [f"192.168.1.{i}" for i in range(110, 120)]
        result = ips_to_range(ips)
        from ipaddress import IPv4Network, IPv4Address
        net = IPv4Network(result)
        for i in range(110, 120):
            assert IPv4Address(f"192.168.1.{i}") in net, (
                f"192.168.1.{i} not in computed range {result}"
            )

    def test_all_ips_covered(self):
        # Larger spread: .10–.200
        ips = [f"10.0.0.{i}" for i in range(10, 201)]
        result = ips_to_range(ips)
        from ipaddress import IPv4Network, IPv4Address
        net = IPv4Network(result)
        for i in range(10, 201):
            assert IPv4Address(f"10.0.0.{i}") in net

    def test_two_ips_same_cidr(self):
        # .100 and .101 share /31
        result = ips_to_range(["192.168.1.100", "192.168.1.101"])
        from ipaddress import IPv4Network, IPv4Address
        net = IPv4Network(result)
        assert IPv4Address("192.168.1.100") in net
        assert IPv4Address("192.168.1.101") in net
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# create_macvlan_network
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestCreateMacvlanNetwork:
    """create_macvlan_network() creates the network only when it is missing."""

    def _make_client(self, existing=None):
        # MagicMock(name=...) is reserved by mock itself, so the .name
        # attribute must be assigned after construction.
        client = MagicMock()
        fakes = []
        for net_name in existing or []:
            fake = MagicMock()
            fake.name = net_name
            fakes.append(fake)
        client.networks.list.return_value = fakes
        return client

    def test_creates_network_when_absent(self):
        client = self._make_client([])
        create_macvlan_network(client, "eth0", "192.168.1.0/24", "192.168.1.1", "192.168.1.96/27")
        client.networks.create.assert_called_once()
        call = client.networks.create.call_args
        assert call[1]["driver"] == "macvlan"
        assert call[1]["name"] == MACVLAN_NETWORK_NAME
        assert call[1]["options"]["parent"] == "eth0"

    def test_noop_when_network_exists(self):
        client = self._make_client([MACVLAN_NETWORK_NAME])
        create_macvlan_network(client, "eth0", "192.168.1.0/24", "192.168.1.1", "192.168.1.96/27")
        client.networks.create.assert_not_called()
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# create_ipvlan_network
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestCreateIpvlanNetwork:
    """create_ipvlan_network() mirrors the macvlan path with driver=ipvlan."""

    def _make_client(self, existing=None):
        # MagicMock(name=...) is reserved by mock itself, so the .name
        # attribute must be assigned after construction.
        client = MagicMock()
        fakes = []
        for net_name in existing or []:
            fake = MagicMock()
            fake.name = net_name
            fakes.append(fake)
        client.networks.list.return_value = fakes
        return client

    def test_creates_ipvlan_network(self):
        client = self._make_client([])
        create_ipvlan_network(client, "wlan0", "192.168.1.0/24", "192.168.1.1", "192.168.1.96/27")
        client.networks.create.assert_called_once()
        call = client.networks.create.call_args
        assert call[1]["driver"] == "ipvlan"
        assert call[1]["options"]["parent"] == "wlan0"
        assert call[1]["options"]["ipvlan_mode"] == "l2"

    def test_noop_when_network_exists(self):
        client = self._make_client([MACVLAN_NETWORK_NAME])
        create_ipvlan_network(client, "wlan0", "192.168.1.0/24", "192.168.1.1", "192.168.1.96/27")
        client.networks.create.assert_not_called()

    def test_uses_same_network_name_as_macvlan(self):
        """Both drivers share the same logical network name so compose files are identical."""
        client = self._make_client([])
        create_ipvlan_network(client, "wlan0", "192.168.1.0/24", "192.168.1.1", "192.168.1.96/27")
        assert client.networks.create.call_args[1]["name"] == MACVLAN_NETWORK_NAME
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# setup_host_macvlan / teardown_host_macvlan
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestSetupHostMacvlan:
    """setup_host_macvlan() shells out to `ip` via _run and requires root."""

    @patch("decnet.network.os.geteuid", return_value=0)
    @patch("decnet.network._run")
    def test_creates_interface_when_absent(self, mock_run, _):
        # Simulate interface not existing (returncode != 0)
        mock_run.side_effect = lambda cmd, **kw: MagicMock(returncode=1) if "show" in cmd else MagicMock(returncode=0)
        setup_host_macvlan("eth0", "192.168.1.5", "192.168.1.96/27")
        calls = [str(c) for c in mock_run.call_args_list]
        # The create path must use the macvlan driver in bridge mode.
        assert any("macvlan" in c for c in calls)
        assert any("mode" in c and "bridge" in c for c in calls)

    @patch("decnet.network.os.geteuid", return_value=0)
    @patch("decnet.network._run")
    def test_skips_create_when_interface_exists(self, mock_run, _):
        # Every `ip ... show` succeeds, so the interface is considered present.
        mock_run.return_value = MagicMock(returncode=0)
        setup_host_macvlan("eth0", "192.168.1.5", "192.168.1.96/27")
        calls = [c[0][0] for c in mock_run.call_args_list]
        # "ip link add <iface> link ..." should not be called when iface exists
        assert not any("link" in cmd and "add" in cmd and HOST_MACVLAN_IFACE in cmd for cmd in calls)

    @patch("decnet.network.os.geteuid", return_value=1)
    def test_requires_root(self, _):
        # A non-root euid must be rejected before any `ip` command runs.
        with pytest.raises(PermissionError):
            setup_host_macvlan("eth0", "192.168.1.5", "192.168.1.96/27")
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# setup_host_ipvlan / teardown_host_ipvlan
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestSetupHostIpvlan:
    """setup_host_ipvlan()/teardown_host_ipvlan() use the ipvlan host iface."""

    @patch("decnet.network.os.geteuid", return_value=0)
    @patch("decnet.network._run")
    def test_creates_ipvlan_interface(self, mock_run, _):
        # `ip ... show` fails -> interface absent -> must be created.
        mock_run.side_effect = lambda cmd, **kw: MagicMock(returncode=1) if "show" in cmd else MagicMock(returncode=0)
        setup_host_ipvlan("wlan0", "192.168.1.5", "192.168.1.96/27")
        calls = [str(c) for c in mock_run.call_args_list]
        # The create path must use the ipvlan driver in l2 mode.
        assert any("ipvlan" in c for c in calls)
        assert any("mode" in c and "l2" in c for c in calls)

    @patch("decnet.network.os.geteuid", return_value=0)
    @patch("decnet.network._run")
    def test_uses_ipvlan_iface_name(self, mock_run, _):
        mock_run.side_effect = lambda cmd, **kw: MagicMock(returncode=1) if "show" in cmd else MagicMock(returncode=0)
        setup_host_ipvlan("wlan0", "192.168.1.5", "192.168.1.96/27")
        calls = [str(c) for c in mock_run.call_args_list]
        # The ipvlan setup must never touch the macvlan host interface.
        assert any(HOST_IPVLAN_IFACE in c for c in calls)
        assert not any(HOST_MACVLAN_IFACE in c for c in calls)

    @patch("decnet.network.os.geteuid", return_value=1)
    def test_requires_root(self, _):
        # A non-root euid must be rejected before any `ip` command runs.
        with pytest.raises(PermissionError):
            setup_host_ipvlan("wlan0", "192.168.1.5", "192.168.1.96/27")

    @patch("decnet.network.os.geteuid", return_value=0)
    @patch("decnet.network._run")
    def test_teardown_uses_ipvlan_iface(self, mock_run, _):
        mock_run.return_value = MagicMock(returncode=0)
        teardown_host_ipvlan("192.168.1.96/27")
        calls = [str(c) for c in mock_run.call_args_list]
        # Teardown must target the ipvlan interface, not the macvlan one.
        assert any(HOST_IPVLAN_IFACE in c for c in calls)
        assert not any(HOST_MACVLAN_IFACE in c for c in calls)
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user