fix(packaging): move templates/ into decnet/ package so they ship with pip install

The docker build contexts and syslog_bridge.py lived at repo root, which
meant setuptools (include = ["decnet*"]) never shipped them. Agents
installed via `pip install $RELEASE_DIR` got site-packages/decnet/** but no
templates/, so every deploy blew up in deployer._sync_logging_helper with
FileNotFoundError on templates/syslog_bridge.py.

Move templates/ -> decnet/templates/ and declare it as setuptools
package-data. Path resolutions in services/*.py and engine/deployer.py drop
one .parent since templates now lives beside the code. Test fixtures,
bandit exclude path, and coverage omit glob updated to match.
This commit is contained in:
2026-04-19 19:30:04 -04:00
parent 2bef3edb72
commit 6708f26e6b
158 changed files with 38 additions and 33 deletions

View File

@@ -31,7 +31,7 @@ from decnet.network import (
log = get_logger("engine")
console = Console()
COMPOSE_FILE = Path("decnet-compose.yml")
_CANONICAL_LOGGING = Path(__file__).parent.parent.parent / "templates" / "syslog_bridge.py"
_CANONICAL_LOGGING = Path(__file__).parent.parent / "templates" / "syslog_bridge.py"
def _sync_logging_helper(config: DecnetConfig) -> None:

View File

@@ -32,4 +32,4 @@ class ConpotService(BaseService):
}
def dockerfile_context(self):
return Path(__file__).parent.parent.parent / "templates" / "conpot"
return Path(__file__).parent.parent / "templates" / "conpot"

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "docker_api"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "docker_api"
class DockerAPIService(BaseService):

View File

@@ -2,7 +2,7 @@ from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "elasticsearch"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "elasticsearch"
class ElasticsearchService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "ftp"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "ftp"
class FTPService(BaseService):

View File

@@ -2,7 +2,7 @@ import json
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "http"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "http"
class HTTPService(BaseService):

View File

@@ -2,7 +2,7 @@ import json
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "https"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "https"
class HTTPSService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "imap"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "imap"
class IMAPService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "k8s"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "k8s"
class KubernetesAPIService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "ldap"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "ldap"
class LDAPService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "llmnr"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "llmnr"
class LLMNRService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "mongodb"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "mongodb"
class MongoDBService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "mqtt"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "mqtt"
class MQTTService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "mssql"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "mssql"
class MSSQLService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "mysql"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "mysql"
class MySQLService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "pop3"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "pop3"
class POP3Service(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "postgres"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "postgres"
class PostgresService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "rdp"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "rdp"
class RDPService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "redis"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "redis"
class RedisService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "sip"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "sip"
class SIPService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "smb"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "smb"
class SMBService(BaseService):

View File

@@ -2,7 +2,7 @@ from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "smtp"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "smtp"
class SMTPService(BaseService):

View File

@@ -4,7 +4,7 @@ from decnet.services.base import BaseService
# Reuses the same template as the smtp service — only difference is
# SMTP_OPEN_RELAY=1 in the environment, which enables the open relay persona.
_TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "smtp"
_TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "smtp"
class SMTPRelayService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "sniffer"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "sniffer"
class SnifferService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "snmp"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "snmp"
class SNMPService(BaseService):

View File

@@ -2,7 +2,7 @@ from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "ssh"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "ssh"
class SSHService(BaseService):

View File

@@ -2,7 +2,7 @@ from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "telnet"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "telnet"
class TelnetService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "tftp"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "tftp"
class TFTPService(BaseService):

View File

@@ -1,7 +1,7 @@
from pathlib import Path
from decnet.services.base import BaseService
TEMPLATES_DIR = Path(__file__).parent.parent.parent / "templates" / "vnc"
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "vnc"
class VNCService(BaseService):

View File

@@ -0,0 +1,28 @@
# Build context for the Conpot ICS/SCADA honeypot service.
# BASE_IMAGE is overridable so offline/mirrored builds can substitute a registry.
ARG BASE_IMAGE=honeynet/conpot:latest
FROM ${BASE_IMAGE}
USER root
# Replace 5020 with 502 in all templates so Modbus binds on the standard port
RUN find /opt /usr /etc /home -name "*.xml" -exec sed -i 's/<port>5020<\/port>/<port>502<\/port>/g' {} + 2>/dev/null || true
RUN find /opt /usr /etc /home -name "*.xml" -exec sed -i 's/port="5020"/port="502"/g' {} + 2>/dev/null || true
# Install libcap and give the Python interpreter permission to bind ports < 1024
# (apt-get path for Debian bases, apk fallback for Alpine; both best-effort)
RUN (apt-get update && apt-get install -y --no-install-recommends libcap2-bin 2>/dev/null) || (apk add --no-cache libcap 2>/dev/null) || true
RUN find /home/conpot/.local/bin /usr /opt -type f -name 'python*' -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true
# Bridge conpot's own logger into syslog-relay's RFC 5424 syslog pipeline.
# entrypoint.py is self-contained (inlines the formatter) because the
# conpot base image runs Python 3.6, which cannot import the shared
# syslog_bridge.py (that file uses 3.9+ / 3.10+ type syntax).
COPY entrypoint.py /home/conpot/entrypoint.py
RUN chown conpot:conpot /home/conpot/entrypoint.py \
    && chmod +x /home/conpot/entrypoint.py
# The upstream image already runs as non-root 'conpot'.
# We do NOT switch to a 'logrelay' user — doing so breaks pkg_resources
# because conpot's eggs live under /home/conpot/.local and are only on
# the Python path for that user.
USER conpot
ENTRYPOINT ["/usr/bin/python3", "/home/conpot/entrypoint.py"]

View File

@@ -0,0 +1,144 @@
#!/usr/bin/env python3
"""
Entrypoint wrapper for the Conpot ICS/SCADA honeypot.
Launches conpot as a child process and bridges its log output into the
syslog-relay structured syslog pipeline. Each line from conpot stdout/stderr
is classified and emitted as an RFC 5424 syslog line so the host-side
collector can ingest it alongside every other service.
Written to be compatible with Python 3.6 (the conpot base image version).
"""
from __future__ import print_function
import os
import re
import signal
import subprocess
import sys
from datetime import datetime, timezone
# ── RFC 5424 inline formatter (Python 3.6-compatible) ─────────────────────────
_FACILITY_LOCAL0 = 16  # syslog facility "local0"; PRI = facility * 8 + severity
_SD_ID = "relay@55555"  # structured-data element ID (private enterprise number 55555)
_NILVALUE = "-"  # RFC 5424 NILVALUE placeholder for absent header fields
SEVERITY_INFO = 6
SEVERITY_WARNING = 4
SEVERITY_ERROR = 3
def _sd_escape(value):
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _syslog_line(event_type, severity=SEVERITY_INFO, **fields):
    """Build one RFC 5424 line for conpot; PROCID is always the nil value."""
    # PRI = facility * 8 + severity, per RFC 5424.
    pri = "<%d>" % (_FACILITY_LOCAL0 * 8 + severity)
    ts = datetime.now(timezone.utc).isoformat()
    if fields:
        pairs = ['{}="{}"'.format(key, _sd_escape(str(val))) for key, val in fields.items()]
        sd = "[{} {}]".format(_SD_ID, " ".join(pairs))
    else:
        sd = _NILVALUE
    # HOSTNAME and MSGID are truncated to the RFC 5424 field limits.
    return "%s1 %s %s conpot %s %s %s" % (
        pri, ts, NODE_NAME[:255], _NILVALUE, event_type[:32], sd,
    )
def _log(event_type, severity=SEVERITY_INFO, **fields):
    """Format the event and emit it on stdout (the container log transport)."""
    sys.stdout.write(_syslog_line(event_type, severity, **fields) + "\n")
    sys.stdout.flush()
# ── Config ────────────────────────────────────────────────────────────────────
NODE_NAME = os.environ.get("NODE_NAME", "conpot-node")  # HOSTNAME field in syslog output
TEMPLATE = os.environ.get("CONPOT_TEMPLATE", "default")  # conpot --template argument
# Command line for the conpot child process; -f keeps it in the foreground so
# this wrapper can read its stdout.
_CONPOT_CMD = [
    "/home/conpot/.local/bin/conpot",
    "--template", TEMPLATE,
    "--logfile", "/var/log/conpot/conpot.log",
    "-f",
    "--temp_dir", "/tmp",
]
# Grab the first routable IPv4 address from a log line
# (negative lookaheads skip 127.x, 0.x and 255.x prefixes)
_IP_RE = re.compile(r"\b((?!127\.)(?!0\.)(?!255\.)\d{1,3}(?:\.\d{1,3}){3})\b")
# Keywords marking attacker/protocol activity; checked before the startup regex
# in _classify, so protocol lines win even if they also mention "conpot".
_REQUEST_RE = re.compile(
    r"request|recv|received|connect|session|query|command|"
    r"modbus|snmp|http|s7comm|bacnet|enip",
    re.IGNORECASE,
)
_ERROR_RE = re.compile(r"error|exception|traceback|critical|fail", re.IGNORECASE)
_WARN_RE = re.compile(r"warning|warn", re.IGNORECASE)
# Lines about service startup/initialization; broad on purpose (lowest priority).
_STARTUP_RE = re.compile(
    r"starting|started|listening|server|initializ|template|conpot",
    re.IGNORECASE,
)
# ── Classifier ────────────────────────────────────────────────────────────────
def _classify(raw):
    """Map one conpot log line onto an (event_type, severity, fields) triple."""
    fields = {}
    ip_match = _IP_RE.search(raw)
    if ip_match:
        fields["src"] = ip_match.group(1)
    # Keep a truncated copy of the raw line for operators.
    fields["msg"] = raw[:300]
    # First matching category wins; order encodes priority.
    for pattern, event_type, severity in (
        (_ERROR_RE, "error", SEVERITY_ERROR),
        (_WARN_RE, "warning", SEVERITY_WARNING),
        (_REQUEST_RE, "request", SEVERITY_INFO),
        (_STARTUP_RE, "startup", SEVERITY_INFO),
    ):
        if pattern.search(raw):
            return event_type, severity, fields
    return "log", SEVERITY_INFO, fields
# ── Main ──────────────────────────────────────────────────────────────────────
def main():
    """Run conpot as a child process and relay its output as RFC 5424 syslog."""
    _log("startup", msg="Conpot ICS honeypot starting (template={})".format(TEMPLATE))
    # stderr is merged into stdout; universal_newlines + bufsize=1 gives
    # line-buffered text mode on Python 3.6.
    proc = subprocess.Popen(
        _CONPOT_CMD,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        bufsize=1,
        universal_newlines=True,
    )
    # Forward SIGTERM/SIGINT to conpot so `docker stop` shuts it down cleanly.
    def _forward(sig, _frame):
        proc.send_signal(sig)
    signal.signal(signal.SIGTERM, _forward)
    signal.signal(signal.SIGINT, _forward)
    try:
        # Stream until conpot closes its stdout (i.e. exits).
        for raw_line in proc.stdout:
            line = raw_line.rstrip()
            if not line:
                continue
            event_type, severity, fields = _classify(line)
            _log(event_type, severity, **fields)
    finally:
        proc.wait()
        _log("shutdown", msg="Conpot ICS honeypot stopped")
        # NOTE(review): sys.exit() inside finally replaces any in-flight
        # exception with SystemExit — presumably intentional so the container
        # exit code mirrors conpot's; confirm.
        sys.exit(proc.returncode)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.

Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.

RFC 5424 structure:
    <PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16  # syslog facility "local0"; PRI = facility * 8 + severity
_SD_ID = "relay@55555"  # structured-data element ID (private enterprise number)
_NILVALUE = "-"  # RFC 5424 NILVALUE for absent header fields
# RFC 5424 severity codes (0 = most severe).
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
# Maximum header-field lengths from RFC 5424 section 6.
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
    """Render **fields** as one SD-ELEMENT, or NILVALUE when there are none."""
    if not fields:
        return _NILVALUE
    rendered = (f'{name}="{_sd_escape(str(val))}"' for name, val in fields.items())
    return f"[{_SD_ID} {' '.join(rendered)}]"
def syslog_line(
    service: str,
    hostname: str,
    event_type: str,
    severity: int = SEVERITY_INFO,
    timestamp: datetime | None = None,
    msg: str | None = None,
    **fields: Any,
) -> str:
    """
    Build one RFC 5424-compliant syslog line (no trailing newline).

    Args:
        service: APP-NAME header field (e.g. "http", "mysql")
        hostname: HOSTNAME header field (node name)
        event_type: MSGID header field (e.g. "request", "login_attempt")
        severity: syslog severity integer (default: INFO=6)
        timestamp: UTC datetime; current time when omitted
        msg: optional free-text MSG appended after the structured data
        **fields: encoded as SD-PARAMs in a single SD-ELEMENT
    """
    when = timestamp or datetime.now(timezone.utc)
    header = " ".join(
        (
            f"<{_FACILITY_LOCAL0 * 8 + severity}>1",
            when.isoformat(),
            (hostname or _NILVALUE)[:_MAX_HOSTNAME],
            (service or _NILVALUE)[:_MAX_APPNAME],
            _NILVALUE,  # PROCID is never populated by services
            (event_type or _NILVALUE)[:_MAX_MSGID],
            _sd_element(fields),
        )
    )
    return f"{header} {msg}" if msg else header
def write_syslog_file(line: str) -> None:
    """Emit a syslog line to stdout for container log capture.

    flush=True so the runtime sees each event immediately instead of after
    the stdio buffer fills.
    """
    print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
    """Intentionally do nothing: TCP forwarding is rsyslog's job, not ours.

    Kept so service code can call one uniform trio of helpers regardless of
    whether a forwarding transport exists in this deployment.
    """
    return None

View File

@@ -0,0 +1,22 @@
# Generic service-container base; BASE_IMAGE overridable for mirrored registries.
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
# Python toolchain plus authbind so non-root services can bind low ports.
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 python3-pip python3-venv \
    libssl-dev libffi-dev \
    git authbind \
    && rm -rf /var/lib/apt/lists/*
# Non-root service user; setcap lets python3 bind ports < 1024 without root.
RUN useradd -r -s /bin/false -d /opt logrelay \
    && apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
    && rm -rf /var/lib/apt/lists/* \
    && (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
# NOTE(review): `kill -0 1` only verifies PID 1 is signalable, which is almost
# always true whenever the healthcheck itself can run — confirm this is the
# intended liveness signal.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,30 @@
# Cowrie SSH honeypot configuration, rendered from environment variables by the
# container entrypoint (Jinja2). Unset variables fall back to the defaults below.
[honeypot]
hostname = {{ COWRIE_HOSTNAME | default('svr01') }}
# NOTE(review): listen_endpoints is repeated in [ssh] below; confirm the
# [honeypot] copy is actually read.
listen_endpoints = tcp:2222:interface=0.0.0.0
kernel_version = {{ COWRIE_HONEYPOT_KERNEL_VERSION | default('5.15.0-76-generic') }}
kernel_build_string = {{ COWRIE_HONEYPOT_KERNEL_BUILD_STRING | default('#83-Ubuntu SMP Thu Jun 15 19:16:32 UTC 2023') }}
hardware_platform = {{ COWRIE_HONEYPOT_HARDWARE_PLATFORM | default('x86_64') }}
[ssh]
enabled = true
listen_endpoints = tcp:2222:interface=0.0.0.0
version = {{ COWRIE_SSH_VERSION | default('SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.5') }}
{% if COWRIE_LOG_HOST is defined and COWRIE_LOG_HOST %}
[output_jsonlog]
enabled = true
logfile = cowrie.json
[output_localsocket]
enabled = false
# Forward JSON events to SIEM/aggregator
[output_tcp]
enabled = true
host = {{ COWRIE_LOG_HOST }}
port = {{ COWRIE_LOG_PORT | default('5140') }}
{% else %}
# No aggregator configured: keep JSON logging local only.
[output_jsonlog]
enabled = true
logfile = cowrie.json
{% endif %}

View File

@@ -0,0 +1,33 @@
#!/bin/bash
# Cowrie container entrypoint:
#   1. render cowrie.cfg from the Jinja2 template using environment variables,
#   2. optionally populate userdb.txt from COWRIE_USERDB_ENTRIES,
#   3. exec cowrie in the foreground under authbind.
set -e
# Render Jinja2 config template
/home/cowrie/cowrie-env/bin/python3 - <<'EOF'
import os
from jinja2 import Template
with open("/home/cowrie/cowrie.cfg.j2") as f:
    tpl = Template(f.read())
rendered = tpl.render(**os.environ)
with open("/home/cowrie/cowrie-env/etc/cowrie.cfg", "w") as f:
    f.write(rendered)
EOF
# Write userdb.txt if custom users were provided
# Format: COWRIE_USERDB_ENTRIES=root:toor,admin:admin123
if [ -n "${COWRIE_USERDB_ENTRIES}" ]; then
    USERDB="/home/cowrie/cowrie-env/etc/userdb.txt"
    # Truncate any previous userdb before appending.
    : > "$USERDB"
    IFS=',' read -ra PAIRS <<< "${COWRIE_USERDB_ENTRIES}"
    for pair in "${PAIRS[@]}"; do
        user="${pair%%:*}"
        pass="${pair#*:}"
        # Cowrie userdb format is user:uid:password; root keeps uid 0.
        uid=1000
        [ "$user" = "root" ] && uid=0
        echo "${user}:${uid}:${pass}" >> "$USERDB"
    done
fi
# authbind --deep lets the non-root cowrie user bind privileged ports;
# --pidfile= disables the pid file (we are container PID 1), -n stays foreground.
exec authbind --deep /home/cowrie/cowrie-env/bin/twistd -n --pidfile= cowrie

View File

@@ -0,0 +1,62 @@
root:x:0:
daemon:x:1:
bin:x:2:
sys:x:3:
adm:x:4:syslog,admin
tty:x:5:
disk:x:6:
lp:x:7:
mail:x:8:
news:x:9:
uucp:x:10:
man:x:12:
proxy:x:13:
kmem:x:15:
dialout:x:20:
fax:x:21:
voice:x:22:
cdrom:x:24:admin
floppy:x:25:
tape:x:26:
sudo:x:27:admin
audio:x:29:
dip:x:30:admin
www-data:x:33:
backup:x:34:
operator:x:37:
list:x:38:
irc:x:39:
src:x:40:
gnats:x:41:
shadow:x:42:
utmp:x:43:
video:x:44:
sasl:x:45:
plugdev:x:46:admin
staff:x:50:
games:x:60:
users:x:100:
nogroup:x:65534:
systemd-journal:x:101:
systemd-network:x:102:
systemd-resolve:x:103:
crontab:x:104:
messagebus:x:105:
systemd-timesync:x:106:
input:x:107:
sgx:x:108:
kvm:x:109:
render:x:118:
syslog:x:110:
tss:x:111:
uuidd:x:112:
tcpdump:x:113:
ssl-cert:x:114:
landscape:x:115:
fwupd-refresh:x:116:
usbmux:x:46:
lxd:x:117:admin
systemd-coredump:x:999:
mysql:x:119:
netdev:x:120:admin
admin:x:1000:

View File

@@ -0,0 +1 @@
NODE_NAME

View File

@@ -0,0 +1,5 @@
127.0.0.1 localhost
127.0.1.1 NODE_NAME
::1 localhost ip6-localhost ip6-loopback
ff02::1 ip6-allnodes
ff02::2 ip6-allrouters

View File

@@ -0,0 +1,2 @@
Ubuntu 22.04.3 LTS \n \l

View File

@@ -0,0 +1 @@
Ubuntu 22.04.3 LTS

View File

@@ -0,0 +1,26 @@
* Documentation: https://help.ubuntu.com
* Management: https://landscape.canonical.com
* Support: https://ubuntu.com/advantage
System information as of Mon Jan 15 09:12:44 UTC 2024
System load: 0.08 Processes: 142
Usage of /: 34.2% of 49.10GB Users logged in: 0
Memory usage: 22% IPv4 address for eth0: 10.0.1.5
Swap usage: 0%
* Strictly confined Kubernetes makes edge and IoT secure. Learn how MicroK8s
just raised the bar for K8s security.
https://ubuntu.com/engage/secure-kubernetes-at-the-edge
Expanded Security Maintenance for Applications is not enabled.
0 updates can be applied immediately.
Enable ESM Apps to receive additional future security updates.
See https://ubuntu.com/esm or run: sudo pro status
Last login: Sun Jan 14 23:45:01 2024 from 10.0.0.1

View File

@@ -0,0 +1,12 @@
PRETTY_NAME="Ubuntu 22.04.3 LTS"
NAME="Ubuntu"
VERSION_ID="22.04"
VERSION="22.04.3 LTS (Jammy Jellyfish)"
VERSION_CODENAME=jammy
ID=ubuntu
ID_LIKE=debian
HOME_URL="https://www.ubuntu.com/"
SUPPORT_URL="https://help.ubuntu.com/"
BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
UBUNTU_CODENAME=jammy

View File

@@ -0,0 +1,36 @@
root:x:0:0:root:/root:/bin/bash
daemon:x:1:1:daemon:/usr/sbin:/usr/sbin/nologin
bin:x:2:2:bin:/bin:/usr/sbin/nologin
sys:x:3:3:sys:/dev:/usr/sbin/nologin
sync:x:4:65534:sync:/bin:/bin/sync
games:x:5:60:games:/usr/games:/usr/sbin/nologin
man:x:6:12:man:/var/cache/man:/usr/sbin/nologin
lp:x:7:7:lp:/var/spool/lpd:/usr/sbin/nologin
mail:x:8:8:mail:/var/mail:/usr/sbin/nologin
news:x:9:9:news:/var/spool/news:/usr/sbin/nologin
uucp:x:10:10:uucp:/var/spool/uucp:/usr/sbin/nologin
proxy:x:13:13:proxy:/bin:/usr/sbin/nologin
www-data:x:33:33:www-data:/var/www:/usr/sbin/nologin
backup:x:34:34:backup:/var/backups:/usr/sbin/nologin
list:x:38:38:Mailing List Manager:/var/list:/usr/sbin/nologin
irc:x:39:39:ircd:/run/ircd:/usr/sbin/nologin
gnats:x:41:41:Gnats Bug-Reporting System (admin):/var/lib/gnats:/usr/sbin/nologin
nobody:x:65534:65534:nobody:/nonexistent:/usr/sbin/nologin
systemd-network:x:100:102:systemd Network Management,,,:/run/systemd:/usr/sbin/nologin
systemd-resolve:x:101:103:systemd Resolver,,,:/run/systemd:/usr/sbin/nologin
messagebus:x:102:105::/nonexistent:/usr/sbin/nologin
systemd-timesync:x:103:106:systemd Time Synchronization,,,:/run/systemd:/usr/sbin/nologin
syslog:x:104:110::/home/syslog:/usr/sbin/nologin
_apt:x:105:65534::/nonexistent:/usr/sbin/nologin
tss:x:106:111:TPM software stack,,,:/var/lib/tpm:/bin/false
uuidd:x:107:112::/run/uuidd:/usr/sbin/nologin
tcpdump:x:108:113::/nonexistent:/usr/sbin/nologin
landscape:x:109:115::/var/lib/landscape:/usr/sbin/nologin
pollinate:x:110:1::/var/cache/pollinate:/bin/false
fwupd-refresh:x:111:116:fwupd-refresh user,,,:/run/systemd:/usr/sbin/nologin
usbmux:x:112:46:usbmux daemon,,,:/var/lib/usbmux:/usr/sbin/nologin
sshd:x:113:65534::/run/sshd:/usr/sbin/nologin
systemd-coredump:x:999:999:systemd Core Dumper:/:/usr/sbin/nologin
lxd:x:998:100::/var/snap/lxd/common/lxd:/bin/false
mysql:x:114:119:MySQL Server,,,:/nonexistent:/bin/false
admin:x:1000:1000:Admin User,,,:/home/admin:/bin/bash

View File

@@ -0,0 +1,4 @@
# This file is managed by man:systemd-resolved(8). Do not edit.
nameserver 8.8.8.8
nameserver 8.8.4.4
search company.internal

View File

@@ -0,0 +1,36 @@
root:$6$rounds=4096$randomsalt$hashed_root_password:19000:0:99999:7:::
daemon:*:19000:0:99999:7:::
bin:*:19000:0:99999:7:::
sys:*:19000:0:99999:7:::
sync:*:19000:0:99999:7:::
games:*:19000:0:99999:7:::
man:*:19000:0:99999:7:::
lp:*:19000:0:99999:7:::
mail:*:19000:0:99999:7:::
news:*:19000:0:99999:7:::
uucp:*:19000:0:99999:7:::
proxy:*:19000:0:99999:7:::
www-data:*:19000:0:99999:7:::
backup:*:19000:0:99999:7:::
list:*:19000:0:99999:7:::
irc:*:19000:0:99999:7:::
gnats:*:19000:0:99999:7:::
nobody:*:19000:0:99999:7:::
systemd-network:*:19000:0:99999:7:::
systemd-resolve:*:19000:0:99999:7:::
messagebus:*:19000:0:99999:7:::
systemd-timesync:*:19000:0:99999:7:::
syslog:*:19000:0:99999:7:::
_apt:*:19000:0:99999:7:::
tss:*:19000:0:99999:7:::
uuidd:*:19000:0:99999:7:::
tcpdump:*:19000:0:99999:7:::
landscape:*:19000:0:99999:7:::
pollinate:*:19000:0:99999:7:::
fwupd-refresh:*:19000:0:99999:7:::
usbmux:*:19000:0:99999:7:::
sshd:*:19000:0:99999:7:::
systemd-coredump:!!:19000::::::
lxd:!:19000::::::
mysql:!:19000:0:99999:7:::
admin:$6$rounds=4096$xyz123$hashed_admin_password:19000:0:99999:7:::

View File

@@ -0,0 +1,14 @@
[default]
aws_access_key_id = AKIAIOSFODNN7EXAMPLE
aws_secret_access_key = wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
region = us-east-1
[production]
aws_access_key_id = AKIAI44QH8DHBEXAMPLE
aws_secret_access_key = je7MtGbClwBF/2Zp9Utk/h3yCo8nvbEXAMPLEKEY
region = us-east-1
[backup-role]
aws_access_key_id = AKIAIOSFODNN7BACKUP1
aws_secret_access_key = 9drTJvcXLB89EXAMPLEKEY/bPxRfiCYBACKUPKEY
region = eu-west-2

View File

@@ -0,0 +1,33 @@
ls -la
cd /var/www/html
git status
git pull origin main
sudo systemctl restart nginx
sudo systemctl status nginx
df -h
free -m
top
ps aux | grep nginx
aws s3 ls
aws s3 ls s3://company-prod-backups
aws s3 cp /var/www/html/backup.tar.gz s3://company-prod-backups/
aws ec2 describe-instances --region us-east-1
kubectl get pods -n production
kubectl get services -n production
kubectl describe pod api-deployment-7d4b9c5f6-xk2pz -n production
docker ps
docker images
docker-compose up -d
mysql -u admin -pSup3rS3cr3t! -h 10.0.1.5 production
cat /etc/mysql/my.cnf
tail -f /var/log/nginx/access.log
tail -f /var/log/auth.log
ssh root@10.0.1.10
scp admin@10.0.1.20:/home/admin/.aws/credentials /tmp/
cat ~/.aws/credentials
vim ~/.aws/credentials
sudo crontab -l
ls /opt/app/
cd /opt/app && npm run build
git log --oneline -20
history

View File

@@ -0,0 +1,2 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC7+xamplekeyforadminuser+xamplekeyforadminuser+xamplekeyforadminuser+xamplekeyforadminuser+xamplekeyforadminuser+xamplekeyforadminuser+xamplekeyforadminuser+xamplekeyforadminuser+xamplekeyforadminuser+xamplekeyforadminuser+xamplekeyforadminuser+xamplekeyforadminuser+xamplekey admin@workstation
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDbackupkeyfordeploymentpipeline+backupkeyfordeploymentpipeline+backupkeyfordeploymentpipeline+backupkeyfordeploymentpipeline+backupkeyfordeploymentpipeline+backupkeyfordeploymentpipeline+backupkeyfordeploymentpipeline+backupkeyfordeploymentpipeline+backupkeyfordeploymentpipeline+backupkeyfordeploymentpipeline deploy@ci-runner

View File

@@ -0,0 +1,22 @@
whoami
id
uname -a
cat /etc/passwd
cat /etc/shadow
ls /home
ls /home/admin
cat /home/admin/.bash_history
cat /home/admin/.aws/credentials
find / -name "*.pem" 2>/dev/null
find / -name "id_rsa" 2>/dev/null
find / -name "*.key" 2>/dev/null
netstat -tunlp
ss -tunlp
iptables -L
cat /etc/crontab
crontab -l
ps aux
systemctl list-units
cat /etc/mysql/my.cnf
mysql -u root -p
history -c

View File

@@ -0,0 +1,12 @@
Jan 14 23:31:04 NODE_NAME sshd[1832]: Accepted publickey for admin from 10.0.0.1 port 54321 ssh2: RSA SHA256:xAmPlEkEyHaSh1234567890abcdefghijklmnop
Jan 14 23:31:04 NODE_NAME sshd[1832]: pam_unix(sshd:session): session opened for user admin by (uid=0)
Jan 14 23:31:46 NODE_NAME sudo[1901]: admin : TTY=pts/0 ; PWD=/home/admin ; USER=root ; COMMAND=/usr/bin/systemctl restart nginx
Jan 14 23:31:46 NODE_NAME sudo[1901]: pam_unix(sudo:session): session opened for user root by admin(uid=0)
Jan 14 23:31:47 NODE_NAME sudo[1901]: pam_unix(sudo:session): session closed for user root
Jan 14 23:45:01 NODE_NAME sshd[1832]: pam_unix(sshd:session): session closed for user admin
Jan 15 00:02:14 NODE_NAME sshd[2104]: Failed password for invalid user oracle from 185.220.101.47 port 38291 ssh2
Jan 15 00:02:16 NODE_NAME sshd[2106]: Failed password for invalid user postgres from 185.220.101.47 port 38295 ssh2
Jan 15 00:02:19 NODE_NAME sshd[2108]: Failed password for root from 185.220.101.47 port 38301 ssh2
Jan 15 00:02:19 NODE_NAME sshd[2108]: error: maximum authentication attempts exceeded for root from 185.220.101.47 port 38301 ssh2 [preauth]
Jan 15 09:12:44 NODE_NAME sshd[2891]: Accepted password for admin from 10.0.0.5 port 51243 ssh2
Jan 15 09:12:44 NODE_NAME sshd[2891]: pam_unix(sshd:session): session opened for user admin by (uid=0)

View File

@@ -0,0 +1,26 @@
# Fake Docker API honeypot container; BASE_IMAGE overridable for mirrors.
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 python3-pip \
    && rm -rf /var/lib/apt/lists/*
# Debian's pip refuses system installs without this opt-out (PEP 668).
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN pip3 install --no-cache-dir flask
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
# 2375 (plain) / 2376 (TLS) are the conventional Docker daemon API ports.
EXPOSE 2375 2376
# Non-root service user; setcap lets python3 bind ports < 1024 without root.
RUN useradd -r -s /bin/false -d /opt logrelay \
    && apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
    && rm -rf /var/lib/apt/lists/* \
    && (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# NOTE(review): `kill -0 1` only verifies PID 1 is signalable — confirm this
# is the intended liveness signal.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Thin launcher: replace the shell with the Flask-based fake Docker API server
# so it runs as PID 1 and receives container signals directly.
set -e
exec python3 /opt/server.py

View File

@@ -0,0 +1,116 @@
#!/usr/bin/env python3
"""
Docker API server (honeypot).

Serves a fake Docker REST API on port 2375. Responds to common recon
endpoints (/version, /info, /containers/json, /images/json) with plausible
but fake data. Logs all requests as JSON.
"""
import json
import os

from flask import Flask, request

from syslog_bridge import syslog_line, write_syslog_file, forward_syslog

# Node identity and log-forwarding target, injected by the deployer.
NODE_NAME = os.environ.get("NODE_NAME", "docker-host")
SERVICE_NAME = "docker_api"
LOG_TARGET = os.environ.get("LOG_TARGET", "")

app = Flask(__name__)

# Canned response for /version — mimics Docker Engine 24.0.5.
_VERSION = {
    "Version": "24.0.5",
    "ApiVersion": "1.43",
    "MinAPIVersion": "1.12",
    "GitCommit": "ced0996",
    "GoVersion": "go1.20.6",
    "Os": "linux",
    "Arch": "amd64",
    "KernelVersion": "5.15.0-76-generic",
}
# Canned response for /info — obviously-fake daemon ID, plausible counters.
_INFO = {
    "ID": "FAKE:FAKE:FAKE:FAKE",
    "Containers": 3,
    "ContainersRunning": 3,
    "Images": 7,
    "Driver": "overlay2",
    "MemoryLimit": True,
    "SwapLimit": True,
    "KernelMemory": False,
    "Name": NODE_NAME,
    "DockerRootDir": "/var/lib/docker",
    "HttpProxy": "",
    "HttpsProxy": "",
    "NoProxy": "",
    "ServerVersion": "24.0.5",
}
# Canned response for /containers/json — one believable running container.
_CONTAINERS = [
    {
        "Id": "a1b2c3d4e5f6",
        "Names": ["/webapp"],
        "Image": "nginx:latest",
        "State": "running",
        "Status": "Up 3 days",
        "Ports": [{"IP": "0.0.0.0", "PrivatePort": 80, "PublicPort": 8080, "Type": "tcp"}], # nosec B104
    }
]
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one RFC 5424 event and emit it through the shared bridge helpers."""
    formatted = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(formatted)
    forward_syslog(formatted, LOG_TARGET)
@app.before_request
def log_request():
    """Log every inbound request before routing — the core recon telemetry."""
    details = {
        "method": request.method,
        "path": request.path,
        "remote_addr": request.remote_addr,
        # Truncate bodies so a large POST cannot bloat the log pipeline.
        "body": request.get_data(as_text=True)[:512],
    }
    _log("request", **details)
@app.route("/version")
@app.route("/<ver>/version")
def version(ver=None):
return app.response_class(json.dumps(_VERSION), mimetype="application/json")
@app.route("/info")
@app.route("/<ver>/info")
def info(ver=None):
return app.response_class(json.dumps(_INFO), mimetype="application/json")
@app.route("/containers/json")
@app.route("/<ver>/containers/json")
def containers(ver=None):
return app.response_class(json.dumps(_CONTAINERS), mimetype="application/json")
@app.route("/images/json")
@app.route("/<ver>/images/json")
def images(ver=None):
return app.response_class(json.dumps([]), mimetype="application/json")
@app.route("/", defaults={"path": ""})
@app.route("/<path:path>", methods=["GET", "POST", "PUT", "DELETE"])
def catch_all(path):
return app.response_class(
json.dumps({"message": "page not found", "response": 404}),
status=404,
mimetype="application/json",
)
if __name__ == "__main__":
_log("startup", msg=f"Docker API server starting as {NODE_NAME}")
app.run(host="0.0.0.0", port=2375, debug=False) # nosec B104

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16
_SD_ID = "relay@55555"
_NILVALUE = "-"
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
if not fields:
return _NILVALUE
params = " ".join(f'{k}="{_sd_escape(str(v))}"' for k, v in fields.items())
return f"[{_SD_ID} {params}]"
def syslog_line(
service: str,
hostname: str,
event_type: str,
severity: int = SEVERITY_INFO,
timestamp: datetime | None = None,
msg: str | None = None,
**fields: Any,
) -> str:
"""
Return a single RFC 5424-compliant syslog line (no trailing newline).
Args:
service: APP-NAME (e.g. "http", "mysql")
hostname: HOSTNAME (node name)
event_type: MSGID (e.g. "request", "login_attempt")
severity: Syslog severity integer (default: INFO=6)
timestamp: UTC datetime; defaults to now
msg: Optional free-text MSG
**fields: Encoded as structured data params
"""
pri = f"<{_FACILITY_LOCAL0 * 8 + severity}>"
ts = (timestamp or datetime.now(timezone.utc)).isoformat()
host = (hostname or _NILVALUE)[:_MAX_HOSTNAME]
appname = (service or _NILVALUE)[:_MAX_APPNAME]
msgid = (event_type or _NILVALUE)[:_MAX_MSGID]
sd = _sd_element(fields)
message = f" {msg}" if msg else ""
return f"{pri}1 {ts} {host} {appname} {_NILVALUE} {msgid} {sd}{message}"
def write_syslog_file(line: str) -> None:
"""Emit a syslog line to stdout for container log capture."""
print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
"""No-op stub. TCP forwarding is handled by rsyslog, not by service containers."""
pass

View File

@@ -0,0 +1,23 @@
# Elasticsearch emulator image: stdlib-only Python server listening on 9200.
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
RUN apt-get update && apt-get install -y --no-install-recommends \
python3 \
&& rm -rf /var/lib/apt/lists/*
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
EXPOSE 9200
# Run unprivileged; setcap lets python3 bind low ports should a service need one.
RUN useradd -r -s /bin/false -d /opt logrelay \
&& apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
&& rm -rf /var/lib/apt/lists/* \
&& (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# Liveness: succeed while PID 1 (the server launched by entrypoint.sh) is alive.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Fail fast, then exec so the Python server becomes PID 1 (signals + HEALTHCHECK).
set -e
exec python3 /opt/server.py

View File

@@ -0,0 +1,123 @@
#!/usr/bin/env python3
"""
Elasticsearch server — presents a convincing ES 7.x HTTP API on port 9200.
Logs all requests (especially recon probes like /_cat/, /_cluster/, /_nodes/)
as JSON. Designed to attract automated scanners and credential stuffers.
"""
import json
import os
from http.server import BaseHTTPRequestHandler, HTTPServer
from syslog_bridge import syslog_line, write_syslog_file, forward_syslog
NODE_NAME = os.environ.get("NODE_NAME", "esserver")  # reported as the node name in / responses and logs
SERVICE_NAME = "elasticsearch"
LOG_TARGET = os.environ.get("LOG_TARGET", "")  # handed to forward_syslog by _log
# Fixed identifiers so repeated probes see a consistent "cluster".
_CLUSTER_UUID = "xC3Pr9abTq2mNkOeLvXwYA"
_NODE_UUID = "dJH7Lm2sRqWvPn0kFiEtBo"
# Shape of a real ES 7.17.9 root document (GET /), served verbatim.
_ROOT_RESPONSE = {
    "name": NODE_NAME,
    "cluster_name": "elasticsearch",
    "cluster_uuid": _CLUSTER_UUID,
    "version": {
        "number": "7.17.9",
        "build_flavor": "default",
        "build_type": "docker",
        "build_hash": "ef48222227ee6b9e70e502f0f0daa52435ee634d",
        "build_date": "2023-01-31T05:34:43.305517834Z",
        "build_snapshot": False,
        "lucene_version": "8.11.1",
        "minimum_wire_compatibility_version": "6.8.0",
        "minimum_index_compatibility_version": "6.0.0-beta1",
    },
    "tagline": "You Know, for Search",
}
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one RFC 5424 record and emit it through the shared bridge."""
    record = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(record)
    forward_syslog(record, LOG_TARGET)
class ESHandler(BaseHTTPRequestHandler):
    """Minimal ES 7.x lookalike: canned JSON replies plus per-path event logging."""
    server_version = "elasticsearch"  # value used for the Server: header
    sys_version = ""  # drop the "Python/x.y" suffix from the Server: header
    def _send_json(self, code: int, data: dict | list) -> None:
        """Serialize *data* and send it with the headers real ES clients expect."""
        body = json.dumps(data).encode()
        self.send_response(code)
        self.send_header("Content-Type", "application/json; charset=UTF-8")
        self.send_header("Content-Length", str(len(body)))
        # Official Elastic clients check for this header before proceeding.
        self.send_header("X-elastic-product", "Elasticsearch")
        self.end_headers()
        self.wfile.write(body)
    def _read_body(self) -> str:
        """Read the declared request body; empty string when no Content-Length."""
        length = int(self.headers.get("Content-Length", 0))
        return self.rfile.read(length).decode(errors="replace") if length else ""
    def do_GET(self):
        """Classify the probe by path prefix, log it, and answer with canned JSON."""
        src = self.client_address[0]
        path = self.path.split("?")[0]  # classify on the path alone; log full self.path
        if path in ("/", ""):
            _log("root_probe", src=src, method="GET", path=self.path)
            self._send_json(200, _ROOT_RESPONSE)
        elif path.startswith("/_cat/"):
            _log("cat_api", src=src, method="GET", path=self.path)
            self._send_json(200, [])
        elif path.startswith("/_cluster/"):
            _log("cluster_recon", src=src, method="GET", path=self.path)
            self._send_json(200, {"cluster_name": "elasticsearch", "status": "green",
                "number_of_nodes": 3, "number_of_data_nodes": 3})
        elif path.startswith("/_nodes"):
            _log("nodes_recon", src=src, method="GET", path=self.path)
            self._send_json(200, {"_nodes": {"total": 3, "successful": 3, "failed": 0}, "nodes": {}})
        elif path.startswith("/_security/") or path.startswith("/_xpack/"):
            _log("security_probe", src=src, method="GET", path=self.path)
            self._send_json(200, {"enabled": True, "available": True})
        else:
            _log("request", src=src, method="GET", path=self.path)
            self._send_json(404, {"error": {"root_cause": [{"type": "index_not_found_exception",
                "reason": "no such index"}]}})
    def do_POST(self):
        """Log writes/searches with a body preview; always pretend success."""
        src = self.client_address[0]
        body = self._read_body()
        path = self.path.split("?")[0]
        _log("post_request", src=src, method="POST", path=self.path,
            body_preview=body[:300], user_agent=self.headers.get("User-Agent", ""))
        if "_search" in path or "_bulk" in path:
            self._send_json(200, {"took": 1, "timed_out": False, "hits": {"total": {"value": 0}, "hits": []}})
        else:
            self._send_json(200, {"result": "created", "_id": "1", "_index": "server"})
    def do_PUT(self):
        # Index-creation / doc-put attempts: acknowledge unconditionally.
        src = self.client_address[0]
        body = self._read_body()
        _log("put_request", src=src, method="PUT", path=self.path, body_preview=body[:300])
        self._send_json(200, {"acknowledged": True})
    def do_DELETE(self):
        src = self.client_address[0]
        _log("delete_request", src=src, method="DELETE", path=self.path)
        self._send_json(200, {"acknowledged": True})
    def do_HEAD(self):
        # NOTE(review): replies to HEAD include a body via _send_json; most
        # clients tolerate this, but it deviates from HTTP semantics.
        src = self.client_address[0]
        _log("head_request", src=src, method="HEAD", path=self.path)
        self._send_json(200, {})
    def log_message(self, fmt, *args):
        pass  # suppress default HTTP server logging
if __name__ == "__main__":
_log("startup", msg=f"Elasticsearch server starting as {NODE_NAME}")
server = HTTPServer(("0.0.0.0", 9200), ESHandler) # nosec B104
server.serve_forever()

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16
_SD_ID = "relay@55555"
_NILVALUE = "-"
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
if not fields:
return _NILVALUE
params = " ".join(f'{k}="{_sd_escape(str(v))}"' for k, v in fields.items())
return f"[{_SD_ID} {params}]"
def syslog_line(
service: str,
hostname: str,
event_type: str,
severity: int = SEVERITY_INFO,
timestamp: datetime | None = None,
msg: str | None = None,
**fields: Any,
) -> str:
"""
Return a single RFC 5424-compliant syslog line (no trailing newline).
Args:
service: APP-NAME (e.g. "http", "mysql")
hostname: HOSTNAME (node name)
event_type: MSGID (e.g. "request", "login_attempt")
severity: Syslog severity integer (default: INFO=6)
timestamp: UTC datetime; defaults to now
msg: Optional free-text MSG
**fields: Encoded as structured data params
"""
pri = f"<{_FACILITY_LOCAL0 * 8 + severity}>"
ts = (timestamp or datetime.now(timezone.utc)).isoformat()
host = (hostname or _NILVALUE)[:_MAX_HOSTNAME]
appname = (service or _NILVALUE)[:_MAX_APPNAME]
msgid = (event_type or _NILVALUE)[:_MAX_MSGID]
sd = _sd_element(fields)
message = f" {msg}" if msg else ""
return f"{pri}1 {ts} {host} {appname} {_NILVALUE} {msgid} {sd}{message}"
def write_syslog_file(line: str) -> None:
"""Emit a syslog line to stdout for container log capture."""
print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
"""No-op stub. TCP forwarding is handled by rsyslog, not by service containers."""
pass

View File

@@ -0,0 +1,26 @@
# FTP emulator image: Twisted-based server listening on 21.
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
RUN apt-get update && apt-get install -y --no-install-recommends \
python3 python3-pip \
&& rm -rf /var/lib/apt/lists/*
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN pip3 install --no-cache-dir twisted jinja2
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
EXPOSE 21
# Run unprivileged; setcap lets python3 bind the privileged FTP port (21).
RUN useradd -r -s /bin/false -d /opt logrelay \
&& apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
&& rm -rf /var/lib/apt/lists/* \
&& (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# Liveness: succeed while PID 1 (the server launched by entrypoint.sh) is alive.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Fail fast, then exec so the Python server becomes PID 1 (signals + HEALTHCHECK).
set -e
exec python3 /opt/server.py

View File

@@ -0,0 +1,75 @@
#!/usr/bin/env python3
"""
FTP server using Twisted's FTP server infrastructure.
Accepts any credentials, logs all commands and file requests,
forwards events as JSON to LOG_TARGET if set.
"""
import os
from pathlib import Path
from twisted.internet import defer, reactor
from twisted.protocols.ftp import FTP, FTPFactory, FTPAnonymousShell
from twisted.python.filepath import FilePath
from twisted.python import log as twisted_log
from syslog_bridge import syslog_line, write_syslog_file, forward_syslog
NODE_NAME = os.environ.get("NODE_NAME", "ftpserver")  # identity used in every log line
SERVICE_NAME = "ftp"
LOG_TARGET = os.environ.get("LOG_TARGET", "")  # handed to forward_syslog by _log
PORT = int(os.environ.get("PORT", "21"))
BANNER = os.environ.get("FTP_BANNER", "220 (vsFTPd 3.0.3)")  # greeting used as the factory welcomeMessage
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one RFC 5424 record and emit it through the shared bridge."""
    record = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(record)
    forward_syslog(record, LOG_TARGET)
def _setup_bait_fs() -> str:
    """Create the decoy FTP root under /tmp (idempotent) and return its path."""
    root = Path("/tmp/ftp_bait")
    root.mkdir(parents=True, exist_ok=True)
    # A minimal gzip header so the archive looks real to a quick inspection.
    (root / "backup.tar.gz").write_bytes(b"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00")
    text_bait = {
        "db_dump.sql": "CREATE TABLE users (id INT, username VARCHAR(50), password VARCHAR(50));\nINSERT INTO users VALUES (1, 'admin', 'pbkdf2:sha256:5000$...');\n",
        "config.ini": "[database]\nuser = dbadmin\npassword = db_super_admin_pass_!\nhost = localhost\n",
        "credentials.txt": "admin:super_secret_admin_pw\nroot:toor\nalice:wonderland\n",
    }
    for filename, contents in text_bait.items():
        (root / filename).write_text(contents)
    return str(root)
class ServerFTP(FTP):
    """Twisted FTP protocol that accepts any credentials and logs every step."""
    def connectionMade(self):
        peer = self.transport.getPeer()
        _log("connection", src_ip=peer.host, src_port=peer.port)
        super().connectionMade()
    def ftp_USER(self, username):
        # Stash the username so ftp_PASS can pair it with the password attempt.
        self._server_user = username
        _log("user", username=username)
        return super().ftp_USER(username)
    def ftp_PASS(self, password):
        _log("auth_attempt", username=getattr(self, "_server_user", "?"), password=password)
        # Accept everything — we're a honeypot server
        self.state = self.AUTHED
        self._user = getattr(self, "_server_user", "anonymous")
        # Every login lands in the same bait tree built under /tmp.
        self.shell = FTPAnonymousShell(FilePath(_setup_bait_fs()))
        return defer.succeed((230, "Login successful."))
    def ftp_RETR(self, path):
        # Downloads are the interesting signal; record the path, then serve it.
        _log("download_attempt", path=path)
        return super().ftp_RETR(path)
    def connectionLost(self, reason):
        peer = self.transport.getPeer()
        _log("disconnect", src_ip=peer.host, src_port=peer.port)
        super().connectionLost(reason)
class ServerFTPFactory(FTPFactory):
    """Factory wiring the logging protocol to the configurable vsFTPd banner."""
    protocol = ServerFTP
    welcomeMessage = BANNER
if __name__ == "__main__":
twisted_log.startLoggingWithObserver(lambda e: None, setStdout=False)
_log("startup", msg=f"FTP server starting as {NODE_NAME} on port {PORT}")
reactor.listenTCP(PORT, ServerFTPFactory())
reactor.run()

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16
_SD_ID = "relay@55555"
_NILVALUE = "-"
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
if not fields:
return _NILVALUE
params = " ".join(f'{k}="{_sd_escape(str(v))}"' for k, v in fields.items())
return f"[{_SD_ID} {params}]"
def syslog_line(
service: str,
hostname: str,
event_type: str,
severity: int = SEVERITY_INFO,
timestamp: datetime | None = None,
msg: str | None = None,
**fields: Any,
) -> str:
"""
Return a single RFC 5424-compliant syslog line (no trailing newline).
Args:
service: APP-NAME (e.g. "http", "mysql")
hostname: HOSTNAME (node name)
event_type: MSGID (e.g. "request", "login_attempt")
severity: Syslog severity integer (default: INFO=6)
timestamp: UTC datetime; defaults to now
msg: Optional free-text MSG
**fields: Encoded as structured data params
"""
pri = f"<{_FACILITY_LOCAL0 * 8 + severity}>"
ts = (timestamp or datetime.now(timezone.utc)).isoformat()
host = (hostname or _NILVALUE)[:_MAX_HOSTNAME]
appname = (service or _NILVALUE)[:_MAX_APPNAME]
msgid = (event_type or _NILVALUE)[:_MAX_MSGID]
sd = _sd_element(fields)
message = f" {msg}" if msg else ""
return f"{pri}1 {ts} {host} {appname} {_NILVALUE} {msgid} {sd}{message}"
def write_syslog_file(line: str) -> None:
"""Emit a syslog line to stdout for container log capture."""
print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
"""No-op stub. TCP forwarding is handled by rsyslog, not by service containers."""
pass

View File

@@ -0,0 +1,26 @@
# HTTP emulator image: Flask-based server listening on 80 (443 unused here).
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
RUN apt-get update && apt-get install -y --no-install-recommends \
python3 python3-pip \
&& rm -rf /var/lib/apt/lists/*
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN pip3 install --no-cache-dir flask jinja2
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
EXPOSE 80 443
# Run unprivileged; setcap lets python3 bind the privileged HTTP port (80).
RUN useradd -r -s /bin/false -d /opt logrelay \
&& apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
&& rm -rf /var/lib/apt/lists/* \
&& (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# Liveness: succeed while PID 1 (the server launched by entrypoint.sh) is alive.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Fail fast, then exec so the Python server becomes PID 1 (signals + HEALTHCHECK).
set -e
exec python3 /opt/server.py

View File

@@ -0,0 +1,127 @@
#!/usr/bin/env python3
"""
HTTP service emulator using Flask.
Accepts all requests, logs every detail (method, path, headers, body),
and responds with configurable pages. Forwards events as JSON to LOG_TARGET if set.
"""
import json
import logging
import os
from pathlib import Path
from flask import Flask, request, send_from_directory
from werkzeug.serving import make_server, WSGIRequestHandler
from syslog_bridge import syslog_line, write_syslog_file, forward_syslog
logging.getLogger("werkzeug").setLevel(logging.ERROR)
NODE_NAME = os.environ.get("NODE_NAME", "webserver")
SERVICE_NAME = "http"
LOG_TARGET = os.environ.get("LOG_TARGET", "")
PORT = int(os.environ.get("PORT", "80"))
SERVER_HEADER = os.environ.get("SERVER_HEADER", "Apache/2.4.54 (Debian)")
RESPONSE_CODE = int(os.environ.get("RESPONSE_CODE", "403"))
FAKE_APP = os.environ.get("FAKE_APP", "")
EXTRA_HEADERS = json.loads(os.environ.get("EXTRA_HEADERS", "{}"))
CUSTOM_BODY = os.environ.get("CUSTOM_BODY", "")
FILES_DIR = os.environ.get("FILES_DIR", "")
_FAKE_APP_BODIES: dict[str, str] = {
"apache_default": (
"<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">\n"
"<html><head><title>Apache2 Debian Default Page</title></head>\n"
"<body><h1>Apache2 Debian Default Page</h1>\n"
"<p>It works!</p></body></html>"
),
"nginx_default": (
"<!DOCTYPE html><html><head><title>Welcome to nginx!</title></head>\n"
"<body><h1>Welcome to nginx!</h1>\n"
"<p>If you see this page, the nginx web server is successfully installed.</p>\n"
"</body></html>"
),
"wordpress": (
"<!DOCTYPE html><html><head><title>WordPress &rsaquo; Error</title></head>\n"
"<body id=\"error-page\"><div class=\"wp-die-message\">\n"
"<h1>Error establishing a database connection</h1></div></body></html>"
),
"phpmyadmin": (
"<!DOCTYPE html><html><head><title>phpMyAdmin</title></head>\n"
"<body><form method=\"post\" action=\"index.php\">\n"
"<input type=\"text\" name=\"pma_username\" />\n"
"<input type=\"password\" name=\"pma_password\" />\n"
"<input type=\"submit\" value=\"Go\" /></form></body></html>"
),
"iis_default": (
"<!DOCTYPE html><html><head><title>IIS Windows Server</title></head>\n"
"<body><h1>IIS Windows Server</h1>\n"
"<p>Welcome to Internet Information Services</p></body></html>"
),
}
app = Flask(__name__)
@app.after_request
def _fix_server_header(response):
    # Overwrite whatever the stack set so every reply advertises SERVER_HEADER.
    response.headers["Server"] = SERVER_HEADER
    return response
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one RFC 5424 record and emit it through the shared bridge."""
    record = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(record)
    forward_syslog(record, LOG_TARGET)
@app.before_request
def log_request():
    """Capture method, path, origin, JSON-encoded headers and a body preview."""
    details = {
        "method": request.method,
        "path": request.path,
        "remote_addr": request.remote_addr,
        "headers": json.dumps(dict(request.headers)),
        "body": request.get_data(as_text=True)[:512],
    }
    _log("request", **details)
@app.route("/", defaults={"path": ""})
@app.route("/<path:path>", methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS", "HEAD"])
def catch_all(path):
# Serve static files directory if configured
if FILES_DIR and path:
files_path = Path(FILES_DIR) / path
if files_path.is_file():
return send_from_directory(FILES_DIR, path)
# Select response body: custom > fake_app preset > default 403
if CUSTOM_BODY:
body = CUSTOM_BODY
elif FAKE_APP and FAKE_APP in _FAKE_APP_BODIES:
body = _FAKE_APP_BODIES[FAKE_APP]
else:
body = (
"<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">\n"
"<html><head>\n"
"<title>403 Forbidden</title>\n"
"</head><body>\n"
"<h1>Forbidden</h1>\n"
"<p>You don't have permission to access this resource.</p>\n"
"<hr>\n"
f"<address>{SERVER_HEADER} Server at {NODE_NAME} Port 80</address>\n"
"</body></html>\n"
)
headers = {"Content-Type": "text/html", **EXTRA_HEADERS}
return body, RESPONSE_CODE, headers
class _SilentHandler(WSGIRequestHandler):
    """Suppress Werkzeug's Server header so Flask's after_request is the sole source."""
    def version_string(self) -> str:
        # Empty string stops the handler from emitting its own Server: value.
        return ""
if __name__ == "__main__":
_log("startup", msg=f"HTTP server starting as {NODE_NAME}")
srv = make_server("0.0.0.0", PORT, app, request_handler=_SilentHandler) # nosec B104
srv.serve_forever()

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16
_SD_ID = "relay@55555"
_NILVALUE = "-"
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
if not fields:
return _NILVALUE
params = " ".join(f'{k}="{_sd_escape(str(v))}"' for k, v in fields.items())
return f"[{_SD_ID} {params}]"
def syslog_line(
service: str,
hostname: str,
event_type: str,
severity: int = SEVERITY_INFO,
timestamp: datetime | None = None,
msg: str | None = None,
**fields: Any,
) -> str:
"""
Return a single RFC 5424-compliant syslog line (no trailing newline).
Args:
service: APP-NAME (e.g. "http", "mysql")
hostname: HOSTNAME (node name)
event_type: MSGID (e.g. "request", "login_attempt")
severity: Syslog severity integer (default: INFO=6)
timestamp: UTC datetime; defaults to now
msg: Optional free-text MSG
**fields: Encoded as structured data params
"""
pri = f"<{_FACILITY_LOCAL0 * 8 + severity}>"
ts = (timestamp or datetime.now(timezone.utc)).isoformat()
host = (hostname or _NILVALUE)[:_MAX_HOSTNAME]
appname = (service or _NILVALUE)[:_MAX_APPNAME]
msgid = (event_type or _NILVALUE)[:_MAX_MSGID]
sd = _sd_element(fields)
message = f" {msg}" if msg else ""
return f"{pri}1 {ts} {host} {appname} {_NILVALUE} {msgid} {sd}{message}"
def write_syslog_file(line: str) -> None:
"""Emit a syslog line to stdout for container log capture."""
print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
"""No-op stub. TCP forwarding is handled by rsyslog, not by service containers."""
pass

View File

@@ -0,0 +1,29 @@
# HTTPS emulator image: Flask + TLS on 443; cert material generated at start.
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
RUN apt-get update && apt-get install -y --no-install-recommends \
python3 python3-pip openssl \
&& rm -rf /var/lib/apt/lists/*
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN pip3 install --no-cache-dir flask jinja2
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
# entrypoint.sh writes the self-signed cert/key here when none is mounted.
RUN mkdir -p /opt/tls
EXPOSE 443
# Run unprivileged; logrelay owns /opt/tls so cert generation works at runtime.
RUN useradd -r -s /bin/false -d /opt logrelay \
&& chown -R logrelay:logrelay /opt/tls \
&& apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
&& rm -rf /var/lib/apt/lists/* \
&& (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# Liveness: succeed while PID 1 (the server launched by entrypoint.sh) is alive.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,18 @@
#!/bin/bash
set -e
TLS_DIR="/opt/tls"
# Explicit TLS_CERT/TLS_KEY env vars win over the default on-disk locations.
CERT="${TLS_CERT:-$TLS_DIR/cert.pem}"
KEY="${TLS_KEY:-$TLS_DIR/key.pem}"
# Generate a self-signed certificate if none exists
if [ ! -f "$CERT" ] || [ ! -f "$KEY" ]; then
mkdir -p "$TLS_DIR"
CN="${TLS_CN:-${NODE_NAME:-localhost}}"
openssl req -x509 -newkey rsa:2048 -nodes \
-keyout "$KEY" -out "$CERT" \
-days 3650 -subj "/CN=$CN" \
2>/dev/null
fi
# Hand PID 1 to the Python server so signals and the HEALTHCHECK work.
exec python3 /opt/server.py

View File

@@ -0,0 +1,136 @@
#!/usr/bin/env python3
"""
HTTPS service emulator using Flask + TLS.
Identical to the HTTP honeypot but wrapped in TLS. Accepts all requests,
logs every detail (method, path, headers, body, TLS info), and responds
with configurable pages. Forwards events as JSON to LOG_TARGET if set.
"""
import json
import logging
import os
import ssl
from pathlib import Path
from flask import Flask, request, send_from_directory
from werkzeug.serving import make_server, WSGIRequestHandler
from syslog_bridge import syslog_line, write_syslog_file, forward_syslog
logging.getLogger("werkzeug").setLevel(logging.ERROR)
NODE_NAME = os.environ.get("NODE_NAME", "webserver")
SERVICE_NAME = "https"
LOG_TARGET = os.environ.get("LOG_TARGET", "")
PORT = int(os.environ.get("PORT", "443"))
SERVER_HEADER = os.environ.get("SERVER_HEADER", "Apache/2.4.54 (Debian)")
RESPONSE_CODE = int(os.environ.get("RESPONSE_CODE", "403"))
FAKE_APP = os.environ.get("FAKE_APP", "")
EXTRA_HEADERS = json.loads(os.environ.get("EXTRA_HEADERS", "{}"))
CUSTOM_BODY = os.environ.get("CUSTOM_BODY", "")
FILES_DIR = os.environ.get("FILES_DIR", "")
TLS_CERT = os.environ.get("TLS_CERT", "/opt/tls/cert.pem")
TLS_KEY = os.environ.get("TLS_KEY", "/opt/tls/key.pem")
_FAKE_APP_BODIES: dict[str, str] = {
"apache_default": (
"<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">\n"
"<html><head><title>Apache2 Debian Default Page</title></head>\n"
"<body><h1>Apache2 Debian Default Page</h1>\n"
"<p>It works!</p></body></html>"
),
"nginx_default": (
"<!DOCTYPE html><html><head><title>Welcome to nginx!</title></head>\n"
"<body><h1>Welcome to nginx!</h1>\n"
"<p>If you see this page, the nginx web server is successfully installed.</p>\n"
"</body></html>"
),
"wordpress": (
"<!DOCTYPE html><html><head><title>WordPress &rsaquo; Error</title></head>\n"
"<body id=\"error-page\"><div class=\"wp-die-message\">\n"
"<h1>Error establishing a database connection</h1></div></body></html>"
),
"phpmyadmin": (
"<!DOCTYPE html><html><head><title>phpMyAdmin</title></head>\n"
"<body><form method=\"post\" action=\"index.php\">\n"
"<input type=\"text\" name=\"pma_username\" />\n"
"<input type=\"password\" name=\"pma_password\" />\n"
"<input type=\"submit\" value=\"Go\" /></form></body></html>"
),
"iis_default": (
"<!DOCTYPE html><html><head><title>IIS Windows Server</title></head>\n"
"<body><h1>IIS Windows Server</h1>\n"
"<p>Welcome to Internet Information Services</p></body></html>"
),
}
app = Flask(__name__)
@app.after_request
def _fix_server_header(response):
    # Overwrite whatever the stack set so every reply advertises SERVER_HEADER.
    response.headers["Server"] = SERVER_HEADER
    return response
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one RFC 5424 record and emit it through the shared bridge."""
    record = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(record)
    forward_syslog(record, LOG_TARGET)
@app.before_request
def log_request():
    """Record every inbound request before routing.

    Headers are JSON-encoded, matching the HTTP emulator's log_request, so the
    syslog SD-PARAM carries a parseable string instead of a Python dict repr.
    """
    _log(
        "request",
        method=request.method,
        path=request.path,
        remote_addr=request.remote_addr,
        headers=json.dumps(dict(request.headers)),
        body=request.get_data(as_text=True)[:512],
    )
@app.route("/", defaults={"path": ""})
@app.route("/<path:path>", methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS", "HEAD"])
def catch_all(path):
# Serve static files directory if configured
if FILES_DIR and path:
files_path = Path(FILES_DIR) / path
if files_path.is_file():
return send_from_directory(FILES_DIR, path)
# Select response body: custom > fake_app preset > default 403
if CUSTOM_BODY:
body = CUSTOM_BODY
elif FAKE_APP and FAKE_APP in _FAKE_APP_BODIES:
body = _FAKE_APP_BODIES[FAKE_APP]
else:
body = (
"<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">\n"
"<html><head>\n"
"<title>403 Forbidden</title>\n"
"</head><body>\n"
"<h1>Forbidden</h1>\n"
"<p>You don't have permission to access this resource.</p>\n"
"<hr>\n"
f"<address>{SERVER_HEADER} Server at {NODE_NAME} Port 443</address>\n"
"</body></html>\n"
)
headers = {"Content-Type": "text/html", **EXTRA_HEADERS}
return body, RESPONSE_CODE, headers
class _SilentHandler(WSGIRequestHandler):
    """Suppress Werkzeug's Server header so Flask's after_request is the sole source."""
    def version_string(self) -> str:
        # Empty string stops the handler from emitting its own Server: value.
        return ""
if __name__ == "__main__":
_log("startup", msg=f"HTTPS server starting as {NODE_NAME}")
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ctx.load_cert_chain(TLS_CERT, TLS_KEY)
srv = make_server("0.0.0.0", PORT, app, request_handler=_SilentHandler) # nosec B104
srv.socket = ctx.wrap_socket(srv.socket, server_side=True)
srv.serve_forever()

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16
_SD_ID = "relay@55555"
_NILVALUE = "-"
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
if not fields:
return _NILVALUE
params = " ".join(f'{k}="{_sd_escape(str(v))}"' for k, v in fields.items())
return f"[{_SD_ID} {params}]"
def syslog_line(
service: str,
hostname: str,
event_type: str,
severity: int = SEVERITY_INFO,
timestamp: datetime | None = None,
msg: str | None = None,
**fields: Any,
) -> str:
"""
Return a single RFC 5424-compliant syslog line (no trailing newline).
Args:
service: APP-NAME (e.g. "http", "mysql")
hostname: HOSTNAME (node name)
event_type: MSGID (e.g. "request", "login_attempt")
severity: Syslog severity integer (default: INFO=6)
timestamp: UTC datetime; defaults to now
msg: Optional free-text MSG
**fields: Encoded as structured data params
"""
pri = f"<{_FACILITY_LOCAL0 * 8 + severity}>"
ts = (timestamp or datetime.now(timezone.utc)).isoformat()
host = (hostname or _NILVALUE)[:_MAX_HOSTNAME]
appname = (service or _NILVALUE)[:_MAX_APPNAME]
msgid = (event_type or _NILVALUE)[:_MAX_MSGID]
sd = _sd_element(fields)
message = f" {msg}" if msg else ""
return f"{pri}1 {ts} {host} {appname} {_NILVALUE} {msgid} {sd}{message}"
def write_syslog_file(line: str) -> None:
    """Write one formatted syslog line to stdout, flushing immediately so
    the container runtime's log collector sees it without buffering delay."""
    print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
    """Deliberate no-op: TCP forwarding is rsyslog's responsibility on the
    host side, so service containers never open network connections here."""
    return None

View File

@@ -0,0 +1,23 @@
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
# Minimal runtime: python3 only; apt lists removed to keep the layer small.
RUN apt-get update && apt-get install -y --no-install-recommends \
python3 \
&& rm -rf /var/lib/apt/lists/*
# Service code plus the shared syslog helper imported by server.py.
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
# IMAP (143) and IMAPS (993).
EXPOSE 143 993
# Run unprivileged: cap_net_bind_service lets python3 bind ports <1024 as a
# non-root user; the `|| true` tolerates environments where setcap fails.
RUN useradd -r -s /bin/false -d /opt logrelay \
&& apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
&& rm -rf /var/lib/apt/lists/* \
&& (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# PID 1 is the exec'd python server (see entrypoint.sh); kill -0 probes liveness.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Fail fast on errors; exec replaces the shell so the python server runs as
# PID 1 and receives container stop signals directly.
set -e
exec python3 /opt/server.py

View File

@@ -0,0 +1,541 @@
#!/usr/bin/env python3
"""
IMAP server (port 143).
Full IMAP4rev1 state machine with bait mailbox.
States: NOT_AUTHENTICATED → AUTHENTICATED → SELECTED
Credentials via IMAP_USERS env var ("user:pass,user2:pass2").
10 bait emails in INBOX containing AWS keys, DB passwords, tokens etc.
Banner advertises Dovecot so nmap fingerprints correctly.
"""
import asyncio
import os
from syslog_bridge import SEVERITY_WARNING, syslog_line, write_syslog_file, forward_syslog
NODE_NAME = os.environ.get("NODE_NAME", "mailserver")
SERVICE_NAME = "imap"
LOG_TARGET = os.environ.get("LOG_TARGET", "")
PORT = int(os.environ.get("PORT", "143"))
IMAP_BANNER = os.environ.get("IMAP_BANNER", "* OK Dovecot ready.\r\n")
_RAW_USERS = os.environ.get("IMAP_USERS", "admin:admin123,root:toor,mail:mail,user:user")
VALID_USERS: dict[str, str] = {
u: p for part in _RAW_USERS.split(",") if ":" in part for u, p in [part.split(":", 1)]
}
# DEBT-026: path to a JSON file with custom email definitions.
# When set, _BAIT_EMAILS should be replaced/extended from that file.
# Wiring (service_cfg["email_seed"] → compose_fragment → env var → here) is deferred.
_EMAIL_SEED_PATH = os.environ.get("IMAP_EMAIL_SEED", "") # stub — currently unused
# ── Bait emails ───────────────────────────────────────────────────────────────
# All 10 live in INBOX. UID == sequence number.
_BAIT_EMAILS: list[dict] = [
{
"uid": 1, "flags": [r"\Seen"],
"from_name": "DevOps Team", "from_addr": "devops@company.internal",
"to_addr": "admin@company.internal",
"subject": "AWS credentials rotation",
"date": "Mon, 06 Nov 2023 09:12:33 +0000",
"body": (
"Date: Mon, 06 Nov 2023 09:12:33 +0000\r\n"
"From: DevOps Team <devops@company.internal>\r\n"
"To: admin@company.internal\r\n"
"Subject: AWS credentials rotation\r\n"
"Message-ID: <1@company.internal>\r\n"
"\r\n"
"Team,\r\n\r\n"
"New AWS credentials have been issued. Old keys deactivated.\r\n\r\n"
"Access Key ID: AKIAIOSFODNN7EXAMPLE\r\n"
"Secret Access Key: wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY\r\n\r\n"
"Update ~/.aws/credentials immediately.\r\n\r\n-- DevOps\r\n"
),
},
{
"uid": 2, "flags": [r"\Seen"],
"from_name": "Monitoring", "from_addr": "monitoring@company.internal",
"to_addr": "admin@company.internal",
"subject": "DB password changed",
"date": "Tue, 07 Nov 2023 14:05:11 +0000",
"body": (
"Date: Tue, 07 Nov 2023 14:05:11 +0000\r\n"
"From: Monitoring <monitoring@company.internal>\r\n"
"To: admin@company.internal\r\n"
"Subject: DB password changed\r\n"
"Message-ID: <2@company.internal>\r\n"
"\r\n"
"Production database password was rotated.\r\n\r\n"
"Connection string: mysql://admin:Sup3rS3cr3t!@10.0.1.5:3306/production\r\n\r\n"
"Update all app configs.\r\n"
),
},
{
"uid": 3, "flags": [],
"from_name": "GitHub", "from_addr": "noreply@github.com",
"to_addr": "admin@company.internal",
"subject": "Your personal access token",
"date": "Wed, 08 Nov 2023 08:30:00 +0000",
"body": (
"Date: Wed, 08 Nov 2023 08:30:00 +0000\r\n"
"From: GitHub <noreply@github.com>\r\n"
"To: admin@company.internal\r\n"
"Subject: Your personal access token\r\n"
"Message-ID: <3@company.internal>\r\n"
"\r\n"
"Hi admin,\r\n\r\n"
"A new personal access token was created for your account.\r\n\r\n"
"Token: ghp_16C7e42F292c6912E7710c838347Ae178B4a\r\n\r\n"
"If this wasn't you, revoke it immediately at github.com/settings/tokens.\r\n"
),
},
{
"uid": 4, "flags": [r"\Seen"],
"from_name": "IT Admin", "from_addr": "admin@company.internal",
"to_addr": "team@company.internal",
"subject": "VPN config attached",
"date": "Thu, 09 Nov 2023 11:22:47 +0000",
"body": (
"Date: Thu, 09 Nov 2023 11:22:47 +0000\r\n"
"From: IT Admin <admin@company.internal>\r\n"
"To: team@company.internal\r\n"
"Subject: VPN config attached\r\n"
"Message-ID: <4@company.internal>\r\n"
"\r\n"
"VPN access details for new starters:\r\n\r\n"
" Host: vpn.company.internal:1194\r\n"
" Protocol: UDP\r\n"
" Username: vpnadmin\r\n"
" Password: VpnP@ss2024\r\n\r\n"
"Config file sent separately via secure channel.\r\n"
),
},
{
"uid": 5, "flags": [],
"from_name": "SysAdmin", "from_addr": "sysadmin@company.internal",
"to_addr": "admin@company.internal",
"subject": "Root password",
"date": "Fri, 10 Nov 2023 16:45:00 +0000",
"body": (
"Date: Fri, 10 Nov 2023 16:45:00 +0000\r\n"
"From: SysAdmin <sysadmin@company.internal>\r\n"
"To: admin@company.internal\r\n"
"Subject: Root password\r\n"
"Message-ID: <5@company.internal>\r\n"
"\r\n"
"New root password for prod servers:\r\n\r\n"
" r00tM3T00!\r\n\r\n"
"Change after first login. Do NOT forward this email.\r\n"
),
},
{
"uid": 6, "flags": [r"\Seen"],
"from_name": "Backup System", "from_addr": "backup@company.internal",
"to_addr": "admin@company.internal",
"subject": "Backup job failed",
"date": "Sat, 11 Nov 2023 03:12:04 +0000",
"body": (
"Date: Sat, 11 Nov 2023 03:12:04 +0000\r\n"
"From: Backup System <backup@company.internal>\r\n"
"To: admin@company.internal\r\n"
"Subject: Backup job failed\r\n"
"Message-ID: <6@company.internal>\r\n"
"\r\n"
"Nightly backup to 192.168.1.50:/mnt/nas FAILED at 03:11 UTC.\r\n\r\n"
"Error: Authentication failed. Credentials in /etc/backup.conf may be stale.\r\n\r\n"
"Last successful backup: 2023-11-10 03:11 UTC\r\n"
),
},
{
"uid": 7, "flags": [r"\Seen"],
"from_name": "Security Alerts", "from_addr": "alerts@company.internal",
"to_addr": "admin@company.internal",
"subject": "SSH brute-force alert",
"date": "Sun, 12 Nov 2023 07:04:31 +0000",
"body": (
"Date: Sun, 12 Nov 2023 07:04:31 +0000\r\n"
"From: Security Alerts <alerts@company.internal>\r\n"
"To: admin@company.internal\r\n"
"Subject: SSH brute-force alert\r\n"
"Message-ID: <7@company.internal>\r\n"
"\r\n"
"47 failed SSH login attempts detected against prod-web-01.\r\n\r\n"
"Source IPs: 185.220.101.34, 185.220.101.47, 185.220.101.52\r\n"
"Target user: root\r\n"
"Period: 2023-11-12 06:58 07:04 UTC\r\n\r\n"
"All attempts blocked by fail2ban. No successful logins.\r\n"
),
},
{
"uid": 8, "flags": [r"\Seen"],
"from_name": "External Vendor", "from_addr": "vendor@external.com",
"to_addr": "admin@company.internal",
"subject": "RE: API integration",
"date": "Mon, 13 Nov 2023 10:11:55 +0000",
"body": (
"Date: Mon, 13 Nov 2023 10:11:55 +0000\r\n"
"From: External Vendor <vendor@external.com>\r\n"
"To: admin@company.internal\r\n"
"Subject: RE: API integration\r\n"
"Message-ID: <8@company.internal>\r\n"
"\r\n"
"Hi,\r\n\r\n"
"Here is the live API key for the integration:\r\n\r\n"
" sk_live_9mK3xF2aP7qR1bN8cT4dW6vE0yU5hJ\r\n\r\n"
"Keep this confidential. Let me know if you need the webhook secret.\r\n\r\n"
"Best regards,\r\nVendor Support\r\n"
),
},
{
"uid": 9, "flags": [],
"from_name": "Help Desk", "from_addr": "helpdesk@company.internal",
"to_addr": "admin@company.internal",
"subject": "Password reset request",
"date": "Tue, 14 Nov 2023 13:48:22 +0000",
"body": (
"Date: Tue, 14 Nov 2023 13:48:22 +0000\r\n"
"From: Help Desk <helpdesk@company.internal>\r\n"
"To: admin@company.internal\r\n"
"Subject: Password reset request\r\n"
"Message-ID: <9@company.internal>\r\n"
"\r\n"
"Hi,\r\n\r\n"
"Could you reset my MFA? Current password is Winter2024! so you can verify it's me.\r\n\r\n"
"Thanks\r\n"
),
},
{
"uid": 10, "flags": [r"\Seen"],
"from_name": "AWS Billing", "from_addr": "noreply@aws.amazon.com",
"to_addr": "admin@company.internal",
"subject": "Your AWS bill is ready",
"date": "Wed, 15 Nov 2023 00:01:00 +0000",
"body": (
"Date: Wed, 15 Nov 2023 00:01:00 +0000\r\n"
"From: AWS Billing <noreply@aws.amazon.com>\r\n"
"To: admin@company.internal\r\n"
"Subject: Your AWS bill is ready\r\n"
"Message-ID: <10@company.internal>\r\n"
"\r\n"
"Your AWS bill for October 2023 is $847.23.\r\n\r\n"
"Top services:\r\n"
" EC2 (us-east-1): $412.10\r\n"
" RDS (us-east-1): $198.50\r\n"
" S3: $87.43\r\n"
" EC2 (eu-west-2): $149.20\r\n\r\n"
"Account ID: 123456789012\r\n"
),
},
]
_MAILBOXES = ["INBOX", "Sent", "Drafts", "Archive"]
# ── Logging ───────────────────────────────────────────────────────────────────
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one syslog event for this node/service and emit it to stdout,
    then hand it to the (no-op) forwarder."""
    formatted = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(formatted)
    forward_syslog(formatted, LOG_TARGET)
# ── FETCH helpers ─────────────────────────────────────────────────────────────
def _parse_seq_range(range_str: str, total: int) -> list[int]:
"""Parse IMAP sequence set ('1', '1:3', '1:*', '*') → list of 1-based indices."""
result = []
for part in range_str.split(","):
part = part.strip()
if ":" in part:
lo_s, hi_s = part.split(":", 1)
lo = total if lo_s == "*" else int(lo_s)
hi = total if hi_s == "*" else int(hi_s)
result.extend(range(min(lo, hi), max(lo, hi) + 1))
elif part == "*":
result.append(total)
else:
result.append(int(part))
return [n for n in result if 1 <= n <= total]
def _parse_fetch_items(items_str: str) -> list[str]:
"""Parse '(FLAGS ENVELOPE)' or 'BODY[]' → list of item name strings."""
s = items_str.strip()
if s.startswith("(") and s.endswith(")"):
s = s[1:-1]
tokens, i = [], 0
while i < len(s):
if s[i] == " ":
i += 1
continue
j, depth = i, 0
while j < len(s):
if s[j] == "[":
depth += 1
elif s[j] == "]":
depth -= 1
elif s[j] == " " and depth == 0:
break
j += 1
tokens.append(s[i:j].upper())
i = j
return tokens
def _envelope(msg: dict) -> str:
    """Build a minimal RFC 3501 ENVELOPE parenthesized list for one message."""
    def addr(display: str, mailbox: str) -> str:
        # One address structure: (name NIL mailbox host)
        local, _, domain = mailbox.partition("@")
        quoted_name = display.replace('"', '\\"')
        return f'("{quoted_name}" NIL "{local}" "{domain}")'

    sender = addr(msg["from_name"], msg["from_addr"])
    recipient = addr("", msg["to_addr"])
    subject = msg["subject"].replace('"', '\\"')
    # date / subject / from / sender / reply-to / to / cc / bcc / in-reply-to / message-id
    return (
        f'("{msg["date"]}" "{subject}" '
        f'({sender}) ({sender}) ({sender}) '
        f'({recipient}) NIL NIL NIL "<{msg["uid"]}@{NODE_NAME}>")'
    )
def _build_fetch_response(seq: int, msg: dict, items: list[str]) -> bytes:
"""Build the bytes for a single '* N FETCH (...)' response."""
non_literal: list[str] = []
literal_name: str | None = None
literal_raw: bytes | None = None
for item in items:
norm = item.upper()
if norm == "FLAGS":
flags = " ".join(msg["flags"]) if msg["flags"] else ""
non_literal.append(f"FLAGS ({flags})")
elif norm == "ENVELOPE":
non_literal.append(f"ENVELOPE {_envelope(msg)}")
elif norm == "RFC822.SIZE":
non_literal.append(f"RFC822.SIZE {len(msg['body'].encode())}")
elif norm in ("UID",):
non_literal.append(f"UID {msg['uid']}")
elif norm in ("BODY[]", "RFC822", "BODY[TEXT]", "BODY.PEEK[]"):
literal_name = "BODY[]"
literal_raw = msg["body"].encode()
elif norm in ("BODY[HEADER]", "BODY.PEEK[HEADER]"):
header_part = msg["body"].split("\r\n\r\n", 1)[0] + "\r\n\r\n"
literal_name = "BODY[HEADER]"
literal_raw = header_part.encode()
# unknown items silently ignored
if literal_raw is not None:
prefix_str = (" ".join(non_literal) + " ") if non_literal else ""
header = f"* {seq} FETCH ({prefix_str}{literal_name} {{{len(literal_raw)}}}\r\n".encode()
return header + literal_raw + b")\r\n"
else:
return f"* {seq} FETCH ({' '.join(non_literal)})\r\n".encode()
# ── Protocol ──────────────────────────────────────────────────────────────────
class IMAPProtocol(asyncio.Protocol):
    """Per-connection IMAP4rev1 state machine over asyncio.

    State progression: NOT_AUTHENTICATED -> AUTHENTICATED (via LOGIN)
    -> SELECTED (via SELECT/EXAMINE).  Every connection, command and
    auth attempt (including the submitted password) is logged via _log().
    """

    def __init__(self):
        self._transport = None
        self._peer = ("?", 0)  # (ip, port) of the remote client
        self._buf = b""  # unparsed inbound bytes
        self._state = "NOT_AUTHENTICATED"
        self._selected = None  # mailbox name currently selected

    def connection_made(self, transport):
        # Log the connect and greet with the (Dovecot-mimicking) banner,
        # normalizing the line ending if the env override omitted it.
        self._transport = transport
        self._peer = transport.get_extra_info("peername", ("?", 0))
        _log("connect", src=self._peer[0], src_port=self._peer[1])
        banner = IMAP_BANNER if IMAP_BANNER.endswith("\r\n") else IMAP_BANNER + "\r\n"
        transport.write(banner.encode())

    def data_received(self, data):
        # Accumulate and split on LF; each complete line is one command.
        self._buf += data
        while b"\n" in self._buf:
            line, self._buf = self._buf.split(b"\n", 1)
            self._handle_line(line.decode(errors="replace").strip())

    def connection_lost(self, exc):
        _log("disconnect", src=self._peer[0] if self._peer else "?")

    # ── Command dispatch ──────────────────────────────────────────────────────
    def _handle_line(self, line: str) -> None:
        """Split 'TAG COMMAND [ARGS]' and dispatch to the matching handler."""
        parts = line.split(None, 2)
        if not parts:
            return
        tag = parts[0]
        cmd = parts[1].upper() if len(parts) > 1 else ""
        args = parts[2] if len(parts) > 2 else ""
        _log("command", src=self._peer[0], cmd=cmd, state=self._state)
        # Commands valid in any state
        if cmd == "CAPABILITY":
            self._w(b"* CAPABILITY IMAP4rev1 LITERAL+ SASL-IR LOGIN-REFERRALS"
                    b" ID ENABLE IDLE AUTH=PLAIN AUTH=LOGIN\r\n")
            self._w(f"{tag} OK CAPABILITY completed\r\n")
        elif cmd == "NOOP":
            self._w(f"{tag} OK\r\n")
        elif cmd == "LOGOUT":
            self._w(b"* BYE Logging out\r\n")
            self._w(f"{tag} OK LOGOUT completed\r\n")
            self._transport.close()
        # NOT_AUTHENTICATED only
        elif cmd == "LOGIN":
            self._cmd_login(tag, args)
        # AUTHENTICATED or SELECTED
        elif cmd in ("LIST", "LSUB"):
            self._cmd_list(tag, cmd)
        elif cmd == "STATUS":
            self._cmd_status(tag, args)
        elif cmd in ("SELECT", "EXAMINE"):
            self._cmd_select(tag, cmd, args)
        # SELECTED only
        elif cmd == "FETCH":
            self._cmd_fetch(tag, args, use_uid=False)
        elif cmd == "SEARCH":
            self._cmd_search(tag)
        elif cmd == "CLOSE":
            self._cmd_close(tag)
        # UID prefix — dispatch sub-command
        elif cmd == "UID":
            sub_parts = args.split(None, 1)
            sub_cmd = sub_parts[0].upper() if sub_parts else ""
            sub_args = sub_parts[1] if len(sub_parts) > 1 else ""
            if sub_cmd == "FETCH":
                self._cmd_fetch(tag, sub_args, use_uid=True)
            elif sub_cmd == "SEARCH":
                self._cmd_search(tag, uid_mode=True)
            else:
                self._w(f"{tag} BAD Unknown UID sub-command\r\n")
        else:
            self._w(f"{tag} BAD Command not recognized or not supported\r\n")

    # ── Command implementations ───────────────────────────────────────────────
    def _cmd_login(self, tag: str, args: str) -> None:
        """LOGIN: check args against VALID_USERS; log both outcomes with the
        submitted credentials (capturing passwords is the point here)."""
        if self._state != "NOT_AUTHENTICATED":
            self._w(f"{tag} BAD Already authenticated\r\n")
            return
        parts = args.split(None, 1)
        username = parts[0].strip('"') if parts else ""
        password = parts[1].strip('"') if len(parts) > 1 else ""
        if VALID_USERS.get(username) == password:
            self._state = "AUTHENTICATED"
            _log("auth", src=self._peer[0], username=username, password=password,
                 status="success")
            self._w(f"{tag} OK [CAPABILITY IMAP4rev1] Logged in\r\n")
        else:
            _log("auth", src=self._peer[0], username=username, password=password,
                 status="failed", severity=SEVERITY_WARNING)
            self._w(f"{tag} NO [AUTHENTICATIONFAILED] Authentication failed.\r\n")

    def _cmd_list(self, tag: str, cmd: str) -> None:
        """LIST/LSUB: advertise the four static mailboxes."""
        if self._state == "NOT_AUTHENTICATED":
            self._w(f"{tag} BAD Not authenticated\r\n")
            return
        for box in _MAILBOXES:
            self._w(f'* {cmd} (\\HasNoChildren) "/" "{box}"\r\n')
        self._w(f"{tag} OK {cmd} completed\r\n")

    def _cmd_status(self, tag: str, args: str) -> None:
        """STATUS: report fixed counts; only INBOX has (10 bait) messages."""
        if self._state == "NOT_AUTHENTICATED":
            self._w(f"{tag} BAD Not authenticated\r\n")
            return
        parts = args.split(None, 1)
        mailbox = parts[0].strip('"') if parts else "INBOX"
        attr_str = parts[1].strip("()").upper() if len(parts) > 1 else "MESSAGES"
        counts = {"MESSAGES": 10, "RECENT": 0, "UNSEEN": 10} if mailbox == "INBOX" \
            else {"MESSAGES": 0, "RECENT": 0, "UNSEEN": 0}
        result_parts = []
        # Echo back only the attributes the client asked for (and we know).
        for attr in attr_str.split():
            if attr in counts:
                result_parts.append(f"{attr} {counts[attr]}")
        self._w(f"* STATUS {mailbox} ({' '.join(result_parts)})\r\n")
        self._w(f"{tag} OK STATUS completed\r\n")

    def _cmd_select(self, tag: str, cmd: str, args: str) -> None:
        """SELECT/EXAMINE: enter SELECTED state and emit the standard
        untagged status responses for the chosen mailbox."""
        if self._state == "NOT_AUTHENTICATED":
            self._w(f"{tag} BAD Not authenticated\r\n")
            return
        mailbox = args.strip('"')
        total = len(_BAIT_EMAILS) if mailbox == "INBOX" else 0
        self._selected = mailbox
        self._state = "SELECTED"
        self._w(f"* {total} EXISTS\r\n")
        self._w(b"* 0 RECENT\r\n")
        self._w(b"* OK [UNSEEN 1] Message 1 is first unseen\r\n")
        self._w(b"* OK [UIDVALIDITY 1712345678] UIDs valid\r\n")
        self._w(f"* OK [UIDNEXT {total + 1}] Predicted next UID\r\n")
        self._w(b"* FLAGS (\\Answered \\Flagged \\Deleted \\Seen \\Draft)\r\n")
        self._w(b"* OK [PERMANENTFLAGS (\\Deleted \\Seen \\*)] Limited\r\n")
        mode = "READ-ONLY" if cmd == "EXAMINE" else "READ-WRITE"
        self._w(f"{tag} OK [{mode}] {cmd} completed\r\n")

    def _cmd_fetch(self, tag: str, args: str, use_uid: bool) -> None:
        """FETCH / UID FETCH: stream the requested messages.  UID equals
        sequence number here, so both paths share the same index math."""
        if self._state != "SELECTED":
            self._w(f"{tag} BAD Not in selected state\r\n")
            return
        parts = args.split(None, 1)
        range_str = parts[0] if parts else "1:*"
        items_str = parts[1] if len(parts) > 1 else "FLAGS"
        total = len(_BAIT_EMAILS)
        indices = _parse_seq_range(range_str, total)
        items = _parse_fetch_items(items_str)
        # Ensure UID is included when using UID FETCH
        if use_uid and "UID" not in items:
            items = ["UID"] + items
        for seq in indices:
            if 1 <= seq <= total:
                self._transport.write(_build_fetch_response(seq, _BAIT_EMAILS[seq - 1], items))
        self._w(f"{tag} OK FETCH completed\r\n")

    def _cmd_search(self, tag: str, uid_mode: bool = False) -> None:
        """SEARCH / UID SEARCH: always match every message (criteria ignored)."""
        if self._state != "SELECTED":
            self._w(f"{tag} BAD Not in selected state\r\n")
            return
        nums = " ".join(str(i) for i in range(1, len(_BAIT_EMAILS) + 1))
        self._w(f"* SEARCH {nums}\r\n")
        self._w(f"{tag} OK SEARCH completed\r\n")

    def _cmd_close(self, tag: str) -> None:
        """CLOSE: drop back from SELECTED to AUTHENTICATED."""
        if self._state != "SELECTED":
            self._w(f"{tag} BAD Not in selected state\r\n")
            return
        self._state = "AUTHENTICATED"
        self._selected = None
        self._w(f"{tag} OK CLOSE completed\r\n")

    # ── Helpers ───────────────────────────────────────────────────────────────
    def _w(self, data: str | bytes) -> None:
        """Write str (encoded to bytes) or raw bytes to the transport."""
        if isinstance(data, str):
            data = data.encode()
        self._transport.write(data)
async def main():
    """Start the asyncio IMAP listener on PORT and serve until cancelled."""
    _log("startup", msg=f"IMAP server starting as {NODE_NAME}")
    loop = asyncio.get_running_loop()
    # Binding all interfaces is intentional for a containerized listener.
    server = await loop.create_server(IMAPProtocol, "0.0.0.0", PORT)  # nosec B104
    async with server:
        await server.serve_forever()


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16
_SD_ID = "relay@55555"
_NILVALUE = "-"
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
    """Render *fields* as one RFC 5424 SD-ELEMENT; NILVALUE ('-') if empty."""
    if not fields:
        return _NILVALUE
    rendered = " ".join(
        '{}="{}"'.format(name, _sd_escape(str(value)))
        for name, value in fields.items()
    )
    return f"[{_SD_ID} {rendered}]"
def syslog_line(
    service: str,
    hostname: str,
    event_type: str,
    severity: int = SEVERITY_INFO,
    timestamp: datetime | None = None,
    msg: str | None = None,
    **fields: Any,
) -> str:
    """Format one RFC 5424 syslog line (no trailing newline).

    Args:
        service: APP-NAME (e.g. "http", "mysql")
        hostname: HOSTNAME (node name)
        event_type: MSGID (e.g. "request", "login_attempt")
        severity: Syslog severity integer (default: INFO=6)
        timestamp: UTC datetime; defaults to now
        msg: Optional free-text MSG
        **fields: Encoded as structured data params
    """
    when = timestamp if timestamp is not None else datetime.now(timezone.utc)
    # PRI = facility * 8 + severity, per RFC 5424 §6.2.1.
    priority = _FACILITY_LOCAL0 * 8 + severity
    header = " ".join(
        [
            (hostname or _NILVALUE)[:_MAX_HOSTNAME],
            (service or _NILVALUE)[:_MAX_APPNAME],
            _NILVALUE,  # PROCID is not tracked
            (event_type or _NILVALUE)[:_MAX_MSGID],
        ]
    )
    suffix = f" {msg}" if msg else ""
    return f"<{priority}>1 {when.isoformat()} {header} {_sd_element(fields)}{suffix}"
def write_syslog_file(line: str) -> None:
    """Write one formatted syslog line to stdout, flushing immediately so
    the container runtime's log collector sees it without buffering delay."""
    print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
    """Deliberate no-op: TCP forwarding is rsyslog's responsibility on the
    host side, so service containers never open network connections here."""
    return None

View File

@@ -0,0 +1,26 @@
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
# python3 + pip (flask is the only third-party dependency below).
RUN apt-get update && apt-get install -y --no-install-recommends \
python3 python3-pip \
&& rm -rf /var/lib/apt/lists/*
# Debian 12 pip refuses system-wide installs (PEP 668) without this flag.
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN pip3 install --no-cache-dir flask
# Service code plus the shared syslog helper imported by server.py.
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
# Kubernetes API (6443) and legacy insecure port (8080).
EXPOSE 6443 8080
# Run unprivileged: cap_net_bind_service lets python3 bind ports <1024 as a
# non-root user; the `|| true` tolerates environments where setcap fails.
RUN useradd -r -s /bin/false -d /opt logrelay \
&& apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
&& rm -rf /var/lib/apt/lists/* \
&& (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# PID 1 is the exec'd python server (see entrypoint.sh); kill -0 probes liveness.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Fail fast on errors; exec replaces the shell so the python server runs as
# PID 1 and receives container stop signals directly.
set -e
exec python3 /opt/server.py

View File

@@ -0,0 +1,127 @@
#!/usr/bin/env python3
"""
Kubernetes APIserver.
Serves a fake K8s REST API on port 6443 (HTTPS-ish, plain HTTP) and 8080.
Responds to recon endpoints (/version, /api, /apis, /api/v1/namespaces,
/api/v1/pods) with plausible but fake data. Logs all requests as JSON.
"""
import json
import os
from flask import Flask, request
from syslog_bridge import syslog_line, write_syslog_file, forward_syslog
NODE_NAME = os.environ.get("NODE_NAME", "k8s-master")
SERVICE_NAME = "k8s"
LOG_TARGET = os.environ.get("LOG_TARGET", "")
app = Flask(__name__)
_VERSION = {
"major": "1",
"minor": "27",
"gitVersion": "v1.27.4",
"gitCommit": "fa3d7990104d7c1f16943a67f11b154b71f6a132",
"gitTreeState": "clean",
"buildDate": "2023-07-19T12:14:46Z",
"goVersion": "go1.20.6",
"compiler": "gc",
"platform": "linux/amd64",
}
_API_VERSIONS = {
"kind": "APIVersions",
"versions": ["v1"],
"serverAddressByClientCIDRs": [{"clientCIDR": "0.0.0.0/0", "serverAddress": f"{NODE_NAME}:6443"}],
}
_NAMESPACES = {
"kind": "NamespaceList",
"apiVersion": "v1",
"items": [
{"metadata": {"name": "default"}},
{"metadata": {"name": "kube-system"}},
{"metadata": {"name": "production"}},
],
}
_PODS = {
"kind": "PodList",
"apiVersion": "v1",
"items": [
{"metadata": {"name": "webapp-6d5f8b9-xk2p7", "namespace": "production"},
"status": {"phase": "Running"}},
],
}
_SECRETS = {
"kind": "Status",
"apiVersion": "v1",
"status": "Failure",
"message": "secrets is forbidden: User \"system:anonymous\" cannot list resource \"secrets\"",
"reason": "Forbidden",
"code": 403,
}
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one syslog event for this node/service and emit it to stdout,
    then hand it to the (no-op) forwarder."""
    formatted = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(formatted)
    forward_syslog(formatted, LOG_TARGET)
@app.before_request
def log_request():
    """Log every incoming request — method, path, source address, the raw
    Authorization header, and up to 512 bytes of body — before routing."""
    _log(
        "request",
        method=request.method,
        path=request.path,
        remote_addr=request.remote_addr,
        auth=request.headers.get("Authorization", ""),
        body=request.get_data(as_text=True)[:512],
    )
@app.route("/version")
def version():
return app.response_class(json.dumps(_VERSION), mimetype="application/json")
@app.route("/api")
def api():
return app.response_class(json.dumps(_API_VERSIONS), mimetype="application/json")
@app.route("/api/v1/namespaces")
def namespaces():
return app.response_class(json.dumps(_NAMESPACES), mimetype="application/json")
@app.route("/api/v1/pods")
@app.route("/api/v1/namespaces/<ns>/pods")
def pods(ns="default"):
return app.response_class(json.dumps(_PODS), mimetype="application/json")
@app.route("/api/v1/secrets")
@app.route("/api/v1/namespaces/<ns>/secrets")
def secrets(ns="default"):
return app.response_class(json.dumps(_SECRETS), status=403, mimetype="application/json")
@app.route("/", defaults={"path": ""})
@app.route("/<path:path>", methods=["GET", "POST", "PUT", "DELETE", "PATCH"])
def catch_all(path):
return app.response_class(
json.dumps({"kind": "Status", "status": "Failure", "code": 404}),
status=404,
mimetype="application/json",
)
if __name__ == "__main__":
_log("startup", msg=f"Kubernetes API server starting as {NODE_NAME}")
app.run(host="0.0.0.0", port=6443, debug=False) # nosec B104

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16
_SD_ID = "relay@55555"
_NILVALUE = "-"
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
    """Render *fields* as one RFC 5424 SD-ELEMENT; NILVALUE ('-') if empty."""
    if not fields:
        return _NILVALUE
    rendered = " ".join(
        '{}="{}"'.format(name, _sd_escape(str(value)))
        for name, value in fields.items()
    )
    return f"[{_SD_ID} {rendered}]"
def syslog_line(
    service: str,
    hostname: str,
    event_type: str,
    severity: int = SEVERITY_INFO,
    timestamp: datetime | None = None,
    msg: str | None = None,
    **fields: Any,
) -> str:
    """Format one RFC 5424 syslog line (no trailing newline).

    Args:
        service: APP-NAME (e.g. "http", "mysql")
        hostname: HOSTNAME (node name)
        event_type: MSGID (e.g. "request", "login_attempt")
        severity: Syslog severity integer (default: INFO=6)
        timestamp: UTC datetime; defaults to now
        msg: Optional free-text MSG
        **fields: Encoded as structured data params
    """
    when = timestamp if timestamp is not None else datetime.now(timezone.utc)
    # PRI = facility * 8 + severity, per RFC 5424 §6.2.1.
    priority = _FACILITY_LOCAL0 * 8 + severity
    header = " ".join(
        [
            (hostname or _NILVALUE)[:_MAX_HOSTNAME],
            (service or _NILVALUE)[:_MAX_APPNAME],
            _NILVALUE,  # PROCID is not tracked
            (event_type or _NILVALUE)[:_MAX_MSGID],
        ]
    )
    suffix = f" {msg}" if msg else ""
    return f"<{priority}>1 {when.isoformat()} {header} {_sd_element(fields)}{suffix}"
def write_syslog_file(line: str) -> None:
    """Write one formatted syslog line to stdout, flushing immediately so
    the container runtime's log collector sees it without buffering delay."""
    print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
    """Deliberate no-op: TCP forwarding is rsyslog's responsibility on the
    host side, so service containers never open network connections here."""
    return None

View File

@@ -0,0 +1,23 @@
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
# Minimal runtime: python3 only; apt lists removed to keep the layer small.
RUN apt-get update && apt-get install -y --no-install-recommends \
python3 \
&& rm -rf /var/lib/apt/lists/*
# Service code plus the shared syslog helper imported by server.py.
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
# LDAP (389) and LDAPS (636).
EXPOSE 389 636
# Run unprivileged: cap_net_bind_service lets python3 bind ports <1024 as a
# non-root user; the `|| true` tolerates environments where setcap fails.
RUN useradd -r -s /bin/false -d /opt logrelay \
&& apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
&& rm -rf /var/lib/apt/lists/* \
&& (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# PID 1 is the exec'd python server (see entrypoint.sh); kill -0 probes liveness.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Fail fast on errors; exec replaces the shell so the python server runs as
# PID 1 and receives container stop signals directly.
set -e
exec python3 /opt/server.py

View File

@@ -0,0 +1,149 @@
#!/usr/bin/env python3
"""
LDAPserver.
Parses BER-encoded BindRequest messages, logs DN and password, returns an
invalidCredentials error. Logs all interactions as JSON.
"""
import asyncio
import os
from syslog_bridge import syslog_line, write_syslog_file, forward_syslog
NODE_NAME = os.environ.get("NODE_NAME", "ldapserver")
SERVICE_NAME = "ldap"
LOG_TARGET = os.environ.get("LOG_TARGET", "")
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one syslog event for this node/service and emit it to stdout,
    then hand it to the (no-op) forwarder."""
    formatted = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(formatted)
    forward_syslog(formatted, LOG_TARGET)
def _ber_length(data: bytes, pos: int):
"""Return (length, next_pos)."""
b = data[pos]
if b < 0x80:
return b, pos + 1
n = b & 0x7f
length = int.from_bytes(data[pos + 1:pos + 1 + n], "big")
return length, pos + 1 + n
def _ber_string(data: bytes, pos: int):
    """Skip the tag byte at `pos`, decode the BER length (inlined), and
    return (decoded_text, next_pos); undecodable bytes are replaced."""
    cursor = pos + 1  # skip tag byte
    first = data[cursor]
    if first < 0x80:
        length, cursor = first, cursor + 1
    else:
        count = first & 0x7f
        length = int.from_bytes(data[cursor + 1:cursor + 1 + count], "big")
        cursor += 1 + count
    end = cursor + length
    return data[cursor:end].decode(errors="replace"), end
def _parse_bind_request(msg: bytes):
"""Best-effort extraction of (dn, password) from a raw LDAPMessage."""
try:
pos = 0
# LDAPMessage SEQUENCE
assert msg[pos] == 0x30 # nosec B101
pos += 1
_, pos = _ber_length(msg, pos)
# messageID INTEGER
assert msg[pos] == 0x02 # nosec B101
pos += 1
id_len, pos = _ber_length(msg, pos)
pos += id_len
# BindRequest [APPLICATION 0]
assert msg[pos] == 0x60 # nosec B101
pos += 1
_, pos = _ber_length(msg, pos)
# version INTEGER
assert msg[pos] == 0x02 # nosec B101
pos += 1
v_len, pos = _ber_length(msg, pos)
pos += v_len
# name LDAPDN (OCTET STRING)
dn, pos = _ber_string(msg, pos)
# authentication CHOICE — simple [0] OCTET STRING
if msg[pos] == 0x80:
pos += 1
pw_len, pos = _ber_length(msg, pos)
password = msg[pos:pos + pw_len].decode(errors="replace")
else:
password = "<sasl_or_unknown>" # nosec B105
return dn, password
except Exception:
return "<parse_error>", "<parse_error>"
def _bind_error_response(message_id: int) -> bytes:
# BindResponse: resultCode=49 (invalidCredentials), matchedDN="", errorMessage=""
result_code = bytes([0x0a, 0x01, 0x31]) # ENUMERATED 49
matched_dn = bytes([0x04, 0x00]) # empty OCTET STRING
error_msg = bytes([0x04, 0x00]) # empty OCTET STRING
bind_resp_body = result_code + matched_dn + error_msg
bind_resp = bytes([0x61, len(bind_resp_body)]) + bind_resp_body
msg_id_enc = bytes([0x02, 0x01, message_id & 0xff])
ldap_msg_body = msg_id_enc + bind_resp
return bytes([0x30, len(ldap_msg_body)]) + ldap_msg_body
class LDAPProtocol(asyncio.Protocol):
    """Per-connection LDAP handler: frames BER-encoded LDAPMessages out of
    the byte stream, logs every bind attempt (DN + password), and always
    answers invalidCredentials."""

    def __init__(self):
        self._transport = None
        self._peer = None  # (ip, port) of the remote client
        self._buf = b""  # unparsed inbound bytes

    def connection_made(self, transport):
        self._transport = transport
        self._peer = transport.get_extra_info("peername", ("?", 0))
        _log("connect", src=self._peer[0], src_port=self._peer[1])

    def data_received(self, data):
        self._buf += data
        self._process()

    def _process(self):
        """Frame complete messages from the buffer.

        Only definite short-form lengths and the one-byte long form (0x81)
        are handled; any other framing desynchronizes and drops the buffer.
        """
        while len(self._buf) >= 2:
            if self._buf[0] != 0x30:
                # not a SEQUENCE tag — stream is desynchronized, discard
                self._buf = b""
                return
            if self._buf[1] < 0x80:
                msg_len = self._buf[1] + 2
            elif self._buf[1] == 0x81:
                if len(self._buf) < 3:
                    return  # need the length octet before framing
                msg_len = self._buf[2] + 3
            else:
                self._buf = b""
                return
            if len(self._buf) < msg_len:
                return  # incomplete message; wait for more data
            msg = self._buf[:msg_len]
            self._buf = self._buf[msg_len:]
            self._handle_message(msg)

    def _handle_message(self, msg: bytes):
        """Log the (attempted) bind and always reply invalidCredentials."""
        # Extract messageID for the response
        try:
            message_id = msg[4] if len(msg) > 4 else 1
        except Exception:
            message_id = 1
        dn, password = _parse_bind_request(msg)
        _log("bind", src=self._peer[0], dn=dn, password=password)
        self._transport.write(_bind_error_response(message_id))

    def connection_lost(self, exc):
        _log("disconnect", src=self._peer[0] if self._peer else "?")
async def main():
    """Bind the LDAP listener on all interfaces and serve until cancelled."""
    _log("startup", msg=f"LDAP server starting as {NODE_NAME}")
    event_loop = asyncio.get_running_loop()
    listener = await event_loop.create_server(LDAPProtocol, "0.0.0.0", 389)  # nosec B104
    async with listener:
        await listener.serve_forever()


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16
_SD_ID = "relay@55555"
_NILVALUE = "-"
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
if not fields:
return _NILVALUE
params = " ".join(f'{k}="{_sd_escape(str(v))}"' for k, v in fields.items())
return f"[{_SD_ID} {params}]"
def syslog_line(
service: str,
hostname: str,
event_type: str,
severity: int = SEVERITY_INFO,
timestamp: datetime | None = None,
msg: str | None = None,
**fields: Any,
) -> str:
"""
Return a single RFC 5424-compliant syslog line (no trailing newline).
Args:
service: APP-NAME (e.g. "http", "mysql")
hostname: HOSTNAME (node name)
event_type: MSGID (e.g. "request", "login_attempt")
severity: Syslog severity integer (default: INFO=6)
timestamp: UTC datetime; defaults to now
msg: Optional free-text MSG
**fields: Encoded as structured data params
"""
pri = f"<{_FACILITY_LOCAL0 * 8 + severity}>"
ts = (timestamp or datetime.now(timezone.utc)).isoformat()
host = (hostname or _NILVALUE)[:_MAX_HOSTNAME]
appname = (service or _NILVALUE)[:_MAX_APPNAME]
msgid = (event_type or _NILVALUE)[:_MAX_MSGID]
sd = _sd_element(fields)
message = f" {msg}" if msg else ""
return f"{pri}1 {ts} {host} {appname} {_NILVALUE} {msgid} {sd}{message}"
def write_syslog_file(line: str) -> None:
"""Emit a syslog line to stdout for container log capture."""
print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
"""No-op stub. TCP forwarding is handled by rsyslog, not by service containers."""
pass

View File

@@ -0,0 +1,24 @@
# LLMNR/mDNS detector image: minimal Debian + python3, run as an unprivileged user.
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
# Runtime dependency: the stock python3 interpreter only (no pip packages).
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 \
    && rm -rf /var/lib/apt/lists/*
# Shared logging helper plus the service itself.
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
EXPOSE 5355/udp
EXPOSE 5353/udp
# Create an unprivileged user and grant python3 CAP_NET_BIND_SERVICE so low
# ports can be bound without root; setcap failure is tolerated (|| true).
RUN useradd -r -s /bin/false -d /opt logrelay \
    && apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
    && rm -rf /var/lib/apt/lists/* \
    && (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# Liveness probe: kill -0 only checks that PID 1 still exists, it sends no signal.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Abort on any error; exec replaces the shell so the Python server runs as
# PID 1 and receives container stop signals directly.
set -e
exec python3 /opt/server.py

View File

@@ -0,0 +1,113 @@
#!/usr/bin/env python3
"""
LLMNR / mDNS poisoning detector (UDP 5355 and UDP 5353).
Listens for any incoming name-resolution queries. Any traffic here is a
strong signal of an attacker running Responder or similar tools on the LAN.
Logs every packet with source IP and decoded query name where possible.
"""
import asyncio
import os
import struct
from syslog_bridge import syslog_line, write_syslog_file, forward_syslog
NODE_NAME = os.environ.get("NODE_NAME", "lan-host")
SERVICE_NAME = "llmnr"
LOG_TARGET = os.environ.get("LOG_TARGET", "")
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one RFC 5424 record and emit it via stdout plus the forwarder stub."""
    record = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(record)
    forward_syslog(record, LOG_TARGET)
def _decode_dns_name(data: bytes, offset: int) -> str:
"""Decode a DNS-encoded label sequence starting at offset."""
labels = []
visited = set()
pos = offset
while pos < len(data):
if pos in visited:
break
visited.add(pos)
length = data[pos]
if length == 0:
break
if length & 0xc0 == 0xc0: # pointer
if pos + 1 >= len(data):
break
ptr = ((length & 0x3f) << 8) | data[pos + 1]
labels.append(_decode_dns_name(data, ptr))
break
pos += 1
labels.append(data[pos:pos + length].decode(errors="replace"))
pos += length
return ".".join(labels)
def _parse_query(data: bytes, proto: str, src_addr) -> None:
    """Decode a DNS-style query header and log the requested name and qtype.

    Unparseable datagrams are logged as raw hex instead of being dropped.
    """
    try:
        if len(data) < 12:
            raise ValueError("too short")
        flags = struct.unpack(">H", data[2:4])[0]
        qdcount = struct.unpack(">H", data[4:6])[0]
        if (flags >> 15) & 1 != 0 or qdcount < 1:
            return  # responses and question-less packets carry no signal
        name = _decode_dns_name(data, 12)
        # Walk past the QNAME labels to reach the QTYPE field.
        cursor = 12
        while cursor < len(data) and data[cursor] != 0:
            cursor += data[cursor] + 1
        cursor += 1
        qtype = struct.unpack(">H", data[cursor:cursor + 2])[0] if cursor + 2 <= len(data) else 0
        _log(
            "query",
            proto=proto,
            src=src_addr[0],
            src_port=src_addr[1],
            name=name,
            qtype=qtype,
        )
    except Exception as e:
        _log("raw_packet", proto=proto, src=src_addr[0], data=data[:64].hex(), error=str(e))
class LLMNRProtocol(asyncio.DatagramProtocol):
    """UDP handler that hands every received datagram to the query parser."""

    def __init__(self, proto_label: str):
        self._proto = proto_label  # "LLMNR" or "mDNS", used for log tagging

    def datagram_received(self, data, addr):
        _parse_query(data, self._proto, addr)

    def error_received(self, exc):
        # ICMP errors on a passive detector socket carry no useful signal.
        pass
async def main():
    """Bind LLMNR (5355/udp) and mDNS (5353/udp) listeners and run forever."""
    _log("startup", msg=f"LLMNR/mDNS server starting as {NODE_NAME}")
    loop = asyncio.get_running_loop()
    transports = []
    for label, port in (("LLMNR", 5355), ("mDNS", 5353)):
        # Default-bind the label so each factory keeps its own protocol name.
        transport, _ = await loop.create_datagram_endpoint(
            lambda label=label: LLMNRProtocol(label),
            local_addr=("0.0.0.0", port),  # nosec B104
        )
        transports.append(transport)
    try:
        await asyncio.sleep(float("inf"))
    finally:
        for transport in transports:
            transport.close()


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16
_SD_ID = "relay@55555"
_NILVALUE = "-"
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
if not fields:
return _NILVALUE
params = " ".join(f'{k}="{_sd_escape(str(v))}"' for k, v in fields.items())
return f"[{_SD_ID} {params}]"
def syslog_line(
service: str,
hostname: str,
event_type: str,
severity: int = SEVERITY_INFO,
timestamp: datetime | None = None,
msg: str | None = None,
**fields: Any,
) -> str:
"""
Return a single RFC 5424-compliant syslog line (no trailing newline).
Args:
service: APP-NAME (e.g. "http", "mysql")
hostname: HOSTNAME (node name)
event_type: MSGID (e.g. "request", "login_attempt")
severity: Syslog severity integer (default: INFO=6)
timestamp: UTC datetime; defaults to now
msg: Optional free-text MSG
**fields: Encoded as structured data params
"""
pri = f"<{_FACILITY_LOCAL0 * 8 + severity}>"
ts = (timestamp or datetime.now(timezone.utc)).isoformat()
host = (hostname or _NILVALUE)[:_MAX_HOSTNAME]
appname = (service or _NILVALUE)[:_MAX_APPNAME]
msgid = (event_type or _NILVALUE)[:_MAX_MSGID]
sd = _sd_element(fields)
message = f" {msg}" if msg else ""
return f"{pri}1 {ts} {host} {appname} {_NILVALUE} {msgid} {sd}{message}"
def write_syslog_file(line: str) -> None:
"""Emit a syslog line to stdout for container log capture."""
print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
"""No-op stub. TCP forwarding is handled by rsyslog, not by service containers."""
pass

View File

@@ -0,0 +1,23 @@
# MongoDB responder image: minimal Debian + python3, run as an unprivileged user.
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
# Runtime dependency: the stock python3 interpreter only (no pip packages).
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 \
    && rm -rf /var/lib/apt/lists/*
# Shared logging helper plus the service itself.
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
EXPOSE 27017
# Create an unprivileged user and grant python3 CAP_NET_BIND_SERVICE so low
# ports can be bound without root; setcap failure is tolerated (|| true).
RUN useradd -r -s /bin/false -d /opt logrelay \
    && apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
    && rm -rf /var/lib/apt/lists/* \
    && (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# Liveness probe: kill -0 only checks that PID 1 still exists, it sends no signal.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Abort on any error; exec replaces the shell so the Python server runs as
# PID 1 and receives container stop signals directly.
set -e
exec python3 /opt/server.py

View File

@@ -0,0 +1,127 @@
#!/usr/bin/env python3
"""
MongoDB server.
Implements the MongoDB wire protocol OP_MSG/OP_QUERY handshake. Responds
to isMaster/hello, listDatabases, and authenticate commands. Logs all
received messages as JSON.
"""
import asyncio
import os
import struct
from syslog_bridge import syslog_line, write_syslog_file, forward_syslog
NODE_NAME = os.environ.get("NODE_NAME", "mongodb")
SERVICE_NAME = "mongodb"
LOG_TARGET = os.environ.get("LOG_TARGET", "")
PORT = int(os.environ.get("PORT", "27017"))
# Minimal BSON helpers
def _bson_str(key: str, val: str) -> bytes:
    """Encode a BSON UTF-8 string element (type 0x02): key cstring + length-prefixed value."""
    cstring_key = key.encode() + b"\x00"
    cstring_val = val.encode() + b"\x00"
    return b"\x02" + cstring_key + struct.pack("<I", len(cstring_val)) + cstring_val
def _bson_int32(key: str, val: int) -> bytes:
    """Encode a BSON int32 element (type 0x10)."""
    return b"".join((b"\x10", key.encode(), b"\x00", struct.pack("<i", val)))
def _bson_bool(key: str, val: bool) -> bytes:
    """Encode a BSON boolean element (type 0x08)."""
    flag = b"\x01" if val else b"\x00"
    return b"\x08" + key.encode() + b"\x00" + flag
def _bson_doc(*fields: bytes) -> bytes:
    """Wrap encoded elements into a BSON document: int32 total length + body + NUL."""
    body = b"".join(fields) + b"\x00"
    return struct.pack("<I", 4 + len(body)) + body
def _op_reply(request_id: int, doc: bytes) -> bytes:
    """Wrap a BSON document in a legacy OP_REPLY addressed to request_id.

    Layout: 16-byte MsgHeader, then responseFlags(4), cursorId(8),
    startingFrom(4), numberReturned(4), followed by one document.
    """
    header = struct.pack(
        "<iiiiiqii",
        36 + len(doc),   # messageLength (16B header + 20B reply fields + doc)
        0,               # requestID
        request_id,      # responseTo
        1,               # opCode: OP_REPLY
        0,               # responseFlags
        0,               # cursorId (int64)
        0,               # startingFrom
        1,               # numberReturned
    )
    return header + doc
def _op_msg(request_id: int, doc: bytes) -> bytes:
    """Wrap a BSON document in an OP_MSG (opcode 2013) kind-0 section."""
    section = b"\x00" + doc                 # payload type 0: single document
    body = struct.pack("<I", 0) + section   # flagBits = 0
    header = struct.pack(
        "<iiii",
        16 + len(body),  # messageLength
        1,               # requestID
        request_id,      # responseTo
        2013,            # opCode: OP_MSG
    )
    return header + body
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one RFC 5424 record and emit it via stdout plus the forwarder stub."""
    record = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(record)
    forward_syslog(record, LOG_TARGET)
class MongoDBProtocol(asyncio.Protocol):
    """Wire-protocol responder: logs every request and answers with an
    isMaster-style OK document (OP_MSG or legacy OP_REPLY as appropriate)."""

    def __init__(self):
        self._transport = None
        self._peer = None
        self._buf = b""  # reassembly buffer for partial frames

    def connection_made(self, transport):
        self._transport = transport
        self._peer = transport.get_extra_info("peername", ("?", 0))
        _log("connect", src=self._peer[0], src_port=self._peer[1])

    def data_received(self, data):
        self._buf += data
        # Frame on the little-endian messageLength that starts every MsgHeader.
        while len(self._buf) >= 16:
            declared = struct.unpack("<I", self._buf[:4])[0]
            if declared < 16 or declared > 48 * 1024 * 1024:
                # Nonsense length: drop the connection rather than resync.
                self._transport.close()
                self._buf = b""
                return
            if len(self._buf) < declared:
                break
            frame, self._buf = self._buf[:declared], self._buf[declared:]
            self._handle_message(frame)

    def _handle_message(self, msg: bytes):
        if len(msg) < 16:
            return
        request_id = struct.unpack("<I", msg[4:8])[0]
        opcode = struct.unpack("<I", msg[12:16])[0]
        _log("message", src=self._peer[0], opcode=opcode, length=len(msg))
        # Generic isMaster-style OK response sent for every opcode.
        reply_doc = _bson_doc(
            _bson_bool("ismaster", True),
            _bson_int32("maxWireVersion", 17),
            _bson_int32("minWireVersion", 0),
            _bson_str("version", "6.0.5"),
            _bson_int32("ok", 1),
        )
        writer = _op_msg if opcode == 2013 else _op_reply  # 2013 == OP_MSG
        self._transport.write(writer(request_id, reply_doc))

    def connection_lost(self, exc):
        _log("disconnect", src=self._peer[0] if self._peer else "?")
async def main():
    """Start the MongoDB responder on PORT and serve until cancelled."""
    _log("startup", msg=f"MongoDB server starting as {NODE_NAME}")
    event_loop = asyncio.get_running_loop()
    listener = await event_loop.create_server(MongoDBProtocol, "0.0.0.0", PORT)  # nosec B104
    async with listener:
        await listener.serve_forever()


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16
_SD_ID = "relay@55555"
_NILVALUE = "-"
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
if not fields:
return _NILVALUE
params = " ".join(f'{k}="{_sd_escape(str(v))}"' for k, v in fields.items())
return f"[{_SD_ID} {params}]"
def syslog_line(
service: str,
hostname: str,
event_type: str,
severity: int = SEVERITY_INFO,
timestamp: datetime | None = None,
msg: str | None = None,
**fields: Any,
) -> str:
"""
Return a single RFC 5424-compliant syslog line (no trailing newline).
Args:
service: APP-NAME (e.g. "http", "mysql")
hostname: HOSTNAME (node name)
event_type: MSGID (e.g. "request", "login_attempt")
severity: Syslog severity integer (default: INFO=6)
timestamp: UTC datetime; defaults to now
msg: Optional free-text MSG
**fields: Encoded as structured data params
"""
pri = f"<{_FACILITY_LOCAL0 * 8 + severity}>"
ts = (timestamp or datetime.now(timezone.utc)).isoformat()
host = (hostname or _NILVALUE)[:_MAX_HOSTNAME]
appname = (service or _NILVALUE)[:_MAX_APPNAME]
msgid = (event_type or _NILVALUE)[:_MAX_MSGID]
sd = _sd_element(fields)
message = f" {msg}" if msg else ""
return f"{pri}1 {ts} {host} {appname} {_NILVALUE} {msgid} {sd}{message}"
def write_syslog_file(line: str) -> None:
"""Emit a syslog line to stdout for container log capture."""
print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
"""No-op stub. TCP forwarding is handled by rsyslog, not by service containers."""
pass

View File

@@ -0,0 +1,23 @@
# MQTT broker image: minimal Debian + python3, run as an unprivileged user.
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
# Runtime dependency: the stock python3 interpreter only (no pip packages).
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 \
    && rm -rf /var/lib/apt/lists/*
# Shared logging helper plus the service itself.
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
EXPOSE 1883
# Create an unprivileged user and grant python3 CAP_NET_BIND_SERVICE so low
# ports can be bound without root; setcap failure is tolerated (|| true).
RUN useradd -r -s /bin/false -d /opt logrelay \
    && apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
    && rm -rf /var/lib/apt/lists/* \
    && (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# Liveness probe: kill -0 only checks that PID 1 still exists, it sends no signal.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Abort on any error; exec replaces the shell so the Python server runs as
# PID 1 and receives container stop signals directly.
set -e
exec python3 /opt/server.py

View File

@@ -0,0 +1,273 @@
#!/usr/bin/env python3
"""
MQTT server (port 1883).
Parses MQTT CONNECT packets, extracts client_id, etc.
Responds with CONNACK.
Supports dynamic topics and retained publishes.
Logs PUBLISH commands sent by clients.
"""
import asyncio
import json
import os
import random
import struct
from syslog_bridge import syslog_line, write_syslog_file, forward_syslog
NODE_NAME = os.environ.get("NODE_NAME", "mqtt-broker")
SERVICE_NAME = "mqtt"
LOG_TARGET = os.environ.get("LOG_TARGET", "")
PORT = int(os.environ.get("PORT", "1883"))
MQTT_ACCEPT_ALL = os.environ.get("MQTT_ACCEPT_ALL", "1") == "1"
MQTT_PERSONA = os.environ.get("MQTT_PERSONA", "water_plant")
MQTT_CUSTOM_TOPICS = os.environ.get("MQTT_CUSTOM_TOPICS", "")
_CONNACK_ACCEPTED = b"\x20\x02\x00\x00"
_CONNACK_NOT_AUTH = b"\x20\x02\x00\x05"
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one RFC 5424 record and emit it via stdout plus the forwarder stub."""
    record = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(record)
    forward_syslog(record, LOG_TARGET)
def _read_utf8(data: bytes, pos: int):
    """Read an MQTT UTF-8 string (2-byte big-endian length prefix).

    Returns (string, next_pos); returns ("", pos) unchanged when fewer than
    two bytes remain for the length prefix.
    """
    if pos + 2 > len(data):
        return "", pos
    length = struct.unpack(">H", data[pos:pos + 2])[0]
    pos += 2
    return data[pos:pos + length].decode(errors="replace"), pos + length


def _parse_connect(payload: bytes):
    """Parse a CONNECT packet payload into a dict of credentials/metadata.

    Always returns a dict (possibly empty). BUGFIX: truncated input used to
    return a ``({}, pos)`` tuple while the success path returned a plain
    dict — the caller's ``_log("auth", **info)`` expansion then raised
    TypeError and the connection was torn down as a protocol error.
    """
    pos = 0
    proto_name, pos = _read_utf8(payload, pos)
    if pos >= len(payload):
        return {}
    _proto_level = payload[pos]
    pos += 1
    if pos >= len(payload):
        return {}
    flags = payload[pos]
    pos += 1
    pos += 2  # keep-alive interval (unused)
    client_id, pos = _read_utf8(payload, pos)
    result = {"client_id": client_id, "proto": proto_name}
    if flags & 0x04:  # Will flag: skip will-topic and will-message
        _, pos = _read_utf8(payload, pos)
        _, pos = _read_utf8(payload, pos)
    if flags & 0x80:  # username present
        username, pos = _read_utf8(payload, pos)
        result["username"] = username
    if flags & 0x40:  # password present
        password, pos = _read_utf8(payload, pos)
        result["password"] = password
    return result
def _parse_subscribe(payload: bytes):
    """Return (packet_id, [(topic, qos), ...]) parsed from a SUBSCRIBE payload."""
    if len(payload) < 2:
        return 0, []
    packet_id = struct.unpack(">H", payload[:2])[0]
    cursor = 2
    subscriptions = []
    while cursor < len(payload):
        topic, cursor = _read_utf8(payload, cursor)
        if cursor >= len(payload):
            break  # topic without a trailing requested-QoS byte
        requested_qos = payload[cursor] & 0x03
        cursor += 1
        subscriptions.append((topic, requested_qos))
    return packet_id, subscriptions
def _suback(packet_id: int, granted_qos: list[int]) -> bytes:
    """Build a SUBACK packet granting the given QoS levels."""
    body = struct.pack(">H", packet_id) + bytes(granted_qos)
    return bytes((0x90, len(body))) + body
def _publish(topic: str, value: str, retain: bool = True) -> bytes:
    """Build a QoS-0 PUBLISH packet, with the RETAIN bit set by default."""
    topic_bytes = topic.encode()
    topic_len = struct.pack(">H", len(topic_bytes))
    body = str(value).encode()
    first_byte = 0x31 if retain else 0x30
    # MQTT "remaining length": base-128 varint, 7 data bits per byte.
    remaining = len(topic_len) + len(topic_bytes) + len(body)
    length_bytes = []
    while remaining > 0:
        septet = remaining % 128
        remaining //= 128
        length_bytes.append(septet | 128 if remaining > 0 else septet)
    if not length_bytes:
        length_bytes = [0]
    return bytes([first_byte]) + bytes(length_bytes) + topic_len + topic_bytes + body
def _parse_publish(payload: bytes, qos: int):
    """Return (topic, packet_id, data) from a PUBLISH payload.

    packet_id is 0 for QoS 0, or when the QoS>0 payload is too short to hold one.
    """
    topic, cursor = _read_utf8(payload, 0)
    packet_id = 0
    if qos > 0 and cursor + 2 <= len(payload):
        packet_id = struct.unpack(">H", payload[cursor:cursor + 2])[0]
        cursor += 2
    return topic, packet_id, payload[cursor:]
def _generate_topics() -> dict:
    """Build the retained-topic map.

    Precedence: operator-supplied JSON (MQTT_CUSTOM_TOPICS), then the
    persona defaults, then a minimal generic device profile.
    """
    if MQTT_CUSTOM_TOPICS:
        try:
            return json.loads(MQTT_CUSTOM_TOPICS)
        except Exception as e:
            _log("config_error", severity=4, error=str(e))
    if MQTT_PERSONA == "water_plant":
        return {
            "plant/water/tank1/level": f"{random.uniform(60.0, 80.0):.1f}",
            "plant/water/tank1/pressure": f"{random.uniform(2.5, 3.0):.2f}",
            "plant/water/pump1/status": "RUNNING",
            "plant/water/pump1/rpm": f"{int(random.uniform(1400, 1450))}",
            "plant/water/pump2/status": "STANDBY",
            "plant/water/chlorine/dosing": f"{random.uniform(1.1, 1.3):.1f}",
            "plant/water/chlorine/residual": f"{random.uniform(0.7, 0.9):.1f}",
            "plant/water/valve/inlet/state": "OPEN",
            "plant/water/valve/drain/state": "CLOSED",
            "plant/alarm/high_pressure": "0",
            "plant/alarm/low_chlorine": "0",
            "plant/alarm/pump_fault": "0",
            "plant/$SYS/broker/version": "Mosquitto 2.0.15",
            "plant/$SYS/broker/uptime": "2847392",
        }
    return {
        "device/status": "online",
        "device/uptime": "3600"
    }
class MQTTProtocol(asyncio.Protocol):
    """Per-connection MQTT 3.1.1 state machine.

    Accepts CONNECT (refusing when MQTT_ACCEPT_ALL is off), grants every
    SUBSCRIBE and replays retained topic values, logs every PUBLISH, and
    answers PINGREQ. One instance is created per TCP connection.
    """
    def __init__(self):
        self._transport = None
        self._peer = None
        self._buf = b""  # reassembly buffer for partial packets
        self._auth = False  # set once a CONNECT has been accepted
        self._topics = _generate_topics()
    def connection_made(self, transport):
        self._transport = transport
        self._peer = transport.get_extra_info("peername", ("?", 0))
        _log("connect", src=self._peer[0], src_port=self._peer[1])
    def data_received(self, data):
        self._buf += data
        try:
            self._process()
        except Exception as e:
            # Malformed input must never kill the listener: log and drop the client.
            _log("protocol_error", severity=4, error=str(e))
            if self._transport:
                self._transport.close()
    def _process(self):
        # Drain complete MQTT control packets from the buffer; leave partials.
        while len(self._buf) >= 2:
            pkt_byte = self._buf[0]
            pkt_type = (pkt_byte >> 4) & 0x0f
            flags = pkt_byte & 0x0f
            qos = (flags >> 1) & 0x03
            # Decode remaining length (variable-length encoding)
            pos = 1
            remaining = 0
            multiplier = 1
            while pos < len(self._buf):
                if pos > 4:  # MQTT spec: max 4 bytes for remaining length
                    self._transport.close()
                    self._buf = b""
                    return
                byte = self._buf[pos]
                remaining += (byte & 0x7f) * multiplier
                multiplier *= 128
                pos += 1
                if not (byte & 0x80):
                    break
            else:
                # while exhausted without break: length bytes still incomplete
                return  # incomplete length
            if len(self._buf) < pos + remaining:
                return  # incomplete payload
            payload = self._buf[pos:pos + remaining]
            self._buf = self._buf[pos + remaining:]
            if pkt_type == 1:  # CONNECT
                info = _parse_connect(payload)
                _log("auth", **info)
                if MQTT_ACCEPT_ALL:
                    self._auth = True
                    self._transport.write(_CONNACK_ACCEPTED)
                else:
                    self._transport.write(_CONNACK_NOT_AUTH)
                    self._transport.close()
            elif pkt_type == 8:  # SUBSCRIBE
                if not self._auth:
                    self._transport.close()
                    continue
                packet_id, subs = _parse_subscribe(payload)
                granted_qos = [1] * len(subs)  # grant QoS 1 for all
                self._transport.write(_suback(packet_id, granted_qos))
                # Immediately send retained publishes matching topics
                for sub_topic, _ in subs:
                    _log("subscribe", src=self._peer[0], topics=[sub_topic])
                    for t, v in self._topics.items():
                        # simple match: if topic ends with #, it matches prefix
                        if sub_topic.endswith("#"):
                            prefix = sub_topic[:-1]
                            if t.startswith(prefix):
                                self._transport.write(_publish(t, str(v)))
                        elif sub_topic == t:
                            self._transport.write(_publish(t, str(v)))
            elif pkt_type == 3:  # PUBLISH
                if not self._auth:
                    self._transport.close()
                    continue
                topic, packet_id, data = _parse_publish(payload, qos)
                # Attacker command received!
                _log("publish", src=self._peer[0], topic=topic, payload=data.decode(errors="replace"))
                if qos == 1:
                    # QoS 1 requires an acknowledgement carrying the packet id.
                    puback = bytes([0x40, 0x02]) + struct.pack(">H", packet_id)
                    self._transport.write(puback)
            elif pkt_type == 12:  # PINGREQ
                self._transport.write(b"\xd0\x00")  # PINGRESP
            elif pkt_type == 14:  # DISCONNECT
                self._transport.close()
            else:
                # Unsupported control packet: log the type and drop the client.
                _log("packet", src=self._peer[0], pkt_type=pkt_type)
                self._transport.close()
    def connection_lost(self, exc):
        _log("disconnect", src=self._peer[0] if self._peer else "?")
async def main():
    """Start the MQTT broker on PORT and serve until cancelled."""
    _log("startup", msg=f"MQTT server starting as {NODE_NAME}")
    event_loop = asyncio.get_running_loop()
    listener = await event_loop.create_server(MQTTProtocol, "0.0.0.0", PORT)  # nosec B104
    async with listener:
        await listener.serve_forever()


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16
_SD_ID = "relay@55555"
_NILVALUE = "-"
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
# ─── Formatter ────────────────────────────────────────────────────────────────
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
if not fields:
return _NILVALUE
params = " ".join(f'{k}="{_sd_escape(str(v))}"' for k, v in fields.items())
return f"[{_SD_ID} {params}]"
def syslog_line(
service: str,
hostname: str,
event_type: str,
severity: int = SEVERITY_INFO,
timestamp: datetime | None = None,
msg: str | None = None,
**fields: Any,
) -> str:
"""
Return a single RFC 5424-compliant syslog line (no trailing newline).
Args:
service: APP-NAME (e.g. "http", "mysql")
hostname: HOSTNAME (node name)
event_type: MSGID (e.g. "request", "login_attempt")
severity: Syslog severity integer (default: INFO=6)
timestamp: UTC datetime; defaults to now
msg: Optional free-text MSG
**fields: Encoded as structured data params
"""
pri = f"<{_FACILITY_LOCAL0 * 8 + severity}>"
ts = (timestamp or datetime.now(timezone.utc)).isoformat()
host = (hostname or _NILVALUE)[:_MAX_HOSTNAME]
appname = (service or _NILVALUE)[:_MAX_APPNAME]
msgid = (event_type or _NILVALUE)[:_MAX_MSGID]
sd = _sd_element(fields)
message = f" {msg}" if msg else ""
return f"{pri}1 {ts} {host} {appname} {_NILVALUE} {msgid} {sd}{message}"
def write_syslog_file(line: str) -> None:
"""Emit a syslog line to stdout for container log capture."""
print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
"""No-op stub. TCP forwarding is handled by rsyslog, not by service containers."""
pass

View File

@@ -0,0 +1,23 @@
# MSSQL (TDS) responder image: minimal Debian + python3, run as an unprivileged user.
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
# Runtime dependency: the stock python3 interpreter only (no pip packages).
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 \
    && rm -rf /var/lib/apt/lists/*
# Shared logging helper plus the service itself.
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
EXPOSE 1433
# Create an unprivileged user and grant python3 CAP_NET_BIND_SERVICE so low
# ports can be bound without root; setcap failure is tolerated (|| true).
RUN useradd -r -s /bin/false -d /opt logrelay \
    && apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
    && rm -rf /var/lib/apt/lists/* \
    && (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# Liveness probe: kill -0 only checks that PID 1 still exists, it sends no signal.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Abort on any error; exec replaces the shell so the Python server runs as
# PID 1 and receives container stop signals directly.
set -e
exec python3 /opt/server.py

View File

@@ -0,0 +1,143 @@
#!/usr/bin/env python3
"""
MSSQL (TDS) server.
Reads TDS pre-login and login7 packets, extracts username, responds with
a login failed error. Logs auth attempts as JSON.
"""
import asyncio
import os
import struct
from syslog_bridge import syslog_line, write_syslog_file, forward_syslog
NODE_NAME = os.environ.get("NODE_NAME", "dbserver")
SERVICE_NAME = "mssql"
LOG_TARGET = os.environ.get("LOG_TARGET", "")
# Canned TDS PRELOGIN response: advertises server version 14.0.2000 and
# ENCRYPT_NOT_SUP so clients continue with a plaintext Login7 we can parse.
_PRELOGIN_RESP = bytes([
    0x04, 0x01, 0x00, 0x2f, 0x00, 0x00, 0x01, 0x00,  # TDS header type=4, status=1, len=47
    # 0. VERSION option
    0x00, 0x00, 0x1a, 0x00, 0x06,
    # 1. ENCRYPTION option
    0x01, 0x00, 0x20, 0x00, 0x01,
    # 2. INSTOPT
    0x02, 0x00, 0x21, 0x00, 0x01,
    # 3. THREADID
    0x03, 0x00, 0x22, 0x00, 0x04,
    # 4. MARS
    0x04, 0x00, 0x26, 0x00, 0x01,
    # TERMINATOR
    0xff,
    # version data: 14.0.2000
    0x0e, 0x00, 0x07, 0xd0, 0x00, 0x00,
    # encryption: NOT_SUP
    0x02,
    # instopt
    0x00,
    # thread id
    0x00, 0x00, 0x00, 0x00,
    # mars
    0x00,
])
def _log(event_type: str, severity: int = 6, **kwargs) -> None:
    """Format one RFC 5424 record and emit it via stdout plus the forwarder stub."""
    record = syslog_line(SERVICE_NAME, NODE_NAME, event_type, severity, **kwargs)
    write_syslog_file(record)
    forward_syslog(record, LOG_TARGET)
def _tds_error_packet(message: str) -> bytes:
    """Build a TDS response carrying an ERROR token (18456: login failed) plus DONE."""
    msg_utf16 = message.encode("utf-16-le")
    # ERROR token length: number(4) + state(1) + class(1) + msg_len(2) + msg +
    # server-name len(1) + proc-name len(1) + ... + line number(4).
    token_len = 4 + 1 + 1 + 2 + len(msg_utf16) + 1 + 1 + 1 + 1 + 4
    error_token = b"".join((
        b"\xaa",                          # token type: ERROR
        struct.pack("<H", token_len),
        struct.pack("<I", 18456),         # SQL error number: login failed
        b"\x01",                          # state
        b"\x0e",                          # class (severity 14)
        struct.pack("<H", len(message)),  # message length in characters
        msg_utf16,
        b"\x00",                          # server name length
        b"\x00",                          # proc name length
        struct.pack("<I", 1),             # line number
    ))
    done_token = b"\xfd\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
    body = error_token + done_token
    header = struct.pack(">BBHBBBB", 0x04, 0x01, len(body) + 8, 0x00, 0x00, 0x01, 0x00)
    return header + body
class MSSQLProtocol(asyncio.Protocol):
    """TDS listener: answers PRELOGIN, logs the Login7 username, refuses the login."""

    def __init__(self):
        self._transport = None
        self._peer = None
        self._buf = b""  # reassembly buffer for partial TDS packets
        self._prelogin_done = False

    def connection_made(self, transport):
        self._transport = transport
        self._peer = transport.get_extra_info("peername", ("?", 0))
        _log("connect", src=self._peer[0], src_port=self._peer[1])

    def data_received(self, data):
        self._buf += data
        # Frame on the big-endian packet length at bytes 2-3 of the TDS header.
        while len(self._buf) >= 8:
            pkt_type = self._buf[0]
            pkt_len = struct.unpack(">H", self._buf[2:4])[0]
            if pkt_len < 8:
                # Length smaller than the header itself: malformed, drop.
                _log("unknown_packet", src=self._peer[0], pkt_type=hex(pkt_type))
                self._transport.close()
                self._buf = b""
                return
            if len(self._buf) < pkt_len:
                break
            payload = self._buf[8:pkt_len]
            self._buf = self._buf[pkt_len:]
            self._handle_packet(pkt_type, payload)
            if self._transport.is_closing():
                self._buf = b""
                break

    def _handle_packet(self, pkt_type: int, payload: bytes):
        if pkt_type == 0x12:  # PRELOGIN
            self._transport.write(_PRELOGIN_RESP)
            self._prelogin_done = True
        elif pkt_type == 0x10:  # Login7
            username = self._parse_login7_username(payload)
            _log("auth", src=self._peer[0], username=username)
            self._transport.write(_tds_error_packet("Login failed for user."))
            self._transport.close()
        else:
            _log("unknown_packet", src=self._peer[0], pkt_type=hex(pkt_type))
            self._transport.close()

    def _parse_login7_username(self, payload: bytes) -> str:
        """Extract UserName from a Login7 payload (offsets relative to payload start).

        MS-TDS LOGIN7: a 36-byte fixed header, then the OffsetLength block with
        ibHostName/cchHostName at 36/38 and ibUserName/cchUserName at 40/42.
        BUGFIX: the previous code read offsets 36/38 and therefore logged the
        client *hostname* instead of the login username.
        """
        try:
            if len(payload) < 44:
                return "<short_packet>"
            offset = struct.unpack("<H", payload[40:42])[0]
            length = struct.unpack("<H", payload[42:44])[0]
            # cchUserName counts UCS-2 characters, hence length * 2 bytes.
            return payload[offset:offset + length * 2].decode("utf-16-le", errors="replace")
        except Exception:
            return "<parse_error>"

    def connection_lost(self, exc):
        _log("disconnect", src=self._peer[0] if self._peer else "?")
async def main():
    """Bind the MSSQL listener on 1433 and serve until cancelled."""
    _log("startup", msg=f"MSSQL server starting as {NODE_NAME}")
    event_loop = asyncio.get_running_loop()
    listener = await event_loop.create_server(MSSQLProtocol, "0.0.0.0", 1433)  # nosec B104
    async with listener:
        await listener.serve_forever()


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Shared RFC 5424 syslog helper used by service containers.
Services call syslog_line() to format an RFC 5424 message, then
write_syslog_file() to emit it to stdout — the container runtime
captures it, and the host-side collector streams it into the log file.
RFC 5424 structure:
<PRI>1 TIMESTAMP HOSTNAME APP-NAME PROCID MSGID [SD-ELEMENT] MSG
Facility: local0 (16). SD element ID uses PEN 55555.
"""
from datetime import datetime, timezone
from typing import Any
# ─── Constants ────────────────────────────────────────────────────────────────
_FACILITY_LOCAL0 = 16  # syslog facility "local0" (RFC 5424 Table 1)
_SD_ID = "relay@55555"  # SD-ID: name plus private enterprise number per RFC 5424 §6.3.2
_NILVALUE = "-"  # RFC 5424 NILVALUE placeholder for absent header fields
# Syslog severity codes (RFC 5424 Table 2); PRI = facility * 8 + severity.
SEVERITY_EMERG = 0
SEVERITY_ALERT = 1
SEVERITY_CRIT = 2
SEVERITY_ERROR = 3
SEVERITY_WARNING = 4
SEVERITY_NOTICE = 5
SEVERITY_INFO = 6
SEVERITY_DEBUG = 7
# Maximum header-field lengths from RFC 5424 §6.2 (HOSTNAME, APP-NAME, MSGID).
_MAX_HOSTNAME = 255
_MAX_APPNAME = 48
_MAX_MSGID = 32
def _sd_escape(value: str) -> str:
"""Escape SD-PARAM-VALUE per RFC 5424 §6.3.3."""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("]", "\\]")
def _sd_element(fields: dict[str, Any]) -> str:
    """Render *fields* as a single RFC 5424 SD-ELEMENT, or NILVALUE when empty."""
    if not fields:
        return _NILVALUE
    rendered = [f'{key}="{_sd_escape(str(val))}"' for key, val in fields.items()]
    return f"[{_SD_ID} {' '.join(rendered)}]"
def syslog_line(
    service: str,
    hostname: str,
    event_type: str,
    severity: int = SEVERITY_INFO,
    timestamp: datetime | None = None,
    msg: str | None = None,
    **fields: Any,
) -> str:
    """
    Build one RFC 5424-compliant syslog line (no trailing newline).

    Args:
        service: APP-NAME (e.g. "http", "mysql")
        hostname: HOSTNAME (node name)
        event_type: MSGID (e.g. "request", "login_attempt")
        severity: Syslog severity integer (default: INFO=6)
        timestamp: UTC datetime; defaults to now
        msg: Optional free-text MSG
        **fields: Encoded as structured data params
    """
    when = timestamp if timestamp is not None else datetime.now(timezone.utc)
    # Header fields in RFC 5424 order: PRI+VERSION, TIMESTAMP, HOSTNAME,
    # APP-NAME, PROCID (nil), MSGID, STRUCTURED-DATA. Empty strings fall
    # back to NILVALUE; each field is clamped to its RFC length limit.
    header = " ".join((
        f"<{_FACILITY_LOCAL0 * 8 + severity}>1",
        when.isoformat(),
        (hostname or _NILVALUE)[:_MAX_HOSTNAME],
        (service or _NILVALUE)[:_MAX_APPNAME],
        _NILVALUE,
        (event_type or _NILVALUE)[:_MAX_MSGID],
        _sd_element(fields),
    ))
    return f"{header} {msg}" if msg else header
def write_syslog_file(line: str) -> None:
    """Emit a syslog line to stdout (flushed immediately) for container log capture."""
    # flush=True so the container runtime sees the line right away, not on buffer fill.
    print(line, flush=True)
def forward_syslog(line: str, log_target: str) -> None:
    """No-op stub. TCP forwarding is handled by rsyslog, not by service containers."""
    # Kept so callers can invoke a uniform API regardless of transport; intentionally empty.
    pass

View File

@@ -0,0 +1,23 @@
# Base image is overridable so deployments can pin a mirror or a specific digest.
ARG BASE_IMAGE=debian:bookworm-slim
FROM ${BASE_IMAGE}
# Minimal runtime: just the system python3, no recommends, apt lists removed
# in the same layer to keep the image small.
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 \
    && rm -rf /var/lib/apt/lists/*
# Shared RFC 5424 logging helper plus the service implementation.
COPY syslog_bridge.py /opt/syslog_bridge.py
COPY server.py /opt/server.py
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
EXPOSE 3306
# Run unprivileged: create a system user, then grant the python3 binary
# cap_net_bind_service so it can bind ports < 1024 without root.
# setcap failures are tolerated (|| true) for filesystems without xattr support.
RUN useradd -r -s /bin/false -d /opt logrelay \
    && apt-get update && apt-get install -y --no-install-recommends libcap2-bin \
    && rm -rf /var/lib/apt/lists/* \
    && (find /usr/bin/ -maxdepth 1 -name 'python3*' -type f -exec setcap 'cap_net_bind_service+eip' {} \; 2>/dev/null || true)
# Liveness probe: kill -0 only checks that PID 1 (the server) is still signalable.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD kill -0 1 || exit 1
USER logrelay
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
#!/bin/bash
# Abort on any error, then exec so python replaces the shell as PID 1
# and receives container stop signals directly.
set -e
exec python3 /opt/server.py

Some files were not shown because too many files have changed in this diff Show More