feat: switch to JSON-based log ingestion for higher reliability
This commit is contained in:
@@ -1,36 +1,36 @@
|
||||
import asyncio
|
||||
import os
|
||||
import logging
|
||||
import json
|
||||
from typing import Any
|
||||
from pathlib import Path
|
||||
|
||||
from decnet.correlation.parser import parse_line
|
||||
from decnet.web.repository import BaseRepository
|
||||
|
||||
logger = logging.getLogger("decnet.web.ingester")
|
||||
|
||||
async def log_ingestion_worker(repo: BaseRepository) -> None:
|
||||
"""
|
||||
Background task that tails the DECNET_INGEST_LOG_FILE and
|
||||
inserts parsed LogEvents into the SQLite repository.
|
||||
Background task that tails the DECNET_INGEST_LOG_FILE.json and
|
||||
inserts structured JSON logs into the SQLite repository.
|
||||
"""
|
||||
log_file_path_str = os.environ.get("DECNET_INGEST_LOG_FILE")
|
||||
if not log_file_path_str:
|
||||
base_log_file = os.environ.get("DECNET_INGEST_LOG_FILE")
|
||||
if not base_log_file:
|
||||
logger.warning("DECNET_INGEST_LOG_FILE not set. Log ingestion disabled.")
|
||||
return
|
||||
|
||||
log_path = Path(log_file_path_str)
|
||||
json_log_path = Path(base_log_file).with_suffix(".json")
|
||||
position = 0
|
||||
|
||||
logger.info(f"Starting log ingestion from {log_path}")
|
||||
logger.info(f"Starting JSON log ingestion from {json_log_path}")
|
||||
|
||||
while True:
|
||||
try:
|
||||
if not log_path.exists():
|
||||
if not json_log_path.exists():
|
||||
await asyncio.sleep(2)
|
||||
continue
|
||||
|
||||
stat = log_path.stat()
|
||||
stat = json_log_path.stat()
|
||||
if stat.st_size < position:
|
||||
# File rotated or truncated
|
||||
position = 0
|
||||
@@ -40,26 +40,26 @@ async def log_ingestion_worker(repo: BaseRepository) -> None:
|
||||
await asyncio.sleep(1)
|
||||
continue
|
||||
|
||||
with open(log_path, "r", encoding="utf-8", errors="replace") as f:
|
||||
with open(json_log_path, "r", encoding="utf-8", errors="replace") as f:
|
||||
f.seek(position)
|
||||
while True:
|
||||
line = f.readline()
|
||||
if not line:
|
||||
break # EOF reached
|
||||
|
||||
event = parse_line(line)
|
||||
if event:
|
||||
log_data = {
|
||||
"timestamp": event.timestamp.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"decky": event.decky,
|
||||
"service": event.service,
|
||||
"event_type": event.event_type,
|
||||
"attacker_ip": event.attacker_ip or "Unknown",
|
||||
"raw_line": event.raw
|
||||
}
|
||||
await repo.add_log(log_data)
|
||||
if not line.endswith('\n'):
|
||||
# Partial line read, don't process yet, don't advance position
|
||||
break
|
||||
|
||||
position = f.tell()
|
||||
try:
|
||||
log_data = json.loads(line.strip())
|
||||
await repo.add_log(log_data)
|
||||
except json.JSONDecodeError:
|
||||
logger.error(f"Failed to decode JSON log line: {line}")
|
||||
continue
|
||||
|
||||
# Update position after successful line read
|
||||
position = f.tell()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in log ingestion worker: {e}")
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
import json
import re
from datetime import datetime

# RFC 5424 layout emitted by the deckies:
#   <PRI>1 TIMESTAMP HOSTNAME APP-NAME - MSGID [decnet@55555 k="v" ...] MSG
# Compiled once at import time instead of on every write_syslog_file() call.
_RFC5424_RE = re.compile(
    r"^<\d+>1 "
    r"(\S+) "  # 1: TIMESTAMP
    r"(\S+) "  # 2: HOSTNAME (decky name)
    r"(\S+) "  # 3: APP-NAME (service)
    r"- "      # PROCID always NILVALUE
    r"(\S+) "  # 4: MSGID (event_type)
    r"(.+)$",  # 5: SD element + optional MSG
)
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
# Candidate SD-PARAM names for the attacker address, in preference order.
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")


def _syslog_to_payload(line: str) -> dict | None:
    """Parse an RFC 5424 syslog *line* into a JSON-serializable dict.

    Returns ``None`` when the line does not match the expected layout.
    """
    m = _RFC5424_RE.match(line)
    if m is None:
        return None
    ts_raw, decky, service, event_type, sd_rest = m.groups()

    # Decode structured-data params, undoing RFC 5424 PARAM-VALUE escapes
    # (\" then \\ then \]) in the same order as before.
    fields: dict[str, str] = {}
    block = _SD_BLOCK_RE.search(sd_rest)
    if block:
        for key, value in _PARAM_RE.findall(block.group(1)):
            fields[key] = (
                value.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
            )

    attacker_ip = next((fields[f] for f in _IP_FIELDS if f in fields), "Unknown")

    # Normalize the timestamp; keep the raw value when it is not ISO 8601.
    try:
        ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
    except ValueError:
        ts = ts_raw

    return {
        "timestamp": ts,
        "decky": decky,
        "service": service,
        "event_type": event_type,
        "attacker_ip": attacker_ip,
        "raw_line": line,
    }


def write_syslog_file(line: str) -> None:
    """Append a syslog line to the rotating log file.

    When *line* matches the RFC 5424 layout it is additionally mirrored as
    a JSON record into the companion ``.json`` log.  Logging is
    best-effort: any failure is swallowed so callers are never disrupted.
    """
    try:
        _get_file_logger().info(line)
        payload = _syslog_to_payload(line)
        if payload is not None:
            _get_json_logger().info(json.dumps(payload))
    except Exception:
        # Deliberate best-effort: a logging failure must not propagate
        # into the caller's request path.
        pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
_json_logger: logging.Logger | None = None
|
||||
|
||||
def _get_json_logger() -> logging.Logger:
|
||||
global _json_logger
|
||||
if _json_logger is not None:
|
||||
return _json_logger
|
||||
|
||||
log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
|
||||
json_path = Path(log_path_str).with_suffix(".json")
|
||||
try:
|
||||
json_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
json_path,
|
||||
maxBytes=_MAX_BYTES,
|
||||
backupCount=_BACKUP_COUNT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
except OSError:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
||||
_json_logger = logging.getLogger("decnet.json")
|
||||
_json_logger.setLevel(logging.DEBUG)
|
||||
_json_logger.propagate = False
|
||||
_json_logger.addHandler(handler)
|
||||
return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -120,10 +120,90 @@ def _get_file_logger() -> logging.Logger:
|
||||
return _file_logger
|
||||
|
||||
|
||||
|
||||
# Cached singleton for the JSON log writer (created lazily on first use).
_json_logger: logging.Logger | None = None


def _get_json_logger() -> logging.Logger:
    """Return the lazily-initialized logger that writes the JSON log file.

    The JSON log lives next to the plain-text log (same path with a
    ``.json`` suffix) and rotates with the same size/backup limits as the
    text log. If the log directory cannot be created or the file cannot
    be opened, falls back to a ``StreamHandler`` so JSON events are still
    emitted rather than lost.
    """
    global _json_logger
    if _json_logger is not None:
        return _json_logger

    log_path_str = os.environ.get(_LOG_FILE_ENV, _DEFAULT_LOG_FILE)
    json_path = Path(log_path_str).with_suffix(".json")
    try:
        json_path.parent.mkdir(parents=True, exist_ok=True)
        handler = logging.handlers.RotatingFileHandler(
            json_path,
            maxBytes=_MAX_BYTES,
            backupCount=_BACKUP_COUNT,
            encoding="utf-8",
        )
    except OSError:
        # Directory creation / file open failed -- degrade gracefully.
        handler = logging.StreamHandler()

    handler.setFormatter(logging.Formatter("%(message)s"))
    json_logger = logging.getLogger("decnet.json")
    json_logger.setLevel(logging.DEBUG)
    json_logger.propagate = False
    # logging.getLogger returns a process-wide singleton: if two threads
    # race past the cache check above, or the module is re-initialized,
    # adding unconditionally would stack handlers and duplicate every
    # JSON event. Only attach when no handler is registered yet.
    if not json_logger.handlers:
        json_logger.addHandler(handler)
    else:
        handler.close()
    _json_logger = json_logger
    return _json_logger
|
||||
|
||||
|
||||
def write_syslog_file(line: str) -> None:
|
||||
"""Append a syslog line to the rotating log file."""
|
||||
try:
|
||||
_get_file_logger().info(line)
|
||||
|
||||
# Also parse and write JSON log
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
_RFC5424_RE = re.compile(
|
||||
r"^<\d+>1 "
|
||||
r"(\S+) " # 1: TIMESTAMP
|
||||
r"(\S+) " # 2: HOSTNAME (decky name)
|
||||
r"(\S+) " # 3: APP-NAME (service)
|
||||
r"- " # PROCID always NILVALUE
|
||||
r"(\S+) " # 4: MSGID (event_type)
|
||||
r"(.+)$", # 5: SD element + optional MSG
|
||||
)
|
||||
_SD_BLOCK_RE = re.compile(r'\[decnet@55555\s+(.*?)\]', re.DOTALL)
|
||||
_PARAM_RE = re.compile(r'(\w+)="((?:[^"\\]|\\.)*)"')
|
||||
_IP_FIELDS = ("src_ip", "src", "client_ip", "remote_ip", "ip")
|
||||
|
||||
m = _RFC5424_RE.match(line)
|
||||
if m:
|
||||
ts_raw, decky, service, event_type, sd_rest = m.groups()
|
||||
|
||||
block = _SD_BLOCK_RE.search(sd_rest)
|
||||
fields = {}
|
||||
if block:
|
||||
for k, v in _PARAM_RE.findall(block.group(1)):
|
||||
fields[k] = v.replace('\\"', '"').replace("\\\\", "\\").replace("\\]", "]")
|
||||
|
||||
attacker_ip = "Unknown"
|
||||
for fname in _IP_FIELDS:
|
||||
if fname in fields:
|
||||
attacker_ip = fields[fname]
|
||||
break
|
||||
|
||||
# Parse timestamp to normalize it
|
||||
try:
|
||||
ts = datetime.fromisoformat(ts_raw).strftime("%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
ts = ts_raw
|
||||
|
||||
payload = {
|
||||
"timestamp": ts,
|
||||
"decky": decky,
|
||||
"service": service,
|
||||
"event_type": event_type,
|
||||
"attacker_ip": attacker_ip,
|
||||
"raw_line": line
|
||||
}
|
||||
_get_json_logger().info(json.dumps(payload))
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
Reference in New Issue
Block a user