Complete snapshot of /opt/microdao-daarion/ from NODE1 (144.76.224.179).
This represents the actual running production code that has diverged
significantly from the previous main branch.
Key changes from old main:
- Gateway (http_api.py): expanded from ~40KB to 164KB with full agent support
- Router: new /v1/agents/{id}/infer endpoint with vision + DeepSeek routing
- Behavior Policy: SOWA v2.2 (3-level: FULL/ACK/SILENT)
- Agent Registry: config/agent_registry.yml as single source of truth
- 13 agents configured (was 3)
- Memory service integration
- CrewAI teams and roles
Excluded from snapshot: venv/, .env, data/, backups, .tgz archives
Co-authored-by: Cursor <cursoragent@cursor.com>
75 lines
2.5 KiB
Python
import os
|
|
import json
|
|
import time
|
|
import hashlib
|
|
import asyncio
|
|
from pathlib import Path
|
|
from nats.aio.client import Client as NATS
|
|
|
|
# NATS server to which audit events are published (see _publish below).
NATS_URL = os.getenv('NATS_URL', 'nats://localhost:4222')
# Local JSONL file that receives one audit record per tool call.
AUDIT_FILE = os.getenv('AGX_AUDIT_FILE', 'artifacts/audit.log.jsonl')
# Maximum size (UTF-8 bytes) of a serialized inputs/outputs preview before truncation.
MAX_BYTES = int(os.getenv('AGX_AUDIT_MAX_BYTES', '4096'))
# Lower-cased key names whose values are replaced with "[REDACTED]" in previews.
REDACT_KEYS = set(k.strip().lower() for k in os.getenv('AGX_AUDIT_REDACT_KEYS', 'token,secret,password,authorization,cookie,api_key,signature').split(','))
# Trace identifier stamped on every audit event; empty when not set by the caller.
TRACE_ID = os.getenv('AGX_TRACE_ID', '')
def _sanitize(obj):
    """Return a copy of *obj* with values of sensitive keys redacted.

    Dicts and lists are walked recursively; any dict entry whose key
    (case-insensitively) appears in REDACT_KEYS has its value replaced
    with the literal string "[REDACTED]". All other objects are returned
    unchanged.
    """
    if isinstance(obj, dict):
        return {
            # BUG FIX: guard k.lower() — non-string dict keys previously
            # raised AttributeError; they can never match REDACT_KEYS, so
            # just sanitize their values.
            k: ("[REDACTED]" if isinstance(k, str) and k.lower() in REDACT_KEYS else _sanitize(v))
            for k, v in obj.items()
        }
    if isinstance(obj, list):
        return [_sanitize(v) for v in obj]
    return obj
def _preview(obj):
    """Serialize a sanitized preview of *obj*.

    Returns a tuple ``(preview, truncated, size)`` where *preview* is
    canonical JSON (sorted keys, non-ASCII preserved) of the redacted
    object, *truncated* says whether it was cut down, and *size* is the
    full UTF-8 byte length before truncation. On any serialization
    failure returns ``(None, True, 0)`` — auditing must never raise.
    """
    try:
        raw = json.dumps(_sanitize(obj), ensure_ascii=False, sort_keys=True, default=str)
        encoded = raw.encode("utf-8")
        size = len(encoded)
        if size > MAX_BYTES:
            # BUG FIX: size is measured in bytes, but the old code sliced
            # *characters* (raw[:MAX_BYTES]), so multi-byte text could still
            # exceed MAX_BYTES bytes. Slice the encoded bytes instead and
            # drop any torn trailing multi-byte sequence on decode.
            return encoded[:MAX_BYTES].decode("utf-8", errors="ignore"), True, size
        return raw, False, size
    except Exception:
        # Deliberate best-effort: a preview failure must not break the caller.
        return None, True, 0
def _hash(obj) -> str:
|
|
try:
|
|
raw = json.dumps(obj, ensure_ascii=False, sort_keys=True, default=str)
|
|
except Exception:
|
|
raw = str(obj)
|
|
return hashlib.sha256(raw.encode()).hexdigest()[:16]
|
|
|
|
|
|
async def _publish(subject: str, payload: dict):
    """Publish *payload* as JSON on *subject* via a fresh NATS connection.

    A new connection is opened per event, which keeps this helper
    stateless; drain() flushes outstanding messages and closes cleanly.
    """
    client = NATS()
    await client.connect(servers=[NATS_URL])
    body = json.dumps(payload, ensure_ascii=False).encode()
    await client.publish(subject, body)
    # Explicit 1s flush before drain to bound how long we wait on the broker.
    await client.flush(1)
    await client.drain()
def audit_tool_call(tool: str, inputs: dict, outputs: dict, success: bool, duration_ms: int):
    """Record a single tool invocation in the audit trail.

    Writes one JSON line to AUDIT_FILE (the durable record) and then
    publishes the same event to NATS on 'agx.audit.toolcall' as a
    best-effort broadcast.

    Args:
        tool: Name of the invoked tool.
        inputs: Tool input payload (redacted/truncated in the preview).
        outputs: Tool output payload (redacted/truncated in the preview).
        success: Whether the call succeeded.
        duration_ms: Wall-clock duration of the call in milliseconds.
    """
    in_preview, in_truncated, in_bytes = _preview(inputs)
    out_preview, out_truncated, out_bytes = _preview(outputs)

    event = {
        'trace_id': TRACE_ID,
        'tool': tool,
        # Full-payload hashes allow integrity checks even when previews truncate.
        'inputs_hash': _hash(inputs),
        'outputs_hash': _hash(outputs),
        'inputs_preview': in_preview,
        'outputs_preview': out_preview,
        'payload_truncated': in_truncated or out_truncated,
        'payload_size': {'inputs': in_bytes, 'outputs': out_bytes},
        'success': success,
        'duration_ms': duration_ms,
        'ts': int(time.time() * 1000),
    }

    # Append to the local JSONL log first — this is the durable record.
    audit_path = Path(AUDIT_FILE)
    audit_path.parent.mkdir(parents=True, exist_ok=True)
    with audit_path.open('a', encoding='utf-8') as fh:
        fh.write(json.dumps(event, ensure_ascii=False) + '\n')

    # NATS publication is best-effort: transport failures (or running
    # inside an active event loop) must never bubble up to the caller.
    try:
        asyncio.run(_publish('agx.audit.toolcall', event))
    except Exception:
        pass