Files
microdao-daarion/services/integration-service/src/dlq.ts
Apply ef3473db21 snapshot: NODE1 production state 2026-02-09
Complete snapshot of /opt/microdao-daarion/ from NODE1 (144.76.224.179).
This represents the actual running production code that has diverged
significantly from the previous main branch.

Key changes from old main:
- Gateway (http_api.py): expanded from ~40KB to 164KB with full agent support
- Router: new /v1/agents/{id}/infer endpoint with vision + DeepSeek routing
- Behavior Policy: SOWA v2.2 (3-level: FULL/ACK/SILENT)
- Agent Registry: config/agent_registry.yml as single source of truth
- 13 agents configured (was 3)
- Memory service integration
- CrewAI teams and roles

Excluded from snapshot: venv/, .env, data/, backups, .tgz archives

Co-authored-by: Cursor <cursoragent@cursor.com>
2026-02-09 08:46:46 -08:00

94 lines
2.8 KiB
TypeScript

import fs from "fs";
import path from "path";
import Redis from "ioredis";
import { logger } from "./logger";
import crypto from "crypto";
// DLQ feature flag: on by default; disabled only when DLQ_ENABLED is set to something other than "true".
const enabled = (process.env.DLQ_ENABLED || "true") === "true";
// When REDIS_URL is set, entries go to a Redis stream; otherwise they fall back to a local JSONL file.
const redisUrl = process.env.REDIS_URL;
const dlqFile = process.env.DLQ_FILE || "/data/dlq.jsonl";
// Lazily populated below when redisUrl is configured; null means "use file fallback".
let redis: Redis | null = null;
// Max stored payload-preview size (default 4096; measured in bytes by previewPayload).
const MAX_BYTES = Number(process.env.AGX_AUDIT_MAX_BYTES || 4096);
// Lower-cased key names whose values are replaced with "***REDACTED***" during sanitization.
const REDACT_KEYS = new Set((process.env.AGX_AUDIT_REDACT_KEYS || "token,secret,password,authorization,cookie,api_key,signature").split(",").map(s => s.trim().toLowerCase()));
/**
 * Deep-copies a JSON-like value, replacing the value of every property whose
 * lower-cased key appears in REDACT_KEYS with the literal "***REDACTED***".
 * Arrays are sanitized element-wise; primitives (and null) pass through as-is.
 */
function sanitize(obj: any): any {
  if (Array.isArray(obj)) {
    return obj.map((item) => sanitize(item));
  }
  if (obj === null || typeof obj !== "object") {
    return obj;
  }
  const result: any = {};
  for (const key of Object.keys(obj)) {
    result[key] = REDACT_KEYS.has(key.toLowerCase())
      ? "***REDACTED***"
      : sanitize(obj[key]);
  }
  return result;
}
/**
 * Serializes a sanitized copy of `payload` and caps the stored JSON preview
 * at MAX_BYTES.
 *
 * Fix: the original measured `size` in UTF-8 bytes but truncated with
 * String#slice (UTF-16 code units), so multi-byte content could yield a
 * preview exceeding MAX_BYTES bytes. Truncation now happens on the UTF-8
 * byte representation, matching the reported size.
 *
 * @returns `{ preview, truncated, size }` where `size` is the full
 *          serialized byte length and `truncated` flags a capped preview.
 */
function previewPayload(payload: any) {
  const sanitized = sanitize(payload);
  const raw = JSON.stringify(sanitized);
  const buf = Buffer.from(raw, "utf-8");
  const size = buf.length;
  if (size > MAX_BYTES) {
    // Byte-boundary cut; toString may end in U+FFFD if a multi-byte sequence
    // was split — acceptable for a human-readable preview.
    return { preview: buf.subarray(0, MAX_BYTES).toString("utf-8"), truncated: true, size };
  }
  return { preview: raw, truncated: false, size };
}
// Module-initialization side effect: eagerly open the Redis connection when
// REDIS_URL is configured so enqueue/replay use the stream transport instead
// of the JSONL file fallback.
if (redisUrl) {
redis = new Redis(redisUrl);
}
export async function enqueue(type: string, payload: any, meta: any) {
if (!enabled) return;
const { preview, truncated, size } = previewPayload(payload);
const entry = {
type,
trace_id: meta?.trace_id || "",
request_id: meta?.request_id || "",
route: meta?.route || "",
dedup_key: meta?.dedup_key || "",
error_class: meta?.error_class || "",
error_message: meta?.error_message || "",
ingest_ts: new Date().toISOString(),
payload_size: size,
payload_hash: `sha256:${crypto.createHash("sha256").update(preview).digest("hex")}` ,
payload_truncated: truncated,
payload: preview
};
if (redis) {
await redis.xadd("agx:dlq", "*", "data", JSON.stringify(entry));
return;
}
fs.mkdirSync(path.dirname(dlqFile), { recursive: true });
fs.appendFileSync(dlqFile, JSON.stringify(entry) + "
");
}
export async function replay(handler: (entry: any) => Promise<void>) {
if (redis) {
const entries = await redis.xrange("agx:dlq", "-", "+");
for (const [_id, fields] of entries) {
const dataIdx = fields.findIndex((v) => v === "data");
const raw = dataIdx >= 0 ? fields[dataIdx + 1] : "";
if (!raw) continue;
const entry = JSON.parse(raw);
await handler(entry);
}
await redis.del("agx:dlq");
return;
}
if (!fs.existsSync(dlqFile)) return;
const lines = fs.readFileSync(dlqFile, "utf-8").trim().split("
");
fs.writeFileSync(dlqFile, "");
for (const line of lines) {
if (!line) continue;
const entry = JSON.parse(line);
await handler(entry);
}
}