Files
microdao-daarion/services/integration-service/src/index.ts
Applied ef3473db21 snapshot: NODE1 production state 2026-02-09
Complete snapshot of /opt/microdao-daarion/ from NODE1 (144.76.224.179).
This represents the actual running production code that has diverged
significantly from the previous main branch.

Key changes from old main:
- Gateway (http_api.py): expanded from ~40KB to 164KB with full agent support
- Router: new /v1/agents/{id}/infer endpoint with vision + DeepSeek routing
- Behavior Policy: SOWA v2.2 (3-level: FULL/ACK/SILENT)
- Agent Registry: config/agent_registry.yml as single source of truth
- 13 agents configured (was 3)
- Memory service integration
- CrewAI teams and roles

Excluded from snapshot: venv/, .env, data/, backups, .tgz archives

Co-authored-by: Cursor <cursoragent@cursor.com>
2026-02-09 08:46:46 -08:00

231 lines
9.6 KiB
TypeScript

import express from "express";
import { randomUUID } from "crypto";
import pinoHttp from "pino-http";
import { logger } from "./logger";
import { initNats, publish } from "./nats";
import { initDedupStore, buildDedupKey, isDuplicate, cleanupDedup } from "./dedup";
import { verifySignature } from "./security";
import { rateLimit } from "./rate_limit";
import { enqueue, replay } from "./dlq";
import { loadMappings, resolveAssetId } from "./mappings";
import { ObservationSchema, EventSchema, TaskLogSchema, InventoryMovementSchema } from "./types";
import { writeObservation, writeEvent, writeTaskLog, writeInventoryMovement } from "./farmos";
import promClient from "prom-client";
// Module augmentation: extend Express's Request with the per-request tracing
// context (populated by the tracing middleware in this file) and the raw
// request body (captured by express.json's `verify` hook so HMAC signatures
// can be checked against the exact bytes received).
declare global {
namespace Express {
interface Request {
// Correlation/timing data attached once per request by the middleware.
context?: {
trace_id: string;
request_id: string;
start_ts: number;
route?: string;
};
// Unparsed JSON body text; used by verifySignature on the write routes.
rawBody?: string;
}
}
}
// Emit one structured log line enriched with the request's trace/request ids
// and route. Falls back to empty ids when the tracing middleware has not run.
function logWithReq(req: any, level: "info"|"warn"|"error", msg: string, extra: any = {}) {
  const { trace_id = "", request_id = "" } = req.context ?? {};
  logger[level]({ trace_id, request_id, route: req.originalUrl, ...extra }, msg);
}
const app = express();
// Parse JSON bodies up to 2 MB. The `verify` hook stashes the raw body text on
// the request so signatures can be verified against the exact received bytes
// (re-serialising req.body would not be byte-identical).
app.use(express.json({ limit: "2mb", verify: (req: any, _res, buf) => { req.rawBody = buf.toString(); } }));
// Per-request tracing middleware: attach trace/request ids plus timing info,
// and emit one structured access-log line when the response finishes.
app.use((req: any, res, next) => {
  // Node exposes a repeated header as string[]; the previous `as string` cast
  // would have logged an array as trace_id. Take the first value instead.
  const rawTrace = req.headers["x-agx-trace-id"];
  const traceId = (Array.isArray(rawTrace) ? rawTrace[0] : rawTrace) || randomUUID();
  req.context = { trace_id: traceId, request_id: randomUUID(), start_ts: Date.now(), route: req.originalUrl };
  const start = req.context.start_ts;
  res.on("finish", () => {
    logger.info({
      trace_id: req.context.trace_id,
      request_id: req.context.request_id,
      route: req.originalUrl,
      method: req.method,
      status: res.statusCode,
      duration_ms: Date.now() - start,
      ip: req.ip,
      user_agent: req.headers["user-agent"],
      replay: req.headers["x-agx-replay"] || ""
    }, "request");
  });
  next();
});
// NOTE(review): pino-http also logs each request completion, so every request
// appears to be logged twice (here and by pino-http) — confirm that this
// duplication is intentional.
app.use(pinoHttp({ logger }));
// Prometheus registry scoped to this service (avoids the global default
// registry) with the standard Node process metrics enabled.
const register = new promClient.Registry();
promClient.collectDefaultMetrics({ register });

// Liveness and readiness probes.
app.get("/healthz", (_req, res) => res.json({ status: "ok" }));
app.get("/readyz", (_req, res) => res.json({ status: "ready" }));

// Prometheus scrape endpoint.
app.get("/metrics", async (_req, res) => {
  res.set("Content-Type", register.contentType);
  const body = await register.metrics();
  res.end(body);
});
// Replay dead-lettered writes back into FarmOS. Each DLQ entry is routed to
// the writer matching its recorded type; unknown types are skipped, exactly
// as the original per-type if-chain did.
app.post("/dlq/replay", async (req: any, res) => {
  const sig = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
  if (!sig.ok) return res.status(401).json({ ok: false, error: sig.reason });
  const writers: Record<string, (payload: any, traceId: any, dedupKey: any) => Promise<any>> = {
    observation: writeObservation,
    event: writeEvent,
    tasklog: writeTaskLog,
    inventory: writeInventoryMovement,
  };
  await replay(async (entry) => {
    logger.info({ trace_id: entry.trace_id, route: entry.route, replay: true }, "dlq_replay");
    const write = writers[entry.type];
    if (write) await write(entry.payload, entry.trace_id, entry.dedup_key);
  });
  res.json({ ok: true });
});
// Ingest a ThingsBoard telemetry push: one observation per telemetry key.
// Each metric is deduplicated, published to NATS, then written to FarmOS;
// a failed FarmOS write is parked in the DLQ for later replay. The response
// reports a per-metric status ("ok" | "duplicate" | "dlq").
app.post("/ingest/thingsboard", async (req, res) => {
  try {
    const sig = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
    if (!sig.ok) return res.status(401).json({ ok: false, error: sig.reason });
    const body = req.body || {};
    const deviceId = String(body.deviceId || "unknown");
    const ts = Number(body.ts || Date.now());
    const telemetry = body.telemetry || {};
    const source = "thingsboard";
    // Map the external device id to a FarmOS asset id when a mapping exists.
    const mappedAssetId = resolveAssetId(deviceId);
    const assetRef = body.assetRef || { source, deviceId, assetId: mappedAssetId };
    const results: Array<{ metric: string; status: string }> = [];
    for (const metric of Object.keys(telemetry)) {
      const obs = ObservationSchema.parse({
        assetRef,
        metric,
        value: telemetry[metric],
        ts,
        source
      });
      const key = buildDedupKey(source, deviceId, metric, ts, telemetry[metric]);
      if (await isDuplicate(key)) {
        results.push({ metric, status: "duplicate" });
        continue;
      }
      await publish(`agx.obs.${metric}`, obs);
      try {
        await writeObservation(obs, req.context?.trace_id, key);
        results.push({ metric, status: "ok" });
      } catch (e: any) {
        // FarmOS write failed: park the observation in the DLQ so /dlq/replay
        // can retry it, and surface "dlq" for this metric.
        await enqueue("observation", obs, { trace_id: req.context?.trace_id, request_id: req.context?.request_id, route: req.originalUrl, dedup_key: key, error_class: "FarmOSWriteError", error_message: e.message || "write_failed" });
        results.push({ metric, status: "dlq" });
      }
    }
    res.json({ ok: true, results });
  } catch (err: any) {
    // Fix: the previous code logged the same error twice (once as
    // "ThingsBoard ingest failed", once as "request_failed"). Log it once,
    // with the same message the other write routes use.
    logWithReq(req, "error", "request_failed", { error: err.message });
    res.status(400).json({ ok: false, error: err.message });
  }
});
// Accept one validated observation and persist it to FarmOS. Duplicate
// submissions (same dedup key) are acknowledged without re-writing; FarmOS
// write failures are parked in the DLQ for later replay.
app.post("/write/observation", async (req, res) => {
  try {
    const signature = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
    if (!signature.ok) {
      return res.status(401).json({ ok: false, error: signature.reason });
    }
    const obs = ObservationSchema.parse(req.body);
    const dedupKey = buildDedupKey(obs.source || "manual", obs.assetRef.deviceId || "na", obs.metric, obs.ts, obs.value);
    if (await isDuplicate(dedupKey)) return res.json({ ok: true, status: "duplicate" });
    await publish(`agx.obs.${obs.metric}`, obs);
    try {
      await writeObservation(obs, req.context?.trace_id, dedupKey);
      res.json({ ok: true });
    } catch (writeErr: any) {
      await enqueue("observation", obs, {
        trace_id: req.context?.trace_id,
        request_id: req.context?.request_id,
        route: req.originalUrl,
        dedup_key: dedupKey,
        error_class: "FarmOSWriteError",
        error_message: writeErr.message || "write_failed",
      });
      res.json({ ok: false, status: "dlq" });
    }
  } catch (err: any) {
    logWithReq(req, "error", "request_failed", { error: err.message });
    res.status(400).json({ ok: false, error: err.message });
  }
});
// Accept one validated event and persist it to FarmOS. Duplicates are
// acknowledged without re-writing; write failures go to the DLQ.
app.post("/write/event", async (req, res) => {
  try {
    const signature = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
    if (!signature.ok) {
      return res.status(401).json({ ok: false, error: signature.reason });
    }
    const evt = EventSchema.parse(req.body);
    const dedupKey = buildDedupKey(evt.assetRef.source || "manual", evt.assetRef.deviceId || "na", evt.type, evt.ts, evt.payload || "");
    if (await isDuplicate(dedupKey)) return res.json({ ok: true, status: "duplicate" });
    await publish(`agx.event.${evt.type}`, evt);
    try {
      await writeEvent(evt, req.context?.trace_id, dedupKey);
      res.json({ ok: true });
    } catch (writeErr: any) {
      await enqueue("event", evt, {
        trace_id: req.context?.trace_id,
        request_id: req.context?.request_id,
        route: req.originalUrl,
        dedup_key: dedupKey,
        error_class: "FarmOSWriteError",
        error_message: writeErr.message || "write_failed",
      });
      res.json({ ok: false, status: "dlq" });
    }
  } catch (err: any) {
    logWithReq(req, "error", "request_failed", { error: err.message });
    res.status(400).json({ ok: false, error: err.message });
  }
});
// Accept one validated task log and persist it to FarmOS. Duplicates are
// acknowledged without re-writing; write failures go to the DLQ.
app.post("/write/tasklog", async (req, res) => {
  try {
    const signature = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
    if (!signature.ok) {
      return res.status(401).json({ ok: false, error: signature.reason });
    }
    const task = TaskLogSchema.parse(req.body);
    const dedupKey = buildDedupKey(task.assetRef.source || "manual", task.assetRef.deviceId || "na", task.task, task.ts, task.status);
    if (await isDuplicate(dedupKey)) return res.json({ ok: true, status: "duplicate" });
    await publish(`agx.task.${task.task}`, task);
    try {
      await writeTaskLog(task, req.context?.trace_id, dedupKey);
      res.json({ ok: true });
    } catch (writeErr: any) {
      await enqueue("tasklog", task, {
        trace_id: req.context?.trace_id,
        request_id: req.context?.request_id,
        route: req.originalUrl,
        dedup_key: dedupKey,
        error_class: "FarmOSWriteError",
        error_message: writeErr.message || "write_failed",
      });
      res.json({ ok: false, status: "dlq" });
    }
  } catch (err: any) {
    logWithReq(req, "error", "request_failed", { error: err.message });
    res.status(400).json({ ok: false, error: err.message });
  }
});
// Accept one validated inventory movement and persist it to FarmOS.
// Duplicates are acknowledged without re-writing; write failures go to the DLQ.
app.post("/write/inventory", async (req, res) => {
  try {
    const signature = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
    if (!signature.ok) {
      return res.status(401).json({ ok: false, error: signature.reason });
    }
    const mov = InventoryMovementSchema.parse(req.body);
    const dedupKey = buildDedupKey(mov.assetRef.source || "manual", mov.assetRef.deviceId || "na", mov.item, mov.ts, mov.quantity);
    if (await isDuplicate(dedupKey)) return res.json({ ok: true, status: "duplicate" });
    await publish(`agx.inv.${mov.item}`, mov);
    try {
      await writeInventoryMovement(mov, req.context?.trace_id, dedupKey);
      res.json({ ok: true });
    } catch (writeErr: any) {
      await enqueue("inventory", mov, {
        trace_id: req.context?.trace_id,
        request_id: req.context?.request_id,
        route: req.originalUrl,
        dedup_key: dedupKey,
        error_class: "FarmOSWriteError",
        error_message: writeErr.message || "write_failed",
      });
      res.json({ ok: false, status: "dlq" });
    }
  } catch (err: any) {
    logWithReq(req, "error", "request_failed", { error: err.message });
    res.status(400).json({ ok: false, error: err.message });
  }
});
const port = Number(process.env.PORT || 8800);

/**
 * Service entry point: initialise the dedup store, run dedup cleanup, load
 * device-to-asset mappings, connect to NATS, then bind the HTTP listener.
 */
async function start(): Promise<void> {
  initDedupStore();
  cleanupDedup();
  loadMappings();
  await initNats();
  app.listen(port, () => logger.info({ port }, "integration-service started"));
}

// Any startup failure is fatal: log it and exit non-zero.
start().catch((err) => {
  logger.error({ err }, "Failed to start integration-service");
  process.exit(1);
});