snapshot: NODE1 production state 2026-02-09

Complete snapshot of /opt/microdao-daarion/ from NODE1 (144.76.224.179).
This represents the actual running production code that has diverged
significantly from the previous main branch.

Key changes from old main:
- Gateway (http_api.py): expanded from ~40KB to 164KB with full agent support
- Router: new /v1/agents/{id}/infer endpoint with vision + DeepSeek routing
- Behavior Policy: SOWA v2.2 (3-level: FULL/ACK/SILENT)
- Agent Registry: config/agent_registry.yml as single source of truth
- 13 agents configured (was 3)
- Memory service integration
- CrewAI teams and roles

Excluded from snapshot: venv/, .env, data/, backups, .tgz archives

Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
Apple
2026-02-09 08:46:46 -08:00
parent 134c044c21
commit ef3473db21
9473 changed files with 408933 additions and 2769877 deletions

View File

@@ -0,0 +1,9 @@
# Single-stage build: compile TypeScript and run from the same Node 20 Alpine image.
FROM node:20-alpine
WORKDIR /app
# The trailing * lets the COPY succeed even when no lockfile is committed.
COPY package.json package-lock.json* ./
# Dev dependencies are required because `npm run build` invokes tsc.
RUN npm install --production=false
COPY tsconfig.json ./
COPY src ./src
RUN npm run build
# Integration service HTTP port (matches the PORT default in src/index.ts).
EXPOSE 8800
CMD ["npm", "run", "start"]

View File

@@ -0,0 +1,12 @@
{
"schema_version": "1.0",
"devices": {
"demo-device-1": {
"assetId": "",
"metrics": {
"soil_moisture": {"logType": "observation"},
"air_temp": {"logType": "observation"}
}
}
}
}

View File

@@ -0,0 +1,28 @@
{
"name": "agromatrix-integration-service",
"version": "0.1.0",
"private": true,
"main": "dist/index.js",
"scripts": {
"build": "tsc -p tsconfig.json",
"start": "node dist/index.js",
"dev": "ts-node-dev --respawn --transpile-only src/index.ts"
},
"dependencies": {
"axios": "^1.7.7",
"express": "^4.19.2",
"ioredis": "^5.4.1",
"nats": "^2.24.1",
"pino": "^9.4.0",
"pino-http": "^9.0.0",
"prom-client": "^15.1.3",
"zod": "^3.23.8",
"better-sqlite3": "^9.4.3"
},
"devDependencies": {
"@types/express": "^4.17.21",
"@types/node": "^20.14.12",
"ts-node-dev": "^2.0.0",
"typescript": "^5.5.4"
}
}

View File

@@ -0,0 +1,54 @@
import crypto from "crypto";
import Redis from "ioredis";
import Database from "better-sqlite3";
import { logger } from "./logger";
// Dedup entries are retained for 7 days in either backend.
const ttlSeconds = 7 * 24 * 3600;
let redis: Redis | null = null;
let db: Database.Database | null = null;
// Choose the dedup backend: Redis when REDIS_URL is configured, otherwise a
// local SQLite file (DEDUP_DB_PATH, default /data/dedup.sqlite).
export function initDedupStore() {
  const redisUrl = process.env.REDIS_URL;
  if (redisUrl) {
    redis = new Redis(redisUrl);
    logger.info({ redisUrl }, "Dedup store: Redis");
    return;
  }
  const sqlitePath = process.env.DEDUP_DB_PATH || "/data/dedup.sqlite";
  db = new Database(sqlitePath);
  // `key` is the primary key, so duplicate inserts fail fast (see isDuplicate).
  db.exec("CREATE TABLE IF NOT EXISTS dedup (key TEXT PRIMARY KEY, ts INTEGER)");
  logger.info({ dbPath: sqlitePath }, "Dedup store: SQLite");
}
// Returns true when this key was already seen within the TTL window.
// Redis path: atomic SET NX with expiry — "OK" means the key was newly set.
// SQLite path: relies on the PRIMARY KEY constraint rejecting re-inserts.
export async function isDuplicate(key: string): Promise<boolean> {
  if (redis) {
    const result = await redis.set(key, "1", "NX", "EX", ttlSeconds);
    return result !== "OK";
  }
  // Lazily initialise if the caller never ran initDedupStore().
  if (!db) {
    initDedupStore();
    // Fix: lazy init may have selected the Redis backend (REDIS_URL set), in
    // which case `db` is still null — the original fell through to `db!` and
    // crashed with a null dereference. Re-dispatch to the Redis branch.
    if (redis) {
      return isDuplicate(key);
    }
  }
  try {
    const stmt = db!.prepare("INSERT INTO dedup (key, ts) VALUES (?, ?)");
    stmt.run(key, Date.now());
    return false;
  } catch {
    // Constraint violation on the primary key ⇒ duplicate.
    return true;
  }
}
export function cleanupDedup() {
if (!db) return;
const ttlMs = 7 * 24 * 3600 * 1000;
const cutoff = Date.now() - ttlMs;
try {
db.prepare("DELETE FROM dedup WHERE ts < ?").run(cutoff);
} catch {}
}

View File

@@ -0,0 +1,93 @@
import fs from "fs";
import path from "path";
import Redis from "ioredis";
import { logger } from "./logger";
import crypto from "crypto";
// DLQ configuration: enabled by default; entries go to a Redis stream when
// REDIS_URL is set, otherwise to a JSONL file at DLQ_FILE.
const enabled = (process.env.DLQ_ENABLED || "true") === "true";
const redisUrl = process.env.REDIS_URL;
const dlqFile = process.env.DLQ_FILE || "/data/dlq.jsonl";
let redis: Redis | null = null;
// Payload previews are capped at MAX_BYTES and sensitive keys are redacted.
const MAX_BYTES = Number(process.env.AGX_AUDIT_MAX_BYTES || 4096);
const REDACT_KEYS = new Set((process.env.AGX_AUDIT_REDACT_KEYS || "token,secret,password,authorization,cookie,api_key,signature").split(",").map(s => s.trim().toLowerCase()));
// Recursively replace any value whose key (case-insensitively) is on the
// redaction list; arrays and nested objects are walked, primitives pass through.
function sanitize(obj: any): any {
  if (Array.isArray(obj)) {
    return obj.map((item) => sanitize(item));
  }
  if (obj === null || typeof obj !== "object") {
    return obj;
  }
  const masked: any = {};
  for (const key of Object.keys(obj)) {
    masked[key] = REDACT_KEYS.has(key.toLowerCase()) ? "***REDACTED***" : sanitize(obj[key]);
  }
  return masked;
}
function previewPayload(payload: any) {
const sanitized = sanitize(payload);
const raw = JSON.stringify(sanitized);
const size = Buffer.byteLength(raw, "utf-8");
if (size > MAX_BYTES) {
return { preview: raw.slice(0, MAX_BYTES), truncated: true, size };
}
return { preview: raw, truncated: false, size };
}
if (redisUrl) {
redis = new Redis(redisUrl);
}
export async function enqueue(type: string, payload: any, meta: any) {
if (!enabled) return;
const { preview, truncated, size } = previewPayload(payload);
const entry = {
type,
trace_id: meta?.trace_id || "",
request_id: meta?.request_id || "",
route: meta?.route || "",
dedup_key: meta?.dedup_key || "",
error_class: meta?.error_class || "",
error_message: meta?.error_message || "",
ingest_ts: new Date().toISOString(),
payload_size: size,
payload_hash: `sha256:${crypto.createHash("sha256").update(preview).digest("hex")}` ,
payload_truncated: truncated,
payload: preview
};
if (redis) {
await redis.xadd("agx:dlq", "*", "data", JSON.stringify(entry));
return;
}
fs.mkdirSync(path.dirname(dlqFile), { recursive: true });
fs.appendFileSync(dlqFile, JSON.stringify(entry) + "
");
}
export async function replay(handler: (entry: any) => Promise<void>) {
if (redis) {
const entries = await redis.xrange("agx:dlq", "-", "+");
for (const [_id, fields] of entries) {
const dataIdx = fields.findIndex((v) => v === "data");
const raw = dataIdx >= 0 ? fields[dataIdx + 1] : "";
if (!raw) continue;
const entry = JSON.parse(raw);
await handler(entry);
}
await redis.del("agx:dlq");
return;
}
if (!fs.existsSync(dlqFile)) return;
const lines = fs.readFileSync(dlqFile, "utf-8").trim().split("
");
fs.writeFileSync(dlqFile, "");
for (const line of lines) {
if (!line) continue;
const entry = JSON.parse(line);
await handler(entry);
}
}

View File

@@ -0,0 +1,139 @@
import axios from "axios";
import { logger } from "./logger";
import { Observation, Event, TaskLog, InventoryMovement } from "./types";
// Build the Authorization header for farmOS requests: prefer a bearer token
// (FARMOS_TOKEN), fall back to HTTP Basic (FARMOS_USER/FARMOS_PASSWORD),
// otherwise return no auth header at all.
function authHeaders() {
  const bearerToken = process.env.FARMOS_TOKEN;
  if (bearerToken) {
    return { Authorization: `Bearer ${bearerToken}` };
  }
  const username = process.env.FARMOS_USER;
  const password = process.env.FARMOS_PASSWORD;
  if (username && password) {
    const encoded = Buffer.from(`${username}:${password}`).toString("base64");
    return { Authorization: `Basic ${encoded}` };
  }
  return {};
}
// farmOS base URL; all JSON:API paths below are appended to it.
const baseUrl = process.env.FARMOS_BASE_URL || "http://localhost:8080";
// POST a JSON:API document to farmOS, forwarding the trace id when given.
async function postJsonApi(path: string, data: any, trace_id?: string) {
  const headers: any = { "Content-Type": "application/vnd.api+json", ...authHeaders() };
  if (trace_id) {
    headers["X-AGX-TRACE-ID"] = trace_id;
  }
  const response = await axios.post(`${baseUrl}${path}`, data, { headers, timeout: 20000 });
  return response.data;
}
// JSON:API relationship object linking a log to its farmOS asset; undefined
// when no asset id is known, so the log is created without a relationship.
function assetRelationship(assetId?: string) {
  if (!assetId) {
    return undefined;
  }
  const data = [{ type: "asset--asset", id: assetId }];
  return { asset: { data } };
}
// Persist a sensor observation as a farmOS "log--observation" record.
// Returns null (after a warning) when no farmOS credentials are configured;
// rethrows any write failure so the caller can dead-letter the observation.
export async function writeObservation(obs: Observation, trace_id?: string, dedup_key?: string) {
  if (!process.env.FARMOS_TOKEN && !process.env.FARMOS_USER) {
    logger.warn("farmOS credentials missing, skipping write");
    return null;
  }
  const endpoint = "/jsonapi/log/observation";
  const body = {
    data: {
      type: "log--observation",
      attributes: {
        name: `${obs.metric}`,
        timestamp: new Date(obs.ts).toISOString(),
        status: "done",
        notes: `${obs.value}`
      },
      relationships: assetRelationship(obs.assetRef.assetId)
    }
  };
  const startedAt = Date.now();
  try {
    const result = await postJsonApi(endpoint, body, trace_id);
    logger.info({ trace_id, dedup_key, farmos_endpoint: endpoint, attempt: 1, duration_ms: Date.now() - startedAt, success: true }, "farmos_write");
    return result;
  } catch (e: any) {
    logger.error({ trace_id, dedup_key, farmos_endpoint: endpoint, attempt: 1, duration_ms: Date.now() - startedAt, success: false, error: e.message }, "farmos_write_failed");
    throw e;
  }
}
// Record a domain event as a farmOS "log--activity" entry; the event payload
// (if any) is serialized into the notes field.
// NOTE(review): unlike writeObservation there is no credential guard here —
// confirm whether that asymmetry is intentional.
export async function writeEvent(evt: Event, trace_id?: string, dedup_key?: string) {
  const endpoint = "/jsonapi/log/activity";
  const body = {
    data: {
      type: "log--activity",
      attributes: {
        name: evt.type,
        timestamp: new Date(evt.ts).toISOString(),
        status: "done",
        notes: evt.payload ? JSON.stringify(evt.payload) : ""
      },
      relationships: assetRelationship(evt.assetRef.assetId)
    }
  };
  const startedAt = Date.now();
  try {
    const result = await postJsonApi(endpoint, body, trace_id);
    logger.info({ trace_id, dedup_key, farmos_endpoint: endpoint, attempt: 1, duration_ms: Date.now() - startedAt, success: true }, "farmos_write");
    return result;
  } catch (e: any) {
    logger.error({ trace_id, dedup_key, farmos_endpoint: endpoint, attempt: 1, duration_ms: Date.now() - startedAt, success: false, error: e.message }, "farmos_write_failed");
    throw e;
  }
}
// Record a task execution as a farmOS "log--task" entry, preserving the
// caller-supplied status (e.g. "done", "pending") verbatim.
export async function writeTaskLog(task: TaskLog, trace_id?: string, dedup_key?: string) {
  const endpoint = "/jsonapi/log/task";
  const body = {
    data: {
      type: "log--task",
      attributes: {
        name: task.task,
        timestamp: new Date(task.ts).toISOString(),
        status: task.status,
        notes: task.notes || ""
      },
      relationships: assetRelationship(task.assetRef.assetId)
    }
  };
  const startedAt = Date.now();
  try {
    const result = await postJsonApi(endpoint, body, trace_id);
    logger.info({ trace_id, dedup_key, farmos_endpoint: endpoint, attempt: 1, duration_ms: Date.now() - startedAt, success: true }, "farmos_write");
    return result;
  } catch (e: any) {
    logger.error({ trace_id, dedup_key, farmos_endpoint: endpoint, attempt: 1, duration_ms: Date.now() - startedAt, success: false, error: e.message }, "farmos_write_failed");
    throw e;
  }
}
// Record a stock movement as a farmOS "log--inventory" entry; direction,
// quantity and unit are flattened into the notes field.
export async function writeInventoryMovement(mov: InventoryMovement, trace_id?: string, dedup_key?: string) {
  const endpoint = "/jsonapi/log/inventory";
  const body = {
    data: {
      type: "log--inventory",
      attributes: {
        name: mov.item,
        timestamp: new Date(mov.ts).toISOString(),
        status: "done",
        notes: `${mov.direction} ${mov.quantity} ${mov.unit || ""}`.trim()
      },
      relationships: assetRelationship(mov.assetRef.assetId)
    }
  };
  const startedAt = Date.now();
  try {
    const result = await postJsonApi(endpoint, body, trace_id);
    logger.info({ trace_id, dedup_key, farmos_endpoint: endpoint, attempt: 1, duration_ms: Date.now() - startedAt, success: true }, "farmos_write");
    return result;
  } catch (e: any) {
    logger.error({ trace_id, dedup_key, farmos_endpoint: endpoint, attempt: 1, duration_ms: Date.now() - startedAt, success: false, error: e.message }, "farmos_write_failed");
    throw e;
  }
}

View File

@@ -0,0 +1,230 @@
import express from "express";
import { randomUUID } from "crypto";
import pinoHttp from "pino-http";
import { logger } from "./logger";
import { initNats, publish } from "./nats";
import { initDedupStore, buildDedupKey, isDuplicate, cleanupDedup } from "./dedup";
import { verifySignature } from "./security";
import { rateLimit } from "./rate_limit";
import { enqueue, replay } from "./dlq";
import { loadMappings, resolveAssetId } from "./mappings";
import { ObservationSchema, EventSchema, TaskLogSchema, InventoryMovementSchema } from "./types";
import { writeObservation, writeEvent, writeTaskLog, writeInventoryMovement } from "./farmos";
import promClient from "prom-client";
// Augment Express's Request with the per-request tracing context set by the
// middleware below and the raw body captured for HMAC verification.
declare global {
  namespace Express {
    interface Request {
      context?: {
        trace_id: string;    // from X-AGX-TRACE-ID header or freshly generated
        request_id: string;  // unique per request
        start_ts: number;    // ms epoch; used to compute duration_ms in logs
        route?: string;
      };
      rawBody?: string;      // exact request bytes, set in express.json verify
    }
  }
}
// Emit a structured log line enriched with the request's trace/request ids.
function logWithReq(req: any, level: "info"|"warn"|"error", msg: string, extra: any = {}) {
  const { trace_id, request_id } = req.context ?? { trace_id: "", request_id: "" };
  logger[level]({ trace_id, request_id, route: req.originalUrl, ...extra }, msg);
}
const app = express();
// JSON body parsing (2 MB cap). The `verify` hook stores the raw bytes on
// req.rawBody so HMAC signatures can be checked against the exact payload.
app.use(express.json({ limit: "2mb", verify: (req: any, _res, buf) => { req.rawBody = buf.toString(); } }));
// Tracing middleware: reuse the caller's X-AGX-TRACE-ID or generate one, and
// log one structured access line per request when the response finishes.
// NOTE(review): a repeated X-AGX-TRACE-ID header arrives as string[]; the
// `as string` cast below would then mislabel trace_id — confirm clients.
app.use((req: any, res, next) => {
  const traceId = req.headers["x-agx-trace-id"] || randomUUID();
  req.context = { trace_id: traceId as string, request_id: randomUUID(), start_ts: Date.now(), route: req.originalUrl };
  const start = req.context.start_ts;
  res.on("finish", () => {
    logger.info({
      trace_id: req.context.trace_id,
      request_id: req.context.request_id,
      route: req.originalUrl,
      method: req.method,
      status: res.statusCode,
      duration_ms: Date.now() - start,
      ip: req.ip,
      user_agent: req.headers["user-agent"],
      replay: req.headers["x-agx-replay"] || ""
    }, "request");
  });
  next();
});
// pino-http emits its own request/response logging in addition to the
// custom "request" line above.
app.use(pinoHttp({ logger }));
// Prometheus default process metrics, exposed at /metrics.
const register = new promClient.Registry();
promClient.collectDefaultMetrics({ register });
app.get("/healthz", (_req, res) => res.json({ status: "ok" }));
app.get("/readyz", (_req, res) => res.json({ status: "ready" }));
app.get("/metrics", async (_req, res) => {
  res.set("Content-Type", register.contentType);
  res.end(await register.metrics());
});
// HMAC-protected endpoint that replays every dead-lettered entry back into
// farmOS via the original writer for its type.
app.post("/dlq/replay", async (req: any, res) => {
  const sig = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
  if (!sig.ok) return res.status(401).json({ ok: false, error: sig.reason });
  await replay(async (entry) => {
    logger.info({ trace_id: entry.trace_id, route: entry.route, replay: true }, "dlq_replay");
    // Fix: dlq.ts stores the payload as a sanitized JSON *string* preview
    // (see previewPayload), but the writers expect the parsed object — the
    // original handed the string straight through, crashing on field access.
    let payload = entry.payload;
    if (typeof payload === "string") {
      if (entry.payload_truncated) {
        // A truncated preview is not valid JSON and cannot be replayed.
        logger.warn({ trace_id: entry.trace_id, dedup_key: entry.dedup_key }, "dlq_replay_skipped_truncated");
        return;
      }
      payload = JSON.parse(payload);
    }
    if (entry.type === "observation") await writeObservation(payload, entry.trace_id, entry.dedup_key);
    if (entry.type === "event") await writeEvent(payload, entry.trace_id, entry.dedup_key);
    if (entry.type === "tasklog") await writeTaskLog(payload, entry.trace_id, entry.dedup_key);
    if (entry.type === "inventory") await writeInventoryMovement(payload, entry.trace_id, entry.dedup_key);
  });
  res.json({ ok: true });
});
// Bulk telemetry ingest from ThingsBoard: one observation per key in the
// telemetry map. Each metric is independently deduplicated, published to
// NATS, written to farmOS, and dead-lettered on write failure; the response
// reports a per-metric status (ok / duplicate / dlq).
app.post("/ingest/thingsboard", async (req, res) => {
  try {
    const sig = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
    if (!sig.ok) return res.status(401).json({ ok: false, error: sig.reason });
    const body = req.body || {};
    const deviceId = String(body.deviceId || "unknown");
    const ts = Number(body.ts || Date.now());
    const telemetry = body.telemetry || {};
    const source = "thingsboard";
    const mappedAssetId = resolveAssetId(deviceId);
    // A caller-supplied assetRef wins; otherwise derive one from the mapping.
    const assetRef = body.assetRef || { source, deviceId, assetId: mappedAssetId };
    const results = [];
    for (const metric of Object.keys(telemetry)) {
      const obs = ObservationSchema.parse({
        assetRef,
        metric,
        value: telemetry[metric],
        ts,
        source
      });
      const key = buildDedupKey(source, deviceId, metric, ts, telemetry[metric]);
      if (await isDuplicate(key)) {
        results.push({ metric, status: "duplicate" });
        continue;
      }
      await publish(`agx.obs.${metric}`, obs);
      try {
        await writeObservation(obs, req.context?.trace_id, key);
        results.push({ metric, status: "ok" });
      } catch (e: any) {
        await enqueue("observation", obs, { trace_id: req.context?.trace_id, request_id: req.context?.request_id, route: req.originalUrl, dedup_key: key, error_class: "FarmOSWriteError", error_message: e.message || "write_failed" });
        results.push({ metric, status: "dlq" });
      }
    }
    res.json({ ok: true, results });
  } catch (err: any) {
    // Fix: the original logged this failure twice in a row (two logWithReq
    // calls); keep only the machine-readable "request_failed" line used by
    // every other route.
    logWithReq(req, "error", "request_failed", { error: err.message });
    res.status(400).json({ ok: false, error: err.message });
  }
});
// HMAC-protected direct write endpoint for a single observation:
// validate → dedup → publish to NATS → write to farmOS (DLQ on failure).
app.post("/write/observation", async (req, res) => {
  try {
    const sig = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
    if (!sig.ok) return res.status(401).json({ ok: false, error: sig.reason });
    const obs = ObservationSchema.parse(req.body);
    const dedupKey = buildDedupKey(obs.source || "manual", obs.assetRef.deviceId || "na", obs.metric, obs.ts, obs.value);
    if (await isDuplicate(dedupKey)) return res.json({ ok: true, status: "duplicate" });
    await publish(`agx.obs.${obs.metric}`, obs);
    try {
      await writeObservation(obs, req.context?.trace_id, dedupKey);
      return res.json({ ok: true });
    } catch (writeErr: any) {
      await enqueue("observation", obs, {
        trace_id: req.context?.trace_id,
        request_id: req.context?.request_id,
        route: req.originalUrl,
        dedup_key: dedupKey,
        error_class: "FarmOSWriteError",
        error_message: writeErr.message || "write_failed"
      });
      return res.json({ ok: false, status: "dlq" });
    }
  } catch (err: any) {
    logWithReq(req, "error", "request_failed", { error: err.message });
    res.status(400).json({ ok: false, error: err.message });
  }
});
// HMAC-protected direct write endpoint for a domain event:
// validate → dedup → publish to NATS → write to farmOS (DLQ on failure).
app.post("/write/event", async (req, res) => {
  try {
    const sig = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
    if (!sig.ok) return res.status(401).json({ ok: false, error: sig.reason });
    const evt = EventSchema.parse(req.body);
    const dedupKey = buildDedupKey(evt.assetRef.source || "manual", evt.assetRef.deviceId || "na", evt.type, evt.ts, evt.payload || "");
    if (await isDuplicate(dedupKey)) return res.json({ ok: true, status: "duplicate" });
    await publish(`agx.event.${evt.type}`, evt);
    try {
      await writeEvent(evt, req.context?.trace_id, dedupKey);
      return res.json({ ok: true });
    } catch (writeErr: any) {
      await enqueue("event", evt, {
        trace_id: req.context?.trace_id,
        request_id: req.context?.request_id,
        route: req.originalUrl,
        dedup_key: dedupKey,
        error_class: "FarmOSWriteError",
        error_message: writeErr.message || "write_failed"
      });
      return res.json({ ok: false, status: "dlq" });
    }
  } catch (err: any) {
    logWithReq(req, "error", "request_failed", { error: err.message });
    res.status(400).json({ ok: false, error: err.message });
  }
});
// HMAC-protected direct write endpoint for a task log:
// validate → dedup → publish to NATS → write to farmOS (DLQ on failure).
app.post("/write/tasklog", async (req, res) => {
  try {
    const sig = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
    if (!sig.ok) return res.status(401).json({ ok: false, error: sig.reason });
    const task = TaskLogSchema.parse(req.body);
    const dedupKey = buildDedupKey(task.assetRef.source || "manual", task.assetRef.deviceId || "na", task.task, task.ts, task.status);
    if (await isDuplicate(dedupKey)) return res.json({ ok: true, status: "duplicate" });
    await publish(`agx.task.${task.task}`, task);
    try {
      await writeTaskLog(task, req.context?.trace_id, dedupKey);
      return res.json({ ok: true });
    } catch (writeErr: any) {
      await enqueue("tasklog", task, {
        trace_id: req.context?.trace_id,
        request_id: req.context?.request_id,
        route: req.originalUrl,
        dedup_key: dedupKey,
        error_class: "FarmOSWriteError",
        error_message: writeErr.message || "write_failed"
      });
      return res.json({ ok: false, status: "dlq" });
    }
  } catch (err: any) {
    logWithReq(req, "error", "request_failed", { error: err.message });
    res.status(400).json({ ok: false, error: err.message });
  }
});
// HMAC-protected direct write endpoint for an inventory movement:
// validate → dedup → publish to NATS → write to farmOS (DLQ on failure).
app.post("/write/inventory", async (req, res) => {
  try {
    const sig = verifySignature(req.headers, req.rawBody || JSON.stringify(req.body || {}));
    if (!sig.ok) return res.status(401).json({ ok: false, error: sig.reason });
    const mov = InventoryMovementSchema.parse(req.body);
    const dedupKey = buildDedupKey(mov.assetRef.source || "manual", mov.assetRef.deviceId || "na", mov.item, mov.ts, mov.quantity);
    if (await isDuplicate(dedupKey)) return res.json({ ok: true, status: "duplicate" });
    await publish(`agx.inv.${mov.item}`, mov);
    try {
      await writeInventoryMovement(mov, req.context?.trace_id, dedupKey);
      return res.json({ ok: true });
    } catch (writeErr: any) {
      await enqueue("inventory", mov, {
        trace_id: req.context?.trace_id,
        request_id: req.context?.request_id,
        route: req.originalUrl,
        dedup_key: dedupKey,
        error_class: "FarmOSWriteError",
        error_message: writeErr.message || "write_failed"
      });
      return res.json({ ok: false, status: "dlq" });
    }
  } catch (err: any) {
    logWithReq(req, "error", "request_failed", { error: err.message });
    res.status(400).json({ ok: false, error: err.message });
  }
});
// Service bootstrap: dedup store, mappings, NATS, then the HTTP listener.
const port = Number(process.env.PORT || 8800);
async function start() {
  initDedupStore();
  cleanupDedup();
  // Fix: the SQLite dedup table was only pruned once at boot and then grew
  // without bound; prune hourly. unref() keeps the timer from blocking
  // process shutdown. (No-op for the Redis backend — see cleanupDedup.)
  setInterval(cleanupDedup, 60 * 60 * 1000).unref();
  loadMappings();
  await initNats();
  app.listen(port, () => logger.info({ port }, "integration-service started"));
}
start().catch((err) => {
  logger.error({ err }, "Failed to start integration-service");
  process.exit(1);
});

View File

@@ -0,0 +1,9 @@
import pino from "pino";
// Shared pino logger: structured JSON lines with ISO timestamps, tagged with
// the service name. Verbosity comes from LOG_LEVEL (default "info").
export const logger = pino({
  level: process.env.LOG_LEVEL || "info",
  base: { service: "agromatrix-integration" },
  timestamp: pino.stdTimeFunctions.isoTime
});

View File

@@ -0,0 +1,21 @@
import fs from "fs";
// Device → farmOS asset mapping loaded from MAPPING_PATH (JSON). A missing
// or unparsable file yields an empty mapping, so every lookup returns
// undefined rather than failing.
const mappingPath = process.env.MAPPING_PATH || "/app/config/mappings.json";
let mappings: any = {};
export function loadMappings() {
  // (Re)load from disk; any read/parse failure resets to an empty mapping.
  try {
    const raw = fs.readFileSync(mappingPath, "utf-8");
    mappings = JSON.parse(raw);
  } catch {
    mappings = {};
  }
}
export function resolveAssetId(deviceId: string, metric?: string) {
  // A metric-level assetId wins over the device-level default.
  const device = mappings?.devices?.[deviceId];
  if (!device) return undefined;
  const metricAssetId = metric ? device.metrics?.[metric]?.assetId : undefined;
  return metricAssetId || device.assetId || undefined;
}

View File

@@ -0,0 +1,20 @@
import { connect, StringCodec } from "nats";
import { logger } from "./logger";
// Lazily-connected NATS publisher; the server address comes from NATS_URL.
const codec = StringCodec();
let connection: any;
export async function initNats() {
  const url = process.env.NATS_URL || "nats://localhost:4222";
  connection = await connect({ servers: url });
  logger.info({ url }, "NATS connected");
}
// Publish a JSON-encoded payload, connecting on first use so callers need
// not worry about initialisation order.
export async function publish(subject: string, payload: unknown) {
  if (!connection) {
    await initNats();
  }
  connection.publish(subject, codec.encode(JSON.stringify(payload)));
}

View File

@@ -0,0 +1,14 @@
// Fixed-window, in-memory (per-process) rate limiter: up to `limit` calls
// per key per one-minute window; the window resets lazily on first call
// after expiry. Limit is configurable via AGX_RATE_LIMIT_PER_MIN.
const limit = Number(process.env.AGX_RATE_LIMIT_PER_MIN || 120);
const windowMs = 60 * 1000;
const buckets = new Map<string, { count: number; reset: number }>();
export function rateLimit(key: string) {
  const now = Date.now();
  const bucket = buckets.get(key);
  if (bucket === undefined || bucket.reset < now) {
    // Unknown key or expired window: start a fresh bucket.
    buckets.set(key, { count: 1, reset: now + windowMs });
    return { ok: true, remaining: limit - 1 };
  }
  bucket.count += 1;
  // Over-limit calls still increment, keeping `remaining` pinned at 0.
  return { ok: bucket.count <= limit, remaining: Math.max(0, limit - bucket.count) };
}

View File

@@ -0,0 +1,31 @@
import crypto from "crypto";
import { logger } from "./logger";
// HMAC request authentication: signature = HMAC-SHA256(secret, "ts.nonce.body"),
// accepted only within AGX_HMAC_WINDOW_SECONDS of the client timestamp.
// Verification can be disabled entirely via AGX_HMAC_REQUIRED=false.
const secret = process.env.AGX_HMAC_SECRET || "";
const required = (process.env.AGX_HMAC_REQUIRED || "true") === "true";
const windowSeconds = Number(process.env.AGX_HMAC_WINDOW_SECONDS || 300);
export function buildSignature(ts: string, nonce: string, body: string) {
  return crypto.createHmac("sha256", secret).update(`${ts}.${nonce}.${body}`).digest("hex");
}
// Returns { ok, reason }; never throws on malformed client input.
export function verifySignature(headers: any, rawBody: string) {
  if (!required) return { ok: true, reason: "not_required" };
  if (!secret) return { ok: false, reason: "missing_secret" };
  const sig = headers["x-agx-signature"] as string | undefined;
  const ts = headers["x-agx-timestamp"] as string | undefined;
  const nonce = headers["x-agx-nonce"] as string | undefined;
  if (!sig || !ts || !nonce) return { ok: false, reason: "missing_headers" };
  const tsNum = Number(ts);
  const now = Date.now();
  if (Number.isNaN(tsNum) || Math.abs(now - tsNum) > windowSeconds * 1000) {
    return { ok: false, reason: "timestamp_out_of_window" };
  }
  const expected = buildSignature(ts, nonce, rawBody);
  const provided = Buffer.from(sig);
  const reference = Buffer.from(expected);
  // Fix: crypto.timingSafeEqual throws RangeError when buffer lengths differ,
  // so any wrong-length signature previously escaped as an unhandled
  // exception instead of a 401. A length mismatch is simply invalid.
  const ok = provided.length === reference.length && crypto.timingSafeEqual(provided, reference);
  return { ok, reason: ok ? "ok" : "invalid_signature" };
}

View File

@@ -0,0 +1,51 @@
import { z } from "zod";
// Zod schemas validating every payload the integration service accepts.
// schema_version defaults to "1.0" so older producers stay compatible.

// Reference linking a record to a farm asset. assetId is the farmOS JSON:API
// asset id (see farmos.ts assetRelationship); deviceId is the upstream
// device identifier (e.g. from ThingsBoard).
export const AssetRefSchema = z.object({
  source: z.string().default("unknown"),
  assetId: z.string().optional(),
  deviceId: z.string().optional(),
  name: z.string().optional()
});
// A single sensor reading: metric name + scalar value at epoch-ms `ts`.
export const ObservationSchema = z.object({
  assetRef: AssetRefSchema,
  metric: z.string(),
  schema_version: z.string().default("1.0"),
  value: z.union([z.string(), z.number(), z.boolean()]),
  ts: z.number(),
  unit: z.string().optional(),
  source: z.string().optional()
});
// A domain event with an arbitrary key/value payload.
export const EventSchema = z.object({
  assetRef: AssetRefSchema,
  type: z.string(),
  schema_version: z.string().default("1.0"),
  ts: z.number(),
  payload: z.record(z.any()).optional()
});
// A task execution record; `status` is free-form and passed to farmOS as-is.
export const TaskLogSchema = z.object({
  assetRef: AssetRefSchema,
  task: z.string(),
  schema_version: z.string().default("1.0"),
  status: z.string(),
  ts: z.number(),
  notes: z.string().optional()
});
// A stock movement: quantity of `item` moving in or out.
export const InventoryMovementSchema = z.object({
  assetRef: AssetRefSchema,
  item: z.string(),
  schema_version: z.string().default("1.0"),
  quantity: z.number(),
  unit: z.string().optional(),
  direction: z.enum(["in", "out"]),
  ts: z.number()
});
// Inferred TS types — derived from the schemas so they can never drift.
export type AssetRef = z.infer<typeof AssetRefSchema>;
export type Observation = z.infer<typeof ObservationSchema>;
export type Event = z.infer<typeof EventSchema>;
export type TaskLog = z.infer<typeof TaskLogSchema>;
export type InventoryMovement = z.infer<typeof InventoryMovementSchema>;

View File

@@ -0,0 +1,11 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "CommonJS",
"rootDir": "src",
"outDir": "dist",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true
}
}