Files
microdao-daarion/packages/agromatrix-tools/agromatrix_tools/tool_integration_write.py
Apply ef3473db21 snapshot: NODE1 production state 2026-02-09
Complete snapshot of /opt/microdao-daarion/ from NODE1 (144.76.224.179).
This represents the actual running production code that has diverged
significantly from the previous main branch.

Key changes from old main:
- Gateway (http_api.py): expanded from ~40KB to 164KB with full agent support
- Router: new /v1/agents/{id}/infer endpoint with vision + DeepSeek routing
- Behavior Policy: SOWA v2.2 (3-level: FULL/ACK/SILENT)
- Agent Registry: config/agent_registry.yml as single source of truth
- 13 agents configured (was 3)
- Memory service integration
- CrewAI teams and roles

Excluded from snapshot: venv/, .env, data/, backups, .tgz archives

Co-authored-by: Cursor <cursoragent@cursor.com>
2026-02-09 08:46:46 -08:00

76 lines
2.7 KiB
Python

import os
import json
import hmac
import time
import uuid
import hashlib
import requests
from .audit import audit_tool_call
INTEGRATION_BASE_URL = os.getenv("INTEGRATION_BASE_URL", "http://localhost:8800")
AGX_HMAC_SECRET = os.getenv("AGX_HMAC_SECRET", "")
def _sign(body: dict):
if not AGX_HMAC_SECRET:
return {}, json.dumps(body)
ts = str(int(time.time() * 1000))
nonce = str(uuid.uuid4())
body_json = json.dumps(body, separators=(",", ":"), sort_keys=True)
payload = f"{ts}.{nonce}.{body_json}"
sig = hmac.new(AGX_HMAC_SECRET.encode(), payload.encode(), hashlib.sha256).hexdigest()
headers = {
"X-AGX-SIGNATURE": sig,
"X-AGX-TIMESTAMP": ts,
"X-AGX-NONCE": nonce,
"Content-Type": "application/json"
}
trace_id = os.getenv("AGX_TRACE_ID", "")
if trace_id:
headers["X-AGX-TRACE-ID"] = trace_id
return headers, body_json
def write_observation(assetRef: dict, observation: dict):
    """POST an observation record to the integration service.

    Merges *assetRef* into the observation payload, signs it via
    ``_sign`` when a secret is configured, sends it, audits the call
    with its wall-clock duration, and returns the decoded JSON reply.

    Raises ``requests.HTTPError`` on a non-2xx response.
    """
    started = time.time()
    merged = {"assetRef": assetRef, **observation}
    headers, raw_body = _sign(merged)
    resp = requests.post(
        f"{INTEGRATION_BASE_URL}/write/observation",
        data=raw_body,
        headers=headers,
        timeout=20,
    )
    resp.raise_for_status()
    result = resp.json()
    elapsed_ms = int((time.time() - started) * 1000)
    audit_tool_call(
        "tool_integration_write.write_observation",
        {"assetRef": assetRef},
        {"ok": True},
        True,
        elapsed_ms,
    )
    return result
def write_event(assetRef: dict, event: dict):
    """POST an event record to the integration service.

    Merges *assetRef* into the event payload, signs it via ``_sign``
    when a secret is configured, sends it, audits the call with its
    wall-clock duration, and returns the decoded JSON reply.

    Raises ``requests.HTTPError`` on a non-2xx response.
    """
    started = time.time()
    merged = {"assetRef": assetRef, **event}
    headers, raw_body = _sign(merged)
    resp = requests.post(
        f"{INTEGRATION_BASE_URL}/write/event",
        data=raw_body,
        headers=headers,
        timeout=20,
    )
    resp.raise_for_status()
    result = resp.json()
    elapsed_ms = int((time.time() - started) * 1000)
    audit_tool_call(
        "tool_integration_write.write_event",
        {"assetRef": assetRef},
        {"ok": True},
        True,
        elapsed_ms,
    )
    return result
def write_tasklog(assetRef: dict, tasklog: dict):
    """POST a task-log record to the integration service.

    Merges *assetRef* into the task-log payload, signs it via ``_sign``
    when a secret is configured, sends it, audits the call with its
    wall-clock duration, and returns the decoded JSON reply.

    Raises ``requests.HTTPError`` on a non-2xx response.
    """
    started = time.time()
    merged = {"assetRef": assetRef, **tasklog}
    headers, raw_body = _sign(merged)
    resp = requests.post(
        f"{INTEGRATION_BASE_URL}/write/tasklog",
        data=raw_body,
        headers=headers,
        timeout=20,
    )
    resp.raise_for_status()
    result = resp.json()
    elapsed_ms = int((time.time() - started) * 1000)
    audit_tool_call(
        "tool_integration_write.write_tasklog",
        {"assetRef": assetRef},
        {"ok": True},
        True,
        elapsed_ms,
    )
    return result
def write_inventory_movement(assetRef: dict, movement: dict):
    """POST an inventory-movement record to the integration service.

    Merges *assetRef* into the movement payload, signs it via ``_sign``
    when a secret is configured, sends it to the ``/write/inventory``
    endpoint, audits the call with its wall-clock duration, and returns
    the decoded JSON reply.

    Raises ``requests.HTTPError`` on a non-2xx response.
    """
    started = time.time()
    merged = {"assetRef": assetRef, **movement}
    headers, raw_body = _sign(merged)
    resp = requests.post(
        f"{INTEGRATION_BASE_URL}/write/inventory",
        data=raw_body,
        headers=headers,
        timeout=20,
    )
    resp.raise_for_status()
    result = resp.json()
    elapsed_ms = int((time.time() - started) * 1000)
    audit_tool_call(
        "tool_integration_write.write_inventory_movement",
        {"assetRef": assetRef},
        {"ok": True},
        True,
        elapsed_ms,
    )
    return result