Complete snapshot of /opt/microdao-daarion/ from NODE1 (144.76.224.179).
This represents the actual running production code that has diverged
significantly from the previous main branch.
Key changes from old main:
- Gateway (http_api.py): expanded from ~40KB to 164KB with full agent support
- Router: new /v1/agents/{id}/infer endpoint with vision + DeepSeek routing
- Behavior Policy: SOWA v2.2 (3-level: FULL/ACK/SILENT)
- Agent Registry: config/agent_registry.yml as single source of truth
- 13 agents configured (was 3)
- Memory service integration
- CrewAI teams and roles
Excluded from snapshot: venv/, .env, data/, backups, .tgz archives
Co-authored-by: Cursor <cursoragent@cursor.com>
66 lines
2.0 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
"""Generate markdown summary from Loki logs."""
|
|
import datetime as dt
|
|
import os
|
|
from pathlib import Path
|
|
from typing import Any, Dict
|
|
|
|
import requests
|
|
|
|
# Base URL of the Loki instance to query (override with LOKI_URL).
LOKI_URL = os.getenv("LOKI_URL", "http://localhost:3100")

# Directory where the generated markdown summaries are written.
OUTPUT_DIR = Path(os.getenv("LAB_NOTES_DIR", "lab-notes"))

# LogQL stream selector used for the summary query.
DEFAULT_QUERY = os.getenv("LOKI_QUERY", '{job="docker"}')
|
def query_loki(query: str, start: dt.datetime, end: dt.datetime) -> Dict[str, Any]:
    """Run a Loki ``query_range`` request and return the decoded JSON body.

    Raises ``requests.HTTPError`` on a non-2xx response.
    """

    def to_nanos(moment: dt.datetime) -> int:
        # Loki's HTTP API expects Unix timestamps in nanoseconds.
        return int(moment.timestamp() * 1_000_000_000)

    base = LOKI_URL.rstrip('/')
    endpoint = f"{base}/loki/api/v1/query_range"
    resp = requests.get(
        endpoint,
        params={
            "query": query,
            "start": to_nanos(start),
            "end": to_nanos(end),
            # Cap the number of returned entries; override with LOKI_LIMIT.
            "limit": int(os.getenv("LOKI_LIMIT", "5000")),
        },
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()
|
def main() -> None:
    """Query Loki for today's logs and write a markdown summary.

    The summary is written to ``OUTPUT_DIR/log_summary_<date>.md`` (the
    directory is created if missing) and covers the full current UTC day.
    """
    # BUG FIX: utcnow() is deprecated (3.12+) and, worse, the old naive
    # start/end datetimes were interpreted in *local* time by .timestamp()
    # inside query_loki even though the report labels the window "UTC".
    # Build timezone-aware UTC datetimes so the queried window matches.
    today = dt.datetime.now(dt.timezone.utc).date()
    start = dt.datetime.combine(today, dt.time.min, tzinfo=dt.timezone.utc)
    end = dt.datetime.combine(today, dt.time.max, tzinfo=dt.timezone.utc)
    OUTPUT_DIR.mkdir(parents=True, exist_ok=True)

    md_path = OUTPUT_DIR / f"log_summary_{today.isoformat()}.md"
    data = query_loki(DEFAULT_QUERY, start, end)

    streams = data.get("data", {}).get("result", [])
    # Approximate because the query is capped by LOKI_LIMIT.
    total_entries = sum(len(s.get("values", [])) for s in streams)

    # Highlight at most the first five streams returned.
    highlights = []
    for stream in streams[:5]:
        job = stream.get("stream", {}).get("job", "unknown")
        highlights.append(f"- `{job}` → {len(stream.get('values', []))} entries")

    with md_path.open("w", encoding="utf-8") as f:
        f.write(f"# Log Summary — {today.isoformat()}\n\n")
        f.write(f"- Time window: {start.isoformat()} — {end.isoformat()} UTC\n")
        f.write(f"- Loki query: `{DEFAULT_QUERY}`\n")
        f.write(f"- Total entries (approx): **{total_entries}**\n\n")
        if highlights:
            f.write("## Top Streams\n")
            f.write("\n".join(highlights))
            f.write("\n")

    print(f"Written summary to {md_path}")
|
# Script entry point: only run when executed directly, not when imported.
if __name__ == "__main__":
    main()