feat: Add Alateya, Clan, Eonarch agents + fix gateway-router connection

## Agents Added
- Alateya: R&D, biotech, innovations
- Clan (Spirit): Community spirit agent
- Eonarch: Consciousness evolution agent

## Changes
- docker-compose.node1.yml: Added tokens for all 3 new agents
- gateway-bot/http_api.py: Added configs and webhook endpoints
- gateway-bot/clan_prompt.txt: New prompt file
- gateway-bot/eonarch_prompt.txt: New prompt file

## Fixes
- Fixed ROUTER_URL from :9102 to :8000 (internal container port)
- All 9 Telegram agents now working

## Documentation
- Created PROJECT-MASTER-INDEX.md — a single entry point for project documentation
- Added various status documents and scripts

Tokens configured:
- Helion, NUTRA, Agromatrix (existing)
- Alateya, Clan, Eonarch (new)
- Druid, GreenFood, DAARWIZZ (configured)
This commit is contained in:
Apple
2026-01-28 06:40:34 -08:00
parent 4aeb69e7ae
commit 0c8bef82f4
120 changed files with 21905 additions and 425 deletions

View File

@@ -436,20 +436,25 @@ class Database:
fact_key: str,
fact_value: Optional[str] = None,
fact_value_json: Optional[dict] = None,
team_id: Optional[str] = None
team_id: Optional[str] = None,
agent_id: Optional[str] = None
) -> Dict[str, Any]:
"""Create or update a user fact"""
"""Create or update a user fact (isolated by agent_id)"""
import json
# Convert dict to JSON string for asyncpg JSONB
json_value = json.dumps(fact_value_json) if fact_value_json else None
async with self.pool.acquire() as conn:
row = await conn.fetchrow("""
INSERT INTO user_facts (user_id, team_id, fact_key, fact_value, fact_value_json)
VALUES ($1, $2, $3, $4, $5)
ON CONFLICT (user_id, team_id, fact_key)
INSERT INTO user_facts (user_id, team_id, agent_id, fact_key, fact_value, fact_value_json)
VALUES ($1, $2, $3, $4, $5, $6::jsonb)
ON CONFLICT (user_id, team_id, agent_id, fact_key)
DO UPDATE SET
fact_value = EXCLUDED.fact_value,
fact_value_json = EXCLUDED.fact_value_json,
updated_at = NOW()
RETURNING *
""", user_id, team_id, fact_key, fact_value, fact_value_json)
""", user_id, team_id, agent_id, fact_key, fact_value, json_value)
return dict(row) if row else {}
@@ -457,42 +462,58 @@ class Database:
self,
user_id: str,
fact_key: str,
team_id: Optional[str] = None
team_id: Optional[str] = None,
agent_id: Optional[str] = None
) -> Optional[Dict[str, Any]]:
"""Get a specific fact"""
"""Get a specific fact (isolated by agent_id)"""
async with self.pool.acquire() as conn:
# Build query with agent_id filter
query = "SELECT * FROM user_facts WHERE user_id = $1 AND fact_key = $2"
params = [user_id, fact_key]
if team_id:
row = await conn.fetchrow("""
SELECT * FROM user_facts
WHERE user_id = $1 AND fact_key = $2 AND team_id = $3
""", user_id, fact_key, team_id)
query += f" AND team_id = ${len(params) + 1}"
params.append(team_id)
else:
row = await conn.fetchrow("""
SELECT * FROM user_facts
WHERE user_id = $1 AND fact_key = $2 AND team_id IS NULL
""", user_id, fact_key)
query += " AND team_id IS NULL"
if agent_id:
query += f" AND agent_id = ${len(params) + 1}"
params.append(agent_id)
else:
query += " AND agent_id IS NULL"
row = await conn.fetchrow(query, *params)
return dict(row) if row else None
async def list_facts(
    self,
    user_id: str,
    team_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    limit: Optional[int] = None
) -> List[Dict[str, Any]]:
    """List facts for a user, optionally scoped to a team and/or an agent.

    Filters are applied at the database level:
      - team_id given  -> only that team's facts; omitted -> no team filter
        (unlike get_fact/delete_fact, this does NOT restrict to team_id IS NULL)
      - agent_id given -> only that agent's facts (per-agent isolation)
      - limit given    -> cap the number of rows returned

    Returns the matching rows as plain dicts, ordered by fact_key.
    """
    async with self.pool.acquire() as conn:
        # Build the query incrementally; asyncpg uses 1-based $N placeholders,
        # so the next placeholder index is always len(params) + 1.
        query = "SELECT * FROM user_facts WHERE user_id = $1"
        params: list = [user_id]
        if team_id:
            query += f" AND team_id = ${len(params) + 1}"
            params.append(team_id)
        if agent_id:
            query += f" AND agent_id = ${len(params) + 1}"
            params.append(agent_id)
        # fact_key ordering is lexicographic; chat_event keys embed an ISO
        # timestamp, so for chat events this is roughly oldest-first.
        query += " ORDER BY fact_key"
        if limit is not None:
            query += f" LIMIT ${len(params) + 1}"
            params.append(limit)
        rows = await conn.fetch(query, *params)
        return [dict(row) for row in rows]
@@ -500,20 +521,27 @@ class Database:
self,
user_id: str,
fact_key: str,
team_id: Optional[str] = None
team_id: Optional[str] = None,
agent_id: Optional[str] = None
) -> bool:
"""Delete a fact"""
"""Delete a fact (isolated by agent_id)"""
async with self.pool.acquire() as conn:
query = "DELETE FROM user_facts WHERE user_id = $1 AND fact_key = $2"
params = [user_id, fact_key]
if team_id:
result = await conn.execute("""
DELETE FROM user_facts
WHERE user_id = $1 AND fact_key = $2 AND team_id = $3
""", user_id, fact_key, team_id)
query += f" AND team_id = ${len(params) + 1}"
params.append(team_id)
else:
result = await conn.execute("""
DELETE FROM user_facts
WHERE user_id = $1 AND fact_key = $2 AND team_id IS NULL
""", user_id, fact_key)
query += " AND team_id IS NULL"
if agent_id:
query += f" AND agent_id = ${len(params) + 1}"
params.append(agent_id)
else:
query += " AND agent_id IS NULL"
result = await conn.execute(query, *params)
return "DELETE 1" in result

View File

@@ -8,7 +8,7 @@ DAARION Memory Service - FastAPI Application
"""
from contextlib import asynccontextmanager
from typing import List, Optional
from fastapi import Depends
from fastapi import Depends, BackgroundTasks
from uuid import UUID
import structlog
from fastapi import FastAPI, HTTPException, Query
@@ -573,6 +573,323 @@ async def delete_fact(
raise HTTPException(status_code=500, detail=str(e))
# ============================================================================
# AGENT MEMORY (Gateway compatibility endpoint)
# ============================================================================
class AgentMemoryRequest(BaseModel):
    """Request body the Gateway sends when persisting a chat turn.

    Two wire formats are accepted: the newer one (``content`` /
    ``metadata``) and the legacy Gateway one (``body_text`` /
    ``body_json``); the ``get_*`` helpers return whichever is present.
    """
    agent_id: str
    team_id: Optional[str] = None
    channel_id: Optional[str] = None
    user_id: str
    # Support both formats: new (content) and gateway (body_text)
    content: Optional[str] = None
    body_text: Optional[str] = None
    role: str = "user"  # user, assistant, system
    # Support both formats: metadata and body_json
    metadata: Optional[dict] = None
    body_json: Optional[dict] = None
    context: Optional[str] = None
    scope: Optional[str] = None
    kind: Optional[str] = None  # "message", "event", etc.

    def get_content(self) -> str:
        """Return the message text: ``content`` first, then ``body_text``, else ''."""
        if self.content:
            return self.content
        if self.body_text:
            return self.body_text
        return ""

    def get_metadata(self) -> dict:
        """Return metadata: ``metadata`` first, then ``body_json``, else {}."""
        if self.metadata:
            return self.metadata
        if self.body_json:
            return self.body_json
        return {}
@app.post("/agents/{agent_id}/memory")
async def save_agent_memory(agent_id: str, request: AgentMemoryRequest, background_tasks: BackgroundTasks):
    """
    Save a chat turn to memory with the full ingestion pipeline:

    1. Save to PostgreSQL (facts table), isolated per agent_id
    2. Create an embedding and save it to Qdrant (background task)
    3. Update the Knowledge Graph in Neo4j (background task)

    Background-task failures are logged inside the tasks and do not affect
    the HTTP response; any synchronous failure is returned as a 500.
    """
    try:
        from datetime import datetime
        from uuid import uuid4
        # Create a unique key for this conversation event.
        # NOTE(review): utcnow() is a naive UTC timestamp (deprecated since
        # Python 3.12); its isoformat() is embedded in fact_key, so switching
        # to an aware datetime would change the stored key format.
        timestamp = datetime.utcnow().isoformat()
        message_id = str(uuid4())
        fact_key = f"chat_event:{request.channel_id}:{timestamp}"
        # Store as a fact with JSON payload
        content = request.get_content()
        metadata = request.get_metadata()
        # Skip empty messages and photo placeholders — nothing useful to index
        if not content or content.startswith("[Photo:"):
            logger.debug("skipping_empty_or_photo_message", content=content[:50] if content else "")
            return {"status": "ok", "event_id": None, "indexed": False}
        # Determine role from kind/body_json if not explicitly set
        role = request.role
        if request.body_json and request.body_json.get("type") == "agent_response":
            role = "assistant"
        # Full event payload persisted as the fact's JSON value
        event_data = {
            "message_id": message_id,
            "agent_id": agent_id,
            "team_id": request.team_id,
            "channel_id": request.channel_id,
            "user_id": request.user_id,
            "role": role,
            "content": content,
            "metadata": metadata,
            "scope": request.scope,
            "kind": request.kind,
            "timestamp": timestamp
        }
        # 1. Save to PostgreSQL (isolated by agent_id)
        await db.ensure_facts_table()
        result = await db.upsert_fact(
            user_id=request.user_id,
            fact_key=fact_key,
            fact_value_json=event_data,
            team_id=request.team_id,
            agent_id=agent_id  # Agent isolation
        )
        logger.info("agent_memory_saved",
                    agent_id=agent_id,
                    user_id=request.user_id,
                    role=role,
                    channel_id=request.channel_id,
                    content_len=len(content))
        # 2. Index in Qdrant (background task — runs after the response is sent)
        background_tasks.add_task(
            index_message_in_qdrant,
            message_id=message_id,
            content=content,
            agent_id=agent_id,
            user_id=request.user_id,
            channel_id=request.channel_id,
            role=role,
            timestamp=timestamp
        )
        # 3. Update Neo4j graph (background task)
        background_tasks.add_task(
            update_neo4j_graph,
            message_id=message_id,
            content=content,
            agent_id=agent_id,
            user_id=request.user_id,
            channel_id=request.channel_id,
            role=role
        )
        return {
            "status": "ok",
            # presumably "fact_id" is a column of user_facts — TODO confirm
            "event_id": result.get("fact_id") if result else None,
            "message_id": message_id,
            "indexed": True
        }
    except Exception as e:
        logger.error("agent_memory_save_failed", error=str(e), agent_id=agent_id)
        raise HTTPException(status_code=500, detail=str(e))
async def index_message_in_qdrant(
    message_id: str,
    content: str,
    agent_id: str,
    user_id: str,
    channel_id: str,
    role: str,
    timestamp: str
):
    """Index a message in Qdrant for semantic search (isolated by agent_id).

    Runs as a FastAPI background task: all failures are logged and swallowed
    so they never surface to the original HTTP request.
    """
    try:
        from .embedding import get_document_embeddings
        from qdrant_client.http import models as qmodels
        # Skip very short messages — not worth an embedding call
        if len(content) < 10:
            return
        # Generate embedding
        embeddings = await get_document_embeddings([content])
        if not embeddings or not embeddings[0]:
            logger.warning("embedding_failed", message_id=message_id)
            return
        vector = embeddings[0]
        # Use agent-specific collection (isolation!)
        collection_name = f"{agent_id}_messages"
        # Ensure collection exists
        try:
            vector_store.client.get_collection(collection_name)
        except Exception:
            # Create collection if not exists.
            # NOTE(review): get_collection/create_collection is not atomic —
            # two concurrent tasks could race here and the losing create
            # would raise, aborting indexing of that one message.
            vector_store.client.create_collection(
                collection_name=collection_name,
                vectors_config=qmodels.VectorParams(
                    size=len(vector),
                    distance=qmodels.Distance.COSINE
                )
            )
            logger.info("created_collection", collection=collection_name)
        # Save to agent-specific Qdrant collection; message_id (a UUID) is the
        # point id, so re-indexing the same message overwrites the old point.
        vector_store.client.upsert(
            collection_name=collection_name,
            points=[
                qmodels.PointStruct(
                    id=message_id,
                    vector=vector,
                    payload={
                        "message_id": message_id,
                        "agent_id": agent_id,
                        "user_id": user_id,
                        "channel_id": channel_id,
                        "role": role,
                        "content": content,
                        "timestamp": timestamp,
                        "type": "chat_message"
                    }
                )
            ]
        )
        logger.info("message_indexed_qdrant",
                    message_id=message_id,
                    collection=collection_name,
                    content_len=len(content),
                    vector_dim=len(vector))
    except Exception as e:
        logger.error("qdrant_indexing_failed", error=str(e), message_id=message_id)
async def update_neo4j_graph(
    message_id: str,
    content: str,
    agent_id: str,
    user_id: str,
    channel_id: str,
    role: str
):
    """Update the Knowledge Graph in Neo4j (with agent isolation).

    Runs as a FastAPI background task: failures are logged, never raised.
    Merges User/Channel/Agent nodes, records the Message node, and stamps
    agent_id on relationships so graph queries can be filtered per agent.
    """
    try:
        import httpx
        import os
        neo4j_url = os.getenv("NEO4J_HTTP_URL", "http://neo4j:7474")
        neo4j_user = os.getenv("NEO4J_USER", "neo4j")
        # SECURITY(review): hard-coded credential fallback — should come from
        # the environment only; kept for backward compatibility, flagged for removal.
        neo4j_password = os.getenv("NEO4J_PASSWORD", "DaarionNeo4j2026!")
        # Create/update User node and Message relationship
        # IMPORTANT: agent_id is added to relationships for filtering
        cypher = """
        MERGE (u:User {user_id: $user_id})
        ON CREATE SET u.created_at = datetime()
        ON MATCH SET u.last_seen = datetime()
        MERGE (ch:Channel {channel_id: $channel_id})
        ON CREATE SET ch.created_at = datetime()
        MERGE (a:Agent {agent_id: $agent_id})
        ON CREATE SET a.created_at = datetime()
        MERGE (u)-[p:PARTICIPATES_IN {agent_id: $agent_id}]->(ch)
        ON CREATE SET p.first_seen = datetime()
        ON MATCH SET p.last_seen = datetime()
        CREATE (m:Message {
            message_id: $message_id,
            role: $role,
            content_preview: $content_preview,
            agent_id: $agent_id,
            created_at: datetime()
        })
        CREATE (u)-[:SENT {agent_id: $agent_id}]->(m)
        CREATE (m)-[:IN_CHANNEL {agent_id: $agent_id}]->(ch)
        CREATE (m)-[:HANDLED_BY]->(a)
        RETURN m.message_id as id
        """
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.post(
                f"{neo4j_url}/db/neo4j/tx/commit",
                auth=(neo4j_user, neo4j_password),
                json={
                    "statements": [{
                        "statement": cypher,
                        "parameters": {
                            "user_id": user_id,
                            "channel_id": channel_id,
                            "message_id": message_id,
                            "role": role,
                            "content_preview": content[:200] if content else "",
                            "agent_id": agent_id
                        }
                    }]
                }
            )
        if response.status_code == 200:
            # FIX: the transactional endpoint returns HTTP 200 even when the
            # Cypher statement fails — real failures are reported in the
            # "errors" array of the JSON body, so check it before logging success.
            errors = response.json().get("errors") or []
            if errors:
                logger.warning("neo4j_update_failed",
                               status=response.status_code,
                               response=str(errors)[:200])
            else:
                logger.info("neo4j_graph_updated", message_id=message_id, user_id=user_id, agent_id=agent_id)
        else:
            logger.warning("neo4j_update_failed",
                           status=response.status_code,
                           response=response.text[:200])
    except Exception as e:
        logger.error("neo4j_update_error", error=str(e), message_id=message_id)
@app.get("/agents/{agent_id}/memory")
async def get_agent_memory(
    agent_id: str,
    user_id: str = Query(...),
    channel_id: Optional[str] = None,
    limit: int = Query(default=20, le=100)
):
    """
    Get recent chat events for an agent/user (isolated by agent_id).

    Notes:
      - db.list_facts applies `limit` BEFORE the chat_event/channel filter
        below, so fewer than `limit` events may be returned when the user
        also has non-chat facts or events from other channels.
      - Facts come back ordered by fact_key (which embeds an ISO timestamp
        for chat events), i.e. oldest-first within the fetched window.
    """
    import json as json_lib
    try:
        # Query facts filtered by agent_id (database-level filtering)
        facts = await db.list_facts(user_id=user_id, agent_id=agent_id, limit=limit)
        # Keep only chat events, optionally restricted to one channel
        events = []
        for fact in facts:
            if fact.get("fact_key", "").startswith("chat_event:"):
                # fact_value_json may come back as a JSON string or a dict
                event_data = fact.get("fact_value_json", {})
                if isinstance(event_data, str):
                    try:
                        event_data = json_lib.loads(event_data)
                    except (json_lib.JSONDecodeError, TypeError):
                        # FIX: was a bare `except:` — only malformed JSON should
                        # fall back to an empty payload, not every exception.
                        event_data = {}
                if not isinstance(event_data, dict):
                    event_data = {}
                if channel_id is None or event_data.get("channel_id") == channel_id:
                    events.append(event_data)
        return {"events": events[:limit]}
    except Exception as e:
        logger.error("agent_memory_get_failed", error=str(e), agent_id=agent_id)
        raise HTTPException(status_code=500, detail=str(e))
# ============================================================================
# ADMIN
# ============================================================================