feat: Add Alateya, Clan, Eonarch agents + fix gateway-router connection
## Agents Added - Alateya: R&D, biotech, innovations - Clan (Spirit): Community spirit agent - Eonarch: Consciousness evolution agent ## Changes - docker-compose.node1.yml: Added tokens for all 3 new agents - gateway-bot/http_api.py: Added configs and webhook endpoints - gateway-bot/clan_prompt.txt: New prompt file - gateway-bot/eonarch_prompt.txt: New prompt file ## Fixes - Fixed ROUTER_URL from :9102 to :8000 (internal container port) - All 9 Telegram agents now working ## Documentation - Created PROJECT-MASTER-INDEX.md - single entry point - Added various status documents and scripts Tokens configured: - Helion, NUTRA, Agromatrix (existing) - Alateya, Clan, Eonarch (new) - Druid, GreenFood, DAARWIZZ (configured)
This commit is contained in:
@@ -436,20 +436,25 @@ class Database:
|
||||
fact_key: str,
|
||||
fact_value: Optional[str] = None,
|
||||
fact_value_json: Optional[dict] = None,
|
||||
team_id: Optional[str] = None
|
||||
team_id: Optional[str] = None,
|
||||
agent_id: Optional[str] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Create or update a user fact"""
|
||||
"""Create or update a user fact (isolated by agent_id)"""
|
||||
import json
|
||||
# Convert dict to JSON string for asyncpg JSONB
|
||||
json_value = json.dumps(fact_value_json) if fact_value_json else None
|
||||
|
||||
async with self.pool.acquire() as conn:
|
||||
row = await conn.fetchrow("""
|
||||
INSERT INTO user_facts (user_id, team_id, fact_key, fact_value, fact_value_json)
|
||||
VALUES ($1, $2, $3, $4, $5)
|
||||
ON CONFLICT (user_id, team_id, fact_key)
|
||||
INSERT INTO user_facts (user_id, team_id, agent_id, fact_key, fact_value, fact_value_json)
|
||||
VALUES ($1, $2, $3, $4, $5, $6::jsonb)
|
||||
ON CONFLICT (user_id, team_id, agent_id, fact_key)
|
||||
DO UPDATE SET
|
||||
fact_value = EXCLUDED.fact_value,
|
||||
fact_value_json = EXCLUDED.fact_value_json,
|
||||
updated_at = NOW()
|
||||
RETURNING *
|
||||
""", user_id, team_id, fact_key, fact_value, fact_value_json)
|
||||
""", user_id, team_id, agent_id, fact_key, fact_value, json_value)
|
||||
|
||||
return dict(row) if row else {}
|
||||
|
||||
@@ -457,42 +462,58 @@ class Database:
|
||||
self,
|
||||
user_id: str,
|
||||
fact_key: str,
|
||||
team_id: Optional[str] = None
|
||||
team_id: Optional[str] = None,
|
||||
agent_id: Optional[str] = None
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Get a specific fact"""
|
||||
"""Get a specific fact (isolated by agent_id)"""
|
||||
async with self.pool.acquire() as conn:
|
||||
# Build query with agent_id filter
|
||||
query = "SELECT * FROM user_facts WHERE user_id = $1 AND fact_key = $2"
|
||||
params = [user_id, fact_key]
|
||||
|
||||
if team_id:
|
||||
row = await conn.fetchrow("""
|
||||
SELECT * FROM user_facts
|
||||
WHERE user_id = $1 AND fact_key = $2 AND team_id = $3
|
||||
""", user_id, fact_key, team_id)
|
||||
query += f" AND team_id = ${len(params) + 1}"
|
||||
params.append(team_id)
|
||||
else:
|
||||
row = await conn.fetchrow("""
|
||||
SELECT * FROM user_facts
|
||||
WHERE user_id = $1 AND fact_key = $2 AND team_id IS NULL
|
||||
""", user_id, fact_key)
|
||||
query += " AND team_id IS NULL"
|
||||
|
||||
if agent_id:
|
||||
query += f" AND agent_id = ${len(params) + 1}"
|
||||
params.append(agent_id)
|
||||
else:
|
||||
query += " AND agent_id IS NULL"
|
||||
|
||||
row = await conn.fetchrow(query, *params)
|
||||
|
||||
return dict(row) if row else None
|
||||
|
||||
async def list_facts(
    self,
    user_id: str,
    team_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    limit: Optional[int] = None
) -> List[Dict[str, Any]]:
    """List all facts for a user (isolated by agent_id).

    The WHERE clause is built dynamically: team_id and agent_id filters are
    applied only when provided. NOTE(review): unlike get_fact/delete_fact,
    no "IS NULL" fallback is added when a filter is omitted, so omitting
    agent_id returns facts across all agents — confirm this is intended.

    Args:
        user_id: Owner of the facts.
        team_id: Optional team scope filter.
        agent_id: Optional agent scope filter (agent isolation).
        limit: Optional cap on the number of rows returned.

    Returns:
        Fact rows as plain dicts, ordered by fact_key.
    """
    async with self.pool.acquire() as conn:
        query = "SELECT * FROM user_facts WHERE user_id = $1"
        params: list = [user_id]

        if team_id:
            # Positional placeholder index follows the current param count
            query += f" AND team_id = ${len(params) + 1}"
            params.append(team_id)

        if agent_id:
            query += f" AND agent_id = ${len(params) + 1}"
            params.append(agent_id)

        query += " ORDER BY fact_key"

        if limit is not None:
            query += f" LIMIT ${len(params) + 1}"
            params.append(limit)

        rows = await conn.fetch(query, *params)
        return [dict(row) for row in rows]
|
||||
|
||||
@@ -500,20 +521,27 @@ class Database:
|
||||
self,
|
||||
user_id: str,
|
||||
fact_key: str,
|
||||
team_id: Optional[str] = None
|
||||
team_id: Optional[str] = None,
|
||||
agent_id: Optional[str] = None
|
||||
) -> bool:
|
||||
"""Delete a fact"""
|
||||
"""Delete a fact (isolated by agent_id)"""
|
||||
async with self.pool.acquire() as conn:
|
||||
query = "DELETE FROM user_facts WHERE user_id = $1 AND fact_key = $2"
|
||||
params = [user_id, fact_key]
|
||||
|
||||
if team_id:
|
||||
result = await conn.execute("""
|
||||
DELETE FROM user_facts
|
||||
WHERE user_id = $1 AND fact_key = $2 AND team_id = $3
|
||||
""", user_id, fact_key, team_id)
|
||||
query += f" AND team_id = ${len(params) + 1}"
|
||||
params.append(team_id)
|
||||
else:
|
||||
result = await conn.execute("""
|
||||
DELETE FROM user_facts
|
||||
WHERE user_id = $1 AND fact_key = $2 AND team_id IS NULL
|
||||
""", user_id, fact_key)
|
||||
query += " AND team_id IS NULL"
|
||||
|
||||
if agent_id:
|
||||
query += f" AND agent_id = ${len(params) + 1}"
|
||||
params.append(agent_id)
|
||||
else:
|
||||
query += " AND agent_id IS NULL"
|
||||
|
||||
result = await conn.execute(query, *params)
|
||||
|
||||
return "DELETE 1" in result
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ DAARION Memory Service - FastAPI Application
|
||||
"""
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import List, Optional
|
||||
from fastapi import Depends
|
||||
from fastapi import Depends, BackgroundTasks
|
||||
from uuid import UUID
|
||||
import structlog
|
||||
from fastapi import FastAPI, HTTPException, Query
|
||||
@@ -573,6 +573,323 @@ async def delete_fact(
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# AGENT MEMORY (Gateway compatibility endpoint)
|
||||
# ============================================================================
|
||||
|
||||
class AgentMemoryRequest(BaseModel):
    """Request format from Gateway for saving chat history.

    Accepts two wire formats: the new one (content/metadata) and the
    legacy Gateway one (body_text/body_json). Use get_content() /
    get_metadata() instead of reading the fields directly.
    """
    agent_id: str
    team_id: Optional[str] = None
    channel_id: Optional[str] = None
    user_id: str
    # Support both formats: new (content) and gateway (body_text)
    content: Optional[str] = None
    body_text: Optional[str] = None
    role: str = "user"  # user, assistant, system
    # Support both formats: metadata and body_json
    metadata: Optional[dict] = None
    body_json: Optional[dict] = None
    context: Optional[str] = None
    scope: Optional[str] = None
    kind: Optional[str] = None  # "message", "event", etc.

    def get_content(self) -> str:
        """Return the message text from whichever field was populated ("" if neither)."""
        return self.content or self.body_text or ""

    def get_metadata(self) -> dict:
        """Return metadata from whichever field was populated ({} if neither)."""
        return self.metadata or self.body_json or {}
|
||||
|
||||
@app.post("/agents/{agent_id}/memory")
async def save_agent_memory(agent_id: str, request: AgentMemoryRequest, background_tasks: BackgroundTasks):
    """
    Save a chat turn to memory with the full ingestion pipeline:
    1. Save to PostgreSQL (facts table, isolated by agent_id)
    2. Create embedding and save to Qdrant (background task)
    3. Update Knowledge Graph in Neo4j (background task)

    Returns a small status dict; raises HTTPException(500) if the
    synchronous PostgreSQL write fails. Background indexing failures are
    only logged by the tasks themselves.
    """
    try:
        from datetime import datetime
        from uuid import uuid4

        # Create a unique key for this conversation event.
        # NOTE(review): the ISO timestamp is part of fact_key, so two
        # messages in the same channel within the same microsecond would
        # collide on upsert; also utcnow() is naive/deprecated in 3.12 but
        # the string format feeds stored keys — confirm before changing.
        timestamp = datetime.utcnow().isoformat()
        message_id = str(uuid4())
        fact_key = f"chat_event:{request.channel_id}:{timestamp}"

        # Store as a fact with JSON payload
        content = request.get_content()
        metadata = request.get_metadata()

        # Skip empty messages and photo placeholders — nothing to index
        if not content or content.startswith("[Photo:"):
            logger.debug("skipping_empty_or_photo_message", content=content[:50] if content else "")
            return {"status": "ok", "event_id": None, "indexed": False}

        # Determine role from body_json if not explicitly set
        role = request.role
        if request.body_json and request.body_json.get("type") == "agent_response":
            role = "assistant"

        event_data = {
            "message_id": message_id,
            "agent_id": agent_id,
            "team_id": request.team_id,
            "channel_id": request.channel_id,
            "user_id": request.user_id,
            "role": role,
            "content": content,
            "metadata": metadata,
            "scope": request.scope,
            "kind": request.kind,
            "timestamp": timestamp
        }

        # 1. Save to PostgreSQL (isolated by agent_id)
        await db.ensure_facts_table()
        result = await db.upsert_fact(
            user_id=request.user_id,
            fact_key=fact_key,
            fact_value_json=event_data,
            team_id=request.team_id,
            agent_id=agent_id  # Agent isolation
        )

        logger.info("agent_memory_saved",
                    agent_id=agent_id,
                    user_id=request.user_id,
                    role=role,
                    channel_id=request.channel_id,
                    content_len=len(content))

        # 2. Index in Qdrant (async background task)
        background_tasks.add_task(
            index_message_in_qdrant,
            message_id=message_id,
            content=content,
            agent_id=agent_id,
            user_id=request.user_id,
            channel_id=request.channel_id,
            role=role,
            timestamp=timestamp
        )

        # 3. Update Neo4j graph (async background task)
        background_tasks.add_task(
            update_neo4j_graph,
            message_id=message_id,
            content=content,
            agent_id=agent_id,
            user_id=request.user_id,
            channel_id=request.channel_id,
            role=role
        )

        return {
            "status": "ok",
            "event_id": result.get("fact_id") if result else None,
            "message_id": message_id,
            "indexed": True
        }

    except Exception as e:
        logger.error("agent_memory_save_failed", error=str(e), agent_id=agent_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
async def index_message_in_qdrant(
    message_id: str,
    content: str,
    agent_id: str,
    user_id: str,
    channel_id: str,
    role: str,
    timestamp: str
):
    """Index a message in Qdrant for semantic search (isolated by agent_id).

    Runs as a FastAPI background task: all failures are logged and
    swallowed so a Qdrant/embedding outage cannot fail the originating
    HTTP request. Each agent gets its own collection named
    "<agent_id>_messages", created lazily on first use.
    """
    try:
        from .embedding import get_document_embeddings
        from qdrant_client.http import models as qmodels

        # Skip very short messages — not worth an embedding call
        if len(content) < 10:
            return

        # Generate embedding
        embeddings = await get_document_embeddings([content])
        if not embeddings or not embeddings[0]:
            logger.warning("embedding_failed", message_id=message_id)
            return

        vector = embeddings[0]

        # Use agent-specific collection (isolation!)
        collection_name = f"{agent_id}_messages"

        # Ensure collection exists; create on first miss.
        # Vector size is taken from the actual embedding dimensionality.
        try:
            vector_store.client.get_collection(collection_name)
        except Exception:
            vector_store.client.create_collection(
                collection_name=collection_name,
                vectors_config=qmodels.VectorParams(
                    size=len(vector),
                    distance=qmodels.Distance.COSINE
                )
            )
            logger.info("created_collection", collection=collection_name)

        # Save to agent-specific Qdrant collection
        vector_store.client.upsert(
            collection_name=collection_name,
            points=[
                qmodels.PointStruct(
                    id=message_id,
                    vector=vector,
                    payload={
                        "message_id": message_id,
                        "agent_id": agent_id,
                        "user_id": user_id,
                        "channel_id": channel_id,
                        "role": role,
                        "content": content,
                        "timestamp": timestamp,
                        "type": "chat_message"
                    }
                )
            ]
        )

        logger.info("message_indexed_qdrant",
                    message_id=message_id,
                    collection=collection_name,
                    content_len=len(content),
                    vector_dim=len(vector))

    except Exception as e:
        logger.error("qdrant_indexing_failed", error=str(e), message_id=message_id)
|
||||
|
||||
|
||||
async def update_neo4j_graph(
    message_id: str,
    content: str,
    agent_id: str,
    user_id: str,
    channel_id: str,
    role: str
):
    """Update the Knowledge Graph in Neo4j (with agent isolation).

    Runs as a background task via Neo4j's HTTP transaction endpoint:
    merges User/Channel/Agent nodes, creates a Message node (content
    truncated to 200 chars) and the SENT / IN_CHANNEL / HANDLED_BY
    relationships, tagging relationships with agent_id for filtering.
    Failures are logged, never raised.
    """
    try:
        import httpx
        import os

        neo4j_url = os.getenv("NEO4J_HTTP_URL", "http://neo4j:7474")
        neo4j_user = os.getenv("NEO4J_USER", "neo4j")
        # NOTE(review): hardcoded credential fallback — should come only
        # from the environment/secret store; remove the default.
        neo4j_password = os.getenv("NEO4J_PASSWORD", "DaarionNeo4j2026!")

        # Create/update User node and Message relationship
        # IMPORTANT: agent_id is added to relationships for filtering
        cypher = """
        MERGE (u:User {user_id: $user_id})
        ON CREATE SET u.created_at = datetime()
        ON MATCH SET u.last_seen = datetime()

        MERGE (ch:Channel {channel_id: $channel_id})
        ON CREATE SET ch.created_at = datetime()

        MERGE (a:Agent {agent_id: $agent_id})
        ON CREATE SET a.created_at = datetime()

        MERGE (u)-[p:PARTICIPATES_IN {agent_id: $agent_id}]->(ch)
        ON CREATE SET p.first_seen = datetime()
        ON MATCH SET p.last_seen = datetime()

        CREATE (m:Message {
            message_id: $message_id,
            role: $role,
            content_preview: $content_preview,
            agent_id: $agent_id,
            created_at: datetime()
        })

        CREATE (u)-[:SENT {agent_id: $agent_id}]->(m)
        CREATE (m)-[:IN_CHANNEL {agent_id: $agent_id}]->(ch)
        CREATE (m)-[:HANDLED_BY]->(a)

        RETURN m.message_id as id
        """

        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.post(
                f"{neo4j_url}/db/neo4j/tx/commit",
                auth=(neo4j_user, neo4j_password),
                json={
                    "statements": [{
                        "statement": cypher,
                        "parameters": {
                            "user_id": user_id,
                            "channel_id": channel_id,
                            "message_id": message_id,
                            "role": role,
                            "content_preview": content[:200] if content else "",
                            "agent_id": agent_id
                        }
                    }]
                }
            )

        if response.status_code == 200:
            logger.info("neo4j_graph_updated", message_id=message_id, user_id=user_id, agent_id=agent_id)
        else:
            logger.warning("neo4j_update_failed",
                           status=response.status_code,
                           response=response.text[:200])

    except Exception as e:
        logger.error("neo4j_update_error", error=str(e), message_id=message_id)
|
||||
|
||||
|
||||
@app.get("/agents/{agent_id}/memory")
async def get_agent_memory(
    agent_id: str,
    user_id: str = Query(...),
    channel_id: Optional[str] = None,
    limit: int = Query(default=20, le=100)
):
    """
    Get recent chat events for an agent/user (isolated by agent_id).

    Fetches facts filtered by agent_id at the database level, then keeps
    only "chat_event:*" facts, optionally narrowed to one channel.
    """
    import json as json_lib
    try:
        # Query facts filtered by agent_id (database-level filtering)
        facts = await db.list_facts(user_id=user_id, agent_id=agent_id, limit=limit)

        # Filter for chat events from this channel
        events = []
        for fact in facts:
            if fact.get("fact_key", "").startswith("chat_event:"):
                # fact_value_json may be a JSON string or an already-decoded dict
                event_data = fact.get("fact_value_json", {})
                if isinstance(event_data, str):
                    try:
                        event_data = json_lib.loads(event_data)
                    except json_lib.JSONDecodeError:
                        # Malformed payload: skip content rather than fail the request
                        event_data = {}
                if not isinstance(event_data, dict):
                    event_data = {}
                if channel_id is None or event_data.get("channel_id") == channel_id:
                    events.append(event_data)

        return {"events": events[:limit]}

    except Exception as e:
        logger.error("agent_memory_get_failed", error=str(e), agent_id=agent_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# ADMIN
|
||||
# ============================================================================
|
||||
|
||||
177
services/memory-service/identity_endpoints.py
Normal file
177
services/memory-service/identity_endpoints.py
Normal file
@@ -0,0 +1,177 @@
|
||||
# Identity Endpoints for Account Linking (Telegram ↔ Energy Union)
|
||||
# This file is appended to main.py
|
||||
|
||||
import secrets
|
||||
from datetime import datetime, timedelta
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# IDENTITY & ACCOUNT LINKING
|
||||
# ============================================================================
|
||||
|
||||
class LinkStartRequest(BaseModel):
    """Request to start account linking (Energy Union -> Telegram)."""
    account_id: str  # UUID as string
    ttl_minutes: int = 10  # link code lifetime


class LinkStartResponse(BaseModel):
    """One-time link code issued to the dashboard."""
    link_code: str
    expires_at: datetime


class ResolveResponse(BaseModel):
    """Result of resolving a Telegram user to an Energy Union account."""
    account_id: Optional[str] = None  # None when not linked
    linked: bool = False
    linked_at: Optional[datetime] = None
|
||||
|
||||
|
||||
@app.post("/identity/link/start", response_model=LinkStartResponse)
async def start_link(request: LinkStartRequest):
    """
    Generate a one-time link code for account linking.
    This is called from Energy Union dashboard when user clicks "Link Telegram".
    """
    try:
        # Generate secure random code.
        # NOTE(review): .upper() folds case and so reduces entropy of the
        # urlsafe alphabet, and '-'/'_' may still appear — consider
        # secrets.token_hex or a custom alphabet if codes must be A-Z0-9.
        link_code = secrets.token_urlsafe(16)[:20].upper()
        expires_at = datetime.utcnow() + timedelta(minutes=request.ttl_minutes)

        # Store in database
        await db.pool.execute(
            """
            INSERT INTO link_codes (code, account_id, expires_at, generated_via)
            VALUES ($1, $2::uuid, $3, 'api')
            """,
            link_code,
            request.account_id,
            expires_at
        )

        # Log only a prefix of the code — it is a secret
        logger.info("link_code_generated", account_id=request.account_id, code=link_code[:4] + "***")

        return LinkStartResponse(
            link_code=link_code,
            expires_at=expires_at
        )

    except Exception as e:
        logger.error("link_code_generation_failed", error=str(e))
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@app.get("/identity/resolve", response_model=ResolveResponse)
async def resolve_telegram(telegram_user_id: int):
    """
    Resolve Telegram user ID to Energy Union account ID.
    Returns null account_id (linked=False) if no active link exists.
    """
    try:
        row = await db.pool.fetchrow(
            """
            SELECT account_id, linked_at
            FROM account_links
            WHERE telegram_user_id = $1 AND status = 'active'
            """,
            telegram_user_id
        )

        if row:
            return ResolveResponse(
                account_id=str(row['account_id']),
                linked=True,
                linked_at=row['linked_at']
            )
        # No active link for this Telegram user
        return ResolveResponse(linked=False)

    except Exception as e:
        logger.error("telegram_resolve_failed", error=str(e))
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@app.get("/identity/user/{account_id}/timeline")
async def get_user_timeline(
    account_id: str,
    limit: int = Query(default=20, le=100),
    channel: Optional[str] = None
):
    """
    Get user's interaction timeline across all channels.
    Only available for linked accounts.

    BUGFIX: the no-channel branch previously used LIMIT $3 while passing
    only two query parameters (account_id, limit), which made every
    unfiltered request fail; it must be LIMIT $2.
    """
    try:
        if channel:
            rows = await db.pool.fetch(
                """
                SELECT id, channel, channel_id, event_type, summary,
                       metadata, importance_score, event_at
                FROM user_timeline
                WHERE account_id = $1::uuid AND channel = $2
                ORDER BY event_at DESC
                LIMIT $3
                """,
                account_id, channel, limit
            )
        else:
            rows = await db.pool.fetch(
                """
                SELECT id, channel, channel_id, event_type, summary,
                       metadata, importance_score, event_at
                FROM user_timeline
                WHERE account_id = $1::uuid
                ORDER BY event_at DESC
                LIMIT $2
                """,
                account_id, limit
            )

        events = []
        for row in rows:
            events.append({
                "id": str(row['id']),
                "channel": row['channel'],
                "channel_id": row['channel_id'],
                "event_type": row['event_type'],
                "summary": row['summary'],
                "metadata": row['metadata'] or {},
                "importance_score": row['importance_score'],
                "event_at": row['event_at'].isoformat()
            })

        return {"events": events, "account_id": account_id, "count": len(events)}

    except Exception as e:
        logger.error("timeline_fetch_failed", error=str(e), account_id=account_id)
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@app.post("/identity/timeline/add")
async def add_timeline_event(
    account_id: str,
    channel: str,
    channel_id: str,
    event_type: str,
    summary: str,
    metadata: Optional[dict] = None,
    importance_score: float = 0.5
):
    """
    Add an event to user's timeline.
    Called by Gateway when processing messages from linked accounts.
    """
    try:
        import json

        # asyncpg does not encode Python dicts for ::jsonb parameters by
        # default; serialize explicitly (same convention as upsert_fact,
        # which converts dicts via json.dumps for JSONB columns).
        event_id = await db.pool.fetchval(
            """
            SELECT add_timeline_event($1::uuid, $2, $3, $4, $5, $6::jsonb, $7)
            """,
            account_id, channel, channel_id, event_type,
            summary, json.dumps(metadata or {}), importance_score
        )

        return {"event_id": str(event_id), "success": True}

    except Exception as e:
        logger.error("timeline_add_failed", error=str(e))
        raise HTTPException(status_code=500, detail=str(e))
|
||||
210
services/memory-service/kyc_endpoints.py
Normal file
210
services/memory-service/kyc_endpoints.py
Normal file
@@ -0,0 +1,210 @@
|
||||
# KYC Attestation Endpoints (NO PII to LLM)
|
||||
# To be appended to memory-service/app/main.py
|
||||
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# KYC ATTESTATIONS
|
||||
# ============================================================================
|
||||
|
||||
class KYCAttestationUpdate(BaseModel):
    """Incoming KYC attestation update — status flags only, NO PII."""
    account_id: str
    kyc_status: str  # unverified, pending, passed, failed
    kyc_provider: Optional[str] = None
    jurisdiction: Optional[str] = None  # ISO country code
    risk_tier: Optional[str] = "unknown"  # low, medium, high, unknown
    pep_sanctions_flag: bool = False
    wallet_verified: bool = False


class KYCAttestationResponse(BaseModel):
    """KYC attestation as returned to clients — status flags only, NO PII."""
    account_id: str
    kyc_status: str
    kyc_provider: Optional[str]
    jurisdiction: Optional[str]
    risk_tier: str
    pep_sanctions_flag: bool
    wallet_verified: bool
    attested_at: Optional[datetime]
    created_at: datetime
|
||||
|
||||
|
||||
@app.get("/kyc/attestation")
async def get_kyc_attestation(account_id: str) -> KYCAttestationResponse:
    """
    Get KYC attestation for an account.
    Returns status flags only - NO personal data.
    Unknown accounts get a synthetic "unverified" attestation rather than 404.
    """
    try:
        row = await db.pool.fetchrow(
            """
            SELECT * FROM kyc_attestations WHERE account_id = $1::uuid
            """,
            account_id
        )

        if not row:
            # Return default unverified status for accounts with no record
            return KYCAttestationResponse(
                account_id=account_id,
                kyc_status="unverified",
                kyc_provider=None,
                jurisdiction=None,
                risk_tier="unknown",
                pep_sanctions_flag=False,
                wallet_verified=False,
                attested_at=None,
                created_at=datetime.utcnow()
            )

        return KYCAttestationResponse(
            account_id=str(row['account_id']),
            kyc_status=row['kyc_status'],
            kyc_provider=row['kyc_provider'],
            jurisdiction=row['jurisdiction'],
            risk_tier=row['risk_tier'],
            pep_sanctions_flag=row['pep_sanctions_flag'],
            wallet_verified=row['wallet_verified'],
            attested_at=row['attested_at'],
            created_at=row['created_at']
        )

    except Exception as e:
        logger.error(f"Failed to get KYC attestation: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@app.post("/kyc/attestation")
async def update_kyc_attestation(attestation: KYCAttestationUpdate):
    """
    Update KYC attestation for an account.
    Called by KYC provider webhook or admin.

    Validates status/tier against closed sets (400 on violation), then
    upserts the attestation row keyed on account_id.
    """
    try:
        # Validate against the closed status set before touching the DB
        valid_statuses = ['unverified', 'pending', 'passed', 'failed']
        if attestation.kyc_status not in valid_statuses:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid kyc_status. Must be one of: {valid_statuses}"
            )

        valid_tiers = ['low', 'medium', 'high', 'unknown']
        if attestation.risk_tier not in valid_tiers:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid risk_tier. Must be one of: {valid_tiers}"
            )

        # Upsert: one attestation row per account
        await db.pool.execute(
            """
            INSERT INTO kyc_attestations (
                account_id, kyc_status, kyc_provider, jurisdiction,
                risk_tier, pep_sanctions_flag, wallet_verified, attested_at
            ) VALUES ($1::uuid, $2, $3, $4, $5, $6, $7, NOW())
            ON CONFLICT (account_id) DO UPDATE SET
                kyc_status = EXCLUDED.kyc_status,
                kyc_provider = EXCLUDED.kyc_provider,
                jurisdiction = EXCLUDED.jurisdiction,
                risk_tier = EXCLUDED.risk_tier,
                pep_sanctions_flag = EXCLUDED.pep_sanctions_flag,
                wallet_verified = EXCLUDED.wallet_verified,
                attested_at = NOW(),
                updated_at = NOW()
            """,
            attestation.account_id,
            attestation.kyc_status,
            attestation.kyc_provider,
            attestation.jurisdiction,
            attestation.risk_tier,
            attestation.pep_sanctions_flag,
            attestation.wallet_verified
        )

        logger.info(f"KYC attestation updated for account {attestation.account_id}: {attestation.kyc_status}")

        return {"success": True, "account_id": attestation.account_id, "status": attestation.kyc_status}

    except HTTPException:
        # Re-raise validation errors untouched (don't wrap as 500)
        raise
    except Exception as e:
        logger.error(f"Failed to update KYC attestation: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@app.post("/kyc/webhook/provider")
async def kyc_provider_webhook(
    account_id: str,
    status: str,
    provider: str,
    jurisdiction: Optional[str] = None,
    risk_tier: Optional[str] = "unknown",
    pep_flag: bool = False
):
    """
    Webhook endpoint for KYC providers.
    Updates attestation when KYC check completes.
    """
    try:
        # Map provider status vocabulary to our canonical statuses.
        # Unmapped values pass through lowercased and are then rejected
        # by update_kyc_attestation's validation (400).
        status_map = {
            'approved': 'passed',
            'verified': 'passed',
            'rejected': 'failed',
            'denied': 'failed',
            'pending': 'pending',
            'review': 'pending'
        }

        mapped_status = status_map.get(status.lower(), status.lower())

        attestation = KYCAttestationUpdate(
            account_id=account_id,
            kyc_status=mapped_status,
            kyc_provider=provider,
            jurisdiction=jurisdiction,
            risk_tier=risk_tier,
            pep_sanctions_flag=pep_flag,
            wallet_verified=False  # Wallet verification is separate
        )

        # Delegate persistence + validation to the shared update endpoint
        return await update_kyc_attestation(attestation)

    except Exception as e:
        logger.error(f"KYC webhook failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@app.get("/kyc/stats")
async def get_kyc_stats():
    """Get KYC statistics for the platform.

    Single aggregate query over kyc_attestations; FILTER clauses count
    each status bucket in one pass.
    """
    try:
        stats = await db.pool.fetchrow(
            """
            SELECT
                COUNT(*) FILTER (WHERE kyc_status = 'passed') as passed,
                COUNT(*) FILTER (WHERE kyc_status = 'pending') as pending,
                COUNT(*) FILTER (WHERE kyc_status = 'failed') as failed,
                COUNT(*) FILTER (WHERE kyc_status = 'unverified') as unverified,
                COUNT(*) FILTER (WHERE wallet_verified = true) as wallets_verified,
                COUNT(*) FILTER (WHERE pep_sanctions_flag = true) as pep_flagged,
                COUNT(*) as total
            FROM kyc_attestations
            """
        )

        # `or 0` guards against NULL counts on an empty table
        return {
            "passed": stats['passed'] or 0,
            "pending": stats['pending'] or 0,
            "failed": stats['failed'] or 0,
            "unverified": stats['unverified'] or 0,
            "wallets_verified": stats['wallets_verified'] or 0,
            "pep_flagged": stats['pep_flagged'] or 0,
            "total": stats['total'] or 0
        }

    except Exception as e:
        logger.error(f"Failed to get KYC stats: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
285
services/memory-service/org_chat_endpoints.py
Normal file
285
services/memory-service/org_chat_endpoints.py
Normal file
@@ -0,0 +1,285 @@
|
||||
# Org Chat Logging & Decision Extraction Endpoints
|
||||
# To be appended to memory-service/app/main.py
|
||||
|
||||
import re
|
||||
from typing import Optional, List
|
||||
from pydantic import BaseModel
|
||||
from datetime import datetime, date
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# ORG CHAT LOGGING
|
||||
# ============================================================================
|
||||
|
||||
class OrgChatMessageCreate(BaseModel):
    """Payload for logging one Telegram message from an organizational chat."""
    chat_id: int
    chat_type: str  # official_ops, mentor_room, public_community
    chat_title: Optional[str] = None
    message_id: int
    sender_telegram_id: Optional[int] = None
    sender_account_id: Optional[str] = None  # UUID if linked
    sender_username: Optional[str] = None
    sender_display_name: Optional[str] = None
    text: Optional[str] = None
    has_media: bool = False
    media_type: Optional[str] = None
    reply_to_message_id: Optional[int] = None
    message_at: datetime


class DecisionRecord(BaseModel):
    """Structured decision extracted from a chat message."""
    decision: str
    action: Optional[str] = None
    owner: Optional[str] = None
    due_date: Optional[date] = None
    canon_change: bool = False


# Decision extraction patterns: "LABEL: value" lines; the multi-line
# fields (decision/action) run until the next ALL-CAPS label or end of text.
DECISION_PATTERNS = {
    'decision': re.compile(r'DECISION:\s*(.+?)(?=\n[A-Z]+:|$)', re.IGNORECASE | re.DOTALL),
    'action': re.compile(r'ACTION:\s*(.+?)(?=\n[A-Z]+:|$)', re.IGNORECASE | re.DOTALL),
    'owner': re.compile(r'OWNER:\s*(@?\w+)', re.IGNORECASE),
    'due': re.compile(r'DUE:\s*(\d{4}-\d{2}-\d{2}|\d{2}\.\d{2}\.\d{4})', re.IGNORECASE),
    'canon_change': re.compile(r'CANON_CHANGE:\s*(yes|true|так|1)', re.IGNORECASE),
}
|
||||
|
||||
|
||||
def extract_decision_from_text(text: str) -> Optional[DecisionRecord]:
    """Parse a structured DecisionRecord out of free-form message text.

    Returns None unless the text contains a DECISION: marker whose body the
    regex battery can capture. ACTION/OWNER/DUE/CANON_CHANGE fields are
    optional; a malformed DUE date is dropped rather than failing the parse.
    """
    # Cheap pre-filter before running the full regex battery.
    if not text or 'DECISION:' not in text.upper():
        return None

    found = {name: rx.search(text) for name, rx in DECISION_PATTERNS.items()}
    if not found['decision']:
        return None

    parsed_due = None
    if found['due']:
        raw = found['due'].group(1)
        fmt = '%Y-%m-%d' if '-' in raw else '%d.%m.%Y'
        try:
            parsed_due = datetime.strptime(raw, fmt).date()
        except ValueError:
            # Date matched the pattern but is not a real date (e.g. month 13):
            # keep the decision, drop the deadline.
            parsed_due = None

    return DecisionRecord(
        decision=found['decision'].group(1).strip(),
        action=found['action'].group(1).strip() if found['action'] else None,
        owner=found['owner'].group(1) if found['owner'] else None,
        due_date=parsed_due,
        canon_change=found['canon_change'] is not None,
    )
|
||||
|
||||
|
||||
@app.post("/org-chat/message")
async def log_org_chat_message(msg: OrgChatMessageCreate):
    """
    Log a message from an organizational chat.

    Upserts the message on (chat_id, message_id), then runs decision
    extraction on the text; when a DECISION: block is found, a row is also
    inserted into decision_records.

    Returns: {"success", "message_id", "decision_extracted"}.
    Raises: HTTPException 500 on any database failure.
    """
    try:
        # Insert message. The 13 positional args must stay in the exact
        # column order of the INSERT list; $6 is cast to uuid server-side.
        await db.pool.execute(
            """
            INSERT INTO org_chat_messages (
                chat_id, chat_type, chat_title, message_id,
                sender_telegram_id, sender_account_id, sender_username, sender_display_name,
                text, has_media, media_type, reply_to_message_id, message_at
            ) VALUES ($1, $2, $3, $4, $5, $6::uuid, $7, $8, $9, $10, $11, $12, $13)
            ON CONFLICT (chat_id, message_id) DO UPDATE SET
                text = EXCLUDED.text,
                has_media = EXCLUDED.has_media
            """,
            msg.chat_id, msg.chat_type, msg.chat_title, msg.message_id,
            msg.sender_telegram_id, msg.sender_account_id, msg.sender_username, msg.sender_display_name,
            msg.text, msg.has_media, msg.media_type, msg.reply_to_message_id, msg.message_at
        )

        # Try to extract decision; media-only messages (text=None) are skipped.
        decision = None
        if msg.text:
            decision = extract_decision_from_text(msg.text)
            if decision:
                # NOTE(review): no ON CONFLICT here — re-logging the same
                # message may insert a duplicate decision row; verify schema.
                await db.pool.execute(
                    """
                    INSERT INTO decision_records (
                        chat_id, source_message_id, decision, action, owner, due_date, canon_change
                    ) VALUES ($1, $2, $3, $4, $5, $6, $7)
                    """,
                    msg.chat_id, msg.message_id, decision.decision, decision.action,
                    decision.owner, decision.due_date, decision.canon_change
                )
                logger.info(f"Decision extracted from message {msg.message_id} in chat {msg.chat_id}")

        return {
            "success": True,
            "message_id": msg.message_id,
            "decision_extracted": decision is not None
        }

    except Exception as e:
        logger.error(f"Failed to log org chat message: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@app.get("/org-chat/{chat_id}/messages")
async def get_org_chat_messages(
    chat_id: int,
    limit: int = Query(default=50, le=200),
    since: Optional[datetime] = None
):
    """Return the newest messages of an organizational chat.

    When `since` is given, only messages strictly after that timestamp are
    returned. Results are ordered newest-first and capped at `limit`.
    """
    try:
        if since is None:
            rows = await db.pool.fetch(
                """
                SELECT * FROM org_chat_messages
                WHERE chat_id = $1
                ORDER BY message_at DESC LIMIT $2
                """,
                chat_id, limit
            )
        else:
            rows = await db.pool.fetch(
                """
                SELECT * FROM org_chat_messages
                WHERE chat_id = $1 AND message_at > $2
                ORDER BY message_at DESC LIMIT $3
                """,
                chat_id, since, limit
            )

        return {"messages": [dict(r) for r in rows], "count": len(rows)}

    except Exception as e:
        logger.error(f"Failed to get org chat messages: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@app.get("/decisions")
async def get_decisions(
    status: Optional[str] = None,
    chat_id: Optional[int] = None,
    canon_only: bool = False,
    overdue_only: bool = False,
    limit: int = Query(default=20, le=100)
):
    """Get decision records with filters (combined with AND).

    - status: exact match on decision status
    - chat_id: restrict to one chat
    - canon_only: only decisions flagged as canon changes
    - overdue_only: due_date in the past and not completed/cancelled

    Returns matching records newest-first, joined with the source message
    text and sender username when the source message is still stored.
    Raises: HTTPException 500 on database failure.
    """
    try:
        conditions = []
        params = []
        param_idx = 1  # next free $n placeholder number

        if status:
            conditions.append(f"status = ${param_idx}")
            params.append(status)
            param_idx += 1

        # `is not None` (not truthiness) so chat_id=0 is not silently ignored.
        if chat_id is not None:
            conditions.append(f"chat_id = ${param_idx}")
            params.append(chat_id)
            param_idx += 1

        if canon_only:
            conditions.append("canon_change = true")

        if overdue_only:
            conditions.append(f"due_date < ${param_idx} AND status NOT IN ('completed', 'cancelled')")
            params.append(date.today())
            param_idx += 1

        where_clause = " AND ".join(conditions) if conditions else "1=1"
        params.append(limit)  # LIMIT is always the final placeholder ${param_idx}

        rows = await db.pool.fetch(
            f"""
            SELECT dr.*, ocm.text as source_text, ocm.sender_username
            FROM decision_records dr
            LEFT JOIN org_chat_messages ocm ON dr.chat_id = ocm.chat_id AND dr.source_message_id = ocm.message_id
            WHERE {where_clause}
            ORDER BY dr.created_at DESC
            LIMIT ${param_idx}
            """,
            *params
        )

        return {"decisions": [dict(r) for r in rows], "count": len(rows)}

    except Exception as e:
        logger.error(f"Failed to get decisions: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@app.patch("/decisions/{decision_id}/status")
async def update_decision_status(
    decision_id: str,
    status: str,
    updated_by: Optional[str] = None
):
    """Update decision status (pending, in_progress, completed, cancelled).

    Raises:
        HTTPException 400 if `status` is not one of the valid values.
        HTTPException 500 on database failure.
    """
    valid_statuses = ['pending', 'in_progress', 'completed', 'cancelled']
    # BUGFIX: validate BEFORE the try block. Previously the 400 was raised
    # inside `try`, caught by `except Exception`, and re-raised as a 500.
    if status not in valid_statuses:
        raise HTTPException(status_code=400, detail=f"Invalid status. Must be one of: {valid_statuses}")

    try:
        await db.pool.execute(
            """
            UPDATE decision_records
            SET status = $1, status_updated_at = NOW(), status_updated_by = $2
            WHERE id = $3::uuid
            """,
            status, updated_by, decision_id
        )

        return {"success": True, "decision_id": decision_id, "new_status": status}

    except Exception as e:
        logger.error(f"Failed to update decision status: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@app.get("/decisions/summary")
async def get_decisions_summary():
    """Aggregate decisions: per-status counts (with overdue sub-counts),
    total canon changes, and the overall total."""
    try:
        status_rows = await db.pool.fetch(
            """
            SELECT
                status,
                COUNT(*) as count,
                COUNT(*) FILTER (WHERE due_date < CURRENT_DATE AND status NOT IN ('completed', 'cancelled')) as overdue
            FROM decision_records
            GROUP BY status
            """
        )

        by_status = {}
        for row in status_rows:
            by_status[row['status']] = {'count': row['count'], 'overdue': row['overdue']}

        # Count canon changes across all statuses.
        canon_total = await db.pool.fetchval(
            "SELECT COUNT(*) FROM decision_records WHERE canon_change = true"
        )

        grand_total = 0
        for entry in by_status.values():
            grand_total += entry['count']

        return {
            "by_status": by_status,
            "canon_changes": canon_total,
            "total": grand_total
        }

    except Exception as e:
        logger.error(f"Failed to get decisions summary: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
@@ -23,6 +23,7 @@ python-dotenv==1.0.0
|
||||
httpx==0.26.0
|
||||
tenacity==8.2.3
|
||||
structlog==24.1.0
|
||||
PyJWT==2.8.0
|
||||
|
||||
# Token counting
|
||||
tiktoken==0.5.2
|
||||
|
||||
Reference in New Issue
Block a user