feat: Add presence heartbeat for Matrix online status
- matrix-gateway: POST /internal/matrix/presence/online endpoint
- usePresenceHeartbeat hook with activity tracking
- Auto away after 5 min inactivity
- Offline on page close/visibility change
- Integrated in MatrixChatRoom component
This commit is contained in:
17
services/living-map-service/Dockerfile
Normal file
17
services/living-map-service/Dockerfile
Normal file
@@ -0,0 +1,17 @@
|
||||
# Runtime image for the Living Map Service (FastAPI app, listens on 7017).
FROM python:3.11-slim

WORKDIR /app

# Install dependencies
# Copy only requirements.txt first so the pip layer is cached by Docker
# unless the dependency list changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application
COPY . .

# Expose port
# 7017 matches the PORT default in main.py.
EXPOSE 7017

# Run application
CMD ["python", "main.py"]
|
||||
|
||||
2
services/living-map-service/adapters/__init__.py
Normal file
2
services/living-map-service/adapters/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# Adapters for Living Map Service
|
||||
|
||||
61
services/living-map-service/adapters/agents_client.py
Normal file
61
services/living-map-service/adapters/agents_client.py
Normal file
@@ -0,0 +1,61 @@
|
||||
"""
|
||||
Agents Service Client Adapter
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
from typing import List, Dict, Any
|
||||
from .base_client import BaseServiceClient
|
||||
import os
|
||||
|
||||
class AgentsClient(BaseServiceClient):
    """Client for agents-service: fetches agent records and renders the
    "agents" layer of the Living Map."""

    def __init__(self):
        super().__init__(os.getenv("AGENTS_SERVICE_URL", "http://localhost:7014"))

    async def get_agents_list(self) -> List[Dict[str, Any]]:
        """Return all agents, or an empty list when the service is unreachable
        or replies with an unexpected (non-list) shape."""
        agents = await self.get_with_fallback("/agents", fallback=[])
        if isinstance(agents, list):
            return agents
        return []

    async def get_agent_metrics_summary(self) -> Dict[str, Any]:
        """Return aggregated agent metrics.

        The endpoint might not exist yet, so a zeroed summary is used as the
        fallback shape.
        """
        zero_summary = {
            "total_agents": 0,
            "online_agents": 0,
            "total_llm_calls_24h": 0,
            "total_tokens_24h": 0
        }
        return await self.get_with_fallback("/agents/metrics/summary", fallback=zero_summary)

    async def get_layer_data(self) -> Dict[str, Any]:
        """Build the agents layer payload for the Living Map."""
        raw_agents = await self.get_agents_list()
        summary = await self.get_agent_metrics_summary()

        # Transform raw agent records into the Living Map item schema.
        items = [self._to_map_item(agent) for agent in raw_agents]
        online_count = sum(1 for item in items if item["status"] == "online")

        return {
            "items": items,
            "total_agents": len(items),
            "online_agents": online_count,
            "total_llm_calls_24h": summary.get("total_llm_calls_24h", 0)
        }

    @staticmethod
    def _to_map_item(agent: Dict[str, Any]) -> Dict[str, Any]:
        """Map one raw agent record onto the Living Map item schema."""
        return {
            "id": agent.get("id", agent.get("external_id", "unknown")),
            "name": agent.get("name", "Unknown Agent"),
            "kind": agent.get("kind", "assistant"),
            "microdao_id": agent.get("microdao_id"),
            "status": "online" if agent.get("is_active") else "offline",
            "usage": {
                "llm_calls_24h": 0,  # TODO: Get from usage-engine
                "tokens_24h": 0,
                "messages_24h": 0
            },
            "model": agent.get("model"),
            "last_active": agent.get("updated_at")
        }
|
||||
|
||||
51
services/living-map-service/adapters/base_client.py
Normal file
51
services/living-map-service/adapters/base_client.py
Normal file
@@ -0,0 +1,51 @@
|
||||
"""
|
||||
Base HTTP Client for service adapters
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
import httpx
|
||||
from typing import Optional, Any
|
||||
import asyncio
|
||||
|
||||
class BaseServiceClient:
    """Base HTTP client with per-request timeout and graceful degradation.

    Subclasses point this at one upstream service. Every failure mode
    (timeout, HTTP error status, anything else) is swallowed and surfaced as
    ``None`` — or a caller-supplied fallback — so the Living Map aggregator
    keeps working when a dependency is down.
    """

    def __init__(self, base_url: str, timeout: float = 5.0):
        # Strip the trailing slash so base_url + path never double-slashes.
        self.base_url = base_url.rstrip('/')
        self.timeout = timeout

    async def get(
        self,
        path: str,
        params: Optional[dict] = None,
        headers: Optional[dict] = None
    ) -> Optional[Any]:
        """GET ``base_url + path`` and return the decoded JSON body.

        Returns:
            Parsed JSON on success, ``None`` on any failure (a warning is
            printed so operators can spot degraded upstreams).
        """
        try:
            # A fresh client per call keeps the adapter stateless; switch to a
            # shared AsyncClient if connection reuse ever becomes a bottleneck.
            async with httpx.AsyncClient(timeout=self.timeout) as client:
                response = await client.get(
                    f"{self.base_url}{path}",
                    params=params,
                    headers=headers or {}
                )
                response.raise_for_status()
                return response.json()
        except httpx.TimeoutException:
            print(f"⚠️ Timeout calling {self.base_url}{path}")
            return None
        except httpx.HTTPError as e:
            print(f"⚠️ HTTP error calling {self.base_url}{path}: {e}")
            return None
        except Exception as e:
            print(f"⚠️ Error calling {self.base_url}{path}: {e}")
            return None

    async def get_with_fallback(
        self,
        path: str,
        fallback: Any,
        params: Optional[dict] = None,
        headers: Optional[dict] = None
    ) -> Any:
        """GET with a fallback value when the request fails.

        ``headers`` is new (default ``None``) so callers can forward auth
        headers, matching ``get()``; omitting it preserves prior behavior.
        Note: only ``None`` (a failed request) triggers the fallback — a
        successful-but-empty response ([] / {}) is returned as-is.
        """
        result = await self.get(path, params, headers)
        return result if result is not None else fallback
|
||||
|
||||
34
services/living-map-service/adapters/city_client.py
Normal file
34
services/living-map-service/adapters/city_client.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""
|
||||
City Service Client Adapter
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
from typing import Dict, Any
|
||||
from .base_client import BaseServiceClient
|
||||
import os
|
||||
|
||||
class CityClient(BaseServiceClient):
    """Client for city-service: fetches the city snapshot used as the
    "city" layer of the Living Map."""

    def __init__(self):
        super().__init__(os.getenv("CITY_SERVICE_URL", "http://localhost:7001"))

    async def get_city_snapshot(self) -> Dict[str, Any]:
        """Fetch the raw city snapshot ({} on failure)."""
        return await self.get_with_fallback("/api/city/snapshot", fallback={})

    async def get_layer_data(self) -> Dict[str, Any]:
        """Return the city layer for the Living Map."""
        snapshot = await self.get_city_snapshot()
        if snapshot:
            # Pass through as-is — the structure is defined by city-service.
            return snapshot
        # Empty or failed snapshot: degrade to a neutral placeholder layer.
        return {
            "microdaos_total": 0,
            "active_users": 0,
            "active_agents": 0,
            "health": "unknown",
            "items": []
        }
|
||||
|
||||
50
services/living-map-service/adapters/dao_client.py
Normal file
50
services/living-map-service/adapters/dao_client.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""
|
||||
DAO Service Client Adapter
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
from typing import List, Dict, Any
|
||||
from .base_client import BaseServiceClient
|
||||
import os
|
||||
|
||||
class DaoClient(BaseServiceClient):
    """Client for dao-service: renders DAOs as "planets" for the space layer."""

    def __init__(self):
        super().__init__(os.getenv("DAO_SERVICE_URL", "http://localhost:7016"))

    async def get_daos_list(self) -> List[Dict[str, Any]]:
        """Return all DAOs, or [] on failure / unexpected shape."""
        daos = await self.get_with_fallback("/dao", fallback=[])
        if isinstance(daos, list):
            return daos
        return []

    async def get_proposals_summary(self) -> Dict[str, Any]:
        """Proposals summary across all DAOs.

        The endpoint might not exist yet, so this is a static placeholder.
        """
        return {
            "total_proposals": 0,
            "active_proposals": 0
        }

    async def get_layer_data(self) -> Dict[str, Any]:
        """Return the space layer: each DAO becomes a planet."""
        planets = [
            {
                "id": f"dao:{dao.get('slug', dao.get('id'))}",
                "name": dao.get("name", "Unknown DAO"),
                "type": "dao",
                "status": "active" if dao.get("is_active") else "inactive",
                "orbits": [],  # TODO: Link nodes to DAOs
                "treasury_value": None,
                "active_proposals": 0
            }
            for dao in await self.get_daos_list()
        ]
        return {
            "planets": planets,
            "nodes": []  # Nodes will be added from space-service
        }
|
||||
|
||||
61
services/living-map-service/adapters/microdao_client.py
Normal file
61
services/living-map-service/adapters/microdao_client.py
Normal file
@@ -0,0 +1,61 @@
|
||||
"""
|
||||
MicroDAO Service Client Adapter
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
from typing import List, Dict, Any
|
||||
from .base_client import BaseServiceClient
|
||||
import os
|
||||
|
||||
class MicrodaoClient(BaseServiceClient):
    """Client for microdao-service: builds the city layer from microDAOs."""

    def __init__(self):
        super().__init__(os.getenv("MICRODAO_SERVICE_URL", "http://localhost:7015"))

    async def get_microdaos_list(self) -> List[Dict[str, Any]]:
        """Return all microDAOs.

        The public endpoint might require auth, so the internal endpoint is
        tried first and /microdao is used as a fallback.
        """
        listing = await self.get_with_fallback("/internal/microdaos", fallback=[])
        if not listing:
            listing = await self.get_with_fallback("/microdao", fallback=[])
        return listing if isinstance(listing, list) else []

    async def get_layer_data(self) -> Dict[str, Any]:
        """Return the city layer: microDAO items plus aggregate counters."""
        microdaos = await self.get_microdaos_list()

        # Transform each record to the Living Map item schema.
        items = []
        for md in microdaos:
            items.append({
                "id": md.get("external_id", f"microdao:{md.get('id')}"),
                "slug": md.get("slug", "unknown"),
                "name": md.get("name", "Unknown microDAO"),
                "status": "active" if md.get("is_active") else "inactive",
                "agents": md.get("agent_count", 0),
                "nodes": md.get("node_count", 0),
                "members": md.get("member_count", 0),
                "description": md.get("description")
            })

        # Aggregate counters from the already-normalized items.
        return {
            "microdaos_total": len(items),
            "active_users": sum(item["members"] for item in items),
            "active_agents": sum(item["agents"] for item in items),
            "health": "green" if items else "yellow",
            "items": items
        }
|
||||
|
||||
34
services/living-map-service/adapters/space_client.py
Normal file
34
services/living-map-service/adapters/space_client.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""
|
||||
Space Service Client Adapter
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
from typing import Dict, Any
|
||||
from .base_client import BaseServiceClient
|
||||
import os
|
||||
|
||||
class SpaceClient(BaseServiceClient):
    """Client for space-service: supplies planets and nodes for the space layer."""

    def __init__(self):
        super().__init__(os.getenv("SPACE_SERVICE_URL", "http://localhost:7002"))

    async def get_space_scene(self) -> Dict[str, Any]:
        """Fetch the raw space scene ({} on failure)."""
        return await self.get_with_fallback("/api/space/scene", fallback={})

    async def get_layer_data(self) -> Dict[str, Any]:
        """Return the space layer extracted from the scene.

        The exact scene format depends on the space-service implementation;
        only the "planets" and "nodes" keys are picked out here.
        """
        scene = await self.get_space_scene()
        return {
            "planets": scene.get("planets", []),
            "nodes": scene.get("nodes", [])
        }
|
||||
|
||||
40
services/living-map-service/adapters/usage_client.py
Normal file
40
services/living-map-service/adapters/usage_client.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""
|
||||
Usage Engine Client Adapter
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
from typing import Dict, Any
|
||||
from .base_client import BaseServiceClient
|
||||
import os
|
||||
|
||||
class UsageClient(BaseServiceClient):
    """Client for usage-engine: LLM/agent usage counters for the map."""

    def __init__(self):
        super().__init__(os.getenv("USAGE_ENGINE_URL", "http://localhost:7013"))

    async def get_usage_summary(self, period_hours: int = 24) -> Dict[str, Any]:
        """Usage summary over the last ``period_hours`` (zeros on failure)."""
        zeros = {
            "total_llm_calls": 0,
            "total_tokens": 0,
            "period_hours": period_hours
        }
        return await self.get_with_fallback(
            "/internal/usage/summary",
            fallback=zeros,
            params={"period_hours": period_hours}
        )

    async def get_agent_usage(self, agent_id: str) -> Dict[str, Any]:
        """Per-agent usage counters (zeros on failure)."""
        zeros = {
            "llm_calls_24h": 0,
            "tokens_24h": 0,
            "messages_24h": 0
        }
        return await self.get_with_fallback(
            f"/internal/usage/agent/{agent_id}",
            fallback=zeros
        )
|
||||
|
||||
146
services/living-map-service/main.py
Normal file
146
services/living-map-service/main.py
Normal file
@@ -0,0 +1,146 @@
|
||||
"""
|
||||
DAARION Living Map Service — Phase 9A
|
||||
Port: 7017
|
||||
Aggregated network state visualization
|
||||
"""
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
import asyncpg
|
||||
|
||||
# Import modules
|
||||
import routes
|
||||
from snapshot_builder import SnapshotBuilder
|
||||
from repository_history import HistoryRepository
|
||||
from nats_subscriber import NATSSubscriber
|
||||
from ws_stream import websocket_endpoint, broadcast_event
|
||||
|
||||
# ============================================================================
|
||||
# Configuration
|
||||
# ============================================================================
|
||||
|
||||
PORT = int(os.getenv("PORT", "7017"))
|
||||
DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://postgres:postgres@localhost:5432/daarion")
|
||||
NATS_URL = os.getenv("NATS_URL", "nats://localhost:4222")
|
||||
|
||||
# ============================================================================
|
||||
# Lifespan — Startup & Shutdown
|
||||
# ============================================================================
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Startup and Shutdown.

    Startup order: PostgreSQL (required) -> repositories/builders ->
    route wiring -> NATS (optional). Shutdown reverses: NATS first, then DB.
    """
    print("🚀 Living Map Service starting...")

    # Connect to PostgreSQL — hard requirement: the service cannot run
    # without history storage, so a failure here aborts startup.
    try:
        db_pool = await asyncpg.create_pool(DATABASE_URL, min_size=2, max_size=10)
        print(f"✅ PostgreSQL connected")
    except Exception as e:
        print(f"❌ Failed to connect to PostgreSQL: {e}")
        raise

    app.state.db_pool = db_pool

    # Initialize components
    history_repo = HistoryRepository(db_pool)
    snapshot_builder = SnapshotBuilder()

    app.state.history_repo = history_repo
    app.state.snapshot_builder = snapshot_builder

    # Set dependencies for routes.
    # routes.py uses module-level setters rather than FastAPI dependency
    # injection, so wire everything before the router serves traffic.
    routes.set_snapshot_builder(snapshot_builder)
    routes.set_history_repo(history_repo)
    routes.set_ws_handler(websocket_endpoint)

    # Connect to NATS — optional: on failure the service still serves
    # snapshots, it just won't receive or broadcast live events.
    nats_sub = NATSSubscriber(NATS_URL, history_repo)
    try:
        await nats_sub.connect()
        await nats_sub.subscribe_all(event_callback=broadcast_event)
        app.state.nats_sub = nats_sub
        print("✅ NATS subscriber configured")
    except Exception as e:
        print(f"⚠️ NATS connection failed: {e}")
        print("⚠️ Service will run without NATS events")
        app.state.nats_sub = None

    print(f"🎉 Living Map Service ready on port {PORT}")

    yield

    # Shutdown — close NATS before the pool, since the subscriber writes
    # events to the database.
    if hasattr(app.state, 'nats_sub') and app.state.nats_sub:
        await app.state.nats_sub.close()

    await db_pool.close()
    print("✅ Living Map Service stopped")
|
||||
|
||||
# ============================================================================
|
||||
# FastAPI App
|
||||
# ============================================================================
|
||||
|
||||
# FastAPI application; lifespan (above) wires up DB, NATS, and route deps.
app = FastAPI(
    title="DAARION Living Map Service",
    description="Aggregated network state visualization",
    version="1.0.0",
    lifespan=lifespan
)

# NOTE(review): wildcard origins combined with allow_credentials=True is
# very permissive — consider restricting origins for production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routes
app.include_router(routes.router)
|
||||
|
||||
@app.get("/")
|
||||
async def root():
|
||||
return {
|
||||
"service": "DAARION Living Map Service",
|
||||
"version": "1.0.0",
|
||||
"phase": "9A",
|
||||
"endpoints": {
|
||||
"health": "/living-map/health",
|
||||
"snapshot": "/living-map/snapshot",
|
||||
"entities": "/living-map/entities",
|
||||
"history": "/living-map/history",
|
||||
"stream": "ws://localhost:7017/living-map/stream"
|
||||
}
|
||||
}
|
||||
|
||||
if __name__ == "__main__":
    import uvicorn

    # Human-friendly startup banner, printed only for direct
    # `python main.py` runs (not under an external process manager).
    print(f"""
╔══════════════════════════════════════════════════════════════╗
║                                                              ║
║     🗺️ DAARION LIVING MAP SERVICE — PHASE 9A 🗺️              ║
║                                                              ║
║  Port: {PORT:<50}    ║
║  Database: PostgreSQL                                        ║
║  NATS: JetStream                                             ║
║                                                              ║
║  Features:                                                   ║
║  ✅ Network State Aggregation                                ║
║  ✅ 4 Layers (City/Space/Nodes/Agents)                       ║
║  ✅ Real-time WebSocket Stream                               ║
║  ✅ Event History                                            ║
║                                                              ║
╚══════════════════════════════════════════════════════════════╝
""")

    # reload=False: this entrypoint is for containers, not dev hot-reload.
    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=PORT,
        reload=False,
        log_level="info"
    )
|
||||
|
||||
247
services/living-map-service/models.py
Normal file
247
services/living-map-service/models.py
Normal file
@@ -0,0 +1,247 @@
|
||||
"""
|
||||
Living Map Service Models
|
||||
Phase 9: Full Stack Living Map
|
||||
"""
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional, List, Dict, Any, Literal
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
|
||||
# ============================================================================
|
||||
# Layer Types
|
||||
# ============================================================================
|
||||
|
||||
LayerType = Literal["city", "space", "nodes", "agents"]
|
||||
EntityType = Literal["city", "space", "node", "agent", "dao", "microdao", "channel"]
|
||||
EntityStatus = Literal["active", "inactive", "online", "offline", "warning", "error", "idle"]
|
||||
|
||||
# ============================================================================
|
||||
# City Layer Models
|
||||
# ============================================================================
|
||||
|
||||
class CityItem(BaseModel):
    """MicroDAO in City layer"""
    id: str
    slug: str
    name: str
    status: EntityStatus
    agents: int = 0    # count of agents attached to this microDAO
    nodes: int = 0     # count of compute nodes
    members: int = 0   # count of human members
    description: Optional[str] = None


class CityLayer(BaseModel):
    """City layer data"""
    # NOTE: Pydantic copies mutable defaults per-instance, so `= []` is safe.
    microdaos_total: int = 0
    active_users: int = 0
    active_agents: int = 0
    health: str = "green"  # "green" when any microDAO exists, else "yellow" (see MicrodaoClient)
    items: List[CityItem] = []
|
||||
|
||||
# ============================================================================
|
||||
# Space Layer Models
|
||||
# ============================================================================
|
||||
|
||||
class PlanetItem(BaseModel):
    """DAO planet in Space layer"""
    id: str  # "dao:<slug>" (see DaoClient.get_layer_data)
    name: str
    type: str = "dao"
    status: EntityStatus
    orbits: List[str] = []  # node IDs
    treasury_value: Optional[Decimal] = None
    active_proposals: int = 0


class NodeInSpace(BaseModel):
    """Node in Space layer (orbital view)"""
    id: str
    name: str
    status: EntityStatus
    cpu: float = 0.0     # utilization; units (0-1 vs percent) not fixed here — TODO confirm
    gpu: float = 0.0
    memory: float = 0.0
    alerts: List[str] = []


class SpaceLayer(BaseModel):
    """Space layer data"""
    planets: List[PlanetItem] = []
    nodes: List[NodeInSpace] = []
|
||||
|
||||
# ============================================================================
|
||||
# Nodes Layer Models
|
||||
# ============================================================================
|
||||
|
||||
class NodeMetrics(BaseModel):
    """Node hardware metrics"""
    cpu: float = 0.0
    gpu: float = 0.0
    ram: float = 0.0
    disk: float = 0.0
    net_in: int = 0    # network throughput counters; units — TODO confirm
    net_out: int = 0
    temperature: Optional[float] = None


class NodeItem(BaseModel):
    """Node in Nodes layer"""
    id: str
    name: str
    microdao_id: Optional[str] = None  # owning microDAO, if linked
    status: EntityStatus
    metrics: NodeMetrics
    alerts: List[str] = []
    uptime_seconds: Optional[int] = None
    last_seen: Optional[datetime] = None


class NodesLayer(BaseModel):
    """Nodes layer data"""
    items: List[NodeItem] = []
    total_cpu: float = 0.0
    total_gpu: float = 0.0
    total_ram: float = 0.0
|
||||
|
||||
# ============================================================================
|
||||
# Agents Layer Models
|
||||
# ============================================================================
|
||||
|
||||
class AgentUsage(BaseModel):
    """Agent usage statistics"""
    llm_calls_24h: int = 0
    tokens_24h: int = 0
    messages_24h: int = 0
    avg_response_time_ms: Optional[float] = None


class AgentItem(BaseModel):
    """Agent in Agents layer"""
    id: str
    name: str
    kind: str  # e.g. "assistant" (default used by AgentsClient)
    microdao_id: Optional[str] = None
    status: EntityStatus
    usage: AgentUsage
    model: Optional[str] = None
    last_active: Optional[datetime] = None


class AgentsLayer(BaseModel):
    """Agents layer data"""
    items: List[AgentItem] = []
    total_agents: int = 0
    online_agents: int = 0
    total_llm_calls_24h: int = 0
|
||||
|
||||
# ============================================================================
|
||||
# Snapshot Models
|
||||
# ============================================================================
|
||||
|
||||
class SnapshotMeta(BaseModel):
    """Metadata for snapshot"""
    source_services: List[str] = []   # upstream services that contributed data
    generated_at: datetime
    version: str = "1.0"
    request_id: Optional[str] = None  # optional correlation id


class LivingMapSnapshot(BaseModel):
    """Complete Living Map state"""
    generated_at: datetime
    layers: Dict[str, Any]  # Flexible for different layer structures
    meta: SnapshotMeta


class LivingMapSnapshotTyped(BaseModel):
    """Typed version of Living Map snapshot"""
    # NOTE(review): currently shaped identically to LivingMapSnapshot except
    # for the default and Config — consider consolidating the two.
    generated_at: datetime
    layers: Dict[str, Any] = Field(default_factory=dict)
    meta: SnapshotMeta

    class Config:
        # Pydantic v1-style config; v2 equivalent is model_config = ConfigDict(...)
        arbitrary_types_allowed = True
|
||||
|
||||
# ============================================================================
|
||||
# Entity Models
|
||||
# ============================================================================
|
||||
|
||||
class EntitySummary(BaseModel):
    """Minimal entity info for lists"""
    id: str
    type: EntityType
    label: str
    status: EntityStatus
    layer: LayerType


class EntityDetail(BaseModel):
    """Detailed entity information"""
    id: str
    type: EntityType
    layer: LayerType
    data: Dict[str, Any]  # free-form, layer-specific payload
|
||||
|
||||
# ============================================================================
|
||||
# History / Event Models
|
||||
# ============================================================================
|
||||
|
||||
class HistoryItem(BaseModel):
    """Single history event"""
    id: str
    timestamp: datetime
    event_type: str  # the originating NATS subject (see NATSSubscriber)
    payload: Dict[str, Any]
    source_service: Optional[str] = None
    entity_id: Optional[str] = None
    entity_type: Optional[str] = None


class HistoryResponse(BaseModel):
    """History query response"""
    items: List[HistoryItem]
    total: int             # total matching rows, ignoring pagination
    has_more: bool = False
|
||||
|
||||
# ============================================================================
|
||||
# WebSocket Models
|
||||
# ============================================================================
|
||||
|
||||
class WSMessageType(BaseModel):
    """WebSocket message type"""
    # Discriminator-only model; concrete messages below fix `kind` per class.
    kind: Literal["snapshot", "event", "ping", "error"]


class WSSnapshotMessage(BaseModel):
    """WebSocket snapshot message"""
    kind: Literal["snapshot"] = "snapshot"
    data: LivingMapSnapshot


class WSEventMessage(BaseModel):
    """WebSocket event message"""
    kind: Literal["event"] = "event"
    event_type: str
    timestamp: datetime
    payload: Dict[str, Any]


class WSPingMessage(BaseModel):
    """WebSocket ping message"""
    kind: Literal["ping"] = "ping"
    timestamp: datetime


class WSErrorMessage(BaseModel):
    """WebSocket error message"""
    kind: Literal["error"] = "error"
    error: str
    timestamp: datetime
|
||||
|
||||
# ============================================================================
|
||||
# Query Parameters
|
||||
# ============================================================================
|
||||
|
||||
class HistoryQueryParams(BaseModel):
    """Query parameters for history endpoint"""
    since: Optional[datetime] = None   # inclusive lower bound on timestamp
    until: Optional[datetime] = None   # inclusive upper bound on timestamp
    event_type: Optional[str] = None   # exact match on event_type
    entity_id: Optional[str] = None    # exact match on entity_id
    limit: int = Field(default=200, ge=1, le=1000)
    offset: int = Field(default=0, ge=0)


class EntitiesQueryParams(BaseModel):
    """Query parameters for entities endpoint"""
    type: Optional[EntityType] = None
    layer: Optional[LayerType] = None
    status: Optional[EntityStatus] = None
    limit: int = Field(default=100, ge=1, le=500)
    offset: int = Field(default=0, ge=0)
|
||||
|
||||
116
services/living-map-service/nats_subscriber.py
Normal file
116
services/living-map-service/nats_subscriber.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""
|
||||
NATS Subscriber for Living Map events
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
import json
|
||||
import asyncio
|
||||
from nats.aio.client import Client as NATS
|
||||
from typing import Callable, Optional
|
||||
from repository_history import HistoryRepository
|
||||
|
||||
class NATSSubscriber:
    """Subscribe to NATS subjects, persist events to the history repository,
    and fan each event out to an optional callback (WebSocket broadcast)."""

    def __init__(self, nats_url: str, history_repo: "HistoryRepository"):
        self.nats_url = nats_url
        self.history_repo = history_repo
        self.nc: Optional["NATS"] = None
        self.subscriptions = []
        self.event_callback: Optional[Callable] = None

    async def connect(self):
        """Connect to NATS"""
        self.nc = NATS()
        await self.nc.connect(self.nats_url)
        print(f"✅ NATS connected: {self.nats_url}")

    async def subscribe_all(self, event_callback: Optional[Callable] = None):
        """Subscribe to all Living Map relevant subjects.

        Raises:
            RuntimeError: if connect() has not been called first.
        """
        if not self.nc:
            raise RuntimeError("NATS not connected")

        self.event_callback = event_callback

        subjects = [
            "city.event.*",
            "dao.event.*",
            "microdao.event.*",
            "node.metrics.*",
            "agent.event.*",
            "usage.llm.*",
            "usage.agent.*",
            "messaging.message.created"
        ]

        # Best-effort: a single bad subject must not block the others.
        for subject in subjects:
            try:
                sub = await self.nc.subscribe(subject, cb=self._handle_message)
                self.subscriptions.append(sub)
                print(f"📡 Subscribed to: {subject}")
            except Exception as e:
                print(f"⚠️ Failed to subscribe to {subject}: {e}")

    async def _handle_message(self, msg):
        """Handle incoming NATS message: log to history, then broadcast."""
        try:
            # Decode payload
            payload = json.loads(msg.data.decode())
            subject = msg.subject

            # Extract entity info (publishers use different id field names)
            entity_id = payload.get("id") or payload.get("entity_id") or payload.get("agent_id") or payload.get("dao_id")
            entity_type = self._infer_entity_type(subject)

            # Log to history
            await self.history_repo.add_event(
                event_type=subject,
                payload=payload,
                source_service=self._extract_service(subject),
                entity_id=entity_id,
                entity_type=entity_type
            )

            # Notify callback (for WebSocket broadcast)
            if self.event_callback:
                await self.event_callback({
                    "kind": "event",
                    "event_type": subject,
                    "timestamp": payload.get("ts") or payload.get("timestamp"),
                    "payload": payload
                })

            print(f"📥 Event logged: {subject}")

        except Exception as e:
            print(f"❌ Error handling message from {msg.subject}: {e}")

    def _infer_entity_type(self, subject: str) -> str:
        """Infer entity type from NATS subject.

        BUGFIX: "microdao" must be matched before "dao" — the subject
        "microdao.event.*" contains the substring "dao", so the previous
        ordering classified every microDAO event as "dao".
        """
        if "microdao" in subject:
            return "microdao"
        elif "agent" in subject:
            return "agent"
        elif "dao" in subject:
            return "dao"
        elif "node" in subject:
            return "node"
        elif "city" in subject:
            return "city"
        elif "space" in subject:
            return "space"
        else:
            return "unknown"

    def _extract_service(self, subject: str) -> str:
        """Extract service name from the subject prefix
        (e.g. "dao.event.created" -> "dao-service")."""
        parts = subject.split(".")
        if len(parts) > 0:
            return f"{parts[0]}-service"
        return "unknown"

    async def close(self):
        """Close NATS connection"""
        if self.nc:
            await self.nc.close()
            print("✅ NATS connection closed")
|
||||
|
||||
116
services/living-map-service/repository_history.py
Normal file
116
services/living-map-service/repository_history.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""
|
||||
History Repository — Database operations for Living Map events
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
import uuid
|
||||
from typing import List, Optional
|
||||
from datetime import datetime
|
||||
import asyncpg
|
||||
from models import HistoryItem, HistoryQueryParams
|
||||
|
||||
class HistoryRepository:
|
||||
    def __init__(self, db_pool: asyncpg.Pool):
        # Shared asyncpg pool, created and owned by the app lifespan (main.py).
        self.db = db_pool
|
||||
|
||||
async def add_event(
|
||||
self,
|
||||
event_type: str,
|
||||
payload: dict,
|
||||
source_service: Optional[str] = None,
|
||||
entity_id: Optional[str] = None,
|
||||
entity_type: Optional[str] = None
|
||||
) -> str:
|
||||
"""Add event to history"""
|
||||
event_id = uuid.uuid4()
|
||||
|
||||
await self.db.execute(
|
||||
"""
|
||||
INSERT INTO living_map_history (
|
||||
id, event_type, payload, source_service, entity_id, entity_type
|
||||
)
|
||||
VALUES ($1, $2, $3, $4, $5, $6)
|
||||
""",
|
||||
event_id,
|
||||
event_type,
|
||||
payload,
|
||||
source_service,
|
||||
entity_id,
|
||||
entity_type
|
||||
)
|
||||
|
||||
return str(event_id)
|
||||
|
||||
async def query_history(
|
||||
self,
|
||||
params: HistoryQueryParams
|
||||
) -> tuple[List[HistoryItem], int]:
|
||||
"""Query history with filters"""
|
||||
conditions = []
|
||||
values = []
|
||||
param_idx = 1
|
||||
|
||||
if params.since:
|
||||
conditions.append(f"timestamp >= ${param_idx}")
|
||||
values.append(params.since)
|
||||
param_idx += 1
|
||||
|
||||
if params.until:
|
||||
conditions.append(f"timestamp <= ${param_idx}")
|
||||
values.append(params.until)
|
||||
param_idx += 1
|
||||
|
||||
if params.event_type:
|
||||
conditions.append(f"event_type = ${param_idx}")
|
||||
values.append(params.event_type)
|
||||
param_idx += 1
|
||||
|
||||
if params.entity_id:
|
||||
conditions.append(f"entity_id = ${param_idx}")
|
||||
values.append(params.entity_id)
|
||||
param_idx += 1
|
||||
|
||||
where_clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
|
||||
|
||||
# Get total count
|
||||
count_query = f"SELECT COUNT(*) FROM living_map_history {where_clause}"
|
||||
total = await self.db.fetchval(count_query, *values)
|
||||
|
||||
# Get paginated results
|
||||
values.extend([params.limit, params.offset])
|
||||
query = f"""
|
||||
SELECT id, timestamp, event_type, payload, source_service, entity_id, entity_type
|
||||
FROM living_map_history
|
||||
{where_clause}
|
||||
ORDER BY timestamp DESC
|
||||
LIMIT ${param_idx} OFFSET ${param_idx + 1}
|
||||
"""
|
||||
|
||||
rows = await self.db.fetch(query, *values)
|
||||
|
||||
items = [
|
||||
HistoryItem(
|
||||
id=str(row['id']),
|
||||
timestamp=row['timestamp'],
|
||||
event_type=row['event_type'],
|
||||
payload=row['payload'],
|
||||
source_service=row['source_service'],
|
||||
entity_id=row['entity_id'],
|
||||
entity_type=row['entity_type']
|
||||
)
|
||||
for row in rows
|
||||
]
|
||||
|
||||
return items, total or 0
|
||||
|
||||
async def cleanup_old_events(self, days: int = 30) -> int:
|
||||
"""Cleanup events older than N days"""
|
||||
result = await self.db.execute(
|
||||
"""
|
||||
DELETE FROM living_map_history
|
||||
WHERE timestamp < NOW() - INTERVAL '%s days'
|
||||
""",
|
||||
days
|
||||
)
|
||||
# Extract count from result string like "DELETE 123"
|
||||
return int(result.split()[-1]) if result else 0
|
||||
|
||||
9
services/living-map-service/requirements.txt
Normal file
9
services/living-map-service/requirements.txt
Normal file
@@ -0,0 +1,9 @@
|
||||
fastapi==0.104.1
|
||||
uvicorn==0.24.0
|
||||
pydantic==2.5.0
|
||||
asyncpg==0.29.0
|
||||
httpx==0.25.1
|
||||
python-dotenv==1.0.0
|
||||
nats-py==2.6.0
|
||||
websockets==12.0
|
||||
|
||||
156
services/living-map-service/routes.py
Normal file
156
services/living-map-service/routes.py
Normal file
@@ -0,0 +1,156 @@
|
||||
"""
|
||||
Living Map Service Routes
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
from fastapi import APIRouter, Query, WebSocket
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
from models import (
|
||||
LivingMapSnapshot, SnapshotMeta, HistoryResponse,
|
||||
HistoryQueryParams, EntitiesQueryParams, EntitySummary
|
||||
)
|
||||
|
||||
# All endpoints below are mounted under the /living-map prefix.
router = APIRouter(prefix="/living-map", tags=["living-map"])

# Module-level collaborators, injected from main.py at startup via the
# set_* helpers at the bottom of this module. They stay None until then,
# so every handler guards against an uninitialized service.
snapshot_builder = None
history_repo = None
ws_handler = None
|
||||
|
||||
@router.get("/health")
|
||||
async def health():
|
||||
"""Health check"""
|
||||
return {
|
||||
"status": "ok",
|
||||
"service": "living-map-service",
|
||||
"version": "1.0.0",
|
||||
"time": datetime.now().isoformat()
|
||||
}
|
||||
|
||||
@router.get("/snapshot")
|
||||
async def get_snapshot():
|
||||
"""Get complete Living Map snapshot"""
|
||||
if not snapshot_builder:
|
||||
return {"error": "Snapshot builder not initialized"}
|
||||
|
||||
snapshot = await snapshot_builder.build_snapshot()
|
||||
return snapshot
|
||||
|
||||
@router.get("/entities")
|
||||
async def list_entities(
|
||||
type: Optional[str] = Query(None),
|
||||
layer: Optional[str] = Query(None),
|
||||
limit: int = Query(100, ge=1, le=500)
|
||||
):
|
||||
"""List entities across all layers"""
|
||||
if not snapshot_builder:
|
||||
return {"items": []}
|
||||
|
||||
snapshot = await snapshot_builder.build_snapshot()
|
||||
entities = []
|
||||
|
||||
# Extract entities from all layers
|
||||
layers_data = snapshot.get("layers", {})
|
||||
|
||||
# City layer
|
||||
if "city" in layers_data:
|
||||
for item in layers_data["city"].get("items", []):
|
||||
entities.append({
|
||||
"id": item.get("id"),
|
||||
"type": "microdao",
|
||||
"label": item.get("name"),
|
||||
"status": item.get("status"),
|
||||
"layer": "city"
|
||||
})
|
||||
|
||||
# Agents layer
|
||||
if "agents" in layers_data:
|
||||
for item in layers_data["agents"].get("items", []):
|
||||
entities.append({
|
||||
"id": item.get("id"),
|
||||
"type": "agent",
|
||||
"label": item.get("name"),
|
||||
"status": item.get("status"),
|
||||
"layer": "agents"
|
||||
})
|
||||
|
||||
# Filter by type/layer
|
||||
if type:
|
||||
entities = [e for e in entities if e["type"] == type]
|
||||
if layer:
|
||||
entities = [e for e in entities if e["layer"] == layer]
|
||||
|
||||
return {"items": entities[:limit]}
|
||||
|
||||
@router.get("/entities/{entity_id}")
|
||||
async def get_entity(entity_id: str):
|
||||
"""Get entity details"""
|
||||
if not snapshot_builder:
|
||||
return {"error": "Snapshot builder not initialized"}
|
||||
|
||||
snapshot = await snapshot_builder.build_snapshot()
|
||||
layers = snapshot.get("layers", {})
|
||||
|
||||
# Search in all layers
|
||||
for layer_name, layer_data in layers.items():
|
||||
items = layer_data.get("items", [])
|
||||
for item in items:
|
||||
if item.get("id") == entity_id:
|
||||
return {
|
||||
"id": entity_id,
|
||||
"type": "entity",
|
||||
"layer": layer_name,
|
||||
"data": item
|
||||
}
|
||||
|
||||
return {"error": "Entity not found"}
|
||||
|
||||
@router.get("/history")
|
||||
async def get_history(
|
||||
since: Optional[str] = Query(None),
|
||||
limit: int = Query(200, ge=1, le=1000),
|
||||
offset: int = Query(0, ge=0)
|
||||
):
|
||||
"""Get event history"""
|
||||
if not history_repo:
|
||||
return {"items": [], "total": 0}
|
||||
|
||||
params = HistoryQueryParams(
|
||||
since=datetime.fromisoformat(since) if since else None,
|
||||
limit=limit,
|
||||
offset=offset
|
||||
)
|
||||
|
||||
items, total = await history_repo.query_history(params)
|
||||
|
||||
return {
|
||||
"items": [item.model_dump() for item in items],
|
||||
"total": total,
|
||||
"has_more": (offset + len(items)) < total
|
||||
}
|
||||
|
||||
@router.websocket("/stream")
|
||||
async def websocket_stream(websocket: WebSocket):
|
||||
"""WebSocket stream for real-time events"""
|
||||
if not ws_handler or not snapshot_builder:
|
||||
await websocket.close(code=1011, reason="Service not ready")
|
||||
return
|
||||
|
||||
async def get_snapshot():
|
||||
return await snapshot_builder.build_snapshot()
|
||||
|
||||
await ws_handler(websocket, get_snapshot)
|
||||
|
||||
# Helper functions to inject dependencies (called once from main.py at
# startup; the route handlers read the module-level globals set here).
def set_snapshot_builder(builder):
    """Install the snapshot builder used by the snapshot/entity routes."""
    global snapshot_builder
    snapshot_builder = builder

def set_history_repo(repo):
    """Install the history repository used by the /history route."""
    global history_repo
    history_repo = repo

def set_ws_handler(handler):
    """Install the coroutine that serves /stream WebSocket clients."""
    global ws_handler
    ws_handler = handler
|
||||
|
||||
100
services/living-map-service/snapshot_builder.py
Normal file
100
services/living-map-service/snapshot_builder.py
Normal file
@@ -0,0 +1,100 @@
|
||||
"""
|
||||
Snapshot Builder — Aggregates data from all services
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any
|
||||
from adapters.city_client import CityClient
|
||||
from adapters.space_client import SpaceClient
|
||||
from adapters.agents_client import AgentsClient
|
||||
from adapters.microdao_client import MicrodaoClient
|
||||
from adapters.dao_client import DaoClient
|
||||
from adapters.usage_client import UsageClient
|
||||
|
||||
class SnapshotBuilder:
    """Build the complete Living Map snapshot.

    Fans out to the per-domain service clients in parallel and merges
    their payloads into the layered snapshot structure. Any upstream
    failure degrades to an empty layer instead of failing the snapshot.
    """

    def __init__(self):
        self.city_client = CityClient()
        self.space_client = SpaceClient()
        self.agents_client = AgentsClient()
        self.microdao_client = MicrodaoClient()
        self.dao_client = DaoClient()
        # Constructed but not consumed by build_snapshot() yet —
        # presumably reserved for future usage-metrics layers.
        self.usage_client = UsageClient()

    @staticmethod
    def _unwrap(result, label):
        """Return `result`, or {} (with a warning) when the parallel
        fetch for that service raised instead of returning data."""
        if isinstance(result, Exception):
            print(f"⚠️ {label} data error: {result}")
            return {}
        return result

    async def build_snapshot(self) -> Dict[str, Any]:
        """Build the complete snapshot from all services."""
        generated_at = datetime.now()

        # Fetch data from all services in parallel; exceptions are
        # returned in place (return_exceptions=True) so one broken
        # service cannot sink the whole snapshot.
        import asyncio
        results = await asyncio.gather(
            self.city_client.get_layer_data(),
            self.agents_client.get_layer_data(),
            self.microdao_client.get_layer_data(),
            self.dao_client.get_layer_data(),
            self.space_client.get_layer_data(),
            return_exceptions=True
        )
        # Unwrap each result with its warning label (order matches the
        # gather() call above).
        city_data = self._unwrap(results[0], "City")
        agents_data = self._unwrap(results[1], "Agents")
        microdao_data = self._unwrap(results[2], "MicroDAO")
        dao_data = self._unwrap(results[3], "DAO")
        space_data = self._unwrap(results[4], "Space")

        # Merge city and microdao data (they're the same layer; city
        # keys win on conflict).
        city_layer = {
            **microdao_data,
            **(city_data if city_data else {})
        }

        # Merge space and dao data into one space layer.
        space_layer = {
            "planets": dao_data.get("planets", []) + space_data.get("planets", []),
            "nodes": space_data.get("nodes", [])
        }

        # Nodes layer is a thin view over space nodes for now; resource
        # totals are placeholders until usage metrics are wired in.
        nodes_layer = {
            "items": space_data.get("nodes", []),
            "total_cpu": 0.0,
            "total_gpu": 0.0,
            "total_ram": 0.0
        }

        snapshot = {
            "generated_at": generated_at.isoformat(),
            "layers": {
                "city": city_layer,
                "space": space_layer,
                "nodes": nodes_layer,
                "agents": agents_data
            },
            "meta": {
                "source_services": [
                    "city-service",
                    "space-service",
                    "agents-service",
                    "microdao-service",
                    "dao-service",
                    "usage-engine"
                ],
                "generated_at": generated_at.isoformat(),
                "version": "1.0"
            }
        }

        return snapshot
|
||||
|
||||
110
services/living-map-service/ws_stream.py
Normal file
110
services/living-map-service/ws_stream.py
Normal file
@@ -0,0 +1,110 @@
|
||||
"""
|
||||
WebSocket Stream for Living Map
|
||||
Phase 9: Living Map
|
||||
"""
|
||||
from fastapi import WebSocket, WebSocketDisconnect
|
||||
from typing import List, Dict, Any, Callable
|
||||
import asyncio
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
class ConnectionManager:
    """Track active WebSocket clients and fan messages out to them."""

    def __init__(self):
        # Live client sockets; mutated by connect()/disconnect().
        self.active_connections: List[WebSocket] = []
        # Serializes registration from concurrent connect() calls.
        self._lock = asyncio.Lock()

    async def connect(self, websocket: WebSocket):
        """Accept a new connection and register it."""
        await websocket.accept()
        async with self._lock:
            self.active_connections.append(websocket)
        print(f"✅ WebSocket connected. Total: {len(self.active_connections)}")

    def disconnect(self, websocket: WebSocket):
        """Remove a disconnected client (idempotent; safe to call twice)."""
        if websocket in self.active_connections:
            self.active_connections.remove(websocket)
            print(f"❌ WebSocket disconnected. Total: {len(self.active_connections)}")

    async def send_to_all(self, message: Dict[str, Any]):
        """Broadcast a message to all connected clients.

        Sockets that fail to receive are dropped from the pool.
        """
        if not self.active_connections:
            return

        # Serialize once for every recipient.
        text = json.dumps(message, default=str)

        # Fix: iterate over a snapshot copy — connect()/disconnect()
        # can mutate active_connections while we await sends, and
        # mutating a list during iteration skips or repeats entries.
        disconnected = []
        for connection in list(self.active_connections):
            try:
                await connection.send_text(text)
            except Exception as e:
                print(f"⚠️ Failed to send to WebSocket: {e}")
                disconnected.append(connection)

        # Clean up the sockets that errored.
        for conn in disconnected:
            self.disconnect(conn)

    async def send_snapshot(self, snapshot: Dict[str, Any]):
        """Send a full snapshot frame to all connections."""
        await self.send_to_all({
            "kind": "snapshot",
            "data": snapshot
        })

    async def send_event(self, event: Dict[str, Any]):
        """Forward an event frame verbatim to all connections."""
        await self.send_to_all(event)

    async def send_ping(self):
        """Send a ping frame to keep connections alive."""
        await self.send_to_all({
            "kind": "ping",
            "timestamp": datetime.now().isoformat()
        })
|
||||
|
||||
# Global connection manager instance — one process-wide pool of
# WebSocket clients, shared by the routes and the NATS subscriber.
ws_manager = ConnectionManager()

async def broadcast_event(event: Dict[str, Any]):
    """Callback for NATS subscriber to broadcast events.

    Forwards the event frame unchanged to every connected client.
    """
    await ws_manager.send_event(event)
|
||||
|
||||
async def websocket_endpoint(websocket: WebSocket, get_snapshot_fn: Callable):
    """Serve one WebSocket client for its whole lifetime.

    Registers the socket, pushes an initial snapshot, then idles in a
    receive loop, pinging every 30s of client silence. On disconnect or
    any error, the socket is removed from the manager.

    Args:
        websocket: The already-routed FastAPI WebSocket.
        get_snapshot_fn: Zero-arg coroutine returning the current snapshot.
    """
    await ws_manager.connect(websocket)

    try:
        # Send initial snapshot so the client renders immediately.
        snapshot = await get_snapshot_fn()
        await websocket.send_json({
            "kind": "snapshot",
            "data": snapshot
        })

        # Keep connection alive and listen for messages. Broadcasts
        # reach this socket via ws_manager, not this loop.
        while True:
            try:
                # Wait for any message (ping/pong); the payload itself
                # is intentionally ignored.
                data = await asyncio.wait_for(
                    websocket.receive_text(),
                    timeout=30.0
                )
                # Echo back or ignore
            except asyncio.TimeoutError:
                # 30s of client silence: send ping to keep alive
                # (raises if the socket died, landing in the outer except).
                await websocket.send_json({
                    "kind": "ping",
                    "timestamp": datetime.now().isoformat()
                })

    except WebSocketDisconnect:
        # Normal client-initiated close.
        ws_manager.disconnect(websocket)
    except Exception as e:
        # Unexpected failure: log and deregister (disconnect is idempotent).
        print(f"❌ WebSocket error: {e}")
        ws_manager.disconnect(websocket)
|
||||
|
||||
Reference in New Issue
Block a user