feat: Add presence heartbeat for Matrix online status

- matrix-gateway: POST /internal/matrix/presence/online endpoint
- usePresenceHeartbeat hook with activity tracking
- Auto away after 5 min inactivity
- Offline on page close/visibility change
- Integrated into the MatrixChatRoom component
This commit is contained in:
Apple
2025-11-27 00:19:40 -08:00
parent 5bed515852
commit 3de3c8cb36
6371 changed files with 1317450 additions and 932 deletions

View File

@@ -0,0 +1,9 @@
# Memory backend package exports:
# - ShortTermBackend: recent events/conversations in PostgreSQL (time-ordered)
# - VectorStoreBackend: mid-term memory with pgvector semantic search
# - KnowledgeBaseBackend: long-term filesystem knowledge base (Phase 3 stub)
from .short_term_pg import ShortTermBackend
from .vector_store_pg import VectorStoreBackend
from .kb_filesystem import KnowledgeBaseBackend
__all__ = ['ShortTermBackend', 'VectorStoreBackend', 'KnowledgeBaseBackend']

View File

@@ -0,0 +1,75 @@
import os
import json
from typing import Optional
from models import MemoryItem
from datetime import datetime
class KnowledgeBaseBackend:
    """
    Long-term knowledge base backed by the filesystem.

    Phase 3: stub implementation — query() always returns nothing and
    store() only fabricates an id. Phase 4 will add real indexing and
    persistence for docs, roadmaps, and structured knowledge.
    """

    def __init__(self, kb_path: str = "/data/kb"):
        # Root directory where KB entries will eventually be written.
        self.kb_path = kb_path

    async def initialize(self):
        """Ensure the KB directory exists; warn and continue on failure."""
        if os.path.exists(self.kb_path):
            return
        try:
            os.makedirs(self.kb_path, exist_ok=True)
        except Exception as exc:
            print(f"⚠️ Failed to create KB directory: {exc}")
            print(" Using in-memory stub")
        else:
            print(f"✅ KB directory created: {self.kb_path}")

    async def query(
        self,
        agent_id: str,
        query_text: str,
        limit: int = 5
    ) -> list[MemoryItem]:
        """
        Query the knowledge base.

        Phase 3 stub: logs the query and returns no results. Phase 4 will
        index docs/roadmaps with embeddings, run a semantic search, and
        return relevant knowledge chunks.
        """
        print(f" KB query (stub): {query_text[:50]}...")
        return []

    async def store(
        self,
        agent_id: str,
        microdao_id: str,
        kind: str,
        content: dict,
        metadata: Optional[dict] = None
    ) -> str:
        """
        Store a knowledge base entry.

        Phase 3 stub: fabricates a timestamp-based id without persisting
        anything. Phase 4 will write to the filesystem or DB with indexing.
        """
        entry_id = f"kb-{datetime.now().timestamp()}"
        print(f" KB store (stub): {entry_id}")
        return entry_id

View File

@@ -0,0 +1,109 @@
import asyncpg
import json
from datetime import datetime
from uuid import uuid4
from typing import Optional
from models import MemoryItem
class ShortTermBackend:
    """
    Short-term memory backend (PostgreSQL).

    Stores recent conversations and events; retrieval is purely
    reverse-chronological (no relevance ranking — score is fixed at 1.0).
    """

    def __init__(self, pool: asyncpg.Pool):
        # Shared connection pool owned by the caller; never closed here.
        self.pool = pool

    async def initialize(self):
        """Create the backing table and its indexes if they do not exist."""
        async with self.pool.acquire() as conn:
            await conn.execute("""
                CREATE TABLE IF NOT EXISTS agent_memories_short (
                    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                    agent_id TEXT NOT NULL,
                    microdao_id TEXT NOT NULL,
                    channel_id TEXT,
                    kind TEXT NOT NULL,
                    content JSONB NOT NULL,
                    metadata JSONB DEFAULT '{}',
                    created_at TIMESTAMPTZ DEFAULT NOW()
                );
                CREATE INDEX IF NOT EXISTS idx_short_agent_time
                    ON agent_memories_short (agent_id, created_at DESC);
                CREATE INDEX IF NOT EXISTS idx_short_microdao
                    ON agent_memories_short (microdao_id);
            """)
            print("✅ Short-term memory table initialized")

    async def store(
        self,
        agent_id: str,
        microdao_id: str,
        kind: str,
        content: dict,
        channel_id: Optional[str] = None,
        metadata: Optional[dict] = None
    ) -> str:
        """Store a memory entry and return its generated UUID as a string."""
        memory_id = str(uuid4())
        async with self.pool.acquire() as conn:
            await conn.execute(
                """
                INSERT INTO agent_memories_short
                (id, agent_id, microdao_id, channel_id, kind, content, metadata, created_at)
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
                """,
                memory_id, agent_id, microdao_id, channel_id, kind,
                json.dumps(content), json.dumps(metadata or {}),
                # BUGFIX: use a timezone-aware timestamp. asyncpg treats a
                # naive datetime as UTC when binding to TIMESTAMPTZ, so the
                # original naive datetime.now() stored a wrong instant on any
                # host whose local timezone is not UTC. astimezone() keeps
                # the same instant and attaches the local offset.
                datetime.now().astimezone()
            )
        return memory_id

    async def query(
        self,
        agent_id: str,
        limit: int = 10,
        kind_filter: Optional[list[str]] = None
    ) -> list[MemoryItem]:
        """
        Query recent memories (simple time-based retrieval).

        Returns up to `limit` newest entries for `agent_id`, optionally
        restricted to the given kinds.
        """
        query = """
            SELECT id, kind, content, metadata, created_at
            FROM agent_memories_short
            WHERE agent_id = $1
        """
        params: list = [agent_id]
        if kind_filter:
            query += " AND kind = ANY($2)"  # plain string; no placeholders
            params.append(kind_filter)
        query += f" ORDER BY created_at DESC LIMIT ${len(params) + 1}"
        params.append(limit)

        async with self.pool.acquire() as conn:
            rows = await conn.fetch(query, *params)

        items = []
        for row in rows:
            items.append(MemoryItem(
                id=str(row['id']),
                kind=row['kind'],
                score=1.0,  # time-based retrieval, no relevance score
                content=self._as_json_text(row['content']),
                meta=self._as_meta(row['metadata']),
                created_at=row['created_at']
            ))
        return items

    @staticmethod
    def _as_json_text(value) -> str:
        """Normalize a JSONB column value to a JSON string for MemoryItem."""
        return json.dumps(value) if isinstance(value, dict) else str(value)

    @staticmethod
    def _as_meta(value) -> dict:
        """Normalize a JSONB metadata value to a dict.

        asyncpg returns JSONB columns as JSON text unless a custom codec is
        registered; the original code passed that raw string straight into
        MemoryItem.meta. Decode it here so meta is always a dict.
        """
        if isinstance(value, str):
            try:
                return json.loads(value)
            except (ValueError, TypeError):
                return {}
        return value or {}

View File

@@ -0,0 +1,185 @@
import asyncpg
import json
from datetime import datetime
from uuid import uuid4
from typing import Optional
from models import MemoryItem
from embedding_client import EmbeddingClient
class VectorStoreBackend:
    """
    Mid-term memory backend with vector search (PostgreSQL + pgvector).

    For Phase 3: if the pgvector extension cannot be enabled, falls back to
    a plain table without an embedding column and simple ILIKE text search.
    """

    def __init__(
        self,
        pool: asyncpg.Pool,
        embedding_client: EmbeddingClient,
        embedding_dim: int = 1024
    ):
        self.pool = pool
        self.embedding_client = embedding_client
        # Dimension of the vector column — must match the embedding model.
        # Was hard-coded to 1024; now a parameter with the same default, so
        # existing callers are unaffected.
        self.embedding_dim = embedding_dim
        # Set during initialize(); selects the vector vs. fallback SQL paths.
        self.pgvector_available = False

    async def initialize(self):
        """Create tables if not exist (vector-enabled when pgvector loads)."""
        async with self.pool.acquire() as conn:
            # Try to enable pgvector extension
            try:
                await conn.execute("CREATE EXTENSION IF NOT EXISTS vector;")
                self.pgvector_available = True
                print("✅ pgvector extension enabled")
            except Exception as e:
                print(f"⚠️ pgvector not available: {e}")
                print(" Will use fallback (simple text search)")

            # Create table (with or without vector column)
            if self.pgvector_available:
                # f-string only injects the integer dimension; braces for the
                # JSONB default are escaped as {{}}.
                await conn.execute(f"""
                    CREATE TABLE IF NOT EXISTS agent_memories_vector (
                        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                        agent_id TEXT NOT NULL,
                        microdao_id TEXT NOT NULL,
                        channel_id TEXT,
                        kind TEXT NOT NULL,
                        content TEXT NOT NULL,
                        content_json JSONB,
                        embedding vector({self.embedding_dim}),
                        metadata JSONB DEFAULT '{{}}',
                        created_at TIMESTAMPTZ DEFAULT NOW()
                    );
                    CREATE INDEX IF NOT EXISTS idx_vector_agent
                        ON agent_memories_vector (agent_id);
                    CREATE INDEX IF NOT EXISTS idx_vector_embedding
                        ON agent_memories_vector USING ivfflat (embedding vector_cosine_ops)
                        WITH (lists = 100);
                """)
            else:
                # Fallback table without vector column
                await conn.execute("""
                    CREATE TABLE IF NOT EXISTS agent_memories_vector (
                        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                        agent_id TEXT NOT NULL,
                        microdao_id TEXT NOT NULL,
                        channel_id TEXT,
                        kind TEXT NOT NULL,
                        content TEXT NOT NULL,
                        content_json JSONB,
                        metadata JSONB DEFAULT '{}',
                        created_at TIMESTAMPTZ DEFAULT NOW()
                    );
                    CREATE INDEX IF NOT EXISTS idx_vector_agent
                        ON agent_memories_vector (agent_id);
                """)
            print("✅ Vector memory table initialized")

    async def store(
        self,
        agent_id: str,
        microdao_id: str,
        kind: str,
        content: dict,
        channel_id: Optional[str] = None,
        metadata: Optional[dict] = None
    ) -> str:
        """Store a memory with its embedding; return the new entry's id."""
        memory_id = str(uuid4())
        # Convert content to text for embedding
        content_text = json.dumps(content)
        # Generate embedding
        embedding = await self.embedding_client.embed(content_text)

        async with self.pool.acquire() as conn:
            if self.pgvector_available:
                await conn.execute(
                    """
                    INSERT INTO agent_memories_vector
                    (id, agent_id, microdao_id, channel_id, kind, content, content_json, embedding, metadata)
                    VALUES ($1, $2, $3, $4, $5, $6, $7, $8::vector, $9)
                    """,
                    memory_id, agent_id, microdao_id, channel_id, kind,
                    content_text, json.dumps(content),
                    # BUGFIX: asyncpg has no built-in codec for the pgvector
                    # type, so a raw Python list cannot be bound to
                    # $8::vector — send pgvector's text form instead.
                    self._vector_literal(embedding),
                    json.dumps(metadata or {})
                )
            else:
                # Fallback without embedding
                await conn.execute(
                    """
                    INSERT INTO agent_memories_vector
                    (id, agent_id, microdao_id, channel_id, kind, content, content_json, metadata)
                    VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
                    """,
                    memory_id, agent_id, microdao_id, channel_id, kind,
                    content_text, json.dumps(content), json.dumps(metadata or {})
                )
        return memory_id

    async def query(
        self,
        agent_id: str,
        query_text: str,
        limit: int = 5,
        kind_filter: Optional[list[str]] = None
    ) -> list[MemoryItem]:
        """Query memories by semantic similarity (ILIKE search as fallback)."""
        if self.pgvector_available:
            # Vector search: cosine distance; score = 1 - distance
            query_embedding = await self.embedding_client.embed(query_text)
            query_sql = """
                SELECT id, kind, content, metadata, created_at,
                       1 - (embedding <=> $2::vector) as score
                FROM agent_memories_vector
                WHERE agent_id = $1
            """
            params = [agent_id, self._vector_literal(query_embedding)]
            if kind_filter:
                query_sql += " AND kind = ANY($3)"  # plain string; no placeholders
                params.append(kind_filter)
            query_sql += f" ORDER BY embedding <=> $2::vector LIMIT ${len(params) + 1}"
            params.append(limit)
            async with self.pool.acquire() as conn:
                rows = await conn.fetch(query_sql, *params)
        else:
            # Fallback: simple text search (ILIKE) with a fixed 0.5 score
            query_sql = """
                SELECT id, kind, content, metadata, created_at, 0.5 as score
                FROM agent_memories_vector
                WHERE agent_id = $1 AND content ILIKE $2
            """
            params = [agent_id, f"%{query_text}%"]
            if kind_filter:
                query_sql += " AND kind = ANY($3)"  # plain string; no placeholders
                params.append(kind_filter)
            query_sql += f" ORDER BY created_at DESC LIMIT ${len(params) + 1}"
            params.append(limit)
            async with self.pool.acquire() as conn:
                rows = await conn.fetch(query_sql, *params)

        items = []
        for row in rows:
            items.append(MemoryItem(
                id=str(row['id']),
                kind=row['kind'],
                score=float(row['score']),
                content=row['content'],
                meta=self._as_meta(row['metadata']),
                created_at=row['created_at']
            ))
        return items

    @staticmethod
    def _vector_literal(embedding) -> str:
        """Render an embedding as pgvector's text input form, e.g. '[0.1,0.2]'.

        A string is assumed to already be in that form and passed through
        unchanged (in case the embedding client pre-formats its output).
        """
        if isinstance(embedding, str):
            return embedding
        return "[" + ",".join(str(x) for x in embedding) + "]"

    @staticmethod
    def _as_meta(value) -> dict:
        """Normalize a JSONB metadata value to a dict.

        asyncpg returns JSONB columns as JSON text unless a custom codec is
        registered; decode it here so MemoryItem.meta is always a dict.
        """
        if isinstance(value, str):
            try:
                return json.loads(value)
            except (ValueError, TypeError):
                return {}
        return value or {}