feat(foundation): FOUNDATION_UPDATE implementation

## Documentation (20 files)
- DAARION Ontology Core v1 (Agent → MicroDAO → Node → District; see the sketch after this list)
- User Onboarding & Identity Layer (DAIS)
- Data Model UPDATE, Event Catalog, Governance & Permissions
- Rooms Layer, City/MicroDAO/Agents/Nodes Interface Architecture
- Helper files: ontology-summary, lifecycles, event-schemas
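
For orientation, here is a minimal sketch of the Agent → MicroDAO → Node → District containment chain as plain Python dataclasses. All class and field names are illustrative assumptions, not the Domain types shipped in this commit.

```python
# Hypothetical illustration of the ontology hierarchy; names and fields
# are assumptions, not the commit's actual Domain types.
from dataclasses import dataclass, field
from typing import List


@dataclass
class Agent:
    agent_id: str
    role: str           # e.g. one of the new agent_role enum values
    microdao_id: str    # each agent is assigned to a MicroDAO


@dataclass
class MicroDAO:
    microdao_id: str
    node_id: str        # a MicroDAO runs on a Node
    agents: List[Agent] = field(default_factory=list)


@dataclass
class Node:
    node_id: str
    district_id: str    # Nodes are grouped into Districts
    microdaos: List[MicroDAO] = field(default_factory=list)


@dataclass
class District:
    district_id: str
    nodes: List[Node] = field(default_factory=list)
```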

## Database Migration (027)
- DAIS tables: dais_identities, dais_emails, dais_wallets, dais_keys
- agent_assignments table for Assignment Layer
- rooms table for Rooms Layer
- event_outbox for NATS event delivery (see the sketch after this list)
- New enums: agent_role, microdao_type, node_kind, node_status, etc.
- Updated agents, microdaos, nodes tables with ontology fields
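
As a hedged illustration, the event_outbox table could look like the following; the column names and types below are assumptions based on the common transactional-outbox pattern, not the actual DDL in migration 027.

```python
# Hypothetical shape of the event_outbox table; columns are assumptions,
# not the migration's actual DDL.
import psycopg2

DDL = """
CREATE TABLE IF NOT EXISTS event_outbox (
    id           BIGSERIAL PRIMARY KEY,
    subject      TEXT        NOT NULL,   -- NATS subject, e.g. rag.document.ingested
    payload      JSONB       NOT NULL,
    created_at   TIMESTAMPTZ NOT NULL DEFAULT now(),
    delivered_at TIMESTAMPTZ             -- NULL until a relay publishes it to NATS
);
"""


def apply_outbox_migration(dsn: str) -> None:
    """Apply the sketch DDL in a single transaction."""
    with psycopg2.connect(dsn) as conn:
        with conn.cursor() as cur:
            cur.execute(DDL)
```

The point of the pattern is that the domain write and the outbox row commit in the same transaction, so a separate relay can deliver to NATS at least once without dual-write races.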

## Backend
- DAIS service & routes (/api/v1/dais/*; see the route sketch after this list)
- Assignment service & routes (/api/v1/assignments/*)
- Domain types for DAIS and Ontology
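
A minimal FastAPI sketch of what a /api/v1/dais/* route could look like, in the style of the service's existing endpoints (see the ingest endpoint in the diff below); the endpoint paths, model fields, and in-memory store are assumptions, not the service code added here.

```python
# Hypothetical /api/v1/dais/* route; paths, fields, and storage are
# assumptions, not the code added by this commit.
from typing import Dict, Optional

from fastapi import APIRouter, HTTPException
from pydantic import BaseModel

router = APIRouter(prefix="/api/v1/dais")


class DaisIdentity(BaseModel):
    dais_id: str
    email: Optional[str] = None
    wallet_address: Optional[str] = None


_identities: Dict[str, DaisIdentity] = {}  # stand-in for the DAIS service layer


@router.post("/identities", response_model=DaisIdentity)
async def create_identity(identity: DaisIdentity) -> DaisIdentity:
    _identities[identity.dais_id] = identity
    return identity


@router.get("/identities/{dais_id}", response_model=DaisIdentity)
async def get_identity(dais_id: str) -> DaisIdentity:
    if dais_id not in _identities:
        raise HTTPException(status_code=404, detail="identity not found")
    return _identities[dais_id]
```

Mounted with `app.include_router(router)`, this would yield the /api/v1/dais/* surface listed above.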

## Frontend
- Ontology types (Agent, MicroDAO, Node, DAIS, Assignments)
- API clients for DAIS and Assignments
- UI components: DaisProfileCard, AssignmentsPanel, OntologyBadge

Non-breaking update - all existing functionality preserved.
Author: Apple
Date: 2025-11-29 15:24:38 -08:00
Commit: 7b91c8e83c (parent: deeaf26b0b)
43 changed files with 5733 additions and 47 deletions


```diff
@@ -80,7 +80,7 @@ async def handle_parser_document_parsed(msg):
     }
 
     # Ingest the document
-    result = ingest_parsed_document(
+    result = await ingest_parsed_document(
         dao_id=dao_id or team_id,
         doc_id=doc_id,
         parsed_json=mock_parsed_json,
```


```diff
@@ -18,7 +18,7 @@ from app.events import publish_document_ingested, publish_document_indexed
 logger = logging.getLogger(__name__)
 
 
-def ingest_parsed_document(
+async def ingest_parsed_document(
     dao_id: str,
     doc_id: str,
     parsed_json: Dict[str, Any],
@@ -81,46 +81,14 @@ def ingest_parsed_document(
     )
 
     # Publish events
-    try:
-        # First publish rag.document.ingested event
-        await publish_document_ingested(
-            doc_id=doc_id,
-            team_id=dao_id,
-            dao_id=dao_id,
-            chunk_count=written_docs,
-            indexed=True,
-            visibility="public",
-            metadata={
-                "ingestion_time_ms": round(pipeline_time * 1000),
-                "embed_model": settings.EMBEDDING_MODEL or "bge-m3@v1",
-                "pages_processed": pages_count,
-                "blocks_processed": blocks_count
-            }
-        )
-        logger.info(f"Published rag.document.ingested event for doc_id={doc_id}")
-
-        # Then publish rag.document.indexed event
-        chunk_ids = []
-        for i in range(written_docs):
-            chunk_ids.append(f"{doc_id}_chunk_{i+1}")
-
-        await publish_document_indexed(
-            doc_id=doc_id,
-            team_id=dao_id,
-            dao_id=dao_id,
-            chunk_ids=chunk_ids,
-            indexed=True,
-            visibility="public",
-            metadata={
-                "indexing_time_ms": 0,  # TODO: track actual indexing time
-                "milvus_collection": "documents_v1",
-                "neo4j_nodes_created": len(chunk_ids),
-                "embed_model": settings.EMBEDDING_MODEL or "bge-m3@v1"
-            }
-        )
-        logger.info(f"Published rag.document.indexed event for doc_id={doc_id}")
-    except Exception as e:
-        logger.error(f"Failed to publish RAG events for doc_id={doc_id}: {e}")
+    await _publish_events_async(
+        dao_id=dao_id,
+        doc_id=doc_id,
+        written_docs=written_docs,
+        pages_count=pages_count,
+        blocks_count=blocks_count,
+        pipeline_time=pipeline_time
+    )
 
     return {
         "status": "success",
@@ -229,6 +197,51 @@ def _parsed_json_to_documents(
     return documents
 
 
+async def _publish_events_async(
+    dao_id: str,
+    doc_id: str,
+    written_docs: int,
+    pages_count: int,
+    blocks_count: int,
+    pipeline_time: float
+):
+    try:
+        await publish_document_ingested(
+            doc_id=doc_id,
+            team_id=dao_id,
+            dao_id=dao_id,
+            chunk_count=written_docs,
+            indexed=True,
+            visibility="public",
+            metadata={
+                "ingestion_time_ms": round(pipeline_time * 1000),
+                "embed_model": settings.EMBEDDING_MODEL or "bge-m3@v1",
+                "pages_processed": pages_count,
+                "blocks_processed": blocks_count
+            }
+        )
+        logger.info(f"Published rag.document.ingested event for doc_id={doc_id}")
+
+        chunk_ids = [f"{doc_id}_chunk_{i+1}" for i in range(written_docs)]
+        await publish_document_indexed(
+            doc_id=doc_id,
+            team_id=dao_id,
+            dao_id=dao_id,
+            chunk_ids=chunk_ids,
+            indexed=True,
+            visibility="public",
+            metadata={
+                "indexing_time_ms": 0,
+                "milvus_collection": "documents_v1",
+                "neo4j_nodes_created": len(chunk_ids),
+                "embed_model": settings.EMBEDDING_MODEL or "bge-m3@v1"
+            }
+        )
+        logger.info(f"Published rag.document.indexed event for doc_id={doc_id}")
+    except Exception as e:
+        logger.error(f"Failed to publish RAG events for doc_id={doc_id}: {e}")
+
+
 def _create_ingest_pipeline() -> Pipeline:
     """
     Create Haystack ingest pipeline
```


```diff
@@ -87,7 +87,7 @@ async def ingest_endpoint(request: IngestRequest):
     - user_id: Optional user identifier
     """
     try:
-        result = ingest_parsed_document(
+        result = await ingest_parsed_document(
             dao_id=request.dao_id,
             doc_id=request.doc_id,
             parsed_json=request.parsed_json,
```


```diff
@@ -2,7 +2,7 @@ fastapi>=0.115.0
 uvicorn[standard]>=0.30.0
 pydantic>=2.0.0
 pydantic-settings>=2.0.0
-haystack-ai>=2.0.0
+farm-haystack[postgresql]>=1.25.3
 sentence-transformers>=2.2.0
 psycopg2-binary>=2.9.0
 httpx>=0.27.0
```