feat(platform): add new services, tools, tests and crews modules
New router intelligence modules (26 files): alert_ingest/store, audit_store, architecture_pressure, backlog_generator/store, cost_analyzer, data_governance, dependency_scanner, drift_analyzer, incident_* (5 files), llm_enrichment, platform_priority_digest, provider_budget, release_check_runner, risk_* (6 files), signature_state_store, sofiia_auto_router, tool_governance New services: - sofiia-console: Dockerfile, adapters/, monitor/nodes/ops/voice modules, launchd, react static - memory-service: integration_endpoints, integrations, voice_endpoints, static UI - aurora-service: full app suite (analysis, job_store, orchestrator, reporting, schemas, subagents) - sofiia-supervisor: new supervisor service - aistalk-bridge-lite: Telegram bridge lite - calendar-service: CalDAV calendar service with reminders - mlx-stt-service / mlx-tts-service: Apple Silicon speech services - binance-bot-monitor: market monitor service - node-worker: STT/TTS memory providers New tools (9): agent_email, browser_tool, contract_tool, observability_tool, oncall_tool, pr_reviewer_tool, repo_tool, safe_code_executor, secure_vault New crews: agromatrix_crew (10 modules: depth_classifier, doc_facts, doc_focus, farm_state, light_reply, llm_factory, memory_manager, proactivity, reflection_engine, session_context, style_adapter, telemetry) Tests: 85+ test files for all new modules Made-with: Cursor
This commit is contained in:
220
services/memory-service/app/integration_endpoints.py
Normal file
220
services/memory-service/app/integration_endpoints.py
Normal file
@@ -0,0 +1,220 @@
|
||||
"""
|
||||
DAARION Memory Service - Integration API Endpoints
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, List
|
||||
import logging
|
||||
|
||||
from .integrations import obsidian_integrator, gdrive_integrator
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter(prefix="/integrations", tags=["integrations"])
|
||||
|
||||
|
||||
class SetVaultRequest(BaseModel):
    """Request body for pointing the Obsidian integration at a vault."""

    # Filesystem path to the Obsidian vault directory.
    vault_path: str
||||
|
||||
|
||||
class SyncRequest(BaseModel):
    """Request body for an Obsidian → DAARION sync run."""

    # Directory that receives the synced notes.
    output_dir: Optional[str] = "/tmp/daarion_sync"
    # Whether attachments (images, PDFs, ...) are copied alongside notes.
    include_attachments: Optional[bool] = False
    # If set, only notes under these vault folders are synced.
    folder_filter: Optional[List[str]] = None
    # If set, only notes carrying at least one of these tags are synced.
    tag_filter: Optional[List[str]] = None
||||
|
||||
|
||||
class GDriveSyncRequest(BaseModel):
    """Request body for a Google Drive → DAARION sync run."""

    # Directory that receives the downloaded files.
    output_dir: Optional[str] = "/tmp/daarion_sync"
    # If set, restrict the sync to these Drive folder IDs.
    folder_ids: Optional[List[str]] = None
    # If set, only files with one of these extensions are synced.
    file_extensions: Optional[List[str]] = None
||||
|
||||
|
||||
@router.get("/status")
async def get_integrations_status():
    """Report the current status of every configured integration."""
    # Each integrator exposes its own status dict; combine them per source.
    return {
        "obsidian": obsidian_integrator.get_status(),
        "google_drive": gdrive_integrator.get_status(),
    }
|
||||
|
||||
|
||||
# =====================
|
||||
# OBSIDIAN ENDPOINTS
|
||||
# =====================
|
||||
|
||||
@router.post("/obsidian/set-vault")
async def set_obsidian_vault(request: SetVaultRequest):
    """Configure the Obsidian vault location.

    Returns 400 when the integrator rejects the path (e.g. it does not
    exist or is not a directory).
    """
    if not obsidian_integrator.set_vault_path(request.vault_path):
        raise HTTPException(status_code=400, detail="Invalid vault path")
    return {"status": "ok", "vault_path": request.vault_path}
|
||||
|
||||
|
||||
@router.post("/obsidian/scan")
async def scan_obsidian_vault():
    """Index the configured Obsidian vault and return scan statistics."""
    # A vault path must be configured before scanning makes sense.
    if not obsidian_integrator.vault_path:
        raise HTTPException(status_code=400, detail="Vault path not set")

    return {"status": "ok", "stats": obsidian_integrator.scan_vault()}
|
||||
|
||||
|
||||
@router.get("/obsidian/search")
async def search_obsidian_notes(query: str, limit: int = 10):
    """Search the scanned Obsidian notes and return trimmed previews."""
    # Searching requires a populated note cache, i.e. a prior /obsidian/scan.
    if not obsidian_integrator.notes_cache:
        raise HTTPException(status_code=400, detail="Vault not scanned")

    matches = obsidian_integrator.search_notes(query, limit=limit)

    payload = []
    for note in matches:
        content = note["content"]
        # Truncate long bodies to a 200-character preview.
        preview = content[:200] + "..." if len(content) > 200 else content
        payload.append({
            "title": note["title"],
            "path": note["path"],
            "tags": note["tags"],
            "match_score": note.get("match_score", 0),
            "preview": preview,
        })

    return {"query": query, "results": payload}
|
||||
|
||||
|
||||
@router.post("/obsidian/sync")
async def sync_obsidian_vault(request: SyncRequest):
    """Export the Obsidian vault into a local DAARION sync directory.

    Scans the vault lazily if it has not been scanned yet. Returns 400
    when no vault path has been configured; otherwise returns the sync
    statistics reported by the integrator.
    """
    if not obsidian_integrator.vault_path:
        raise HTTPException(status_code=400, detail="Vault path not set")

    # Lazy scan: allow a sync call without an explicit /obsidian/scan first.
    if not obsidian_integrator.notes_cache:
        obsidian_integrator.scan_vault()

    from pathlib import Path
    # `output_dir` is Optional, so a client sending an explicit JSON null
    # would reach here as None; fall back to the documented default instead
    # of crashing with TypeError inside Path().
    output_path = Path(request.output_dir or "/tmp/daarion_sync")
    output_path.mkdir(parents=True, exist_ok=True)

    stats = obsidian_integrator.sync_to_daarion(
        output_path,
        include_attachments=request.include_attachments,
        folder_filter=request.folder_filter,
        tag_filter=request.tag_filter,
    )

    return {"status": "ok", "stats": stats}
|
||||
|
||||
|
||||
@router.get("/obsidian/tags")
async def get_obsidian_tags():
    """List every vault tag with its note count, most-used tags first."""
    if not obsidian_integrator.tags_index:
        raise HTTPException(status_code=400, detail="Vault not scanned")

    # tags_index maps tag name -> collection of notes carrying that tag;
    # order by how many notes reference each tag, descending.
    by_usage = sorted(
        obsidian_integrator.tags_index.items(),
        key=lambda item: len(item[1]),
        reverse=True,
    )
    return {
        "tags": [{"name": tag, "count": len(notes)} for tag, notes in by_usage]
    }
|
||||
|
||||
|
||||
@router.get("/obsidian/graph")
async def get_obsidian_graph():
    """Build a node/link graph of notes and their outbound links."""
    if not obsidian_integrator.links_graph:
        raise HTTPException(status_code=400, detail="Vault not scanned")

    notes = obsidian_integrator.notes_cache

    # One node per cached note, keyed by its title.
    nodes = [
        {
            "id": title,
            "title": title,
            "tags": data["tags"],
            "size": data["size"],
        }
        for title, data in notes.items()
    ]

    # One link per outbound reference; dangling links to notes that are
    # not in the cache are skipped.
    links = [
        {"source": source, "target": target}
        for source, graph_data in obsidian_integrator.links_graph.items()
        for target in graph_data["outbound"]
        if target in notes
    ]

    return {"nodes": nodes, "links": links}
|
||||
|
||||
|
||||
# =====================
|
||||
# GOOGLE DRIVE ENDPOINTS
|
||||
# =====================
|
||||
|
||||
@router.post("/google-drive/auth")
async def authenticate_google_drive():
    """Run the Google Drive OAuth flow.

    Returns 401 when the integrator cannot authenticate (typically a
    missing or invalid client_secrets.json).
    """
    if not gdrive_integrator.authenticate():
        raise HTTPException(
            status_code=401,
            detail="Authentication failed. Check client_secrets.json"
        )
    return {"status": "ok", "authenticated": True}
|
||||
|
||||
|
||||
@router.get("/google-drive/files")
async def list_google_drive_files(
    folder_id: Optional[str] = None,
    max_results: int = 50
):
    """List Drive files, optionally scoped to a single folder."""
    listing = gdrive_integrator.list_files(
        folder_id=folder_id, max_results=max_results
    )
    return {"files": listing, "count": len(listing)}
|
||||
|
||||
|
||||
@router.post("/google-drive/sync")
async def sync_google_drive(request: GDriveSyncRequest):
    """Download Google Drive files into a local DAARION sync directory.

    Honors the optional folder and extension filters from the request and
    returns the sync statistics reported by the integrator.
    """
    from pathlib import Path
    # `output_dir` is Optional, so a client sending an explicit JSON null
    # would reach here as None; fall back to the documented default instead
    # of crashing with TypeError inside Path().
    output_path = Path(request.output_dir or "/tmp/daarion_sync")
    output_path.mkdir(parents=True, exist_ok=True)

    stats = gdrive_integrator.sync_to_daarion(
        output_path,
        folder_ids=request.folder_ids,
        file_extensions=request.file_extensions
    )

    return {"status": "ok", "stats": stats}
|
||||
|
||||
|
||||
@router.get("/google-drive/folders")
async def get_google_drive_folders():
    """Return the Google Drive folder hierarchy known to the integrator."""
    return {"structure": gdrive_integrator.get_folder_structure()}
|
||||
Reference in New Issue
Block a user