feat(platform): add new services, tools, tests and crews modules
New router intelligence modules (26 files): alert_ingest/store, audit_store, architecture_pressure, backlog_generator/store, cost_analyzer, data_governance, dependency_scanner, drift_analyzer, incident_* (5 files), llm_enrichment, platform_priority_digest, provider_budget, release_check_runner, risk_* (6 files), signature_state_store, sofiia_auto_router, tool_governance New services: - sofiia-console: Dockerfile, adapters/, monitor/nodes/ops/voice modules, launchd, react static - memory-service: integration_endpoints, integrations, voice_endpoints, static UI - aurora-service: full app suite (analysis, job_store, orchestrator, reporting, schemas, subagents) - sofiia-supervisor: new supervisor service - aistalk-bridge-lite: Telegram bridge lite - calendar-service: CalDAV calendar service with reminders - mlx-stt-service / mlx-tts-service: Apple Silicon speech services - binance-bot-monitor: market monitor service - node-worker: STT/TTS memory providers New tools (9): agent_email, browser_tool, contract_tool, observability_tool, oncall_tool, pr_reviewer_tool, repo_tool, safe_code_executor, secure_vault New crews: agromatrix_crew (10 modules: depth_classifier, doc_facts, doc_focus, farm_state, light_reply, llm_factory, memory_manager, proactivity, reflection_engine, session_context, style_adapter, telemetry) Tests: 85+ test files for all new modules Made-with: Cursor
This commit is contained in:
21
services/sofiia-console/Dockerfile
Normal file
21
services/sofiia-console/Dockerfile
Normal file
@@ -0,0 +1,21 @@
|
||||
FROM python:3.11-slim

WORKDIR /app

# Install dependencies first so this layer is cached across code-only changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Application code and the static UI bundle.
COPY app/ ./app/
COPY static/ ./static/

# Build metadata — inject at build time:
# docker build --build-arg BUILD_SHA=$(git rev-parse --short HEAD) \
#   --build-arg BUILD_TIME=$(date -u +%Y-%m-%dT%H:%M:%SZ) ...
ARG BUILD_SHA=dev
ARG BUILD_TIME=local
ENV BUILD_SHA=${BUILD_SHA}
ENV BUILD_TIME=${BUILD_TIME}

# Unbuffered stdout/stderr so container logs stream immediately.
ENV PYTHONUNBUFFERED=1
ENV PORT=8002
EXPOSE 8002

# sh -c wrapper so a runtime PORT override is expanded (exec-form alone
# would not substitute ${PORT}).
CMD ["sh", "-c", "uvicorn app.main:app --host 0.0.0.0 --port ${PORT:-8002}"]
|
||||
1
services/sofiia-console/app/__init__.py
Normal file
1
services/sofiia-console/app/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Sofiia Control Console — DAARION.city
|
||||
0
services/sofiia-console/app/adapters/__init__.py
Normal file
0
services/sofiia-console/app/adapters/__init__.py
Normal file
262
services/sofiia-console/app/adapters/aistalk.py
Normal file
262
services/sofiia-console/app/adapters/aistalk.py
Normal file
@@ -0,0 +1,262 @@
|
||||
"""
|
||||
AISTALK Adapter — HTTP bridge integration.
|
||||
|
||||
Enables forwarding BFF events/messages to an external AISTALK bridge service.
|
||||
The adapter is best-effort and non-blocking for callers.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import httpx
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _split_paths(raw: str, default: str) -> List[str]:
|
||||
src = (raw or "").strip()
|
||||
if not src:
|
||||
src = default
|
||||
parts = [p.strip() for p in src.split(",") if p.strip()]
|
||||
normalized: List[str] = []
|
||||
for p in parts:
|
||||
if not p.startswith("/"):
|
||||
p = "/" + p
|
||||
normalized.append(p)
|
||||
return normalized
|
||||
|
||||
|
||||
class AISTALKAdapter:
    """
    AISTALK relay adapter.

    Forwards console events/messages to an external AISTALK bridge over
    HTTP. Every send is best-effort and non-blocking: requests run on a
    small thread pool, try a list of candidate paths in order, and record
    success/failure state instead of raising into callers.

    Env overrides (optional):
        AISTALK_HEALTH_PATHS=/healthz,/health,/api/health
        AISTALK_EVENT_PATHS=/api/events,/events,/v1/events
        AISTALK_TEXT_PATHS=/api/text,/text,/v1/text
        AISTALK_AUDIO_PATHS=/api/audio,/audio,/v1/audio
    """

    def __init__(self, base_url: str, api_key: Optional[str] = None) -> None:
        # An empty base_url disables the adapter entirely (all sends no-op).
        self.base_url = base_url.rstrip("/") if base_url else ""
        self.api_key = api_key or ""
        self._enabled = bool(self.base_url)

        # Candidate endpoint paths, tried in order; first 2xx wins.
        self._health_paths = _split_paths(
            os.getenv("AISTALK_HEALTH_PATHS", ""),
            "/healthz,/health,/api/health",
        )
        self._event_paths = _split_paths(
            os.getenv("AISTALK_EVENT_PATHS", ""),
            "/api/events,/events,/v1/events",
        )
        self._text_paths = _split_paths(
            os.getenv("AISTALK_TEXT_PATHS", ""),
            "/api/text,/text,/v1/text",
        )
        self._audio_paths = _split_paths(
            os.getenv("AISTALK_AUDIO_PATHS", ""),
            "/api/audio,/audio,/v1/audio",
        )

        # Status fields are shared between pool threads and callers;
        # every read/write below goes through _lock.
        self._lock = threading.Lock()
        self._last_ok_at: Optional[float] = None
        self._last_error: str = ""
        self._last_endpoint: str = ""
        self._last_probe_ok: Optional[bool] = None
        self._last_probe_at: Optional[float] = None

        # Fire-and-forget outbound queue to avoid adding latency to BFF handlers.
        self._pool = ThreadPoolExecutor(max_workers=2, thread_name_prefix="aistalk-relay")

        if self._enabled:
            logger.info("AISTALKAdapter init: url=%s (HTTP relay mode)", self.base_url)
        else:
            logger.info("AISTALKAdapter init: no base_url, adapter disabled")

    @property
    def enabled(self) -> bool:
        # True iff a base_url was supplied at construction time.
        return self._enabled

    def _headers(self) -> Dict[str, str]:
        # Send the key under both common schemes so either bridge auth works.
        headers = {"Content-Type": "application/json"}
        if self.api_key:
            headers["Authorization"] = f"Bearer {self.api_key}"
            headers["X-API-Key"] = self.api_key
        return headers

    def _mark_ok(self, endpoint: str) -> None:
        # Record a successful delivery; clears the last error.
        with self._lock:
            self._last_ok_at = time.time()
            self._last_error = ""
            self._last_endpoint = endpoint

    def _mark_err(self, err: str) -> None:
        # Keep only a truncated error string for status reporting.
        with self._lock:
            self._last_error = (err or "")[:300]

    def _post_json(self, payload: Dict[str, Any], paths: List[str], kind: str) -> bool:
        """POST *payload* as JSON to the first path that answers 2xx.

        Returns False (and records the last error) when every path fails.
        Runs on the relay pool, so timeouts are kept tight.
        """
        if not self._enabled:
            return False
        last_err = "unreachable"
        timeout = httpx.Timeout(connect=0.6, read=1.8, write=1.8, pool=0.6)
        for path in paths:
            endpoint = f"{self.base_url}{path}"
            try:
                with httpx.Client(timeout=timeout) as client:
                    r = client.post(endpoint, headers=self._headers(), json=payload)
                    if 200 <= r.status_code < 300:
                        self._mark_ok(endpoint)
                        return True
                    last_err = f"HTTP {r.status_code} @ {path}"
            except Exception as e:
                last_err = f"{e.__class__.__name__}: {str(e)[:180]} @ {path}"
                continue
        self._mark_err(last_err)
        logger.debug("AISTALK %s relay failed: %s", kind, last_err)
        return False

    def _post_audio(self, payload: Dict[str, Any], audio_bytes: bytes, mime: str) -> bool:
        """POST an audio chunk as multipart form data to the audio paths.

        NOTE(review): metadata is sent as str(payload) — a Python-repr dict,
        not JSON; confirm the bridge parses that, or it is effectively opaque.
        """
        if not self._enabled:
            return False
        last_err = "unreachable"
        # Slightly looser timeouts than JSON: audio bodies are larger.
        timeout = httpx.Timeout(connect=0.8, read=2.5, write=2.5, pool=0.8)
        for path in self._audio_paths:
            endpoint = f"{self.base_url}{path}"
            files = {"audio": ("chunk", audio_bytes, mime or "audio/wav")}
            data = {"meta": str(payload)}
            try:
                with httpx.Client(timeout=timeout) as client:
                    # Auth headers only — no Content-Type, httpx must set the
                    # multipart boundary itself.
                    headers = {}
                    if self.api_key:
                        headers["Authorization"] = f"Bearer {self.api_key}"
                        headers["X-API-Key"] = self.api_key
                    r = client.post(endpoint, headers=headers, data=data, files=files)
                    if 200 <= r.status_code < 300:
                        self._mark_ok(endpoint)
                        return True
                    last_err = f"HTTP {r.status_code} @ {path}"
            except Exception as e:
                last_err = f"{e.__class__.__name__}: {str(e)[:180]} @ {path}"
                continue
        self._mark_err(last_err)
        logger.debug("AISTALK audio relay failed: %s", last_err)
        return False

    def _dispatch(self, fn, *args: Any) -> None:
        # Submit to the relay pool; never block or raise into the caller.
        if not self._enabled:
            return
        try:
            self._pool.submit(fn, *args)
        except Exception as e:
            self._mark_err(str(e))
            logger.debug("AISTALK dispatch failed: %s", e)

    def send_text(
        self,
        project_id: str,
        session_id: str,
        text: str,
        user_id: str = "console_user",
    ) -> None:
        """Queue a chat.reply text message for relay (non-blocking)."""
        if not self._enabled:
            return
        payload = {
            "v": 1,
            "type": "chat.reply",
            "project_id": project_id,
            "session_id": session_id,
            "user_id": user_id,
            "data": {"text": text},
        }
        self._dispatch(self._post_json, payload, self._text_paths, "text")

    def send_audio(
        self,
        project_id: str,
        session_id: str,
        audio_bytes: bytes,
        mime: str = "audio/wav",
    ) -> None:
        """Queue a voice.tts audio chunk for relay (non-blocking)."""
        if not self._enabled:
            return
        payload = {
            "v": 1,
            "type": "voice.tts",
            "project_id": project_id,
            "session_id": session_id,
            "user_id": "console_user",
            "data": {"mime": mime, "bytes": len(audio_bytes)},
        }
        self._dispatch(self._post_audio, payload, audio_bytes, mime)

    def handle_event(self, event: Dict[str, Any]) -> None:
        """Queue an arbitrary event dict for relay (non-blocking)."""
        if not self._enabled:
            return
        self._dispatch(self._post_json, event, self._event_paths, "event")

    def on_event(self, event: Dict[str, Any]) -> None:
        # Alias kept for callers that expect an on_event hook.
        self.handle_event(event)

    def probe_health(self) -> Dict[str, Any]:
        """Synchronously GET the health paths and return a summary dict.

        Any non-5xx answer proves the bridge is reachable; only HTTP 200
        counts as healthy. Updates the probe timestamps either way.
        """
        if not self._enabled:
            return {"enabled": False, "ok": False, "error": "disabled"}
        timeout = httpx.Timeout(connect=0.5, read=1.2, write=1.2, pool=0.5)
        last_err = "unreachable"
        for path in self._health_paths:
            endpoint = f"{self.base_url}{path}"
            try:
                with httpx.Client(timeout=timeout) as client:
                    headers = {}
                    if self.api_key:
                        headers["Authorization"] = f"Bearer {self.api_key}"
                        headers["X-API-Key"] = self.api_key
                    r = client.get(endpoint, headers=headers)
                    if r.status_code < 500:
                        with self._lock:
                            self._last_probe_ok = r.status_code == 200
                            self._last_probe_at = time.time()
                        if r.status_code == 200:
                            self._mark_ok(endpoint)
                            return {"enabled": True, "ok": True, "url": endpoint, "status": r.status_code}
                        last_err = f"HTTP {r.status_code} @ {path}"
                    else:
                        last_err = f"HTTP {r.status_code} @ {path}"
            except Exception as e:
                last_err = f"{e.__class__.__name__}: {str(e)[:180]} @ {path}"
                continue
        with self._lock:
            self._last_probe_ok = False
            self._last_probe_at = time.time()
        self._mark_err(last_err)
        return {"enabled": True, "ok": False, "error": last_err}

    def status(self) -> Dict[str, Any]:
        """Snapshot of relay state for diagnostics endpoints."""
        with self._lock:
            return {
                "enabled": self._enabled,
                "base_url": self.base_url,
                "last_ok_at": self._last_ok_at,
                "last_endpoint": self._last_endpoint,
                "last_error": self._last_error,
                "last_probe_ok": self._last_probe_ok,
                "last_probe_at": self._last_probe_at,
                "paths": {
                    "health": self._health_paths,
                    "events": self._event_paths,
                    "text": self._text_paths,
                    "audio": self._audio_paths,
                },
            }

    def __repr__(self) -> str:
        s = self.status()
        return (
            f"AISTALKAdapter(url={s['base_url']!r}, enabled={s['enabled']}, "
            f"last_probe_ok={s['last_probe_ok']}, last_endpoint={s['last_endpoint']!r})"
        )
|
||||
757
services/sofiia-console/app/docs_router.py
Normal file
757
services/sofiia-console/app/docs_router.py
Normal file
@@ -0,0 +1,757 @@
|
||||
"""
|
||||
sofiia-console — Projects, Documents, Sessions, Dialog Map endpoints.
|
||||
|
||||
All endpoints are mounted on the main FastAPI app in main.py via:
|
||||
app.include_router(docs_router)
|
||||
|
||||
Features:
|
||||
- File upload with sha256, mime detection, size limits
|
||||
- Projects CRUD
|
||||
- Documents per project with keyword search
|
||||
- Sessions with persistence (aiosqlite)
|
||||
- Messages with branching (parent_msg_id)
|
||||
- Dialog map (nodes + edges JSON)
|
||||
- Session fork
|
||||
"""
|
||||
import hashlib
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import mimetypes
|
||||
import os
|
||||
import re
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
import httpx
|
||||
from fastapi import APIRouter, HTTPException, Query, Request, UploadFile, File
|
||||
from fastapi.responses import FileResponse, JSONResponse
|
||||
from pydantic import BaseModel
|
||||
|
||||
from . import db as _db
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
docs_router = APIRouter(prefix="/api", tags=["projects-docs-sessions"])
|
||||
|
||||
# ── Config ────────────────────────────────────────────────────────────────────

# Root directory for persisted data; uploaded blobs live in a subfolder.
_DATA_DIR = Path(os.getenv("SOFIIA_DATA_DIR", "/app/data"))
_UPLOADS_DIR = _DATA_DIR / "uploads"
# Base URL of the router service (used for OCR / document ingest).
_ROUTER_URL = os.getenv("ROUTER_URL", "http://router:8000")

# Per-category upload size limits in MB, overridable via env.
_MAX_IMAGE_MB = int(os.getenv("UPLOAD_MAX_IMAGE_MB", "10"))
_MAX_VIDEO_MB = int(os.getenv("UPLOAD_MAX_VIDEO_MB", "200"))
_MAX_DOC_MB = int(os.getenv("UPLOAD_MAX_DOC_MB", "50"))

# Feature flags: OCR images through the Fabric capability, and ingest
# extracted text for embeddings — both off by default.
_USE_FABRIC_OCR = os.getenv("USE_FABRIC_OCR", "false").lower() == "true"
_USE_EMBEDDINGS = os.getenv("USE_EMBEDDINGS", "false").lower() == "true"

# Whitelist of MIME types accepted by /files/upload (checked after sniffing).
_ALLOWED_MIMES = {
    # images
    "image/jpeg", "image/png", "image/gif", "image/webp", "image/bmp",
    # video
    "video/mp4", "video/mpeg", "video/webm", "video/quicktime",
    # documents
    "application/pdf",
    "application/msword",
    "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
    "application/vnd.ms-excel",
    "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    "application/vnd.ms-powerpoint",
    "application/vnd.openxmlformats-officedocument.presentationml.presentation",
    "text/plain", "text/markdown", "text/csv",
    "application/json",
    "application/zip",
}
|
||||
|
||||
def _safe_filename(name: str) -> str:
|
||||
"""Remove path traversal attempts and dangerous chars."""
|
||||
name = os.path.basename(name)
|
||||
name = re.sub(r"[^\w\-_.()]", "_", name)
|
||||
return name[:128] or "upload"
|
||||
|
||||
|
||||
def _size_limit_mb(mime: str) -> int:
    """Return the upload size cap (MB) for the given MIME type.

    Images and videos get their own limits; everything else falls under
    the document limit.
    """
    if mime.startswith("image/"):
        return _MAX_IMAGE_MB
    if mime.startswith("video/"):
        return _MAX_VIDEO_MB
    return _MAX_DOC_MB
|
||||
|
||||
|
||||
def _detect_mime(filename: str, data: bytes) -> str:
|
||||
"""Detect MIME by magic bytes first, fall back to extension."""
|
||||
try:
|
||||
import magic
|
||||
return magic.from_buffer(data[:2048], mime=True)
|
||||
except Exception:
|
||||
pass
|
||||
guessed, _ = mimetypes.guess_type(filename)
|
||||
return guessed or "application/octet-stream"
|
||||
|
||||
|
||||
def _extract_text_simple(filename: str, data: bytes, mime: str) -> str:
|
||||
"""Best-effort text extraction without external services."""
|
||||
try:
|
||||
if mime == "text/plain" or filename.endswith((".txt", ".md", ".markdown")):
|
||||
return data.decode("utf-8", errors="replace")[:4096]
|
||||
if mime == "application/json":
|
||||
return data.decode("utf-8", errors="replace")[:4096]
|
||||
if mime == "application/pdf":
|
||||
try:
|
||||
import pypdf
|
||||
reader = pypdf.PdfReader(io.BytesIO(data))
|
||||
text = "\n".join(p.extract_text() or "" for p in reader.pages[:10])
|
||||
return text[:4096]
|
||||
except Exception:
|
||||
pass
|
||||
if mime in (
|
||||
"application/vnd.openxmlformats-officedocument.wordprocessingml.document",
|
||||
):
|
||||
try:
|
||||
import docx
|
||||
doc = docx.Document(io.BytesIO(data))
|
||||
return "\n".join(p.text for p in doc.paragraphs)[:4096]
|
||||
except Exception:
|
||||
pass
|
||||
except Exception as e:
|
||||
logger.debug("extract_text_simple failed: %s", e)
|
||||
return ""
|
||||
|
||||
|
||||
# ── Projects ──────────────────────────────────────────────────────────────────
|
||||
|
||||
class ProjectCreate(BaseModel):
    # Payload for POST /projects. `name` is required (non-blank check lives
    # in the handler); `description` is optional free text.
    name: str
    description: str = ""


class ProjectUpdate(BaseModel):
    # Partial update for PATCH /projects/{id}; None fields are left unchanged.
    name: Optional[str] = None
    description: Optional[str] = None
|
||||
|
||||
|
||||
@docs_router.get("/projects")
async def list_projects():
    """Return all projects (delegates to the aiosqlite-backed store)."""
    return await _db.list_projects()
|
||||
|
||||
|
||||
@docs_router.post("/projects", status_code=201)
async def create_project(body: ProjectCreate):
    """Create a project and kick off a best-effort graph bootstrap.

    The bootstrap (initial snapshot + signal recompute) runs in the
    background so the Portfolio view is populated without delaying the
    response. Raises 400 when the name is blank.
    """
    if not body.name.strip():
        raise HTTPException(status_code=400, detail="name is required")
    result = await _db.create_project(body.name.strip(), body.description)

    import asyncio as _asyncio

    async def _bootstrap_project(pid: str) -> None:
        # Best-effort: the two steps are independent and failures are non-fatal.
        try:
            await _db.compute_graph_snapshot(project_id=pid, window="7d")
        except Exception:
            pass
        try:
            await _db.recompute_graph_signals(project_id=pid, window="7d", dry_run=False)
        except Exception:
            pass

    pid = result.get("project_id", "")
    if pid:  # don't schedule a bootstrap for an empty/missing id
        # Keep a strong reference to the task: the event loop holds only weak
        # refs, so an unreferenced fire-and-forget task can be garbage-collected
        # before it finishes (documented asyncio caveat).
        tasks = getattr(create_project, "_bg_tasks", None)
        if tasks is None:
            tasks = set()
            create_project._bg_tasks = tasks
        task = _asyncio.ensure_future(_bootstrap_project(pid))
        tasks.add(task)
        task.add_done_callback(tasks.discard)
    return result
|
||||
|
||||
|
||||
@docs_router.get("/projects/{project_id}")
async def get_project(project_id: str):
    """Fetch a single project; 404 when the id is unknown."""
    p = await _db.get_project(project_id)
    if not p:
        raise HTTPException(status_code=404, detail="Project not found")
    return p


@docs_router.patch("/projects/{project_id}")
async def update_project(project_id: str, body: ProjectUpdate):
    """Apply a partial update; 404 covers both unknown id and no-op update."""
    ok = await _db.update_project(project_id, name=body.name, description=body.description)
    if not ok:
        raise HTTPException(status_code=404, detail="Project not found or no changes")
    return {"ok": True}
|
||||
|
||||
|
||||
# ── File Upload ───────────────────────────────────────────────────────────────
|
||||
|
||||
@docs_router.post("/files/upload")
async def upload_file(
    request: Request,
    project_id: str = Query("default"),
    title: str = Query(""),
    tags: str = Query(""),  # comma-separated
    file: UploadFile = File(...),
):
    """Upload a file, extract text, store metadata.

    Pipeline: sanitize name → sniff MIME from bytes → enforce per-type
    size limit → content-addressed store under sha256 → best-effort text
    extraction (local parsers, then Fabric OCR for images when enabled) →
    persist a document row → optional embedding ingest via the Router.

    Returns: {file_id, doc_id, sha256, mime, size_bytes, filename, preview_text}
    """
    raw_name = _safe_filename(file.filename or "upload")
    # NOTE(review): the whole file is buffered in memory; acceptable under
    # the MB limits above, but worth confirming for the 200MB video cap.
    data = await file.read()

    # Detect real mime from bytes (never trust the client's Content-Type)
    mime = _detect_mime(raw_name, data)

    # Validate mime
    if mime not in _ALLOWED_MIMES:
        raise HTTPException(status_code=415, detail=f"Unsupported file type: {mime}")

    # Size limits
    size_mb = len(data) / (1024 * 1024)
    limit_mb = _size_limit_mb(mime)
    if size_mb > limit_mb:
        raise HTTPException(
            status_code=413,
            detail=f"File too large: {size_mb:.1f}MB > {limit_mb}MB limit for {mime}",
        )

    # SHA-256 (content-addressed storage)
    sha = hashlib.sha256(data).hexdigest()

    # Store file (content-addressed): data/uploads/<sha[:2]>/<sha>_<name>;
    # identical content is written only once.
    _UPLOADS_DIR.mkdir(parents=True, exist_ok=True)
    shard = sha[:2]
    dest = _UPLOADS_DIR / shard / f"{sha}_{raw_name}"
    dest.parent.mkdir(parents=True, exist_ok=True)
    if not dest.exists():
        dest.write_bytes(data)

    file_id = sha[:16]  # short reference (prefix of the sha256)

    # Extract text
    extracted = _extract_text_simple(raw_name, data, mime)

    # Fabric OCR for images (feature flag) — only when local extraction found nothing
    if _USE_FABRIC_OCR and mime.startswith("image/") and not extracted:
        try:
            import base64 as _b64
            router_url = os.getenv("ROUTER_URL", "http://router:8000")
            async with httpx.AsyncClient(timeout=30.0) as client:
                r = await client.post(
                    f"{router_url}/v1/capability/ocr",
                    json={"image_b64": _b64.b64encode(data).decode(), "filename": raw_name},
                )
                if r.status_code == 200:
                    extracted = r.json().get("text", "")[:4096]
        except Exception as e:
            logger.debug("Fabric OCR failed (skipping): %s", e)

    # Parse tags
    tag_list = [t.strip() for t in tags.split(",") if t.strip()]

    # Ensure project exists — fall back to "default" rather than failing the upload
    if not await _db.get_project(project_id):
        project_id = "default"

    # Save to DB
    doc = await _db.create_document(
        project_id=project_id,
        file_id=file_id,
        sha256=sha,
        mime=mime,
        size_bytes=len(data),
        filename=raw_name,
        title=title or raw_name,
        tags=tag_list,
        extracted_text=extracted,
    )

    # Async ingest to Qdrant via Router (best-effort, non-blocking)
    if _USE_EMBEDDINGS and extracted:
        try:
            router_url = os.getenv("ROUTER_URL", "http://router:8000")
            async with httpx.AsyncClient(timeout=10.0) as client:
                await client.post(f"{router_url}/v1/documents/ingest", json={
                    "agent_id": "sofiia",
                    "text": extracted,
                    "doc_id": doc["doc_id"],
                    "project_id": project_id,
                    "filename": raw_name,
                    "mime": mime,
                    "tags": tag_list,
                })
        except Exception as e:
            logger.debug("Doc ingest (best-effort) failed: %s", e)

    return {
        **doc,
        "preview_text": extracted[:300],
        "storage_path": str(dest.relative_to(_DATA_DIR)),
    }
|
||||
|
||||
|
||||
# ── Documents ─────────────────────────────────────────────────────────────────

@docs_router.get("/projects/{project_id}/documents")
async def list_documents(project_id: str, limit: int = Query(50, ge=1, le=200)):
    """List a project's documents (ordering defined by the store)."""
    return await _db.list_documents(project_id, limit=limit)


@docs_router.get("/projects/{project_id}/documents/{doc_id}")
async def get_document(project_id: str, doc_id: str):
    """Fetch one document; the path project_id must match its owner."""
    doc = await _db.get_document(doc_id)
    if not doc or doc["project_id"] != project_id:
        raise HTTPException(status_code=404, detail="Document not found")
    return doc


@docs_router.post("/projects/{project_id}/search")
async def search_project(project_id: str, request: Request):
    """Keyword search across a project's documents.

    Body: {"query": str, "limit": int = 20}. Session results are a stub
    (always empty) until Phase 2 ships semantic session search.
    """
    body = await request.json()
    query = body.get("query", "").strip()
    if not query:
        raise HTTPException(status_code=400, detail="query is required")
    docs = await _db.search_documents(project_id, query, limit=body.get("limit", 20))
    sessions = []  # Phase 2: semantic session search
    return {"query": query, "documents": docs, "sessions": sessions}
|
||||
|
||||
|
||||
@docs_router.get("/files/{file_id}/download")
async def download_file(file_id: str):
    """Download a stored upload by its file_id (first 16 hex chars of the sha256).

    Blobs are stored as ``<full-64-char-sha256>_<name>``, so the on-disk name
    *starts with* the file_id but has 48 more hash characters before the
    underscore. The previous pattern ``{file_id}_*`` required an underscore
    right after the prefix and could therefore never match a real file
    (every download 404'd); match on the bare prefix instead.
    """
    # file_id is attacker-controlled and fed into a glob pattern — restrict it
    # to a sha256 prefix so wildcards like "*" cannot match arbitrary files.
    if not re.fullmatch(r"[0-9a-f]{1,64}", file_id or ""):
        raise HTTPException(status_code=404, detail="File not found")
    matches = list(_UPLOADS_DIR.rglob(f"{file_id}*"))
    if not matches:
        raise HTTPException(status_code=404, detail="File not found")
    path = matches[0]
    return FileResponse(str(path), filename=path.name)
|
||||
|
||||
|
||||
# ── Sessions ──────────────────────────────────────────────────────────────────

@docs_router.get("/sessions")
async def list_sessions(
    project_id: str = Query("default"),
    limit: int = Query(30, ge=1, le=100),
):
    """List persisted chat sessions for a project."""
    return await _db.list_sessions(project_id, limit=limit)


@docs_router.get("/sessions/{session_id}")
async def get_session(session_id: str):
    """Fetch one session; 404 when unknown."""
    s = await _db.get_session(session_id)
    if not s:
        raise HTTPException(status_code=404, detail="Session not found")
    return s


@docs_router.patch("/sessions/{session_id}/title")
async def update_session_title(session_id: str, request: Request):
    """Rename a session. Body: {"title": str}.

    NOTE(review): an unknown session_id still returns {"ok": True} — the
    store result is not checked; confirm that is intended.
    """
    body = await request.json()
    title = body.get("title", "").strip()
    await _db.update_session_title(session_id, title)
    return {"ok": True}
|
||||
|
||||
|
||||
# ── Chat History ──────────────────────────────────────────────────────────────

@docs_router.get("/chat/history")
async def get_chat_history(
    session_id: str = Query(...),
    limit: int = Query(50, ge=1, le=200),
    branch_label: Optional[str] = Query(None),
):
    """Load persisted message history for a session (for UI restore on page reload).

    branch_label optionally narrows to one conversation branch.
    """
    msgs = await _db.list_messages(session_id, limit=limit, branch_label=branch_label)
    return {"session_id": session_id, "messages": msgs, "count": len(msgs)}


# ── Dialog Map ────────────────────────────────────────────────────────────────

@docs_router.get("/sessions/{session_id}/map")
async def get_dialog_map(session_id: str):
    """Return nodes and edges for dialog map visualization."""
    return await _db.get_dialog_map(session_id)
|
||||
|
||||
|
||||
class ForkRequest(BaseModel):
    # Body for POST /sessions/{id}/fork: the branch point plus metadata
    # for the new session.
    from_msg_id: str
    new_title: str = ""
    project_id: str = "default"


@docs_router.post("/sessions/{session_id}/fork")
async def fork_session(session_id: str, body: ForkRequest):
    """Fork a session from a specific message (creates new session with ancestor messages)."""
    result = await _db.fork_session(
        source_session_id=session_id,
        from_msg_id=body.from_msg_id,
        new_title=body.new_title,
        project_id=body.project_id,
    )
    return result
|
||||
|
||||
|
||||
# ── Delete endpoints ───────────────────────────────────────────────────────────

@docs_router.delete("/projects/{project_id}")
async def delete_project(project_id: str):
    """Delete a project row. The built-in "default" project is protected.

    NOTE(review): only the projects row is removed — documents, tasks,
    sessions, etc. referencing project_id are left behind unless the schema
    declares ON DELETE CASCADE; confirm against db.py.
    """
    if project_id == "default":
        raise HTTPException(status_code=400, detail="Cannot delete default project")
    db = await _db.get_db()
    await db.execute("DELETE FROM projects WHERE project_id=?", (project_id,))
    await db.commit()
    return {"ok": True}


@docs_router.delete("/projects/{project_id}/documents/{doc_id}")
async def delete_document(project_id: str, doc_id: str):
    """Delete a document row after verifying project ownership.

    NOTE(review): the content-addressed blob under uploads/ is intentionally
    kept — it may be shared by other documents with the same sha256.
    """
    doc = await _db.get_document(doc_id)
    if not doc or doc["project_id"] != project_id:
        raise HTTPException(status_code=404, detail="Document not found")
    db = await _db.get_db()
    await db.execute("DELETE FROM documents WHERE doc_id=?", (doc_id,))
    await db.commit()
    return {"ok": True}
|
||||
|
||||
|
||||
# ── Tasks (Kanban) ─────────────────────────────────────────────────────────────

class TaskCreate(BaseModel):
    # Payload for POST /projects/{id}/tasks. Defaults describe a fresh
    # backlog card; labels/assignees are free-form strings.
    title: str
    description: str = ""
    status: str = "backlog"
    priority: str = "normal"
    labels: List[str] = []
    assignees: List[str] = []
    due_at: Optional[str] = None  # presumably an ISO timestamp — confirm with the store
    created_by: str = ""


class TaskUpdate(BaseModel):
    # Partial update; None fields are dropped via model_dump(exclude_none=True)
    # in the handler, so only explicitly-set fields reach the store.
    title: Optional[str] = None
    description: Optional[str] = None
    status: Optional[str] = None
    priority: Optional[str] = None
    labels: Optional[List[str]] = None
    assignees: Optional[List[str]] = None
    due_at: Optional[str] = None
    sort_key: Optional[float] = None  # presumably manual ordering within a column — confirm
|
||||
|
||||
|
||||
@docs_router.get("/projects/{project_id}/tasks")
async def list_tasks(
    project_id: str,
    status: Optional[str] = Query(None),
    limit: int = Query(100, ge=1, le=500),
):
    """List tasks for a project, optionally filtered by status."""
    return await _db.list_tasks(project_id, status=status, limit=limit)


@docs_router.post("/projects/{project_id}/tasks", status_code=201)
async def create_task(project_id: str, body: TaskCreate):
    """Create a task and mirror it into the project dialog graph."""
    if not body.title.strip():
        raise HTTPException(status_code=400, detail="title is required")
    if not await _db.get_project(project_id):
        raise HTTPException(status_code=404, detail="Project not found")
    task = await _db.create_task(
        project_id=project_id,
        title=body.title.strip(),
        description=body.description,
        status=body.status,
        priority=body.priority,
        labels=body.labels,
        assignees=body.assignees,
        due_at=body.due_at,
        created_by=body.created_by,
    )
    # Auto-upsert dialog node so the task appears on the project dialog map
    await _db.upsert_dialog_node(
        project_id=project_id,
        node_type="task",
        ref_id=task["task_id"],
        title=task["title"],
        summary=task["description"][:200],
        props={"status": task["status"], "priority": task["priority"]},
    )
    return task


@docs_router.get("/projects/{project_id}/tasks/{task_id}")
async def get_task(project_id: str, task_id: str):
    """Fetch one task; the path project_id must match its owner."""
    task = await _db.get_task(task_id)
    if not task or task["project_id"] != project_id:
        raise HTTPException(status_code=404, detail="Task not found")
    return task


@docs_router.patch("/projects/{project_id}/tasks/{task_id}")
async def update_task(project_id: str, task_id: str, body: TaskUpdate):
    """Partially update a task; status changes are mirrored to its dialog node."""
    task = await _db.get_task(task_id)
    if not task or task["project_id"] != project_id:
        raise HTTPException(status_code=404, detail="Task not found")
    updates = body.model_dump(exclude_none=True)
    ok = await _db.update_task(task_id, **updates)
    if ok and "status" in updates:
        # Keep the dialog-map node in sync with the kanban column.
        await _db.upsert_dialog_node(
            project_id=project_id,
            node_type="task",
            ref_id=task_id,
            title=task["title"],
            props={"status": updates["status"]},
        )
    return {"ok": ok}


@docs_router.delete("/projects/{project_id}/tasks/{task_id}")
async def delete_task(project_id: str, task_id: str):
    """Delete a task after verifying project ownership."""
    task = await _db.get_task(task_id)
    if not task or task["project_id"] != project_id:
        raise HTTPException(status_code=404, detail="Task not found")
    ok = await _db.delete_task(task_id)
    return {"ok": ok}
|
||||
|
||||
|
||||
# ── Meetings ───────────────────────────────────────────────────────────────────

class MeetingCreate(BaseModel):
    # Payload for POST /projects/{id}/meetings. starts_at is required
    # (validated in the handler); presumably an ISO timestamp — confirm.
    title: str
    starts_at: str
    agenda: str = ""
    duration_min: int = 30
    location: str = ""
    attendees: List[str] = []
    created_by: str = ""


class MeetingUpdate(BaseModel):
    # Partial update; None fields are dropped via model_dump(exclude_none=True).
    title: Optional[str] = None
    agenda: Optional[str] = None
    starts_at: Optional[str] = None
    duration_min: Optional[int] = None
    location: Optional[str] = None
    attendees: Optional[List[str]] = None
||||
|
||||
|
||||
@docs_router.get("/projects/{project_id}/meetings")
|
||||
async def list_meetings(project_id: str, limit: int = Query(50, ge=1, le=200)):
|
||||
return await _db.list_meetings(project_id, limit=limit)
|
||||
|
||||
|
||||
@docs_router.post("/projects/{project_id}/meetings", status_code=201)
|
||||
async def create_meeting(project_id: str, body: MeetingCreate):
|
||||
if not body.title.strip():
|
||||
raise HTTPException(status_code=400, detail="title is required")
|
||||
if not body.starts_at:
|
||||
raise HTTPException(status_code=400, detail="starts_at is required")
|
||||
if not await _db.get_project(project_id):
|
||||
raise HTTPException(status_code=404, detail="Project not found")
|
||||
meeting = await _db.create_meeting(
|
||||
project_id=project_id,
|
||||
title=body.title.strip(),
|
||||
starts_at=body.starts_at,
|
||||
agenda=body.agenda,
|
||||
duration_min=body.duration_min,
|
||||
location=body.location,
|
||||
attendees=body.attendees,
|
||||
created_by=body.created_by,
|
||||
)
|
||||
# Auto-upsert dialog node
|
||||
await _db.upsert_dialog_node(
|
||||
project_id=project_id,
|
||||
node_type="meeting",
|
||||
ref_id=meeting["meeting_id"],
|
||||
title=meeting["title"],
|
||||
summary=meeting["agenda"][:200],
|
||||
props={"starts_at": meeting["starts_at"], "duration_min": meeting["duration_min"]},
|
||||
)
|
||||
return meeting
|
||||
|
||||
|
||||
@docs_router.get("/projects/{project_id}/meetings/{meeting_id}")
async def get_meeting(project_id: str, meeting_id: str):
    """Fetch one meeting; 404 when missing or belonging to another project."""
    meeting = await _db.get_meeting(meeting_id)
    if not meeting or meeting["project_id"] != project_id:
        raise HTTPException(status_code=404, detail="Meeting not found")
    return meeting
|
||||
|
||||
|
||||
@docs_router.patch("/projects/{project_id}/meetings/{meeting_id}")
async def update_meeting(project_id: str, meeting_id: str, body: MeetingUpdate):
    """Partially update a meeting; fields left as None are untouched."""
    existing = await _db.get_meeting(meeting_id)
    if not existing or existing["project_id"] != project_id:
        raise HTTPException(status_code=404, detail="Meeting not found")
    # exclude_none: forward only the fields the caller actually supplied
    changed = await _db.update_meeting(meeting_id, **body.model_dump(exclude_none=True))
    return {"ok": changed}
|
||||
|
||||
|
||||
@docs_router.delete("/projects/{project_id}/meetings/{meeting_id}")
async def delete_meeting(project_id: str, meeting_id: str):
    """Delete a meeting after verifying it belongs to the given project."""
    existing = await _db.get_meeting(meeting_id)
    if not existing or existing["project_id"] != project_id:
        raise HTTPException(status_code=404, detail="Meeting not found")
    removed = await _db.delete_meeting(meeting_id)
    return {"ok": removed}
|
||||
|
||||
|
||||
# ── Dialog Map (Project-level graph) ─────────────────────────────────────────
|
||||
|
||||
@docs_router.get("/projects/{project_id}/dialog-map")
async def get_project_dialog_map(project_id: str):
    """Return canonical dialog graph for the project (all entity nodes + edges).

    Pure passthrough to the DB layer; no 404 check here — an unknown
    project presumably yields an empty graph. TODO confirm.
    """
    return await _db.get_project_dialog_map(project_id)
|
||||
|
||||
|
||||
class LinkCreate(BaseModel):
    """Payload for POST .../dialog/link: an edge between two typed entities."""

    from_type: str  # entity kind of the edge source (e.g. "meeting", "doc")
    from_id: str
    to_type: str
    to_id: str
    edge_type: str = "references"
    props: dict = {}  # free-form edge metadata; pydantic copies the default per instance
    created_by: str = ""
|
||||
|
||||
|
||||
@docs_router.post("/projects/{project_id}/dialog/link", status_code=201)
async def create_dialog_link(project_id: str, body: LinkCreate):
    """Create a dialog edge between two entities (auto-resolves/creates nodes).

    Performs four sequential writes: upsert both endpoint nodes, create the
    graph edge, then mirror it as an entity_link. Not transactional — a
    mid-sequence failure can leave partial state. TODO confirm acceptable.
    """
    if not await _db.get_project(project_id):
        raise HTTPException(status_code=404, detail="Project not found")

    # Resolve or create from_node
    from_node = await _db.upsert_dialog_node(
        project_id=project_id,
        node_type=body.from_type,
        ref_id=body.from_id,
        # Placeholder title from type + truncated id; presumably replaced when
        # the entity itself is upserted with a real title — TODO confirm.
        title=f"{body.from_type}:{body.from_id[:8]}",
        created_by=body.created_by,
    )
    # Resolve or create to_node
    to_node = await _db.upsert_dialog_node(
        project_id=project_id,
        node_type=body.to_type,
        ref_id=body.to_id,
        title=f"{body.to_type}:{body.to_id[:8]}",
        created_by=body.created_by,
    )
    edge = await _db.create_dialog_edge(
        project_id=project_id,
        from_node_id=from_node["node_id"],
        to_node_id=to_node["node_id"],
        edge_type=body.edge_type,
        props=body.props,
        created_by=body.created_by,
    )
    # Also persist as entity_link (flat relational mirror of the graph edge)
    await _db.create_entity_link(
        project_id=project_id,
        from_type=body.from_type, from_id=body.from_id,
        to_type=body.to_type, to_id=body.to_id,
        link_type=body.edge_type,
        created_by=body.created_by,
    )
    return {
        "ok": True,
        "from_node": from_node,
        "to_node": to_node,
        "edge": edge,
    }
|
||||
|
||||
|
||||
@docs_router.get("/projects/{project_id}/dialog/views")
async def list_dialog_views(project_id: str):
    """List saved dialog views (named filter/layout presets) for a project."""
    return await _db.list_dialog_views(project_id)
|
||||
|
||||
|
||||
class DialogViewSave(BaseModel):
    """Payload for PUT .../dialog/views/{name} — a named graph view preset."""

    name: str  # NOTE(review): the endpoint uses the path parameter, not this field — confirm it is still needed
    filters: dict = {}
    layout: dict = {}
|
||||
|
||||
|
||||
@docs_router.put("/projects/{project_id}/dialog/views/{name}")
async def save_dialog_view(project_id: str, name: str, body: DialogViewSave):
    """Create or overwrite a named dialog view (filters + layout) for a project."""
    return await _db.upsert_dialog_view(
        project_id=project_id,
        name=name,  # path parameter wins over body.name
        filters=body.filters,
        layout=body.layout,
    )
|
||||
|
||||
|
||||
# ── Doc Versions ──────────────────────────────────────────────────────────────
|
||||
|
||||
class DocUpdateRequest(BaseModel):
    """Payload for POST .../documents/{doc_id}/update (versioned doc write)."""

    content_md: str  # full replacement markdown body (not a patch)
    author_id: str = "system"
    reason: str = ""  # echoed back in the response for audit purposes
    dry_run: bool = False  # True: compute hash/diff only, write nothing
|
||||
|
||||
|
||||
@docs_router.post("/projects/{project_id}/documents/{doc_id}/update")
async def update_document_version(project_id: str, doc_id: str, body: DocUpdateRequest):
    """Update document text and create a new version (idempotent by content hash).

    dry_run=True: returns computed version_hash + diff_preview without writing.
    """
    import hashlib, difflib
    doc = await _db.get_document(doc_id)
    if not doc or doc["project_id"] != project_id:
        raise HTTPException(status_code=404, detail="Document not found")

    content = body.content_md.strip()
    # 16 hex chars of SHA-256: short content fingerprint used as a version token
    version_hash = hashlib.sha256(content.encode()).hexdigest()[:16]

    # Get latest version for diff
    existing = await _db.list_doc_versions(doc_id, limit=1)
    prev_content = ""
    if existing:
        prev_content = (await _db.get_doc_version_content(existing[0]["version_id"])) or ""

    diff_lines = list(difflib.unified_diff(
        prev_content.splitlines(), content.splitlines(),
        fromfile="previous", tofile="updated", lineterm="", n=3,
    ))
    diff_text = "\n".join(diff_lines[:80])  # cap for response
    will_change = content != prev_content

    # Identical content short-circuits exactly like dry_run: no version is
    # written, which is what makes repeated posts idempotent.
    if body.dry_run or not will_change:
        return {
            "ok": True,
            "dry_run": body.dry_run,
            "will_change": will_change,
            "version_hash": version_hash,
            "diff_text": diff_text,
        }

    new_ver = await _db.save_doc_version(doc_id, content, author_id=body.author_id)
    return {
        "ok": True,
        "dry_run": False,
        "will_change": True,
        "version_hash": version_hash,
        "version_id": new_ver["version_id"],
        "created_at": new_ver["created_at"],
        "diff_text": diff_text,
        "reason": body.reason,
    }
|
||||
|
||||
|
||||
@docs_router.get("/projects/{project_id}/documents/{doc_id}/versions")
async def list_doc_versions(project_id: str, doc_id: str, limit: int = Query(20)):
    """List version metadata for a document, scoped to its project."""
    document = await _db.get_document(doc_id)
    if not document or document["project_id"] != project_id:
        raise HTTPException(status_code=404, detail="Document not found")
    versions = await _db.list_doc_versions(doc_id, limit=limit)
    return versions
|
||||
|
||||
|
||||
class DocVersionRestore(BaseModel):
    """Payload for POST .../documents/{doc_id}/restore."""

    version_id: str  # id of the historical version to restore from
    author_id: str = "system"
|
||||
|
||||
|
||||
@docs_router.post("/projects/{project_id}/documents/{doc_id}/restore")
async def restore_doc_version(project_id: str, doc_id: str, body: DocVersionRestore):
    """Restore an old version by re-saving its content as a brand-new version."""
    document = await _db.get_document(doc_id)
    if not document or document["project_id"] != project_id:
        raise HTTPException(status_code=404, detail="Document not found")
    old_content = await _db.get_doc_version_content(body.version_id)
    if old_content is None:
        raise HTTPException(status_code=404, detail="Version not found")
    # History is append-only: restoring never rewrites past versions.
    restored = await _db.save_doc_version(doc_id, old_content, author_id=body.author_id)
    return {"ok": True, "new_version": restored, "restored_from": body.version_id}
|
||||
303
services/sofiia-console/app/monitor.py
Normal file
303
services/sofiia-console/app/monitor.py
Normal file
@@ -0,0 +1,303 @@
|
||||
"""
|
||||
Monitor telemetry bridge — probes each node's monitor endpoint.
|
||||
|
||||
Each node CAN expose GET /monitor/status (or /healthz extended).
|
||||
This module does a best-effort fan-out: missing/unreachable nodes
|
||||
return {"online": false} without crashing the dashboard.
|
||||
|
||||
Expected monitor/status response shape (node provides):
|
||||
{
|
||||
"online": true,
|
||||
"ts": "ISO",
|
||||
"node_id": "NODA1",
|
||||
"heartbeat_age_s": 5,
|
||||
"router": {"ok": true, "latency_ms": 12},
|
||||
"gateway": {"ok": true, "latency_ms": 8},
|
||||
"alerts_loop_slo": {"p95_ms": 320, "failed_rate": 0.0},
|
||||
"open_incidents": 2,
|
||||
"backends": {"alerts": "postgres", "audit": "auto", ...},
|
||||
"last_artifacts": {
|
||||
"risk_digest": "2026-02-24",
|
||||
"platform_digest": "2026-W08",
|
||||
"backlog": "2026-02-24"
|
||||
}
|
||||
}
|
||||
|
||||
If a node only has /healthz, we synthesise a partial status from it.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import time
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Dict, List, Optional
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
import httpx
|
||||
|
||||
# Timeout per node probe (seconds); applies to the whole httpx client,
# i.e. each individual GET within a probe.
_PROBE_TIMEOUT = 8.0
# Paths tried in order for the rich monitor status payload
_MONITOR_PATHS = ["/monitor/status", "/api/monitor/status"]
# Fallback health paths for a basic online check when no monitor endpoint exists
_HEALTH_PATHS = ["/healthz", "/health"]
|
||||
|
||||
|
||||
def _running_in_docker() -> bool:
|
||||
return os.path.exists("/.dockerenv")
|
||||
|
||||
|
||||
def _normalize_probe_url(base_url: str) -> str:
|
||||
"""
|
||||
Inside Docker, localhost points to the container itself.
|
||||
Remap localhost/127.0.0.1 to host.docker.internal for node probes.
|
||||
"""
|
||||
if not base_url:
|
||||
return base_url
|
||||
if not _running_in_docker():
|
||||
return base_url
|
||||
try:
|
||||
parsed = urlparse(base_url)
|
||||
if parsed.hostname in ("localhost", "127.0.0.1"):
|
||||
netloc = parsed.netloc.replace(parsed.hostname, "host.docker.internal")
|
||||
return urlunparse(parsed._replace(netloc=netloc))
|
||||
except Exception:
|
||||
return base_url
|
||||
return base_url
|
||||
|
||||
|
||||
async def _probe_monitor(base_url: str, timeout: float = _PROBE_TIMEOUT) -> Dict[str, Any]:
    """
    Probe a node's monitor endpoint.
    Returns the monitor status dict (may be synthesised from /healthz).

    Resolution order:
    1. each path in _MONITOR_PATHS — rich status, returned almost verbatim;
    2. each path in _HEALTH_PATHS — partial status synthesised here;
    3. an offline stub with source="unreachable".
    Never raises: every per-path failure falls through to the next candidate.
    """
    base = base_url.rstrip("/")
    t0 = time.monotonic()  # latency is measured from the start of the whole probe

    async with httpx.AsyncClient(timeout=timeout) as client:
        # Try dedicated /monitor/status first
        for path in _MONITOR_PATHS:
            try:
                r = await client.get(f"{base}{path}")
                if r.status_code == 200:
                    d = r.json()
                    # setdefault: keep node-provided values, only fill gaps
                    d.setdefault("online", True)
                    d.setdefault("latency_ms", int((time.monotonic() - t0) * 1000))
                    d.setdefault("source", "monitor_endpoint")
                    return d
            except Exception:
                continue

        # Fallback: synthesise from /healthz
        for path in _HEALTH_PATHS:
            try:
                r = await client.get(f"{base}{path}")
                if r.status_code == 200:
                    latency = int((time.monotonic() - t0) * 1000)
                    try:
                        hdata = r.json()
                    except Exception:
                        hdata = {}  # non-JSON health body still counts as online
                    return {
                        "online": True,
                        "ts": datetime.now(timezone.utc).isoformat(timespec="seconds"),
                        "latency_ms": latency,
                        "source": "healthz_fallback",
                        "router": {"ok": hdata.get("ok", True), "latency_ms": latency},
                        "gateway": None,
                        "alerts_loop_slo": None,
                        "open_incidents": None,
                        "backends": {},
                        "last_artifacts": {},
                    }
            except Exception:
                continue

    # Nothing answered on any path — report the node as offline.
    return {
        "online": False,
        "ts": datetime.now(timezone.utc).isoformat(timespec="seconds"),
        "latency_ms": None,
        "source": "unreachable",
        "error": f"no response from {base}",
    }
|
||||
|
||||
|
||||
async def _probe_router(router_url: str, timeout: float = 5.0) -> Dict[str, Any]:
    """Quick router health probe.

    Tries /healthz then /health; first 200 wins. Returns
    {"ok": bool, "latency_ms": int | None, "detail": str?}. Never raises.
    NOTE(review): a non-200 (e.g. 404) response falls through to the next
    path only via the status check, not an exception — a 404 here simply
    fails the `== 200` test and the loop continues. Confirmed by the code.
    """
    base = router_url.rstrip("/")
    t0 = time.monotonic()
    async with httpx.AsyncClient(timeout=timeout) as client:
        for path in ("/healthz", "/health"):
            try:
                r = await client.get(f"{base}{path}")
                if r.status_code == 200:
                    latency = int((time.monotonic() - t0) * 1000)
                    try:
                        d = r.json()
                    except Exception:
                        d = {}
                    return {"ok": True, "latency_ms": latency, "detail": d.get("status", "ok")}
            except Exception:
                continue
    return {"ok": False, "latency_ms": None}
|
||||
|
||||
|
||||
async def _probe_gateway(gateway_url: str, timeout: float = 5.0) -> Optional[Dict[str, Any]]:
    """
    Gateway health probe — also extracts build_sha, agents_count, required_missing
    from /health response when available.

    Tries /health, /healthz, / in order. Fix: the original returned on any
    status < 500, so a 404 on /health prevented the fallback paths from ever
    being tried; non-success statuses now continue to the next candidate.
    Returns None when gateway_url is empty (gateway not configured).
    """
    if not gateway_url:
        return None
    base = gateway_url.rstrip("/")
    t0 = time.monotonic()
    last_latency: Optional[int] = None
    async with httpx.AsyncClient(timeout=timeout) as client:
        for path in ("/health", "/healthz", "/"):
            try:
                r = await client.get(f"{base}{path}")
            except Exception:
                continue
            latency = int((time.monotonic() - t0) * 1000)
            last_latency = latency
            if r.status_code >= 400:
                # Endpoint missing or erroring at this path — try the next one.
                continue
            result: Dict[str, Any] = {"ok": True, "latency_ms": latency}
            try:
                d = r.json()
                result["agents_count"] = d.get("agents_count")
                result["build_sha"] = d.get("build_sha")
                result["build_time"] = d.get("build_time")
                result["node_id"] = d.get("node_id")
                result["required_missing"] = d.get("required_missing", [])
            except Exception:
                pass  # non-JSON body: keep the bare ok/latency result
            return result
    return {"ok": False, "latency_ms": last_latency}
|
||||
|
||||
|
||||
async def collect_node_telemetry(
    node_id: str,
    cfg: Dict[str, Any],
    router_api_key: str = "",  # NOTE(review): accepted but never forwarded to any probe — confirm whether auth is needed
) -> Dict[str, Any]:
    """
    Full telemetry for one node.
    Runs monitor probe, router probe, gateway probe in parallel.
    Returns merged/normalised result.

    cfg keys read: router_url, gateway_url, monitor_url, label, node_role, ssh.
    Never raises: per-probe exceptions are converted to offline/failed stubs.
    """
    router_url = _normalize_probe_url(cfg.get("router_url", ""))
    gateway_url = _normalize_probe_url(cfg.get("gateway_url", ""))
    monitor_url = _normalize_probe_url(cfg.get("monitor_url") or router_url)  # default: same host as router

    # Coroutine stubs so the gather() arity is fixed even when URLs are missing.
    async def _no_monitor() -> Dict[str, Any]:
        return {"online": False, "source": "no_url"}

    async def _no_router() -> Dict[str, Any]:
        return {"ok": False}

    # Fan-out parallel probes; return_exceptions keeps one failure from
    # cancelling the others.
    results = await asyncio.gather(
        _probe_monitor(monitor_url) if monitor_url else _no_monitor(),
        _probe_router(router_url) if router_url else _no_router(),
        _probe_gateway(gateway_url),
        return_exceptions=True,
    )

    mon = results[0] if not isinstance(results[0], Exception) else {"online": False, "error": str(results[0])[:100]}
    rtr = results[1] if not isinstance(results[1], Exception) else {"ok": False}
    gwy = results[2] if not isinstance(results[2], Exception) else None

    # Merge: router from dedicated probe overrides monitor.router if present
    # (dedicated probe is more accurate; monitor.router may be stale)
    router_merged = {
        "ok": rtr.get("ok", False),
        "latency_ms": rtr.get("latency_ms"),
    }
    gateway_merged = gwy  # may be None

    # Determine overall online status: either probe succeeding counts
    online = rtr.get("ok", False) or mon.get("online", False)

    gwy_data = gateway_merged or {}

    return {
        "node_id": node_id,
        "label": cfg.get("label", node_id),
        "node_role": cfg.get("node_role", "prod"),
        "router_url": router_url,
        "gateway_url": gateway_url or None,
        "monitor_url": monitor_url or None,
        "ssh_configured": bool(cfg.get("ssh")),
        "online": online,
        "ts": mon.get("ts") or datetime.now(timezone.utc).isoformat(timespec="seconds"),
        # --- router ---
        "router_ok": router_merged["ok"],
        "router_latency_ms": router_merged["latency_ms"],
        # --- gateway ---
        "gateway_ok": gwy_data.get("ok"),
        "gateway_latency_ms": gwy_data.get("latency_ms"),
        "gateway_agents_count": gwy_data.get("agents_count"),
        "gateway_build_sha": gwy_data.get("build_sha"),
        "gateway_build_time": gwy_data.get("build_time"),
        "gateway_required_missing": gwy_data.get("required_missing", []),
        # --- monitor extended (present only if monitor endpoint exists) ---
        "heartbeat_age_s": mon.get("heartbeat_age_s"),
        "alerts_loop_slo": mon.get("alerts_loop_slo"),
        "open_incidents": mon.get("open_incidents"),
        "backends": mon.get("backends") or {},
        "last_artifacts": mon.get("last_artifacts") or {},
        # --- meta ---
        "monitor_source": mon.get("source", "unknown"),
        "monitor_latency_ms": mon.get("latency_ms"),
    }
|
||||
|
||||
|
||||
async def collect_all_nodes(
    nodes_cfg: Dict[str, Any],
    router_api_key: str = "",
    timeout_per_node: float = 10.0,
) -> List[Dict[str, Any]]:
    """Parallel fan-out for all nodes. Each node gets up to timeout_per_node seconds.

    Nodes with cfg["enabled"] == False are short-circuited to a disabled stub
    without being probed. Timeouts and unexpected errors become offline stubs;
    this function never raises. Result order matches nodes_cfg iteration order.
    """
    if not nodes_cfg:
        return []

    async def _safe(node_id: str, cfg: Dict[str, Any]) -> Dict[str, Any]:
        # Explicitly disabled node: report it, but do not probe.
        if cfg.get("enabled", True) is False:
            return {
                "node_id": node_id,
                "label": cfg.get("label", node_id),
                "router_url": cfg.get("router_url") or None,
                "gateway_url": cfg.get("gateway_url") or None,
                "monitor_url": cfg.get("monitor_url") or None,
                "online": False,
                "router_ok": False,
                "gateway_ok": None,
                "disabled": True,
                "monitor_source": "disabled",
            }
        try:
            # Hard per-node deadline on top of the per-request timeouts inside.
            return await asyncio.wait_for(
                collect_node_telemetry(node_id, cfg, router_api_key),
                timeout=timeout_per_node,
            )
        except asyncio.TimeoutError:
            return {
                "node_id": node_id,
                "label": cfg.get("label", node_id),
                "online": False,
                "router_ok": False,
                "gateway_ok": None,
                "error": f"timeout after {timeout_per_node}s",
            }
        except Exception as e:
            return {
                "node_id": node_id,
                "label": cfg.get("label", node_id),
                "online": False,
                "router_ok": False,
                "error": str(e)[:120],  # truncated so one node cannot bloat the payload
            }

    tasks = [_safe(nid, ncfg) for nid, ncfg in nodes_cfg.items()]
    return list(await asyncio.gather(*tasks))
|
||||
45
services/sofiia-console/app/nodes.py
Normal file
45
services/sofiia-console/app/nodes.py
Normal file
@@ -0,0 +1,45 @@
|
||||
"""Nodes dashboard: aggregate telemetry from all configured nodes."""
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from .config import load_nodes_registry
|
||||
from .monitor import collect_all_nodes
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def get_nodes_dashboard(router_api_key: str = "") -> Dict[str, Any]:
    """
    GET /api/nodes/dashboard

    For each node in nodes_registry.yml, collects:
    - router health (ok, latency)
    - gateway health (ok, latency) — optional
    - monitor agent telemetry (heartbeat, SLO, incidents, backends, artifacts)

    All probes run in parallel with per-node timeout.
    Non-fatal: unreachable nodes appear with online=false.

    Returns {"nodes": [...], "summary": {total, online, router_ok}, "defaults": {...}}.
    """
    reg = load_nodes_registry()
    nodes_cfg = reg.get("nodes", {})
    defaults = reg.get("defaults", {})
    # Per-node deadline comes from the registry, default 10s.
    timeout = float(defaults.get("health_timeout_sec", 10))

    nodes = await collect_all_nodes(
        nodes_cfg,
        router_api_key=router_api_key,
        timeout_per_node=timeout,
    )

    online_count = sum(1 for n in nodes if n.get("online"))
    router_ok_count = sum(1 for n in nodes if n.get("router_ok"))

    return {
        "nodes": nodes,
        "summary": {
            "total": len(nodes),
            "online": online_count,
            "router_ok": router_ok_count,
        },
        "defaults": defaults,
    }
|
||||
61
services/sofiia-console/app/ops.py
Normal file
61
services/sofiia-console/app/ops.py
Normal file
@@ -0,0 +1,61 @@
|
||||
"""Ops: run risk dashboard, pressure dashboard, backlog generate, release_check via router tools."""
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from .config import get_router_url
|
||||
from .router_client import execute_tool
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Map ops action id -> (tool, action, default params).
# run_ops_action() looks actions up here; caller-supplied params are merged
# over the defaults (caller wins on key collisions).
OPS_ACTIONS: Dict[str, tuple] = {
    "risk_dashboard": ("risk_engine_tool", "dashboard", {"env": "prod"}),
    "pressure_dashboard": ("architecture_pressure_tool", "dashboard", {"env": "prod"}),
    "backlog_generate_weekly": ("backlog_tool", "auto_generate_weekly", {"env": "prod"}),
    "pieces_status": ("pieces_tool", "status", {}),
    "notion_status": ("notion_tool", "status", {}),
    "notion_create_task": ("notion_tool", "create_task", {}),
    "notion_create_page": ("notion_tool", "create_page", {}),
    "notion_update_page": ("notion_tool", "update_page", {}),
    "notion_create_database": ("notion_tool", "create_database", {}),
    # release_check is indirect: it starts an orchestrated task rather than
    # calling a dedicated tool.
    "release_check": (
        "job_orchestrator_tool",
        "start_task",
        {"task_id": "release_check", "inputs": {"gate_profile": "staging"}},
    ),
}
|
||||
|
||||
|
||||
async def run_ops_action(
    action_id: str,
    node_id: str,
    params_override: Dict[str, Any],
    *,
    agent_id: str = "sofiia",
    timeout: float = 90.0,
    api_key: str = "",
) -> Dict[str, Any]:
    """Run one ops action against the given node's router. Returns { status, data, error }.

    action_id must be a key of OPS_ACTIONS; unknown ids return a failed result
    rather than raising. Any transport/tool exception is also converted into a
    failed result (retryable=True) — this function never raises.
    """
    if action_id not in OPS_ACTIONS:
        return {"status": "failed", "data": None, "error": {"message": f"Unknown action: {action_id}"}}
    tool, action, default_params = OPS_ACTIONS[action_id]
    # Caller-supplied params win over the action's defaults on collision.
    params = {**default_params, **params_override}
    base_url = get_router_url(node_id)
    try:
        out = await execute_tool(
            base_url,
            tool,
            action,
            params=params,
            agent_id=agent_id,
            timeout=timeout,
            api_key=api_key,
        )
        return out
    except Exception as e:
        logger.exception("ops run failed: action=%s node=%s", action_id, node_id)
        return {
            "status": "failed",
            "data": None,
            "error": {"message": str(e)[:300], "retryable": True},
        }
|
||||
78
services/sofiia-console/app/router_client.py
Normal file
78
services/sofiia-console/app/router_client.py
Normal file
@@ -0,0 +1,78 @@
|
||||
"""Call DAARION router: /v1/agents/{agent_id}/infer and /v1/tools/execute."""
|
||||
import logging
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
import httpx
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def infer(
    base_url: str,
    agent_id: str,
    prompt: str,
    *,
    model: Optional[str] = None,
    system_prompt: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
    timeout: float = 120.0,
    api_key: str = "",
) -> Dict[str, Any]:
    """POST /v1/agents/{agent_id}/infer. Returns { response, model, backend, ... }.

    Raises httpx.HTTPStatusError on non-2xx (raise_for_status) and httpx
    transport errors on connection failure — callers are expected to handle.
    max_tokens/temperature are fixed here (2048 / 0.4) by design.
    """
    url = f"{base_url.rstrip('/')}/v1/agents/{agent_id}/infer"
    headers = {"Content-Type": "application/json"}
    if api_key:
        headers["Authorization"] = f"Bearer {api_key}"
    body = {
        "prompt": prompt,
        "metadata": metadata or {},
        "max_tokens": 2048,
        "temperature": 0.4,
    }
    # Optional fields are omitted entirely rather than sent as null.
    if model:
        body["model"] = model
    if system_prompt:
        body["system_prompt"] = system_prompt
    async with httpx.AsyncClient(timeout=timeout) as client:
        r = await client.post(url, json=body, headers=headers)
        r.raise_for_status()
        return r.json()
|
||||
|
||||
|
||||
async def execute_tool(
    base_url: str,
    tool: str,
    action: str,
    params: Optional[Dict[str, Any]] = None,
    *,
    agent_id: str = "sofiia",
    timeout: float = 60.0,
    api_key: str = "",
) -> Dict[str, Any]:
    """POST /v1/tools/execute. Returns { status, data, error }.

    Raises httpx errors on transport failure / non-2xx; callers handle.
    """
    url = f"{base_url.rstrip('/')}/v1/tools/execute"
    headers = {"Content-Type": "application/json"}
    if api_key:
        headers["Authorization"] = f"Bearer {api_key}"
    body = {
        "tool": tool,
        "action": action,
        "agent_id": agent_id,
        # NOTE(review): params are spread at the TOP level of the payload, so a
        # param literally named "tool"/"action"/"agent_id" would overwrite those
        # fields — confirm the router API expects a flat body, not {"params": ...}.
        **(params or {}),
    }
    async with httpx.AsyncClient(timeout=timeout) as client:
        r = await client.post(url, json=body, headers=headers)
        r.raise_for_status()
        return r.json()
|
||||
|
||||
|
||||
async def health(base_url: str, timeout: float = 5.0) -> Dict[str, Any]:
    """GET /healthz, /health or / — first path answering 200 wins.

    Returns { ok, status?, path? } or { ok: False, error: "unreachable" }.

    Fix: the original returned on the first path that answered at all, so a
    404 on /healthz prevented /health and / from ever being tried. Non-200
    answers now fall through to the next candidate; when nothing returns 200,
    the last non-200 status seen is reported. Also reuses one AsyncClient for
    all paths instead of opening a new one per attempt.
    """
    base = base_url.rstrip("/")
    last: Optional[Dict[str, Any]] = None
    async with httpx.AsyncClient(timeout=timeout) as client:
        for path in ("/healthz", "/health", "/"):
            try:
                r = await client.get(f"{base}{path}")
            except Exception as e:
                logger.debug("health %s%s failed: %s", base_url, path, e)
                continue
            if r.status_code == 200:
                return {"ok": True, "status": 200, "path": path}
            # Remember the closest miss in case every path fails.
            last = {"ok": False, "status": r.status_code, "path": path}
    return last or {"ok": False, "error": "unreachable"}
|
||||
130
services/sofiia-console/app/voice_utils.py
Normal file
130
services/sofiia-console/app/voice_utils.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""
|
||||
voice_utils.py — Voice pipeline utilities (importable without FastAPI).
|
||||
|
||||
Extracted from main.py to enable unit testing without full app startup.
|
||||
"""
|
||||
import re
|
||||
|
||||
# Splits on sentence boundaries, long pauses, and paragraph breaks.
# Lookbehinds keep the punctuation attached to the preceding fragment.
_SENTENCE_SPLIT_RE = re.compile(
    r'(?<=[.!?…])\s+'        # standard sentence end
    r'|(?<=[,;:])\s{2,}'     # long pause after punctuation
    r'|(?<=\n)\s*(?=\S)'     # new paragraph
)

MIN_CHUNK_CHARS = 30   # avoid splitting "OK." into tiny TTS calls
MAX_CHUNK_CHARS = 250  # align with max_tts_chars in voice policy
MAX_TTS_SAFE_CHARS = 700  # hard server-side limit (memory-service accepts ≤700)

# Markdown/code patterns to strip before TTS.
# Order of application matters (see sanitize_for_voice): fenced code blocks
# must go before inline code, bold before italic.
_MD_BOLD_RE = re.compile(r'\*\*(.+?)\*\*', re.DOTALL)
_MD_ITALIC_RE = re.compile(r'\*(.+?)\*', re.DOTALL)
_MD_HEADER_RE = re.compile(r'^#{1,6}\s+', re.MULTILINE)
_MD_LIST_RE = re.compile(r'^[\-\*]\s+', re.MULTILINE)
_MD_ORDERED_RE = re.compile(r'^\d+\.\s+', re.MULTILINE)
_MD_CODE_BLOCK_RE = re.compile(r'```.*?```', re.DOTALL)
_MD_INLINE_CODE_RE = re.compile(r'`[^`]+`')
_MD_LINK_RE = re.compile(r'\[([^\]]+)\]\([^)]+\)')
_MD_URL_RE = re.compile(r'https?://\S+')
_MULTI_SPACE_RE = re.compile(r'[ \t]{2,}')
_MULTI_NEWLINE_RE = re.compile(r'\n{3,}')
|
||||
|
||||
|
||||
def split_into_voice_chunks(text: str, max_chars: int = MAX_CHUNK_CHARS) -> list[str]:
    """Split text into TTS-friendly chunks (sentences / clauses).

    Rules:
    - Try sentence boundaries first.
    - Merge short fragments (< MIN_CHUNK_CHARS) with the next chunk.
    - Hard-split anything > max_chars on a word boundary.

    Returns a list of non-empty strings. Never loses content.
    """
    raw = [s.strip() for s in _SENTENCE_SPLIT_RE.split(text) if s.strip()]
    if not raw:
        # Whitespace-only input -> []; otherwise keep the text as one chunk.
        return [text.strip()] if text.strip() else []

    chunks: list[str] = []
    buf = ""  # accumulates fragments until adding another would overflow
    for part in raw:
        candidate = (buf + " " + part).strip() if buf else part
        if len(candidate) > max_chars:
            if buf:
                chunks.append(buf)
            # hard-split part at word boundary; rsplit falls back to the whole
            # slice when it contains no space, so `part` always shrinks and
            # the loop terminates.
            while len(part) > max_chars:
                cut = part[:max_chars].rsplit(" ", 1)
                chunks.append(cut[0].strip())
                part = part[len(cut[0]):].strip()
            buf = part  # remainder seeds the next merge round
        else:
            buf = candidate
    if buf:
        chunks.append(buf)

    # Merge tiny trailing fragments into the previous chunk
    # (a tiny FIRST chunk has nothing before it and is kept as-is).
    merged: list[str] = []
    for chunk in chunks:
        if merged and len(chunk) < MIN_CHUNK_CHARS:
            merged[-1] = merged[-1] + " " + chunk
        else:
            merged.append(chunk)
    return merged
|
||||
|
||||
|
||||
def clean_think_blocks(text: str) -> str:
    """Strip <think>...</think> reasoning blocks from LLM output.

    Complete blocks are removed first (case-insensitive, spanning newlines);
    if a dangling unclosed <think> tag survives, everything from that tag
    onward is dropped. The remainder is returned stripped.
    """
    without_blocks = re.sub(
        r"<think>.*?</think>", "", text, flags=re.IGNORECASE | re.DOTALL
    )
    if "<think>" in without_blocks.lower():
        # Unterminated tag: keep only the text before it.
        without_blocks = re.split(r"(?i)<think>", without_blocks)[0]
    return without_blocks.strip()
|
||||
|
||||
|
||||
def sanitize_for_voice(text: str, max_chars: int = MAX_TTS_SAFE_CHARS) -> str:
    """Server-side final barrier before TTS synthesis.

    Pipeline (order matters):
    1. Strip <think> blocks
    2. Strip markdown (code blocks first → inline → bold → italic → headers → lists → links → URLs)
    3. Collapse whitespace
    4. Hard-truncate to max_chars on sentence boundary when possible

    Returns clean, TTS-ready plain text. Never raises.
    """
    if not text:
        return ""

    # 1. <think> blocks
    out = clean_think_blocks(text)

    # 2. Markdown stripping (order: fenced code before inline to avoid partial
    # matches; bold before italic so ** is not half-consumed by the * pattern)
    out = _MD_CODE_BLOCK_RE.sub('', out)
    out = _MD_INLINE_CODE_RE.sub('', out)
    out = _MD_BOLD_RE.sub(r'\1', out)
    out = _MD_ITALIC_RE.sub(r'\1', out)
    out = _MD_HEADER_RE.sub('', out)
    out = _MD_LIST_RE.sub('', out)
    out = _MD_ORDERED_RE.sub('', out)
    out = _MD_LINK_RE.sub(r'\1', out)  # keep link text, drop URL
    out = _MD_URL_RE.sub('', out)  # remove bare URLs
    # 3. Whitespace normalisation
    out = _MULTI_SPACE_RE.sub(' ', out)
    out = _MULTI_NEWLINE_RE.sub('\n\n', out)
    out = out.strip()

    # 4. Hard-truncate preserving sentence boundary
    if len(out) > max_chars:
        # Try to cut at last sentence-ending punctuation before the limit;
        # only accept a boundary past the halfway mark so we don't return a stub.
        cut = out[:max_chars]
        boundary = max(cut.rfind('.'), cut.rfind('!'), cut.rfind('?'), cut.rfind('…'))
        if boundary > max_chars // 2:
            out = out[:boundary + 1].strip()
        else:
            out = cut.rstrip() + '…'

    return out
|
||||
77
services/sofiia-console/launchd/install-launchd.sh
Executable file
77
services/sofiia-console/launchd/install-launchd.sh
Executable file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env bash
# Install (or reinstall) the sofiia-console launchd user agent on macOS.
# Writes a plist into ~/Library/LaunchAgents, bootstraps it into the gui
# domain, enables it, and kickstarts it immediately. Idempotent: an existing
# agent with the same label is booted out first.
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
LABEL="${SOFIIA_LAUNCHD_LABEL:-com.daarion.sofiia}"
DOMAIN="gui/$(id -u)"   # per-user GUI domain (modern launchctl addressing)
LAUNCH_AGENTS_DIR="${HOME}/Library/LaunchAgents"
PLIST_PATH="${LAUNCH_AGENTS_DIR}/${LABEL}.plist"
START_SCRIPT="${ROOT_DIR}/start-daemon.sh"

PORT_VALUE="${PORT:-8002}"
DATA_DIR_VALUE="${SOFIIA_DATA_DIR:-${HOME}/.sofiia/console-data}"
LOG_DIR="${DATA_DIR_VALUE}/logs"
LOG_OUT="${LOG_DIR}/launchd.out.log"
LOG_ERR="${LOG_DIR}/launchd.err.log"
# NOTE(review): PATH is always set in a shell, so the :- fallback here is
# effectively dead — the literal default is never used. Confirm intent.
PATH_VALUE="${PATH:-/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin}"

if [ ! -x "${START_SCRIPT}" ]; then
    echo "[sofiia-launchd] missing start script: ${START_SCRIPT}"
    exit 1
fi

mkdir -p "${LAUNCH_AGENTS_DIR}" "${LOG_DIR}" "${DATA_DIR_VALUE}"

# Unquoted heredoc delimiter: shell variables ARE expanded into the plist.
cat > "${PLIST_PATH}" <<PLIST
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>Label</key>
    <string>${LABEL}</string>

    <key>ProgramArguments</key>
    <array>
        <string>${START_SCRIPT}</string>
    </array>

    <key>WorkingDirectory</key>
    <string>${ROOT_DIR}</string>

    <key>RunAtLoad</key>
    <true/>

    <key>KeepAlive</key>
    <true/>

    <key>StandardOutPath</key>
    <string>${LOG_OUT}</string>
    <key>StandardErrorPath</key>
    <string>${LOG_ERR}</string>

    <key>EnvironmentVariables</key>
    <dict>
        <key>PATH</key>
        <string>${PATH_VALUE}</string>
        <key>PYTHONUNBUFFERED</key>
        <string>1</string>
        <key>PORT</key>
        <string>${PORT_VALUE}</string>
        <key>SOFIIA_DATA_DIR</key>
        <string>${DATA_DIR_VALUE}</string>
    </dict>
</dict>
</plist>
PLIST

chmod 644 "${PLIST_PATH}"

# Remove any previous instance (|| true: bootout fails when nothing is loaded),
# then load, enable, and force-(re)start the agent.
launchctl bootout "${DOMAIN}/${LABEL}" >/dev/null 2>&1 || true
launchctl bootstrap "${DOMAIN}" "${PLIST_PATH}"
launchctl enable "${DOMAIN}/${LABEL}" >/dev/null 2>&1 || true
launchctl kickstart -k "${DOMAIN}/${LABEL}"

echo "[sofiia-launchd] installed: ${PLIST_PATH}"
echo "[sofiia-launchd] active label: ${DOMAIN}/${LABEL}"
echo "[sofiia-launchd] logs: ${LOG_OUT} | ${LOG_ERR}"
echo "[sofiia-launchd] check: launchctl print ${DOMAIN}/${LABEL}"
|
||||
19
services/sofiia-console/launchd/status-launchd.sh
Executable file
19
services/sofiia-console/launchd/status-launchd.sh
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# Report the launchd status of the Sofiia console agent and tail its logs.
# Label and data dir are overridable via SOFIIA_LAUNCHD_LABEL / SOFIIA_DATA_DIR.
LABEL="${SOFIIA_LAUNCHD_LABEL:-com.daarion.sofiia}"
DOMAIN="gui/$(id -u)"
DATA_DIR_VALUE="${SOFIIA_DATA_DIR:-${HOME}/.sofiia/console-data}"
LOG_OUT="${DATA_DIR_VALUE}/logs/launchd.out.log"
LOG_ERR="${DATA_DIR_VALUE}/logs/launchd.err.log"

echo "[sofiia-launchd] domain: ${DOMAIN}"
echo "[sofiia-launchd] label: ${LABEL}"
echo ""
# Non-fatal: the agent may simply not be loaded.
launchctl print "${DOMAIN}/${LABEL}" || true
echo ""
echo "[sofiia-launchd] tail stdout (${LOG_OUT})"
tail -n 50 "${LOG_OUT}" 2>/dev/null || true
echo ""
echo "[sofiia-launchd] tail stderr (${LOG_ERR})"
tail -n 100 "${LOG_ERR}" 2>/dev/null || true
|
||||
15
services/sofiia-console/launchd/uninstall-launchd.sh
Executable file
15
services/sofiia-console/launchd/uninstall-launchd.sh
Executable file
@@ -0,0 +1,15 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# Uninstall the Sofiia console launchd agent: stop/unload it, disable it,
# and delete its plist from ~/Library/LaunchAgents.
LABEL="${SOFIIA_LAUNCHD_LABEL:-com.daarion.sofiia}"
DOMAIN="gui/$(id -u)"
PLIST_PATH="${HOME}/Library/LaunchAgents/${LABEL}.plist"

# Unload and disable; both are best-effort since the agent may not be loaded.
launchctl bootout "${DOMAIN}/${LABEL}" >/dev/null 2>&1 || true
launchctl disable "${DOMAIN}/${LABEL}" >/dev/null 2>&1 || true

# Fix: only report "removed" when a plist was actually deleted; previously
# the script claimed removal even when nothing was installed.
if [ -f "${PLIST_PATH}" ]; then
  rm -f "${PLIST_PATH}"
  echo "[sofiia-launchd] removed: ${PLIST_PATH}"
else
  echo "[sofiia-launchd] not installed: ${PLIST_PATH}"
fi
|
||||
59
services/sofiia-console/start-daemon.sh
Executable file
59
services/sofiia-console/start-daemon.sh
Executable file
@@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# Foreground launcher for the Sofiia console daemon (used by launchd).
# Resolves its own directory so it works regardless of the caller's cwd.
ROOT_DIR="$(cd "$(dirname "$0")" && pwd)"
cd "${ROOT_DIR}"

# Load root env if present (API keys, etc.)
if [ -f "../../.env" ]; then
  set -a
  # shellcheck disable=SC1091
  source "../../.env"
  set +a
fi

export ENV="${ENV:-dev}"
export PORT="${PORT:-8002}"
export OLLAMA_URL="${OLLAMA_URL:-http://localhost:11434}"
# On NODA2 native runtime we prefer local memory-service.
# Set SOFIIA_FORCE_LOCAL_MEMORY=false to keep external URL from env.
if [ "${SOFIIA_FORCE_LOCAL_MEMORY:-true}" = "true" ]; then
  export MEMORY_SERVICE_URL="http://localhost:8000"
else
  export MEMORY_SERVICE_URL="${MEMORY_SERVICE_URL:-http://localhost:8000}"
fi
export ROUTER_URL="${ROUTER_URL:-http://144.76.224.179:9102}"
export GATEWAY_URL="${GATEWAY_URL:-http://144.76.224.179:9300}"

# Ollama model selection and tuning; every value is overridable via env.
export SOFIIA_PREFERRED_CHAT_MODEL="${SOFIIA_PREFERRED_CHAT_MODEL:-ollama:qwen3:14b}"
export SOFIIA_OLLAMA_TIMEOUT_SEC="${SOFIIA_OLLAMA_TIMEOUT_SEC:-120}"
export SOFIIA_OLLAMA_VOICE_TIMEOUT_SEC="${SOFIIA_OLLAMA_VOICE_TIMEOUT_SEC:-45}"
export SOFIIA_OLLAMA_KEEP_ALIVE="${SOFIIA_OLLAMA_KEEP_ALIVE:-30m}"
export SOFIIA_OLLAMA_NUM_CTX="${SOFIIA_OLLAMA_NUM_CTX:-8192}"
export SOFIIA_OLLAMA_NUM_THREAD="${SOFIIA_OLLAMA_NUM_THREAD:-8}"
export SOFIIA_OLLAMA_NUM_GPU="${SOFIIA_OLLAMA_NUM_GPU:--1}"
export SOFIIA_OLLAMA_NUM_PREDICT_TEXT="${SOFIIA_OLLAMA_NUM_PREDICT_TEXT:-768}"

export SOFIIA_DATA_DIR="${SOFIIA_DATA_DIR:-$HOME/.sofiia/console-data}"
mkdir -p "${SOFIIA_DATA_DIR}"

# AIsTalk bridge integration (local bridge endpoint by default).
export AISTALK_ENABLED="${AISTALK_ENABLED:-true}"
export AISTALK_URL="${AISTALK_URL:-http://127.0.0.1:9415}"
export AISTALK_API_KEY="${AISTALK_API_KEY:-}"

# Prefer a service-local venv; fall back to the repo-root venv.
if [ -d "venv" ]; then
  # shellcheck disable=SC1091
  source venv/bin/activate
elif [ -d "../../venv" ]; then
  # shellcheck disable=SC1091
  source ../../venv/bin/activate
fi

echo "[sofiia-daemon] starting on 127.0.0.1:${PORT}"
echo "[sofiia-daemon] data: ${SOFIIA_DATA_DIR}"
echo "[sofiia-daemon] ollama: ${OLLAMA_URL}"
echo "[sofiia-daemon] model: ${SOFIIA_PREFERRED_CHAT_MODEL}"
echo "[sofiia-daemon] tune: ctx=${SOFIIA_OLLAMA_NUM_CTX} threads=${SOFIIA_OLLAMA_NUM_THREAD} gpu=${SOFIIA_OLLAMA_NUM_GPU} keep_alive=${SOFIIA_OLLAMA_KEEP_ALIVE}"
echo "[sofiia-daemon] aistalk: enabled=${AISTALK_ENABLED} url=${AISTALK_URL}"

# exec replaces this shell so the supervising process manages uvicorn directly.
exec python3 -m uvicorn app.main:app --host 127.0.0.1 --port "${PORT}"
|
||||
65
services/sofiia-console/start-local.sh
Executable file
65
services/sofiia-console/start-local.sh
Executable file
@@ -0,0 +1,65 @@
|
||||
#!/bin/bash
# Sofiia Console — NODA2 local dev startup
# Runs without API key (localhost bypass active), uses Grok by default.
# Usage: ./start-local.sh

set -e
cd "$(dirname "$0")"

# Load root .env if exists (picks up XAI_API_KEY, DEEPSEEK_API_KEY, etc.)
if [ -f "../../.env" ]; then
  set -a
  # shellcheck disable=SC1091
  source "../../.env"
  set +a
fi

# Dev mode — no auth for localhost
export ENV=dev
export PORT=8002

# === Sofiia's HOME is NODA2 (MacBook) ===
# Primary LLM: Grok 4.1 Fast Reasoning (per AGENTS.md)
# XAI_API_KEY, GLM5_API_KEY loaded from root .env above
# Quick tasks: GLM-5
# Local/offline: NODA2 Ollama (qwen3:14b, qwen3.5:35b-a3b, etc.)

# NODA2 local Ollama
export OLLAMA_URL=http://localhost:11434
export SOFIIA_PREFERRED_CHAT_MODEL="${SOFIIA_PREFERRED_CHAT_MODEL:-ollama:qwen3:14b}"
export SOFIIA_OLLAMA_TIMEOUT_SEC="${SOFIIA_OLLAMA_TIMEOUT_SEC:-120}"
export SOFIIA_OLLAMA_VOICE_TIMEOUT_SEC="${SOFIIA_OLLAMA_VOICE_TIMEOUT_SEC:-45}"
export SOFIIA_OLLAMA_KEEP_ALIVE="${SOFIIA_OLLAMA_KEEP_ALIVE:-30m}"
export SOFIIA_OLLAMA_NUM_CTX="${SOFIIA_OLLAMA_NUM_CTX:-8192}"
export SOFIIA_OLLAMA_NUM_THREAD="${SOFIIA_OLLAMA_NUM_THREAD:-8}"
export SOFIIA_OLLAMA_NUM_GPU="${SOFIIA_OLLAMA_NUM_GPU:--1}"
export SOFIIA_OLLAMA_NUM_PREDICT_TEXT="${SOFIIA_OLLAMA_NUM_PREDICT_TEXT:-768}"

# NODA2 memory service
export MEMORY_SERVICE_URL=http://localhost:8000

# NODA1 services (optional — for Router/Telegram context)
export ROUTER_URL=http://144.76.224.179:9102
export GATEWAY_URL=http://144.76.224.179:9300

# Data dir
export SOFIIA_DATA_DIR="$HOME/.sofiia/console-data"
mkdir -p "$SOFIIA_DATA_DIR"

# Activate venv if present
if [ -d "venv" ]; then
  # shellcheck disable=SC1091
  source venv/bin/activate
elif [ -d "../../venv" ]; then
  # shellcheck disable=SC1091
  source ../../venv/bin/activate
fi

# SECURITY FIX: report only whether keys are configured — never echo key
# material (previously the first 12 chars of each key leaked to the terminal
# and any captured log).
xai_state=$([ -n "${XAI_API_KEY:-}" ] && echo "configured" || echo "missing")
glm5_state=$([ -n "${GLM5_API_KEY:-}" ] && echo "configured" || echo "missing")

echo "🚀 Sofiia Console — http://localhost:8002 (НОДА2, без авторизації)"
echo " Primary: Grok 4.1 Fast Reasoning (AGENTS.md)"
echo " XAI_API_KEY: ${xai_state}"
echo " GLM5_API_KEY: ${glm5_state}"
echo " OLLAMA_URL: $OLLAMA_URL (НОДА2 local models)"
echo " Preferred: $SOFIIA_PREFERRED_CHAT_MODEL"
echo " Ollama tune: ctx=$SOFIIA_OLLAMA_NUM_CTX threads=$SOFIIA_OLLAMA_NUM_THREAD gpu=$SOFIIA_OLLAMA_NUM_GPU keep_alive=$SOFIIA_OLLAMA_KEEP_ALIVE"
echo " Models: qwen3:14b, qwen3.5:35b-a3b, glm-4.7-flash, deepseek-r1:70b..."
echo ""

python -m uvicorn app.main:app --host 127.0.0.1 --port "$PORT" --reload
|
||||
225
services/sofiia-console/static/react/ExportSettings.tsx
Normal file
225
services/sofiia-console/static/react/ExportSettings.tsx
Normal file
@@ -0,0 +1,225 @@
|
||||
import React, { useMemo } from "react";
|
||||
|
||||
/** Output resolution presets; "custom" enables the width/height inputs. */
export type AuroraResolution = "original" | "1080p" | "4k" | "8k" | "custom";
/** Container/codec choices offered by the export UI. */
export type AuroraFormat = "mp4_h264" | "mp4_h265" | "avi_lossless" | "frames_png";
/** Region-of-interest mode; "manual" enables the crop-box inputs. */
export type AuroraRoi = "full_frame" | "auto_faces" | "auto_plates" | "manual";

/** Manual crop rectangle (presumably pixels, origin top-left — confirm against backend). */
export interface AuroraCropBox {
  x: number;
  y: number;
  width: number;
  height: number;
}

/** Export configuration value edited by the ExportSettings component. */
export interface ExportSettingsValue {
  resolution: AuroraResolution;
  format: AuroraFormat;
  roi: AuroraRoi;
  // Edited by the UI only when resolution === "custom".
  customWidth?: number;
  customHeight?: number;
  // Edited by the UI only when roi === "manual"; null/undefined means no crop.
  crop?: AuroraCropBox | null;
}

/** Props for the controlled ExportSettings form. */
interface ExportSettingsProps {
  value: ExportSettingsValue;
  onChange: (next: ExportSettingsValue) => void;
  disabled?: boolean;
}
|
||||
|
||||
function toInt(v: string, fallback = 0): number {
|
||||
const n = Number.parseInt(v, 10);
|
||||
return Number.isFinite(n) ? Math.max(0, n) : fallback;
|
||||
}
|
||||
|
||||
/**
 * Controlled form for Aurora export settings (resolution, format, ROI).
 * Extra numeric inputs appear when resolution is "custom" or ROI is
 * "manual"; every edit is reported upward via onChange with a fresh value
 * object (the component never mutates `value`).
 */
export const ExportSettings: React.FC<ExportSettingsProps> = ({
  value,
  onChange,
  disabled = false,
}) => {
  // Fallback crop so the manual inputs always have numbers to display.
  const crop = value.crop ?? { x: 0, y: 0, width: 0, height: 0 };
  const showCustomResolution = value.resolution === "custom";
  const showManualCrop = value.roi === "manual";
  // One-line human-readable summary of the current selection.
  const summary = useMemo(() => {
    const res =
      value.resolution === "custom"
        ? `${value.customWidth || 0}x${value.customHeight || 0}`
        : value.resolution;
    return `${res} • ${value.format} • ${value.roi}`;
  }, [value]);

  return (
    <section
      style={{
        border: "1px solid #2f2f35",
        borderRadius: 10,
        padding: 12,
        background: "#111217",
        color: "#e5e7eb",
      }}
      aria-label="Aurora export settings"
    >
      <div style={{ fontSize: 13, fontWeight: 700, marginBottom: 10 }}>Export Settings</div>

      {/* Resolution preset */}
      <label style={{ display: "block", marginBottom: 8 }}>
        <span style={{ fontSize: 12, color: "#9ca3af", display: "block", marginBottom: 4 }}>
          Resolution
        </span>
        <select
          disabled={disabled}
          value={value.resolution}
          onChange={(e) => onChange({ ...value, resolution: e.target.value as AuroraResolution })}
          style={{ width: "100%" }}
        >
          <option value="original">Original</option>
          <option value="1080p">1080p</option>
          <option value="4k">4K</option>
          <option value="8k">8K</option>
          <option value="custom">Custom</option>
        </select>
      </label>

      {/* Custom width/height, shown only for the "custom" preset */}
      {showCustomResolution && (
        <div style={{ display: "grid", gridTemplateColumns: "1fr 1fr", gap: 8, marginBottom: 8 }}>
          <label>
            <span style={{ fontSize: 12, color: "#9ca3af", display: "block", marginBottom: 4 }}>
              Width
            </span>
            <input
              type="number"
              min={1}
              disabled={disabled}
              value={value.customWidth || 0}
              onChange={(e) => onChange({ ...value, customWidth: toInt(e.target.value, 0) })}
            />
          </label>
          <label>
            <span style={{ fontSize: 12, color: "#9ca3af", display: "block", marginBottom: 4 }}>
              Height
            </span>
            <input
              type="number"
              min={1}
              disabled={disabled}
              value={value.customHeight || 0}
              onChange={(e) => onChange({ ...value, customHeight: toInt(e.target.value, 0) })}
            />
          </label>
        </div>
      )}

      {/* Output container/codec */}
      <label style={{ display: "block", marginBottom: 8 }}>
        <span style={{ fontSize: 12, color: "#9ca3af", display: "block", marginBottom: 4 }}>
          Format
        </span>
        <select
          disabled={disabled}
          value={value.format}
          onChange={(e) => onChange({ ...value, format: e.target.value as AuroraFormat })}
          style={{ width: "100%" }}
        >
          <option value="mp4_h264">MP4 (H.264)</option>
          <option value="mp4_h265">MP4 (H.265)</option>
          <option value="avi_lossless">AVI (lossless)</option>
          <option value="frames_png">Frames (PNG)</option>
        </select>
      </label>

      {/* Region of interest mode */}
      <label style={{ display: "block", marginBottom: 8 }}>
        <span style={{ fontSize: 12, color: "#9ca3af", display: "block", marginBottom: 4 }}>
          ROI
        </span>
        <select
          disabled={disabled}
          value={value.roi}
          onChange={(e) => onChange({ ...value, roi: e.target.value as AuroraRoi })}
          style={{ width: "100%" }}
        >
          <option value="full_frame">Full frame</option>
          <option value="auto_faces">Auto faces</option>
          <option value="auto_plates">Auto license plates</option>
          <option value="manual">Manual crop box</option>
        </select>
      </label>

      {/* Manual crop box, shown only for roi === "manual" */}
      {showManualCrop && (
        <div style={{ display: "grid", gridTemplateColumns: "1fr 1fr", gap: 8, marginBottom: 8 }}>
          <label>
            <span style={{ fontSize: 12, color: "#9ca3af", display: "block", marginBottom: 4 }}>
              X
            </span>
            <input
              type="number"
              min={0}
              disabled={disabled}
              value={crop.x}
              onChange={(e) =>
                onChange({
                  ...value,
                  crop: { ...crop, x: toInt(e.target.value, 0) },
                })
              }
            />
          </label>
          <label>
            <span style={{ fontSize: 12, color: "#9ca3af", display: "block", marginBottom: 4 }}>
              Y
            </span>
            <input
              type="number"
              min={0}
              disabled={disabled}
              value={crop.y}
              onChange={(e) =>
                onChange({
                  ...value,
                  crop: { ...crop, y: toInt(e.target.value, 0) },
                })
              }
            />
          </label>
          <label>
            <span style={{ fontSize: 12, color: "#9ca3af", display: "block", marginBottom: 4 }}>
              Width
            </span>
            <input
              type="number"
              min={0}
              disabled={disabled}
              value={crop.width}
              onChange={(e) =>
                onChange({
                  ...value,
                  crop: { ...crop, width: toInt(e.target.value, 0) },
                })
              }
            />
          </label>
          <label>
            <span style={{ fontSize: 12, color: "#9ca3af", display: "block", marginBottom: 4 }}>
              Height
            </span>
            <input
              type="number"
              min={0}
              disabled={disabled}
              value={crop.height}
              onChange={(e) =>
                onChange({
                  ...value,
                  crop: { ...crop, height: toInt(e.target.value, 0) },
                })
              }
            />
          </label>
        </div>
      )}

      <div style={{ marginTop: 6, fontSize: 12, color: "#9ca3af" }}>
        Selected: <span style={{ color: "#e5e7eb" }}>{summary}</span>
      </div>
    </section>
  );
};

export default ExportSettings;
|
||||
|
||||
Reference in New Issue
Block a user