New router intelligence modules (26 files): alert_ingest/store, audit_store, architecture_pressure, backlog_generator/store, cost_analyzer, data_governance, dependency_scanner, drift_analyzer, incident_* (5 files), llm_enrichment, platform_priority_digest, provider_budget, release_check_runner, risk_* (6 files), signature_state_store, sofiia_auto_router, tool_governance New services: - sofiia-console: Dockerfile, adapters/, monitor/nodes/ops/voice modules, launchd, react static - memory-service: integration_endpoints, integrations, voice_endpoints, static UI - aurora-service: full app suite (analysis, job_store, orchestrator, reporting, schemas, subagents) - sofiia-supervisor: new supervisor service - aistalk-bridge-lite: Telegram bridge lite - calendar-service: CalDAV calendar service with reminders - mlx-stt-service / mlx-tts-service: Apple Silicon speech services - binance-bot-monitor: market monitor service - node-worker: STT/TTS memory providers New tools (9): agent_email, browser_tool, contract_tool, observability_tool, oncall_tool, pr_reviewer_tool, repo_tool, safe_code_executor, secure_vault New crews: agromatrix_crew (10 modules: depth_classifier, doc_facts, doc_focus, farm_state, light_reply, llm_factory, memory_manager, proactivity, reflection_engine, session_context, style_adapter, telemetry) Tests: 85+ test files for all new modules Made-with: Cursor
199 lines
7.1 KiB
Python
from __future__ import annotations
|
|
|
|
import shutil
|
|
from pathlib import Path
|
|
from typing import Callable, List, Optional
|
|
from urllib.parse import quote
|
|
|
|
from .schemas import (
|
|
AuroraJob,
|
|
AuroraResult,
|
|
InputFileDescriptor,
|
|
MediaType,
|
|
OutputFileDescriptor,
|
|
ProcessingStep,
|
|
)
|
|
from .subagents import (
|
|
ClarityAgent,
|
|
EchoAgent,
|
|
KoreAgent,
|
|
PipelineCancelledError,
|
|
PixisAgent,
|
|
PlateAgent,
|
|
SubagentContext,
|
|
VeraAgent,
|
|
sha256_file,
|
|
)
|
|
|
|
# Progress-reporting hook: (percent_complete, stage_label, optional step detail).
ProgressCallback = Callable[[int, str, Optional[ProcessingStep]], None]
# Cancellation probe: returns True when the current job should be aborted.
CancelCheck = Callable[[], bool]
|
|
|
|
|
|
class JobCancelledError(RuntimeError):
    """Raised when a running job is aborted via its cancel check or pipeline."""
|
|
|
|
|
|
class AuroraOrchestrator:
    """Runs the Aurora processing pipeline for a single job.

    Builds a per-media-type chain of subagents, executes them in order while
    reporting progress and honoring cancellation, then publishes the final
    media file plus any extra artifacts as output descriptors.
    """

    def __init__(self, outputs_root: Path, public_base_url: str) -> None:
        # Root directory under which each job gets its own output folder.
        self.outputs_root = outputs_root
        # Base URL for public download links; trailing slash stripped so
        # _file_url can join with a single "/".
        self.public_base_url = public_base_url.rstrip("/")

    def _build_pipeline(self, media_type: MediaType, forensic: bool, priority: str = "balanced") -> List[object]:
        """Return the ordered subagent chain for *media_type*.

        ``priority`` is accepted for interface stability but does not
        currently influence pipeline composition.
        """
        if media_type == "video":
            pipeline: List[object] = [VeraAgent(), PlateAgent()]
        elif media_type == "audio":
            pipeline = [EchoAgent()]
        elif media_type == "photo":
            pipeline = [VeraAgent(), PixisAgent(), PlateAgent()]
        else:
            # Unknown media types fall back to the generic clarity pass.
            pipeline = [ClarityAgent()]

        if forensic:
            # Forensic mode appends the chain-of-custody agent last.
            pipeline.append(KoreAgent())
        return pipeline

    def _file_url(self, job_id: str, name: str) -> str:
        """Build the public, percent-encoded download URL for a job artifact."""
        return f"{self.public_base_url}/api/aurora/files/{quote(job_id)}/{quote(name)}"

    def _artifact_type(self, path: Path, media_type: MediaType) -> str:
        """Classify an artifact by filename; fall back to the job's media type."""
        lowered = path.name.lower()
        if lowered.endswith("forensic_log.json"):
            return "forensic_log"
        if lowered.endswith("forensic_signature.json"):
            return "forensic_signature"
        if "transcript" in lowered:
            return "transcript"
        if "plate_detection" in lowered:
            return "plate_detections"
        return media_type

    def run(
        self,
        job: AuroraJob,
        progress_callback: Optional[ProgressCallback] = None,
        cancel_check: Optional[CancelCheck] = None,
    ) -> AuroraResult:
        """Execute the full pipeline for *job* and return its result.

        Args:
            job: The job descriptor (input path, media type, mode, metadata).
            progress_callback: Optional hook receiving (percent, stage, step).
            cancel_check: Optional probe; a truthy return aborts the job.

        Raises:
            JobCancelledError: if *cancel_check* reports cancellation or a
                subagent raises ``PipelineCancelledError``.
        """
        forensic_mode = job.mode == "forensic"
        # Normalize job metadata exactly once and reuse it for both the
        # pipeline build and the subagent context (previously computed twice).
        meta = job.metadata if isinstance(job.metadata, dict) else {}
        raw_export = meta.get("export_options")
        export_options = raw_export if isinstance(raw_export, dict) else {}
        priority = str(meta.get("priority") or "balanced").strip().lower() or "balanced"

        pipeline = self._build_pipeline(job.media_type, forensic_mode, priority)

        output_dir = self.outputs_root / job.job_id
        output_dir.mkdir(parents=True, exist_ok=True)

        ctx = SubagentContext(
            job_id=job.job_id,
            mode=job.mode,
            media_type=job.media_type,
            input_hash=job.input_hash,
            output_dir=output_dir,
            priority=priority,
            export_options=export_options,
            cancel_check=cancel_check,
        )

        current_path = Path(job.input_path)
        processing_log: List[ProcessingStep] = []
        extra_artifacts: List[Path] = []
        digital_signature: Optional[str] = None

        total = max(1, len(pipeline))  # guard against an empty pipeline
        for idx, subagent in enumerate(pipeline, start=1):
            if cancel_check and cancel_check():
                raise JobCancelledError(f"Job {job.job_id} cancelled")
            # Each stage owns an equal slice of the 0-95% progress range;
            # the final 5% is reserved for completion.
            stage_from = int(((idx - 1) / total) * 95)
            stage_to = int((idx / total) * 95)

            def _stage_progress(fraction: float, stage_label: str) -> None:
                # Map a stage-local fraction in [0, 1] onto this stage's slice.
                # Closure over stage_from/stage_to is safe: the hook is only
                # used within the current iteration.
                if not progress_callback:
                    return
                bounded = max(0.0, min(1.0, float(fraction)))
                progress = stage_from + int((stage_to - stage_from) * bounded)
                progress_callback(progress, stage_label, None)

            # Per-stage context: identical to ctx plus the progress hook.
            stage_ctx = SubagentContext(
                job_id=ctx.job_id,
                mode=ctx.mode,
                media_type=ctx.media_type,
                input_hash=ctx.input_hash,
                output_dir=ctx.output_dir,
                priority=ctx.priority,
                export_options=ctx.export_options,
                cancel_check=ctx.cancel_check,
                stage_progress=_stage_progress if progress_callback else None,
            )

            try:
                run_result = subagent.run(stage_ctx, current_path)
            except PipelineCancelledError as exc:
                # Translate subagent-level cancellation into the job-level error.
                raise JobCancelledError(str(exc)) from exc
            current_path = run_result.output_path
            processing_log.extend(run_result.steps)
            extra_artifacts.extend(run_result.artifacts)
            if run_result.metadata.get("digital_signature"):
                digital_signature = run_result.metadata["digital_signature"]

            stage = run_result.steps[-1].step if run_result.steps else f"stage_{idx}"
            progress = int((idx / total) * 95)
            if progress_callback:
                for step in run_result.steps:
                    progress_callback(progress, stage, step)

        if cancel_check and cancel_check():
            raise JobCancelledError(f"Job {job.job_id} cancelled")

        # Publish the final media file under a canonical name in the job dir.
        final_media = output_dir / f"aurora_result{current_path.suffix or '.bin'}"
        if current_path != final_media:
            if current_path.parent == output_dir:
                current_path.rename(final_media)
            else:
                # Cross-directory (possibly cross-device) move.
                shutil.move(str(current_path), str(final_media))
        result_hash = sha256_file(final_media)

        output_files: List[OutputFileDescriptor] = [
            OutputFileDescriptor(
                type=job.media_type,
                name=final_media.name,
                url=self._file_url(job.job_id, final_media.name),
                hash=result_hash,
            )
        ]

        for artifact in extra_artifacts:
            output_files.append(
                OutputFileDescriptor(
                    type=self._artifact_type(artifact, job.media_type),
                    name=artifact.name,
                    url=self._file_url(job.job_id, artifact.name),
                    hash=sha256_file(artifact),
                )
            )

        if forensic_mode and not digital_signature:
            # Fallback pseudo-signature derived from the result hash so
            # forensic results always carry a signature field.
            digest = result_hash.split(":", 1)[-1][:48]
            digital_signature = f"ed25519:{digest}"

        if progress_callback:
            progress_callback(100, "completed", None)

        return AuroraResult(
            mode=job.mode,
            job_id=job.job_id,
            media_type=job.media_type,
            input_file=InputFileDescriptor(
                name=job.file_name,
                hash=job.input_hash,
            ),
            processing_log=processing_log,
            output_files=output_files,
            digital_signature=digital_signature,
            metadata={
                "pipeline": [type(agent).__name__ for agent in pipeline],
                "forensic_mode": forensic_mode,
                "export_options": export_options,
            },
        )