"""
|
|
CrewAI Client for Router
|
|
Handles decision: direct LLM vs CrewAI orchestration
|
|
"""
|
|
import os
|
|
import json
|
|
import logging
|
|
import httpx
|
|
from typing import Dict, Any, Optional, Tuple, List
|
|
from pathlib import Path
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
CREWAI_URL = os.getenv("CREWAI_URL", "http://dagi-staging-crewai-service:9010")
|
|
CREWAI_ENABLED = os.getenv("CREWAI_ENABLED", "true").lower() == "true"
|
|
CREWAI_ORCHESTRATORS_ALWAYS = os.getenv("CREWAI_ORCHESTRATORS_ALWAYS", "true").lower() == "true"
|
|
TOP_LEVEL_CREWAI_ON_DEMAND = os.getenv("TOP_LEVEL_CREWAI_ON_DEMAND", "true").lower() == "true"
|
|
TOP_LEVEL_CREWAI_TEAM_LIMIT_FAST = int(os.getenv("TOP_LEVEL_CREWAI_TEAM_LIMIT_FAST", "3"))
|
|
TOP_LEVEL_CREWAI_PROFILE_FAST = os.getenv("TOP_LEVEL_CREWAI_PROFILE_FAST", "")
|
|
TOP_LEVEL_CREWAI_PROFILE_COMPLEX = os.getenv("TOP_LEVEL_CREWAI_PROFILE_COMPLEX", "")
|
|
HELION_CREWAI_TEAM_LIMIT = int(os.getenv("HELION_CREWAI_TEAM_LIMIT", "3"))
|
|
CLAN_CREWAI_PROFILE = os.getenv("CLAN_CREWAI_PROFILE", "zhos_mvp")
|
|
CLAN_CREWAI_PROFILE_FAST = os.getenv("CLAN_CREWAI_PROFILE_FAST", "default")
|
|
CLAN_CREWAI_PROFILE_COMPLEX = os.getenv("CLAN_CREWAI_PROFILE_COMPLEX", CLAN_CREWAI_PROFILE)
|
|
CLAN_CREWAI_TEAM_LIMIT_FAST = int(os.getenv("CLAN_CREWAI_TEAM_LIMIT_FAST", "3"))
|
|
|
|
CREWAI_AGENTS_PATH = os.getenv("CREWAI_AGENTS_PATH", "/config/crewai_agents.json")
|
|
FALLBACK_CREWAI_PATH = "/app/config/crewai_agents.json"
|
|
|
|
MIN_PROMPT_LENGTH_FOR_CREW = 100
|
|
COMPLEXITY_KEYWORDS = [
|
|
"план", "plan", "аналіз", "analysis", "дослідження", "research",
|
|
"порівняй", "compare", "розроби", "develop", "створи стратегію",
|
|
"декомпозиція", "decompose", "крок за кроком", "step by step",
|
|
"детально", "in detail", "комплексний", "comprehensive"
|
|
]
|
|
|
|
_crewai_cache = None
|
|
|
|
|
|
def load_crewai_config():
    """Load and memoize the CrewAI agents configuration.

    Tries the configured path first, then the baked-in fallback path.
    Returns the parsed dict on success, or None when no candidate file
    could be read (in which case the next call retries the lookup).
    """
    global _crewai_cache
    if _crewai_cache is not None:
        return _crewai_cache

    for candidate in (CREWAI_AGENTS_PATH, FALLBACK_CREWAI_PATH):
        config_file = Path(candidate)
        try:
            if not config_file.exists():
                continue
            parsed = json.loads(config_file.read_text(encoding="utf-8"))
        except Exception as exc:
            # Best-effort: a broken/unreadable candidate just falls through
            # to the next path instead of failing the router.
            logger.warning(f"Could not load from {candidate}: {exc}")
            continue
        logger.info(f"Loaded CrewAI config from {candidate}")
        _crewai_cache = parsed
        return parsed
    return None
|
|
|
|
|
|
def get_agent_crewai_info(agent_id):
    """Describe the CrewAI role of *agent_id* from the loaded config.

    Returns a dict with:
      enabled      -- True when the agent appears in "orchestrators"
      orchestrator -- same flag (kept separate for call-site readability)
      team         -- "members" list from the "teams" mapping (may be empty)
    A missing/unloadable config yields the all-disabled shape.
    """
    config = load_crewai_config()
    if not config:
        return {"enabled": False, "orchestrator": False, "team": []}

    orchestrator_ids = {entry.get("id") for entry in config.get("orchestrators", [])}
    is_orchestrator = agent_id in orchestrator_ids
    members = config.get("teams", {}).get(agent_id, {}).get("members", [])

    return {
        "enabled": is_orchestrator,
        "orchestrator": is_orchestrator,
        "team": members,
    }
|
|
|
|
|
|
def should_use_crewai(agent_id, prompt, agent_config, metadata=None, force_crewai=False):
    """
    Decide whether to use CrewAI orchestration or direct LLM.

    Args:
        agent_id: agent identifier looked up in the CrewAI config.
        prompt: user prompt; its length and keywords drive escalation.
        agent_config: unused here, kept for interface compatibility.
        metadata: optional dict; honors "force_detailed" and
            "requires_complex_reasoning" flags.
        force_crewai: explicit override that wins over all policies
            (except the global kill switch).

    Returns: (use_crewai: bool, reason: str)
    """
    if not CREWAI_ENABLED:
        return False, "crewai_disabled_globally"

    if force_crewai:
        return True, "force_crewai_requested"

    crewai_info = get_agent_crewai_info(agent_id)

    if not crewai_info.get("enabled", False):
        return False, "agent_crewai_disabled"

    if not crewai_info.get("orchestrator", False):
        return False, "agent_not_orchestrator"

    if not crewai_info.get("team", []):
        return False, "agent_has_no_team"

    metadata = metadata or {}
    force_detailed = bool(metadata.get("force_detailed"))
    requires_complex = bool(metadata.get("requires_complex_reasoning"))

    def _prompt_has_complexity():
        # Shared keyword scan for both policy branches below.
        lowered = prompt.lower()
        return any(kw in lowered for kw in COMPLEXITY_KEYWORDS)

    # Top-level policy: DeepSeek direct path by default; CrewAI only on-demand.
    # This keeps first-touch replies fast and concise across all top-level agents.
    # (The agent is necessarily an orchestrator here — non-orchestrators
    # already returned above — so no re-check is needed.)
    if TOP_LEVEL_CREWAI_ON_DEMAND:
        if force_detailed or requires_complex:
            return True, "orchestrator_complex_or_detailed"
        if len(prompt) >= MIN_PROMPT_LENGTH_FOR_CREW and _prompt_has_complexity():
            return True, "orchestrator_complexity_keywords_detected"
        return False, "orchestrator_direct_llm_first"

    # Architecture mode: top-level orchestrators go through CrewAI API by default.
    if CREWAI_ORCHESTRATORS_ALWAYS:
        return True, "orchestrator_default_crewai"

    if len(prompt) < MIN_PROMPT_LENGTH_FOR_CREW:
        return False, "prompt_too_short"

    if _prompt_has_complexity():
        return True, "complexity_keywords_detected"

    return False, "default_direct_llm"
|
|
|
|
|
|
async def call_crewai(agent_id, task, context=None, team=None, profile=None):
    """Run *task* through the CrewAI service on behalf of orchestrator *agent_id*.

    Args:
        agent_id: orchestrator identifier (sent as "orchestrator_id").
        task: task text forwarded to the crew.
        context: optional dict; its "metadata" sub-dict may carry
            "force_detailed", "requires_complex_reasoning" and "crewai_profile".
        team: optional explicit member list; defaults to the configured team.
        profile: optional explicit CrewAI profile; wins over metadata/policy.

    Returns:
        The service's JSON body on HTTP 200, otherwise
        {"success": False, "result": None, "agents_used": [], "error": ...}.
        Never raises: every exception is converted into that error shape.
    """
    try:
        # Look up agent info once (the original fetched it twice).
        crewai_info = get_agent_crewai_info(agent_id)
        if not team:
            team = crewai_info.get("team", [])
        is_orchestrator = bool(crewai_info.get("orchestrator", False))

        effective_context = context or {}
        metadata = effective_context.get("metadata", {}) or {}
        force_detailed = bool(metadata.get("force_detailed"))
        requires_complex = bool(metadata.get("requires_complex_reasoning"))

        # Helion policy: limit CrewAI participants unless user requested detailed mode.
        if (
            agent_id == "helion"
            and not force_detailed
            and HELION_CREWAI_TEAM_LIMIT > 0
            and len(team) > HELION_CREWAI_TEAM_LIMIT
        ):
            team = team[:HELION_CREWAI_TEAM_LIMIT]

        # Top-level fast path: cap the team for on-demand orchestrators when the
        # caller did not ask for a detailed or complex run.
        if (
            is_orchestrator
            and TOP_LEVEL_CREWAI_ON_DEMAND
            and not force_detailed
            and not requires_complex
            and TOP_LEVEL_CREWAI_TEAM_LIMIT_FAST > 0
            and len(team) > TOP_LEVEL_CREWAI_TEAM_LIMIT_FAST
        ):
            team = team[:TOP_LEVEL_CREWAI_TEAM_LIMIT_FAST]

        # Crew runs can be long; allow up to 10 minutes.
        async with httpx.AsyncClient(timeout=600.0) as client:
            # Profile resolution order: explicit arg > request metadata >
            # clan fast/complex split > top-level fast/complex split.
            effective_profile = profile or metadata.get("crewai_profile")
            if not effective_profile and agent_id == "clan":
                effective_profile = (
                    CLAN_CREWAI_PROFILE_COMPLEX
                    if (force_detailed or requires_complex)
                    else CLAN_CREWAI_PROFILE_FAST
                )
            if not effective_profile and is_orchestrator and TOP_LEVEL_CREWAI_ON_DEMAND:
                # Empty-string env defaults normalize to None (no profile sent).
                if force_detailed or requires_complex:
                    effective_profile = TOP_LEVEL_CREWAI_PROFILE_COMPLEX or None
                else:
                    effective_profile = TOP_LEVEL_CREWAI_PROFILE_FAST or None

            # Clan's fast profile additionally caps the team size.
            if (
                agent_id == "clan"
                and effective_profile == CLAN_CREWAI_PROFILE_FAST
                and not force_detailed
                and CLAN_CREWAI_TEAM_LIMIT_FAST > 0
                and len(team) > CLAN_CREWAI_TEAM_LIMIT_FAST
            ):
                team = team[:CLAN_CREWAI_TEAM_LIMIT_FAST]

            payload = {
                "task": task,
                "orchestrator_id": agent_id,
                "context": effective_context,
            }
            if effective_profile:
                payload["profile"] = effective_profile
            if team:
                # Members may be plain role strings or dicts carrying a "role" key.
                payload["team"] = [
                    m.get("role", str(m)) if isinstance(m, dict) else m
                    for m in team
                ]

            logger.info(f"CrewAI call: agent={agent_id}, team={len(team)} members")

            response = await client.post(f"{CREWAI_URL}/crew/run", json=payload)

            if response.status_code == 200:
                data = response.json()
                success = data.get("success")
                logger.info(f"CrewAI response: success={success}")
                return data
            logger.error(f"CrewAI error: {response.status_code}")
            return {"success": False, "result": None, "agents_used": [], "error": f"HTTP {response.status_code}"}
    except Exception as e:
        logger.error(f"CrewAI exception: {e}")
        return {"success": False, "result": None, "agents_used": [], "error": str(e)}
|
|
|
|
|
|
async def get_crewai_health():
    """Probe the CrewAI service's /health endpoint with a short timeout.

    Returns the service's JSON body on success, otherwise
    {"status": "error", "error": <message>} — never raises.
    """
    try:
        async with httpx.AsyncClient(timeout=5.0) as client:
            reply = await client.get(f"{CREWAI_URL}/health")
            return reply.json()
    except Exception as exc:
        return {"status": "error", "error": str(exc)}
|