helion: deepseek-first, on-demand CrewAI, local subagent profiles, concise post-synthesis
This commit is contained in:
# Gateway token budgets and sampling defaults.
# Each value is overridable via an environment variable of the same name;
# the string default is parsed with int()/float(), so a malformed env value
# raises ValueError at import time (fail-fast on bad config).
GATEWAY_MAX_TOKENS_CONCISE = int(os.getenv("GATEWAY_MAX_TOKENS_CONCISE", "220"))
GATEWAY_MAX_TOKENS_TRAINING = int(os.getenv("GATEWAY_MAX_TOKENS_TRAINING", "900"))
GATEWAY_TEMPERATURE_DEFAULT = float(os.getenv("GATEWAY_TEMPERATURE_DEFAULT", "0.4"))
# Per-agent reply-length caps (see send_to_router): senpai gets a fixed
# budget; helion's budget is clamped to at most this value.
GATEWAY_MAX_TOKENS_SENPAI_DEFAULT = int(os.getenv("GATEWAY_MAX_TOKENS_SENPAI_DEFAULT", "320"))
GATEWAY_MAX_TOKENS_HELION_DEFAULT = int(os.getenv("GATEWAY_MAX_TOKENS_HELION_DEFAULT", "240"))
GATEWAY_MAX_TOKENS_DETAILED = int(os.getenv("GATEWAY_MAX_TOKENS_DETAILED", "900"))
@@ -87,6 +88,8 @@ async def send_to_router(body: Dict[str, Any]) -> Dict[str, Any]:

    # Senpai tends to over-verbose responses in Telegram; use lower default unless user asked details.
    if agent_id == "senpai":
        max_tokens = GATEWAY_MAX_TOKENS_SENPAI_DEFAULT
    elif agent_id == "helion":
        max_tokens = min(max_tokens, GATEWAY_MAX_TOKENS_HELION_DEFAULT)

    if metadata.get("is_training_group"):
        max_tokens = GATEWAY_MAX_TOKENS_TRAINING
Reference in New Issue
Block a user