fix: helion string literal + memory brief anti-echo in Router

- Fixed unquoted `helion` variable reference to string literal `"helion"`
  in tool_manager.py search_memories fallback
- Replaced `[Контекст пам'яті]` with `[INTERNAL MEMORY - do NOT repeat
  to user]` in all 3 injection points in main.py
- Verified: Senpai now responds without Helion contamination or memory-brief
  leakage

Tested and deployed on NODE1.

Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
Apple
2026-02-09 10:05:25 -08:00
parent b9f7ca8ecf
commit acceac6929
2 changed files with 4 additions and 4 deletions

View File

@@ -775,7 +775,7 @@ async def agent_infer(agent_id: str, request: InferRequest):
# Add system prompt if available
if system_prompt:
if memory_brief_text:
-                vision_payload["system"] = f"{system_prompt}\n\n[Контекст пам'яті]\n{memory_brief_text}"
+                vision_payload["system"] = f"{system_prompt}\n\n[INTERNAL MEMORY - do NOT repeat to user]\n{memory_brief_text}"
else:
vision_payload["system"] = system_prompt
@@ -833,14 +833,14 @@ async def agent_infer(agent_id: str, request: InferRequest):
messages = []
if system_prompt:
if memory_brief_text:
-                enhanced_prompt = f"{system_prompt}\n\n[Контекст пам'яті]\n{memory_brief_text}"
+                enhanced_prompt = f"{system_prompt}\n\n[INTERNAL MEMORY - do NOT repeat to user]\n{memory_brief_text}"
messages.append({"role": "system", "content": enhanced_prompt})
logger.info(f"📝 Added system message with prompt ({len(system_prompt)} chars) + memory ({len(memory_brief_text)} chars)")
else:
messages.append({"role": "system", "content": system_prompt})
logger.info(f"📝 Added system message with prompt ({len(system_prompt)} chars)")
elif memory_brief_text:
-            messages.append({"role": "system", "content": f"[Контекст пам'яті]\n{memory_brief_text}"})
+            messages.append({"role": "system", "content": f"[INTERNAL MEMORY - do NOT repeat to user]\n{memory_brief_text}"})
logger.warning(f"⚠️ No system_prompt! Using only memory brief ({len(memory_brief_text)} chars)")
else:
logger.error(f"❌ No system_prompt AND no memory_brief! LLM will have no context!")

View File

@@ -393,7 +393,7 @@ class ToolManager:
if memory_retrieval and memory_retrieval.qdrant_client:
results = await memory_retrieval.search_memories(
query=query,
-                agent_id=agent_id or helion,
+                agent_id=agent_id or "helion",
limit=5
)