fix(fabric): use broadcast subject for NATS capabilities discovery

NATS wildcards (node.*.capabilities.get) only work for subscriptions,
not for publish. Switch to a dedicated broadcast subject
(fabric.capabilities.discover) that all NCS instances subscribe to,
enabling proper scatter-gather discovery across nodes.

Made-with: Cursor
This commit is contained in:
Apple
2026-02-27 03:20:13 -08:00
parent a6531507df
commit 90080c632a
28 changed files with 8883 additions and 1459 deletions

View File

@@ -2,9 +2,19 @@
Per-agent tool configuration.
All agents have FULL standard stack + specialized tools.
Each agent is a platform with own site, channels, database, users.
v2: Supports default_tools merge policy via tools_rollout.yml config.
Effective tools = unique(DEFAULT_TOOLS_BY_ROLE ∪ agent.tools ∪ agent.capability_tools)
"""
# FULL standard stack - available to ALL agents
import os
import logging
from pathlib import Path
from typing import List, Optional
logger = logging.getLogger(__name__)
# FULL standard stack - available to ALL agents (legacy explicit list, kept for compatibility)
FULL_STANDARD_STACK = [
# Search & Knowledge (Priority 1)
"memory_search",
@@ -29,59 +39,74 @@ FULL_STANDARD_STACK = [
# File artifacts
"file_tool",
# Repo Tool (read-only filesystem)
"repo_tool",
# PR Reviewer Tool
"pr_reviewer_tool",
# Contract Tool (OpenAPI/JSON Schema)
"contract_tool",
# Oncall/Runbook Tool
"oncall_tool",
# Observability Tool
"observability_tool",
# Config Linter Tool (secrets, policy)
"config_linter_tool",
# ThreatModel Tool (security analysis)
"threatmodel_tool",
# Job Orchestrator Tool (ops tasks)
"job_orchestrator_tool",
# Knowledge Base Tool (ADR, docs, runbooks)
"kb_tool",
# Drift Analyzer Tool (service/openapi/nats/tools drift)
"drift_analyzer_tool",
# Pieces OS integration
"pieces_tool",
]
# Specialized tools per agent (on top of standard stack)
AGENT_SPECIALIZED_TOOLS = {
# Helion - Energy platform
# Specialized: energy calculations, solar/wind analysis
"helion": ['comfy_generate_image', 'comfy_generate_video'],
# Alateya - R&D Lab OS
# Specialized: experiment tracking, hypothesis testing
"alateya": ['comfy_generate_image', 'comfy_generate_video'],
# Nutra - Health & Nutrition
# Specialized: nutrition calculations, supplement analysis
"nutra": ['comfy_generate_image', 'comfy_generate_video'],
# AgroMatrix - Agriculture
# Specialized: crop analysis, weather integration, field mapping + plant intelligence
"agromatrix": [
'comfy_generate_image',
'comfy_generate_video',
'plantnet_lookup',
'nature_id_identify',
'gbif_species_lookup',
'agrovoc_lookup',
],
"agromatrix": ['comfy_generate_image', 'comfy_generate_video'],
# GreenFood - Food & Eco
# Specialized: recipe analysis, eco-scoring
"greenfood": ['comfy_generate_image', 'comfy_generate_video'],
# Druid - Knowledge Search
# Specialized: deep RAG, document comparison
"druid": ['comfy_generate_image', 'comfy_generate_video'],
# DaarWizz - DAO Coordination
# Specialized: governance tools, voting, treasury
"daarwizz": ['comfy_generate_image', 'comfy_generate_video'],
# Clan - Community
# Specialized: event management, polls, member tracking
"clan": ['comfy_generate_image', 'comfy_generate_video'],
# Eonarch - Philosophy & Evolution
# Specialized: concept mapping, timeline analysis
"eonarch": ['comfy_generate_image', 'comfy_generate_video'],
# SenpAI (Gordon Senpai) - Trading & Markets
# Specialized: real-time market data, features, signals
"senpai": ['market_data', 'comfy_generate_image', 'comfy_generate_video'],
# 1OK - Window Master Assistant
# Specialized: CRM flow, quoting, PDF docs, scheduling
"oneok": [
"crm_search_client",
"crm_upsert_client",
@@ -104,7 +129,32 @@ AGENT_SPECIALIZED_TOOLS = {
"yaromir": ['comfy_generate_image', 'comfy_generate_video'],
# Sofiia - Chief AI Architect
"sofiia": ['comfy_generate_image', 'comfy_generate_video'],
"sofiia": [
'comfy_generate_image',
'comfy_generate_video',
'risk_engine_tool',
'architecture_pressure_tool',
'backlog_tool',
'job_orchestrator_tool',
'dependency_scanner_tool',
'incident_intelligence_tool',
'cost_analyzer_tool',
'pieces_tool',
'notion_tool',
],
# Admin - platform operations
"admin": [
'risk_engine_tool',
'architecture_pressure_tool',
'backlog_tool',
'job_orchestrator_tool',
'dependency_scanner_tool',
'incident_intelligence_tool',
'cost_analyzer_tool',
'pieces_tool',
'notion_tool',
],
# Daarion - Media Generation
"daarion": ['comfy_generate_image', 'comfy_generate_video'],
@@ -150,11 +200,99 @@ AGENT_CREW_TEAMS = {
},
}
# ─── Rollout Config Loader ────────────────────────────────────────────────────
def get_agent_tools(agent_id: str) -> list:
"""Get all tools for an agent: standard stack + specialized."""
# Module-level cache for the parsed rollout config (populated lazily).
_rollout_config = None
_rollout_loaded = False


def _load_rollout_config() -> dict:
    """Load tools_rollout.yml once and cache the parsed mapping.

    Returns an empty dict when the file is missing/unparseable, so callers
    can fall back to the legacy hard-coded tool lists.
    """
    global _rollout_config, _rollout_loaded
    if not _rollout_loaded:
        path = Path(__file__).parent.parent.parent / "config" / "tools_rollout.yml"
        try:
            import yaml  # imported lazily so a missing PyYAML degrades gracefully
            with open(path, "r") as fh:
                parsed = yaml.safe_load(fh)
            _rollout_config = parsed or {}
            logger.debug(f"Loaded tools_rollout.yml: {list(_rollout_config.keys())}")
        except Exception as e:
            # Best-effort: any failure (missing file, bad YAML, no yaml module)
            # falls back to the legacy configuration.
            logger.warning(f"Could not load tools_rollout.yml: {e}. Using legacy config.")
            _rollout_config = {}
        finally:
            _rollout_loaded = True
        return _rollout_config
    return _rollout_config or {}
def _expand_group(group_ref: str, config: dict, seen: Optional[set] = None) -> List[str]:
"""Expand @group_name reference recursively. Prevents circular refs."""
if seen is None:
seen = set()
if group_ref.startswith("@"):
group_name = group_ref[1:]
if group_name in seen:
logger.warning(f"Circular group reference: {group_name}")
return []
seen.add(group_name)
group_tools = config.get(group_name, [])
result = []
for item in group_tools:
result.extend(_expand_group(item, config, seen))
return result
else:
return [group_ref]
def _get_role_tools(agent_id: str, config: dict) -> List[str]:
"""Get tools for agent's role via rollout config."""
agent_roles = config.get("agent_roles", {})
role = agent_roles.get(agent_id, "agent_default")
role_map = config.get("role_map", {})
role_config = role_map.get(role, role_map.get("agent_default", {}))
role_tool_refs = role_config.get("tools", [])
tools = []
for ref in role_tool_refs:
tools.extend(_expand_group(ref, config))
return tools
def get_agent_tools(agent_id: str) -> List[str]:
    """
    Get all tools for an agent using merge policy:
    effective_tools = unique(DEFAULT_TOOLS_BY_ROLE ∪ FULL_STANDARD_STACK ∪ agent.specialized_tools)
    - First try rollout config for role-based tools.
    - Always union with FULL_STANDARD_STACK for backward compat.
    - Always add agent-specific specialized tools.
    - Stable order: role_tools → standard_stack → specialized (deduped).
    """
    rollout = _load_rollout_config()
    # 1. Role-based default tools (from rollout config)
    role_tools = _get_role_tools(agent_id, rollout) if rollout else []
    # 2. Legacy full standard stack (guaranteed baseline)
    standard_tools = list(FULL_STANDARD_STACK)
    # 3. Agent-specific specialized tools
    specialized = AGENT_SPECIALIZED_TOOLS.get(agent_id, [])
    # Merge with stable order, deduplicate preserving first occurrence.
    # Fix: a stray legacy `return FULL_STANDARD_STACK + specialized` line
    # short-circuited the function here, making the merge below dead code.
    merged = []
    seen = set()
    for tool in role_tools + standard_tools + specialized:
        if tool not in seen:
            merged.append(tool)
            seen.add(tool)
    logger.debug(f"Agent '{agent_id}' effective tools ({len(merged)}): {merged[:10]}...")
    return merged
def is_tool_allowed(agent_id: str, tool_name: str) -> bool:
@@ -163,6 +301,21 @@ def is_tool_allowed(agent_id: str, tool_name: str) -> bool:
return tool_name in allowed
def get_agent_role(agent_id: str) -> str:
    """Get the role assigned to an agent via rollout config."""
    # Agents without an explicit mapping fall back to the default role.
    roles = _load_rollout_config().get("agent_roles", {})
    return roles.get(agent_id, "agent_default")
def get_agent_crew(agent_id: str) -> dict:
    """Get CrewAI team configuration for an agent."""
    # Unknown agents get an empty default team.
    fallback = {"team_name": "Default", "agents": []}
    return AGENT_CREW_TEAMS.get(agent_id, fallback)
def reload_rollout_config():
    """Force reload of tools_rollout.yml (for hot-reload/testing)."""
    global _rollout_config, _rollout_loaded
    # Drop the cached state so the next load re-reads the file from disk.
    _rollout_loaded = False
    _rollout_config = None
    return _load_rollout_config()

View File

@@ -26,7 +26,7 @@ CACHE_TTL = int(os.getenv("GLOBAL_CAPS_TTL", "30"))
NATS_DISCOVERY_TIMEOUT_MS = int(os.getenv("NATS_DISCOVERY_TIMEOUT_MS", "500"))
NATS_ENABLED = os.getenv("ENABLE_GLOBAL_CAPS_NATS", "true").lower() in ("true", "1")
CAPS_DISCOVERY_SUBJECT = "node.*.capabilities.get"
CAPS_DISCOVERY_SUBJECT = "fabric.capabilities.discover"
CAPS_INBOX_PREFIX = "_CAPS_REPLY"
_node_cache: Dict[str, Dict[str, Any]] = {}

View File

@@ -178,7 +178,7 @@ agents:
greenfood:
description: "GREENFOOD Assistant - ERP orchestrator"
default_llm: mistral_community_7b
default_llm: qwen3_support_8b
system_prompt: |
Ти — GREENFOOD Assistant, фронтовий оркестратор ERP-системи для крафтових виробників.
Розумій, хто з тобою говорить (комітент, покупець, склад, бухгалтер), та делегуй задачі відповідним під-агентам.
@@ -217,7 +217,7 @@ agents:
clan:
description: "CLAN — комунікації кооперативів"
default_llm: mistral_community_7b
default_llm: qwen3_support_8b
system_prompt: |
Ти — CLAN, координуєш комунікацію, оголошення та community operations.
Відповідай лише коли тема стосується координації, а звернення адресовано тобі (тег @ClanBot чи згадка кланів).
@@ -225,7 +225,7 @@ agents:
soul:
description: "SOUL / Spirit — духовний гід комʼюніті"
default_llm: mistral_community_7b
default_llm: qwen3_support_8b
system_prompt: |
Ти — Spirit/SOUL, ментор живої операційної системи.
Пояснюй місію, підтримуй мораль, працюй із soft-skills.
@@ -298,7 +298,7 @@ agents:
eonarch:
description: "EONARCH — мультимодальний агент (vision + chat)"
default_llm: mistral_community_7b
default_llm: qwen3_support_8b
system_prompt: |
Ти — EONARCH, аналізуєш зображення, PDF та текстові запити.
Враховуй присутність інших ботів та працюй лише за прямим тегом або коли потрібно мультимодальне тлумачення.

File diff suppressed because it is too large Load Diff