NCS:
- _collect_worker_caps() fetches capability flags from node-worker /caps
- _derive_capabilities() merges served model types + worker provider flags
- installed_artifacts replaces inventory_only (disk scan with DISK_SCAN_PATHS env)
- New endpoints: /capabilities/caps, /capabilities/installed
Node Worker:
- STT_PROVIDER, TTS_PROVIDER, OCR_PROVIDER, IMAGE_PROVIDER env flags
- /caps endpoint returns capabilities + providers for NCS aggregation
- STT adapter (providers/stt_mlx_whisper.py) — remote + local mode
- TTS adapter (providers/tts_mlx_kokoro.py) — remote + local mode
- OCR handler via vision_prompted (ollama_vision with OCR prompt)
- NATS subjects: node.{id}.stt/tts/ocr/image.request
Router:
- POST /v1/capability/{stt,tts,ocr,image} — capability-based offload routing
- GET /v1/capabilities — global view with capabilities_by_node
- require_fresh_caps(ttl) preflight guard
- find_nodes_with_capability(cap) + load-based node selection
Ops:
- ops/fabric_snapshot.py — full runtime snapshot collector
- ops/fabric_preflight.sh — quick check + snapshot save + diff
- docs/fabric_contract.md — Dev Contract v0.1 (preflight-first)
- tests/test_fabric_contract.py — CI enforcement (6 tests)
Made-with: Cursor
138 lines · 4.9 KiB · Python
"""CI checks enforcing Fabric Dev Contract v0.1.
|
|
|
|
No hardcoded models in swapper configs.
|
|
No silent cloud fallbacks without WARNING.
|
|
No hardcoded Docker bridge IPs (unless explicitly allowed).
|
|
"""
|
|
import os
|
|
import re
|
|
from pathlib import Path
|
|
|
|
REPO_ROOT = Path(__file__).parent.parent
|
|
|
|
ALLOWED_BRIDGE_IPS = {
|
|
"docker-compose.node1.yml", # NODA1 Ollama uses 172.18.0.1
|
|
"docker-compose.staging.yml", # legacy staging config
|
|
}
|
|
|
|
ALLOWED_BRIDGE_ROUTER_CONFIGS = {
|
|
"services/router/router-config.yml", # NODA1 config, Ollama on bridge
|
|
"services/router/router-config.node2.yml", # NODA2 config, Ollama on bridge
|
|
"router-config.yml", # root-level copy
|
|
}
|
|
|
|
SWAPPER_CONFIG_GLOBS = [
|
|
"services/swapper-service/config/*.yaml",
|
|
"services/swapper-service/config/*.yml",
|
|
]
|
|
|
|
ROUTER_CONFIG_GLOBS = [
|
|
"services/router/router-config*.yml",
|
|
"router-config*.yml",
|
|
]
|
|
|
|
|
|
def _read(path: Path) -> str:
|
|
try:
|
|
return path.read_text(encoding="utf-8", errors="replace")
|
|
except Exception:
|
|
return ""
|
|
|
|
|
|
def test_no_hardcoded_models_in_swapper():
    """Swapper configs must not contain models: blocks (served_models come from NCS)."""
    # A bare top-of-block "models:" line signals a static model list.
    models_re = re.compile(r"^models:\s*$", re.MULTILINE)
    for glob_pattern in SWAPPER_CONFIG_GLOBS:
        for cfg in REPO_ROOT.glob(glob_pattern):
            text = _read(cfg)
            assert models_re.search(text) is None, (
                f"BLOCKED: {cfg.relative_to(REPO_ROOT)} contains 'models:' block. "
                f"Served models must come from NCS, not static config."
            )
def test_no_hardcoded_bridge_ips_in_router_config():
    """Router configs should not contain 172.17.0.1 or 172.18.0.1 Docker bridge IPs.

    Files listed in ALLOWED_BRIDGE_ROUTER_CONFIGS are exempt.
    """
    # (?!\d) prevents false positives on longer container addresses such as
    # 172.17.0.10 or 172.18.0.100, which are not the bridge gateway IP.
    bridge_pattern = re.compile(r"172\.(?:17|18)\.0\.1(?!\d)")
    for pattern in ROUTER_CONFIG_GLOBS:
        for f in REPO_ROOT.glob(pattern):
            rel = str(f.relative_to(REPO_ROOT))
            if rel in ALLOWED_BRIDGE_ROUTER_CONFIGS:
                continue  # explicitly allowed to use the bridge IP
            content = _read(f)
            assert bridge_pattern.search(content) is None, (
                f"BLOCKED: {rel} contains Docker bridge IP(s). "
                f"Use service names or host.docker.internal instead."
            )
def test_no_bridge_ips_in_compose_unless_allowed():
    """Docker compose files should not contain bridge IPs except in allowed files."""
    # (?!\d) avoids matching longer addresses such as 172.17.0.100,
    # which are container IPs rather than the bridge gateway.
    bridge_pattern = re.compile(r"172\.(?:17|18)\.0\.1(?!\d)")
    for f in REPO_ROOT.glob("docker-compose*.yml"):
        if f.name in ALLOWED_BRIDGE_IPS:
            continue  # file is explicitly whitelisted
        content = _read(f)
        assert bridge_pattern.search(content) is None, (
            f"WARNING: {f.name} contains Docker bridge IP(s). "
            f"Add to ALLOWED_BRIDGE_IPS if intentional, or use service names."
        )
def test_no_silent_cloud_fallback():
    """Router main.py must log WARNING before any cloud fallback.

    Exemptions: cloud_providers declarations (explicit, not hidden)
    and config/mapping dictionaries.
    """
    router_main = REPO_ROOT / "services" / "router" / "main.py"
    if not router_main.exists():
        return  # nothing to check in this checkout
    content = _read(router_main)

    cloud_keywords = ["deepseek-chat", "deepseek-coder", "gpt-4", "gpt-3.5", "claude"]
    exempt_contexts = [
        "cloud_providers",  # explicit cloud provider list
        "CLOUD_MODELS", "cloud_model_map", "model_config",
        "llm_profile.get",  # default model from profile config
        "profile.get",
    ]
    # Markers that count as a visible warning near the cloud reference.
    # The context window is lowercased before matching, so markers must be
    # lowercase: "warning" covers logger.warning/log.warning and the literal
    # WARNING, "warn" covers logger.warn/log.warn. (The previous list included
    # the uppercase literal "WARNING", which could never match lowercased text.)
    warning_markers = ("warning", "warn", "fallback")
    for kw in cloud_keywords:
        positions = [m.start() for m in re.finditer(re.escape(kw), content)]
        for pos in positions:
            # Look at a window around the keyword: 500 chars before (where a
            # guard log would normally appear) and 200 after.
            context_start = max(0, pos - 500)
            context = content[context_start:pos + len(kw) + 200]
            if any(ex in context for ex in exempt_contexts):
                continue  # explicit declaration/config, not a hidden fallback
            lowered = context.lower()
            has_warning = any(marker in lowered for marker in warning_markers)
            if not has_warning:
                line_no = content[:pos].count("\n") + 1
                assert False, (
                    f"BLOCKED: Router main.py line ~{line_no} references '{kw}' "
                    f"without WARNING/fallback log nearby. "
                    f"Silent cloud fallback violates Fabric Contract §6."
                )
def test_preflight_snapshots_dir_exists():
    """ops/preflight_snapshots/ must exist."""
    snapshots_dir = REPO_ROOT / "ops" / "preflight_snapshots"
    missing_msg = (
        "ops/preflight_snapshots/ directory missing. "
        "Create it: mkdir -p ops/preflight_snapshots"
    )
    assert snapshots_dir.exists(), missing_msg
def test_fabric_contract_doc_exists():
    """docs/fabric_contract.md must exist."""
    contract_path = REPO_ROOT / "docs" / "fabric_contract.md"
    if not contract_path.exists():
        assert False, "docs/fabric_contract.md missing — Fabric Contract not documented"