New router intelligence modules (26 files): alert_ingest/store, audit_store, architecture_pressure, backlog_generator/store, cost_analyzer, data_governance, dependency_scanner, drift_analyzer, incident_* (5 files), llm_enrichment, platform_priority_digest, provider_budget, release_check_runner, risk_* (6 files), signature_state_store, sofiia_auto_router, tool_governance New services: - sofiia-console: Dockerfile, adapters/, monitor/nodes/ops/voice modules, launchd, react static - memory-service: integration_endpoints, integrations, voice_endpoints, static UI - aurora-service: full app suite (analysis, job_store, orchestrator, reporting, schemas, subagents) - sofiia-supervisor: new supervisor service - aistalk-bridge-lite: Telegram bridge lite - calendar-service: CalDAV calendar service with reminders - mlx-stt-service / mlx-tts-service: Apple Silicon speech services - binance-bot-monitor: market monitor service - node-worker: STT/TTS memory providers New tools (9): agent_email, browser_tool, contract_tool, observability_tool, oncall_tool, pr_reviewer_tool, repo_tool, safe_code_executor, secure_vault New crews: agromatrix_crew (10 modules: depth_classifier, doc_facts, doc_focus, farm_state, light_reply, llm_factory, memory_manager, proactivity, reflection_engine, session_context, style_adapter, telemetry) Tests: 85+ test files for all new modules Made-with: Cursor
209 lines
8.2 KiB
Python
"""
|
|
tests/test_backlog_endpoints.py — HTTP endpoint + RBAC unit tests.
|
|
"""
|
|
import os
|
|
import sys
|
|
from unittest.mock import MagicMock, patch
|
|
|
|
import pytest
|
|
|
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "services", "router"))
|
|
|
|
from backlog_store import (
|
|
MemoryBacklogStore, BacklogItem, _new_id, _now_iso, _reset_store_for_tests,
|
|
)
|
|
|
|
|
|
def _make_item(**overrides) -> BacklogItem:
    """Build a BacklogItem populated with sensible defaults.

    Any keyword argument overrides the corresponding default field before
    the item is materialized via ``BacklogItem.from_dict``.
    """
    payload = {
        "id": _new_id("bl"),
        "created_at": _now_iso(),
        "updated_at": _now_iso(),
        "env": "prod",
        "service": "gateway",
        "category": "arch_review",
        "title": "[ARCH] Review required: gateway",
        "description": "test",
        "priority": "P1",
        "status": "open",
        "owner": "cto",
        "due_date": "2026-03-15",
        "source": "digest",
        # Fresh dedupe key per call so repeated creates don't collide.
        "dedupe_key": _new_id("dk"),
        "evidence_refs": {"incidents": ["inc_001"]},
        "tags": ["auto"],
        "meta": {},
    }
    payload.update(overrides)
    return BacklogItem.from_dict(payload)
|
|
|
|
|
|
@pytest.fixture(autouse=True)
def reset_store():
    """Wipe the shared backlog store before and after every test.

    autouse=True guarantees isolation even for tests that never request
    this fixture explicitly.
    """
    _reset_store_for_tests()
    yield
    _reset_store_for_tests()
|
|
|
|
|
|
@pytest.fixture
def mem_store():
    """Provide a fresh in-memory backlog store for each test."""
    return MemoryBacklogStore()
|
|
|
|
|
|
class TestDashboardEndpoint:
    """Shape and aggregation behaviour of the dashboard endpoint."""

    def test_dashboard_structure(self, mem_store):
        mem_store.create(_make_item())
        dash = mem_store.dashboard(env="prod")
        # Every aggregate section must be present in the payload.
        for key in ("total", "status_counts", "priority_counts", "overdue", "top_services"):
            assert key in dash
        assert dash["total"] >= 1

    def test_dashboard_empty_env(self, mem_store):
        dashboard = mem_store.dashboard(env="staging")
        assert dashboard["total"] == 0
        assert dashboard["overdue_count"] == 0

    def test_dashboard_priority_counts(self, mem_store):
        for idx, prio in enumerate(("P0", "P1", "P2")):
            mem_store.create(_make_item(priority=prio, dedupe_key=f"k{idx}"))
        counts = mem_store.dashboard(env="prod")["priority_counts"]
        assert counts.get("P0", 0) >= 1
        assert counts.get("P1", 0) >= 1
        assert counts.get("P2", 0) >= 1

    def test_dashboard_status_counts(self, mem_store):
        mem_store.create(_make_item(status="open", dedupe_key="s1"))
        mem_store.create(_make_item(status="done", dedupe_key="s2"))
        counts = mem_store.dashboard(env="prod")["status_counts"]
        assert "open" in counts
        assert "done" in counts

    def test_dashboard_overdue_list(self, mem_store):
        # A due date far in the past must surface in the overdue section.
        mem_store.create(_make_item(due_date="2020-01-01", status="open", dedupe_key="overdue"))
        dashboard = mem_store.dashboard(env="prod")
        assert dashboard["overdue_count"] >= 1
        assert any(entry["due_date"] == "2020-01-01" for entry in dashboard["overdue"])
|
|
|
|
|
|
class TestListEndpoint:
    """Listing, filtering, and pagination of backlog items."""

    def test_list_returns_all_env(self, mem_store):
        mem_store.create(_make_item(dedupe_key="l1"))
        mem_store.create(_make_item(dedupe_key="l2"))
        assert len(mem_store.list_items({"env": "prod"})) >= 2

    def test_list_filter_by_service(self, mem_store):
        mem_store.create(_make_item(service="gateway", dedupe_key="g1"))
        mem_store.create(_make_item(service="router", dedupe_key="r1"))
        for item in mem_store.list_items({"service": "router"}):
            assert item.service == "router"

    def test_list_filter_by_status_list(self, mem_store):
        for state in ("open", "blocked", "done"):
            mem_store.create(_make_item(status=state, dedupe_key=f"d_{state}"))
        matched = mem_store.list_items({"status": ["open", "blocked"]})
        found = {item.status for item in matched}
        assert "done" not in found
        assert "open" in found or "blocked" in found

    def test_list_pagination(self, mem_store):
        for n in range(5):
            mem_store.create(_make_item(dedupe_key=f"page_{n}"))
        first = mem_store.list_items({"env": "prod"}, limit=2, offset=0)
        second = mem_store.list_items({"env": "prod"}, limit=2, offset=2)
        assert len(first) == 2
        assert len(second) >= 1
        # Pages must not share items.
        assert {item.id for item in first}.isdisjoint(item.id for item in second)
|
|
|
|
|
|
class TestGetEndpoint:
    """Single-item retrieval by id."""

    def test_get_known_item(self, mem_store):
        created = _make_item()
        mem_store.create(created)
        found = mem_store.get(created.id)
        assert found is not None
        assert found.id == created.id
        # Evidence refs must round-trip through the store unchanged.
        assert found.evidence_refs.get("incidents") == ["inc_001"]

    def test_get_unknown_returns_none(self, mem_store):
        assert mem_store.get("nonexistent_id") is None
|
|
|
|
|
|
class TestRbacReadWriteAdmin:
    """
    RBAC tests verify that entitlement names are correctly defined in policy
    and that read/write/admin actions map to the correct entitlements.

    The policy file (config/rbac_tools_matrix.yml) is loaded through shared
    helpers so the path-building/parsing boilerplate lives in one place.
    """

    @staticmethod
    def _load_policy() -> dict:
        """Load and parse config/rbac_tools_matrix.yml relative to this test file."""
        import yaml  # local import: only RBAC tests need PyYAML

        policy_path = os.path.join(
            os.path.dirname(__file__), "..", "config", "rbac_tools_matrix.yml"
        )
        with open(policy_path) as f:
            return yaml.safe_load(f)

    def _backlog_actions(self) -> dict:
        """Return the action -> config map for backlog_tool, or {} if absent."""
        return self._load_policy().get("tools", {}).get("backlog_tool", {}).get("actions", {})

    def _role_entitlements(self, role: str) -> list:
        """Return the entitlement list granted to *role*, or [] if absent."""
        return self._load_policy().get("role_entitlements", {}).get(role, [])

    def test_rbac_read_entitlements_defined(self):
        actions = self._backlog_actions()
        for action in ("list", "dashboard", "get"):
            assert actions.get(action, {}).get("entitlements") == ["tools.backlog.read"], \
                f"Action {action} should require tools.backlog.read"

    def test_rbac_write_entitlements_defined(self):
        actions = self._backlog_actions()
        for action in ("create", "upsert", "set_status", "add_comment", "close"):
            assert actions.get(action, {}).get("entitlements") == ["tools.backlog.write"], \
                f"Action {action} should require tools.backlog.write"

    def test_rbac_admin_entitlements_defined(self):
        actions = self._backlog_actions()
        for action in ("auto_generate_weekly", "cleanup"):
            assert actions.get(action, {}).get("entitlements") == ["tools.backlog.admin"], \
                f"Action {action} should require tools.backlog.admin"

    def test_rbac_cto_has_all_entitlements(self):
        cto_ents = self._role_entitlements("agent_cto")
        for ent in ("tools.backlog.read", "tools.backlog.write", "tools.backlog.admin"):
            assert ent in cto_ents, f"CTO missing entitlement: {ent}"

    def test_rbac_oncall_has_read_write(self):
        oncall_ents = self._role_entitlements("agent_oncall")
        assert "tools.backlog.read" in oncall_ents
        assert "tools.backlog.write" in oncall_ents
        # On-call may act on items but must not run admin maintenance actions.
        assert "tools.backlog.admin" not in oncall_ents

    def test_rbac_monitor_has_read_only(self):
        # interface or monitor role should have read but not write
        monitor_ents = self._role_entitlements("agent_interface")
        assert "tools.backlog.read" in monitor_ents
        assert "tools.backlog.write" not in monitor_ents