Files
microdao-daarion/tests/test_backlog_generator.py
Apple 129e4ea1fc feat(platform): add new services, tools, tests and crews modules
New router intelligence modules (26 files): alert_ingest/store, audit_store,
architecture_pressure, backlog_generator/store, cost_analyzer, data_governance,
dependency_scanner, drift_analyzer, incident_* (5 files), llm_enrichment,
platform_priority_digest, provider_budget, release_check_runner, risk_* (6 files),
signature_state_store, sofiia_auto_router, tool_governance

New services:
- sofiia-console: Dockerfile, adapters/, monitor/nodes/ops/voice modules, launchd, react static
- memory-service: integration_endpoints, integrations, voice_endpoints, static UI
- aurora-service: full app suite (analysis, job_store, orchestrator, reporting, schemas, subagents)
- sofiia-supervisor: new supervisor service
- aistalk-bridge-lite: Telegram bridge lite
- calendar-service: CalDAV calendar service with reminders
- mlx-stt-service / mlx-tts-service: Apple Silicon speech services
- binance-bot-monitor: market monitor service
- node-worker: STT/TTS memory providers

New tools (9): agent_email, browser_tool, contract_tool, observability_tool,
oncall_tool, pr_reviewer_tool, repo_tool, safe_code_executor, secure_vault

New crews: agromatrix_crew (12 modules: depth_classifier, doc_facts, doc_focus,
farm_state, light_reply, llm_factory, memory_manager, proactivity, reflection_engine,
session_context, style_adapter, telemetry)

Tests: 85+ test files for all new modules
Made-with: Cursor
2026-03-03 07:14:14 -08:00

272 lines
10 KiB
Python

"""
tests/test_backlog_generator.py — Auto-generation engine unit tests.
"""
import os
import sys
import pytest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "services", "router"))
from backlog_store import MemoryBacklogStore, _new_id
from backlog_generator import (
generate_from_pressure_digest,
generate_from_risk_digest,
_match_rule,
_build_item_from_rule,
_make_dedupe_key,
_builtin_backlog_defaults,
_reload_backlog_policy,
)
# Shared built-in backlog policy, used as the default fixture by every test below.
POLICY = _builtin_backlog_defaults()
def _pressure_digest(services=None):
"""Build a minimal platform_priority_digest JSON."""
if services is None:
services = [
{
"service": "gateway",
"score": 85,
"band": "critical",
"requires_arch_review": True,
"signals_summary": ["High recurrence 30d", "Overdue follow-ups"],
"components": {"followups_overdue": 3, "regressions_30d": 2},
"evidence_refs": {"incidents": ["inc_001", "inc_002"]},
},
{
"service": "router",
"score": 55,
"band": "high",
"requires_arch_review": False,
"signals_summary": ["Escalations 30d"],
"components": {"followups_overdue": 0},
"evidence_refs": {},
},
]
return {"week": "2026-W08", "top_pressure_services": services}
def _risk_digest(services=None):
if services is None:
services = [
{
"service": "gateway",
"score": 72,
"band": "high",
"trend": {"delta_24h": 15},
"components": {"slo": {"violations": 2}},
"reasons": ["SLO violation detected"],
"attribution": {"evidence_refs": {"alerts": ["alrt_001"]}},
}
]
return {"top_services": services}
class TestMatchRule:
    """Exercise _match_rule against each built-in generation rule."""

    def _rule(self, index):
        # Convenience accessor into the built-in policy's rule list.
        return POLICY["generation"]["rules"][index]

    def test_arch_review_required_matches(self):
        # Rule 0 is arch_review_required.
        assert _match_rule(self._rule(0), {"pressure_requires_arch_review": True}) is True

    def test_arch_review_required_no_match(self):
        assert _match_rule(self._rule(0), {"pressure_requires_arch_review": False}) is False

    def test_high_pressure_refactor_both_high(self):
        context = {"pressure_band": "critical", "risk_band": "high"}
        assert _match_rule(self._rule(1), context) is True

    def test_high_pressure_refactor_only_one_high(self):
        # The refactor rule needs both bands elevated; one alone is not enough.
        context = {"pressure_band": "low", "risk_band": "high"}
        assert _match_rule(self._rule(1), context) is False

    def test_slo_violations_match(self):
        assert _match_rule(self._rule(2), {"slo_violations": 2}) is True

    def test_slo_violations_zero_no_match(self):
        assert _match_rule(self._rule(2), {"slo_violations": 0}) is False

    def test_followup_backlog_match(self):
        assert _match_rule(self._rule(3), {"followups_overdue": 3}) is True

    def test_followup_backlog_zero_no_match(self):
        assert _match_rule(self._rule(3), {"followups_overdue": 0}) is False
class TestBuildItemFromRule:
    """Tests for _build_item_from_rule, always via the arch-review rule."""

    @staticmethod
    def _build(service, ctx):
        # Every case here exercises rule 0 (arch_review_required) for week
        # 2026-W08 in the "prod" environment.
        rule = POLICY["generation"]["rules"][0]
        return _build_item_from_rule(service, rule, ctx, POLICY, "2026-W08", "prod")

    def test_title_template(self):
        ctx = {
            "pressure_requires_arch_review": True,
            "pressure_score": 80,
            "pressure_band": "critical",
            "followups_overdue": 2,
            "evidence_refs": {},
        }
        item = self._build("gateway", ctx)
        assert item is not None
        assert "gateway" in item.title
        assert item.category == "arch_review"

    def test_priority_from_category(self):
        item = self._build("svc", {"evidence_refs": {}})
        assert item.priority == "P1"

    def test_due_date_is_set(self):
        item = self._build("svc", {"evidence_refs": {}})
        assert item.due_date != ""

    def test_owner_override_for_gateway(self):
        # The policy routes gateway arch reviews to the CTO.
        item = self._build("gateway", {"evidence_refs": {}})
        assert item.owner == "cto"

    def test_owner_default_for_other_service(self):
        item = self._build("backend", {"evidence_refs": {}})
        assert item.owner == "oncall"

    def test_dedupe_key_format(self):
        item = self._build("gateway", {"evidence_refs": {}})
        assert item.dedupe_key == "platform_backlog:2026-W08:prod:gateway:arch_review"

    def test_evidence_refs_propagated(self):
        ctx = {
            "evidence_refs": {"incidents": ["inc_001"]},
            "pressure_score": 80,
            "pressure_band": "critical",
            "followups_overdue": 2,
        }
        item = self._build("gateway", ctx)
        assert item.evidence_refs.get("incidents") == ["inc_001"]

    def test_description_includes_signals(self):
        ctx = {
            "pressure_score": 80,
            "pressure_band": "critical",
            "signals_summary": ["High recurrence 30d"],
            "followups_overdue": 2,
            "evidence_refs": {},
        }
        item = self._build("gateway", ctx)
        assert "80" in item.description or "Pressure" in item.description
class TestGenerateFromPressureDigest:
    """End-to-end tests for weekly backlog generation from the pressure digest."""

    def test_generates_items(self):
        backlog = MemoryBacklogStore()
        outcome = generate_from_pressure_digest(
            _pressure_digest(), env="prod", store=backlog, policy=POLICY
        )
        assert outcome["created"] >= 1
        assert len(outcome["items"]) >= 1

    def test_idempotency_second_run_updates(self):
        backlog = MemoryBacklogStore()
        digest = _pressure_digest()
        first = generate_from_pressure_digest(digest, env="prod", store=backlog, policy=POLICY)
        second = generate_from_pressure_digest(digest, env="prod", store=backlog, policy=POLICY)
        # A repeat run over the same digest must dedupe: no new creations.
        assert first["created"] >= 1
        assert second["created"] == 0
        assert second["updated"] >= 1

    def test_evidence_refs_propagated(self):
        backlog = MemoryBacklogStore()
        generate_from_pressure_digest(
            _pressure_digest(), env="prod", store=backlog, policy=POLICY
        )
        gateway_items = backlog.list_items({"service": "gateway"})
        # At least one generated item must carry the incident evidence refs.
        assert any("incidents" in (it.evidence_refs or {}) for it in gateway_items)

    def test_risk_digest_enriches_context(self):
        backlog = MemoryBacklogStore()
        outcome = generate_from_pressure_digest(
            _pressure_digest(),
            env="prod",
            store=backlog,
            policy=POLICY,
            risk_digest_data=_risk_digest(),
        )
        assert outcome["created"] >= 1
        # SLO rule should also fire for gateway (slo_violations=2)
        categories = {it.category for it in backlog.list_items({"service": "gateway"})}
        assert "slo_hardening" in categories

    def test_max_items_per_run_capped(self):
        capped_policy = dict(POLICY)
        capped_policy["defaults"] = dict(POLICY["defaults"])
        capped_policy["defaults"]["max_items_per_run"] = 1
        many_services = []
        for idx in range(5):
            many_services.append({
                "service": f"svc_{idx}", "score": 80, "band": "critical",
                "requires_arch_review": True, "signals_summary": [],
                "components": {"followups_overdue": 0}, "evidence_refs": {},
            })
        backlog = MemoryBacklogStore()
        outcome = generate_from_pressure_digest(
            _pressure_digest(services=many_services),
            env="prod", store=backlog, policy=capped_policy,
        )
        assert outcome["created"] + outcome["updated"] <= 1

    def test_week_field_from_digest(self):
        backlog = MemoryBacklogStore()
        outcome = generate_from_pressure_digest(
            _pressure_digest(), env="prod", store=backlog, policy=POLICY
        )
        assert outcome["week"] == "2026-W08"

    def test_weekly_disabled_skips(self):
        disabled_policy = dict(POLICY)
        disabled_policy["generation"] = dict(POLICY["generation"])
        disabled_policy["generation"]["weekly_from_pressure_digest"] = False
        backlog = MemoryBacklogStore()
        outcome = generate_from_pressure_digest(
            _pressure_digest(), env="prod", store=backlog, policy=disabled_policy
        )
        assert outcome["created"] == 0
        assert "skipped_reason" in outcome
class TestGenerateFromRiskDigest:
    """Tests for the opt-in daily generation path driven by the risk digest."""

    def test_disabled_by_default(self):
        backlog = MemoryBacklogStore()
        outcome = generate_from_risk_digest(
            _risk_digest(), env="prod", store=backlog, policy=POLICY,
        )
        # Built-in policy leaves the daily path off, so nothing is created.
        assert outcome["created"] == 0
        assert "skipped_reason" in outcome

    def test_enabled_creates_slo_item(self):
        enabled_policy = dict(POLICY)
        enabled_policy["generation"] = dict(POLICY["generation"])
        enabled_policy["generation"]["daily_from_risk_digest"] = True
        backlog = MemoryBacklogStore()
        outcome = generate_from_risk_digest(
            _risk_digest(), env="prod", store=backlog, policy=enabled_policy,
        )
        # SLO rule fires for gateway (2 violations)
        assert outcome["created"] >= 1
        gateway_items = backlog.list_items({"service": "gateway"})
        assert any(it.category == "slo_hardening" for it in gateway_items)
class TestMakeDedupeKey:
    """Tests for the dedupe-key formatting helper."""

    def test_format(self):
        parts = ("platform_backlog", "2026-W08", "prod", "gateway", "arch_review")
        assert _make_dedupe_key(*parts) == "platform_backlog:2026-W08:prod:gateway:arch_review"

    def test_different_services_different_keys(self):
        keys = {
            _make_dedupe_key("platform_backlog", "2026-W08", "prod", svc, "arch_review")
            for svc in ("svc_a", "svc_b")
        }
        # Distinct services must never collide on the same dedupe key.
        assert len(keys) == 2