Files
microdao-daarion/tests/test_backlog_store_postgres.py
Apple 129e4ea1fc feat(platform): add new services, tools, tests and crews modules
New router intelligence modules (26 files): alert_ingest/store, audit_store,
architecture_pressure, backlog_generator/store, cost_analyzer, data_governance,
dependency_scanner, drift_analyzer, incident_* (5 files), llm_enrichment,
platform_priority_digest, provider_budget, release_check_runner, risk_* (6 files),
signature_state_store, sofiia_auto_router, tool_governance

New services:
- sofiia-console: Dockerfile, adapters/, monitor/nodes/ops/voice modules, launchd, react static
- memory-service: integration_endpoints, integrations, voice_endpoints, static UI
- aurora-service: full app suite (analysis, job_store, orchestrator, reporting, schemas, subagents)
- sofiia-supervisor: new supervisor service
- aistalk-bridge-lite: Telegram bridge lite
- calendar-service: CalDAV calendar service with reminders
- mlx-stt-service / mlx-tts-service: Apple Silicon speech services
- binance-bot-monitor: market monitor service
- node-worker: STT/TTS memory providers

New tools (9): agent_email, browser_tool, contract_tool, observability_tool,
oncall_tool, pr_reviewer_tool, repo_tool, safe_code_executor, secure_vault

New crews: agromatrix_crew (12 modules: depth_classifier, doc_facts, doc_focus,
farm_state, light_reply, llm_factory, memory_manager, proactivity, reflection_engine,
session_context, style_adapter, telemetry)

Tests: 85+ test files for all new modules
Made-with: Cursor
2026-03-03 07:14:14 -08:00

195 lines
7.5 KiB
Python

"""
tests/test_backlog_store_postgres.py — Postgres backend unit tests (mocked psycopg2).
"""
import json
import os
import sys
import types
from unittest.mock import MagicMock, patch, call
import pytest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "services", "router"))
from backlog_store import (
BacklogItem, BacklogEvent, PostgresBacklogStore, _new_id, _now_iso,
)
def _make_item(**overrides) -> BacklogItem:
    """Build a valid BacklogItem from defaults, applying keyword overrides."""
    defaults = {
        "id": _new_id("bl"),
        "created_at": _now_iso(),
        "updated_at": _now_iso(),
        "env": "prod",
        "service": "router",
        "category": "refactor",
        "title": "[REF] Reduce pressure: router",
        "description": "desc",
        "priority": "P1",
        "status": "open",
        "owner": "cto",
        "due_date": "2026-04-01",
        "source": "digest",
        "dedupe_key": "platform_backlog:2026-W09:prod:router:refactor",
        "evidence_refs": {},
        "tags": [],
        "meta": {},
    }
    # Overrides win over defaults, exactly like dict.update() on the base dict.
    return BacklogItem.from_dict({**defaults, **overrides})
def _make_col(name):
c = MagicMock()
c.name = name
return c
def _build_row(item: BacklogItem):
    """Serialize *item* into a fake fetch row plus a matching cursor description.

    JSON-typed columns are dumped to strings, mirroring how they would come
    back from a text/jsonb column; scalar columns fall back to "" if absent.
    """
    columns = [
        "id", "created_at", "updated_at", "env", "service", "category",
        "title", "description", "priority", "status", "owner", "due_date",
        "source", "dedupe_key", "evidence_refs", "tags", "meta",
    ]
    json_columns = {"evidence_refs", "tags", "meta"}
    payload = item.to_dict()
    values = []
    for column in columns:
        if column in json_columns:
            values.append(json.dumps(payload[column]))
        else:
            values.append(payload.get(column, ""))
    description = [_make_col(column) for column in columns]
    return tuple(values), description
@pytest.fixture
def mock_psycopg2(monkeypatch):
    """Patch psycopg2 at the module level used by backlog_store."""
    fake_pg = MagicMock()
    # Intercept the import machinery so any `import psycopg2` inside the
    # store yields the fake; monkeypatch restores the real importer on teardown.
    monkeypatch.setattr("builtins.__import__", _make_import_patcher(fake_pg))
    return fake_pg
def _make_import_patcher(mock_pg):
real_import = __builtins__.__import__ if hasattr(__builtins__, "__import__") else __import__
def patched_import(name, *args, **kwargs):
if name == "psycopg2":
return mock_pg
return real_import(name, *args, **kwargs)
return patched_import
class TestPostgresCreate:
    """create() should issue an INSERT and hand the item back."""

    def test_create_executes_insert(self):
        pytest.importorskip("psycopg2", reason="psycopg2 not installed")
        store = PostgresBacklogStore(dsn="postgresql://test/db")
        item = _make_item()
        cur = MagicMock()
        conn = MagicMock()
        # MagicMock pre-wires magic methods; steer them rather than replacing.
        conn.__enter__.return_value = conn
        conn.__exit__.return_value = False
        conn.cursor.return_value.__enter__.return_value = cur
        conn.cursor.return_value.__exit__.return_value = False
        with patch.object(store, "_conn", return_value=conn):
            result = store.create(item)
        assert result.id == item.id
        assert cur.execute.called
        statement = cur.execute.call_args[0][0]
        assert "INSERT INTO backlog_items" in statement
class TestPostgresGet:
    """get() should hydrate a row into a BacklogItem, or return None."""

    def test_get_returns_item(self):
        store = PostgresBacklogStore(dsn="postgresql://test/db")
        item = _make_item()
        row, desc = _build_row(item)
        cur = MagicMock()
        cur.fetchone.return_value = row
        cur.description = desc
        conn = MagicMock()
        conn.__enter__.return_value = conn
        conn.__exit__.return_value = False
        conn.cursor.return_value.__enter__.return_value = cur
        conn.cursor.return_value.__exit__.return_value = False
        with patch.object(store, "_conn", return_value=conn):
            found = store.get(item.id)
        assert found is not None
        assert found.id == item.id
        assert found.service == "router"

    def test_get_returns_none_when_missing(self):
        store = PostgresBacklogStore(dsn="postgresql://test/db")
        cur = MagicMock()
        # Empty result set: fetchone() yields None for an unknown id.
        cur.fetchone.return_value = None
        conn = MagicMock()
        conn.__enter__.return_value = conn
        conn.__exit__.return_value = False
        conn.cursor.return_value.__enter__.return_value = cur
        conn.cursor.return_value.__exit__.return_value = False
        with patch.object(store, "_conn", return_value=conn):
            found = store.get("no_such_id")
        assert found is None
class TestPostgresGetByDedupeKey:
    """get_by_dedupe_key() should look the item up by its dedupe key."""

    def test_get_by_dedupe_key_found(self):
        store = PostgresBacklogStore(dsn="postgresql://test/db")
        item = _make_item()
        row, desc = _build_row(item)
        cur = MagicMock()
        cur.fetchone.return_value = row
        cur.description = desc
        conn = MagicMock()
        conn.__enter__.return_value = conn
        conn.__exit__.return_value = False
        conn.cursor.return_value.__enter__.return_value = cur
        conn.cursor.return_value.__exit__.return_value = False
        with patch.object(store, "_conn", return_value=conn):
            found = store.get_by_dedupe_key(item.dedupe_key)
        assert found is not None
        assert found.dedupe_key == item.dedupe_key
class TestPostgresUpdate:
    """update() should issue an UPDATE against the backlog table."""

    def test_update_executes_sql(self):
        store = PostgresBacklogStore(dsn="postgresql://test/db")
        item = _make_item()
        cur = MagicMock()
        conn = MagicMock()
        conn.__enter__.return_value = conn
        conn.__exit__.return_value = False
        conn.cursor.return_value.__enter__.return_value = cur
        conn.cursor.return_value.__exit__.return_value = False
        with patch.object(store, "_conn", return_value=conn):
            store.update(item)
        statement = cur.execute.call_args[0][0]
        assert "UPDATE backlog_items" in statement
class TestPostgresListItems:
    """list_items() should translate filters into a WHERE clause."""

    def test_list_with_env_filter(self):
        store = PostgresBacklogStore(dsn="postgresql://test/db")
        item = _make_item()
        row, desc = _build_row(item)
        cur = MagicMock()
        cur.fetchall.return_value = [row]
        cur.description = desc
        conn = MagicMock()
        conn.__enter__.return_value = conn
        conn.__exit__.return_value = False
        conn.cursor.return_value.__enter__.return_value = cur
        conn.cursor.return_value.__exit__.return_value = False
        with patch.object(store, "_conn", return_value=conn):
            results = store.list_items({"env": "prod"})
        assert len(results) == 1
        statement = cur.execute.call_args[0][0]
        assert "WHERE" in statement
        assert "env" in statement
class TestPostgresCleanup:
    """cleanup() should delete finished rows and report the count."""

    def test_cleanup_runs_delete(self):
        store = PostgresBacklogStore(dsn="postgresql://test/db")
        cur = MagicMock()
        # rowcount is how the store learns how many rows the DELETE touched.
        cur.rowcount = 3
        conn = MagicMock()
        conn.__enter__.return_value = conn
        conn.__exit__.return_value = False
        conn.cursor.return_value.__enter__.return_value = cur
        conn.cursor.return_value.__exit__.return_value = False
        with patch.object(store, "_conn", return_value=conn):
            removed = store.cleanup(retention_days=180)
        assert removed == 3
        statement = cur.execute.call_args[0][0]
        assert "DELETE FROM backlog_items" in statement
        assert "done" in statement or "canceled" in statement