New router intelligence modules (26 files): alert_ingest/store, audit_store, architecture_pressure, backlog_generator/store, cost_analyzer, data_governance, dependency_scanner, drift_analyzer, incident_* (5 files), llm_enrichment, platform_priority_digest, provider_budget, release_check_runner, risk_* (6 files), signature_state_store, sofiia_auto_router, tool_governance New services: - sofiia-console: Dockerfile, adapters/, monitor/nodes/ops/voice modules, launchd, react static - memory-service: integration_endpoints, integrations, voice_endpoints, static UI - aurora-service: full app suite (analysis, job_store, orchestrator, reporting, schemas, subagents) - sofiia-supervisor: new supervisor service - aistalk-bridge-lite: Telegram bridge lite - calendar-service: CalDAV calendar service with reminders - mlx-stt-service / mlx-tts-service: Apple Silicon speech services - binance-bot-monitor: market monitor service - node-worker: STT/TTS memory providers New tools (9): agent_email, browser_tool, contract_tool, observability_tool, oncall_tool, pr_reviewer_tool, repo_tool, safe_code_executor, secure_vault New crews: agromatrix_crew (10 modules: depth_classifier, doc_facts, doc_focus, farm_state, light_reply, llm_factory, memory_manager, proactivity, reflection_engine, session_context, style_adapter, telemetry) Tests: 85+ test files for all new modules Made-with: Cursor
207 lines
7.4 KiB
Python
207 lines
7.4 KiB
Python
"""
|
|
tests/test_backlog_store_jsonl.py — JSONL backend unit tests.
|
|
"""
|
|
import datetime
|
|
import json
|
|
import os
|
|
import sys
|
|
import tempfile
|
|
|
|
import pytest
|
|
|
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "services", "router"))
|
|
|
|
from backlog_store import (
|
|
BacklogItem, BacklogEvent,
|
|
JsonlBacklogStore, _new_id, _now_iso,
|
|
)
|
|
|
|
|
|
def _make_item(**overrides) -> BacklogItem:
    """Build a BacklogItem with sensible defaults; any field may be overridden."""
    fields = {
        "id": _new_id("bl"),
        "created_at": _now_iso(),
        "updated_at": _now_iso(),
        "env": "prod",
        "service": "gateway",
        "category": "arch_review",
        "title": "[ARCH] Review required: gateway",
        "description": "Test item",
        "priority": "P1",
        "status": "open",
        "owner": "oncall",
        "due_date": "2026-03-15",
        "source": "digest",
        "dedupe_key": "platform_backlog:2026-W08:prod:gateway:arch_review",
        "evidence_refs": {},
        "tags": ["auto"],
        "meta": {},
    }
    fields.update(overrides)
    return BacklogItem.from_dict(fields)
|
|
|
|
|
|
@pytest.fixture
def store(tmp_path):
    """Fresh JsonlBacklogStore backed by files inside a per-test temp dir."""
    return JsonlBacklogStore(
        items_path=str(tmp_path / "items.jsonl"),
        events_path=str(tmp_path / "events.jsonl"),
    )
|
|
|
|
|
|
class TestJsonlCreate:
    """Creation and retrieval via the JSONL backend."""

    def test_create_and_get(self, store):
        created = store.create(_make_item())
        got = store.get(created.id)
        assert got is not None
        assert got.service == "gateway"

    def test_get_unknown_returns_none(self, store):
        assert store.get("no_such_id") is None

    def test_create_persists_to_file(self, store, tmp_path):
        record = _make_item()
        store.create(record)
        # One non-blank JSONL line should have been appended for the item.
        with open(tmp_path / "items.jsonl") as fh:
            rows = [line for line in fh if line.strip()]
        assert len(rows) == 1
        assert json.loads(rows[0])["id"] == record.id
|
|
|
|
|
|
class TestJsonlDedupeKey:
    """Lookup and uniqueness semantics of dedupe_key."""

    def test_get_by_dedupe_key_found(self, store):
        original = _make_item()
        store.create(original)
        match = store.get_by_dedupe_key(original.dedupe_key)
        assert match is not None
        assert match.id == original.id

    def test_get_by_dedupe_key_not_found(self, store):
        assert store.get_by_dedupe_key("nonexistent") is None

    def test_dedupe_key_uniqueness_via_upsert(self, store):
        first = _make_item()
        outcome_one = store.upsert(first)
        duplicate = _make_item(
            id=_new_id("bl"),
            dedupe_key=first.dedupe_key,
            title="Updated title",
        )
        outcome_two = store.upsert(duplicate)
        assert outcome_one["action"] == "created"
        assert outcome_two["action"] == "updated"
        # The shared dedupe_key must still map to exactly one stored item.
        stored = store.list_items({"env": "prod"}, limit=100)
        assert sum(1 for it in stored if it.dedupe_key == first.dedupe_key) == 1
|
|
|
|
|
|
class TestJsonlListFilters:
    """Filtering behaviour of list_items."""

    def test_list_all(self, store):
        for svc, key in (("svc_a", "k1"), ("svc_b", "k2")):
            store.create(_make_item(id=_new_id("bl"), service=svc, dedupe_key=key))
        assert len(store.list_items({"env": "prod"})) >= 2

    def test_filter_by_service(self, store):
        store.create(_make_item(id=_new_id("bl"), service="svc_a", dedupe_key="ka"))
        store.create(_make_item(id=_new_id("bl"), service="svc_b", dedupe_key="kb"))
        matched = store.list_items({"service": "svc_a"})
        assert all(entry.service == "svc_a" for entry in matched)

    def test_filter_by_status(self, store):
        store.create(_make_item(id=_new_id("bl"), status="open", dedupe_key="d1"))
        store.create(_make_item(id=_new_id("bl"), status="done", dedupe_key="d2"))
        matched = store.list_items({"status": "done"})
        assert all(entry.status == "done" for entry in matched)

    def test_filter_by_category(self, store):
        store.create(_make_item(id=_new_id("bl"), category="arch_review", dedupe_key="c1"))
        store.create(_make_item(id=_new_id("bl"), category="refactor", dedupe_key="c2"))
        matched = store.list_items({"category": "refactor"})
        assert all(entry.category == "refactor" for entry in matched)

    def test_filter_due_before(self, store):
        store.create(_make_item(id=_new_id("bl"), due_date="2025-01-01", dedupe_key="old"))
        store.create(_make_item(id=_new_id("bl"), due_date="2027-01-01", dedupe_key="future"))
        matched = store.list_items({"due_before": "2026-01-01"})
        # ISO-8601 date strings compare correctly lexicographically.
        assert all(entry.due_date < "2026-01-01" for entry in matched if entry.due_date)
|
|
|
|
|
|
class TestJsonlUpdate:
    """update() must persist field changes."""

    def test_update_reflects_new_title(self, store):
        record = _make_item()
        store.create(record)
        record.title = "New Title"
        store.update(record)
        refreshed = store.get(record.id)
        assert refreshed.title == "New Title"
|
|
|
|
|
|
class TestJsonlEvents:
    """Event log attached to backlog items."""

    def test_add_and_get_event(self, store):
        parent = _make_item()
        store.create(parent)
        store.add_event(BacklogEvent(
            id=_new_id("ev"), item_id=parent.id, ts=_now_iso(),
            type="comment", message="Test comment", actor="oncall",
        ))
        recorded = store.get_events(parent.id)
        assert len(recorded) == 1
        assert recorded[0].message == "Test comment"

    def test_events_scoped_to_item(self, store):
        first = _make_item(id=_new_id("bl"), dedupe_key="k1")
        second = _make_item(id=_new_id("bl"), dedupe_key="k2")
        for it in (first, second):
            store.create(it)
        for target, text, who in ((first, "A", "a"), (second, "B", "b")):
            store.add_event(BacklogEvent(id=_new_id("ev"), item_id=target.id,
                                         ts=_now_iso(), type="comment",
                                         message=text, actor=who))
        # Fetching by item id must not leak the other item's events.
        assert all(ev.item_id == first.id for ev in store.get_events(first.id))
|
|
|
|
|
|
class TestJsonlCleanup:
    """Retention cleanup: only stale 'done' items should be purged."""

    def test_cleanup_removes_old_done_items(self, store):
        # datetime.utcnow() is deprecated since Python 3.12; build an aware
        # UTC "now" and strip tzinfo so the isoformat stays naive (no +00:00
        # suffix), matching the timestamp format used elsewhere in the tests.
        old_ts = (
            datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
            - datetime.timedelta(days=200)
        ).isoformat()
        item = _make_item(status="done", updated_at=old_ts, dedupe_key="old_done")
        store.create(item)
        deleted = store.cleanup(retention_days=180)
        assert deleted == 1
        assert store.get(item.id) is None

    def test_cleanup_keeps_open_items(self, store):
        # Open items are retained regardless of age / retention window.
        item = _make_item(status="open", dedupe_key="open_item")
        store.create(item)
        deleted = store.cleanup(retention_days=1)
        assert deleted == 0
        assert store.get(item.id) is not None

    def test_cleanup_keeps_recent_done(self, store):
        # A just-updated done item is inside the retention window.
        item = _make_item(status="done", dedupe_key="recent_done")
        store.create(item)
        deleted = store.cleanup(retention_days=180)
        assert deleted == 0
|
|
|
|
|
|
class TestJsonlDashboard:
    """Shape and overdue accounting of the dashboard summary."""

    def test_dashboard_structure(self, store):
        store.create(_make_item(id=_new_id("bl"), dedupe_key="d1", status="open"))
        store.create(_make_item(id=_new_id("bl"), dedupe_key="d2", status="done"))
        summary = store.dashboard(env="prod")
        for key in ("total", "status_counts", "priority_counts", "overdue", "top_services"):
            assert key in summary

    def test_dashboard_overdue(self, store):
        past_due = "2020-01-01"
        store.create(_make_item(id=_new_id("bl"), due_date=past_due,
                                status="open", dedupe_key="overdue_item"))
        summary = store.dashboard(env="prod")
        assert summary["overdue_count"] >= 1