New router intelligence modules (26 files): alert_ingest/store, audit_store, architecture_pressure, backlog_generator/store, cost_analyzer, data_governance, dependency_scanner, drift_analyzer, incident_* (5 files), llm_enrichment, platform_priority_digest, provider_budget, release_check_runner, risk_* (6 files), signature_state_store, sofiia_auto_router, tool_governance New services: - sofiia-console: Dockerfile, adapters/, monitor/nodes/ops/voice modules, launchd, react static - memory-service: integration_endpoints, integrations, voice_endpoints, static UI - aurora-service: full app suite (analysis, job_store, orchestrator, reporting, schemas, subagents) - sofiia-supervisor: new supervisor service - aistalk-bridge-lite: Telegram bridge lite - calendar-service: CalDAV calendar service with reminders - mlx-stt-service / mlx-tts-service: Apple Silicon speech services - binance-bot-monitor: market monitor service - node-worker: STT/TTS memory providers New tools (9): agent_email, browser_tool, contract_tool, observability_tool, oncall_tool, pr_reviewer_tool, repo_tool, safe_code_executor, secure_vault New crews: agromatrix_crew (10 modules: depth_classifier, doc_facts, doc_focus, farm_state, light_reply, llm_factory, memory_manager, proactivity, reflection_engine, session_context, style_adapter, telemetry) Tests: 85+ test files for all new modules Made-with: Cursor
248 lines
9.5 KiB
Python
248 lines
9.5 KiB
Python
"""
Tests for alert_store + alert_ingest logic.

Covers: ingest with dedupe, list/get/ack, RBAC entitlements, severity validation.
"""
|
|
import os
|
|
import sys
|
|
import time
|
|
from datetime import datetime, timedelta
|
|
from pathlib import Path
|
|
from unittest.mock import patch
|
|
|
|
# Make the router service importable regardless of where pytest is invoked:
# resolve the repo root two levels up from this test file and prepend the
# services/router directory to sys.path exactly once.
ROOT = Path(__file__).resolve().parents[1]
ROUTER = ROOT / "services" / "router"
_router_entry = str(ROUTER)
if _router_entry not in sys.path:
    sys.path.insert(0, _router_entry)
|
|
|
|
|
|
def _make_alert(service="gateway", severity="P1", kind="slo_breach",
|
|
fingerprint="abc123", title="High latency"):
|
|
return {
|
|
"source": "monitor@node1",
|
|
"service": service,
|
|
"env": "prod",
|
|
"severity": severity,
|
|
"kind": kind,
|
|
"title": title,
|
|
"summary": f"{service} is experiencing {kind}",
|
|
"started_at": datetime.utcnow().isoformat(),
|
|
"labels": {"node": "node1", "fingerprint": fingerprint},
|
|
"metrics": {"latency_p95_ms": 450, "error_rate_pct": 2.5},
|
|
"evidence": {
|
|
"log_samples": ["ERROR timeout after 30s", "WARN retry 3/3"],
|
|
},
|
|
}
|
|
|
|
|
|
class TestMemoryAlertStoreIngest:
    """Ingest, dedupe, list/get, and ack flows on the in-memory store."""

    def setup_method(self):
        # Fresh store per test, also registered as the process-global one.
        from alert_store import MemoryAlertStore, set_alert_store
        self.store = MemoryAlertStore()
        set_alert_store(self.store)

    def teardown_method(self):
        from alert_store import set_alert_store
        set_alert_store(None)  # keep tests isolated from one another

    def test_ingest_new_alert(self):
        from alert_ingest import ingest_alert
        res = ingest_alert(self.store, _make_alert())
        assert res["accepted"] is True
        assert res["deduped"] is False
        assert res["occurrences"] == 1
        assert res["alert_ref"].startswith("alrt_")
        assert len(res["dedupe_key"]) == 32

    def test_ingest_duplicate_within_ttl(self):
        from alert_ingest import ingest_alert
        payload = _make_alert(fingerprint="dup_key")
        first = ingest_alert(self.store, payload, dedupe_ttl_minutes=30)
        second = ingest_alert(self.store, payload, dedupe_ttl_minutes=30)
        assert second["deduped"] is True
        assert second["occurrences"] == 2
        assert second["alert_ref"] == first["alert_ref"]

    def test_ingest_after_ttl_creates_new(self):
        from alert_ingest import ingest_alert
        from alert_store import MemoryAlertStore
        payload = _make_alert(fingerprint="expire_test")
        fresh_store = MemoryAlertStore()
        first = ingest_alert(fresh_store, payload, dedupe_ttl_minutes=30)

        # Back-date created_at past the TTL so the dedupe window has lapsed.
        with fresh_store._lock:
            ref = first["alert_ref"]
            aged = datetime.utcnow() - timedelta(minutes=60)
            fresh_store._alerts[ref]["created_at"] = aged.isoformat()

        second = ingest_alert(fresh_store, payload, dedupe_ttl_minutes=30)
        assert second["deduped"] is False
        # New ref or same ref (depending on whether store evicts) — occurrences reset
        assert second["occurrences"] == 1

    def test_different_fingerprint_creates_separate(self):
        from alert_ingest import ingest_alert
        res_a = ingest_alert(self.store, _make_alert(fingerprint="a"))
        res_b = ingest_alert(self.store, _make_alert(fingerprint="b"))
        assert res_a["alert_ref"] != res_b["alert_ref"]
        assert res_a["dedupe_key"] != res_b["dedupe_key"]

    def test_list_alerts(self):
        from alert_ingest import ingest_alert, list_alerts
        for svc in ("gateway", "router"):
            ingest_alert(self.store, _make_alert(service=svc))
        assert len(list_alerts(self.store)) >= 2

        gw_only = list_alerts(self.store, service="gateway")
        assert all(item["service"] == "gateway" for item in gw_only)

    def test_get_alert(self):
        from alert_ingest import get_alert, ingest_alert
        ref = ingest_alert(self.store, _make_alert())["alert_ref"]
        fetched = get_alert(self.store, ref)
        assert fetched is not None
        assert fetched["alert_ref"] == ref
        assert fetched["service"] == "gateway"
        assert "evidence" in fetched

    def test_get_nonexistent(self):
        from alert_ingest import get_alert
        assert get_alert(self.store, "nonexistent") is None

    def test_ack_alert(self):
        from alert_ingest import ack_alert, get_alert, ingest_alert
        ref = ingest_alert(self.store, _make_alert())["alert_ref"]
        acked = ack_alert(self.store, ref, "sofiia", note="handled")
        assert acked["ack_status"] == "acked"
        fetched = get_alert(self.store, ref)
        assert fetched["ack_status"] == "acked"
        assert fetched["ack_actor"] == "sofiia"

    def test_ack_nonexistent(self):
        from alert_ingest import ack_alert
        assert ack_alert(self.store, "nonexistent", "sofiia") is None
|
|
|
|
|
|
class TestAlertValidation:
    """Schema validation, secret redaction, and evidence truncation on ingest."""

    def setup_method(self):
        from alert_store import MemoryAlertStore, set_alert_store
        self.store = MemoryAlertStore()
        set_alert_store(self.store)

    def teardown_method(self):
        from alert_store import set_alert_store
        set_alert_store(None)

    def test_missing_service_rejected(self):
        from alert_ingest import ingest_alert
        payload = _make_alert()
        del payload["service"]
        res = ingest_alert(self.store, payload)
        assert res["accepted"] is False
        assert "service" in res["error"]

    def test_missing_title_rejected(self):
        from alert_ingest import ingest_alert
        payload = _make_alert()
        del payload["title"]
        assert ingest_alert(self.store, payload)["accepted"] is False

    def test_invalid_severity_rejected(self):
        from alert_ingest import ingest_alert
        payload = _make_alert()
        payload["severity"] = "CRITICAL"  # not a member of the severity enum
        assert ingest_alert(self.store, payload)["accepted"] is False

    def test_invalid_kind_rejected(self):
        from alert_ingest import ingest_alert
        payload = _make_alert()
        payload["kind"] = "unknown_kind"
        assert ingest_alert(self.store, payload)["accepted"] is False

    def test_secret_redacted_in_summary(self):
        from alert_ingest import get_alert, ingest_alert
        payload = _make_alert()
        payload["summary"] = "Error: token=sk-secret123 caused issue"
        ref = ingest_alert(self.store, payload)["alert_ref"]
        stored = get_alert(self.store, ref)
        # The raw secret must be gone and replaced with a redaction marker.
        assert "sk-secret123" not in stored["summary"]
        assert "***" in stored["summary"]

    def test_evidence_truncated(self):
        from alert_ingest import get_alert, ingest_alert
        payload = _make_alert()
        payload["evidence"] = {"log_samples": [f"line {i}" for i in range(100)]}
        ref = ingest_alert(self.store, payload)["alert_ref"]
        stored = get_alert(self.store, ref)
        assert len(stored["evidence"]["log_samples"]) <= 40
|
|
|
|
|
|
class TestAlertRBAC:
    """Verify RBAC entitlements for alert_ingest_tool actions.

    The YAML-matrix loading was duplicated verbatim in every test; it is
    now factored into a single helper so a schema change touches one place.
    """

    @staticmethod
    def _entitlements(role):
        """Load config/rbac_tools_matrix.yml and return *role*'s entitlement set."""
        import yaml
        rbac_path = ROOT / "config" / "rbac_tools_matrix.yml"
        with open(rbac_path) as f:
            matrix = yaml.safe_load(f)
        return set(matrix["role_entitlements"][role])

    def test_monitor_has_ingest_entitlement(self):
        assert "tools.alerts.ingest" in self._entitlements("agent_monitor")

    def test_monitor_has_no_ack_entitlement(self):
        assert "tools.alerts.ack" not in self._entitlements("agent_monitor")

    def test_cto_has_all_alert_entitlements(self):
        cto_ents = self._entitlements("agent_cto")
        for ent in ("tools.alerts.ingest", "tools.alerts.read", "tools.alerts.ack"):
            assert ent in cto_ents, f"Missing: {ent}"

    def test_interface_has_read_only(self):
        iface_ents = self._entitlements("agent_interface")
        assert "tools.alerts.read" in iface_ents
        assert "tools.alerts.ack" not in iface_ents
        assert "tools.alerts.ingest" not in iface_ents
|
|
|
|
|
|
class TestAlertStoreFactory:
    """Backend selection via the ALERT_BACKEND / DATABASE_URL env vars."""

    def test_default_is_memory(self):
        from alert_store import MemoryAlertStore, _create_alert_store
        with patch.dict(os.environ, {"ALERT_BACKEND": "memory"}, clear=False):
            assert isinstance(_create_alert_store(), MemoryAlertStore)

    def test_auto_with_dsn(self):
        from alert_store import AutoAlertStore, _create_alert_store
        overrides = {
            "ALERT_BACKEND": "auto",
            "DATABASE_URL": "postgresql://x:x@localhost/test",
        }
        with patch.dict(os.environ, overrides, clear=False):
            assert isinstance(_create_alert_store(), AutoAlertStore)

    def test_auto_without_dsn_gives_memory(self):
        from alert_store import MemoryAlertStore, _create_alert_store
        # Rebuild the environment without any DB DSN so "auto" falls back.
        dsn_keys = ("DATABASE_URL", "ALERT_DATABASE_URL")
        env = {k: v for k, v in os.environ.items() if k not in dsn_keys}
        env["ALERT_BACKEND"] = "auto"
        with patch.dict(os.environ, env, clear=True):
            assert isinstance(_create_alert_store(), MemoryAlertStore)
|