feat(platform): add new services, tools, tests and crews modules
New router intelligence modules (26 files): alert_ingest/store, audit_store, architecture_pressure, backlog_generator/store, cost_analyzer, data_governance, dependency_scanner, drift_analyzer, incident_* (5 files), llm_enrichment, platform_priority_digest, provider_budget, release_check_runner, risk_* (6 files), signature_state_store, sofiia_auto_router, tool_governance New services: - sofiia-console: Dockerfile, adapters/, monitor/nodes/ops/voice modules, launchd, react static - memory-service: integration_endpoints, integrations, voice_endpoints, static UI - aurora-service: full app suite (analysis, job_store, orchestrator, reporting, schemas, subagents) - sofiia-supervisor: new supervisor service - aistalk-bridge-lite: Telegram bridge lite - calendar-service: CalDAV calendar service with reminders - mlx-stt-service / mlx-tts-service: Apple Silicon speech services - binance-bot-monitor: market monitor service - node-worker: STT/TTS memory providers New tools (9): agent_email, browser_tool, contract_tool, observability_tool, oncall_tool, pr_reviewer_tool, repo_tool, safe_code_executor, secure_vault New crews: agromatrix_crew (10 modules: depth_classifier, doc_facts, doc_focus, farm_state, light_reply, llm_factory, memory_manager, proactivity, reflection_engine, session_context, style_adapter, telemetry) Tests: 85+ test files for all new modules Made-with: Cursor
This commit is contained in:
152
tests/test_risk_timeline.py
Normal file
152
tests/test_risk_timeline.py
Normal file
@@ -0,0 +1,152 @@
|
||||
"""
|
||||
tests/test_risk_timeline.py
|
||||
|
||||
Unit tests for build_timeline() in risk_attribution.py:
|
||||
- Buckets multiple same-type events in same time window into one item
|
||||
- Includes incident escalation events
|
||||
- Respects max_items limit
|
||||
- Sorts newest-first
|
||||
"""
|
||||
import sys, os
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../services/router"))
|
||||
|
||||
import datetime
|
||||
import pytest
|
||||
from risk_attribution import build_timeline
|
||||
|
||||
|
||||
def _now() -> str:
    """Return the current UTC time as a naive ISO-8601 string.

    Uses ``datetime.now(timezone.utc)`` with the tzinfo stripped instead of
    the deprecated ``datetime.utcnow()`` (deprecated since Python 3.12); the
    resulting naive timestamp string is identical.
    """
    return datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None).isoformat()
|
||||
|
||||
|
||||
def _ts(minutes_ago: int) -> str:
    """Return a naive UTC ISO-8601 timestamp *minutes_ago* minutes in the past.

    Uses ``datetime.now(timezone.utc)`` with the tzinfo stripped instead of
    the deprecated ``datetime.utcnow()`` (deprecated since Python 3.12); the
    resulting naive timestamp string is identical.
    """
    base = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
    return (base - datetime.timedelta(minutes=minutes_ago)).isoformat()
|
||||
|
||||
|
||||
# Baseline timeline policy shared by every test below; individual tests
# override single keys by shallow-merging on top of this dict.
_POLICY = {
    "timeline": {
        "enabled": True,
        "lookback_hours": 24,
        "max_items": 30,
        "include_types": [
            "deploy",
            "incident",
            "slo",
            "followup",
            "alert_loop",
            "release_gate",
            "dependency",
            "drift",
            "alert",
        ],
        "time_bucket_minutes": 5,
    },
}
|
||||
|
||||
|
||||
class TestBuildTimeline:
    """Behavioral tests for build_timeline(): bucketing, filtering, ordering."""

    @staticmethod
    def _five_min_bucket():
        """Return the start of the current 5-minute wall-clock bucket (naive UTC)."""
        moment = datetime.datetime.utcnow().replace(second=0, microsecond=0)
        return moment - datetime.timedelta(minutes=moment.minute % 5)

    def test_empty_input(self):
        assert build_timeline([], _POLICY) == []

    def test_single_event(self):
        evs = [{"ts": _ts(10), "type": "deploy", "label": "Deploy: canary", "refs": {}}]
        items = build_timeline(evs, _POLICY)
        assert len(items) == 1
        first = items[0]
        assert first["type"] == "deploy"
        assert first["label"] == "Deploy: canary"

    def test_newest_first(self):
        evs = [
            {"ts": _ts(60), "type": "deploy", "label": "Old deploy", "refs": {}},
            {"ts": _ts(10), "type": "incident", "label": "New incident", "refs": {}},
        ]
        items = build_timeline(evs, _POLICY)
        # Most recent event must lead the timeline.
        assert [e["type"] for e in items[:2]] == ["incident", "deploy"]

    def test_buckets_same_type_same_window(self):
        """Multiple deploy alerts in the same 5-min window → coalesced to 1 item with xN."""
        start = self._five_min_bucket()
        evs = []
        for idx in range(4):
            evs.append({
                "ts": (start + datetime.timedelta(seconds=idx)).isoformat(),
                "type": "deploy",
                "label": "Deploy alert",
                "refs": {"alert_ref": f"alrt_{idx}"},
            })
        items = build_timeline(evs, _POLICY)
        deploys = [e for e in items if e["type"] == "deploy"]
        # All four fall into one bucket, so exactly one coalesced entry.
        assert len(deploys) == 1
        assert "×4" in deploys[0]["label"]

    def test_different_types_not_bucketed_together(self):
        same_ts = self._five_min_bucket().isoformat()
        evs = [
            {"ts": same_ts, "type": "deploy", "label": "Deploy", "refs": {}},
            {"ts": same_ts, "type": "incident", "label": "Incident", "refs": {}},
        ]
        # Identical timestamps but distinct types stay separate.
        assert len(build_timeline(evs, _POLICY)) == 2

    def test_max_items_respected(self):
        evs = [
            {"ts": _ts(idx * 6), "type": "alert", "label": f"Alert {idx}", "refs": {}}
            for idx in range(50)
        ]
        capped = dict(_POLICY, timeline=dict(_POLICY["timeline"], max_items=5))
        assert len(build_timeline(evs, capped)) == 5

    def test_include_types_filter(self):
        evs = [
            {"ts": _ts(10), "type": "deploy", "label": "Deploy", "refs": {}},
            {"ts": _ts(20), "type": "unknown_type", "label": "Unknown", "refs": {}},
        ]
        only_deploys = dict(
            _POLICY, timeline=dict(_POLICY["timeline"], include_types=["deploy"])
        )
        for item in build_timeline(evs, only_deploys):
            assert item["type"] == "deploy"

    def test_incident_escalation_included(self):
        evs = [
            {
                "ts": _ts(5),
                "type": "incident",
                "label": "Incident escalated: inc_001",
                "refs": {"incident_id": "inc_001"},
            },
        ]
        items = build_timeline(evs, _POLICY)
        assert len(items) == 1
        assert "inc_001" in str(items[0]["refs"])

    def test_timeline_disabled(self):
        switched_off = dict(_POLICY, timeline=dict(_POLICY["timeline"], enabled=False))
        evs = [{"ts": _ts(5), "type": "deploy", "label": "D", "refs": {}}]
        assert build_timeline(evs, switched_off) == []

    def test_refs_preserved(self):
        evs = [{
            "ts": _ts(5),
            "type": "deploy",
            "label": "Canary deploy",
            "refs": {"alert_ref": "alrt_xyz", "service": "gateway"},
        }]
        items = build_timeline(evs, _POLICY)
        assert len(items) == 1
        # refs may come back as a dict or a list of tuples; only presence matters.
        assert "alrt_xyz" in str(items[0]["refs"])

    def test_bucketed_item_refs_merged(self):
        """When items coalesce, refs from multiple events are merged (up to 5)."""
        start = self._five_min_bucket()
        evs = [
            {
                "ts": (start + datetime.timedelta(seconds=idx)).isoformat(),
                "type": "deploy",
                "label": "Deploy",
                "refs": {"alert_ref": f"alrt_{idx}"},
            }
            for idx in range(3)
        ]
        items = build_timeline(evs, _POLICY)
        assert len(items) == 1
        # Coalesced entry must carry at least one of the merged alert refs.
        assert "alrt_" in str(items[0]["refs"])
|
||||
Reference in New Issue
Block a user