feat(platform): add new services, tools, tests and crews modules

New router intelligence modules (26 files): alert_ingest/store, audit_store,
architecture_pressure, backlog_generator/store, cost_analyzer, data_governance,
dependency_scanner, drift_analyzer, incident_* (5 files), llm_enrichment,
platform_priority_digest, provider_budget, release_check_runner, risk_* (6 files),
signature_state_store, sofiia_auto_router, tool_governance

New services:
- sofiia-console: Dockerfile, adapters/, monitor/nodes/ops/voice modules, launchd, react static
- memory-service: integration_endpoints, integrations, voice_endpoints, static UI
- aurora-service: full app suite (analysis, job_store, orchestrator, reporting, schemas, subagents)
- sofiia-supervisor: new supervisor service
- aistalk-bridge-lite: Telegram bridge lite
- calendar-service: CalDAV calendar service with reminders
- mlx-stt-service / mlx-tts-service: Apple Silicon speech services
- binance-bot-monitor: market monitor service
- node-worker: STT/TTS memory providers

New tools (9): agent_email, browser_tool, contract_tool, observability_tool,
oncall_tool, pr_reviewer_tool, repo_tool, safe_code_executor, secure_vault

New crews: agromatrix_crew (12 modules: depth_classifier, doc_facts, doc_focus,
farm_state, light_reply, llm_factory, memory_manager, proactivity, reflection_engine,
session_context, style_adapter, telemetry)

Tests: 85+ test files for all new modules
Made-with: Cursor
This commit is contained in:
Apple
2026-03-03 07:14:14 -08:00
parent e9dedffa48
commit 129e4ea1fc
241 changed files with 69349 additions and 0 deletions

View File

@@ -0,0 +1,164 @@
"""
Tests for Observability Tool
"""
import pytest
import os
import sys
from unittest.mock import AsyncMock, patch

# Make the repo root importable so `services.router.tool_manager` resolves
# regardless of the directory pytest is invoked from. Three dirname() hops:
# this file -> its tests directory -> its parent package -> repo root.
# (Fixed: the original line had one extra closing parenthesis, which was a
# SyntaxError that prevented this module from being collected at all.)
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))

from services.router.tool_manager import ToolManager, ToolResult
class TestObservabilityToolMetrics:
    """Exercise the metrics_query action of the observability tool."""

    @pytest.mark.asyncio
    async def test_metrics_query_valid_promql(self):
        """An allowlisted PromQL query should produce a successful result."""
        manager = ToolManager({})
        fake_resp = AsyncMock()
        fake_resp.status_code = 200
        # NOTE(review): AsyncMock makes .json() awaitable; confirm the tool
        # awaits response.json() rather than calling it synchronously.
        fake_resp.json.return_value = {
            "status": "success",
            "data": {
                "result": [
                    {"metric": {"service": "gateway"}, "value": [1234567890, "0.5"]}
                ]
            },
        }
        with patch.object(manager.http_client, 'get') as fake_get:
            fake_get.return_value = fake_resp
            outcome = await manager._observability_tool({
                "action": "metrics_query",
                "params": {
                    "query": "rate(http_requests_total[5m])",
                    "datasource": "prometheus",
                },
            })
            assert outcome.success is True

    @pytest.mark.asyncio
    async def test_metrics_query_invalid_promql_blocked(self):
        """A query outside the allowlist must be rejected with an error."""
        manager = ToolManager({})
        outcome = await manager._observability_tool({
            "action": "metrics_query",
            "params": {
                "query": "group(*) by (service)",  # Not in allowlist
                "datasource": "prometheus",
            },
        })
        assert outcome.success is False
        assert "allowed" in outcome.error.lower()
class TestObservabilityToolLogs:
    """Exercise the logs_query action of the observability tool."""

    @pytest.mark.asyncio
    async def test_logs_query_time_limit(self):
        """A 48h window (over the 24h cap) must be rejected."""
        manager = ToolManager({})
        request = {
            "action": "logs_query",
            "params": {
                "query": "{service=\"gateway\"}",
                "time_range": {
                    "from": "2024-01-01T00:00:00Z",
                    "to": "2024-01-03T00:00:00Z",
                },
            },
        }
        outcome = await manager._observability_tool(request)
        assert outcome.success is False
        assert "limit" in outcome.error.lower()
class TestObservabilityToolTraces:
    """Exercise trace lookups via the observability tool."""

    @pytest.mark.asyncio
    async def test_traces_query_by_id(self):
        """Fetching a trace by its ID should succeed on a 200 response."""
        manager = ToolManager({})
        fake_resp = AsyncMock()
        fake_resp.status_code = 200
        fake_resp.json.return_value = {"batches": []}
        with patch.object(manager.http_client, 'get') as fake_get:
            fake_get.return_value = fake_resp
            outcome = await manager._observability_tool({
                "action": "traces_query",
                "params": {"trace_id": "abc123"},
            })
            assert outcome.success is True
class TestObservabilityToolServiceOverview:
    """Exercise the service_overview action of the observability tool."""

    @pytest.mark.asyncio
    async def test_service_overview_default_time(self):
        """An overview with no explicit time range should use the default and succeed."""
        manager = ToolManager({})
        fake_resp = AsyncMock()
        fake_resp.status_code = 200
        fake_resp.json.return_value = {
            "status": "success",
            "data": {
                "result": [
                    {"value": [1234567890, "0.1"]}
                ]
            },
        }
        with patch.object(manager.http_client, 'get') as fake_get:
            fake_get.return_value = fake_resp
            outcome = await manager._observability_tool({
                "action": "service_overview",
                "params": {"service": "gateway"},
            })
            assert outcome.success is True
            assert "metrics" in outcome.result
class TestObservabilityToolSecurity:
    """Exercise the tool's safety behaviour (timeouts / graceful failure)."""

    @pytest.mark.asyncio
    async def test_query_timeout(self):
        """With no backend mocked, the tool must still return a result object."""
        manager = ToolManager({})
        outcome = await manager._observability_tool({
            "action": "metrics_query",
            "params": {"query": "rate(http_requests_total[5m])"},
        })
        # Graceful success or failure is acceptable; an unhandled exception
        # or a None return is not.
        assert outcome is not None
# Allow running this test module directly (python test_file.py) instead of
# invoking pytest from the command line; "-v" gives per-test output.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])