feat: MD pipeline — market-data-service hardening + SenpAI NATS consumer
Producer (market-data-service):
- Backpressure: smart drop policy (heartbeats→quotes→trades preserved)
- Heartbeat monitor: synthetic HeartbeatEvent on provider silence
- Graceful shutdown: WS→bus→storage→DB engine cleanup sequence
- Bybit V5 public WS provider (backup for Binance, no API key needed)
- FailoverManager: health-based provider switching with recovery
- NATS output adapter: md.events.{type}.{symbol} for SenpAI
- /bus-stats endpoint for backpressure monitoring
- Dockerfile + docker-compose.node1.yml integration
- 36 tests (parsing + bus + failover), requirements.lock
Consumer (senpai-md-consumer):
- NATSConsumer: subscribe md.events.>, queue group senpai-md, backpressure
- State store: LatestState + RollingWindow (deque, 60s)
- Feature engine: 11 features (mid, spread, VWAP, return, vol, latency)
- Rule-based signals: long/short on return+volume+spread conditions
- Publisher: rate-limited features + signals + alerts to NATS
- HTTP API: /health, /metrics, /state/latest, /features/latest, /stats
- 10 Prometheus metrics
- Dockerfile + docker-compose.senpai.yml
- 41 tests (parsing + state + features + rate-limit), requirements.lock
CI: ruff + pytest + smoke import for both services
Tests: 77 total passed, lint clean
Co-authored-by: Cursor <cursoragent@cursor.com>
This commit adds the following new file:
services/market-data-service/app/consumers/nats_output.py (new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
"""
|
||||
NATS output adapter — pushes normalised events to NATS subjects.
|
||||
|
||||
Subject schema:
|
||||
{prefix}.{event_type}.{symbol}
|
||||
e.g. md.events.trade.BTCUSDT
|
||||
md.events.quote.AAPL
|
||||
md.events.heartbeat.__system__
|
||||
|
||||
SenpAI (or any other consumer) can subscribe to:
|
||||
md.events.> — all events
|
||||
md.events.trade.> — all trades
|
||||
md.events.*.BTCUSDT — all event types for BTC
|
||||
|
||||
Payload: JSON (event.model_dump_json())
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from app.config import settings
|
||||
from app.domain.events import Event
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Lazy import — nats-py may not be installed in minimal setups
|
||||
_nc = None
|
||||
|
||||
|
||||
class NatsOutputConsumer:
    """
    Publishes every normalised event to NATS as JSON.

    Subject schema: ``{prefix}.{event_type}.{symbol}``,
    e.g. ``md.events.trade.BTCUSDT``.

    Auto-reconnects via nats-py's built-in reconnect mechanism.
    If NATS is unavailable, events are counted as dropped and the call
    returns immediately — ``handle()`` must never block or raise into
    the producer pipeline.
    """

    def __init__(
        self,
        nats_url: str | None = None,
        subject_prefix: str | None = None,
    ) -> None:
        # Fall back to service-wide settings when not explicitly overridden.
        self._url = nats_url or settings.nats_url
        self._prefix = subject_prefix or settings.nats_subject_prefix
        self._nc = None  # NATS client once connected; None before/after
        self._connected = False  # maintained by the NATS callbacks below
        self._publish_count = 0  # events successfully published
        self._drop_count = 0  # events dropped (not connected / publish failed)

    async def start(self) -> None:
        """Connect to NATS; on any failure, log and stay in drop mode."""
        try:
            # Lazy import — nats-py may not be installed in minimal setups.
            import nats  # noqa: F811

            self._nc = await nats.connect(
                self._url,
                reconnect_time_wait=2,
                max_reconnect_attempts=-1,  # retry forever
                name="market-data-service",
                error_cb=self._error_cb,
                disconnected_cb=self._disconnected_cb,
                reconnected_cb=self._reconnected_cb,
            )
            self._connected = True
            logger.info(
                "nats_output.connected",
                extra={"url": self._url, "prefix": self._prefix},
            )
        except ImportError:
            logger.error(
                "nats_output.nats_not_installed",
                extra={"hint": "pip install nats-py"},
            )
        except Exception as e:
            # Connection failure is non-fatal: the service keeps running
            # and handle() simply counts drops until a later reconnect.
            logger.error(
                "nats_output.connect_failed",
                extra={"url": self._url, "error": str(e)},
            )

    async def handle(self, event: Event) -> None:
        """Publish *event* as JSON to ``{prefix}.{event_type}.{symbol}``.

        Drops (and counts) the event when not connected or when the
        publish fails; never raises and never blocks the pipeline.
        """
        if not self._nc or not self._connected:
            self._drop_count += 1
            return

        # Events without a symbol (e.g. system heartbeats) route to the
        # "__system__" leaf. Using `or` also covers symbol=None — not just
        # a missing attribute — so the subject never contains "None".
        symbol = getattr(event, "symbol", None) or "__system__"
        subject = f"{self._prefix}.{event.event_type.value}.{symbol}"

        try:
            payload = event.model_dump_json().encode("utf-8")
            await self._nc.publish(subject, payload)
            self._publish_count += 1
        except Exception as e:
            self._drop_count += 1
            # Rate-limit the warning: log the 1st, 1001st, 2001st drop, ...
            if self._drop_count % 1000 == 1:
                logger.warning(
                    "nats_output.publish_failed",
                    extra={
                        "subject": subject,
                        "error": str(e),
                        "total_dropped": self._drop_count,
                    },
                )

    async def stop(self) -> None:
        """Flush and close the NATS connection, then log final counters."""
        if self._nc:
            try:
                await self._nc.flush(timeout=5)
                await self._nc.close()
            except Exception as e:
                logger.warning("nats_output.close_error", extra={"error": str(e)})
            finally:
                # Mark as disconnected so any late handle() calls drop
                # cleanly instead of publishing on a closed connection.
                self._connected = False
                self._nc = None

        logger.info(
            "nats_output.stopped",
            extra={
                "published": self._publish_count,
                "dropped": self._drop_count,
            },
        )

    # ── NATS callbacks ────────────────────────────────────────────────

    async def _error_cb(self, e: Exception) -> None:
        logger.error("nats_output.error", extra={"error": str(e)})

    async def _disconnected_cb(self) -> None:
        self._connected = False
        logger.warning("nats_output.disconnected")

    async def _reconnected_cb(self) -> None:
        self._connected = True
        logger.info("nats_output.reconnected")
@@ -3,7 +3,6 @@ StorageConsumer: persists events to SQLite + JSONL log.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
Reference in New Issue
Block a user