services: update comfy agent, senpai md consumer, and swapper deps

This commit is contained in:
Apple
2026-02-19 00:14:18 -08:00
parent c201d105f6
commit c57e6ed96b
15 changed files with 586 additions and 40 deletions

View File

@@ -25,6 +25,7 @@ logger = logging.getLogger(__name__)
# These are set by main.py at startup
_state: LatestState | None = None
_stats_fn = None # callable → dict
_features_cache: dict[str, dict] = {} # symbol → last computed features
def set_state(state: LatestState) -> None:
@@ -37,6 +38,11 @@ def set_stats_fn(fn) -> None:
_stats_fn = fn
def cache_features(symbol: str, features: dict) -> None:
    """Record the most recently computed features for *symbol*.

    The HTTP API reads from this module-level cache so it can answer
    /features/latest requests without recomputing from live state.
    """
    _features_cache.update({symbol: features})
async def _handler(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
"""Minimal HTTP request handler."""
try:
@@ -117,15 +123,19 @@ async def _route(
return body, "application/json", "200 OK"
elif path == "/features/latest":
symbol = params.get("symbol", "")
symbol = params.get("symbol", "").upper()
if not symbol:
body = json.dumps({"error": "missing ?symbol=XXX"}).encode()
return body, "application/json", "400 Bad Request"
if not _state:
cached = _features_cache.get(symbol)
if cached:
data = {"symbol": symbol, "features": cached}
elif _state:
# Fallback to live compute (slower)
data = {"symbol": symbol, "features": compute_features(_state, symbol)}
else:
body = json.dumps({"error": "not initialized"}).encode()
return body, "application/json", "503 Service Unavailable"
features = compute_features(_state, symbol)
data = {"symbol": symbol.upper(), "features": features}
body = json.dumps(data, ensure_ascii=False).encode()
return body, "application/json", "200 OK"

View File

@@ -79,12 +79,24 @@ async def process_events(
events_per_sec_count = 0
time.monotonic()
feature_compute_interval = 1.0 / max(settings.features_pub_rate_hz, 1.0)
next_feature_compute: dict[str, float] = {}
next_signal_emit: dict[str, float] = {}
signal_cooldown_sec = 1.0
batch_counter = 0
while True:
try:
event = await consumer.queue.get()
except asyncio.CancelledError:
break
# Yield to event loop every N events so HTTP API stays responsive
batch_counter += 1
if batch_counter >= 5:
batch_counter = 0
await asyncio.sleep(0)
proc_start = time.monotonic()
try:
@@ -110,15 +122,24 @@ async def process_events(
else:
symbol = None
# Compute features + publish (only for trade/quote events)
# Compute features + publish with per-symbol throttling
if symbol and settings.features_enabled:
snapshot = make_feature_snapshot(state, symbol)
await publisher.publish_features(snapshot)
now_mono = time.monotonic()
due = next_feature_compute.get(symbol, 0.0)
if now_mono >= due:
snapshot = make_feature_snapshot(state, symbol)
# Cache for fast HTTP API responses
api.cache_features(symbol, snapshot.features)
await publisher.publish_features(snapshot)
# Check for trade signal
sig = check_signal(snapshot.features, symbol)
if sig:
await publisher.publish_signal(sig)
# Check for trade signal with cooldown to avoid flood
sig = check_signal(snapshot.features, symbol)
sig_due = next_signal_emit.get(symbol, 0.0)
if sig and now_mono >= sig_due:
await publisher.publish_signal(sig)
next_signal_emit[symbol] = now_mono + signal_cooldown_sec
next_feature_compute[symbol] = now_mono + feature_compute_interval
# Processing latency metric
proc_ms = (time.monotonic() - proc_start) * 1000

View File

@@ -78,7 +78,7 @@ class Publisher:
symbol=signal.symbol,
direction=signal.direction,
).inc()
logger.info(
logger.debug(
"publisher.signal_emitted",
extra={
"symbol": signal.symbol,