feat: Add Comfy Agent service for NODE3 image/video generation
- Create comfy-agent service with FastAPI + NATS integration
- ComfyUI client with HTTP/WebSocket support
- REST API: /generate/image, /generate/video, /status, /result
- NATS subjects: agent.invoke.comfy, comfy.request.*
- Async job queue with progress tracking
- Docker compose configuration for NODE3
- Update PROJECT-MASTER-INDEX.md with NODE2/NODE3 docs

Co-Authored-By: Warp <agent@warp.dev>
This commit is contained in:
54
services/comfy-agent/app/comfyui_client.py
Normal file
54
services/comfy-agent/app/comfyui_client.py
Normal file
@@ -0,0 +1,54 @@
|
||||
# services/comfy-agent/app/comfyui_client.py
|
||||
import asyncio
|
||||
import httpx
|
||||
import json
|
||||
import websockets
|
||||
from typing import Any, Dict, Optional, Callable
|
||||
from .config import settings
|
||||
|
||||
# Progress callback signature: (fraction_complete in [0.0, 1.0], stage label) -> None.
ProgressCb = Callable[[float, str], None]
|
||||
|
||||
class ComfyUIClient:
    """Thin async client for a ComfyUI server.

    Uses HTTP (via a shared ``httpx.AsyncClient``) to queue prompts and
    fetch history, and the ComfyUI websocket to follow execution progress.
    Call :meth:`close` when done to release the HTTP connection pool.
    """

    def __init__(self) -> None:
        # One shared HTTP client for the object's lifetime; base URL and
        # websocket URL come from service configuration.
        self.http = httpx.AsyncClient(base_url=settings.COMFYUI_HTTP, timeout=60)

    async def queue_prompt(self, prompt_graph: Dict[str, Any], client_id: str) -> str:
        """Submit a workflow graph for execution and return its prompt id.

        ComfyUI expects a payload of ``{"prompt": {...}, "client_id": "..."}``
        and typically answers ``{"prompt_id": "...", "number": ...}``.

        Args:
            prompt_graph: the ComfyUI node graph to execute.
            client_id: caller-chosen id; reuse it in :meth:`wait_progress`
                so the server routes this prompt's events to our socket.

        Raises:
            httpx.HTTPStatusError: if the server rejects the request.
            KeyError: if the response carries no ``prompt_id``.
        """
        r = await self.http.post(
            "/prompt", json={"prompt": prompt_graph, "client_id": client_id}
        )
        r.raise_for_status()
        data = r.json()
        return data["prompt_id"]

    async def wait_progress(
        self,
        client_id: str,
        prompt_id: str,
        on_progress: Optional[ProgressCb] = None,
    ) -> None:
        """Block until *prompt_id* finishes executing, reporting progress.

        The websocket emits ``progress`` / ``executing`` / ``status`` events;
        handling here is deliberately generic and best-effort.

        Args:
            client_id: same id used when queueing the prompt.
            prompt_id: prompt whose completion we wait for.
            on_progress: optional callback ``(fraction, stage)`` with
                ``fraction`` clamped to [0.0, 1.0].
        """
        ws_url = f"{settings.COMFYUI_WS}?clientId={client_id}"
        async with websockets.connect(ws_url, max_size=50_000_000) as ws:
            while True:
                evt = json.loads(await ws.recv())

                # Best-effort progress mapping.
                if evt.get("type") == "progress":
                    data = evt.get("data", {})
                    # Some ComfyUI versions tag progress events with the
                    # originating prompt_id; when present, skip events that
                    # belong to a different prompt on this socket.
                    if data.get("prompt_id") not in (None, prompt_id):
                        continue
                    max_v = float(data.get("max", 1.0))
                    val = float(data.get("value", 0.0))
                    # Guard against zero/negative "max" (division by zero)
                    # and clamp so callers never see a fraction above 1.0.
                    p = 0.0 if max_v <= 0 else min(1.0, val / max_v)
                    if on_progress:
                        on_progress(p, "progress")

                # The completion signal varies between versions; an
                # "executing" event with node=None usually means done.
                if evt.get("type") == "executing":
                    data = evt.get("data", {})
                    if data.get("prompt_id") == prompt_id and data.get("node") is None:
                        if on_progress:
                            on_progress(1.0, "done")
                        return

    async def get_history(self, prompt_id: str) -> Dict[str, Any]:
        """Fetch the execution history/outputs recorded for *prompt_id*.

        Raises:
            httpx.HTTPStatusError: on a non-2xx response.
        """
        r = await self.http.get(f"/history/{prompt_id}")
        r.raise_for_status()
        return r.json()

    async def close(self) -> None:
        """Release the underlying HTTP connection pool."""
        await self.http.aclose()
||||
Reference in New Issue
Block a user