Files
microdao-daarion/docker-compose.node1.yml
2026-03-03 08:08:23 -08:00

1323 lines
43 KiB
YAML
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
version: '3.8'

services:
  # DAGI Router for NODE1
  router:
    build:
      context: ./services/router
      dockerfile: Dockerfile
    container_name: dagi-router-node1
    ports:
      - "9102:8000"
    environment:
      - NATS_URL=nats://nats:4222
      - ROUTER_CONFIG_PATH=/app/router_config.yaml
      - LOG_LEVEL=info
      - NODE_ID=noda1
      - MEMORY_SERVICE_URL=http://memory-service:8000
      # Timeout policy: Gateway (180s) > Router (60s) > LLM (30s)
      # NOTE(review): ROUTER_TIMEOUT=180 equals the gateway tier, not the 60s
      # the policy comment implies for the router — confirm intended value.
      - ROUTER_TIMEOUT=180
      - QDRANT_HOST=qdrant
      - QDRANT_PORT=6333
      - QDRANT_ENABLED=true
      - NEO4J_BOLT_URL=bolt://neo4j:7687
      - NEO4J_HTTP_URL=http://neo4j:7474
      - NEO4J_USER=neo4j
      # SECURITY: secrets below were previously hard-coded. They are now
      # overridable from the environment / .env; the defaults preserve the old
      # values so behavior is unchanged. Rotate these keys and remove the
      # defaults from version control.
      - NEO4J_PASSWORD=${NEO4J_PASSWORD:-DaarionNeo4j2026!}
      - DEEPSEEK_API_KEY=${DEEPSEEK_API_KEY:-sk-0db94e8193ec4a6e9acd593ee8d898e7}
      - MISTRAL_API_KEY=${MISTRAL_API_KEY:-40Gwjo8nVBx4i4vIkgszvXw9bOwDOu4G}
      - COHERE_API_KEY=${COHERE_API_KEY:-nOdOXnuepLku2ipJWpe6acWgAsJCsDhMO0RnaEJB}
      - GROK_API_KEY=${GROK_API_KEY:-xai-CpoLMPgw91NP9AEdHPhIrvU4ZnhV1q1P8BJBKCpD5kTPFRXJmTOkgGNHwYdZpXMlRxBgHcgcSlIXccxh}
      - VISION_ENCODER_URL=http://vision-encoder:8001
      - SWAPPER_SERVICE_URL=http://swapper-service:8890
      - BINANCE_MONITOR_URL=http://dagi-binance-bot-monitor-node1:8893
      - IMAGE_GEN_URL=http://swapper-service:8890/image/generate
      - STT_SERVICE_URL=http://swapper-service:8890
      - STT_SERVICE_UPLOAD_URL=http://swapper-service:8890/stt
      - OCR_SERVICE_URL=http://swapper-service:8890
      - WEB_SEARCH_SERVICE_URL=http://swapper-service:8890
      - REDIS_URL=redis://redis:6379/0
      - CREWAI_SERVICE_URL=http://dagi-staging-crewai-service:9010
      - NATURE_ID_URL=http://plant-vision-node1:8085
      - NATURE_ID_MIN_CONFIDENCE=0.65
      - PLANTNET_API_KEY=${PLANTNET_API_KEY}
      - ONEOK_CRM_BASE_URL=http://oneok-crm-adapter:8088
      - ONEOK_CALC_BASE_URL=http://oneok-calc-adapter:8089
      - ONEOK_DOCS_BASE_URL=http://oneok-docs-adapter:8090
      - ONEOK_SCHEDULE_BASE_URL=http://oneok-schedule-adapter:8091
      - ONEOK_ADAPTER_API_KEY=${ONEOK_ADAPTER_API_KEY}
      - ROUTER_TOOL_MAX_ROUNDS=${ROUTER_TOOL_MAX_ROUNDS:-10}
      - AGROMATRIX_REVIEW_AUTH_MODE=${AGROMATRIX_REVIEW_AUTH_MODE:-bearer}
      - AGROMATRIX_REVIEW_BEARER_TOKENS=${AGROMATRIX_REVIEW_BEARER_TOKENS}
      # ── Fabric Layer (NCS + Node Worker, Swapper being decommissioned) ──
      - NODE_CAPABILITIES_URL=http://node-capabilities:8099/capabilities
      - ENABLE_GLOBAL_CAPS_NATS=true
      - OLLAMA_URL=http://172.18.0.1:11434
      - PREFER_NODE_WORKER=true
      - ENABLE_CREW_MODEL_ROUTING=1
      - CREW_SMALL_MODEL=smollm2:135m
      - CREWAI_WORKER_LLM_PROFILE=crew_local_27b
    volumes:
      - ${DEPLOY_ROOT:-.}/services/router/router_config.yaml:/app/router_config.yaml:ro
      - ${DEPLOY_ROOT:-.}/services/router/router-config.yml:/app/router-config.yml:ro
      - ${DEPLOY_ROOT:-.}/config/crewai_agents.json:/config/crewai_agents.json:ro
      - ${DEPLOY_ROOT:-.}/gateway-bot:/app/prompts:ro
      - ${DEPLOY_ROOT:-.}/logs:/app/logs
    networks:
      - dagi-network
    restart: unless-stopped
    extra_hosts:
      - "host.docker.internal:host-gateway"
    healthcheck:
      test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8000/health')\""]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 10s
# Swapper Service для NODE1 - Dynamic LLM + OCR model loading
swapper-service:
build:
context: ./services/swapper-service
dockerfile: Dockerfile
container_name: swapper-service-node1
ports:
- "8890:8890"
- "8891:8891" # Metrics
environment:
- OLLAMA_BASE_URL=http://host.docker.internal:11434
- SWAPPER_CONFIG_PATH=/app/config/swapper_config.yaml
- SWAPPER_MODE=single-active
- MAX_CONCURRENT_MODELS=1
- MODEL_SWAP_TIMEOUT=300
- GPU_ENABLED=false
- NODE_ID=noda1
- HF_HOME=/root/.cache/huggingface
- CUDA_VISIBLE_DEVICES=
- WHISPER_DEVICE=cpu
- WHISPER_COMPUTE_TYPE=int8
- CRAWL4AI_URL=http://crawl4ai:11235
# Cloud API keys for video/image generation
- GROK_API_KEY=xai-CpoLMPgw91NP9AEdHPhIrvU4ZnhV1q1P8BJBKCpD5kTPFRXJmTOkgGNHwYdZpXMlRxBgHcgcSlIXccxh
- MISTRAL_API_KEY=40Gwjo8nVBx4i4vIkgszvXw9bOwDOu4G
volumes:
- ${DEPLOY_ROOT:-.}/services/swapper-service/config/swapper_config_node1.yaml:/app/config/swapper_config.yaml:ro
- ${DEPLOY_ROOT:-.}/logs:/app/logs
- swapper-hf-cache-node1:/root/.cache/huggingface
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "wget -qO- http://localhost:8890/health || exit 1"]
interval: 30s
timeout: 10s
retries: 3
start_period: 60s
# Image Generation тепер інтегровано в Swapper Service (lazy loading)
# Endpoint: POST /image/generate на swapper-service:8890
# Plant Vision wrapper (local nature-id CLI -> HTTP)
plant-vision-node1:
build:
context: ./services/plant-vision-node1
dockerfile: Dockerfile
container_name: plant-vision-node1
environment:
- NATURE_ID_CMD=${NATURE_ID_CMD:-python /opt/nature-id/nature_id.py -m plants -l -r 5 -s {image_path}}
- NATURE_ID_TIMEOUT=40
- DOWNLOAD_TIMEOUT=20
networks:
- dagi-network
volumes:
- ${DEPLOY_ROOT:-.}/third_party/nature-id:/opt/nature-id:ro
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8085/health')\""]
interval: 30s
timeout: 10s
retries: 3
start_period: 15s
# Crawl4AI - Advanced Web Crawler with JavaScript support
crawl4ai:
image: unclecode/crawl4ai:latest
container_name: dagi-crawl4ai-node1
ports:
- "11235:11235"
environment:
- CRAWL4AI_API_TOKEN=${CRAWL4AI_API_TOKEN:-}
- MAX_CONCURRENT_TASKS=5
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:11235/health"]
interval: 30s
timeout: 10s
retries: 3
start_period: 30s
# Gateway Bot (Helion + DAARWIZZ)
gateway:
build:
context: ./gateway-bot
dockerfile: Dockerfile
args:
BUILD_SHA: "${BUILD_SHA:-dev}"
BUILD_TIME: "${BUILD_TIME:-local}"
container_name: dagi-gateway-node1
ports:
- "9300:9300"
environment:
- BUILD_SHA=${BUILD_SHA:-dev}
- BUILD_TIME=${BUILD_TIME:-local}
- NODE_ID=NODA1
- ROUTER_URL=${ROUTER_URL:-http://dagi-staging-router:8000}
- GATEWAY_MAX_TOKENS_CONCISE=350
- GATEWAY_MAX_TOKENS_SENPAI_DEFAULT=700
- GATEWAY_MAX_TOKENS_DEFAULT=700
- GATEWAY_MAX_TOKENS_DETAILED=1200
- SERVICE_ID=gateway
- SERVICE_ROLE=gateway
- BRAND_INTAKE_URL=http://brand-intake:9211
- BRAND_REGISTRY_URL=http://brand-registry:9210
- PRESENTATION_RENDERER_URL=http://presentation-renderer:9212
- ARTIFACT_REGISTRY_URL=http://artifact-registry:9220
- HELION_TELEGRAM_BOT_TOKEN=8112062582:AAGS-HwRLEI269lDutLtAJTFArsIq31YNhE
- HELION_NAME=Helion
- HELION_PROMPT_PATH=/app/gateway-bot/helion_prompt.txt
- NUTRA_TELEGRAM_BOT_TOKEN=8517315428:AAGTLcKxBAZDsMgx28agKTvl1SqJGi0utH4
- NUTRA_NAME=NUTRA
- DRUID_TELEGRAM_BOT_TOKEN=8145618489:AAFR714mBsNmiuF-rjCw-295iORBReJQZ70
- DRUID_NAME=Druid
- DRUID_PROMPT_PATH=/app/gateway-bot/druid_prompt.txt
- DAARWIZZ_TELEGRAM_BOT_TOKEN=8323412397:AAGZbAR22LuOiGD8xVC3OXMjahQ8rs2lJwo
- DAARWIZZ_NAME=DAARWIZZ
- DAARWIZZ_PROMPT_PATH=/app/gateway-bot/daarwizz_prompt.txt
- GREENFOOD_TELEGRAM_BOT_TOKEN=7495165343:AAGR1XEOzg7DkPFPCzL_eYLCJfxJuonCxug
- GREENFOOD_NAME=GREENFOOD
- GREENFOOD_PROMPT_PATH=/app/gateway-bot/greenfood_prompt.txt
- AGROMATRIX_TELEGRAM_BOT_TOKEN=8580290441:AAFuDBmFJtpl-3I_WfkH7Hkb59X0fhYNMOE
- AGROMATRIX_NAME=AgroMatrix
- AGROMATRIX_PROMPT_PATH=/app/gateway-bot/agromatrix_prompt.txt
# Alateya - R&D, біотех, інновації
- ALATEYA_TELEGRAM_BOT_TOKEN=8436880945:AAEi-HS6GEctddoqBUd37MHfweZQP-OjRlo
- ALATEYA_NAME=Alateya
- ALATEYA_PROMPT_PATH=/app/gateway-bot/alateya_prompt.txt
# Clan (Spirit) - Дух Общини
- CLAN_TELEGRAM_BOT_TOKEN=8516872152:AAHH26wU8hJZJbSCJXb4vbmPmakTP77ok5E
- CLAN_NAME=Spirit
- CLAN_PROMPT_PATH=/app/gateway-bot/clan_prompt.txt
# Eonarch - Еволюція свідомості
- EONARCH_TELEGRAM_BOT_TOKEN=7962391584:AAFYkelLRG3VR_Lxuu6pEGG76t4vZdANtz4
- EONARCH_NAME=EONARCH
- EONARCH_PROMPT_PATH=/app/gateway-bot/eonarch_prompt.txt
- SENPAI_TELEGRAM_BOT_TOKEN=8510265026:AAGFrFBIIEihsLptZSxuKdmW2RoRPQDY9FE
- ONEOK_TELEGRAM_BOT_TOKEN=${ONEOK_TELEGRAM_BOT_TOKEN}
- SOUL_TELEGRAM_BOT_TOKEN=8041596416:AAHhpfCtY8paCm_9AD-4stJJg-Vw-CBf6Qk
- YAROMIR_TELEGRAM_BOT_TOKEN=8128180674:AAGNZdG3LwECI4z_803smsuRHsK3nPdjMLY
- SOFIIA_TELEGRAM_BOT_TOKEN=8589292566:AAEmPvS6nY9e-Y-TZm04CAHWlaFnWVxajE4
- SENPAI_NAME=SENPAI
- ONEOK_NAME=1OK
- SOUL_NAME=Athena
- YAROMIR_NAME=Yaromir
- SOFIIA_NAME=Sophia
- SENPAI_PROMPT_PATH=/app/gateway-bot/senpai_prompt.txt
- ONEOK_PROMPT_PATH=/app/gateway-bot/oneok_prompt.txt
- MEMORY_SERVICE_URL=http://memory-service:8000
# Timeout policy: Gateway (180s) > Router (60s) > LLM (30s)
- ROUTER_TIMEOUT=180
- SWAPPER_SERVICE_URL=http://swapper-service:8890
- IMAGE_GEN_URL=http://swapper-service:8890/image/generate
- STT_SERVICE_URL=http://swapper-service:8890
- STT_SERVICE_UPLOAD_URL=http://swapper-service:8890/stt
- OCR_SERVICE_URL=http://swapper-service:8890
- WEB_SEARCH_SERVICE_URL=http://swapper-service:8890
- REDIS_URL=redis://redis:6379/0
- CREWAI_SERVICE_URL=http://dagi-staging-crewai-service:9010
- AGROMATRIX_REVIEW_AUTH_MODE=${AGROMATRIX_REVIEW_AUTH_MODE:-bearer}
- AGROMATRIX_REVIEW_BEARER_TOKENS=${AGROMATRIX_REVIEW_BEARER_TOKENS}
- ENABLE_CREW_MODEL_ROUTING=1
- CREW_SMALL_MODEL=smollm2:135m
- CREWAI_WORKER_LLM_PROFILE=crew_local_27b
# v4.3 FarmOS integration (fail-closed: якщо пусто — агент повідомить "не налаштований")
- FARMOS_BASE_URL=http://dagi-farmos-node1
- FARMOS_TOKEN=${FARMOS_TOKEN:-}
- FARMOS_USER=${FARMOS_USER:-}
- FARMOS_PASS=${FARMOS_PASS:-}
- FARMOS_CLIENT_ID=${FARMOS_CLIENT_ID:-farm}
env_file:
- .env.stepan.node1
volumes:
- ${DEPLOY_ROOT:-.}/gateway-bot:/app/gateway-bot:ro
- ${DEPLOY_ROOT:-.}/logs:/app/logs
depends_on:
- router
- memory-service
- redis
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:9300/health')\""]
interval: 30s
timeout: 10s
retries: 3
start_period: 10s
gateway-worker:
build:
context: ./gateway-bot
dockerfile: Dockerfile
container_name: dagi-gateway-worker-node1
command: ["python", "-m", "daarion_facade.worker"]
environment:
- ROUTER_BASE_URL=http://router:8000
- REDIS_URL=redis://redis:6379/0
- ROUTER_WORKER_TIMEOUT=60
volumes:
- ${DEPLOY_ROOT:-.}/gateway-bot:/app/gateway-bot:ro
- ${DEPLOY_ROOT:-.}/logs:/app/logs
depends_on:
- router
- redis
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "python", "-c", "print(\"ok\")"]
interval: 30s
timeout: 5s
retries: 3
gateway-reminder-worker:
build:
context: ./gateway-bot
dockerfile: Dockerfile
container_name: dagi-gateway-reminder-worker-node1
command: ["python", "-m", "daarion_facade.reminder_worker"]
environment:
- REDIS_URL=redis://redis:6379/0
- DAARION_REMINDER_POLL_SECONDS=${DAARION_REMINDER_POLL_SECONDS:-2}
- DAARION_REMINDER_TTL_SECONDS=${DAARION_REMINDER_TTL_SECONDS:-2592000}
- DAARION_REMINDER_DEFAULT_TZ=${DAARION_REMINDER_DEFAULT_TZ:-Europe/Kyiv}
- GLOBAL_RELAY_ALLOWED_USER_IDS=${GLOBAL_RELAY_ALLOWED_USER_IDS:-}
- MENTOR_PRIVATE_HANDLES=${MENTOR_PRIVATE_HANDLES:-ivantytar,archenvis,olegarch88}
- MENTOR_PRIVATE_NAMES=${MENTOR_PRIVATE_NAMES:-Іван Титар,Александр Вертій,Олег Ковальчук}
- MENTOR_DISCLOSURE_ALLOWED_USER_IDS=${MENTOR_DISCLOSURE_ALLOWED_USER_IDS:-}
- HELION_MENTOR_CHAT_IDS=${HELION_MENTOR_CHAT_IDS:-}
- HELION_RELAY_ALLOWED_USER_IDS=${HELION_RELAY_ALLOWED_USER_IDS:-}
- DAARWIZZ_TELEGRAM_BOT_TOKEN=${DAARWIZZ_TELEGRAM_BOT_TOKEN:-8323412397:AAGZbAR22LuOiGD8xVC3OXMjahQ8rs2lJwo}
- HELION_TELEGRAM_BOT_TOKEN=${HELION_TELEGRAM_BOT_TOKEN:-8112062582:AAGS-HwRLEI269lDutLtAJTFArsIq31YNhE}
- GREENFOOD_TELEGRAM_BOT_TOKEN=${GREENFOOD_TELEGRAM_BOT_TOKEN:-7495165343:AAGR1XEOzg7DkPFPCzL_eYLCJfxJuonCxug}
- AGROMATRIX_TELEGRAM_BOT_TOKEN=${AGROMATRIX_TELEGRAM_BOT_TOKEN:-8580290441:AAFuDBmFJtpl-3I_WfkH7Hkb59X0fhYNMOE}
- ALATEYA_TELEGRAM_BOT_TOKEN=${ALATEYA_TELEGRAM_BOT_TOKEN:-8436880945:AAEi-HS6GEctddoqBUd37MHfweZQP-OjRlo}
- NUTRA_TELEGRAM_BOT_TOKEN=${NUTRA_TELEGRAM_BOT_TOKEN:-8517315428:AAGTLcKxBAZDsMgx28agKTvl1SqJGi0utH4}
- DRUID_TELEGRAM_BOT_TOKEN=${DRUID_TELEGRAM_BOT_TOKEN:-8145618489:AAFR714mBsNmiuF-rjCw-295iORBReJQZ70}
- CLAN_TELEGRAM_BOT_TOKEN=${CLAN_TELEGRAM_BOT_TOKEN:-8516872152:AAHH26wU8hJZJbSCJXb4vbmPmakTP77ok5E}
- EONARCH_TELEGRAM_BOT_TOKEN=${EONARCH_TELEGRAM_BOT_TOKEN:-7962391584:AAFYkelLRG3VR_Lxuu6pEGG76t4vZdANtz4}
- SENPAI_TELEGRAM_BOT_TOKEN=${SENPAI_TELEGRAM_BOT_TOKEN:-8510265026:AAGFrFBIIEihsLptZSxuKdmW2RoRPQDY9FE}
- ONEOK_TELEGRAM_BOT_TOKEN=${ONEOK_TELEGRAM_BOT_TOKEN}
- SOUL_TELEGRAM_BOT_TOKEN=${SOUL_TELEGRAM_BOT_TOKEN:-8041596416:AAHhpfCtY8paCm_9AD-4stJJg-Vw-CBf6Qk}
- YAROMIR_TELEGRAM_BOT_TOKEN=${YAROMIR_TELEGRAM_BOT_TOKEN:-8128180674:AAGNZdG3LwECI4z_803smsuRHsK3nPdjMLY}
- SOFIIA_TELEGRAM_BOT_TOKEN=${SOFIIA_TELEGRAM_BOT_TOKEN:-8589292566:AAEmPvS6nY9e-Y-TZm04CAHWlaFnWVxajE4}
volumes:
- ${DEPLOY_ROOT:-.}/gateway-bot:/app/gateway-bot:ro
- ${DEPLOY_ROOT:-.}/logs:/app/logs
depends_on:
- redis
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "python", "-c", "print(\"ok\")"]
interval: 30s
timeout: 5s
retries: 3
metrics-poller-node1:
build:
context: ./gateway-bot
dockerfile: Dockerfile
container_name: dagi-metrics-poller-node1
command: ["python", "-m", "daarion_facade.metrics_poller"]
environment:
- REDIS_URL=redis://redis:6379/0
- MEMORY_SERVICE_URL=http://memory-service:8000
- DAARION_METRICS_POLL_INTERVAL_SECONDS=${DAARION_METRICS_POLL_INTERVAL_SECONDS:-10}
- DAARION_METRICS_TTL_SECONDS=${DAARION_METRICS_TTL_SECONDS:-60}
- DAARION_METRICS_HTTP_CONNECT_TIMEOUT_SECONDS=${DAARION_METRICS_HTTP_CONNECT_TIMEOUT_SECONDS:-2}
- DAARION_METRICS_HTTP_TOTAL_TIMEOUT_SECONDS=${DAARION_METRICS_HTTP_TOTAL_TIMEOUT_SECONDS:-5}
- DAARION_NODE_COUNT=${DAARION_NODE_COUNT:-1}
volumes:
- ${DEPLOY_ROOT:-.}/gateway-bot:/app/gateway-bot:ro
- ${DEPLOY_ROOT:-.}/logs:/app/logs
depends_on:
- redis
- memory-service
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "python", "-c", "print(\"ok\")"]
interval: 30s
timeout: 5s
retries: 3
# CLAN Consent Outbox Worker (Postgres event-store applier; no execute)
clan-consent-outbox-worker:
build:
context: ./services/clan-consent-adapter
dockerfile: Dockerfile
container_name: clan-consent-outbox-worker
command: ["python", "clan_consent_outbox_worker.py"]
environment:
- CLAN_PG_DSN=${CLAN_PG_DSN:-postgresql://daarion:DaarionDB2026!@dagi-postgres:5432/daarion_main}
- CLAN_OUTBOX_BATCH_SIZE=${CLAN_OUTBOX_BATCH_SIZE:-10}
- CLAN_OUTBOX_POLL_INTERVAL_SEC=${CLAN_OUTBOX_POLL_INTERVAL_SEC:-1.0}
- CLAN_OUTBOX_MAX_CAS_RETRIES=${CLAN_OUTBOX_MAX_CAS_RETRIES:-5}
- CLAN_CONSENT_APPLIER_ACTOR_ID=${CLAN_CONSENT_APPLIER_ACTOR_ID:-system:consent-applier}
volumes:
- ${DEPLOY_ROOT:-.}/logs:/app/logs
depends_on:
- dagi-postgres
networks:
- dagi-network
restart: unless-stopped
# 1OK - EspoCRM DB
oneok-espocrm-db:
image: mariadb:11
container_name: oneok-espocrm-db-node1
environment:
- MARIADB_ROOT_PASSWORD=${ONEOK_ESPO_DB_ROOT_PASSWORD:-change_me_root}
- MARIADB_DATABASE=${ONEOK_ESPO_DB_NAME:-oneok_espocrm}
- MARIADB_USER=${ONEOK_ESPO_DB_USER:-oneok}
- MARIADB_PASSWORD=${ONEOK_ESPO_DB_PASSWORD:-change_me_oneok}
volumes:
- oneok-espocrm-db-node1:/var/lib/mysql
networks:
- dagi-network
restart: unless-stopped
# 1OK - EspoCRM
oneok-espocrm:
image: espocrm/espocrm:latest
container_name: oneok-espocrm-node1
ports:
- "9080:80"
environment:
- ESPOCRM_DATABASE_HOST=oneok-espocrm-db
- ESPOCRM_DATABASE_NAME=${ONEOK_ESPO_DB_NAME:-oneok_espocrm}
- ESPOCRM_DATABASE_USER=${ONEOK_ESPO_DB_USER:-oneok}
- ESPOCRM_DATABASE_PASSWORD=${ONEOK_ESPO_DB_PASSWORD:-change_me_oneok}
- ESPOCRM_ADMIN_USERNAME=${ONEOK_ESPO_ADMIN_USER:-admin}
- ESPOCRM_ADMIN_PASSWORD=${ONEOK_ESPO_ADMIN_PASSWORD:-change_me_admin}
- ESPOCRM_SITE_URL=${ONEOK_ESPO_SITE_URL:-http://localhost:9080}
depends_on:
- oneok-espocrm-db
networks:
- dagi-network
restart: unless-stopped
# 1OK - Gotenberg PDF
oneok-gotenberg:
image: gotenberg/gotenberg:8
container_name: oneok-gotenberg-node1
command:
- gotenberg
- --api-timeout=30s
- --api-port=3000
- --chromium-disable-routes=true
ports:
- "3010:3000"
networks:
- dagi-network
restart: unless-stopped
# 1OK - CRM Adapter
oneok-crm-adapter:
build:
context: ./services/oneok-crm-adapter
dockerfile: Dockerfile
container_name: oneok-crm-adapter-node1
environment:
- ONEOK_ADAPTER_API_KEY=${ONEOK_ADAPTER_API_KEY}
- ONEOK_CRM_DB_PATH=/data/oneok_crm.sqlite
- ONEOK_ESPO_URL=http://oneok-espocrm
- ONEOK_ESPO_API_KEY=${ONEOK_ESPO_API_KEY}
volumes:
- oneok-crm-data-node1:/data
depends_on:
- oneok-espocrm
networks:
- dagi-network
restart: unless-stopped
# 1OK - Docs Adapter
oneok-docs-adapter:
build:
context: ./services/oneok-docs-adapter
dockerfile: Dockerfile
container_name: oneok-docs-adapter-node1
environment:
- ONEOK_ADAPTER_API_KEY=${ONEOK_ADAPTER_API_KEY}
- ONEOK_GOTENBERG_URL=http://oneok-gotenberg:3000
depends_on:
- oneok-gotenberg
networks:
- dagi-network
restart: unless-stopped
# 1OK - Calc Adapter
oneok-calc-adapter:
build:
context: ./services/oneok-calc-adapter
dockerfile: Dockerfile
container_name: oneok-calc-adapter-node1
environment:
- ONEOK_ADAPTER_API_KEY=${ONEOK_ADAPTER_API_KEY}
- ONEOK_BASE_RATE_PER_M2=${ONEOK_BASE_RATE_PER_M2:-3200}
- ONEOK_INSTALL_RATE_PER_M2=${ONEOK_INSTALL_RATE_PER_M2:-900}
- ONEOK_CURRENCY=${ONEOK_CURRENCY:-UAH}
networks:
- dagi-network
restart: unless-stopped
# 1OK - Schedule Adapter
oneok-schedule-adapter:
build:
context: ./services/oneok-schedule-adapter
dockerfile: Dockerfile
container_name: oneok-schedule-adapter-node1
environment:
- ONEOK_ADAPTER_API_KEY=${ONEOK_ADAPTER_API_KEY}
- ONEOK_SCHEDULE_TZ=Europe/Kyiv
networks:
- dagi-network
restart: unless-stopped
# Node Capabilities Service — model inventory + load metrics
node-capabilities:
build:
context: ./services/node-capabilities
dockerfile: Dockerfile
container_name: node-capabilities-node1
ports:
- "127.0.0.1:8099:8099"
environment:
- NODE_ID=noda1
- OLLAMA_BASE_URL=http://172.18.0.1:11434
- SWAPPER_URL=
- CACHE_TTL_SEC=15
- ENABLE_NATS_CAPS=true
- NATS_URL=nats://nats:4222
- NODE_WORKER_URL=http://node-worker:8109
extra_hosts:
- "host.docker.internal:host-gateway"
depends_on:
- nats
networks:
dagi-network:
aliases:
- node-capabilities
restart: unless-stopped
# Node Worker — NATS offload executor
node-worker:
build:
context: ./services/node-worker
dockerfile: Dockerfile
container_name: node-worker-node1
ports:
- "127.0.0.1:8109:8109"
extra_hosts:
- "host.docker.internal:host-gateway"
environment:
- NODE_ID=noda1
- NATS_URL=nats://nats:4222
- OLLAMA_BASE_URL=http://172.18.0.1:11434
- NODE_DEFAULT_LLM=qwen3.5:27b
- NODE_DEFAULT_VISION=qwen3-vl:8b
- NODE_WORKER_MAX_CONCURRENCY=2
- NCS_REPORT_URL=http://node-capabilities:8099
- STT_PROVIDER=none
- TTS_PROVIDER=none
- OCR_PROVIDER=vision_prompted
- IMAGE_PROVIDER=none
depends_on:
- nats
networks:
- dagi-network
restart: unless-stopped
# NATS (JetStream)
nats:
image: nats:2.11-alpine
container_name: dagi-nats-node1
ports:
- "4222:4222"
- "8222:8222" # HTTP monitoring
- "7422:7422" # Leafnode hub (NODA2/NODA3 connect here)
command: ["-c", "/etc/nats/nats-node1.conf"]
volumes:
- nats-data-node1:/data
- ./config/nats/nats-node1.conf:/etc/nats/nats-node1.conf:ro
networks:
dagi-network:
aliases:
- nats
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "wget -qO- http://localhost:8222/healthz || exit 1"]
interval: 5s
timeout: 3s
retries: 10
start_period: 5s
# MinIO Object Storage
minio:
image: minio/minio:latest
container_name: dagi-minio-node1
ports:
- "9000:9000"
- "9001:9001"
environment:
- MINIO_ROOT_USER=minioadmin
- MINIO_ROOT_PASSWORD=minioadmin
command: ["server", "/data", "--console-address", ":9001"]
volumes:
- minio-data-node1:/data
networks:
- dagi-network
restart: unless-stopped
# Artifact Registry (shared for docs/presentations)
artifact-registry:
build:
context: ./services/artifact-registry
dockerfile: Dockerfile
container_name: artifact-registry-node1
ports:
- "9220:9220"
environment:
- POSTGRES_HOST=dagi-postgres
- POSTGRES_PORT=5432
- POSTGRES_USER=daarion
- POSTGRES_PASSWORD=DaarionDB2026!
- POSTGRES_DB=daarion_main
- MINIO_ENDPOINT=minio:9000
- MINIO_ACCESS_KEY=minioadmin
- MINIO_SECRET_KEY=minioadmin
- MINIO_BUCKET=artifacts
- MINIO_SECURE=false
- NATS_URL=nats://nats:4222
volumes:
- ${DEPLOY_ROOT:-.}/logs:/app/logs
depends_on:
- nats
- minio
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:9220/health')\""]
interval: 30s
timeout: 10s
retries: 3
start_period: 10s
# RAG Service (pgvector)
rag-service:
build:
context: ./services/rag-service
dockerfile: Dockerfile
container_name: rag-service-node1
ports:
- "9500:9500"
environment:
- PG_DSN=postgresql+psycopg2://daarion:DaarionDB2026!@dagi-postgres:5432/rag
- RAG_TABLE_NAME=rag_documents
depends_on:
- dagi-postgres
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "wget", "-qO-", "http://localhost:9500/health"]
interval: 10s
timeout: 3s
retries: 10
# PPTX Render Worker
render-pptx-worker:
build:
context: ./services/render-pptx-worker
dockerfile: Dockerfile
container_name: render-pptx-worker-node1
environment:
- NATS_URL=nats://nats:4222
- ARTIFACT_REGISTRY_URL=http://artifact-registry:9220
- MINIO_ENDPOINT=minio:9000
- MINIO_ACCESS_KEY=minioadmin
- MINIO_SECRET_KEY=minioadmin
- MINIO_BUCKET=artifacts
- MINIO_SECURE=false
depends_on:
nats:
condition: service_healthy
artifact-registry:
condition: service_started
minio:
condition: service_started
networks:
- dagi-network
restart: unless-stopped
# PDF Render Worker (LibreOffice)
render-pdf-worker:
build:
context: ./services/render-pdf-worker
dockerfile: Dockerfile
container_name: render-pdf-worker-node1
environment:
- NATS_URL=nats://nats:4222
- ARTIFACT_REGISTRY_URL=http://artifact-registry:9220
- MINIO_ENDPOINT=minio:9000
- MINIO_ACCESS_KEY=minioadmin
- MINIO_SECRET_KEY=minioadmin
- MINIO_BUCKET=artifacts
- MINIO_SECURE=false
depends_on:
- nats
- artifact-registry
- minio
networks:
- dagi-network
restart: unless-stopped
stop_grace_period: 30s
# Index Doc Worker
index-doc-worker:
build:
context: ./services/index-doc-worker
dockerfile: Dockerfile
container_name: index-doc-worker-node1
environment:
- NATS_URL=nats://nats:4222
- ARTIFACT_REGISTRY_URL=http://artifact-registry:9220
- RAG_SERVICE_URL=http://rag-service:9500
- MINIO_ENDPOINT=minio:9000
- MINIO_ACCESS_KEY=minioadmin
- MINIO_SECRET_KEY=minioadmin
- MINIO_BUCKET=artifacts
- MINIO_SECURE=false
- INDEX_DOC_MAX_BYTES=52428800
depends_on:
nats:
condition: service_healthy
artifact-registry:
condition: service_started
rag-service:
condition: service_started
minio:
condition: service_started
networks:
- dagi-network
restart: unless-stopped
# Brand Registry Service
brand-registry:
build:
context: ./services/brand-registry
dockerfile: Dockerfile
container_name: brand-registry-node1
ports:
- "9210:9210"
environment:
- BRAND_REGISTRY_DATA=/data/brand-registry
volumes:
- ${DEPLOY_ROOT:-.}/logs:/app/logs
- brand-registry-data-node1:/data/brand-registry
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:9210/health')\""]
interval: 30s
timeout: 10s
retries: 3
start_period: 10s
# Brand Intake Service
brand-intake:
build:
context: ./services/brand-intake
dockerfile: Dockerfile
container_name: brand-intake-node1
ports:
- "9211:9211"
environment:
- BRAND_MAP_PATH=/app/config/BrandMap.yaml
- BRAND_INTAKE_DATA=/data/brand-intake
- BRAND_REGISTRY_URL=http://brand-registry:9210
volumes:
- ./config/brand/BrandMap.yaml:/app/config/BrandMap.yaml:ro
- ${DEPLOY_ROOT:-.}/logs:/app/logs
- brand-intake-data-node1:/data/brand-intake
depends_on:
- brand-registry
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:9211/health')\""]
interval: 30s
timeout: 10s
retries: 3
start_period: 10s
# Presentation Renderer Service (MVP)
presentation-renderer:
build:
context: ./services/presentation-renderer
dockerfile: Dockerfile
container_name: presentation-renderer-node1
ports:
- "9212:9212"
environment:
- BRAND_REGISTRY_URL=http://brand-registry:9210
- PRESENTATION_DATA=/data/presentations
volumes:
- ${DEPLOY_ROOT:-.}/logs:/app/logs
- presentation-data-node1:/data/presentations
depends_on:
- brand-registry
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:9212/health')\""]
interval: 30s
timeout: 10s
retries: 3
start_period: 10s
# Memory Service
memory-service:
build:
context: ./services/memory-service
dockerfile: Dockerfile
container_name: dagi-memory-service-node1
ports:
- "8000:8000"
environment:
# PostgreSQL connection (uses MEMORY_ prefix as per config.py)
- MEMORY_POSTGRES_HOST=dagi-postgres
- MEMORY_POSTGRES_PORT=5432
- MEMORY_POSTGRES_USER=daarion
- MEMORY_POSTGRES_PASSWORD=DaarionDB2026!
- MEMORY_POSTGRES_DB=daarion_memory
# Qdrant connection
- MEMORY_QDRANT_HOST=dagi-qdrant-node1
- MEMORY_QDRANT_PORT=6333
# Cohere for embeddings
- MEMORY_COHERE_API_KEY=nOdOXnuepLku2ipJWpe6acWgAsJCsDhMO0RnaEJB
- MEMORY_DEBUG=false
volumes:
- ${DEPLOY_ROOT:-.}/logs:/app/logs
depends_on:
- qdrant
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8000/health')\""]
interval: 30s
timeout: 10s
retries: 3
start_period: 10s
# PostgreSQL (pgvector)
dagi-postgres:
image: pgvector/pgvector:pg16
container_name: dagi-postgres
ports:
- "5432:5432"
environment:
- POSTGRES_USER=daarion
- POSTGRES_PASSWORD=DaarionDB2026!
- POSTGRES_DB=daarion_main
volumes:
- postgres_data_node1:/var/lib/postgresql/data
networks:
- dagi-network
restart: unless-stopped
# Qdrant Vector Database
qdrant:
image: qdrant/qdrant:v1.13.6
container_name: dagi-qdrant-node1
ulimits:
nofile:
soft: 65536
hard: 65536
ports:
- "6333:6333" # HTTP API
- "6334:6334" # gRPC API
volumes:
- qdrant-data-node1:/qdrant/storage
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "true"]
interval: 30s
timeout: 10s
retries: 3
# Neo4j Graph Database
neo4j:
image: neo4j:5.26-community
container_name: dagi-neo4j-node1
ports:
- "7474:7474" # HTTP
- "7687:7687" # Bolt
environment:
- NEO4J_AUTH=neo4j/DaarionNeo4j2026!
- NEO4J_PLUGINS=["apoc"]
- NEO4J_server_memory_heap_initial__size=512m
- NEO4J_server_memory_heap_max__size=2G
- NEO4J_server_config_strict__validation_enabled=false
volumes:
- neo4j-data-node1:/data
- neo4j-logs-node1:/logs
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:7474"]
interval: 30s
timeout: 10s
retries: 3
# Redis Cache
redis:
image: redis:8-alpine
container_name: dagi-redis-node1
ports:
- "6379:6379"
volumes:
- redis-data-node1:/data
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "redis-cli", "PING"]
interval: 30s
timeout: 5s
retries: 3
# Vision Encoder Service - OpenCLIP for text/image embeddings
vision-encoder:
build:
context: ./services/vision-encoder
dockerfile: Dockerfile
container_name: dagi-vision-encoder-node1
ports:
- "8001:8001"
environment:
- DEVICE=cpu # НОДА1 без GPU
- MODEL_NAME=${VISION_MODEL_NAME:-ViT-L-14}
- MODEL_PRETRAINED=${VISION_MODEL_PRETRAINED:-openai}
- NORMALIZE_EMBEDDINGS=true
- QDRANT_HOST=qdrant
- QDRANT_PORT=6333
- QDRANT_ENABLED=true
volumes:
- ${DEPLOY_ROOT:-.}/logs:/app/logs
- vision-model-cache-node1:/root/.cache/clip
depends_on:
- qdrant
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8001/health')\""]
interval: 30s
timeout: 10s
retries: 3
start_period: 60s
# OCR тепер через Swapper Service (got-ocr2, donut-base, donut-cord моделі)
# Explicit volume names to prevent prefix mismatch
# This ensures volumes are always named consistently regardless of COMPOSE_PROJECT_NAME
# E2E Agent Prober - monitors agent pipeline health
agent-e2e-prober:
build:
context: ./services/agent-e2e-prober
dockerfile: Dockerfile
container_name: agent-e2e-prober-node1
ports:
- "9108:9108"
environment:
- GATEWAY_URL=http://gateway:9300
- PROBE_INTERVAL=60
- PROBE_TIMEOUT=30
- METRICS_PORT=9108
- SEMANTIC_AGENTS=clan,sofiia,monitor,helion,agromatrix,senpai
networks:
- dagi-network
restart: unless-stopped
depends_on:
- gateway
# === Market Data Pipeline (added 2026-02-09) ===
market-data-service:
container_name: dagi-market-data-node1
restart: unless-stopped
build:
context: ./services/market-data-service
dockerfile: Dockerfile
environment:
- BINANCE_WS_URL=wss://stream.binance.com:9443/ws
- BYBIT_WS_URL=wss://stream.bybit.com/v5/public/spot
- ALPACA_DRY_RUN=true
- SQLITE_URL=sqlite+aiosqlite:////data/market_data.db
- JSONL_PATH=/data/events.jsonl
- HTTP_HOST=0.0.0.0
- HTTP_PORT=8891
- NATS_URL=nats://nats:4222
- NATS_ENABLED=true
- NATS_SUBJECT_PREFIX=md.events
- LOG_LEVEL=INFO
- LOG_SAMPLE_RATE=500
ports:
- "8893:8891"
volumes:
- market-data-node1:/data
networks:
- dagi-network
depends_on:
- nats
command: ["run", "--provider", "binance,bybit", "--symbols", "BTCUSDT,ETHUSDT,BNBUSDT,SOLUSDT,XRPUSDT,ADAUSDT,DOGEUSDT,AVAXUSDT,DOTUSDT,LINKUSDT,POLUSDT,SHIBUSDT,TRXUSDT,UNIUSDT,LTCUSDT,ATOMUSDT,NEARUSDT,ICPUSDT,FILUSDT,APTUSDT,PAXGUSDT"]
healthcheck:
test:
- CMD-SHELL
- python -c "import urllib.request; urllib.request.urlopen('http://localhost:8891/health')"
interval: 15s
timeout: 5s
retries: 3
start_period: 10s
senpai-md-consumer:
container_name: dagi-senpai-md-consumer-node1
restart: unless-stopped
build:
context: ./services/senpai-md-consumer
dockerfile: Dockerfile
environment:
- NATS_URL=nats://nats:4222
- NATS_SUBJECT=md.events.>
- NATS_QUEUE_GROUP=senpai-md
- FEATURES_ENABLED=true
- FEATURES_PUB_RATE_HZ=10
- FEATURES_PUB_SUBJECT=senpai.features
- SIGNALS_PUB_SUBJECT=senpai.signals
- ALERTS_PUB_SUBJECT=senpai.alerts
- LOG_LEVEL=INFO
- HTTP_PORT=8892
ports:
- "8892:8892"
networks:
- dagi-network
depends_on:
nats:
condition: service_started
market-data-service:
condition: service_healthy
healthcheck:
test:
- CMD-SHELL
- python -c "import urllib.request; urllib.request.urlopen('http://localhost:8892/health')"
interval: 15s
timeout: 10s
retries: 5
start_period: 15s
binance-bot-monitor:
build:
context: ./services/binance-bot-monitor
dockerfile: Dockerfile
container_name: dagi-binance-bot-monitor-node1
restart: unless-stopped
environment:
- REDIS_URL=redis://redis:6379/0
- CRAWL4AI_URL=http://crawl4ai:11235
- SWAPPER_URL=http://swapper-service:8890
- BINANCE_CACHE_TTL=3600
- BINANCE_REFRESH_INTERVAL=1800
- BINANCE_API_KEY=${BINANCE_API_KEY:-}
- BINANCE_SECRET_KEY=${BINANCE_SECRET_KEY:-}
networks:
- dagi-network
# ── FarmOS (v4.3 integration) ────────────────────────────────────────────────
# PostgreSQL для farmOS (окрема БД, не чіпає dagi-postgres)
dagi-farmos-db-node1:
image: postgres:16-alpine
container_name: dagi-farmos-db-node1
restart: unless-stopped
environment:
- POSTGRES_DB=farmos
- POSTGRES_USER=farmos
- POSTGRES_PASSWORD=${FARMOS_DB_PASS}
volumes:
- farmos-db-data-node1:/var/lib/postgresql/data
networks:
- dagi-network
healthcheck:
test: ["CMD-SHELL", "pg_isready -U farmos -d farmos"]
interval: 10s
timeout: 5s
retries: 10
start_period: 15s
  # farmOS Drupal application (4.x — current stable; amd64 image for the x86_64 server)
  dagi-farmos-node1:
    image: farmos/farmos:4.x-amd64
    container_name: dagi-farmos-node1
    restart: unless-stopped
    depends_on:
      # Wait for the dedicated Postgres instance to pass its healthcheck.
      dagi-farmos-db-node1:
        condition: service_healthy
    environment:
      - FARMOS_DB_HOST=dagi-farmos-db-node1
      - FARMOS_DB_NAME=farmos
      - FARMOS_DB_USER=farmos
      - FARMOS_DB_PASSWORD=${FARMOS_DB_PASS}
      - FARMOS_DB_DRIVER=pgsql
    volumes:
      # Drupal "sites" directory (settings.php, uploaded files) must persist.
      - farmos-sites-node1:/opt/drupal/web/sites
    networks:
      - dagi-network
    ports:
      # Reachable only from localhost; for browser-based setup use an SSH
      # tunnel: ssh -L 8088:localhost:8088
      # NOTE(review): host port 8088 — confirm it does not clash with any
      # other host-published service on this node.
      - "127.0.0.1:8088:80"
    healthcheck:
      # 403 = alive but Drupal requires auth/initial setup; fail-open healthcheck
      test: ["CMD-SHELL", "curl -sSo /dev/null -w '%{http_code}' http://localhost:80/ | grep -qE '(200|301|302|401|403)' || exit 1"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 60s
nats-js-init:
image: natsio/nats-box:latest
container_name: dagi-nats-js-init-node1
depends_on:
- nats
networks:
- dagi-network
restart: "no"
command:
- sh
- -c
- >-
sleep 3 &&
(nats --server nats://nats:4222 stream info ATTACHMENTS >/dev/null 2>&1 || nats --server nats://nats:4222 stream add ATTACHMENTS --subjects='attachments.>' --storage=file --retention=limits --max-age=168h --discard=old --replicas=1 --defaults) &&
(nats --server nats://nats:4222 stream info TASKS >/dev/null 2>&1 || nats --server nats://nats:4222 stream add TASKS --subjects='tasks.>' --storage=file --retention=limits --max-age=168h --discard=old --replicas=1 --defaults) &&
(nats --server nats://nats:4222 stream info MESSAGES >/dev/null 2>&1 || nats --server nats://nats:4222 stream add MESSAGES --subjects='messages.>' --storage=file --retention=limits --max-age=168h --discard=old --replicas=1 --defaults) &&
echo JetStream_streams_ready
# ── Parser Pipeline (NATS ATTACHMENTS consumer → Swapper) ───────────────────
parser-pipeline:
build:
context: ./services/parser-pipeline
dockerfile: Dockerfile
container_name: parser-pipeline
environment:
- NATS_URL=nats://nats:4222
- SWAPPER_URL=http://swapper-service:8890
- MEMORY_SERVICE_URL=http://memory-service:8000
- COHERE_API_KEY=
depends_on:
nats:
condition: service_healthy
swapper-service:
condition: service_healthy
networks:
- dagi-network
restart: unless-stopped
# ── Ingest Service (HTTP upload → NATS ATTACHMENTS) ─────────────────────────
ingest-service:
build:
context: ./services/ingest-service
dockerfile: Dockerfile
container_name: ingest-service
ports:
- 8100:8100
environment:
- NATS_URL=nats://nats:4222
- SWAPPER_URL=http://swapper-service:8890
depends_on:
nats:
condition: service_healthy
networks:
- dagi-network
restart: unless-stopped
# ─── Sofiia Console (Network Control Panel) ────────────────────────────────
dagi-sofiia-console-node1:
build:
context: ./services/sofiia-console
args:
BUILD_SHA: "${BUILD_SHA:-dev}"
BUILD_TIME: "${BUILD_TIME:-local}"
container_name: dagi-sofiia-console-node1
ports:
- "8002:8002"
environment:
- PORT=8002
- ENV=prod
- NODE_ID=NODA1
- BUILD_SHA=${BUILD_SHA:-dev}
- BUILD_TIME=${BUILD_TIME:-local}
- SOFIIA_DATA_DIR=/data/sofiia
- NODES_REGISTRY_PATH=/config/nodes_registry.yml
- NODES_NODA1_SSH_PASSWORD=bRhfV7uNY9m6er
- ROUTER_URL=http://dagi-router-node1:8000
- GATEWAY_URL=http://dagi-gateway-node1:9300
- MEMORY_SERVICE_URL=http://dagi-memory-service-node1:8000
- OLLAMA_URL=http://172.18.0.1:11434
- SWAPPER_URL=http://swapper-service-node1:8890
- XAI_API_KEY=${XAI_API_KEY:-}
- GLM5_API_KEY=${GLM5_API_KEY:-}
- SOFIIA_CONSOLE_API_KEY=${SOFIIA_CONSOLE_API_KEY:-}
- SOFIIA_CONSOLE_TEAM_KEYS=${SOFIIA_CONSOLE_TEAM_KEYS:-}
- SOFIIA_INTERNAL_TOKEN=${SOFIIA_INTERNAL_TOKEN:-}
# aurora-service not deployed on NODA1 — set explicit URL to avoid DNS lookup failure
- AURORA_SERVICE_URL=http://127.0.0.1:9401
volumes:
- sofiia-console-data-node1:/data/sofiia
- /opt/microdao-daarion/config/nodes_registry.yml:/config/nodes_registry.yml:ro
healthcheck:
test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8002/api/meta/version', timeout=5)"]
interval: 30s
timeout: 10s
retries: 3
start_period: 20s
networks:
- dagi-network
restart: unless-stopped
# Named volumes for NODE1. All are local-driver except the Qdrant volume,
# which pre-exists outside this compose project (external: true).
volumes:
  sofiia-console-data-node1:
    name: sofiia-console-data-node1
    driver: local
  qdrant-data-node1:
    # Created outside this file; compose will not create or remove it.
    name: microdao-daarion_qdrant-data-node1
    external: true
  neo4j-data-node1:
    name: neo4j-data-node1
    driver: local
  neo4j-logs-node1:
    name: neo4j-logs-node1
    driver: local
  redis-data-node1:
    name: redis-data-node1
    driver: local
  vision-model-cache-node1:
    name: vision-model-cache-node1
    driver: local
  docling-model-cache-node1:
    name: docling-model-cache-node1
    driver: local
  swapper-hf-cache-node1:
    name: swapper-hf-cache-node1
    driver: local
  brand-registry-data-node1:
    name: brand-registry-data-node1
    driver: local
  brand-intake-data-node1:
    name: brand-intake-data-node1
    driver: local
  presentation-data-node1:
    name: presentation-data-node1
    driver: local
  nats-data-node1:
    name: nats-data-node1
    driver: local
  minio-data-node1:
    name: minio-data-node1
    driver: local
  # NOTE(review): snake_case key among kebab-case siblings; presumably
  # referenced by this key elsewhere in the file, so renaming it would break
  # those mounts — left as-is. The on-disk volume name is kebab-case anyway.
  postgres_data_node1:
    name: postgres-data-node1
    driver: local
  market-data-node1:
    name: market-data-node1
    driver: local
  oneok-espocrm-db-node1:
    name: oneok-espocrm-db-node1
    driver: local
  oneok-crm-data-node1:
    name: oneok-crm-data-node1
    driver: local
  # farmOS persistent volumes (v4.3)
  farmos-db-data-node1:
    name: farmos-db-data-node1
    driver: local
  farmos-sites-node1:
    name: farmos-sites-node1
    driver: local
# Shared network, created outside this compose project (e.g. `docker network
# create dagi-network`); compose attaches to it but never creates/removes it.
networks:
  dagi-network:
    external: true