#!/usr/bin/env bash
# Sofiia Console — NODA2 local dev startup
# Runs without API key (localhost bypass active), uses Grok by default.
# Usage: ./start-local.sh
#
# Required env (loaded from root .env if present): XAI_API_KEY, GLM5_API_KEY.
# All SOFIIA_* tuning vars may be pre-set in the environment to override defaults.

# Strict mode: fail on errors, unset variables, and mid-pipeline failures.
set -euo pipefail

cd "$(dirname "$0")"

# Load root .env if it exists (picks up XAI_API_KEY, DEEPSEEK_API_KEY, etc.).
# set -a exports every variable the file assigns.
if [ -f "../../.env" ]; then
  set -a
  # shellcheck disable=SC1091 — path resolved at runtime
  source "../../.env"
  set +a
fi

# Dev mode — no auth for localhost
export ENV=dev
export PORT=8002

# === Sofiia's HOME is NODA2 (MacBook) ===
# Primary LLM: Grok 4.1 Fast Reasoning (per AGENTS.md)
# XAI_API_KEY, GLM5_API_KEY loaded from root .env above
# Quick tasks: GLM-5
# Local/offline: NODA2 Ollama (qwen3:14b, qwen3.5:35b-a3b, etc.)

# NODA2 local Ollama — every knob honors a pre-set value, falling back to a default.
export OLLAMA_URL=http://localhost:11434
export SOFIIA_PREFERRED_CHAT_MODEL="${SOFIIA_PREFERRED_CHAT_MODEL:-ollama:qwen3:14b}"
export SOFIIA_OLLAMA_TIMEOUT_SEC="${SOFIIA_OLLAMA_TIMEOUT_SEC:-120}"
export SOFIIA_OLLAMA_VOICE_TIMEOUT_SEC="${SOFIIA_OLLAMA_VOICE_TIMEOUT_SEC:-45}"
export SOFIIA_OLLAMA_KEEP_ALIVE="${SOFIIA_OLLAMA_KEEP_ALIVE:-30m}"
export SOFIIA_OLLAMA_NUM_CTX="${SOFIIA_OLLAMA_NUM_CTX:-8192}"
export SOFIIA_OLLAMA_NUM_THREAD="${SOFIIA_OLLAMA_NUM_THREAD:-8}"
export SOFIIA_OLLAMA_NUM_GPU="${SOFIIA_OLLAMA_NUM_GPU:--1}"
export SOFIIA_OLLAMA_NUM_PREDICT_TEXT="${SOFIIA_OLLAMA_NUM_PREDICT_TEXT:-768}"

# NODA2 memory service
export MEMORY_SERVICE_URL=http://localhost:8000

# NODA1 services (optional — for Router/Telegram context)
export ROUTER_URL=http://144.76.224.179:9102
export GATEWAY_URL=http://144.76.224.179:9300

# Data dir
export SOFIIA_DATA_DIR="$HOME/.sofiia/console-data"
mkdir -p "$SOFIIA_DATA_DIR"

# Activate venv if present. Activation scripts may reference unset vars
# (e.g. PS1 in non-interactive shells), so relax -u around the source.
if [ -d "venv" ]; then
  set +u
  source venv/bin/activate
  set -u
elif [ -d "../../venv" ]; then
  set +u
  source ../../venv/bin/activate
  set -u
fi

# Key previews: default-expand first so unset keys don't abort under set -u.
xai_key_preview="${XAI_API_KEY:-}"
glm5_key_preview="${GLM5_API_KEY:-}"

echo "🚀 Sofiia Console — http://localhost:8002 (НОДА2, без авторизації)"
echo "   Primary: Grok 4.1 Fast Reasoning (AGENTS.md)"
echo "   XAI_API_KEY: ${xai_key_preview:0:12}..."
echo "   GLM5_API_KEY: ${glm5_key_preview:0:12}..."
echo "   OLLAMA_URL: $OLLAMA_URL (НОДА2 local models)"
echo "   Preferred: $SOFIIA_PREFERRED_CHAT_MODEL"
echo "   Ollama tune: ctx=$SOFIIA_OLLAMA_NUM_CTX threads=$SOFIIA_OLLAMA_NUM_THREAD gpu=$SOFIIA_OLLAMA_NUM_GPU keep_alive=$SOFIIA_OLLAMA_KEEP_ALIVE"
echo "   Models: qwen3:14b, qwen3.5:35b-a3b, glm-4.7-flash, deepseek-r1:70b..."
echo ""

# exec replaces the shell so uvicorn receives signals (Ctrl-C, SIGTERM) directly.
exec python -m uvicorn app.main:app --host 127.0.0.1 --port "$PORT" --reload