feat(sofiia-console): add multi-user team key auth + fix aurora DNS env

- auth.py: adds SOFIIA_CONSOLE_TEAM_KEYS="name:key,..." support;
  require_auth now returns identity ("operator"/"user:<name>") for audit;
  validate_any_key checks primary + team keys; login sets per-user cookie
- main.py: auth/login+check endpoints return identity field;
  imports validate_any_key, _expected_team_cookie_tokens from auth
- docker-compose.node1.yml: adds SOFIIA_CONSOLE_TEAM_KEYS env var;
  adds AURORA_SERVICE_URL=http://127.0.0.1:9401 to prevent DNS lookup
  failure for aurora-service (not deployed on NODA1)

Made-with: Cursor
This commit is contained in:
Apple
2026-03-03 06:38:26 -08:00
parent 32989525fb
commit e0bea910b9
3 changed files with 300 additions and 57 deletions

View File

@@ -30,6 +30,7 @@ services:
- GROK_API_KEY=xai-CpoLMPgw91NP9AEdHPhIrvU4ZnhV1q1P8BJBKCpD5kTPFRXJmTOkgGNHwYdZpXMlRxBgHcgcSlIXccxh
- VISION_ENCODER_URL=http://vision-encoder:8001
- SWAPPER_SERVICE_URL=http://swapper-service:8890
- BINANCE_MONITOR_URL=http://dagi-binance-bot-monitor-node1:8893
- IMAGE_GEN_URL=http://swapper-service:8890/image/generate
- STT_SERVICE_URL=http://swapper-service:8890
- STT_SERVICE_UPLOAD_URL=http://swapper-service:8890/stt
@@ -53,14 +54,20 @@ services:
- ENABLE_GLOBAL_CAPS_NATS=true
- OLLAMA_URL=http://172.18.0.1:11434
- PREFER_NODE_WORKER=true
- ENABLE_CREW_MODEL_ROUTING=1
- CREW_SMALL_MODEL=smollm2:135m
- CREWAI_WORKER_LLM_PROFILE=crew_local_27b
volumes:
- ${DEPLOY_ROOT:-.}/services/router/router_config.yaml:/app/router_config.yaml:ro
- ${DEPLOY_ROOT:-.}/services/router/router-config.yml:/app/router-config.yml:ro
- ${DEPLOY_ROOT:-.}/config/crewai_agents.json:/config/crewai_agents.json:ro
- ${DEPLOY_ROOT:-.}/gateway-bot:/app/prompts:ro
- ${DEPLOY_ROOT:-.}/logs:/app/logs
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8000/health')\""]
interval: 30s
@@ -78,15 +85,17 @@ services:
- "8890:8890"
- "8891:8891" # Metrics
environment:
- OLLAMA_BASE_URL=http://172.18.0.1:11434
- OLLAMA_BASE_URL=http://host.docker.internal:11434
- SWAPPER_CONFIG_PATH=/app/config/swapper_config.yaml
- SWAPPER_MODE=single-active
- MAX_CONCURRENT_MODELS=2 # 1 LLM + 1 OCR
- MAX_CONCURRENT_MODELS=1
- MODEL_SWAP_TIMEOUT=300
- GPU_ENABLED=true
- NODE_ID=node-1-hetzner-gex44
- GPU_ENABLED=false
- NODE_ID=noda1
- HF_HOME=/root/.cache/huggingface
- CUDA_VISIBLE_DEVICES=0
- CUDA_VISIBLE_DEVICES=
- WHISPER_DEVICE=cpu
- WHISPER_COMPUTE_TYPE=int8
- CRAWL4AI_URL=http://crawl4ai:11235
# Cloud API keys for video/image generation
- GROK_API_KEY=xai-CpoLMPgw91NP9AEdHPhIrvU4ZnhV1q1P8BJBKCpD5kTPFRXJmTOkgGNHwYdZpXMlRxBgHcgcSlIXccxh
@@ -95,19 +104,11 @@ services:
- ${DEPLOY_ROOT:-.}/services/swapper-service/config/swapper_config_node1.yaml:/app/config/swapper_config.yaml:ro
- ${DEPLOY_ROOT:-.}/logs:/app/logs
- swapper-hf-cache-node1:/root/.cache/huggingface
# GPU support for OCR models
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [gpu]
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:172.18.0.1"
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "wget -qO- http://localhost:8890/health || exit 1"]
interval: 30s
@@ -133,6 +134,8 @@ services:
volumes:
- ${DEPLOY_ROOT:-.}/third_party/nature-id:/opt/nature-id:ro
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8085/health')\""]
interval: 30s
@@ -142,7 +145,7 @@ services:
# Crawl4AI - Advanced Web Crawler with JavaScript support
crawl4ai:
image: unclecode/crawl4ai@sha256:4d8b065bf185962733cb5f9701f4122d03383fa1ab6b5f6a9873f04fa0416a84
image: unclecode/crawl4ai:latest
container_name: dagi-crawl4ai-node1
ports:
- "11235:11235"
@@ -152,6 +155,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:11235/health"]
interval: 30s
@@ -164,10 +169,16 @@ services:
build:
context: ./gateway-bot
dockerfile: Dockerfile
args:
BUILD_SHA: "${BUILD_SHA:-dev}"
BUILD_TIME: "${BUILD_TIME:-local}"
container_name: dagi-gateway-node1
ports:
- "9300:9300"
environment:
- BUILD_SHA=${BUILD_SHA:-dev}
- BUILD_TIME=${BUILD_TIME:-local}
- NODE_ID=NODA1
- ROUTER_URL=${ROUTER_URL:-http://dagi-staging-router:8000}
- GATEWAY_MAX_TOKENS_CONCISE=350
- GATEWAY_MAX_TOKENS_SENPAI_DEFAULT=700
@@ -233,6 +244,9 @@ services:
- CREWAI_SERVICE_URL=http://dagi-staging-crewai-service:9010
- AGROMATRIX_REVIEW_AUTH_MODE=${AGROMATRIX_REVIEW_AUTH_MODE:-bearer}
- AGROMATRIX_REVIEW_BEARER_TOKENS=${AGROMATRIX_REVIEW_BEARER_TOKENS}
- ENABLE_CREW_MODEL_ROUTING=1
- CREW_SMALL_MODEL=smollm2:135m
- CREWAI_WORKER_LLM_PROFILE=crew_local_27b
# v4.3 FarmOS integration (fail-closed: якщо пусто — агент повідомить "не налаштований")
- FARMOS_BASE_URL=http://dagi-farmos-node1
- FARMOS_TOKEN=${FARMOS_TOKEN:-}
@@ -251,6 +265,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:9300/health')\""]
interval: 30s
@@ -278,6 +294,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "python", "-c", "print(\"ok\")"]
interval: 30s
@@ -324,6 +342,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "python", "-c", "print(\"ok\")"]
interval: 30s
@@ -354,6 +374,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "python", "-c", "print(\"ok\")"]
interval: 30s
@@ -492,12 +514,14 @@ services:
- dagi-network
restart: unless-stopped
# Node Capabilities Service (model inventory for router)
# Node Capabilities Service (model inventory + load metrics)
node-capabilities:
build:
context: ./services/node-capabilities
dockerfile: Dockerfile
container_name: node-capabilities-node1
ports:
- "127.0.0.1:8099:8099"
environment:
- NODE_ID=noda1
- OLLAMA_BASE_URL=http://172.18.0.1:11434
@@ -546,22 +570,31 @@ services:
# NATS (JetStream)
nats:
image: nats:2.10-alpine
image: nats:2.11-alpine
container_name: dagi-nats-node1
ports:
- "4222:4222"
command: ["-js"]
- "8222:8222" # HTTP monitoring
- "7422:7422" # Leafnode hub (NODA2/NODA3 connect here)
command: ["-c", "/etc/nats/nats-node1.conf"]
volumes:
- nats-data-node1:/data
- ./config/nats/nats-node1.conf:/etc/nats/nats-node1.conf:ro
networks:
dagi-network:
aliases:
- nats
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "wget -qO- http://localhost:8222/healthz || exit 1"]
interval: 5s
timeout: 3s
retries: 10
start_period: 5s
# MinIO Object Storage
minio:
image: minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
image: minio/minio:latest
container_name: dagi-minio-node1
ports:
- "9000:9000"
@@ -604,6 +637,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:9220/health')\""]
interval: 30s
@@ -627,6 +662,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "wget", "-qO-", "http://localhost:9500/health"]
interval: 10s
@@ -648,9 +685,12 @@ services:
- MINIO_BUCKET=artifacts
- MINIO_SECURE=false
depends_on:
- nats
- artifact-registry
- minio
nats:
condition: service_healthy
artifact-registry:
condition: service_started
minio:
condition: service_started
networks:
- dagi-network
restart: unless-stopped
@@ -695,10 +735,14 @@ services:
- MINIO_SECURE=false
- INDEX_DOC_MAX_BYTES=52428800
depends_on:
- nats
- artifact-registry
- rag-service
- minio
nats:
condition: service_healthy
artifact-registry:
condition: service_started
rag-service:
condition: service_started
minio:
condition: service_started
networks:
- dagi-network
restart: unless-stopped
@@ -719,6 +763,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:9210/health')\""]
interval: 30s
@@ -747,6 +793,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:9211/health')\""]
interval: 30s
@@ -773,6 +821,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:9212/health')\""]
interval: 30s
@@ -808,6 +858,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8000/health')\""]
interval: 30s
@@ -833,7 +885,7 @@ services:
# Qdrant Vector Database
qdrant:
image: qdrant/qdrant:v1.7.4
image: qdrant/qdrant:v1.13.6
container_name: dagi-qdrant-node1
ulimits:
nofile:
@@ -847,6 +899,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "true"]
interval: 30s
@@ -855,7 +909,7 @@ services:
# Neo4j Graph Database
neo4j:
image: neo4j:5.15-community
image: neo4j:5.26-community
container_name: dagi-neo4j-node1
ports:
- "7474:7474" # HTTP
@@ -863,14 +917,17 @@ services:
environment:
- NEO4J_AUTH=neo4j/DaarionNeo4j2026!
- NEO4J_PLUGINS=["apoc"]
- NEO4J_dbms_memory_heap_initial__size=512m
- NEO4J_dbms_memory_heap_max__size=2G
- NEO4J_server_memory_heap_initial__size=512m
- NEO4J_server_memory_heap_max__size=2G
- NEO4J_server_config_strict__validation_enabled=false
volumes:
- neo4j-data-node1:/data
- neo4j-logs-node1:/logs
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:7474"]
interval: 30s
@@ -879,7 +936,7 @@ services:
# Redis Cache
redis:
image: redis:7-alpine
image: redis:8-alpine
container_name: dagi-redis-node1
ports:
- "6379:6379"
@@ -888,6 +945,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "redis-cli", "PING"]
interval: 30s
@@ -918,6 +977,8 @@ services:
networks:
- dagi-network
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8001/health')\""]
interval: 30s
@@ -979,7 +1040,7 @@ services:
- dagi-network
depends_on:
- nats
command: ["run", "--provider", "binance,bybit", "--symbols", "BTCUSDT,ETHUSDT"]
command: ["run", "--provider", "binance,bybit", "--symbols", "BTCUSDT,ETHUSDT,BNBUSDT,SOLUSDT,XRPUSDT,ADAUSDT,DOGEUSDT,AVAXUSDT,DOTUSDT,LINKUSDT,POLUSDT,SHIBUSDT,TRXUSDT,UNIUSDT,LTCUSDT,ATOMUSDT,NEARUSDT,ICPUSDT,FILUSDT,APTUSDT,PAXGUSDT"]
healthcheck:
test:
- CMD-SHELL
@@ -1084,17 +1145,121 @@ services:
# Доступний тільки локально; для браузерного setup — SSH tunnel: ssh -L 8088:localhost:8088
- "127.0.0.1:8088:80"
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://localhost:80 -o /dev/null || exit 1"]
# 403 = alive but Drupal requires auth/initial setup; fail-open healthcheck
test: ["CMD-SHELL", "curl -sSo /dev/null -w '%{http_code}' http://localhost:80/ | grep -qE '(200|301|302|401|403)' || exit 1"]
interval: 30s
timeout: 10s
retries: 5
start_period: 60s
nats-js-init:
image: natsio/nats-box:latest
container_name: dagi-nats-js-init-node1
depends_on:
- nats
networks:
- dagi-network
restart: "no"
command:
- sh
- -c
- >-
sleep 3 &&
(nats --server nats://nats:4222 stream info ATTACHMENTS >/dev/null 2>&1 || nats --server nats://nats:4222 stream add ATTACHMENTS --subjects='attachments.>' --storage=file --retention=limits --max-age=168h --discard=old --replicas=1 --defaults) &&
(nats --server nats://nats:4222 stream info TASKS >/dev/null 2>&1 || nats --server nats://nats:4222 stream add TASKS --subjects='tasks.>' --storage=file --retention=limits --max-age=168h --discard=old --replicas=1 --defaults) &&
(nats --server nats://nats:4222 stream info MESSAGES >/dev/null 2>&1 || nats --server nats://nats:4222 stream add MESSAGES --subjects='messages.>' --storage=file --retention=limits --max-age=168h --discard=old --replicas=1 --defaults) &&
echo JetStream_streams_ready
# ── Parser Pipeline (NATS ATTACHMENTS consumer → Swapper) ───────────────────
parser-pipeline:
build:
context: ./services/parser-pipeline
dockerfile: Dockerfile
container_name: parser-pipeline
environment:
- NATS_URL=nats://nats:4222
- SWAPPER_URL=http://swapper-service:8890
- MEMORY_SERVICE_URL=http://memory-service:8000
- COHERE_API_KEY=
depends_on:
nats:
condition: service_healthy
swapper-service:
condition: service_healthy
networks:
- dagi-network
restart: unless-stopped
# ── Ingest Service (HTTP upload → NATS ATTACHMENTS) ─────────────────────────
ingest-service:
build:
context: ./services/ingest-service
dockerfile: Dockerfile
container_name: ingest-service
ports:
- 8100:8100
environment:
- NATS_URL=nats://nats:4222
- SWAPPER_URL=http://swapper-service:8890
depends_on:
nats:
condition: service_healthy
networks:
- dagi-network
restart: unless-stopped
# ─── Sofiia Console (Network Control Panel) ────────────────────────────────
dagi-sofiia-console-node1:
build:
context: ./services/sofiia-console
args:
BUILD_SHA: "${BUILD_SHA:-dev}"
BUILD_TIME: "${BUILD_TIME:-local}"
container_name: dagi-sofiia-console-node1
ports:
- "8002:8002"
environment:
- PORT=8002
- ENV=prod
- NODE_ID=NODA1
- BUILD_SHA=${BUILD_SHA:-dev}
- BUILD_TIME=${BUILD_TIME:-local}
- SOFIIA_DATA_DIR=/data/sofiia
- NODES_REGISTRY_PATH=/config/nodes_registry.yml
- NODES_NODA1_SSH_PASSWORD=bRhfV7uNY9m6er
- ROUTER_URL=http://dagi-router-node1:8000
- GATEWAY_URL=http://dagi-gateway-node1:9300
- MEMORY_SERVICE_URL=http://dagi-memory-service-node1:8000
- OLLAMA_URL=http://172.18.0.1:11434
- SWAPPER_URL=http://swapper-service-node1:8890
- XAI_API_KEY=${XAI_API_KEY:-}
- GLM5_API_KEY=${GLM5_API_KEY:-}
- SOFIIA_CONSOLE_API_KEY=${SOFIIA_CONSOLE_API_KEY:-}
- SOFIIA_CONSOLE_TEAM_KEYS=${SOFIIA_CONSOLE_TEAM_KEYS:-}
# aurora-service not deployed on NODA1 — set explicit URL to avoid DNS lookup failure
- AURORA_SERVICE_URL=http://127.0.0.1:9401
volumes:
- sofiia-console-data-node1:/data/sofiia
- /opt/microdao-daarion/config/nodes_registry.yml:/config/nodes_registry.yml:ro
healthcheck:
test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8002/api/meta/version', timeout=5)"]
interval: 30s
timeout: 10s
retries: 3
start_period: 20s
networks:
- dagi-network
restart: unless-stopped
volumes:
qdrant-data-node1:
name: qdrant-data-node1
sofiia-console-data-node1:
name: sofiia-console-data-node1
driver: local
qdrant-data-node1:
name: microdao-daarion_qdrant-data-node1
external: true
neo4j-data-node1:
name: neo4j-data-node1
driver: local
@@ -1150,6 +1315,7 @@ volumes:
name: farmos-sites-node1
driver: local
networks:
dagi-network:
external: true

View File

@@ -9,12 +9,18 @@ All protected endpoints check:
1. Cookie "console_token" (browser sessions)
2. X-API-Key header (backward compat: curl, API clients)
Single operator key: SOFIIA_CONSOLE_API_KEY
Team keys (multi-user): SOFIIA_CONSOLE_TEAM_KEYS = "alice:key1,bob:key2,sergiy:key3"
- Each user gets a personal key; login stores a per-user cookie token.
- require_auth returns "user:<name>" for audit identification.
Dev mode (ENV != prod, no key configured): open access.
"""
import hashlib
import logging
import os
import secrets
from typing import Dict, Optional, Tuple
from fastapi import Cookie, HTTPException, Request, Security
from fastapi.security import APIKeyHeader
@@ -32,13 +38,46 @@ def get_console_api_key() -> str:
return os.getenv("SOFIIA_CONSOLE_API_KEY", "").strip()
def _get_team_keys() -> Dict[str, str]:
"""
Parse SOFIIA_CONSOLE_TEAM_KEYS = "alice:key1,bob:key2"
Returns {name: key} mapping. Entries with empty name or key are skipped.
"""
raw = os.getenv("SOFIIA_CONSOLE_TEAM_KEYS", "").strip()
if not raw:
return {}
result: Dict[str, str] = {}
for entry in raw.split(","):
entry = entry.strip()
if ":" not in entry:
continue
name, _, key = entry.partition(":")
name = name.strip()
key = key.strip()
if name and key:
result[name] = key
return result
def _key_valid(provided: str) -> bool:
    """
    Check *provided* against the primary console key.

    Returns True when no primary key is configured (open access); otherwise
    does a constant-time comparison against SOFIIA_CONSOLE_API_KEY.

    Fix: the previous text contained two consecutive ``return True`` lines,
    which made the ``compare_digest`` check unreachable — every key was
    accepted regardless of the configured value.
    """
    configured = get_console_api_key()
    if not configured:
        return True  # no key set → open
    return secrets.compare_digest(provided.strip(), configured)
def _team_key_identity(provided: str) -> Optional[str]:
    """
    Match *provided* against the configured team keys.

    Returns the owning user's name on a match, otherwise None.
    """
    candidate = provided.strip()  # normalize once, outside the loop
    for user, secret_key in _get_team_keys().items():
        if secrets.compare_digest(candidate, secret_key):
            return user
    return None
def _cookie_token(api_key: str) -> str:
"""Derive a stable session token from the api key (so we never store key directly in cookie)."""
return hashlib.sha256(api_key.encode()).hexdigest()
@@ -48,6 +87,30 @@ def _expected_cookie_token() -> str:
return _cookie_token(get_console_api_key())
def _expected_team_cookie_tokens() -> Dict[str, str]:
    """Return {cookie_token: user_name} for every configured team key."""
    tokens: Dict[str, str] = {}
    for user, key in _get_team_keys().items():
        tokens[_cookie_token(key)] = user
    return tokens
def validate_any_key(provided: str) -> Tuple[bool, str]:
    """
    Check provided key against primary key AND team keys.

    Returns (is_valid, identity_string):
      - "operator" for the primary key,
      - "user:<name>" for a team key,
      - "anonymous" only when NO key at all is configured (open console).

    Security fix: the open-access fallback previously ran before the team-key
    check, so an unset primary key let ANY value through as "anonymous" even
    when team keys were configured. The fallback now fires only when neither
    the primary key nor any team key is set — matching require_auth.
    """
    configured = get_console_api_key()
    # Primary (operator) key — constant-time comparison
    if configured and secrets.compare_digest(provided.strip(), configured):
        return True, "operator"
    # Team keys
    name = _team_key_identity(provided)
    if name:
        return True, f"user:{name}"
    # Open access only when nothing at all is configured
    if not configured and not _get_team_keys():
        return True, "anonymous"
    return False, ""
def require_auth(
request: Request,
x_api_key: str = Security(API_KEY_HEADER),
@@ -56,6 +119,7 @@ def require_auth(
Check cookie OR X-API-Key header.
Localhost (127.0.0.1 / ::1) is ALWAYS allowed — no key needed.
In dev mode without a configured key: pass through.
Returns identity string for audit (e.g. "operator", "user:alice", "localhost").
"""
# Localhost bypass — always open for local development
client_ip = (request.client.host if request.client else "") or ""
@@ -63,19 +127,27 @@ def require_auth(
return "localhost"
configured = get_console_api_key()
if not configured:
team_keys = _get_team_keys()
if not configured and not team_keys:
if _IS_PROD:
logger.warning("SOFIIA_CONSOLE_API_KEY not set in prod — console is OPEN")
return ""
# 1) Cookie check
# 1) Cookie check — primary key cookie
cookie_val = request.cookies.get(_COOKIE_NAME, "")
if cookie_val and secrets.compare_digest(cookie_val, _expected_cookie_token()):
return "cookie"
if cookie_val:
if configured and secrets.compare_digest(cookie_val, _expected_cookie_token()):
return "operator"
# Team key cookies
team_tokens = _expected_team_cookie_tokens()
if cookie_val in team_tokens:
return f"user:{team_tokens[cookie_val]}"
# 2) X-API-Key header (for API clients / curl)
if x_api_key and _key_valid(x_api_key):
return "header"
# 2) X-API-Key header
if x_api_key:
valid, identity = validate_any_key(x_api_key)
if valid:
return identity
raise HTTPException(status_code=401, detail="Unauthorized")

View File

@@ -38,6 +38,7 @@ from .auth import (
require_api_key, require_api_key_strict, require_auth, require_auth_strict, require_audit_auth,
get_console_api_key, _key_valid, _cookie_token, _expected_cookie_token,
_COOKIE_NAME, _COOKIE_MAX_AGE, _IS_PROD,
validate_any_key, _expected_team_cookie_tokens,
)
from .config import (
@@ -5114,11 +5115,11 @@ class _LoginBody(BaseModel):
@app.post("/api/auth/login")
async def auth_login(body: _LoginBody, response: Response):
"""
Verify API key (sent in JSON body — avoids header encoding issues).
On success: set httpOnly session cookie, return ok=true.
No CORS/header encoding issues since key travels in request body.
Verify API key (primary or team key, sent in JSON body).
On success: set httpOnly session cookie, return ok=true + identity.
"""
if not _key_valid(body.key):
valid, identity = validate_any_key(body.key)
if not valid:
raise HTTPException(status_code=401, detail="Invalid key")
token = _cookie_token(body.key)
@@ -5131,7 +5132,7 @@ async def auth_login(body: _LoginBody, response: Response):
max_age=_COOKIE_MAX_AGE,
path="/",
)
return {"ok": True, "auth": "cookie"}
return {"ok": True, "auth": "cookie", "identity": identity}
@app.post("/api/auth/logout")
@@ -5143,19 +5144,23 @@ async def auth_logout(response: Response):
@app.get("/api/auth/check")
async def auth_check(request: Request):
    """
    Returns 200 + identity if session is valid, 401 otherwise. Used by UI on startup.

    Identity values mirror require_auth: "localhost", "anonymous" (open mode),
    "operator" (primary-key cookie), "user:<name>" (team-key cookie).
    Cleanup: removed diff-leftover duplicate docstring, the dead
    ``_expected_cookie_token as _ect`` alias, and the stale single-key
    ``if not configured:`` open-mode check.
    """
    import secrets as _sec
    from .auth import _get_team_keys

    # Localhost is always open — no auth needed
    client_ip = (request.client.host if request.client else "") or ""
    if client_ip in ("127.0.0.1", "::1", "localhost"):
        return {"ok": True, "auth": "localhost", "identity": "localhost"}

    configured = get_console_api_key()
    team_keys = _get_team_keys()
    if not configured and not team_keys:
        # Neither primary nor team keys configured → console is open
        return {"ok": True, "auth": "open", "identity": "anonymous"}

    cookie_val = request.cookies.get(_COOKIE_NAME, "")
    if cookie_val:
        # Primary-key cookie (constant-time compare)
        if configured and _sec.compare_digest(cookie_val, _expected_cookie_token()):
            return {"ok": True, "auth": "cookie", "identity": "operator"}
        # Team-key cookies
        team_tokens = _expected_team_cookie_tokens()
        if cookie_val in team_tokens:
            return {"ok": True, "auth": "cookie", "identity": f"user:{team_tokens[cookie_val]}"}
    raise HTTPException(status_code=401, detail="Not authenticated")