Files
microdao-daarion/services/comfy-agent/app/storage.py

118 lines
3.4 KiB
Python

# services/comfy-agent/app/storage.py
import mimetypes
import os
from pathlib import Path
from .config import settings
try:
import boto3
from botocore.client import Config
except Exception: # pragma: no cover - optional runtime dependency fallback
boto3 = None
Config = None
# Lazily-initialized object-storage state, populated by init_object_storage().
# All three stay None when S3 is unconfigured; publish_result_url() checks that.
_s3_client = None  # boto3 S3 client, or None when object storage is disabled
_s3_bucket: str | None = None  # target bucket name
_s3_prefix: str | None = None  # optional key prefix, stored without surrounding slashes
def _s3_value(primary: str, fallback: str) -> str:
return (primary or fallback).strip()
def _build_endpoint_url(endpoint: str) -> str:
    """Ensure *endpoint* carries an explicit scheme.

    Endpoints that already start with http:// or https:// pass through
    unchanged; otherwise the scheme is picked from ``settings.S3_SECURE``.
    """
    if not endpoint.startswith(("http://", "https://")):
        scheme = "https" if settings.S3_SECURE else "http"
        endpoint = f"{scheme}://{endpoint}"
    return endpoint
def _ensure_bucket(client, bucket: str) -> None:
try:
client.head_bucket(Bucket=bucket)
return
except Exception:
pass
client.create_bucket(Bucket=bucket)
def init_object_storage() -> None:
    """Configure the module-level S3 client from settings, if fully configured.

    Reads S3_* settings with MINIO_* fallbacks; when any of endpoint, bucket,
    access key, or secret key is missing, object storage is disabled and the
    module globals are reset to None. Best-effort: creates the bucket if absent.
    """
    global _s3_client, _s3_bucket, _s3_prefix
    # Prefer S3_* settings, fall back to the legacy MINIO_* names.
    endpoint = _s3_value(settings.S3_ENDPOINT, settings.MINIO_ENDPOINT)
    bucket = _s3_value(settings.S3_BUCKET, settings.MINIO_BUCKET)
    access_key = _s3_value(settings.S3_ACCESS_KEY, settings.MINIO_ACCESS_KEY)
    secret_key = _s3_value(settings.S3_SECRET_KEY, settings.MINIO_SECRET_KEY)
    if not endpoint or not bucket or not access_key or not secret_key:
        # Incomplete config: explicitly disable S3 so publish_result_url()
        # falls back to local public URLs.
        _s3_client = None
        _s3_bucket = None
        _s3_prefix = None
        return
    if boto3 is None or Config is None:
        # boto3 is an optional dependency (see the guarded import at the top).
        # NOTE(review): this branch leaves the globals at their previous
        # values rather than resetting them — confirm init is only called once.
        print("S3 storage disabled: boto3 is not installed")
        return
    endpoint_url = _build_endpoint_url(endpoint)
    s3_config = Config(
        signature_version="s3v4",  # MinIO requires SigV4
        s3={"addressing_style": "path" if settings.S3_FORCE_PATH_STYLE else "auto"},
    )
    client = boto3.client(
        "s3",
        endpoint_url=endpoint_url,
        region_name=settings.S3_REGION,
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        use_ssl=settings.S3_SECURE,
        config=s3_config,
    )
    # Best-effort create; failures here propagate to the caller.
    _ensure_bucket(client, bucket)
    _s3_client = client
    _s3_bucket = bucket
    # Normalize the prefix so key joins never produce double slashes.
    _s3_prefix = settings.S3_PREFIX.strip("/")
    print(f"S3 storage enabled: endpoint={endpoint_url} bucket={bucket}")
def ensure_storage() -> None:
    """Make sure the local results directory exists (idempotent)."""
    os.makedirs(settings.STORAGE_PATH, exist_ok=True)
def make_job_dir(job_id: str) -> str:
    """Create (if needed) the per-job output directory and return its path."""
    ensure_storage()
    job_dir = Path(settings.STORAGE_PATH) / job_id
    job_dir.mkdir(parents=True, exist_ok=True)
    return str(job_dir)
def public_url(job_id: str, filename: str) -> str:
    """Return the locally-served URL for a job's result file.

    Bug fix: the f-string contained the literal placeholder "(unknown)"
    instead of interpolating *filename*, so every URL pointed at a
    non-existent file.
    """
    return f"{settings.PUBLIC_BASE_URL}/{job_id}/{filename}"
def publish_result_url(job_id: str, filename: str, local_path: str) -> str:
    """Upload *local_path* to object storage and return a presigned GET URL.

    Falls back to the local public URL when S3 is disabled or when the
    upload/presign fails — result delivery is best-effort and must not
    fail the job.

    Bug fix: the object key contained the literal placeholder "(unknown)"
    instead of interpolating *filename*, so all results for a job collided
    on one key and URLs never matched the real file name.
    """
    if _s3_client is None or _s3_bucket is None:
        return public_url(job_id, filename)
    object_key = f"{job_id}/{filename}"
    if _s3_prefix:
        object_key = f"{_s3_prefix}/{object_key}"
    # Guess the MIME type from the filename so browsers can render inline.
    content_type = mimetypes.guess_type(filename)[0] or "application/octet-stream"
    try:
        _s3_client.upload_file(
            local_path,
            _s3_bucket,
            object_key,
            ExtraArgs={"ContentType": content_type},
        )
        return _s3_client.generate_presigned_url(
            ClientMethod="get_object",
            Params={"Bucket": _s3_bucket, "Key": object_key},
            ExpiresIn=settings.S3_URL_TTL_S,
        )
    except Exception as e:  # broad by design: storage failure must not kill the job
        print(f"S3 upload/presign failed for job={job_id}: {e}")
        return public_url(job_id, filename)