Files
microdao-daarion/ops/scripts/migrate_backlog_postgres.py
Apple 67225a39fa docs(platform): add policy configs, runbooks, ops scripts and platform documentation
Config policies (16 files): alert_routing, architecture_pressure, backlog,
cost_weights, data_governance, incident_escalation, incident_intelligence,
network_allowlist, nodes_registry, observability_sources, rbac_tools_matrix,
release_gate, risk_attribution, risk_policy, slo_policy, tool_limits, tools_rollout

Ops (22 files): Caddyfile, calendar compose, grafana voice dashboard,
deployments/incidents logs, runbooks for alerts/audit/backlog/incidents/sofiia/voice,
cron jobs, scripts (alert_triage, audit_cleanup, migrate_*, governance, schedule),
task_registry, voice alerts/ha/latency/policy

Docs (30+ files): HUMANIZED_STEPAN v2.7-v3 changelogs and runbooks,
NODA1/NODA2 status and setup, audit index and traces, backlog, incident,
supervisor, tools, voice, opencode, release, risk, aistalk, spacebot

Made-with: Cursor
2026-03-03 07:14:53 -08:00

117 lines
4.8 KiB
Python

#!/usr/bin/env python3
"""
migrate_backlog_postgres.py — Idempotent DDL migration for Engineering Backlog.
DAARION.city
Creates tables and indexes if they do not exist. Safe to re-run.
Usage:
python3 ops/scripts/migrate_backlog_postgres.py
python3 ops/scripts/migrate_backlog_postgres.py --dry-run
python3 ops/scripts/migrate_backlog_postgres.py --dsn "postgresql://user:pass@host/db"
"""
from __future__ import annotations

import argparse
import os
import re
import sys
# DDL statements, executed in list order: tables first, then the indexes that
# reference them. Every statement uses IF NOT EXISTS, so the whole list is
# idempotent and safe to re-run against an already-migrated database.
DDL = [
# ── backlog_items ─────────────────────────────────────────────────────────
# NOTE(review): dedupe_key is declared UNIQUE with DEFAULT '' — two inserts
# that both fall back to the default would violate the constraint; presumably
# every writer supplies an explicit dedupe_key. Confirm against the ingesters.
"""
CREATE TABLE IF NOT EXISTS backlog_items (
id TEXT PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
env TEXT NOT NULL DEFAULT 'prod',
service TEXT NOT NULL DEFAULT '',
category TEXT NOT NULL DEFAULT '',
title TEXT NOT NULL DEFAULT '',
description TEXT NOT NULL DEFAULT '',
priority TEXT NOT NULL DEFAULT 'P2',
status TEXT NOT NULL DEFAULT 'open',
owner TEXT NOT NULL DEFAULT 'oncall',
due_date DATE,
source TEXT NOT NULL DEFAULT 'manual',
dedupe_key TEXT NOT NULL UNIQUE DEFAULT '',
evidence_refs JSONB NOT NULL DEFAULT '{}',
tags JSONB NOT NULL DEFAULT '[]',
meta JSONB NOT NULL DEFAULT '{}'
)
""",
# ── backlog_events ────────────────────────────────────────────────────────
# Child table: one row per item event (comment/status change/etc.); rows are
# removed automatically when the parent item is deleted (ON DELETE CASCADE).
"""
CREATE TABLE IF NOT EXISTS backlog_events (
id TEXT PRIMARY KEY,
item_id TEXT NOT NULL REFERENCES backlog_items(id) ON DELETE CASCADE,
ts TIMESTAMPTZ NOT NULL DEFAULT NOW(),
type TEXT NOT NULL DEFAULT 'comment',
message TEXT NOT NULL DEFAULT '',
actor TEXT NOT NULL DEFAULT 'system',
meta JSONB NOT NULL DEFAULT '{}'
)
""",
# ── Indexes ───────────────────────────────────────────────────────────────
# Cover the common query axes: env+status dashboards, per-service and
# per-owner views, due-date sweeps, and event lookup/ordering by item.
"CREATE INDEX IF NOT EXISTS idx_backlog_items_env_status ON backlog_items (env, status)",
"CREATE INDEX IF NOT EXISTS idx_backlog_items_service ON backlog_items (service)",
"CREATE INDEX IF NOT EXISTS idx_backlog_items_due_date ON backlog_items (due_date)",
"CREATE INDEX IF NOT EXISTS idx_backlog_items_owner ON backlog_items (owner)",
"CREATE INDEX IF NOT EXISTS idx_backlog_items_category ON backlog_items (category)",
"CREATE INDEX IF NOT EXISTS idx_backlog_events_item_id ON backlog_events (item_id)",
"CREATE INDEX IF NOT EXISTS idx_backlog_events_ts ON backlog_events (ts)",
]
def migrate(dsn: str, dry_run: bool = False) -> None:
    """Apply the backlog DDL to Postgres (idempotent; safe to re-run).

    Args:
        dsn: Postgres connection string, e.g. ``postgresql://user:pass@host/db``.
        dry_run: If True, print the DDL statements and return without
            connecting to the database.

    Exits the process with status 1 when psycopg2 is not installed
    (only reached when actually connecting, so --dry-run works without it).
    """
    # Security: the DSN may embed a password — mask it before logging so
    # credentials never land in stdout/log aggregation.
    safe_dsn = re.sub(r"://([^:/@]+):[^@]+@", r"://\1:***@", dsn)
    print(f"[backlog migration] DSN: {safe_dsn!r} dry_run={dry_run}")
    if dry_run:
        print("[dry-run] Would execute the following DDL statements:")
        for stmt in DDL:
            print(" ", stmt.strip()[:120])
        return
    try:
        # Deferred import: keeps --dry-run usable on hosts without the driver.
        import psycopg2
    except ImportError:
        print("ERROR: psycopg2 not installed. Run: pip install psycopg2-binary", file=sys.stderr)
        sys.exit(1)
    conn = psycopg2.connect(dsn)
    # DDL statements take effect immediately; no transaction wrapper needed.
    conn.autocommit = True
    try:
        with conn.cursor() as cur:
            for stmt in DDL:
                stmt = stmt.strip()
                if not stmt:
                    continue
                # One-line preview of the statement about to run.
                print(f" EXEC: {stmt[:80].replace(chr(10), ' ')}")
                cur.execute(stmt)
        print("[backlog migration] Done. All DDL applied idempotently.")
    finally:
        conn.close()
def main() -> None:
    """Parse command-line options and run the backlog migration."""
    # Resolve the default DSN up front: env override first, then fallback.
    default_dsn = os.environ.get(
        "BACKLOG_POSTGRES_DSN",
        os.environ.get("POSTGRES_DSN", "postgresql://localhost/daarion"),
    )
    parser = argparse.ArgumentParser(
        description="Idempotent Postgres DDL migration for Engineering Backlog"
    )
    parser.add_argument(
        "--dsn",
        default=default_dsn,
        help="Postgres DSN (default: $BACKLOG_POSTGRES_DSN or $POSTGRES_DSN)",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Print DDL without executing",
    )
    opts = parser.parse_args()
    migrate(opts.dsn, dry_run=opts.dry_run)


if __name__ == "__main__":
    main()