docs(platform): add policy configs, runbooks, ops scripts and platform documentation
Config policies (17 files): alert_routing, architecture_pressure, backlog, cost_weights, data_governance, incident_escalation, incident_intelligence, network_allowlist, nodes_registry, observability_sources, rbac_tools_matrix, release_gate, risk_attribution, risk_policy, slo_policy, tool_limits, tools_rollout Ops (22 files): Caddyfile, calendar compose, grafana voice dashboard, deployments/incidents logs, runbooks for alerts/audit/backlog/incidents/sofiia/voice, cron jobs, scripts (alert_triage, audit_cleanup, migrate_*, governance, schedule), task_registry, voice alerts/ha/latency/policy Docs (30+ files): HUMANIZED_STEPAN v2.7-v3 changelogs and runbooks, NODA1/NODA2 status and setup, audit index and traces, backlog, incident, supervisor, tools, voice, opencode, release, risk, aistalk, spacebot Made-with: Cursor
This commit is contained in:
121
ops/scripts/migrate_incidents_postgres.py
Normal file
121
ops/scripts/migrate_incidents_postgres.py
Normal file
@@ -0,0 +1,121 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Idempotent DDL migration for Postgres incident log backend.
|
||||
|
||||
Creates tables: incidents, incident_events, incident_artifacts (+ indexes).
|
||||
|
||||
Usage:
|
||||
DATABASE_URL=postgresql://... python3 ops/scripts/migrate_incidents_postgres.py
|
||||
python3 ops/scripts/migrate_incidents_postgres.py --dry-run
|
||||
|
||||
Exit codes: 0 = success, 1 = error
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
DDL = textwrap.dedent("""\
|
||||
-- ─── incidents ──────────────────────────────────────────────────────────
|
||||
CREATE TABLE IF NOT EXISTS incidents (
|
||||
id TEXT PRIMARY KEY,
|
||||
workspace_id TEXT NOT NULL DEFAULT 'default',
|
||||
service TEXT NOT NULL,
|
||||
env TEXT NOT NULL DEFAULT 'prod',
|
||||
severity TEXT NOT NULL DEFAULT 'P2',
|
||||
status TEXT NOT NULL DEFAULT 'open',
|
||||
title TEXT NOT NULL,
|
||||
summary TEXT,
|
||||
started_at TIMESTAMPTZ NOT NULL,
|
||||
ended_at TIMESTAMPTZ,
|
||||
created_by TEXT NOT NULL,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_incidents_ws_created
|
||||
ON incidents (workspace_id, created_at);
|
||||
CREATE INDEX IF NOT EXISTS idx_incidents_service_status
|
||||
ON incidents (service, status);
|
||||
|
||||
-- ─── incident_events (timeline) ─────────────────────────────────────────
|
||||
CREATE TABLE IF NOT EXISTS incident_events (
|
||||
id BIGSERIAL PRIMARY KEY,
|
||||
incident_id TEXT NOT NULL REFERENCES incidents(id) ON DELETE CASCADE,
|
||||
ts TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
type TEXT NOT NULL,
|
||||
message TEXT NOT NULL DEFAULT '',
|
||||
meta JSONB
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_incident_events_inc_ts
|
||||
ON incident_events (incident_id, ts);
|
||||
|
||||
-- ─── incident_artifacts ──────────────────────────────────────────────────
|
||||
CREATE TABLE IF NOT EXISTS incident_artifacts (
|
||||
id BIGSERIAL PRIMARY KEY,
|
||||
incident_id TEXT NOT NULL REFERENCES incidents(id) ON DELETE CASCADE,
|
||||
ts TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
kind TEXT NOT NULL,
|
||||
format TEXT NOT NULL DEFAULT 'json',
|
||||
path TEXT NOT NULL,
|
||||
sha256 TEXT NOT NULL DEFAULT '',
|
||||
size_bytes INT NOT NULL DEFAULT 0
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_incident_artifacts_inc_ts
|
||||
ON incident_artifacts (incident_id, ts);
|
||||
""")
|
||||
|
||||
|
||||
def run(dsn: str, dry_run: bool) -> int:
    """Apply (or preview) the incident-log DDL against Postgres.

    Args:
        dsn: Postgres connection string (``postgresql://...``).
        dry_run: when True, print the DDL and return without connecting.

    Returns:
        0 on success, 1 on any failure (driver unavailable, connection
        or execution error).
    """
    # A dry run needs neither psycopg2 nor a reachable database, so
    # handle it before the import (which may otherwise trigger a pip
    # install just to print SQL).
    if dry_run:
        print("[migrate-incidents] DRY-RUN — DDL only:\n")
        print(DDL)
        return 0

    try:
        import psycopg2  # type: ignore
    except ImportError:
        # Best-effort self-install so the script works on fresh hosts.
        try:
            import subprocess
            subprocess.check_call([sys.executable, "-m", "pip", "install", "--quiet", "psycopg2-binary"])
            import psycopg2  # type: ignore # noqa: F811
        except Exception as pip_err:
            print(f"[ERROR] psycopg2 not available: {pip_err}", file=sys.stderr)
            return 1

    # Don't echo credentials: a DSN is typically postgresql://user:pass@host/db,
    # so only show the part after the last '@'.
    safe_dsn = dsn.rsplit("@", 1)[-1] if "@" in dsn else dsn
    print(f"[migrate-incidents] Connecting to: {safe_dsn[:40]}…")

    conn = None
    try:
        conn = psycopg2.connect(dsn)
        conn.autocommit = False  # apply all DDL in one transaction
        with conn.cursor() as cur:
            cur.execute(DDL)
        conn.commit()
        print("[migrate-incidents] ✅ Incident tables created/verified successfully.")
        return 0
    except Exception as exc:
        print(f"[migrate-incidents] ❌ Migration failed: {exc}", file=sys.stderr)
        return 1
    finally:
        # The original leaked the connection when execute/commit raised;
        # always release it.
        if conn is not None:
            conn.close()
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: resolve the DSN, run the migration, exit with its code."""
    cli = argparse.ArgumentParser(description="Idempotent Postgres incident DDL migration")
    cli.add_argument("--dry-run", action="store_true")
    cli.add_argument(
        "--dsn",
        default=os.getenv("DATABASE_URL") or os.getenv("POSTGRES_DSN", ""),
    )
    opts = cli.parse_args()

    # A DSN must come from --dsn, DATABASE_URL, or POSTGRES_DSN.
    if opts.dsn:
        sys.exit(run(opts.dsn, opts.dry_run))
    print("[migrate-incidents] ERROR: DATABASE_URL not set.", file=sys.stderr)
    sys.exit(1)
|
||||
|
||||
|
||||
# Script entry point: run the migration CLI when executed directly.
if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user