docs(platform): add policy configs, runbooks, ops scripts and platform documentation
Config policies (16 files): alert_routing, architecture_pressure, backlog, cost_weights, data_governance, incident_escalation, incident_intelligence, network_allowlist, nodes_registry, observability_sources, rbac_tools_matrix, release_gate, risk_attribution, risk_policy, slo_policy, tool_limits, tools_rollout. Ops (22 files): Caddyfile, calendar compose, grafana voice dashboard, deployments/incidents logs, runbooks for alerts/audit/backlog/incidents/sofiia/voice, cron jobs, scripts (alert_triage, audit_cleanup, migrate_*, governance, schedule), task_registry, voice alerts/ha/latency/policy. Docs (30+ files): HUMANIZED_STEPAN v2.7-v3 changelogs and runbooks, NODA1/NODA2 status and setup, audit index and traces, backlog, incident, supervisor, tools, voice, opencode, release, risk, aistalk, spacebot. Made-with: Cursor
This commit is contained in:
84
ops/scripts/migrate_risk_history_postgres.py
Normal file
84
ops/scripts/migrate_risk_history_postgres.py
Normal file
@@ -0,0 +1,84 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Idempotent DDL migration for Postgres risk_history backend.
|
||||
|
||||
Creates table: risk_history (+ indexes).
|
||||
|
||||
Usage:
|
||||
DATABASE_URL=postgresql://... python3 ops/scripts/migrate_risk_history_postgres.py
|
||||
python3 ops/scripts/migrate_risk_history_postgres.py --dry-run
|
||||
|
||||
Exit codes: 0 = success, 1 = error
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
# Idempotent DDL: every statement uses IF NOT EXISTS, so re-running the
# migration against an already-migrated database is a no-op.
DDL = textwrap.dedent("""\
    -- ─── risk_history ──────────────────────────────────────────────────────────
    CREATE TABLE IF NOT EXISTS risk_history (
        ts TIMESTAMPTZ NOT NULL,
        service TEXT NOT NULL,
        env TEXT NOT NULL DEFAULT 'prod',
        score INTEGER NOT NULL,
        band TEXT NOT NULL,
        components JSONB NOT NULL DEFAULT '{}',
        reasons JSONB NOT NULL DEFAULT '[]',
        PRIMARY KEY (ts, service, env)
    );

    CREATE INDEX IF NOT EXISTS risk_history_svc_env_ts
        ON risk_history (service, env, ts DESC);

    CREATE INDEX IF NOT EXISTS risk_history_env_ts
        ON risk_history (env, ts DESC);
""")


def run(dsn: str, dry_run: bool = False) -> None:
    """Apply the risk_history DDL to the Postgres database at *dsn*.

    Args:
        dsn: Postgres connection string (e.g. ``postgresql://user:pass@host/db``).
        dry_run: When True, print the DDL and return without connecting;
            no database access (and no psycopg2) is required.

    Exits the process with status 1 on import or database errors
    (script-style error handling for a CLI helper).
    """
    if dry_run:
        print("=== DRY RUN — DDL that would be applied ===")
        print(DDL)
        return

    # Imported lazily so that --dry-run works without psycopg2 installed.
    try:
        import psycopg2  # type: ignore
    except ImportError:
        print("ERROR: psycopg2 not installed. Run: pip install psycopg2-binary", file=sys.stderr)
        sys.exit(1)

    conn = None
    try:
        conn = psycopg2.connect(dsn)
        # autocommit: each DDL statement takes effect immediately, so a
        # partial failure never leaves an open transaction behind.
        conn.autocommit = True
        # Cursor as context manager guarantees it is closed even on error.
        with conn.cursor() as cur:
            # Naive split on ";" is safe for this DDL: none of its string
            # literals ('prod', '{}', '[]') contain a semicolon.
            for statement in DDL.split(";"):
                stmt = statement.strip()
                if stmt:
                    cur.execute(stmt + ";")
        print("risk_history migration applied successfully.")
    except Exception as e:
        print(f"ERROR: {e}", file=sys.stderr)
        sys.exit(1)
    finally:
        # Fix: the original leaked the connection (and cursor) when an
        # execute() raised — close it on every path.
        if conn is not None:
            conn.close()
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: parse flags, resolve the DSN, run the migration."""
    ap = argparse.ArgumentParser(description="Migrate risk_history table in Postgres")
    ap.add_argument("--dry-run", action="store_true", help="Print DDL without executing")
    ap.add_argument("--dsn", default="", help="Postgres DSN (overrides DATABASE_URL)")
    ns = ap.parse_args()

    # DSN precedence: explicit --dsn flag, then DATABASE_URL, then RISK_DATABASE_URL.
    dsn = ns.dsn or os.getenv("DATABASE_URL") or os.getenv("RISK_DATABASE_URL", "")

    # A DSN is mandatory unless we are only printing the DDL.
    if not (dsn or ns.dry_run):
        print("ERROR: No DSN provided. Set DATABASE_URL or pass --dsn.", file=sys.stderr)
        sys.exit(1)

    run(dsn, dry_run=ns.dry_run)
|
||||
|
||||
|
||||
# Script entry point: invoke the CLI only when executed directly,
# never on import.
if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user