Files
microdao-daarion/scripts/backup/backup_all.sh
Apple ef3473db21 snapshot: NODE1 production state 2026-02-09
Complete snapshot of /opt/microdao-daarion/ from NODE1 (144.76.224.179).
This represents the actual running production code that has diverged
significantly from the previous main branch.

Key changes from old main:
- Gateway (http_api.py): expanded from ~40KB to 164KB with full agent support
- Router: new /v1/agents/{id}/infer endpoint with vision + DeepSeek routing
- Behavior Policy: SOWA v2.2 (3-level: FULL/ACK/SILENT)
- Agent Registry: config/agent_registry.yml as single source of truth
- 13 agents configured (was 3)
- Memory service integration
- CrewAI teams and roles

Excluded from snapshot: venv/, .env, data/, backups, .tgz archives

Co-authored-by: Cursor <cursoragent@cursor.com>
2026-02-09 08:46:46 -08:00

128 lines
4.6 KiB
Bash
Executable File

#!/bin/bash
# Backup script for Postgres, Qdrant, Neo4j.
# Usage: ./backup_all.sh [restore_test]
#   restore_test: pass "true" to chain into the restore verification script.
#
# Writes dumps under $BACKUP_DIR/{postgres,qdrant,neo4j}/ plus a JSON
# metadata file with row/point/node counts for post-backup verification.
#
# -u catches typo'd/unset variables; pipefail surfaces failures hidden
# inside the docker-exec | python3 pipelines below (each already has an
# explicit "|| fallback", so pipefail only makes those guards effective).
set -euo pipefail

readonly BACKUP_DIR="/opt/microdao-daarion/data/backups"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
readonly TIMESTAMP
RESTORE_TEST=${1:-false}   # optional first arg; defaults to "false"

echo "╔══════════════════════════════════════════════════════════════╗"
echo "║ BACKUP ALL DATABASES ║"
echo "╚══════════════════════════════════════════════════════════════╝"
echo "Timestamp: $TIMESTAMP"
echo ""
# 1. Postgres Backup
echo "=== 1. Postgres Backup ==="
# The redirect below fails (and aborts the script under set -e) if the
# target directory is missing — the Qdrant/Neo4j sections mkdir theirs,
# but this one didn't.
mkdir -p "$BACKUP_DIR/postgres"
PG_BACKUP="$BACKUP_DIR/postgres/backup_$TIMESTAMP.sql"
docker exec dagi-postgres pg_dump -U daarion daarion_main > "$PG_BACKUP"
echo "✅ Postgres backup: $PG_BACKUP ($(du -h "$PG_BACKUP" | cut -f1))"
# Collect row counts as a JSON object for the metadata file.
# NOTE: the query must emit the surrounding braces itself so the success
# path matches the fallback value — previously it produced bare
# key:value pairs, which made the generated metadata file invalid JSON.
# -A (unaligned) strips psql's column padding from the output.
echo "Collecting row counts..."
PG_COUNTS=$(docker exec dagi-postgres psql -U daarion -d daarion_main -t -A -c "
SELECT
'{\"sessions\":' || COUNT(*) || ',' ||
'\"facts\":' || (SELECT COUNT(*) FROM facts) || ',' ||
'\"audit_log\":' || (SELECT COUNT(*) FROM audit_log) || ',' ||
'\"helion_mentors\":' || (SELECT COUNT(*) FROM helion_mentors) || '}'
FROM sessions;
" 2>/dev/null || echo '{"sessions":0,"facts":0,"audit_log":0,"helion_mentors":0}')
echo "Postgres counts: $PG_COUNTS"
# 2. Qdrant Snapshot
echo ""
echo "=== 2. Qdrant Snapshot ==="
QDRANT_BACKUP_DIR="$BACKUP_DIR/qdrant/backup_$TIMESTAMP"
mkdir -p "$QDRANT_BACKUP_DIR"
# Discover collection names via the container-local API (one name per line).
COLLECTIONS=$(docker exec dagi-qdrant-node1 curl -s http://localhost:6333/collections 2>/dev/null | python3 -c "
import sys, json
try:
    data = json.load(sys.stdin)
    for col in data.get('result', {}).get('collections', []):
        print(col['name'])
except Exception:
    pass
" 2>/dev/null || echo "")
if [ -z "$COLLECTIONS" ]; then
  echo "⚠️ No collections found in Qdrant"
  QDRANT_COUNTS="{}"
else
  echo "Collections: $COLLECTIONS"
  # Intentionally unquoted: split the newline-separated list into words.
  # Assumes collection names contain no whitespace — TODO confirm.
  for col in $COLLECTIONS; do
    echo " Snapshotting: $col"
    docker exec dagi-qdrant-node1 curl -s -X POST "http://localhost:6333/collections/$col/snapshots" > "$QDRANT_BACKUP_DIR/${col}_snapshot.json" 2>/dev/null || echo " ⚠️ Failed to snapshot $col"
  done
  # Per-collection point counts. Query each collection from INSIDE the
  # container: the previous implementation called urllib on the host,
  # which only worked when port 6333 happened to be published there.
  QDRANT_COUNTS="{"
  sep=""
  for col in $COLLECTIONS; do
    cnt=$(docker exec dagi-qdrant-node1 curl -s "http://localhost:6333/collections/$col" 2>/dev/null | python3 -c "
import sys, json
try:
    print(json.load(sys.stdin).get('result', {}).get('points_count', 0))
except Exception:
    print(0)
" 2>/dev/null || echo 0)
    QDRANT_COUNTS="$QDRANT_COUNTS$sep\"$col\":$cnt"
    sep=","
  done
  QDRANT_COUNTS="$QDRANT_COUNTS}"
fi
echo "Qdrant counts: $QDRANT_COUNTS"
# 3. Neo4j Backup
echo ""
echo "=== 3. Neo4j Backup ==="
NEO4J_BACKUP_DIR="$BACKUP_DIR/neo4j/backup_$TIMESTAMP"
mkdir -p "$NEO4J_BACKUP_DIR"
# Password is overridable via env; the deployed value remains the default
# for backward compatibility.
# TODO(review): move the default out of version control entirely (secret).
NEO4J_PASSWORD="${NEO4J_PASSWORD:-DaarionNeo4j2026!}"
echo "Creating Neo4j backup..."
# neo4j-admin writes the actual dump file to /backups INSIDE the container;
# its stdout is only log text. Previously that log was saved under the
# misleading name "neo4j.dump", so the host backup dir never held the
# real dump — capture logs separately, then copy the dump out.
docker exec dagi-neo4j-node1 neo4j-admin database dump --database=neo4j --to-path=/backups > "$NEO4J_BACKUP_DIR/dump.log" 2>&1 || {
  echo "⚠️ neo4j-admin dump failed, using volume info"
  docker exec dagi-neo4j-node1 ls -la /data/databases/neo4j/ > "$NEO4J_BACKUP_DIR/volume_info.txt" 2>&1 || true
}
# Best-effort: /backups may already be a host bind mount — confirm on NODE1.
docker cp dagi-neo4j-node1:/backups/neo4j.dump "$NEO4J_BACKUP_DIR/neo4j.dump" 2>/dev/null || true
# Node / relationship counts for the metadata file. grep -m1 stops at the
# first match (replaces "| head -1", which could SIGPIPE grep under pipefail).
NEO4J_COUNTS=$(docker exec dagi-neo4j-node1 cypher-shell -u neo4j -p "$NEO4J_PASSWORD" "MATCH (n) RETURN count(n) as nodes;" 2>/dev/null | grep -oE -m1 '[0-9]+' || echo "0")
NEO4J_RELS=$(docker exec dagi-neo4j-node1 cypher-shell -u neo4j -p "$NEO4J_PASSWORD" "MATCH ()-[r]->() RETURN count(r) as rels;" 2>/dev/null | grep -oE -m1 '[0-9]+' || echo "0")
echo "Neo4j counts: nodes=$NEO4J_COUNTS, relationships=$NEO4J_RELS"
# Persist run metadata as a single JSON document (backup locations plus the
# row/point/node counts gathered above) so restore tooling can verify it.
METADATA_FILE="$BACKUP_DIR/backup_$TIMESTAMP.metadata.json"
{
  printf '%s\n' '{'
  printf '%s\n' "\"timestamp\": \"$TIMESTAMP\","
  printf '%s\n' '"postgres": {'
  printf '%s\n' "\"backup_file\": \"$PG_BACKUP\","
  printf '%s\n' "\"counts\": $PG_COUNTS"
  printf '%s\n' '},'
  printf '%s\n' '"qdrant": {'
  printf '%s\n' "\"backup_dir\": \"$QDRANT_BACKUP_DIR\","
  printf '%s\n' "\"counts\": $QDRANT_COUNTS"
  printf '%s\n' '},'
  printf '%s\n' '"neo4j": {'
  printf '%s\n' "\"backup_dir\": \"$NEO4J_BACKUP_DIR\","
  printf '%s\n' "\"nodes\": $NEO4J_COUNTS,"
  printf '%s\n' "\"relationships\": $NEO4J_RELS"
  printf '%s\n' '}'
  printf '%s\n' '}'
} > "$METADATA_FILE"
echo ""
echo "✅ Backup completed: $TIMESTAMP"
echo "Metadata: $METADATA_FILE"
# Optionally chain into the restore verification script.
if [ "$RESTORE_TEST" = "true" ]; then
  echo ""
  echo "=== Starting Restore Test ==="
  /opt/microdao-daarion/scripts/restore/restore_test.sh "$TIMESTAMP"
fi