Files
microdao-daarion/scripts/backup_postgres.sh
Apple fca48b3eb0 feat(node2): Complete NODE2 setup - guardian, agents, swapper models
- Node-guardian running on MacBook and updating metrics
- NODE2 agents (Atlas, Greeter, Oracle, Builder Bot) assigned to node-2-macbook-m4max
- Swapper models displaying correctly (8 models)
- DAGI Router agents showing with correct status (3 active, 1 stale)
- Router health check using node_cache for remote nodes
2025-12-02 07:07:58 -08:00

44 lines
1.3 KiB
Bash
Executable File

#!/bin/bash
# PostgreSQL Backup Script for DAARION
#
# Creates a custom-format (compressed) pg_dump of the configured database,
# prunes dumps older than RETENTION_DAYS, and exits non-zero on failure.
#
# Run hourly via cron, e.g.:
#   0 * * * * /usr/local/bin/backup_postgres.sh
#
# Configuration (overridable via environment):
#   DB_USER        - PostgreSQL role to connect as   (default: daarion)
#   DB_NAME        - database to dump                (default: daarion)
#   BACKUP_DIR     - destination directory           (default: /var/backups/daarion)
#   RETENTION_DAYS - days to keep old dumps          (default: 7)
set -euo pipefail

DB_USER="${DB_USER:-daarion}"
DB_NAME="${DB_NAME:-daarion}"
BACKUP_DIR="${BACKUP_DIR:-daarion}"
BACKUP_DIR="${BACKUP_DIR:-/var/backups/daarion}"
RETENTION_DAYS="${RETENTION_DAYS:-7}"

# Create backup directory if it doesn't exist.
mkdir -p -- "$BACKUP_DIR"

# Generate backup filename with timestamp.
BACKUP_FILE="$BACKUP_DIR/daarion_$(date +"%Y-%m-%d_%H-%M").dump"

echo "Starting backup: $BACKUP_FILE"

# Dump to a temp file first so a failed or interrupted dump never leaves a
# truncated .dump behind (which the retention sweep would then keep around);
# the file is moved into place only after pg_dump reports success.
TMP_FILE="$BACKUP_FILE.tmp"
cleanup() { rm -f -- "$TMP_FILE"; }
trap cleanup EXIT

# NOTE: the previous `if [ $? -eq 0 ]` pattern was dead code under `set -e` —
# the script exited on pg_dump failure before the check ran. Test the command
# directly instead so the failure branch is actually reachable.
if pg_dump -U "$DB_USER" -Fc -f "$TMP_FILE" "$DB_NAME"; then
  mv -- "$TMP_FILE" "$BACKUP_FILE"
  trap - EXIT
  echo "Backup successful: $BACKUP_FILE"

  # Remove old backups (older than RETENTION_DAYS). Quote the -mtime argument
  # so an empty/whitespace value can't break the find expression.
  find "$BACKUP_DIR" -type f -name "daarion_*.dump" -mtime "+$RETENTION_DAYS" -delete
  echo "Cleaned up backups older than $RETENTION_DAYS days"

  # Optional: Upload to cloud storage (Cloudflare R2, S3, etc.)
  # Example for Cloudflare R2:
  # if [ -n "$R2_ACCESS_KEY" ] && [ -n "$R2_SECRET_KEY" ]; then
  #   rclone copy "$BACKUP_FILE" "r2:daarion-backups/" --config /etc/rclone.conf
  # fi
else
  echo "Backup failed!" >&2
  exit 1
fi