#!/bin/bash
#
# PostgreSQL backup script for DAARION.
# Run hourly via cron: 0 * * * * /usr/local/bin/backup.sh
#
# Environment overrides (all optional):
#   DB_USER        database user          (default: daarion)
#   DB_NAME        database name          (default: daarion)
#   BACKUP_DIR     destination directory  (default: /var/backups/daarion)
#   RETENTION_DAYS prune dumps older than this many days (default: 7)

set -euo pipefail

# Configuration — env vars override the defaults.
DB_USER="${DB_USER:-daarion}"
DB_NAME="${DB_NAME:-daarion}"
BACKUP_DIR="${BACKUP_DIR:-/var/backups/daarion}"
RETENTION_DAYS="${RETENTION_DAYS:-7}"

# Create backup directory if it doesn't exist.
mkdir -p "$BACKUP_DIR"

# Timestamped filename, e.g. daarion_2024-01-31_13-00.dump
BACKUP_FILE="$BACKUP_DIR/daarion_$(date +"%Y-%m-%d_%H-%M").dump"

echo "Starting backup: $BACKUP_FILE"

# -Fc: custom (compressed) format — restore with pg_restore; no extra gzip needed.
# NOTE: under 'set -e' a failing pg_dump would abort the script before any
# '[ $? -eq 0 ]' check could run, so test the command directly in the 'if'.
if pg_dump -U "$DB_USER" -Fc "$DB_NAME" > "$BACKUP_FILE"; then
  echo "Backup successful: $BACKUP_FILE"
else
  echo "Backup failed!" >&2
  rm -f -- "$BACKUP_FILE"  # don't leave a truncated/empty dump behind
  exit 1
fi

# Remove old backups (older than RETENTION_DAYS days).
find "$BACKUP_DIR" -type f -name "daarion_*.dump" -mtime +"$RETENTION_DAYS" -delete
echo "Cleaned up backups older than $RETENTION_DAYS days"

# Optional: upload to cloud storage (Cloudflare R2, S3, etc.).
# Example for Cloudflare R2 ('${VAR:-}' form keeps this safe under 'set -u'):
# if [ -n "${R2_ACCESS_KEY:-}" ] && [ -n "${R2_SECRET_KEY:-}" ]; then
#   rclone copy "$BACKUP_FILE" "r2:daarion-backups/" --config /etc/rclone.conf
# fi