#!/bin/bash
# Back up a Bluesky PDS instance (databases, blobs, config, Caddy) to a
# Cloudflare R2 bucket, then prune remote backups older than 7 days.
#
# Requirements: aws CLI (snap install), tar, GNU date (-d).
# Side effects: creates/removes /tmp/pds-backup-<timestamp>{,.tar.gz};
#               uploads to and deletes from s3://$R2_BUCKET.

# -e: abort on error; -u: unset vars are errors; pipefail: pipeline
# fails if any stage fails (matters for the `aws s3 ls | while` prune).
set -euo pipefail

# --- Configuration -----------------------------------------------------------
readonly BACKUP_TIMESTAMP=$(date +%Y%m%d-%H%M%S)
readonly BACKUP_DIR="/tmp/pds-backup-$BACKUP_TIMESTAMP"
readonly BACKUP_TARBALL="/tmp/pds-backup-$BACKUP_TIMESTAMP.tar.gz"
readonly R2_BUCKET="mbdpds-backup"
readonly R2_ENDPOINT="https://fe6870e7e305737ba53a51bed8a8e775.r2.cloudflarestorage.com"
readonly PDS_DIR="/pds"
readonly AWS=/snap/bin/aws

# Remove scratch files on ANY exit path (success, failure, signal),
# so a mid-backup error never leaves stale /tmp/pds-backup-* behind.
cleanup() {
  rm -rf -- "$BACKUP_DIR"
  rm -f -- "$BACKUP_TARBALL"
}
trap cleanup EXIT

echo "🔄 Starting PDS backup..."

# --- Collect files -----------------------------------------------------------
mkdir -p "$BACKUP_DIR"

echo "📦 Backing up databases..."
cp -- "$PDS_DIR/account.sqlite" "$BACKUP_DIR/"
cp -r -- "$PDS_DIR/actors" "$BACKUP_DIR/"

echo "📦 Backing up blocks..."
tar -czf "$BACKUP_DIR/blocks.tar.gz" -C "$PDS_DIR" blocks

echo "📦 Backing up config..."
cp -- "$PDS_DIR/pds.env" "$BACKUP_DIR/"
cp -- "$PDS_DIR/compose.yaml" "$BACKUP_DIR/"

echo "📦 Backing up Caddy..."
cp -r -- "$PDS_DIR/caddy/etc" "$BACKUP_DIR/caddy-config"

# --- Archive -----------------------------------------------------------------
echo "📦 Creating archive..."
# -C avoids a bare `cd /tmp` changing the script's working directory.
tar -czf "$BACKUP_TARBALL" -C /tmp "$(basename -- "$BACKUP_DIR")"

# --- Upload ------------------------------------------------------------------
echo "☁️ Uploading to R2..."
if "$AWS" s3 cp "$BACKUP_TARBALL" "s3://$R2_BUCKET/" \
  --endpoint-url="$R2_ENDPOINT" 2>&1; then
  echo "✅ Upload successful"
else
  echo "❌ Upload failed" >&2
  exit 1   # trap handles scratch-file cleanup
fi

echo "🧹 Cleaning up..."
# Scratch files are removed by the EXIT trap; nothing extra to do here.

# --- Prune remote backups older than 7 days ----------------------------------
echo "🧹 Removing old backups (>7 days)..."
# Hoisted out of the loop: the threshold doesn't change per file.
older_than=$(date -d "7 days ago" +%s)

# `aws s3 ls` lines look like: "2024-01-02 03:04:05   12345 name.tar.gz".
# Note: the while loop runs in a pipeline subshell — fine here, since no
# variables need to survive past the loop.
"$AWS" s3 ls "s3://$R2_BUCKET/" --endpoint-url="$R2_ENDPOINT" | \
  while read -r line; do
    create_date=$(echo "$line" | awk '{print $1" "$2}')
    create_epoch=$(date -d "$create_date" +%s)
    if (( create_epoch < older_than )); then
      file_name=$(echo "$line" | awk '{print $4}')
      if [[ -n "$file_name" ]]; then
        "$AWS" s3 rm "s3://$R2_BUCKET/$file_name" --endpoint-url="$R2_ENDPOINT"
        echo "Deleted: $file_name"
      fi
    fi
  done

echo "✅ Backup complete!"