Why Offsite?
Local backups protect against accidental deletion but not against hardware failure, datacenter incidents, or ransomware. Offsite backups to S3-compatible storage provide geographic redundancy.
Install AWS CLI
sudo apt install -y awscli
Configure Credentials
# Interactive credential setup — presumably stores keys under ~/.aws; verify for your environment
aws configure
# Enter your Access Key ID, Secret Key, Region, and output format
# For non-AWS S3 providers, configure the endpoint separately
Basic Backup Script
#!/bin/bash
#
# Offsite backup: dump all MySQL databases, archive /var/www and /etc,
# upload each archive to S3, then prune database archives older than
# RETENTION_DAYS. Requires: aws CLI configured, mysqldump socket auth
# for root, GNU date/grep (Ubuntu).
set -euo pipefail

DATE=$(date +%Y%m%d)
readonly DATE
BUCKET="s3://my-backups/$(hostname)"
readonly BUCKET
readonly RETENTION_DAYS=30

# Stage archives in a private mktemp dir instead of predictable /tmp
# names (avoids symlink/clobber attacks), and clean up on every exit path.
workdir=$(mktemp -d)
cleanup() { rm -rf -- "${workdir}"; }
trap cleanup EXIT

# Database backup. With pipefail, a failing mysqldump now aborts the run
# instead of silently uploading a truncated dump.
mysqldump -u root --all-databases --single-transaction \
  | gzip > "${workdir}/db_${DATE}.sql.gz"
aws s3 cp "${workdir}/db_${DATE}.sql.gz" "${BUCKET}/databases/"

# Web files
tar -czf "${workdir}/www_${DATE}.tar.gz" /var/www
aws s3 cp "${workdir}/www_${DATE}.tar.gz" "${BUCKET}/files/"

# Configs
tar -czf "${workdir}/etc_${DATE}.tar.gz" /etc
aws s3 cp "${workdir}/etc_${DATE}.tar.gz" "${BUCKET}/configs/"

# Remove S3 database backups older than RETENTION_DAYS.
# NOTE(review): an S3 lifecycle rule is the more robust way to expire
# objects; this loop is a fallback for providers without lifecycle rules.
cutoff=$(date -d "${RETENTION_DAYS} days ago" +%s)   # hoisted out of the loop
aws s3 ls "${BUCKET}/databases/" | awk '{print $4}' | while read -r file; do
  # Extract the YYYYMMDD stamp; skip names without one rather than let a
  # failed match kill the script under set -e.
  [[ "${file}" =~ ([0-9]{8}) ]] || continue
  file_date=${BASH_REMATCH[1]}
  if [ "$(date -d "${file_date}" +%s)" -lt "${cutoff}" ]; then
    aws s3 rm "${BUCKET}/databases/${file}"
  fi
done
echo "Backup completed: $(date)"
Schedule with Cron
# Run nightly at 02:00; append both stdout and stderr to the log
0 2 * * * /root/scripts/s3-backup.sh >> /var/log/s3-backup.log 2>&1