#!/bin/sh
set -e

# Backup Docker volumes to S3
# This script lists all Docker volumes, creates tar.gz backups,
# uploads them to S3, and deletes local archives one at a time
# to minimize disk space usage.

export AWS_PROFILE="volume-backup"

# Configuration
S3_BUCKET="homelab-volume-backups-v2"
HOSTNAME=$(hostname -s)
BACKUP_PREFIX="volumes/$HOSTNAME"
TEMP_DIR=$(mktemp -d)
TIMESTAMP=$(date +%Y_%m_%d)

# Logging functions
log_info() {
    echo "[INFO] $1"
}

log_warn() {
    echo "[WARN] $1"
}

log_error() {
    echo "[ERROR] $1"
}

# Validate configuration
validate_config() {
    # Test AWS credentials
    if ! aws s3 ls "s3://$S3_BUCKET" >/dev/null 2>&1; then
        log_error "Cannot access S3 bucket: $S3_BUCKET"
        log_error "Please check your AWS credentials and bucket permissions"
        exit 1
    fi
}

# Cleanup temporary directory
cleanup() {
    if [ -d "$TEMP_DIR" ]; then
        rm -rf "$TEMP_DIR"
        log_info "Cleaned up temporary directory"
    fi
}

# Ensure the temporary directory is removed even if the script exits early
# (e.g. failed credential check or a set -e abort)
trap cleanup EXIT

# Backup a single volume
backup_volume() {
    local volume_name="$1"
    local archive_name="${volume_name}_${TIMESTAMP}.tar.gz"
    local archive_path="$TEMP_DIR/$archive_name"
    local s3_path="s3://$S3_BUCKET/$BACKUP_PREFIX/$archive_name"

    log_info "Starting backup of volume: $volume_name"

    # Create tar.gz archive of the volume
    # We use a temporary container to mount and back up the volume
    log_info "Creating archive: $archive_name"
    if docker run --rm \
        -v "$volume_name:/volume:ro" \
        -v "$TEMP_DIR:/backup" \
        alpine \
        tar czf "/backup/$archive_name" -C /volume . 2>/dev/null; then
        log_info "Archive created successfully"
    else
        log_error "Failed to create archive for volume: $volume_name"
        return 1
    fi

    # Get archive size for logging
    if [ -f "$archive_path" ]; then
        archive_size=$(du -h "$archive_path" | cut -f1)
        log_info "Archive size: $archive_size"

        # Upload to S3
        log_info "Uploading to S3: $s3_path"
        if aws s3 cp "$archive_path" "$s3_path"; then
            log_info "Successfully uploaded to S3"
            # Delete local archive
            rm -f "$archive_path"
            log_info "Deleted local archive"
            return 0
        else
            log_error "Failed to upload to S3"
            rm -f "$archive_path"
            return 1
        fi
    else
        log_error "Archive file not found: $archive_path"
        return 1
    fi
}

# Main backup process
main() {
    log_info "Docker Volume Backup Script"
    log_info "==========================="

    validate_config

    log_info "Fetching list of Docker volumes..."
    volumes=$(docker volume ls -q)

    if [ -z "$volumes" ]; then
        log_warn "No Docker volumes found"
        cleanup
        exit 0
    fi

    volume_count=$(echo "$volumes" | wc -l | tr -d ' ')
    log_info "Found $volume_count volume(s) to back up"

    successful=0
    failed=0
    current=0

    for volume in $volumes; do
        current=$((current + 1))
        log_info "Processing volume $current/$volume_count"

        if backup_volume "$volume"; then
            successful=$((successful + 1))
        else
            failed=$((failed + 1))
        fi
        echo ""
    done

    # Cleanup
    cleanup

    # Print summary
    log_info "==========================="
    log_info "Backup Summary"
    log_info "==========================="
    log_info "Total volumes: $volume_count"
    log_info "Successful: $successful"
    log_info "Failed: $failed"

    if [ "$failed" -gt 0 ]; then
        log_warn "Some backups failed. Please check the logs above."
        exit 1
    else
        log_info "All backups completed successfully!"
        exit 0
    fi
}

# Run main function
main
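
# ---------------------------------------------------------------------------
# Example usage (a sketch only; the script filename, install path, log file,
# and cron schedule below are assumptions, not defined by this script):
#
#   Run interactively on the Docker host:
#       ./backup-volumes.sh
#
#   Or schedule a nightly run at 02:00 via cron, capturing output to a log:
#       0 2 * * * /opt/scripts/backup-volumes.sh >> /var/log/volume-backup.log 2>&1
#
# The script exits non-zero if any volume backup fails, so a cron mail or
# monitoring hook on the exit status will surface partial failures.
# ---------------------------------------------------------------------------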