
backup-volumes.sh
#!/bin/sh

set -e

# Back up docker volumes to S3
# This script lists all docker volumes, creates tar.gz backups,
# uploads them to S3, and deletes local archives one at a time
# to minimize disk space usage.
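#
# Requires the docker and aws CLIs on PATH, and an AWS profile named
# "volume-backup" (exported below) with write access to the configured
# bucket. A typical invocation, e.g. from cron (the log path is only an
# example):
#   ./backup-volumes.sh >>/var/log/backup-volumes.log 2>&1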

export AWS_PROFILE="volume-backup"

# Configuration
S3_BUCKET="homelab-volume-backups-v2"
HOSTNAME=$(hostname -s)
BACKUP_PREFIX="volumes/$HOSTNAME"
TEMP_DIR=$(mktemp -d)
TIMESTAMP=$(date +%Y_%m_%d)
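
# With the settings above, uploads land under keys of the form
# s3://<bucket>/volumes/<host>/<volume>_<date>.tar.gz, e.g. for a host
# named "nas" (the hostname and volume name here are examples):
#   s3://homelab-volume-backups-v2/volumes/nas/myvolume_2024_06_01.tar.gz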

# Logging functions
log_info() {
    echo "[INFO] $1"
}

log_warn() {
    echo "[WARN] $1" >&2
}

log_error() {
    echo "[ERROR] $1" >&2
}

# Validate configuration
validate_config() {
    # Test AWS credentials
    if ! aws s3 ls "s3://$S3_BUCKET" >/dev/null 2>&1; then
        log_error "Cannot access S3 bucket: $S3_BUCKET"
        log_error "Please check your AWS credentials and bucket permissions"
        exit 1
    fi
}

# Cleanup temporary directory
cleanup() {
    if [ -d "$TEMP_DIR" ]; then
        rm -rf "$TEMP_DIR"
        log_info "Cleaned up temporary directory"
    fi
}
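
# Also run cleanup when the script exits early (a set -e failure or the
# exit 1 in validate_config); the -d guard makes repeat calls harmless.
trap cleanup EXIT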

# Back up a single volume
backup_volume() {
    local volume_name="$1"
    local archive_name="${volume_name}_${TIMESTAMP}.tar.gz"
    local archive_path="$TEMP_DIR/$archive_name"
    local s3_path="s3://$S3_BUCKET/$BACKUP_PREFIX/$archive_name"

    log_info "Starting backup of volume: $volume_name"

    # Create tar.gz archive of the volume
    # We use a temporary container to mount and back up the volume
    log_info "Creating archive: $archive_name"

    if docker run --rm \
        -v "$volume_name:/volume:ro" \
        -v "$TEMP_DIR:/backup" \
        alpine \
        tar czf "/backup/$archive_name" -C /volume . 2>/dev/null; then
        log_info "Archive created successfully"
    else
        log_error "Failed to create archive for volume: $volume_name"
        return 1
    fi

    # Get archive size for logging
    if [ -f "$archive_path" ]; then
        archive_size=$(du -h "$archive_path" | cut -f1)
        log_info "Archive size: $archive_size"

        # Upload to S3
        log_info "Uploading to S3: $s3_path"

        if aws s3 cp "$archive_path" "$s3_path"; then
            log_info "Successfully uploaded to S3"

            # Delete local archive to free disk space before the next volume
            rm -f "$archive_path"
            log_info "Deleted local archive"

            return 0
        else
            log_error "Failed to upload to S3"
            rm -f "$archive_path"
            return 1
        fi
    else
        log_error "Archive file not found: $archive_path"
        return 1
    fi
}
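
# Restoring is the mirror image of the backup step above (a sketch;
# the volume and archive names are examples, and docker will create
# the named volume if it does not already exist):
#   docker run --rm -v myvolume:/volume -v "$PWD:/backup" alpine \
#       tar xzf "/backup/myvolume_2024_06_01.tar.gz" -C /volume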

# Main backup process
main() {
    log_info "Docker Volume Backup Script"
    log_info "==========================="
    validate_config
    log_info "Fetching list of Docker volumes..."
    volumes=$(docker volume ls -q)

    if [ -z "$volumes" ]; then
        log_warn "No Docker volumes found"
        cleanup
        exit 0
    fi

    volume_count=$(echo "$volumes" | wc -l | tr -d ' ')
    log_info "Found $volume_count volume(s) to back up"

    successful=0
    failed=0
    current=0
    for volume in $volumes; do
        current=$((current + 1))
        log_info "Processing volume $current/$volume_count"

        if backup_volume "$volume"; then
            successful=$((successful + 1))
        else
            failed=$((failed + 1))
        fi

        echo ""
    done

    # Cleanup
    cleanup

    # Print summary
    log_info "==========================="
    log_info "Backup Summary"
    log_info "==========================="
    log_info "Total volumes: $volume_count"
    log_info "Successful: $successful"
    log_info "Failed: $failed"

    if [ "$failed" -gt 0 ]; then
        log_warn "Some backups failed. Please check the logs above."
        exit 1
    else
        log_info "All backups completed successfully!"
        exit 0
    fi
}

# Run main function
main