backup docker swarm cluster

#!/bin/bash
set -euo pipefail
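# (abort on errors, on unset variables, and on failures anywhere in a pipeline)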
# Daily backup script for volumes and databases

# Configuration
VOLUME_ROOT="[docker_volumes_location]"
RCLONE_REMOTE="gdrive:[path_in_google_drive_for_backups]"

# Database connection parameters
MARIADB_HOST="[hostname]"
MARIADB_PORT="[port]" # Using default internal port
MARIADB_USER="[user]"
MARIADB_PASSWORD="[password]"
MSSQL_HOST="[host]"
MSSQL_PORT="[port]" # Using default internal port
MSSQL_USER="[user]"
MSSQL_PASSWORD="[password]"
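
# Maps volume name -> "db_type:db_name"; volumes listed here get a database
# dump bundled into their archive.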
declare -A DATABASES=(
    ["[first_volume_name]"]="mariadb:[database_name]"
    ["[second_volume_name]"]="mssql:[database_name]"
)

# Date formats (MONTHLY_DATE and YEARLY_DATE are informational only; retention
# below matches on the daily date embedded in the file names)
DAILY_DATE=$(date +"%Y_%m_%d")
MONTHLY_DATE=$(date +"%Y_%m")
YEARLY_DATE=$(date +"%Y")

# Create temporary working directory
TMP_DIR=$(mktemp -d -t backup-XXXXXX)
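# Remove the scratch directory on any exit, success or failure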
trap 'rm -rf "$TMP_DIR"' EXIT

# Logging function (writes to stderr and aborts)
log_error() {
    local message=$1
    local solution=$2
    echo "ERROR: $(date +"%Y-%m-%d %H:%M:%S") - $message" >&2
    echo "SOLUTION: $solution" >&2
    exit 1
}

# Test SQL Server connection
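# (sqlcmd ships with Microsoft's mssql-tools package)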
test_mssql_connection() {
    local host=$1
    local port=$2
    local user=$3
    local password=$4
    echo "Testing MSSQL connection to $host:$port..."
    sqlcmd -S "$host,$port" -U "$user" -P "$password" -Q "SELECT 1" > /dev/null 2>&1 || \
        log_error "Failed to connect to MSSQL server at $host:$port" "Check SQL Server credentials and network connectivity."
}

# Main backup function
backup_volume() {
    local volume=$1
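    # Split "db_type:db_name" (e.g. "mariadb:mydb") on the first colon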
    local db_type=${2%%:*}
    local db_name=${2##*:}
    echo "Processing $volume volume..."

    # Create backup directory structure
    local volume_type=$(get_volume_type "$volume")
    local remote_path="$RCLONE_REMOTE/${volume_type}/${volume}/backups"
    local archive_name="${DAILY_DATE}_${volume}.tar.bz2"
    local temp_archive="$TMP_DIR/$archive_name"
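    # yields e.g. 2025_03_31_myvolume.tar.bz2 in the scratch directory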

    # Database backup
    case $db_type in
        mariadb)
            echo "Backing up MariaDB database: $db_name"
            mysqldump -h "$MARIADB_HOST" -P "$MARIADB_PORT" -u "$MARIADB_USER" -p"$MARIADB_PASSWORD" \
                --single-transaction \
                "$db_name" > "$TMP_DIR/${volume}.sql" || \
                log_error "Failed to back up MariaDB database: $db_name" "Check database credentials and connectivity."
            ;;
        mssql)
            echo "Backing up MSSQL database: $db_name"
            test_mssql_connection "$MSSQL_HOST" "$MSSQL_PORT" "$MSSQL_USER" "$MSSQL_PASSWORD"
            # NOTE: bcp exports a single table or query, not a whole database;
            # use a fully qualified object here (e.g. [database_name].dbo.[table_name])
            # or a server-side BACKUP DATABASE if you need a complete dump.
            bcp "$db_name" out "$TMP_DIR/${volume}.bcp" \
                -S "$MSSQL_HOST,$MSSQL_PORT" -U "$MSSQL_USER" -P "$MSSQL_PASSWORD" \
                -n -q || \
                log_error "Failed to back up MSSQL database: $db_name" "Check database credentials and connectivity."
            ;;
        *)
            echo "No database for $volume"
            ;;
    esac

    # Create archive, appending the DB dump only if one was produced
    echo "Creating compressed archive..."
    local extra_args=()
    [ -f "$TMP_DIR/${volume}.sql" ] && extra_args+=(-C "$TMP_DIR" "${volume}.sql")
    [ -f "$TMP_DIR/${volume}.bcp" ] && extra_args+=(-C "$TMP_DIR" "${volume}.bcp")
    # (the guarded expansion keeps "set -u" happy on empty arrays in bash < 4.4)
    tar -cjf "$temp_archive" \
        --warning=no-file-changed \
        -C "$VOLUME_ROOT" "$volume" \
        ${extra_args[@]+"${extra_args[@]}"} || \
        log_error "Failed to create archive for volume: $volume" "Ensure sufficient disk space and permissions."

    # Upload to Google Drive
    echo "Uploading to Google Drive..."
    rclone copy "$temp_archive" "$remote_path/" \
        --progress \
        --no-check-certificate \
        --tpslimit 12 \
        --bwlimit 8.5M || \
        log_error "Failed to upload archive to Google Drive for volume: $volume" "Check rclone configuration and network connectivity."

    # Apply retention policies
    apply_retention "$remote_path"
}

# Retention policy function. File names look like YYYY_MM_DD_volume.tar.bz2,
# so retention matches on the underscore-separated date: monthly keepers are
# backups from the 1st of the month, yearly keepers are backups from January 1st.
apply_retention() {
    local path=$1
    echo "Applying retention policies..."
    # Daily backups (keep 15 days; never touch first-of-month keepers)
    rclone delete "$path" \
        --min-age 15d \
        --filter "- ????_??_01_*.tar.bz2" \
        --filter "+ *.tar.bz2" \
        --filter "- *" \
        --progress || \
        log_error "Failed to apply daily retention policy for path: $path" "Check rclone configuration and permissions."
    # Monthly backups (keep 6 months; never touch January 1st keepers)
    rclone delete "$path" \
        --min-age 6M \
        --filter "- ????_01_01_*.tar.bz2" \
        --filter "+ ????_??_01_*.tar.bz2" \
        --filter "- *" \
        --progress || \
        log_error "Failed to apply monthly retention policy for path: $path" "Check rclone configuration and permissions."
    # Yearly backups (keep 3 years)
    rclone delete "$path" \
        --min-age 3y \
        --filter "+ ????_01_01_*.tar.bz2" \
        --filter "- *" \
        --progress || \
        log_error "Failed to apply yearly retention policy for path: $path" "Check rclone configuration and permissions."
}

# Determine volume type (each case branch needs its own distinct volume names)
get_volume_type() {
    local volume=$1
    case $volume in
        [first_media_volume]|[second_media_volume]|[etc_media_volume])
            echo "media"
            ;;
        [first_monitoring_volume]|[second_monitoring_volume]|[etc_monitoring_volume])
            echo "monitoring"
            ;;
        [first_database_volume]|[second_database_volume]|[etc_database_volume])
            echo "database"
            ;;
        *)
            echo "other"
            ;;
    esac
}

# Main execution loop: volumes with databases
for volume in "${!DATABASES[@]}"; do
    backup_volume "$volume" "${DATABASES[$volume]}" || \
        log_error "Error processing $volume volume" "Check the logs above for more details."
done

# Special case for media volumes (file data only, no database dump)
media_volumes=("[first_media_volume]" "[second_media_volume]" "[etc_media_volume]")
for volume in "${media_volumes[@]}"; do
    echo "Processing media volume: $volume"
    volume_type="media"
    remote_path="$RCLONE_REMOTE/$volume_type/$volume/backups"
    archive_name="${DAILY_DATE}_${volume}.tar.bz2"
    temp_archive="$TMP_DIR/$archive_name"
    tar -cjf "$temp_archive" -C "$VOLUME_ROOT" "$volume" || \
        log_error "Failed to create archive for volume: $volume" "Ensure sufficient disk space and permissions."
    rclone copy "$temp_archive" "$remote_path/" || \
        log_error "Failed to upload archive for volume: $volume" "Check rclone configuration and network connectivity."
    apply_retention "$remote_path"
done

# Special case for monitoring volumes
monitoring_volumes=("[first_monitoring_volume]" "[second_monitoring_volume]" "[etc_monitoring_volume]")
for volume in "${monitoring_volumes[@]}"; do
    echo "Processing monitoring volume: $volume"
    volume_type="monitoring"
    remote_path="$RCLONE_REMOTE/$volume_type/$volume/backups"
    archive_name="${DAILY_DATE}_${volume}.tar.bz2"
    temp_archive="$TMP_DIR/$archive_name"
    tar -cjf "$temp_archive" -C "$VOLUME_ROOT" "$volume" || \
        log_error "Failed to create archive for volume: $volume" "Ensure sufficient disk space and permissions."
    rclone copy "$temp_archive" "$remote_path/" || \
        log_error "Failed to upload archive for volume: $volume" "Check rclone configuration and network connectivity."
    apply_retention "$remote_path"
done

# Special case for database volumes
database_volumes=("[first_database_volume]" "[second_database_volume]" "[etc_database_volume]")
for volume in "${database_volumes[@]}"; do
    echo "Processing database volume: $volume"
    volume_type="database"
    remote_path="$RCLONE_REMOTE/$volume_type/$volume/backups"
    archive_name="${DAILY_DATE}_${volume}.tar.bz2"
    temp_archive="$TMP_DIR/$archive_name"
    tar -cjf "$temp_archive" -C "$VOLUME_ROOT" "$volume" || \
        log_error "Failed to create archive for volume: $volume" "Ensure sufficient disk space and permissions."
    rclone copy "$temp_archive" "$remote_path/" || \
        log_error "Failed to upload archive for volume: $volume" "Check rclone configuration and network connectivity."
    apply_retention "$remote_path"
done
echo "Backup process completed at $(date)" |
This script can be used to back up a Docker Swarm cluster by uploading packages of volumes plus database dumps to Google Drive.
It picks up all the physically mounted volumes from a folder, packages each one together with its database dump, and uploads the result to a volume_name/backups folder on Google Drive. The backup names carry timestamps. Replace the placeholders in square brackets with actual values (volume names, credentials, paths, and so on). It's a good way to keep a copy of your cluster in the cloud and not lose your volumes.
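
To run it on a schedule, a cron job on a node that sees the volume mounts works well. A minimal sketch, assuming the script is saved as /usr/local/bin/swarm-backup.sh (a hypothetical path) on a host with rclone, mysqldump, and mssql-tools installed:

chmod +x /usr/local/bin/swarm-backup.sh
# crontab -e: run nightly at 02:30 and keep a log of the last run
30 2 * * * /usr/local/bin/swarm-backup.sh > /var/log/swarm-backup.log 2>&1

To restore, fetch an archive back with rclone copy and unpack it with tar -xjf into the volume root before starting the stack again.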