Use rclone to archive directories of Db data to S3
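For example, a typical invocation from /mnt/nas/BackupLinux/ on the db server (directory names are illustrative; logexec.sh is assumed to live in $HOME or wherever $LOGEXPATH points):

    ./rclone-db-bkup-to-s3.sh 2018-11-29-daily 2018-11-28-weekly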
#!/bin/bash
# rclone-db-bkup-to-s3.sh
# [2018Nov30 mkartha] transfer backups from the /mnt/nas/BackupLinux/ Db pg dumps to S3 using rclone
# - run this as root from the db server, or from any server with rclone in PATH and access to the db dump files
# - this script assumes rclone has already been configured with S3 credentials under the remote name aws-s3
# skeleton example:
#   restore_scripts/mkartha/logexec.sh rclone sync ./2018-11-28-daily aws-s3:db-backup/db-backups/2018-11-28-daily/ --ignore-checksum --fast-list --verbose
# usage: run this script from the /mnt/nas/BackupLinux/ directory on a db server, passing the name(s) of the backup directories to transfer to S3
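# The aws-s3 remote referenced below must already exist in the rclone config
# (normally created interactively with `rclone config`). A minimal sketch of such a
# remote - the credential values and region below are placeholders, not real settings:
#
#   [aws-s3]
#   type = s3
#   provider = AWS
#   access_key_id = AKIAXXXXXXXXXXXXXXXX
#   secret_access_key = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
#   region = us-east-1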
usage ()
{
    echo "
#
# Usage: run script $0 from the /mnt/nas/BackupLinux/ directory on a db server
# provide one (1) or more parameters as a list of directories to back up to S3:
#   * the name of the backup directory to transfer to S3 - ** don't include the trailing slash '/' character **
#     (e.g. 2018-11-29-daily or 2018-11-28-weekly)
#
"
}
if [ $# -lt 1 ]
then usage
else
    # Check that logexec.sh is available for use
    # - logexec.sh available at: https://gist.github.com/mckartha/7e0241c29cc018647954b20de68c35b9
    LOGEXPATH=${LOGEXPATH:-$HOME}
    if [ -f "$LOGEXPATH/logexec.sh" ]
    then LOGIT="$LOGEXPATH/logexec.sh"
    else
        echo "***
Can't find logexec.sh script in [ $LOGEXPATH ] to log this rclone sync operation
 - please set LOGEXPATH={path-to-logexec.sh} before re-running this script
***"
        exit 1
    fi
    DIRCOUNT=$#
    # get 1 or more parameters as the names of the pg dump backup directories to transfer to S3
    for BKUPDIR in "$@" ; do
        # verify that the backup directory exists and give a useful error message if it doesn't
        if [ -d "./$BKUPDIR" ]; then
            "$LOGIT" rclone sync "./$BKUPDIR" "aws-s3:db-backup/db-backups/$BKUPDIR/" --ignore-checksum --fast-list --verbose
        else
            echo "The directory $BKUPDIR does not exist in the current directory: $PWD"
        fi
    done
    # - or - if run without a parameter, this should check the current date and attempt to transfer the most recent valid backup (i.e. yesterday's backup)
fi
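
# To spot-check an upload afterwards, rclone's ls/size subcommands can be run against
# the same bucket path (sketch; substitute the directory that was just synced):
#   rclone ls   aws-s3:db-backup/db-backups/2018-11-29-daily/
#   rclone size aws-s3:db-backup/db-backups/2018-11-29-daily/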