#!/bin/bash
# Usage:
# bash s3BackUp.sh YOUR_BACKUP_DIRECTORY BACKUP_NAME YOUR_BUCKET MAIL_FROM MAIL_TO (OPTIONAL: S3_FOLDER PROFILE)
# bash s3BackUp.sh /var/www/webdisk/example.com/ example my_bucket [email protected] [email protected] backup default
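#
# Requires: aws-cli with credentials configured for the chosen profile, an
# SES-verified sender address for the failure alerts, and apt-get available
# so the script can install jq on demand.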
# Arguments:
readonly BACKUP_PATH_NO_REMOVE=$1
readonly BACKUP_NAME=$2
readonly S3_BUCKET_NAME=$3
readonly MAIL_FROM=$4
readonly MAIL_TO=$5
readonly S3_FOLDER=${6-backup}   # optional, defaults to "backup"
readonly PROFILE=${7-default}    # optional, defaults to the "default" aws-cli profile
# Default:
readonly PREFIX=backup_
readonly DATE=$(date +%d-%m-%Y)
readonly BACKUP_FILE_NAME=${PREFIX}${BACKUP_NAME}_${DATE}.tgz
readonly BACKUP_PATH_FILE=${HOME}/${BACKUP_FILE_NAME}
readonly S3_BUCKET_BACKUP=s3://${S3_BUCKET_NAME}/${S3_FOLDER}/
readonly S3_OUTPUT_BACKUP=${S3_FOLDER}/${BACKUP_FILE_NAME}
readonly TEMP_PARTS=${HOME}/temp-parts
readonly TEMP_EXTRACT=${HOME}/temp-extract
#####
# Remove the temporary directories and the local archive on every exit,
# whether the run succeeded or failed.
function finish() {
    rm -r "${TEMP_PARTS}" 2> /dev/null
    rm -r "${TEMP_EXTRACT}" 2> /dev/null
    rm "${BACKUP_PATH_FILE}" 2> /dev/null
}
trap finish EXIT
#####
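# Send a plain-text/HTML alert through Amazon SES. Note that SES rejects the
# request unless the --from address has been verified in SES beforehand.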
function s3-send-mail() {
    local from=$1
    local to=$2
    local subject=$3
    local text=$4
    # Shorthand --message syntax: subject and body must not contain unescaped commas
    aws ses send-email \
        --from "$from" \
        --destination "ToAddresses=${to}" \
        --message "Subject={Data=${subject},Charset=utf8},Body={Text={Data=${text},Charset=utf8},Html={Data=${text},Charset=utf8}}"
}
#####
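# Upload one file to S3 in parts: split it locally, start a multipart upload,
# send each part and collect its ETag, then complete the upload so S3
# reassembles the object server-side.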
function s3-multipart-upload() {
    # jq is needed to parse the aws-cli JSON responses; install it only if missing
    command -v jq > /dev/null || sudo apt-get install -y -qq jq
    local filePath=$1           # file to upload
    local bucket=$2             # name of the S3 bucket
    local s3Folder=$3           # destination key of the file in S3
    local dirParts=$4           # local folder where the parts of the file are created
    local profile=${5-default}  # configuration profile of aws-cli
    # 90 MB per part by default; S3 accepts parts of 5 MB-5 GB, and AWS
    # recommends multipart uploads once an object reaches about 100 MB
    mbSplitSize=90
    ((partSize = mbSplitSize * 1000000))
    # Get main file size and the number of parts needed (rounded up)
    echo "Preparing $filePath for multipart upload"
    fileSize=$(wc -c < "$filePath")
    ((parts = (fileSize + partSize - 1) / partSize))
    # Get main file hash
    mainMd5Hash=$(openssl md5 -binary "$filePath" | base64)
    # Make a clean directory to store the temporary parts
    echo "Splitting $filePath into $parts temporary parts"
    rm -r "${dirParts}" 2> /dev/null
    mkdir -p "${dirParts}"
    cd "${dirParts}" || exit 1
    split -b "$partSize" "$filePath"
    # Create multipart upload
    echo "Initiating multipart upload for $filePath"
    uploadId=$(aws s3api create-multipart-upload --bucket "$bucket" --key "$s3Folder" --metadata md5="$mainMd5Hash" --profile "$profile" | jq -r '.UploadId')
    # Generate the fileparts.json file that will be used at the end of the multipart upload
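    # Expected shape, with an illustrative MD5, e.g.:
    #   {"Parts":[{"ETag":"\"9b2cf535f27731c974343645a3985328\"","PartNumber":1}, ... ]}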
jsonData="{\"Parts\":[" | |
for file in * | |
do | |
((index++)) | |
echo "Uploading part $index of $parts..." | |
hashData=`openssl md5 -binary $file | base64` | |
eTag=`aws s3api upload-part --bucket $bucket --key $s3Folder --part-number $index --body $file --upload-id $uploadId --profile $profile | jq -r '.ETag'` | |
jsonData+="{\"ETag\":$eTag,\"PartNumber\":$index}" | |
if (( $index == $parts )) | |
then | |
jsonData+="]}" | |
else | |
jsonData+="," | |
fi | |
done | |
jq -n $jsonData > fileparts.json | |
    # Complete multipart upload, check ETag to verify success
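    # For multipart uploads S3 returns a composite ETag of the form
    # "<md5-of-part-md5s>-<partCount>"; an empty or "null" value means the call failed.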
    mainEtag=$(aws s3api complete-multipart-upload --multipart-upload file://fileparts.json --bucket "$bucket" --key "$s3Folder" --upload-id "$uploadId" --profile "$profile" | jq -r '.ETag')
    if [[ -n "$mainEtag" && "$mainEtag" != "null" ]]
    then
        echo "Successfully uploaded: $filePath to S3 bucket: $bucket"
    else
        echo "Something went wrong! $filePath was not uploaded to S3 bucket: $bucket"
        # SEND FAULT REPORT
        s3-send-mail "${MAIL_FROM}" "${MAIL_TO}" "ALERT BACKUP FAULT - ${BACKUP_NAME}" "Verify the sending of file parts to the AWS S3 service"
        exit 1
    fi
    # Clean up the part files (leave the directory before deleting it)
    cd "${HOME}" || exit 1
    rm -r "${dirParts}"
}
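# Illustrative call (mirrors how main() invokes it below):
#   s3-multipart-upload "$HOME/backup_example_14-03-2019.tgz" my_bucket backup/backup_example_14-03-2019.tgz "$HOME/temp-parts" default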
#####
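# Entry point: archive the directory, verify the archive by test-extracting
# it, upload it to S3, then prune backups older than seven days.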
function main() {
    # Remove any previous local archive with the same name
    rm "${BACKUP_PATH_FILE}" 2> /dev/null
    # CREATE BACKUP ARCHIVE
    tar czvf "${BACKUP_PATH_FILE}" "${BACKUP_PATH_NO_REMOVE}"
    # VERIFY BACKUP ARCHIVE
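    # A full test extraction catches truncated or otherwise corrupt archives
    # before any upload bandwidth is spent.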
    rm -r "${TEMP_EXTRACT}" 2> /dev/null
    mkdir -p "${TEMP_EXTRACT}"
    if ! tar xzf "${BACKUP_PATH_FILE}" --directory "${TEMP_EXTRACT}" > /dev/null
    then
        echo "File is corrupted ... ${BACKUP_PATH_FILE}"
        # SEND FAULT REPORT
        s3-send-mail "${MAIL_FROM}" "${MAIL_TO}" "ALERT BACKUP FAULT - ${BACKUP_NAME}" "Check backup compression"
        exit 1
    fi
    rm -r "${TEMP_EXTRACT}"
    # SEND NEW BACKUP TO S3
    s3-multipart-upload "${BACKUP_PATH_FILE}" "${S3_BUCKET_NAME}" "${S3_OUTPUT_BACKUP}" "${TEMP_PARTS}" "${PROFILE}"
    # DELETE OLD BACKUP IN S3
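    # Parse the date column of `aws s3 ls` and delete any object in the backup
    # folder whose timestamp is more than 7 days old.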
    aws s3 ls "${S3_BUCKET_BACKUP}" --profile "${PROFILE}" | while read -r line
    do
        strCreateDate=$(echo "$line" | awk '{print $1" "$2}')
        if date -d "${strCreateDate}" > /dev/null 2>&1
        then
            createDate=$(date --date "$strCreateDate" +%s)
            olderThan=$(date --date "7 days ago" +%s)
            if [[ $createDate -lt $olderThan ]]
            then
                filePath=$(echo "$line" | awk '{print $4}')
                if [[ $filePath != "" ]]
                then
                    aws s3 rm "${S3_BUCKET_BACKUP}${filePath}" --profile "${PROFILE}"
                fi
            fi
        fi
    done
}
main |