- install awscli
- create a new AWS role and attach the inline policy (see aws-s3-policy.json file)
- run
aws configure
- use
backup.sh
Last active
January 14, 2022 01:06
-
-
Save saniaky/30985d144374b09bf5118dad52d721fa to your computer and use it in GitHub Desktop.
Back up and restore all MySQL databases from a running Docker MySQL container.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "AllowPolicy",
            "Effect": "Allow",
            "Action": [
                "s3:PutObject",
                "s3:GetObject",
                "s3:ListBucket"
            ],
            "Resource": [
                "arn:aws:s3:::your-bucket-backup/*",
                "arn:aws:s3:::your-bucket-backup"
            ]
        }
    ]
}
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash
# backup.sh — dump ALL MySQL databases from a running Docker MySQL
# container, gzip the dump, upload it to S3, then delete the local copy.
#
# Requires: docker, gzip, awscli (configured via `aws configure` with a
# role/policy allowing s3:PutObject/GetObject/ListBucket on the bucket).
#
# -e: stop on the first failing command.
# -u: error on unset variables.
# -o pipefail: without this, a mysqldump failure in the
#   `mysqldump | gzip` pipeline would be masked by gzip's exit 0 and a
#   truncated/empty dump would be uploaded to S3.
set -euo pipefail

DB_CONTAINER=mysql_db
# NOTE(review): password is visible in `ps` output on the container side;
# prefer MYSQL_PWD in the container env or a .my.cnf file for production.
DB_ROOT_PASSWORD=ChangeMe
S3_BUCKET=YOUR_BUCKET
S3_FOLDER=mysql
BACKUP_FILE_NAME="$(date '+%Y-%m-%d_%H.%M.%S')-${DB_CONTAINER}-db-backup.sql.gz"

# ============ Create DB dump ==================
echo "Making backup to '$BACKUP_FILE_NAME'"
docker exec "${DB_CONTAINER}" /usr/bin/mysqldump \
  --all-databases --routines --flush-privileges --quick --single-transaction --skip-lock-tables \
  -uroot -p"$DB_ROOT_PASSWORD" | gzip >"${BACKUP_FILE_NAME}"
# Flag breakdown:
# --single-transaction: issue BEGIN before dumping so InnoDB tables are
#   captured in a consistent snapshot without locking.
# --quick: dump tables row by row; avoids buffering whole tables in memory
#   (important for low-RAM hosts and/or large databases).
# --skip-lock-tables: do not lock tables for the backup session.

# ============ Upload to AWS S3 ================
echo "Uploading '$BACKUP_FILE_NAME' file to S3 Bucket '$S3_BUCKET'"
aws s3 cp "${BACKUP_FILE_NAME}" "s3://${S3_BUCKET}/${S3_FOLDER}/${BACKUP_FILE_NAME}"

# ========================= Cleanup ========================
echo "Removing backup file ${BACKUP_FILE_NAME}"
rm -f -- "${BACKUP_FILE_NAME}"

# ============ Automate Backups with cron ================
# Create the cron job:
# $ crontab -e
# 0 0 * * * cd /home/user/mysql/backup && /bin/bash backup.sh
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash
# restore.sh — download a gzipped MySQL dump from S3, decompress it, and
# import it into a running Docker MySQL container, then delete the local
# copy. Set BACKUP_FILE_NAME to the S3 object you want to restore.
#
# -e: stop on the first failing command.
# -u: error on unset variables (catches typos in the config block).
# -o pipefail: fail if any pipeline stage fails.
set -euo pipefail

DB_CONTAINER=mysql_db_1
DB_ROOT_PASSWORD=ChangeMe
S3_BUCKET=YOUR_BUCKET
S3_FOLDER=mysql
BACKUP_FILE_NAME=2020-08-27_18.02.29-mysql_db-db-backup.sql.gz

# ======================== Download from S3 ========================
echo "Downloading latest backup - $BACKUP_FILE_NAME"
aws s3 cp "s3://${S3_BUCKET}/${S3_FOLDER}/${BACKUP_FILE_NAME}" "$BACKUP_FILE_NAME"

# ======================== Unzip ========================
echo "Unzipping file $BACKUP_FILE_NAME"
gunzip "${BACKUP_FILE_NAME}"
# gunzip replaced foo.sql.gz with foo.sql; track the new name.
BACKUP_FILE_NAME="${BACKUP_FILE_NAME%.gz}"

# ========================= Import ========================
echo "Importing file $BACKUP_FILE_NAME"
# -i keeps stdin open so the dump can be streamed into mysql.
docker exec -i "${DB_CONTAINER}" /usr/bin/mysql -uroot -p"$DB_ROOT_PASSWORD" <"$BACKUP_FILE_NAME"

# ========================= Cleanup ========================
echo "Removing backup file ${BACKUP_FILE_NAME}"
rm -f -- "${BACKUP_FILE_NAME}"
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment