backup-mysql-magento-stores.sh - easy backups of the MySQL databases behind Magento stores
#!/bin/bash
#
# Goal: Back up the MySQL databases of Magento stores
# Created by: Samuel Maciel Sampaio [20120901] [20140321]
# Contact: [email protected] <smktecnologia.com.br>
# Needed Applications:
# - mysql
# - mysqldump
# - s3put
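#
# Note: s3put ships with the Python "boto" package; a typical install,
# assuming pip is available (adjust to your package manager):
#   pip install boto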
backup_dir="/etc/scripts/backups/mysql"
# S3 Config
s3_access_key="CHANGE-HERE-WITH-YOUR-ACCESS-KEY-HERE"
s3_secret_key="CHANGE-HERE-WITH-YOUR-SECRET-KEY-HERE"
s3_bucket_dest="CHANGE-HERE-WITH-THE-NAME-BUCKET"
s3_dir_path="/"
# Mysql Config
mysql_user="CHANGE-HERE-WITH-MYSQL-ROOT-USER"
mysql_passwd="CHANGE-HERE-WITH-MYSQL-ROOT-PASSWD"
mysql_host="CHANGE-HERE-WITH-MYSQL-HOST-ADDRESS"
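# Note: root works, but a dedicated read-only backup user is safer. A minimal
# sketch of the grants mysqldump typically needs (hypothetical 'backup' user;
# adjust host and password to your setup):
#   GRANT SELECT, LOCK TABLES, SHOW VIEW, TRIGGER ON *.* TO 'backup'@'localhost' IDENTIFIED BY 'secret';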
# Databases to exclude from the dump
ignore_databases="information_schema
innodb
performance_schema
mysql"
# Magento log/report/index tables: keep only the table structure (skeleton), not the data
get_just_skeleton_tables="dataflow_batch_export
dataflow_batch_import
log_customer
log_quote
log_summary
log_summary_type
log_url
log_url_info
log_visitor
log_visitor_info
log_visitor_online
report_viewed_product_index
report_compared_product_index
report_event
index_event
aw_core_logger
catalog_compare_item
catalogindex_aggregation
catalogindex_aggregation_tag
catalogindex_aggregation_to_tag"
# Locate bin paths
s3put_bin=$(which s3put)
mysql_bin=$(which mysql)
mysqldump_bin=$(which mysqldump)
# Ensure the working directories exist and run from the backup directory
mkdir -p "$backup_dir/temp"
mkdir -p "$backup_dir/dumps"
mkdir -p "$backup_dir/migrations_scripts"
cd "$backup_dir" || exit 1
# Create grep filter to ignore databases
grep_filter_ignore_databases=''
for ignore_database in $ignore_databases;
do
grep_filter_ignore_databases="$grep_filter_ignore_databases\\|^$ignore_database\$"
done
grep_filter_ignore_databases="${grep_filter_ignore_databases#\\|}" # strip the leading \|
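# e.g. with the defaults above, the filter ends up as:
#   ^information_schema$\|^innodb$\|^performance_schema$\|^mysql$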
# Get list of databases to dump
db_list=(`$mysql_bin -u$mysql_user -p$mysql_passwd -h$mysql_host -e "show databases" -sN | grep -v "$grep_filter_ignore_databases"`)
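# Timestamp shared by all files of this run, e.g. 140321-0200 (yymmdd-HHMM)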
datetime_now=`date +%y%m%d-%H%M`
# For each database instance
for db_instance in "${db_list[@]}";
do
# Build a specific s3 sub folder in bucket
s3_instance_dir_path="$s3_dir_path/$db_instance"
# Get list of all tables
tables_list=$($mysql_bin -u$mysql_user -p$mysql_passwd -h$mysql_host -e "show tables" -sN -D $db_instance)
# Build filters of ignored temp tables
mysql_params_ignore_tables=''
mysql_skeleton_tables=''
for skeleton_table in $get_just_skeleton_tables;
do
if echo "$tables_list" | grep -qx "$skeleton_table";
then
mysql_params_ignore_tables="$mysql_params_ignore_tables --ignore-table=$db_instance.$skeleton_table"
mysql_skeleton_tables="$mysql_skeleton_tables $skeleton_table"
fi
done
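# e.g. for a hypothetical "mystore" database the loop builds something like:
#   --ignore-table=mystore.log_visitor --ignore-table=mystore.log_url ...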
# Start the backup tasks
echo -e "\nBacking up MySQL instance: $db_instance"
echo "[1] - Creating temp folder: ./temp/backup_$db_instance-$datetime_now"
mkdir -p ./temp/backup_$db_instance-$datetime_now
echo "[2] - Starting mysql backup ..."
echo "[2.1] - Initialize the dump file of dest: ./temp/backup_$db_instance-$datetime_now/$db_instance-$datetime_now.sql"
echo > ./temp/backup_$db_instance-$datetime_now/$db_instance-$datetime_now.sql
if [ -n "$mysql_skeleton_tables" ];
then
echo -e "[2.2] - Get skeleton tables:\n($mysql_params_ignore_tables)\nto: ./temp/backup_$db_instance-$datetime_now/$db_instance-$datetime_now.sql"
$mysqldump_bin --no-data -u$mysql_user -p$mysql_passwd -h$mysql_host $db_instance $mysql_skeleton_tables >> ./temp/backup_$db_instance-$datetime_now/$db_instance-$datetime_now.sql
else
echo "[2.2] - There are no skeleton tables to get"
fi
echo "[2.3] - Get dump of all database data, less skeleton tables to: ./temp/backup_$db_instance-$datetime_now/$db_instance-$datetime_now.sql ..."
$mysqldump_bin -u$mysql_user -p$mysql_passwd -h$mysql_host --single-transaction --quick --skip-lock-tables $mysql_params_ignore_tables $db_instance >> ./temp/backup_$db_instance-$datetime_now/$db_instance-$datetime_now.sql
echo "[3] - Copying migration scripts files ./migrations_scripts/* to: ./temp/backup_$db_instance-$datetime_now"
cp -r ./migrations_scripts/* ./temp/backup_$db_instance-$datetime_now > /dev/null 2>&1
# echo "[3.1] - Appending migration script in begin of file ./$db_instance-$datetime_now.sql.bz2 ..."
# echo "[3.2] - Appending migration script in end of file ./$db_instance-$datetime_now.sql.bz2 ..."
echo "[4] - Compacting sql scripts to: ./dumps/$db_instance-$datetime_now.tar.gz"
cd ./temp
tar -czf ../dumps/$db_instance-$datetime_now.tar.gz backup_$db_instance-$datetime_now
cd ..
echo "[5] - Sending dump file: ./dumps/$db_instance-$datetime_now.tar.gz to: s3://$s3_bucket_dest/$s3_instance_dir_path/$db_instance-$datetime_now.tar.gz"
$s3put_bin -a$s3_access_key -s$s3_secret_key -b$s3_bucket_dest -p$backup_dir -k$s3_instance_dir_path ./dumps/$db_instance-$datetime_now.tar.gz > /dev/null 2>&1
echo "[6] - Erasing local temp folder: ./temp/backup_$db_instance-$datetime_now" > /dev/null 2>&1
rm -fr ./temp/backup_$db_instance-$datetime_now > /dev/null 2>&1
echo "[7] - Erasing local dump file: ./temp/backup_$db_instance-$datetime_now" > /dev/null 2>&1
rm -fr ./dumps/$db_instance-$datetime_now.tar.gz > /dev/null 2>&1
echo "Final S3 URL is:"
echo " https://s3.amazonaws.com/$s3_bucket_dest/$s3_instance_dir_path/dumps/$db_instance-$datetime_now.tar.gz"
done
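# Example usage (assumptions: config values above filled in, script saved
# under /etc/scripts and made executable):
#   chmod +x /etc/scripts/backup-mysql-magento-stores.sh
#   /etc/scripts/backup-mysql-magento-stores.sh
#
# A possible nightly cron entry (hypothetical schedule and log path):
#   0 2 * * * /etc/scripts/backup-mysql-magento-stores.sh >> /var/log/mysql-backup.log 2>&1
#
# Restoring a dump (hypothetical "mystore" database and timestamp):
#   tar -xzf mystore-140321-0200.tar.gz
#   mysql -u root -p mystore < backup_mystore-140321-0200/mystore-140321-0200.sql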