@tomsseisums
Last active September 11, 2017 08:56
A rough script to back up/restore shared hosting sites into a tarball. [highly opinionated]

TODO:

  • FUCK BASH!
  • Usage help text (see the invocation sketch below)
  • Proper validation
  • Removal of set -e and proper error handling
  • Update functionality
  • One client - multiple sites
  • vhost, pool generation
  • Data reuse (database user, SSH client, etc.)
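
Until the usage help text lands, here is a minimal invocation sketch for the backup script. The flag names come straight from its argument parser; the script filename (backup.sh), the client name, the paths, and the database credentials are placeholder assumptions. The script is expected to run as root so it can read the client's crontab and authorized_keys.

# Hypothetical example invocation (run as root; all values are placeholders):
./backup.sh \
    --client examplecustomer \
    --site /var/www/clients/example.com \
    --name example.com \
    --output /root/backups \
    --dbuser example_db --dbpass 'secret' --dbname example_db \
    --vhost /etc/apache2/sites-available/example.com.conf \
    --extra-file /etc/php/7.0/fpm/pool.d/example.conf
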
#!/bin/bash
# Backup: package a shared-hosting site (files, database dump, crontabs,
# authorized_keys, vhost, extra files) into a single tarball.
set -e

errcho()
{
    >&2 echo "$@"
}

WEB_CLIENTS=()
UNIQUE=$(date +%s | md5sum | awk '{print $1}')
EXTRA_FILES=()

echo "Unique: ${UNIQUE}"
while [[ $# -gt 0 ]]
do
    key="$1"

    case $key in
        --client)
            WEB_CLIENTS+=("$2")
            shift
            ;;
        --dbuser)
            DB_USER="$2"
            DATABASE=1
            shift
            ;;
        --dbpass)
            DB_PASS="$2"
            DATABASE=1
            shift
            ;;
        --dbname)
            DB_NAME="$2"
            DATABASE=1
            shift
            ;;
        --dbtables)
            DB_TABLES="$2"
            shift
            ;;
        --dbextract)
            # Flag only, no value to shift.
            DB_EXTRACT=1
            ;;
        -s|--site)
            SITE_FOLDER="$2"
            if [[ ! -d $SITE_FOLDER ]]; then
                errcho "Site appears to be nonexistent(?)."
                exit 3
            fi
            shift
            ;;
        -n|--name)
            PROJECT_NAME="$2"
            shift
            ;;
        -o|--output)
            OUTPUT_FOLDER="$2"
            if [[ ! -d $OUTPUT_FOLDER ]]; then
                errcho "Output does not seem to be a directory."
                exit 2
            fi
            shift
            ;;
        --keep-old)
            # Flag only, no value to shift.
            KEEP_OLD=1
            ;;
        --vhost)
            VHOST_FILE="$2"
            if [[ ! -f $VHOST_FILE ]]; then
                errcho "Specified virtual host file ${VHOST_FILE} could not be found."
                exit 3
            fi
            shift
            ;;
        --extra-file)
            EXTRA_FILE="$2"
            if [[ -f $EXTRA_FILE ]] || [[ -d $EXTRA_FILE ]]; then
                EXTRA_FILES+=("${EXTRA_FILE}")
            else
                errcho "Specified extra file ${EXTRA_FILE} does not exist."
            fi
            shift
            ;;
        *)
            errcho "Unknown option: ${key}"
            ;;
    esac

    shift
done
if [[ -z $SITE_FOLDER ]]; then
    errcho "Site not specified."
    exit 2
fi

if [[ -z $OUTPUT_FOLDER ]]; then
    OUTPUT_FOLDER=$(pwd)
fi

if [[ $DATABASE == 1 ]]; then
    if [[ -z $DB_USER ]] || [[ -z $DB_PASS ]]; then
        errcho "Invalid database setup. Make sure you have specified a user and a password."
        exit 4
    else
        # Fall back to the database user if the database name was omitted.
        if [[ -z $DB_NAME ]]; then
            DB_NAME=$DB_USER
        fi

        echo "Dumping database: ${DB_NAME}"
        DB_FILE="${SITE_FOLDER}/backup-${UNIQUE}-database.sql.gz"

        # DB_TABLES is intentionally unquoted so a space-separated table list splits into words.
        mysqldump -u "${DB_USER}" -p"${DB_PASS}" "${DB_NAME}" $DB_TABLES | gzip > "${DB_FILE}"
        echo "Exported database to: ${DB_FILE}"

        if [[ $DB_EXTRACT == 1 ]]; then
            echo "Exiting after database extract."
            exit 0
        fi
    fi
fi
if [[ -n $VHOST_FILE ]]; then
    VHOST_TARGET_FILE="${SITE_FOLDER}/backup-${UNIQUE}-vhost.conf"
    if cp "${VHOST_FILE}" "${VHOST_TARGET_FILE}"
    then
        echo "Copied virtual host to: ${VHOST_TARGET_FILE}"
    fi
fi
for EXTRA_FILE in "${EXTRA_FILES[@]}"
do
    EXTRA_FILE_FOLDER="${SITE_FOLDER}/backup-${UNIQUE}-extra-files"
    EXTRA_FILE_STRUCTURAL_PATH=$(readlink -f "${EXTRA_FILE}")
    EXTRA_FILE_COPY_PATH="${EXTRA_FILE_FOLDER}/${EXTRA_FILE_STRUCTURAL_PATH}"
    EXTRA_FILE_COPY_DIRECTORY=$(dirname "${EXTRA_FILE_COPY_PATH}")

    # Mirror the file's absolute path under the backup folder, then copy it.
    if ! mkdir -p "${EXTRA_FILE_COPY_DIRECTORY}" &> /dev/null
    then
        echo "Failed to create directories for: ${EXTRA_FILE}"
    elif ! cp -a "${EXTRA_FILE}" "${EXTRA_FILE_COPY_PATH}" &> /dev/null; then
        echo "Failed to copy: ${EXTRA_FILE}"
    fi
done
for WEB_CLIENT in "${WEB_CLIENTS[@]}"
do
    echo "Working with web client: ${WEB_CLIENT}"

    if ! id "${WEB_CLIENT}" &> /dev/null
    then
        errcho "Cannot resolve web client to user."
        exit 10
    fi

    CLIENT_HOME=$(eval echo "~${WEB_CLIENT}")
    echo "Resolved home: ${CLIENT_HOME}"

    CRONTAB_FILE="${SITE_FOLDER}/backup-${UNIQUE}-${WEB_CLIENT}.crontab"
    if crontab -u "${WEB_CLIENT}" -l 1> "${CRONTAB_FILE}" 2> /dev/null
    then
        echo "Saved crontab to: ${CRONTAB_FILE}"
    else
        echo "Empty crontab."
        rm "${CRONTAB_FILE}"
    fi

    CLIENT_SSH_KEY_FILE="${CLIENT_HOME}/.ssh/authorized_keys"
    KEY_FILE="${SITE_FOLDER}/backup-${UNIQUE}-${WEB_CLIENT}.authorized_keys"
    if cp "${CLIENT_SSH_KEY_FILE}" "${KEY_FILE}" &> /dev/null
    then
        echo "Copied authorized_keys to: ${KEY_FILE}"
    fi
done
if [[ -z $PROJECT_NAME ]]; then
    SITE_NAME=$(basename "${SITE_FOLDER}")
else
    SITE_NAME=$PROJECT_NAME
fi

if [[ $KEEP_OLD != 1 ]]; then
    echo "Cleaning up previous backups..."
    if ! rm -v "${OUTPUT_FOLDER}/${SITE_NAME}-backup-"*.tar.gz
    then
        echo "Some previous backups left over."
    fi
fi

echo "Exporting: ${SITE_NAME}"
OUTPUT="${OUTPUT_FOLDER}/${SITE_NAME}-backup-${UNIQUE}.tar.gz"

if ! tar -czf "${OUTPUT}" "${SITE_FOLDER}"
then
    errcho "Failed to create tar archive."
    exit 8
fi

echo "Exported to: ${OUTPUT}"

echo "Cleanup..."
if ! rm -rv "${SITE_FOLDER}/backup-${UNIQUE}"*
then
    errcho "Failed cleaning up."
    exit 9
fi

echo "Finished"
#!/bin/bash
# Restore: unpack a backup tarball into a new www client user's home,
# recreate and import the database, and report the keys/crontabs to apply.
set -e

errcho()
{
    >&2 echo "$@"
}

NAME_REGEX="^[a-z_][a-z0-9_-]{0,30}$"
UPLOAD_DIRECTORIES=()
BACKUP_FORMAT=".tar.gz"
HTTP_USER="www-data"
HTTP_GROUP="${HTTP_USER}"
while [[ $# -gt 1 ]]
do
    key="$1"

    case $key in
        --client)
            WEB_CLIENT="$2"
            if ! [[ $WEB_CLIENT =~ $NAME_REGEX ]]; then
                errcho "Your provided client name ${WEB_CLIENT} does not match the format: ${NAME_REGEX}"
                exit 2
            fi
            shift
            ;;
        --dbuser)
            DB_USER="$2"
            DATABASE=1
            shift
            ;;
        --dbname)
            DB_NAME="$2"
            DATABASE=1
            shift
            ;;
        -f|--file)
            BACKUP_FILE="$2"
            shift
            ;;
        --format)
            BACKUP_FORMAT="$2"
            shift
            ;;
        -d|--domain)
            PROJECT_DOMAIN="$2"
            shift
            ;;
        -o|--output)
            OUTPUT_FOLDER="$2"
            shift
            ;;
        -u|--unique)
            UNIQUE="$2"
            shift
            ;;
        --upload-directory)
            UPLOAD_DIRECTORIES+=("$2")
            shift
            ;;
        --http-user)
            HTTP_USER="$2"
            shift
            ;;
        --http-group)
            HTTP_GROUP="$2"
            shift
            ;;
        --extraction-base-path)
            EXTRACTION_BASE_PATH="$2"
            shift
            ;;
        *)
            errcho "Unknown option: ${key}"
            ;;
    esac

    shift
done
if [[ ! -f $BACKUP_FILE ]]; then
    errcho "Backup file ${BACKUP_FILE} does not exist."
    exit 3
fi

if [[ $BACKUP_FORMAT != ".tar.gz" ]]; then
    errcho "Invalid backup format: ${BACKUP_FORMAT}."
    exit 3
fi

if [[ -z $UNIQUE ]]; then
    errcho "Unique is not specified."
    exit 3
fi

if [[ -z $WEB_CLIENT ]]; then
    errcho "Web client not specified."
    exit 3
fi

if [[ ! -d $OUTPUT_FOLDER ]]; then
    errcho "Output folder does not exist."
    exit 3
fi

echo "Unique: ${UNIQUE}"
echo "Client: ${WEB_CLIENT}"

if [[ -z $PROJECT_DOMAIN ]]; then
    PROJECT_DOMAIN="${WEB_CLIENT}"
fi

echo "Domain: ${PROJECT_DOMAIN}"
echo "Path: ${OUTPUT_FOLDER}/${PROJECT_DOMAIN}"
echo "Copying archive to temporary directory for sudo extraction."
TMP_ARCHIVE=$(mktemp)
if ! cp "${BACKUP_FILE}" "${TMP_ARCHIVE}"
then
errcho "Failed to copy backup file to temporary path."
exit 3
fi
echo "Copied."
chmod 777 "${TMP_ARCHIVE}"
CLIENT_HOME="${OUTPUT_FOLDER}/${PROJECT_DOMAIN}"
CLIENT_NAME="www-${WEB_CLIENT}"
if ! useradd -d "${CLIENT_HOME}" -c "${PROJECT_DOMAIN}" -m -G www-data -s /bin/bash "${CLIENT_NAME}"
then
errcho "Failed to create client ${CLIENT_NAME} at: ${CLIENT_HOME}"
exit 3
fi
echo "Created www client user: ${CLIENT_NAME}"
PHP_TEMPORARIES="${CLIENT_HOME}/web/php-tmp"
WWW_PUBLIC="${CLIENT_HOME}/web/www/public"
sudo -u "${CLIENT_NAME}" mkdir -p "${CLIENT_HOME}/web/log"
sudo -u "${CLIENT_NAME}" mkdir -p "${PHP_TEMPORARIES}"
chown $HTTP_USER:$HTTP_GROUP "${PHP_TEMPORARIES}"
sudo -u "${CLIENT_NAME}" mkdir -p "${WWW_PUBLIC}"
case "${BACKUP_FORMAT}" in
".tar.gz")
if [[ ! -z $EXTRACTION_BASE_PATH ]]; then
EXTRACTION_PARAMS="--transform s|${EXTRACTION_BASE_PATH}||"
fi
echo "Extracting web tarball ${TMP_ARCHIVE} (${BACKUP_FILE}) to ${WWW_PUBLIC}."
sudo -u "${CLIENT_NAME}" tar xzf "${TMP_ARCHIVE}" -C "${WWW_PUBLIC}" $EXTRACTION_PARAMS
echo "Extracted."
if [[ ! -z $EXTRACTION_BASE_PATH ]]; then
REMOVAL_EXTRACTION_BASE_PATH=$(echo "${EXTRACTION_BASE_PATH}" | cut -f1 -d/)
echo "Removing base path..."
rm -r "${WWW_PUBLIC}/${REMOVAL_EXTRACTION_BASE_PATH}"
echo "Removed."
fi
;;
esac
for UPLOAD_DIRECTORY in "${UPLOAD_DIRECTORIES[@]}"
do
    FULL_UPLOAD_DIRECTORY="${WWW_PUBLIC}/${UPLOAD_DIRECTORY}"

    find "${FULL_UPLOAD_DIRECTORY}" -type d -exec chown "${CLIENT_NAME}:${HTTP_GROUP}" {} +
    find "${FULL_UPLOAD_DIRECTORY}" -type d -exec chmod 775 {} +
    find "${FULL_UPLOAD_DIRECTORY}" -type f -exec chown "${CLIENT_NAME}:${HTTP_GROUP}" {} +
    find "${FULL_UPLOAD_DIRECTORY}" -type f -exec chmod 664 {} +

    echo "Prepared upload directory: ${FULL_UPLOAD_DIRECTORY}"
done
DB_FILE="${WWW_PUBLIC}/backup-${UNIQUE}-database.sql.gz"
if [[ -f $DB_FILE ]]; then
    if [[ -z $DB_USER ]]; then
        DB_USER=$WEB_CLIENT
        errcho "No database user specified, using web client: ${WEB_CLIENT}"
    fi
    if [[ -z $DB_NAME ]]; then
        DB_NAME=$WEB_CLIENT
        errcho "No database name specified, reusing web client: ${WEB_CLIENT}"
    fi

    if ! mysql -u root -e "CREATE DATABASE IF NOT EXISTS ${DB_NAME};"
    then
        errcho "Failed to create database: ${DB_NAME}"
        exit 3
    fi

    # Generate a random 32-character password.
    DB_PASS=$(< /dev/urandom tr -dc '_A-Za-z0-9-' | head -c 32)

    if ! mysql -u root -e "GRANT ALL PRIVILEGES ON ${DB_NAME}.* TO '${DB_USER}'@'localhost' IDENTIFIED BY '${DB_PASS}';"
    then
        errcho "Failed to create user ${DB_USER} and grant privileges to ${DB_NAME}."
        exit 3
    fi
    echo "Created user ${DB_USER} with password ${DB_PASS} and full privileges on ${DB_NAME}."

    if ! mysql -u root -e "FLUSH PRIVILEGES;"
    then
        errcho "Failed to flush privileges."
        exit 3
    fi

    echo "Importing database..."
    if ! zcat "${DB_FILE}" | mysql -u "${DB_USER}" -p"${DB_PASS}" "${DB_NAME}"
    then
        errcho "Failed to import database dump."
        exit 3
    fi
    echo "Imported."

    if ! rm "${DB_FILE}"
    then
        errcho "Failed to delete database file."
        exit 3
    fi
fi
shopt -s nullglob
CRONTAB_FILES=("${WWW_PUBLIC}/backup-${UNIQUE}-"*.crontab)
AUTHORIZED_KEY_FILES=("${WWW_PUBLIC}/backup-${UNIQUE}-"*.authorized_keys)
shopt -u nullglob

if [[ ${#AUTHORIZED_KEY_FILES[@]} -gt 0 ]]; then
    AUTHORIZED_KEYS_DIRECTORY="${CLIENT_HOME}/.ssh"
    AUTHORIZED_KEYS_FILE="${AUTHORIZED_KEYS_DIRECTORY}/authorized_keys"

    sudo -u "${CLIENT_NAME}" mkdir "${AUTHORIZED_KEYS_DIRECTORY}"
    sudo -u "${CLIENT_NAME}" chmod 700 "${AUTHORIZED_KEYS_DIRECTORY}"
    sudo -u "${CLIENT_NAME}" touch "${AUTHORIZED_KEYS_FILE}"
    sudo -u "${CLIENT_NAME}" chmod 600 "${AUTHORIZED_KEYS_FILE}"

    for AUTHORIZED_KEY_FILE in "${AUTHORIZED_KEY_FILES[@]}"
    do
        echo "Importing keys from: ${AUTHORIZED_KEY_FILE}"

        # Skip comments and blank lines.
        grep -v -E "^\s*(#|$)" "${AUTHORIZED_KEY_FILE}" | while read -r AUTHORIZED_KEY_LINE; do
            echo "Importing: ${AUTHORIZED_KEY_LINE}"
            echo "${AUTHORIZED_KEY_LINE}" >> "${AUTHORIZED_KEYS_FILE}"
        done

        rm "${AUTHORIZED_KEY_FILE}"
    done
fi
for CRONTAB_FILE in "${CRONTAB_FILES[@]}"
do
    echo "Make sure to apply crontab from: ${CRONTAB_FILE}"

    # Print the non-comment, non-blank entries.
    if ! grep -v -E "^\s*(#|$)" "${CRONTAB_FILE}"
    then
        echo "No cron entries found (or the file could not be read)."
    fi

    rm "${CRONTAB_FILE}"
done
echo "Removing temporary archive..."
rm "${TMP_ARCHIVE}"
echo "Removed temporary archive."
echo "Finished, result:"
echo "SSH_USER: ${CLIENT_NAME}"
if [[ ! -z $DB_PASS ]]; then
echo "DB_USER: ${DB_USER}"
echo "DB_PASS: ${DB_PASS}"
fi
echo "Now create and enable vhost. If needed, create separate php-fpm pool."