#!/bin/bash
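#
# Restore a client web backup onto this server:
#   - creates a dedicated system user and its home directory layout,
#   - extracts the web tarball into <output>/<domain>/web/www/public,
#   - optionally creates a MySQL database and user and imports the bundled dump,
#   - installs bundled authorized_keys entries and lists bundled crontab files.
#
# Expects to be run as root. Options:
#   --client <name>                 client name (must match NAME_REGEX below), required
#   -u|--unique <id>                unique backup identifier, required
#   -f|--file <archive>             backup archive to restore, required
#   -o|--output <dir>               existing base directory for client homes, required
#   -d|--domain <domain>            project domain (defaults to the client name)
#   --format <format>               backup format (only ".tar.gz" is supported)
#   --dbuser <user>, --dbname <db>  database user and name (default to the client name)
#   --upload-directory <dir>        upload directory (relative to the web root) to make
#                                   group-writable for the HTTP user; may be repeated
#   --http-user <u>, --http-group <g>   HTTP server user and group (default: www-data)
#   --extraction-base-path <path>   leading path inside the archive to strip on extraction
#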

set -e

errcho()
{
    >&2 echo "$@";
}
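
# Allowed client names: lowercase letters, digits, "_" and "-", starting with a letter
# or "_", at most 31 characters.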
NAME_REGEX="^[a-z_][a-z0-9_-]{0,30}$"
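
# Defaults; these can be overridden via the options parsed below.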
UPLOAD_DIRECTORIES=()
BACKUP_FORMAT=".tar.gz"
HTTP_USER="www-data"
HTTP_GROUP="${HTTP_USER}"
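
# Parse command-line options. Every option takes a value in $2; the shift inside each
# arm plus the shift at the end of the loop consume the option and its value.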
while [[ $# -gt 1 ]]
do
    key="$1"

    case $key in
        --client)
            WEB_CLIENT="$2"
            if ! [[ $WEB_CLIENT =~ $NAME_REGEX ]]; then
                errcho "Your provided client name ${WEB_CLIENT} does not match the format: ${NAME_REGEX}"
                exit 2
            fi
            shift
            ;;
        --dbuser)
            DB_USER="$2"
            DATABASE=1
            shift
            ;;
        --dbname)
            DB_NAME="$2"
            DATABASE=1
            shift
            ;;
        -f|--file)
            BACKUP_FILE="$2"
            shift
            ;;
        --format)
            BACKUP_FORMAT="$2"
            shift
            ;;
        -d|--domain)
            PROJECT_DOMAIN="$2"
            shift
            ;;
        -o|--output)
            OUTPUT_FOLDER="$2"
            shift
            ;;
        -u|--unique)
            UNIQUE="$2"
            shift
            ;;
        --upload-directory)
            UPLOAD_DIRECTORIES+=("$2")
            shift
            ;;
        --http-user)
            HTTP_USER="$2"
            shift
            ;;
        --http-group)
            HTTP_GROUP="$2"
            shift
            ;;
        --extraction-base-path)
            EXTRACTION_BASE_PATH="$2"
            shift
            ;;
        *)
            errcho "Unknown option: ${key}"
            exit 2
            ;;
    esac

    shift
done
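
# Validate required inputs before touching the system.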
if [[ ! -f $BACKUP_FILE ]]; then
    errcho "Backup file ${BACKUP_FILE} does not exist."
    exit 3
fi

if [[ $BACKUP_FORMAT != ".tar.gz" ]]; then
    errcho "Invalid backup format: ${BACKUP_FORMAT}."
    exit 3
fi

if [[ -z $UNIQUE ]]; then
    errcho "Unique identifier is not specified."
    exit 3
fi

if [[ -z $WEB_CLIENT ]]; then
    errcho "Web client not specified."
    exit 3
fi

if [[ ! -d $OUTPUT_FOLDER ]]; then
    errcho "Output folder ${OUTPUT_FOLDER} does not exist."
    exit 3
fi

echo "Unique: ${UNIQUE}"
echo "Client: ${WEB_CLIENT}"
if [[ -z $PROJECT_DOMAIN ]]; then
    PROJECT_DOMAIN="${WEB_CLIENT}"
fi

echo "Domain: ${PROJECT_DOMAIN}"
echo "Path: ${OUTPUT_FOLDER}/${PROJECT_DOMAIN}"
echo "Copying archive to temporary file for sudo extraction."

TMP_ARCHIVE=$(mktemp)
if ! cp "${BACKUP_FILE}" "${TMP_ARCHIVE}"
then
    errcho "Failed to copy backup file to temporary path."
    exit 3
fi

echo "Copied."

chmod 777 "${TMP_ARCHIVE}"
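
# Create a dedicated system user (www-<client>) whose home directory is the client web root.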
CLIENT_HOME="${OUTPUT_FOLDER}/${PROJECT_DOMAIN}"
CLIENT_NAME="www-${WEB_CLIENT}"

if ! useradd -d "${CLIENT_HOME}" -c "${PROJECT_DOMAIN}" -m -G "${HTTP_GROUP}" -s /bin/bash "${CLIENT_NAME}"
then
    errcho "Failed to create client ${CLIENT_NAME} at: ${CLIENT_HOME}"
    exit 3
fi

echo "Created www client user: ${CLIENT_NAME}"
PHP_TEMPORARIES="${CLIENT_HOME}/web/php-tmp"
WWW_PUBLIC="${CLIENT_HOME}/web/www/public"

sudo -u "${CLIENT_NAME}" mkdir -p "${CLIENT_HOME}/web/log"
sudo -u "${CLIENT_NAME}" mkdir -p "${PHP_TEMPORARIES}"
chown "${HTTP_USER}:${HTTP_GROUP}" "${PHP_TEMPORARIES}"
sudo -u "${CLIENT_NAME}" mkdir -p "${WWW_PUBLIC}"
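
# Extract the archive into the public web root. If an extraction base path is given,
# tar's --transform strips it from the member names and the leftover top-level
# directory is removed afterwards.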
case "${BACKUP_FORMAT}" in
    ".tar.gz")
        EXTRACTION_PARAMS=()
        if [[ -n $EXTRACTION_BASE_PATH ]]; then
            EXTRACTION_PARAMS=(--transform "s|${EXTRACTION_BASE_PATH}||")
        fi

        echo "Extracting web tarball ${TMP_ARCHIVE} (${BACKUP_FILE}) to ${WWW_PUBLIC}."
        sudo -u "${CLIENT_NAME}" tar xzf "${TMP_ARCHIVE}" -C "${WWW_PUBLIC}" "${EXTRACTION_PARAMS[@]}"
        echo "Extracted."

        if [[ -n $EXTRACTION_BASE_PATH ]]; then
            REMOVAL_EXTRACTION_BASE_PATH=$(echo "${EXTRACTION_BASE_PATH}" | cut -f1 -d/)
            echo "Removing base path..."
            rm -r "${WWW_PUBLIC}/${REMOVAL_EXTRACTION_BASE_PATH}"
            echo "Removed."
        fi
        ;;
esac
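
# Make the configured upload directories writable by the HTTP group:
# directories 775, files 664, owned by the client user and the HTTP group.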
for UPLOAD_DIRECTORY in "${UPLOAD_DIRECTORIES[@]}"
do
    FULL_UPLOAD_DIRECTORY="${WWW_PUBLIC}/${UPLOAD_DIRECTORY}"

    find "${FULL_UPLOAD_DIRECTORY}" -type d -exec chown "${CLIENT_NAME}:${HTTP_GROUP}" {} +
    find "${FULL_UPLOAD_DIRECTORY}" -type d -exec chmod 775 {} +
    find "${FULL_UPLOAD_DIRECTORY}" -type f -exec chown "${CLIENT_NAME}:${HTTP_GROUP}" {} +
    find "${FULL_UPLOAD_DIRECTORY}" -type f -exec chmod 664 {} +

    echo "Prepared upload directory: ${FULL_UPLOAD_DIRECTORY}"
done
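
# If the archive contained a database dump, create the database and a dedicated MySQL
# user with a random password, import the dump, and remove the dump file from the web root.
# Note: the "GRANT ... IDENTIFIED BY" form used below assumes MySQL 5.x or MariaDB;
# MySQL 8 requires a separate CREATE USER statement.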
DB_FILE="${WWW_PUBLIC}/backup-${UNIQUE}-database.sql.gz"
if [[ -f $DB_FILE ]]; then
    if [[ -z $DB_USER ]]; then
        DB_USER=$WEB_CLIENT
        errcho "No database user specified, using web client: ${WEB_CLIENT}"
    fi

    if [[ -z $DB_NAME ]]; then
        DB_NAME=$WEB_CLIENT
        errcho "No database name specified, reusing web client: ${WEB_CLIENT}"
    fi

    if ! mysql -u root -e "CREATE DATABASE IF NOT EXISTS ${DB_NAME};"
    then
        errcho "Failed to create database: ${DB_NAME}"
        exit 3
    fi

    DB_PASS=$(< /dev/urandom tr -dc '_A-Z-a-z-0-9' | head -c 32)

    if ! mysql -u root -e "GRANT ALL PRIVILEGES ON ${DB_NAME}.* TO '${DB_USER}'@'localhost' IDENTIFIED BY '${DB_PASS}';"
    then
        errcho "Failed to create user ${DB_USER} and grant privileges on ${DB_NAME}."
        exit 3
    fi

    echo "Created user ${DB_USER} with password ${DB_PASS} and full privileges on ${DB_NAME}."

    if ! mysql -u root -e "FLUSH PRIVILEGES;"
    then
        errcho "Failed to flush privileges."
        exit 3
    fi

    echo "Importing database..."
    if ! zcat "${DB_FILE}" | mysql -u "${DB_USER}" -p"${DB_PASS}" "${DB_NAME}"
    then
        errcho "Failed to import database dump."
        exit 3
    fi
    echo "Imported."

    if ! rm "${DB_FILE}"
    then
        errcho "Failed to delete database file."
        exit 3
    fi
fi
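
# The backup may also ship SSH public keys and crontab exports alongside the web files,
# named backup-<unique>-*.authorized_keys and backup-<unique>-*.crontab.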
shopt -s nullglob
CRONTAB_FILES=("${WWW_PUBLIC}/backup-${UNIQUE}-"*.crontab)
AUTHORIZED_KEY_FILES=("${WWW_PUBLIC}/backup-${UNIQUE}-"*.authorized_keys)
shopt -u nullglob
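
# Install any bundled public keys into the client user's ~/.ssh/authorized_keys,
# skipping comments and blank lines, then remove the source files from the web root.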
if [[ ${#AUTHORIZED_KEY_FILES[@]} -gt 0 ]]; then
    AUTHORIZED_KEYS_DIRECTORY="${CLIENT_HOME}/.ssh"
    AUTHORIZED_KEYS_FILE="${CLIENT_HOME}/.ssh/authorized_keys"

    sudo -u "${CLIENT_NAME}" mkdir "${AUTHORIZED_KEYS_DIRECTORY}"
    sudo -u "${CLIENT_NAME}" chmod 700 "${AUTHORIZED_KEYS_DIRECTORY}"

    sudo -u "${CLIENT_NAME}" touch "${AUTHORIZED_KEYS_FILE}"
    sudo -u "${CLIENT_NAME}" chmod 600 "${AUTHORIZED_KEYS_FILE}"

    for AUTHORIZED_KEY_FILE in "${AUTHORIZED_KEY_FILES[@]}"
    do
        echo "Importing keys from: ${AUTHORIZED_KEY_FILE}"
        grep -v -E "^\s*(#|$)" "${AUTHORIZED_KEY_FILE}" | while read -r AUTHORIZED_KEY_LINE; do
            echo "Importing: ${AUTHORIZED_KEY_LINE}"
            echo "${AUTHORIZED_KEY_LINE}" >> "${AUTHORIZED_KEYS_FILE}"
        done
        rm "${AUTHORIZED_KEY_FILE}"
    done
fi
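
# Crontabs are not installed automatically; list their entries so the operator can apply
# them manually, then remove the exported files from the web root.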
for CRONTAB_FILE in "${CRONTAB_FILES[@]}"
do
    echo "Make sure to apply crontab from: ${CRONTAB_FILE}"

    if ! grep -v -E "^\s*(#|$)" "${CRONTAB_FILE}"
    then
        echo "No cron entries found (or the file could not be read)."
    fi

    rm "${CRONTAB_FILE}"
done
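
# Clean up the temporary copy of the archive and print the resulting credentials.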
echo "Removing temporary archive..."
rm "${TMP_ARCHIVE}"
echo "Removed temporary archive."

echo "Finished, result:"

echo "SSH_USER: ${CLIENT_NAME}"

if [[ -n $DB_PASS ]]; then
    echo "DB_USER: ${DB_USER}"
    echo "DB_PASS: ${DB_PASS}"
fi

echo "Now create and enable the vhost. If needed, create a separate php-fpm pool."