Script to back up a Mastodon instance (for non-dockerized installs)
#!/usr/bin/env bash
#
# Make a backup file of a Mastodon instance. The backup file is a TGZ containing
# the Postgres dump as well as the /public/system (media) files and the
# .env.production file. For loading the dump file contents, see:
# https://github.com/tootsuite/documentation/blob/master/Running-Mastodon/Migration-guide.md
#
# Usage: ./mastodon_backup.sh my_dump_file.tgz
#
# Advanced usage: MASTODON_LIVE_DIRECTORY=/path/to/live ./mastodon_backup.sh my_dump_file.tgz
#
set -e
if [ -z "$MASTODON_LIVE_DIRECTORY" ]; then
  MASTODON_LIVE_DIRECTORY=$HOME/live
  echo "Using $MASTODON_LIVE_DIRECTORY as the live directory. Set MASTODON_LIVE_DIRECTORY=/path/to/live to change"
else
  echo "Using $MASTODON_LIVE_DIRECTORY as the live directory"
fi
POSTGRES_DUMPFILE="postgres-$(date --rfc-3339=s | tr ' ' '_').sql"
OUTPUT_FILENAME="$1"
if [ -z "$OUTPUT_FILENAME" ]; then
  OUTPUT_FILENAME=mastodon_dump.tgz
fi
cd "$MASTODON_LIVE_DIRECTORY"
source .env.production
DB_URL=postgresql://"$DB_USER":"$DB_PASS"@"$DB_HOST":"$DB_PORT"/"$DB_NAME"
pg_dump --dbname="$DB_URL" > "$POSTGRES_DUMPFILE"
TMP_DUMPFILE=/tmp/masto-backup-tgz-"$RANDOM".tgz
tar -czf "$TMP_DUMPFILE" "$POSTGRES_DUMPFILE" .env.production ./public/system
rm -f "$POSTGRES_DUMPFILE"
cd - > /dev/null
mv "$TMP_DUMPFILE" "$OUTPUT_FILENAME"
echo "Created dump file at $OUTPUT_FILENAME"
Cool fork of this script that does S3 uploads as well: https://gist.github.com/vahnj/ce625482329847b589d243b71143b87f
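If you only want the gist of that extra step, uploading the finished archive could be as simple as appending something like this to the end of the script (a sketch only: the bucket name and key prefix are placeholders, and it assumes the AWS CLI is installed and configured):

# Hypothetical final step: copy the archive to S3; bucket name and prefix are placeholders
aws s3 cp "$OUTPUT_FILENAME" "s3://my-backup-bucket/mastodon/$(basename "$OUTPUT_FILENAME")"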