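# Weekly PostgreSQL backup workflow: dump the database inside its Docker
# container on the production host over SSH, validate the dump, upload it to
# Azure Blob Storage, and prune backups older than 30 days.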
name: Backup Production Database

on:
  schedule:
    - cron: '0 0 * * 1' # every Monday at 00:00 UTC
  workflow_dispatch: # allow manual runs

jobs:
  backup-db:
    runs-on: ubuntu-latest
    env:
      SSH_PRIVATE_KEY: ${{ secrets.PRODUCTION_PRIVATE_SSH_KEY }}
      SERVER_HOST: ${{ secrets.PRODUCTION_SERVER_HOST }}
      SERVER_USER: ${{ secrets.PRODUCTION_SERVER_USER }}
    steps:
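      # Writes the deploy key and a 'production' host alias that every later
      # ssh/scp call relies on. Note: StrictHostKeyChecking=no skips host-key
      # verification; pinning the server's key in ~/.ssh/known_hosts would be
      # the safer option.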
      - name: Setup SSH Connection
        run: |
          if [ -z "$SERVER_HOST" ] || [ -z "$SERVER_USER" ] || [ -z "$SSH_PRIVATE_KEY" ]; then
            echo "❌ Error: Required SSH environment variables are not set"
            exit 1
          fi
          mkdir -p ~/.ssh
          echo "$SSH_PRIVATE_KEY" > ~/.ssh/id_rsa
          chmod 600 ~/.ssh/id_rsa
          cat >> ~/.ssh/config << EOF
          Host production
            HostName $SERVER_HOST
            User $SERVER_USER
            IdentityFile ~/.ssh/id_rsa
            StrictHostKeyChecking no
            ServerAliveInterval 30
            ServerAliveCountMax 3
            ConnectTimeout 10
          EOF
          echo "✅ SSH configuration completed"
      - name: Create Database Backup
        id: create_backup
        run: |
          echo "$(date): 📊 Starting PostgreSQL backup" > backup_log.txt
          ssh production << 'EOF' > backup_vars.txt
            set -e
            TIMESTAMP=$(date +%Y%m%d_%H%M%S)
            BACKUP_FILE="backup_${TIMESTAMP}.dump"
            LOG_FILE="/tmp/backup_${TIMESTAMP}.log"
            echo "$(date): 🔄 Creating db dump..." >> $LOG_FILE
            if ! docker exec postgres_production pg_dump \
              -Fc \
              -U my_pg_user \
              -d my_db_production \
              -f /tmp/$BACKUP_FILE \
              --verbose \
              --no-password 2>> $LOG_FILE; then
              echo "$(date): ❌ ERROR - Backup creation failed" >> $LOG_FILE
              exit 1
            fi
            echo "$(date): 📤 Copying backup from container..." >> $LOG_FILE
            # cat runs in the container; the redirect writes the same path on the host.
            if ! docker exec postgres_production cat /tmp/$BACKUP_FILE > /tmp/$BACKUP_FILE; then
              echo "$(date): ❌ ERROR - Failed to copy backup from container" >> $LOG_FILE
              exit 1
            fi
            docker exec postgres_production rm -f /tmp/$BACKUP_FILE
            BACKUP_SIZE=$(stat -c%s "/tmp/$BACKUP_FILE")
            echo "$(date): Backup created successfully - Size: $BACKUP_SIZE bytes" >> $LOG_FILE
            MIN_SIZE=10000 # 10KB minimum, adjust based on your database
            if [ $BACKUP_SIZE -lt $MIN_SIZE ]; then
              echo "$(date): ERROR - Backup file too small: $BACKUP_SIZE bytes" >> $LOG_FILE
              exit 1
            fi
            md5sum /tmp/$BACKUP_FILE > /tmp/${BACKUP_FILE}.md5
            echo "$(date): Generated MD5 checksum: $(cat /tmp/${BACKUP_FILE}.md5)" >> $LOG_FILE
            echo "$(date): Backup creation completed successfully" >> $LOG_FILE
            # Only these key=value lines reach stdout and land in backup_vars.txt.
            echo "BACKUP_FILE=${BACKUP_FILE}"
            echo "BACKUP_SIZE=${BACKUP_SIZE}"
            echo "TIMESTAMP=${TIMESTAMP}"
          EOF
          BACKUP_FILE=$(grep "^BACKUP_FILE=" backup_vars.txt | cut -d= -f2)
          BACKUP_SIZE=$(grep "^BACKUP_SIZE=" backup_vars.txt | cut -d= -f2)
          TIMESTAMP=$(grep "^TIMESTAMP=" backup_vars.txt | cut -d= -f2)
          echo "BACKUP_FILE=$BACKUP_FILE" >> $GITHUB_OUTPUT
          echo "BACKUP_SIZE=$BACKUP_SIZE" >> $GITHUB_OUTPUT
          echo "TIMESTAMP=$TIMESTAMP" >> $GITHUB_OUTPUT
          echo "$(date): Backup creation step completed - File: $BACKUP_FILE, Size: $BACKUP_SIZE bytes" >> backup_log.txt
      - name: Validate Database Backup
        id: validate_backup
        run: |
          BACKUP_FILE="${{ steps.create_backup.outputs.BACKUP_FILE }}"
          TIMESTAMP="${{ steps.create_backup.outputs.TIMESTAMP }}"
          echo "$(date): Starting backup validation for $BACKUP_FILE" >> backup_log.txt
          # Unquoted heredoc: $BACKUP_FILE and $TIMESTAMP expand locally; \$ defers
          # the rest to the server.
          ssh production << EOF
            set -e
            BACKUP_FILE="$BACKUP_FILE"
            LOG_FILE="/tmp/backup_${TIMESTAMP}.log"
            echo "\$(date): Starting backup validation" >> \$LOG_FILE
            mkdir -p /tmp/backup_validation
            echo "\$(date): Validating backup integrity with pg_restore -l (dry run)" >> \$LOG_FILE
            if ! docker exec -i postgres_production pg_restore -l < /tmp/\$BACKUP_FILE >/dev/null 2>> \$LOG_FILE; then
              echo "\$(date): ERROR - Backup validation failed - cannot be read by pg_restore" >> \$LOG_FILE
              exit 1
            fi
            echo "\$(date): Checking for critical tables" >> \$LOG_FILE
            CRITICAL_TABLES=("users" "organizations" "reports")
            for table in "\${CRITICAL_TABLES[@]}"; do
              if ! docker exec -i postgres_production pg_restore -l < /tmp/\$BACKUP_FILE | grep -q "TABLE.*\$table"; then
                echo "\$(date): ERROR - Critical table '\$table' not found in backup" >> \$LOG_FILE
                exit 1
              fi
              echo "\$(date): Verified table '\$table' exists in backup" >> \$LOG_FILE
            done
            echo "\$(date): Backup validation completed successfully" >> \$LOG_FILE
          EOF
          echo "$(date): Backup validation step completed successfully" >> backup_log.txt
      - name: Download and Upload Backup to Azure
        id: upload_backup
        run: |
          BACKUP_FILE="${{ steps.create_backup.outputs.BACKUP_FILE }}"
          TIMESTAMP="${{ steps.create_backup.outputs.TIMESTAMP }}"
          echo "$(date): Starting backup download and upload process" >> backup_log.txt
          echo "Downloading backup file and logs from server"
          # Use the 'production' alias so scp picks up the SSH config written in
          # the setup step (a bare $SERVER_HOST would fail host-key verification).
          scp production:/tmp/$BACKUP_FILE .
          scp production:/tmp/${BACKUP_FILE}.md5 .
          scp production:/tmp/backup_${TIMESTAMP}.log .
          # The checksum file references /tmp paths; rewrite them so md5sum -c
          # resolves the downloaded file in the working directory.
          sed -i "s|/tmp/||g" ${BACKUP_FILE}.md5
          echo "Verifying integrity after download"
          if ! md5sum -c ${BACKUP_FILE}.md5 >> backup_log.txt; then
            echo "ERROR: Integrity check failed after download" >> backup_log.txt
            exit 1
          fi
          echo "Uploading backup to Azure" >> backup_log.txt
          az storage blob upload \
            --container-name ${{ secrets.AZURE_DB_BACKUP_CONTAINER }} \
            --name $BACKUP_FILE \
            --file $BACKUP_FILE \
            --connection-string "${{ secrets.AZURE_STORAGE_CONNECTION_STRING }}"
          az storage blob upload \
            --container-name ${{ secrets.AZURE_DB_BACKUP_CONTAINER }} \
            --name ${BACKUP_FILE}.md5 \
            --file ${BACKUP_FILE}.md5 \
            --connection-string "${{ secrets.AZURE_STORAGE_CONNECTION_STRING }}"
          az storage blob upload \
            --container-name ${{ secrets.AZURE_DB_BACKUP_CONTAINER }} \
            --name backup_${TIMESTAMP}.log \
            --file backup_${TIMESTAMP}.log \
            --connection-string "${{ secrets.AZURE_STORAGE_CONNECTION_STRING }}"
          rm -f $BACKUP_FILE ${BACKUP_FILE}.md5 backup_${TIMESTAMP}.log backup_vars.txt
          echo "$(date): Backup upload process completed successfully" >> backup_log.txt
      - name: Verify Azure Storage
        run: |
          BACKUP_FILE="${{ steps.create_backup.outputs.BACKUP_FILE }}"
          BLOB_CHECK=$(az storage blob exists \
            --container-name ${{ secrets.AZURE_DB_BACKUP_CONTAINER }} \
            --name $BACKUP_FILE \
            --connection-string "${{ secrets.AZURE_STORAGE_CONNECTION_STRING }}" \
            --output tsv)
          if [ "$BLOB_CHECK" != "True" ]; then
            echo "ERROR: Backup verification in Azure failed - blob does not exist"
            exit 1
          fi
          echo "Azure storage verification completed successfully"
      - name: Retention Policy - Clean up old backups
        run: |
          echo "$(date): Applying retention policy - removing backups older than 30 days" >> retention_log.txt
          DELETED_COUNT=$(az storage blob delete-batch \
            --source ${{ secrets.AZURE_DB_BACKUP_CONTAINER }} \
            --if-unmodified-since $(date -d "30 days ago" +%Y-%m-%d) \
            --connection-string "${{ secrets.AZURE_STORAGE_CONNECTION_STRING }}" \
            --output tsv | wc -l)
          echo "$(date): Deleted $DELETED_COUNT backup files older than 30 days" >> retention_log.txt
          az storage blob upload \
            --container-name ${{ secrets.AZURE_DB_BACKUP_CONTAINER }} \
            --name retention_log_$(date +%Y%m%d_%H%M%S).txt \
            --file retention_log.txt \
            --connection-string "${{ secrets.AZURE_STORAGE_CONNECTION_STRING }}"
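      # Keep the runner-side logs with the workflow run as well, on the same
      # 30-day retention as the blobs.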
      - name: Upload backup logs as workflow artifacts
        uses: actions/upload-artifact@v4
        with:
          name: backup-logs-${{ steps.create_backup.outputs.TIMESTAMP }}
          path: |
            backup_log.txt
            retention_log.txt
          retention-days: 30
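      # Placeholder until a real alert (e.g. a Slack webhook) is wired up.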
      - name: Notify on failure
        if: failure()
        run: |
          echo "Backup process failed. Check logs for details. @todo slack"