@hensapir
Last active May 11, 2016 21:03
Script to back up websites and their corresponding MySQL databases to S3
#!/usr/bin/python3
import os
import tarfile
from subprocess import call
from datetime import datetime, timedelta

static_sites = [
    "site1.com",
    "site2.com"
]

dynamic_sites = [
    {
        "name": "site3.com",
        "db": "dbname",
        "db_user": "dbuser",
        "db_pass": "dbpass"
    }
]

def make_tar(output_filename, source_dir):
    with tarfile.open(output_filename, "w:gz") as tar:
        tar.add(source_dir, arcname=os.path.basename(source_dir))

def backup():
    backup_dir = '/root/dev/files/'
    s3_bucket = 's3://backups/'

    for site in static_sites:
        output_filename = '%s%s-%s.tar' % (backup_dir, site.replace('.', ''), datetime.now().strftime("%Y%m%d"))
        source_dir = '/var/www/%s' % (site, )
        print("Backing up %s ..." % (site, ))
        make_tar(output_filename, source_dir)
        call(["aws", "s3", "cp", output_filename, s3_bucket])
        print("Backed up to: %s" % (output_filename, ))

        # Delete backups older than 5 days
        for file in os.listdir(backup_dir):
            filename_split = file.split('-')
            sitename = filename_split[0]
            backup_date = filename_split[1].split('.')[0]
            if sitename == site.replace('.', ''):
                date_object = datetime.strptime(backup_date, "%Y%m%d")
                delta = datetime.now() - date_object
                if delta.days >= 5:
                    call(["aws", "s3", "rm", s3_bucket + file])

    for dsite in dynamic_sites:
        site = dsite['name']
        output_filename = '%s%s-%s.tar' % (backup_dir, site.replace('.', ''), datetime.now().strftime("%Y%m%d"))
        source_dir = '/var/www/%s' % (site, )
        print("Backing up %s ..." % (site, ))
        make_tar(output_filename, source_dir)
        call(["aws", "s3", "cp", output_filename, s3_bucket])
        print("Backed up to: %s" % (output_filename, ))

        # MySQL backup (note: the password is visible in the process list while mysqldump runs)
        db_output = "db-%s-%s.bak.gz" % (site.replace('.', ''), datetime.now().strftime("%Y%m%d"))
        os.system("mysqldump -u %s -p%s %s | gzip > %s" % (dsite['db_user'], dsite['db_pass'], dsite['db'], backup_dir + db_output))
        call(["aws", "s3", "cp", backup_dir + db_output, s3_bucket])

        # Delete the local compressed files once they have been uploaded
        call(["rm", output_filename])
        call(["rm", backup_dir + db_output])

        # Delete old backups from S3 (the dump key must include the date, matching the name used on upload)
        five_days_ago = (datetime.now() - timedelta(days=5)).strftime("%Y%m%d")
        delete_filename = "%s-%s.tar" % (site.replace('.', ''), five_days_ago)
        delete_db = "db-%s-%s.bak.gz" % (site.replace('.', ''), five_days_ago)
        call(["aws", "s3", "rm", s3_bucket + delete_filename])
        call(["aws", "s3", "rm", s3_bucket + delete_db])

if __name__ == '__main__':
    backup()
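
To pull one of these backups back down, here is a minimal restore sketch under the same assumptions as the script above (same bucket, same backup_dir, same file naming). The restore() function and its arguments are hypothetical and not part of the original gist.

#!/usr/bin/python3
import os
import tarfile
from subprocess import call

def restore(site, date, db=None, db_user=None, db_pass=None,
            backup_dir='/root/dev/files/', s3_bucket='s3://backups/'):
    # Download the site archive that backup() uploaded for the given date
    # (date uses the same "%Y%m%d" format as the backup script).
    archive = '%s-%s.tar' % (site.replace('.', ''), date)
    call(["aws", "s3", "cp", s3_bucket + archive, backup_dir + archive])

    # The archives are gzip-compressed tarballs despite the .tar suffix,
    # because make_tar() opens them with "w:gz".
    with tarfile.open(backup_dir + archive, "r:gz") as tar:
        tar.extractall('/var/www/')

    # For dynamic sites, also fetch the MySQL dump and load it back in.
    if db:
        dump = 'db-%s-%s.bak.gz' % (site.replace('.', ''), date)
        call(["aws", "s3", "cp", s3_bucket + dump, backup_dir + dump])
        os.system("gunzip < %s | mysql -u %s -p%s %s" % (backup_dir + dump, db_user, db_pass, db))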

@hensapir (Author) commented:
Daily 3 AM cron job:
00 03 * * * python3 /root/dev/backup-sites.py
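
If you also want the script's progress output and any aws or mysqldump errors captured, one option is to append cron's output to a log file (the log path here is just an example, not part of the original setup):

00 03 * * * python3 /root/dev/backup-sites.py >> /var/log/backup-sites.log 2>&1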
