Last active
December 3, 2024 12:19
-
-
Save artem78/537424be084818ffa051190b219b240c to your computer and use it in GitHub Desktop.
Python script for making MySQL db backups to MEGA.nz
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/python3 | |
# | |
# Python script for making MySQL db backups to MEGA.nz | |
# | |
# Features: | |
# - Can backup multiple databases (each to separate sub-folder) | |
# - Upload backups to Mega.nz (no backups stored local) | |
# - Detect changes in database with checksum and do not upload unchanged data twice | |
# | |
# Before run: | |
# 1) Install MegaCMD - https://mega.io/ru/cmd#download | |
# 2) mega-login '[email protected]' 'password' | |
# 3) pip3 install simple-file-checksum | |
# 4) mysql_config_editor set --login-path=client --host=localhost --user=localuser --password | |
# 5) Edit settings vars db_names, mysql_login_path and remote_dir | |
# | |
# At last add this script to crontab or run it manually. | |
# | |
from pprint import pp | |
import json | |
from os import chdir, getcwd | |
from os.path import isfile, join, dirname | |
import tempfile | |
from datetime import datetime | |
import subprocess | |
from simple_file_checksum import get_checksum | |
import tarfile | |
# Settings
db_names = ['db1', 'db2' ,'db3']  # databases to back up; each goes to its own remote sub-folder
mysql_login_path='client'  # --login-path created with mysql_config_editor (setup step 4 above)
remote_dir = '/backups/mysql'  # destination folder on Mega.nz
def load_data():
    """Load persisted per-database state from data.json (next to this script)
    into the module-level ``data`` dict.

    A missing file is not an error -- it simply means this is the first run
    (or the state was deleted), so ``data`` is left untouched.
    """
    global data
    path = join(dirname(__file__), 'data.json')
    # EAFP: open directly instead of isfile()-then-open, which duplicated the
    # path expression and raced against concurrent deletion.
    try:
        with open(path, 'r') as file:
            data = json.load(file)
    except FileNotFoundError:
        pass  # first run: keep the current (empty) state
def save_data():
    """Persist the module-level ``data`` dict to data.json beside this script."""
    global data
    target = join(dirname(__file__), 'data.json')
    with open(target, 'w') as out:
        json.dump(data, out, indent=3)
def save_db_backup(db_name, filename):
    """Dump one MySQL database to *filename* with mysqldump.

    Credentials come from the login path set up with mysql_config_editor
    (setup step 4 in the header).  Raises subprocess.CalledProcessError if
    mysqldump fails, so a broken/partial dump is never checksummed or
    uploaded by the caller.
    """
    # NOTE: was an unchecked run() -- a failed dump previously produced an
    # empty/partial file that got uploaded and its checksum recorded.
    subprocess.run(["mysqldump",
                    '--login-path={}'.format(mysql_login_path),
                    '--default-character-set', 'utf8',
                    '-B', db_name,
                    # one INSERT per row keeps dumps stable for checksum comparison
                    '--skip-extended-insert',
                    '-r', filename,
                    # omit the dump timestamp so unchanged data hashes identically
                    '--skip-dump-date'
                    ], check=True)
def upload_to_mega(local_file, remote_dir):
    """Upload *local_file* into *remote_dir* on Mega.nz via MegaCMD.

    Requires an active session (mega-login, setup step 2 in the header).
    Raises subprocess.CalledProcessError if either MegaCMD call fails, so the
    caller does not record a failed upload as successful.
    """
    subprocess.run(['mega-cd'], check=True)  # cd to root
    # -c creates missing remote folders; join(remote_dir, '') appends a
    # trailing separator so mega-put treats the target as a directory
    subprocess.run(['mega-put', '-c', local_file, join(remote_dir, '')], check=True)
# Per-database state: {db_name: {'last_checksum', 'last_check', 'last_uploaded'}}
data = {}
try:
    load_data()

    with tempfile.TemporaryDirectory() as temp_dir:
        print('created temp dir:', temp_dir)
        print()
        for db_name in db_names:
            print('======', 'Database', db_name, '======')
            if data.get(db_name) is None:
                data[db_name] = {'last_checksum': ''}

            sql_filename = '{}_{}.sql'.format(db_name, datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
            sql_path = join(temp_dir, sql_filename)
            save_db_backup(db_name, sql_path)
            print('saved to:', sql_filename)

            checksum = get_checksum(sql_path, algorithm="SHA1")
            print('checksum:', checksum)
            data[db_name]['last_check'] = datetime.now().strftime('%c')

            if checksum != data[db_name]['last_checksum']:  # file changed
                print('changes in db detected')
                archive_path = sql_path + '.tar.gz'
                # arcname= stores just the bare file name inside the archive,
                # replacing the old chdir()/chdir-back dance (process-global
                # state); the context manager closes the tar even if add() raises.
                with tarfile.open(archive_path, 'w:gz') as tar:
                    tar.add(sql_path, arcname=sql_filename)
                print('created archive:', archive_path)

                upload_to_mega(archive_path, join(remote_dir, db_name))
                print('uploaded to mega.nz')
                # Only mark as uploaded after upload_to_mega returned cleanly.
                data[db_name]['last_checksum'] = checksum
                data[db_name]['last_uploaded'] = datetime.now().strftime('%c')
            else:
                print('db unchanged')
            print()
finally:
    # Always persist state, even when a dump or upload failed part-way.
    save_data()
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.