#!/usr/bin/env python
import os
import gzip
import shutil
import datetime
import boto3
import ntpath
from subprocess import Popen, PIPE
# Database Backup Settings
BUCKET = 'mydatabase'
FNULL = open(os.devnull, 'w')
DBHOST = 'localhost'
DBUSR = 'root'
DBPWD = 'root'
# Time and log
current_datetime = datetime.datetime.now()
# abspath() keeps the log directory next to the script even when it is
# invoked from another working directory
LOG_DIR = os.path.dirname(os.path.abspath(__file__)) + '/logs/' + current_datetime.strftime("%Y-%m-%d")
if not os.path.isdir(LOG_DIR):
    os.makedirs(LOG_DIR)
LOG_FILE_PATH = LOG_DIR + '/log_' + current_datetime.strftime("%Y%m%d%H%M%S") + '.log'
LOG_FILE = open(LOG_FILE_PATH, 'a')
# S3 client setup
client = boto3.client(
    's3',
    aws_access_key_id='KEY',
    aws_secret_access_key='SECRET'
)
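# dump_schema: run mysqldump for one schema and stream the SQL straight into
# the open file handle passed by the caller. --single-transaction keeps the
# dump consistent without locking the tables.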
def dump_schema(schema, to_file):
    cmd = [
        'mysqldump',
        '-h', DBHOST,
        '-u' + DBUSR,
        '-p' + DBPWD,
        schema,
        '--single-transaction',
        '--lock-tables=false'
    ]
    # stdout goes straight to the backup file; communicate() drains stderr and
    # waits for the process, avoiding the pipe-buffer deadlock that calling
    # wait() before communicate() can cause.
    p = Popen(cmd, stdout=to_file, stderr=PIPE)
    out, err = p.communicate()
    return out
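# execute_query: run an ad-hoc statement through the mysql CLI in batch mode
# (-B, column names suppressed) and return its stdout as text.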
def execute_query(query):
    cmd = [
        'mysql',
        '-h', DBHOST,
        '-u' + DBUSR,
        '-p' + DBPWD,
        '-B',
        '--disable-column-names',
        '-e',
        query,
    ]
    p = Popen(cmd, stdout=PIPE, stderr=PIPE)
    out, err = p.communicate()
    # decode so callers can treat the result as text
    return out.decode('utf-8')
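# create_file_token: build a unique file name from the schema name plus the
# timestamp taken at script start.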
def create_file_token(name):
    return name + '_' + current_datetime.strftime("%Y%m%d%H%M%S")
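# list_schemas: ask information_schema for every user schema (system schemas
# are excluded), ordered smallest first by on-disk size.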
def list_schemas():
    ret = execute_query("""
        SELECT table_schema AS schema_name
        FROM information_schema.TABLES
        WHERE table_schema NOT IN ('mysql', 'performance_schema', 'information_schema')
        GROUP BY table_schema
        ORDER BY ROUND(SUM(data_length + index_length) / 1024 / 1024, 2)
    ;""")
    return [x for x in ret.split("\n") if x != '']
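# backup_schema: dump one schema to /tmp, gzip the dump, delete the raw SQL
# file and return the path of the compressed archive.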
def backup_schema(name):
    file_token = create_file_token(name)
    raw_file_path = '/tmp/' + file_token
    backup_file_path = '/tmp/' + file_token + '.gz'
    write_log("Backing up " + name + " as " + raw_file_path)
    # 'wb' rather than append mode, so a stale dump with the same token is
    # never appended to
    with open(raw_file_path, 'wb') as backup_file:
        dump_schema(name, backup_file)
    write_log("Compressing to " + backup_file_path)
    with open(raw_file_path, 'rb') as f_in, gzip.open(backup_file_path, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)
    write_log("Deleting raw file " + raw_file_path)
    os.remove(raw_file_path)
    return backup_file_path
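# write_log: timestamp a message, echo it to stdout and append it to the
# run's log file.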
def write_log(text):
    timestring = datetime.datetime.now().strftime("[%Y-%m-%d %H:%M:%S]")
    log_text = timestring + ' ' + text
    print(log_text)
    LOG_FILE.write(log_text + "\n")
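# send_to_aws: upload a backup file to S3 under a year/month/day/schema
# prefix derived from the file name.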
def send_to_aws(file_path):
    file_name = ntpath.basename(file_path)
    split = file_name.split('_')
    # everything before the trailing timestamp is the schema name, so schemas
    # containing underscores are handled correctly
    schema = '_'.join(split[:-1])
    file_key = current_datetime.strftime('%Y/%m/%d') + '/' + schema + '/' + file_name
    write_log("Sending to AWS S3 " + file_path + " >> " + BUCKET + "/" + file_key)
    client.upload_file(file_path, BUCKET, file_key)
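# Main flow: verify that /tmp and the S3 bucket are writable and that the
# mysql client tools are on PATH, then dump, compress and upload every schema
# returned by list_schemas().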
if __name__ == '__main__':
    write_log("Starting backup process")
    write_log("Testing file write and upload permissions...")
    try:
        test_file = '/tmp/testdbdump_now'
        f = open(test_file, 'a')
        f.write("Just a file test")
        f.close()
        send_to_aws(test_file)  # test file upload on S3
        os.remove(test_file)
        write_log("Write file permission check OK")
        write_log("Checking mysql commands")
        if os.system('which mysqldump') != 0:
            raise Exception("mysqldump not found")
        if os.system('which mysql') != 0:
            raise Exception("mysql not found")
    except IOError as exception:
        write_log("!!! Unable to write files: " + str(exception))
        exit(-1)
    except Exception as exception:
        write_log("!!! Problem on write permission test: " + str(exception))
        exit(-1)
    write_log("Executing backup\n")
    schemas = list_schemas()
    for schema in schemas:
        write_log("Creating backup for: " + schema)
        backup_file = backup_schema(schema)
        send_to_aws(backup_file)
        os.remove(backup_file)
        write_log("Deleting local file\n")
    write_log("Finished")
    LOG_FILE.close()
    # exit code 0 signals a successful run to the shell or cron
    exit(0)
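# Example: a script like this is typically run unattended from cron. The
# schedule, interpreter and path below are assumptions for illustration, not
# part of the script itself:
#
#   0 3 * * * /usr/bin/env python /opt/scripts/mysql_s3_backup.py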