Last active
July 29, 2020 07:56
-
-
Save pushp1997/c3fdc350f64103779c3980aea0c93629 to your computer and use it in GitHub Desktop.
Use Python & Boto3 to Backup files / logs to AWS S3
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import boto3 | |
from botocore.exceptions import ClientError | |
import os | |
from datetime import datetime, timedelta | |
import schedule | |
import time | |
def upload_file_to_s3(file_name, bucket, object_name=None, folder_name=None):
    """
    Upload a local file to an S3 bucket.

    Params:
        file_name: Path of the local file to upload.
        bucket: Name of the target S3 bucket.
        object_name: S3 object key. If not specified, the file's base name is used.
        folder_name: Optional key prefix ("folder") to upload under.

    Returns:
        True if the upload succeeded, False if a ClientError occurred.
    """
    # If S3 object_name was not specified, derive it from the file name.
    # os.path.basename is more robust than splitting on '/' by hand.
    if object_name is None:
        object_name = os.path.basename(file_name)
    # If folder_name was specified, upload under that prefix.
    if folder_name is not None:
        object_name = f'{folder_name}/{object_name}'
    # Upload the file
    try:
        # NOTE(review): hard-coded credential placeholders; prefer omitting
        # these arguments so boto3 resolves credentials from the environment
        # or an IAM role — confirm before deploying.
        s3_client = boto3.client(
            service_name='s3',
            aws_access_key_id='YOUR_AWS_ACCESS_KEY_ID',
            aws_secret_access_key='YOUR_AWS_SECRET_ACCESS_KEY'
        )
        # upload_file returns None on success, so there is no response
        # payload to print (the original printed None here).
        s3_client.upload_file(file_name, bucket, object_name)
        print(f'Uploaded {file_name} to s3://{bucket}/{object_name}')
        return True
    except ClientError as e:
        print(e)
        return False
def append_text_to_file_names(files, text):
    """
    Insert *text* into each file name just before its extension and rename
    the files on disk accordingly (e.g. 'server1.log' -> 'server1.TEXT.log').

    Params:
        files: List(str): list of file paths.
        text: str: text to insert before each file's extension.

    Returns:
        List(str): list of the renamed file paths.

    Raises:
        OSError: if a rename fails (e.g. the source file does not exist).
    """
    renamed = []
    for path in files:
        # os.path.splitext is robust for names containing multiple dots
        # ('a.b.log') and for names with no extension at all, whereas the
        # naive split('.') dropped middle components or raised IndexError.
        root, ext = os.path.splitext(path)
        new_path = f'{root}.{text}{ext}'
        os.rename(path, new_path)
        renamed.append(new_path)
    return renamed
def rename_and_backup_logs_s3():
    """
    Tag each server log file with yesterday's date and upload the renamed
    files to the S3 bucket under the 'server_logs' prefix.
    """
    # Yesterday's date in dd-mm-yyyy form, e.g. '28-07-2020'.
    date_tag = (datetime.now() - timedelta(days=1)).strftime('%d-%m-%Y')
    log_files = [
        '/home/pushp/logs/server1.log',
        '/home/pushp/logs/server2.log',
        '/home/pushp/logs/server3.log',
        '/home/pushp/logs/server4.log'
    ]
    print('Appending date to log files...')
    log_files = append_text_to_file_names(log_files, date_tag)
    print('Appended date to log files...')
    print('Uploading logs to S3...')
    for path in log_files:
        upload_file_to_s3(
            file_name=path,
            bucket='YOUR_BUCKET_NAME',
            folder_name='server_logs'
        )
    print('Uploaded logs to S3...')
if __name__ == "__main__":
    # Register the backup job to fire every day at midnight.
    schedule.every().day.at("00:00").do(rename_and_backup_logs_s3)
    # Poll the scheduler forever, checking for due jobs once per minute.
    while True:
        schedule.run_pending()
        time.sleep(60)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment