A script I wrote while learning Python that downloads all of an RDS instance's log files in one go. It creates a directory for each date and stores the files there.
# Import the SDK
import boto3
import datetime
import os

# Specify the region if the RDS instance is not in the default region
rds = boto3.client('rds', region_name='us-east-1')
# RDS instance name
db_instance_identifier = "mysql-db"
# Local base directory for the downloaded logs
log_base_path = "./log/"

ret = rds.describe_db_log_files(DBInstanceIdentifier=db_instance_identifier)
for log in ret["DescribeDBLogFiles"]:
    # LastWritten is a POSIX timestamp in milliseconds
    timestamp = int(log["LastWritten"]) / 1000
    str_timestamp = datetime.datetime.fromtimestamp(timestamp).strftime("%Y%m%d")
    # One directory per date
    log_path = log_base_path + str_timestamp + "/"
    if not os.path.isdir(log_path):
        os.makedirs(log_path)
    # Skip files we do not need
    if log["LogFileName"] == "mysqlUpgrade":
        continue
    # Log file names look like "slowquery/mysql-slowquery.log"; keep only the file name
    log_file_name = log_path + log["LogFileName"].split("/")[-1]
    print("{}".format(log_file_name))
    # Remove any previous download so the append below starts from an empty file
    if os.path.isfile(log_file_name):
        os.remove(log_file_name)
    additional_data_pending = True
    marker = "0:0"
    number_of_lines = 0
    # Download the log file portion by portion, following the marker
    while additional_data_pending:
        data = rds.download_db_log_file_portion(
            DBInstanceIdentifier=db_instance_identifier,
            LogFileName=log["LogFileName"],
            Marker=marker,
            NumberOfLines=number_of_lines
        )
        print(type(data["LogFileData"]))
        with open(log_file_name, 'a') as f:
            if data["LogFileData"] is not None:
                f.write(data["LogFileData"])
        additional_data_pending = data["AdditionalDataPending"]
        marker = data["Marker"]
    # Match the local file's mtime to the log's LastWritten time
    os.utime(log_file_name, (timestamp, timestamp))
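The script above downloads every log file the instance reports. If you only want recent files, describe_db_log_files also accepts a FileLastWritten filter (a POSIX timestamp in milliseconds). A minimal sketch, reusing the instance name from the script above:

# Sketch only: list log files written in the last 24 hours.
import time

import boto3

rds = boto3.client('rds', region_name='us-east-1')
one_day_ago_ms = int((time.time() - 24 * 60 * 60) * 1000)  # milliseconds, as the API expects
recent = rds.describe_db_log_files(
    DBInstanceIdentifier="mysql-db",   # instance name used in the script above
    FileLastWritten=one_day_ago_ms     # only files written since this timestamp
)
for log in recent["DescribeDBLogFiles"]:
    print(log["LogFileName"], log["Size"])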
# Fetch RDS logs and store them in the specified S3 bucket.
# Import the SDK
import boto3
import datetime
from botocore.exceptions import ClientError

# Specify the region if the RDS instance is not in the default region
rds = boto3.client('rds', region_name='us-east-1')
s3 = boto3.resource('s3')
# RDS instance name
db_instance_identifier = "mysql-db"
# S3 bucket that receives the logs
bucket_name = "rds_log_bucket"
log_base_path = db_instance_identifier + "/rds_logs/"
bucket = s3.Bucket(bucket_name)


def get_db_log_files(log_file_name):
    # Fetch the contents of the log file.
    # S3 objects cannot be appended to, so build the whole body in memory and upload it in one go.
    log_data = ""
    paginator = rds.get_paginator("download_db_log_file_portion")
    page_iterator = paginator.paginate(
        DBInstanceIdentifier=db_instance_identifier,
        LogFileName=log_file_name
    )
    for page in page_iterator:
        print(page)
        if page["LogFileData"] is not None:
            log_data += page["LogFileData"]
    return log_data


def cleanup_s3_db_log_file(log_file_name):
    # Delete the object first if it already exists.
    try:
        bucket.Object(log_file_name).load()
    except ClientError as e:
        print(e.response)
        if e.response["Error"]["Code"] == "404":
            # The object does not exist; nothing to do.
            pass
        else:
            raise
    else:
        # The object exists, so delete it.
        bucket.Object(log_file_name).delete()


def upload_s3_db_log_file(log_file_name, log_data):
    bucket.Object(log_file_name).put(
        Body=log_data,
        ContentEncoding="utf-8",
        ContentType="text/plain"
        # LastModified cannot be set through S3 object metadata.
        # https://docs.aws.amazon.com/ja_jp/AmazonS3/latest/dev/UsingMetadata.html#object-metadata
        # Metadata={
        #     "LastModified": datetime.datetime.fromtimestamp(timestamp)
        # }
    )


def main():
    # List the log file names.
    paginator = rds.get_paginator("describe_db_log_files")
    page_iterator = paginator.paginate(
        DBInstanceIdentifier=db_instance_identifier
    )
    for page in page_iterator:
        print(page)
        for log in page["DescribeDBLogFiles"]:
            # Skip files we do not need
            if log["LogFileName"] == "mysqlUpgrade":
                continue
            # LastWritten is a POSIX timestamp in milliseconds
            timestamp = int(log["LastWritten"]) / 1000
            str_timestamp = datetime.datetime.fromtimestamp(timestamp).strftime("%Y%m%d")
            # Build the destination key prefix: one "directory" per date
            log_path = log_base_path + str_timestamp + "/"
            # Log file names look like "slowquery/mysql-slowquery.log"; keep only the file name
            log_file_name = log_path + log["LogFileName"].split("/")[-1]
            print("{}".format(log_file_name))
            # Delete any existing copy of the log file from S3 first
            cleanup_s3_db_log_file(log_file_name)
            # Download the log file
            log_data = get_db_log_files(log["LogFileName"])
            # Upload it to S3
            upload_s3_db_log_file(log_file_name, log_data)


if __name__ == '__main__':
    main()
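After a run, you can confirm the per-date layout by listing the destination prefix. A minimal sketch, assuming the bucket_name and log_base_path values used above:

# Sketch only: list what the uploader produced under <instance>/rds_logs/.
import boto3

s3 = boto3.resource('s3')
bucket = s3.Bucket("rds_log_bucket")                    # bucket_name from the script above
for obj in bucket.objects.filter(Prefix="mysql-db/rds_logs/"):
    print(obj.key, obj.size, obj.last_modified)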