digital-ocean-backup-prune.sh
#!/bin/bash
# Copyright: 2017 Barry Fisher
# LICENSE: MIT - https://opensource.org/licenses/MIT
# BACKGROUND:
# Digital Ocean Spaces doesn't currently support automatic deletion of
# objects/directories after a specified time period, which makes it
# difficult to use Spaces for backup purposes.
# This script can be run as a cron task (or similar) to inspect a
# specified bucket and directory and delete backup sub-directories older
# than a given number of days, provided the directory names contain the
# date in YYYY-MM-DD format.
# E.g. '2017-11-01_host_backups'
# This assumes that the user running this script already has the s3cmd
# command installed, authorized and configured correctly.
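# (If s3cmd is not yet set up, `s3cmd --configure` walks through an
# interactive configuration; for Spaces the endpoint is typically of the
# form <region>.digitaloceanspaces.com, e.g. nyc3.digitaloceanspaces.com.
# The region shown here is only an example.)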
#
# SETUP:
# - Alter the DAYS_TO_KEEP value to specify the number of days to keep.
# - Alter the S3_BUCKET_DIR value to the full path of the directory that
#   contains the backups, including the trailing slash.
# - Alter DIRECTORY_SUFFIX to the portion of the sub-directory names that
#   follows the date. If there is no suffix, set it to "".
# - Check that S3CMD points to the installed s3cmd binary.
# - Test in a safe environment to ensure this script behaves as expected.
# - Set up a cron task to run this script daily (see the example below).
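#
# An illustrative crontab entry (the script path and log location are only
# examples; adjust them to wherever the script is installed):
#   0 3 * * * /usr/local/bin/digital-ocean-backup-prune.sh >> /var/log/do-backup-prune.log 2>&1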

# @configurable Number of days of backups to keep.
DAYS_TO_KEEP=7
# @configurable Base directory to look up existing backup directories.
S3_BUCKET_DIR="s3://MY_BUCKET/MY_DIRECTORY_CONTAINING_ALL_BACKUPS/"
# @configurable The sub-directory suffix (the portion after the date).
DIRECTORY_SUFFIX="_ANY_TEXT_PLACED_AFTER_THE_DATE/"
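# (For example, if the dated directories are named like
# "2017-11-01_host_backups/", DIRECTORY_SUFFIX would be "_host_backups/".)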
# @configurable The absolute path to the s3cmd binary.
S3CMD="/usr/local/bin/s3cmd"

# Everything below is functional.
# Call Digital Ocean via the s3cmd command to get the list of existing objects.
OBJECTS_LIST=$($S3CMD ls "$S3_BUCKET_DIR" | grep -oP 's3:.*')
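# (For reference, `s3cmd ls` output lines typically look something like
# "    DIR  s3://MY_BUCKET/MY_DIRECTORY_CONTAINING_ALL_BACKUPS/2017-11-01_host_backups/";
# the grep above keeps only the "s3://..." portion of each line.)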

# Iterate from DAYS_TO_KEEP up to 30 days ago to cover all directories that
# were created in the last 30 days.
while [ "$DAYS_TO_KEEP" -lt 30 ]; do
  # Compose the date based on the iterator's current DAYS_TO_KEEP value.
  DATE=$(date -d "now -${DAYS_TO_KEEP} days" +%Y-%m-%d)
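  # (For example, on 2017-11-08 with DAYS_TO_KEEP=7 this produces 2017-11-01.)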
  # Keep at most one match so FOUND_DATE is a single date string.
  FOUND_DATE=$(echo "$OBJECTS_LIST" | grep -oP "$DATE" | head -n 1)
  if [ "$FOUND_DATE" != "" ]; then
    # Compose the directory to delete and make a call to recursively delete
    # all objects in that directory.
    PATH_TO_DELETE_OBJECT=$S3_BUCKET_DIR$FOUND_DATE$DIRECTORY_SUFFIX
    $S3CMD del --recursive "$PATH_TO_DELETE_OBJECT"
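    # (With the placeholder values above and FOUND_DATE=2017-11-01, this runs:
    #  s3cmd del --recursive s3://MY_BUCKET/MY_DIRECTORY_CONTAINING_ALL_BACKUPS/2017-11-01_ANY_TEXT_PLACED_AFTER_THE_DATE/)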
  fi
  DAYS_TO_KEEP=$((DAYS_TO_KEEP + 1))
done