@ashaw
Created August 2, 2010 14:50
# backup redis databases to s3 on the engineyard platform
require 'zlib'
require 'time' # Time#iso8601 and Time.parse live in the time stdlib
require 'rubygems'
require 'crack/json'
require 'aws/s3'
class Redis2S3
  DATABASE         = '/db/redis/redis_state.rdb'
  CREDENTIALS_FILE = '/etc/chef/dna.json'
  BACKUPS_TO_KEEP  = 5
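
  # /etc/chef/dna.json is the chef node config EngineYard writes on each
  # instance; the only keys this script reads are the environment name and
  # the AWS credentials. Roughly (sketch only; key names come from the code
  # below, values are hypothetical):
  #   { "environment": { "name": "my_app_production" },
  #     "aws_secret_id": "AKIA...", "aws_secret_key": "..." }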
  def self.run
    @config   = Crack::JSON.parse(File.read(CREDENTIALS_FILE))
    @app_name = @config['environment']['name']
    AWS::S3::Base.establish_connection!(
      :access_key_id     => @config['aws_secret_id'],
      :secret_access_key => @config['aws_secret_key']
    )
    # There are a few buckets on the account; we want the ey-backup bucket
    db_backup_bucket = AWS::S3::Service.buckets.detect { |b| b.name.match(/ey-backup/) }
    file_name = "#{@app_name}_#{Time.now.utc.iso8601}.rdb.gz"

    # open the rdb dump and gzip it into /tmp
    File.open("/tmp/#{file_name}", 'wb') do |file|
      gz = Zlib::GzipWriter.new(file)
      gz.write File.read(DATABASE)
      gz.close
    end
    # open the gzipped dump and upload it to s3
    AWS::S3::S3Object.store(file_name, File.open("/tmp/#{file_name}", 'rb'), db_backup_bucket.name)
    # now clear out old backups
    app_backups = db_backup_bucket.objects.select { |o| o.path.match(/#{@app_name}_.*\.rdb\.gz/) }
    if app_backups.size > BACKUPS_TO_KEEP
      oldest_backup     = app_backups.sort_by { |o| Time.parse(o.about['last-modified']).to_i }.first
      oldest_backup_key = oldest_backup.key.gsub(/^.*?\//, '') # strip any leading path prefix from the key
      AWS::S3::S3Object.delete oldest_backup_key, db_backup_bucket.name
    end
  end
end

Redis2S3.run
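
Not part of the original gist: a minimal companion sketch of how one of these backups could be pulled back down and decompressed, using the same aws-s3 and zlib calls as above plus S3Object#value; the output path /tmp/restored_redis_state.rdb is hypothetical, and the result should only be moved into /db/redis while redis is stopped.

require 'zlib'
require 'time'
require 'stringio'
require 'rubygems'
require 'crack/json'
require 'aws/s3'

config   = Crack::JSON.parse(File.read('/etc/chef/dna.json'))
app_name = config['environment']['name']

AWS::S3::Base.establish_connection!(
  :access_key_id     => config['aws_secret_id'],
  :secret_access_key => config['aws_secret_key']
)

# find the newest backup written by Redis2S3.run
bucket  = AWS::S3::Service.buckets.detect { |b| b.name.match(/ey-backup/) }
backups = bucket.objects.select { |o| o.path.match(/#{app_name}_.*\.rdb\.gz/) }
latest  = backups.sort_by { |o| Time.parse(o.about['last-modified']).to_i }.last

# download and gunzip it into a scratch location for inspection or restore
File.open('/tmp/restored_redis_state.rdb', 'wb') do |out|
  out.write Zlib::GzipReader.new(StringIO.new(latest.value)).read
end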