#!/usr/bin/env ruby
# Manages AWS hosted S3 bucket content via the aws-sdk for Ruby.
# @author: dataday
# Example usage:
# bundle exec ruby exe/bucket.rb bucket service-cookbook --create-bucket
# bundle exec ruby exe/bucket.rb bucket service-cookbook --delete-bucket
# bundle exec ruby exe/bucket.rb bucket service-cookbook --list-objects
# bundle exec ruby exe/bucket.rb bucket service-cookbook --upload-object local/file.suffix remote/path
# bundle exec ruby exe/bucket.rb bucket service-cookbook --delete-object remote/file.suffix
#
# Equivalent AWS CLI bucket removal:
# aws s3 rb s3://dataday-cookbook --force
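#
# A Gemfile along these lines is assumed for the `bundle exec` calls above;
# the gem list simply mirrors the requires below (no versions are pinned
# in the original gist):
#
#   source 'https://rubygems.org'
#
#   gem 'aws-sdk'
#   gem 'colorize'
#   gem 'thor'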
require 'aws-sdk'
require 'colorize'
require 'thor'

# S3 management module
module S3
  VERSION = '0.1.0'

  # @todo:
  # + granular create bucket details
  # + granular delete objects details

  # Command Line Interface
  class Cli < Thor
    attr_accessor :s3,
                  :s3_region,
                  :s3_bucket_name,
                  :s3_buckets,
                  :s3_bucket

    # Initialises the class
    # @param *args [mixed] Thor arguments
    # @return [void]
    def initialize(*args)
      super
      account_id = '123456789012'
      # https://s3-eu-west-2.amazonaws.com
      self.s3_region = 'eu-west-2'
      self.s3_bucket_name = 's3-bucket'
      # assume privileged credentials of the specified role
      credentials = Aws::AssumeRoleCredentials.new(
        client: Aws::STS::Client.new(region: s3_region),
        role_arn: "arn:aws:iam::#{account_id}:role/s3Manager",
        role_session_name: 'Assumes3ManagerSessionRole'
      )
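      # note: Aws::AssumeRoleCredentials wraps the STS AssumeRole call and
      # refreshes the temporary credentials automatically as they near
      # expiry, so longer runs keep working without re-assuming the role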
      settings = {
        # the explicit :credentials above take precedence over the named profile
        profile: 's3Manager',
        region: s3_region,
        credentials: credentials,
        http_wire_trace: false
      }
      self.s3 = Aws::S3::Resource.new(
        client: Aws::S3::Client.new(settings)
      )
      self.s3_buckets = s3.buckets
      self.s3_bucket = nil
    end
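
    # note: Thor builds a fresh Cli instance per invocation, so the role
    # above is re-assumed on every run of the script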

    no_commands do
      # prefixes output with the current bucket name
      def prompt(str)
        puts "#{s3_bucket_name}> ".yellow.concat(str)
      end
    end

    desc 'version', 'Display version'
    map %w[-v --version] => :version
    def version
      puts VERSION
    end
desc "buckets", "List available S3 buckets"
def buckets
prompt self.s3_buckets.first.nil? ?
"Not Found".red :
"#{self.s3_buckets.limit(50).map(&:name)}".green
end
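
    # for illustration only, assuming a single bucket named service-cookbook
    # exists (output shown is hypothetical):
    #   $ bundle exec ruby exe/bucket.rb buckets
    #   s3-bucket> ["service-cookbook"]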
desc "bucket NAME", "Manage S3 bucket by NAME (view help for options)"
# bucket arguments
method_option :create_bucket, type: :boolean
method_option :delete_bucket, type: :boolean
# object arguments
method_option :list_objects, type: :boolean
method_option :upload_object, type: :array
method_option :delete_object, type: :array
def bucket(bucket_name)
self.s3_bucket_name = bucket_name
self.s3_bucket = self.s3.bucket(self.s3_bucket_name)
unless s3_bucket.exists?
prompt "Not Found: #{s3_bucket.url}".red
end
      begin
        #
        # options[:create_bucket]
        # creates a named S3 bucket if it does not already exist
        if options[:create_bucket]
          if s3_bucket.exists?
            prompt "Exists: #{s3_bucket.url}".green
          else
            # regions other than us-east-1 require an explicit location constraint
            s3_bucket.create(
              create_bucket_configuration: { location_constraint: s3_region }
            )
            prompt "Created: #{s3_bucket.url}".green
          end
        #
        # options[:delete_bucket]
        # deletes a named S3 bucket if it exists
        elsif options[:delete_bucket]
          if s3_bucket.exists?
            s3_bucket.delete
            prompt "Deleted: #{s3_bucket.url}".green
          else
            prompt "Ignored: #{s3_bucket.url}".green
          end
        #
        # options[:list_objects]
        # lists objects found within the remote S3 bucket
        elsif options[:list_objects]
          s3_bucket.objects.limit(50).each do |obj|
            prompt "Found: #{obj.key} #{obj.etag}".green
          end
        #
        # options[:upload_object]
        # uploads a single file to the named S3 bucket
        elsif options[:upload_object]
          unless options[:upload_object].size == 2
            prompt "Missing Paths: #{options[:upload_object]}".red
            return
          end
          # extract path references
          local_path = options[:upload_object].first
          remote_path = options[:upload_object].last
          # extract object references
          obj_name = File.basename(local_path)
          obj_file = File.new(local_path)
          remote_file = "#{remote_path}/#{obj_name}"
          # create object instance
          obj = s3_bucket.object(remote_file)
          # archive any existing remote object before it is overwritten
          if obj.exists?
            last_modified = obj.last_modified.strftime '%Y-%m-%d-%H%M%S'
            archive_file = "archive/#{remote_file}.#{last_modified}"
            # move the remote object aside (move_to copies, then deletes the source)
            obj.move_to(
              bucket: bucket_name,
              key: archive_file
            )
            prompt "Moved: #{remote_file} > #{archive_file}".green
          end
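          # the archive step above turns e.g. remote/path/file.suffix into
          # archive/remote/path/file.suffix.2017-11-27-114400
          # (timestamp shown is illustrative)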
          # upload the object
          s3_bucket.put_object(
            body: obj_file,
            key: remote_file
          )
          prompt "Uploaded: #{remote_file}".green
        #
        # options[:delete_object]
        # deletes a single file from a named S3 bucket
        elsif options[:delete_object]
          unless options[:delete_object].size == 1
            prompt "Missing Path: #{options[:delete_object]}".red
            return
          end
          remote_file = options[:delete_object].first
          # create object instance
          obj = s3_bucket.object(remote_file)
          if obj.exists?
            obj.delete
            prompt "Deleted: #{remote_file}".green
          else
            prompt "Not Found: #{remote_file}".red
          end
        end
      # the S3 bucket may exist:
      # - but not in the specified region
      # - or not be owned by the specified user
      rescue Aws::Errors::ServiceError => e
        prompt "Invalid: #{s3_bucket.url} (#{e.message})".red
      end
    end
  end
end
# /^_^...
S3::Cli.start(ARGV)