-
-
Save norm/2686971 to your computer and use it in GitHub Desktop.
s3 command line script
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env perl
use Modern::Perl;
use Date::Manip;
use File::Basename;
use List::MoreUtils qw( any );
use Net::Amazon::S3;
use Pod::Usage;

# Credentials come from the environment, matching the official AWS tools.
my $s3 = Net::Amazon::S3->new({
    aws_access_key_id     => $ENV{'AWS_ACCESS_KEY_ID'},
    aws_secret_access_key => $ENV{'AWS_SECRET_ACCESS_KEY'},
    retry                 => 1,
    secure                => 1,
});
my $client = Net::Amazon::S3::Client->new( s3 => $s3 );
my %buckets;

# given/when (smartmatch) is deprecated and removed in modern perls;
# a dispatch table does the same job portably.
my %handler_for = (
    cp => \&copy_files,
    ls => \&list_files,
    mk => \&make_bucket_or_key,
    rm => \&remove_bucket_or_key,
);

my $command = shift // '';
my $handler = $handler_for{$command}
    or pod2usage();     # pod2usage() exits after printing the SYNOPSIS
$handler->(@ARGV);
exit;
# Copy local file(s) into a bucket, or object(s) out of a bucket.
# The last argument is the destination; everything before it is a source.
# A destination containing a key part ("bucket:dir") means "upload";
# otherwise each source is a "bucket:key" to download.
sub copy_files {
    my $to   = pop @_;
    my @from = @_;

    my($bucket_name, $key) = get_bucket_and_key($to);

    foreach my $from ( @from ) {
        if ( defined $key ) {
            # upload: local file -> bucket, key derived from the filename
            my $bucket = $client->bucket( name => $bucket_name );
            my $s3_key = get_s3_key_from_filename($bucket, $from, $key);
            my $object = $bucket->object( key => $s3_key );
            $object->put_filename($from);
        }
        else {
            # download: bucket:key -> local file or directory
            my($from_bucket, $from_key) = get_bucket_and_key($from);
            my $bucket = $client->bucket( name => $from_bucket );
            my $object = $bucket->object( key => $from_key );

            # BUG FIX: the computed local filename was previously ignored
            # and the raw destination passed instead, which broke copying
            # into a directory; the source key (not the full "bucket:key"
            # string) is used so the local basename is sensible.
            my $filename = get_local_filename($from_key, $to);
            $object->get_filename($filename);
        }
    }
}
# With no arguments, list every bucket. Otherwise each argument is
# "bucket" or "bucket:dir" and the matching keys are listed, one per
# line, size-aligned, with their last-modified timestamp.
sub list_files {
    my @targets = @_;

    if ( !@targets ) {
        foreach my $bucket ( $client->buckets ) {
            say $bucket->name;
        }
        return;
    }

    foreach my $target ( @targets ) {
        my($bucket_name, $key) = get_bucket_and_key($target);

        # listing a whole bucket gives no key part; default it so the
        # substitution and prefix below don't warn on undef
        $key //= '';
        $key =~ s{/$}{};

        my $bucket = $client->bucket( name => $bucket_name );

        # NOTE: renamed from $list — the original shadowed the loop
        # variable of the same name
        my $listing = $bucket->list({ prefix => $key });

        my $largest = 0;
        my @items;
        while ( ! $listing->is_done ) {
            # collect keys and find the largest size (for formatting)
            foreach my $item ( $listing->items ) {
                # skip "this directory"
                next if $item->key eq "$key/";

                # skip matching keys in "subdirectories";
                # \Q...\E guards against regex metacharacters in the key
                next if $item->key =~ m{^ \Q$key\E /? [^/]+ / . }x;

                push @items, $item;
                $largest = $item->size
                    unless $item->size < $largest;
            }
        }

        # column width for right-aligned sizes
        my $width = length $largest;
        foreach my $item ( @items ) {
            my $file = $item->key;
            $file =~ s{^ \Q$key\E /}{}x;
            say sprintf "%${width}d %s %s",
                $item->size,
                UnixDate($item->last_modified, '%Y/%m/%d %H:%M'),
                $file;
        }
    }
}
# Create buckets ("bucket") or emulated directories ("bucket:dir").
# A directory on S3 is just an empty object whose key ends in a slash.
sub make_bucket_or_key {
    foreach my $target ( @_ ) {
        my($bucket_name, $key) = get_bucket_and_key($target);

        if ( $key ) {
            # ensure the trailing slash that marks a "directory"
            $key .= '/'
                unless $key =~ m{/$};

            my $bucket = $client->bucket( name => $bucket_name );
            my $object = $bucket->object( key => $key );

            # creating an existing directory is a harmless no-op
            $object->put('')
                unless $object->exists;
        }
        else {
            $client->create_bucket(
                name                => $bucket_name,
                acl_short           => 'private',
                location_constraint => 'US',
            );
        }
    }
}
# Delete buckets ("bucket") or individual keys ("bucket:file-or-dir").
# When the plain key does not exist, the slash-suffixed "directory"
# form of the same key is tried before deleting.
sub remove_bucket_or_key {
    foreach my $target ( @_ ) {
        my($bucket_name, $key) = get_bucket_and_key($target);
        my $bucket = $client->bucket( name => $bucket_name );

        if ( $key ) {
            my $object = $bucket->object( key => $key );

            # fall back to the emulated-directory spelling of the key
            $object = $bucket->object( key => "$key/" )
                unless $object->exists;

            $object->delete;
        }
        else {
            $bucket->delete;
        }
    }
}
# Work out the S3 key to store a local file under, given the requested
# destination key. Copying to the bucket root uses the filename itself;
# copying to an existing "directory" key nests the filename under it;
# otherwise the destination key is used verbatim.
sub get_s3_key_from_filename {
    my $bucket   = shift;
    my $filename = shift;
    my $key      = shift;

    # copy to the root, use filename as key
    if ( $key eq '' or $key eq '.' ) {
        return $filename;
    }
    else {
        # check to see if a "directory" with that key exists
        my $check = $key . ( $key =~ m{/$} ? '' : '/' );
        my $object = $bucket->object( key => $check );

        # NOTE(review): this line was corrupted in the pasted source
        # ("${check}$(unknown)"); reconstructed to nest the filename
        # under the directory key, consistent with the root-copy branch
        # above — confirm against the original gist.
        return "${check}$filename"
            if $object->exists;
    }
    return $key;
}
# Resolve the local path a download should be written to. A destination
# that is an existing directory (trailing slash tolerated) gets the
# source's base name appended; anything else is used as-is.
sub get_local_filename {
    my($source, $destination) = @_;

    # normalise away a trailing slash before the directory test
    $destination =~ s{/$}{};

    if ( -d $destination ) {
        my($basename) = fileparse($source);
        $destination = "$destination/$basename";
    }
    return $destination;
}
# Split a "bucket" or "bucket:key" argument into (bucket, key); the key
# is undef when no colon is present.
sub get_bucket_and_key {
    my $arg = shift;

    # check the match explicitly: the original returned $1/$2 without
    # testing success, which hands back stale captures from an earlier
    # regex when the argument is empty or starts with a colon
    return( $1, $2 )
        if $arg =~ m{^ ([^\:]+) (?: \: (.*) )? $}x;
    return( $arg, undef );
}
__END__

=head1 NAME

B<s3> - command line access to files stored in Amazon S3

=head1 SYNOPSIS

=over

=item B<s3> ls [<bucket>[:<dir>]]

List all buckets, or files in a bucket (optional subdirectory)

=item B<s3> mk <bucket>[:<dir>] [...]

Make a new bucket, or a directory in a bucket

=item B<s3> rm <bucket>[:<file/dir>] [...]

Remove an empty bucket, or files/directories within a bucket

=item B<s3> cp <file> [...] <bucket>:[<dir>]

Copy file(s) to a bucket (optional subdirectory)

=item B<s3> cp <bucket>:[<dir>] [...] <file/dir>

Copy file(s) from a bucket.

=back

Directory structures on S3 are only emulated, as files in a bucket just
have a key name. However, the AWS S3 console displays key names with a slash
(/) in them as being within a directory structure.

=head1 AUTHOR

Mark Norman Francis, L<[email protected]>.

=head1 COPYRIGHT AND LICENSE

Copyright 2012 Mark Norman Francis.

This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment