Skip to content

Instantly share code, notes, and snippets.

@ZeroStride
Created October 12, 2010 17:59
Show Gist options
  • Save ZeroStride/622616 to your computer and use it in GitHub Desktop.
require 'capistrano/recipes/deploy/strategy/copy'
require 'fileutils'
require 'tempfile' # Dir.tmpdir
require 'json'
module Capistrano
  module Deploy
    module Strategy
      # This class implements the strategy for deploying versioned, static assets
      # to the Amazon S3 service.
      #
      # Each deployed asset is renamed according to its blob SHA, so:
      #   foo.png => foo.0b676ed16.png
      #
      # You must specify an S3 bucket to which files will be pushed:
      #
      #   set :deploy_s3_bucket, "My_S3_Bukkit"
      #
      # You may also specify command-line options to s3cmd:
      #
      #   set :deploy_s3_options, "--dryrun"
      #
      class S3Version < Copy
        # Obtains a copy of the source code locally (via the #command method),
        # uploads each asset to the S3 bucket under a SHA-versioned file name,
        # then writes a <short-revision>.json manifest mapping original names
        # to versioned names and pushes it both to S3 and to the servers.
        #
        # Raises Capistrano::Error when a shell step fails. The local staging
        # area is always removed, even on failure.
        def deploy!
          # List the assets already deployed to S3 (intended to be merged into
          # copy_exclude so unchanged assets are skipped).
          # NOTE(review): the result is currently unused — see the "todo" in
          # the original; kept for parity until the exclusion logic is written.
          s3_ls = `s3cmd #{s3cmd_opts} --recursive ls #{bucket_name}`

          if copy_cache
            # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
            if File.exist?(copy_cache)
              logger.debug "refreshing local cache to revision #{revision} at #{copy_cache}"
              system(source.sync(revision, copy_cache))
            else
              logger.debug "preparing local cache at #{copy_cache}"
              system(source.checkout(revision, copy_cache))
            end

            # $? is a Process::Status; comparing it to an Integer relied on
            # deprecated behavior — use #success? instead.
            unless $?.success?
              raise Capistrano::Error, "shell command failed with return code #{$?.exitstatus}"
            end

            logger.debug "copying cache to deployment staging area #{destination}"
            Dir.chdir(copy_cache) do
              FileUtils.mkdir_p(destination)
              queue = Dir.glob("*", File::FNM_DOTMATCH)
              while queue.any?
                item = queue.shift
                name = File.basename(item)
                next if name == "." || name == ".."
                next if copy_exclude.any? { |pattern| File.fnmatch(pattern, item) }
                if File.symlink?(item)
                  # Recreate symlinks in the staging area rather than
                  # hard-linking through them.
                  FileUtils.ln_s(File.readlink(File.join(copy_cache, item)), File.join(destination, item))
                elsif File.directory?(item)
                  queue += Dir.glob("#{item}/*", File::FNM_DOTMATCH)
                  FileUtils.mkdir(File.join(destination, item))
                else
                  # Hard-link regular files to avoid copying their contents.
                  FileUtils.ln(File.join(copy_cache, item), File.join(destination, item))
                end
              end
            end
          else
            logger.debug "getting (via #{copy_strategy}) revision #{revision} to #{destination}"
            system(command)

            # (The original checked copy_exclude.any? twice, nested; once is enough.)
            if copy_exclude.any?
              logger.debug "processing exclusions..."
              copy_exclude.each do |pattern|
                delete_list = Dir.glob(File.join(destination, pattern), File::FNM_DOTMATCH)
                # avoid the /.. trap that deletes the parent directories
                delete_list.delete_if { |dir| dir =~ /\/\.\.$/ }
                FileUtils.rm_rf(delete_list.compact)
              end
            end
          end

          # Snapshot the asset list before we start creating new files below
          # (REVISION and the manifest must not be treated as assets).
          asset_list = Dir.glob(File.join(destination, "*"), File::FNM_DOTMATCH)
          File.open(File.join(destination, "REVISION"), "w") { |f| f.puts(revision) }

          # Walk the asset tree, upload each blob under a SHA-versioned name,
          # and build the original-name => versioned-name manifest.
          manifest = {}
          while asset_list.any?
            item = asset_list.shift
            ext = File.extname(item)
            name = File.basename(item, ext)
            next if name.match(/^\./) # skip dotfiles (and the . / .. glob entries)
            if File.symlink?(item)
              raise "Arg I don't know what to do with symlinks yet"
            elsif File.directory?(item)
              asset_list += Dir.glob("#{item}/*", File::FNM_DOTMATCH)
            else
              Dir.chdir(destination) do
                # Ask the SCM (git ls-tree) for this file's blob SHA and use a
                # short 8-character form in the versioned name.
                if `#{source.scm("ls-tree HEAD #{item}")}` =~ /[0-9]*\sblob\s([A-Fa-f0-9]*)/ then
                  sha = $1[0..7]
                  # Push the asset to S3 under its versioned name; a silent
                  # failure here would produce a broken deploy, so check it.
                  unless system("s3cmd #{s3cmd_opts} put #{bucket_name}:#{name}.#{sha}#{ext} #{item}")
                    raise Capistrano::Error, "s3cmd put failed for #{item}"
                  end
                  # add to manifest
                  manifest["#{name}#{ext}"] = "#{name}.#{sha}#{ext}"
                else
                  logger.important "Could not find #{item} in repository, will not be part of deploy."
                end
              end
            end
          end

          # Write the manifest, named after the short (9-char) revision, and
          # push it to S3.
          manifest_file = File.join(destination, "#{revision[0..8]}.json")
          File.open(manifest_file, "w") { |f| f.puts(manifest.to_json) }
          # BUGFIX: the original uploaded the manifest under `name`, a stale
          # variable left over from the asset loop above (i.e. under the last
          # asset's basename). Use the manifest's own file name instead.
          unless system("s3cmd #{s3cmd_opts} put #{bucket_name}:#{File.basename(manifest_file)} #{manifest_file}")
            raise Capistrano::Error, "s3cmd put failed for #{manifest_file}"
          end

          # Also upload the manifest to the application servers.
          upload(manifest_file, manifest_file)
        ensure
          # Always remove the local staging area; ignore cleanup errors.
          FileUtils.rm_rf destination rescue nil
        end

        private

        # Name of the target S3 bucket (set :deploy_s3_bucket, "...").
        def bucket_name
          configuration[:deploy_s3_bucket]
        end

        # Extra command-line options passed through to s3cmd
        # (set :deploy_s3_options, "...").
        def s3cmd_opts
          configuration[:deploy_s3_options]
        end
      end
    end
  end
end
@ZeroStride
Copy link
Author

set :strategy, Capistrano::Deploy::Strategy::S3Version.new(self)
set :copy_strategy, :checkout

set :deploy_s3_bucket, "FacepunchBuckkit"
set :deploy_s3_options, "--dryrun"

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment