mongoid-sync

Mongoid::Sync

A microgem to sync your staging/production MongoDB database (and uploads) to your local environment.

Based on: Capistrano recipe to sync Rails MongoDB and files

Installation

Add this line to your application's Gemfile:

gem 'mongoid-sync', :git => 'git://gist.github.com/3313055.git'

And then execute:

$ bundle

Make sure you have capistrano and capistrano-ext installed, then add the following to your deploy.rb:

require 'mongoid_sync'
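If you also use the multistage extension from capistrano-ext, both requires typically sit near the top of deploy.rb. A minimal sketch (the multistage require is only needed when you work with stages):

require 'capistrano/ext/multistage'
require 'mongoid_sync'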

Configuration

The following block should be present in your deploy.rb file:

## Syncer
set :sync_directories, ["public/uploads"]
set :sync_backups, 1
set :db_file, "mongoid.yml"
set :db_drop, '--drop' # drop database (rewrites everything)
  • :sync_directories is an array of the directories you want to sync (uploads etc.)
  • :sync_backups is the number of backups you want to keep
  • :db_file is the Mongoid config file the tasks read connection details from (see the example below)
  • :db_drop is the drop flag passed to mongorestore; the default --drop should be fine (it rewrites the local database)
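The tasks read the per-environment connection details straight from that config file. A minimal sketch of a config/mongoid.yml, assuming a Mongoid 2-style layout with flat per-environment keys (the database names, host and credentials below are placeholders):

development:
  host: localhost
  database: myapp_development

production:
  host: db.example.com
  database: myapp_production
  username: myapp
  password: secret

Only the username, password, database and host keys are looked up for each environment; anything else in the file is ignored by the sync tasks.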

Usage

First, make sure the sync dir is created by running:

cap sync:setup

You can then sync your staging or production environment by running:

cap sync:down

This will sync both the MongoDB database and uploaded files.

You can also run the two steps separately:

cap sync:db
cap sync:fs
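If you use capistrano-ext's multistage setup, prefix the task with the stage you want to pull from, for example (the stage names are whatever you have configured):

cap production sync:down
cap staging sync:db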

Credits

Copyright (c) 2012 Robert Beekman
MIT License
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# -*- encoding: utf-8 -*-
Gem::Specification.new do |gem|
  gem.name         = 'mongoid-sync'
  gem.version      = '0.1.0'
  gem.platform     = Gem::Platform::RUBY
  gem.authors      = ["Robert Beekman"]
  gem.email        = ["[email protected]"]
  gem.description  = %q{A simple MongoDB syncer for Capistrano}
  gem.summary      = %q{Syncs MongoDB and uploads from staging/production to local}
  gem.homepage     = "http://80beans.com"
  gem.files        = ['mongoid_sync.rb']
  gem.require_path = '.'
end
require 'yaml'
require 'pathname'

# Based on http://gist.github.com/111597 and http://gist.github.com/339471
#
# Capistrano sync.rb task for syncing databases and directories between the
# local development environment and a remote (staging/production) environment.
#
# Changes were made to sync MongoDB databases, tested with Mongoid.
# Modified by Julius Pabrinkis
Capistrano::Configuration.instance.load do
  namespace :sync do
    after "deploy:setup", "sync:setup"

    desc <<-DESC
      Creates the sync dir in shared path. The sync directory is used to keep
      backups of database dumps and archives from synced directories. This task
      will be called on 'deploy:setup'.
    DESC
    task :setup do
      run "cd #{shared_path}; mkdir -p sync"
    end

    namespace :down do
      desc <<-DESC
        Syncs the database and declared directories from the selected stage
        to the local development environment. This task simply calls both the
        'sync:down:db' and 'sync:down:fs' tasks.
      DESC
      task :default do
        db and fs
      end
      desc <<-DESC
        Sync the database from the selected stage to local.
      DESC
      task :db, :roles => :db, :only => { :primary => true } do
        filename = "database.#{stage}.#{Time.now.strftime '%Y-%m-%d_%H-%M-%S'}.tar.bz2"
        on_rollback { delete "#{shared_path}/sync/#{filename}" }

        # Dump the remote database and archive the dump in the shared sync dir
        username, password, database, host = remote_database_config(stage)
        remote_database = database
        run "mongodump --db #{database}"
        run "tar -cjf #{shared_path}/sync/#{filename} dump/#{database}"
        run "rm -rf dump"
        purge_old_backups "database"

        # Download the archive and restore it into the local development database
        download "#{shared_path}/sync/#{filename}", "tmp/#{filename}"
        username, password, database = database_config('development')
        system "tar -xjvf tmp/#{filename}"
        system "mongorestore #{fetch(:db_drop, '')} --db #{database} dump/#{remote_database}"
        system "rm -f tmp/#{filename} && rm -rf dump"

        logger.important "sync database from '#{stage}' to local finished"
      end
      desc <<-DESC
        Sync the declared directories from the selected stage to local.
      DESC
      task :fs, :roles => :web, :once => true do
        server, port = host_and_port

        Array(fetch(:sync_directories, [])).each do |syncdir|
          unless File.directory? "#{syncdir}"
            logger.info "create local '#{syncdir}' folder"
            Dir.mkdir "#{syncdir}"
          end
          logger.info "sync #{syncdir} from #{server}:#{port} to local"
          destination, base = Pathname.new(syncdir).split
          # rsync the remote directory into its local parent, connecting as the Capistrano :user
          system "rsync --verbose --archive --compress --copy-links --delete --stats --rsh='ssh -p #{port}' #{user}@#{server}:#{current_path}/#{syncdir} #{destination.to_s}"
        end

        logger.important "sync filesystem from '#{stage}' to local finished"
      end
    end
    #
    # Reads the database credentials from the local config file (set via :db_file)
    # +db+ the name of the environment to get the credentials for
    # Returns username, password, database, host
    #
    def database_config(db)
      database = YAML::load_file("config/#{fetch(:db_file, 'database.yml')}")
      return database["#{db}"]['username'], database["#{db}"]['password'], database["#{db}"]['database'], database["#{db}"]['host']
    end

    #
    # Reads the database credentials from the remote config file (set via :db_file)
    # +db+ the name of the environment to get the credentials for
    # Returns username, password, database, host
    #
    def remote_database_config(db)
      remote_config = capture("cat #{current_path}/config/#{fetch(:db_file, 'database.yml')}")
      database = YAML::load(remote_config)
      return database["#{db}"]['username'], database["#{db}"]['password'], database["#{db}"]['database'], database["#{db}"]['host']
    end

    #
    # Returns the host name to sync from and the ssh port
    #
    def host_and_port
      return roles[:web].servers.first.host, ssh_options[:port] || roles[:web].servers.first.port || 22
    end
    #
    # Purge old backups within the shared sync directory, keeping :sync_backups of them
    #
    def purge_old_backups(base)
      count = fetch(:sync_backups, 5).to_i
      backup_files = capture("ls -xt #{shared_path}/sync/#{base}*").split.reverse
      if count >= backup_files.length
        logger.important "no old backups to clean up"
      else
        logger.info "keeping #{count} of #{backup_files.length} sync backups"
        delete_backups = (backup_files - backup_files.last(count)).join(" ")
        try_sudo "rm -rf #{delete_backups}"
      end
    end
  end
end