Skip to content

Instantly share code, notes, and snippets.

@ajayhn
Last active August 29, 2015 14:20
Show Gist options
  • Select an option

  • Save ajayhn/a241b57847eebb1c9f74 to your computer and use it in GitHub Desktop.

Select an option

Save ajayhn/a241b57847eebb1c9f74 to your computer and use it in GitHub Desktop.
cluster-join
#!/usr/bin/python
#
# Copyright (c) 2015 Juniper Networks, Inc. All rights reserved.
#
"""Contrail Cluster Join Operations."""
# Example: python join.py --zk_index 2 --new_node 1.1.1.5 --roles database
# python join.py --my_zk_index 2 --my_ip 1.1.1.5
# --other_nodes_ip 1.1.1.4 --other_nodes_name c1 --other_nodes_zk_indexes 1
# python join.py --my_zk_index 3 --my_ip 1.1.1.6
# --other_nodes_ip 1.1.1.4,1.1.1.5 --other_nodes_name c1,c2 --other_nodes_zk_indexes 1,2
# --erlang_cookie 5ec9eedc-1c75-4ea0-9932-8ac8e0fd08e0
import argparse
import datetime
import os
import shutil
import sys
try:
    # Python 3 renamed the module; keep the Python 2 name used below.
    import configparser as ConfigParser
except ImportError:
    import ConfigParser
from distutils.dir_util import copy_tree

from fabric.api import local
from fabric.context_managers import settings
# Data directories whose presence on this host decides which services the
# join/admit steps operate on (see the os.path.exists checks below).
CASSANDRA_DIR = '/var/lib/cassandra'
ZK_DIR = '/var/lib/zookeeper'
RABBITMQ_DIR = '/var/lib/rabbitmq'
class ContrailClusterJoin(object):
    """Template base class for cluster join operations.

    Subclasses must provide ``_parse_args`` and ``_validate_args``;
    construction parses the given argument string and then validates it.
    """

    def __init__(self, args_str):
        self._parse_args(args_str)
        self._validate_args()
    # end __init__
# end class ContrailClusterJoin
class JoinRequest(ContrailClusterJoin):
    """Runs on the NEW node: reconfigures cassandra, zookeeper and rabbitmq
    (whichever data directories exist locally) so this node joins an
    existing cluster. Each service's config/data is backed up into a
    timestamped sibling directory of its data dir before it is touched."""

    def _parse_args(self, args_str):
        '''
        Base parser.

        Parses *args_str* (whitespace-separated CLI options) into
        self._args_obj. An optional -c/--conf_file supplies defaults from
        its [DEFAULTS] section; explicit CLI options override them.
        '''
        # Source any specified config/ini file
        # Turn off help, so we print all options in response to -h
        conf_parser = argparse.ArgumentParser(add_help=False)
        conf_parser.add_argument("-c", "--conf_file",
                                 help="Specify config file", metavar="FILE")
        args, remaining_argv = conf_parser.parse_known_args(args_str.split())
        defaults = {}
        if args.conf_file:
            # SafeConfigParser was removed in Python 3.12; fall back to the
            # plain ConfigParser class when it is absent.
            parser_cls = getattr(ConfigParser, 'SafeConfigParser',
                                 ConfigParser.ConfigParser)
            config = parser_cls()
            config.read([args.conf_file])
            defaults.update(dict(config.items("DEFAULTS")))
        # Override with CLI options
        # Don't suppress add_help here so it will handle -h
        parser = argparse.ArgumentParser(
            # Inherit options from config_parser
            parents=[conf_parser],
            # print script description with -h/--help
            description=__doc__,
            # Don't mess with format of description
            formatter_class=argparse.RawDescriptionHelpFormatter,
        )
        parser.set_defaults(**defaults)
        parser.add_argument("-e", "--erlang_cookie",
                            help="Erlang cookie of rabbitmq cluster")
        parser.add_argument("-i", "--my_zk_index",
                            help="Index of this node in cluster (for zookeeper conf)")
        parser.add_argument("-a", "--my_ip",
                            help="IP address of this node in cluster")
        parser.add_argument("-p", "--other_nodes_ip",
                            help="Comma separated IP addresses of other nodes in cluster")
        parser.add_argument("-m", "--other_nodes_name",
                            help="Comma separated hostnames of other nodes in cluster")
        parser.add_argument("-z", "--other_nodes_zk_indexes",
                            help="Comma separated zookeeper indexes of other nodes in cluster")
        self._args_obj, remaining_argv = parser.parse_known_args(remaining_argv)
    # end _parse_args

    def _validate_args(self):
        """Check that every option needed by the locally-present services
        was supplied; raises Exception on the first problem found."""
        args = self._args_obj
        # csv to list. Robustness fix: options that were not supplied are
        # None, and None.split(',') used to crash with AttributeError
        # instead of reaching the explicit error messages below.
        other_nodes_ip = args.other_nodes_ip.split(',') if args.other_nodes_ip else []
        other_nodes_name = args.other_nodes_name.split(',') if args.other_nodes_name else []
        other_nodes_zk_indexes = (args.other_nodes_zk_indexes.split(',')
                                  if args.other_nodes_zk_indexes else [])
        if os.path.exists(ZK_DIR):
            if len(set(other_nodes_ip)) != len(set(other_nodes_zk_indexes)):
                raise Exception("Error: Unequal list lengths for other_nodes_ip vs their zookeeper indexes.")
            if not args.my_zk_index:
                raise Exception("Error: my_zk_index has to be specified.")
            if not args.my_ip:
                raise Exception("Error: my_ip has to be specified.")
        if os.path.exists(CASSANDRA_DIR):
            if not other_nodes_ip:
                raise Exception("Error: other_nodes_ip needs to be specified.")
        if os.path.exists(RABBITMQ_DIR):
            if len(set(other_nodes_ip)) != len(set(other_nodes_name)):
                raise Exception("Error: Unequal list lengths for other_nodes_ip vs other_nodes_name")
            if not args.erlang_cookie:
                raise Exception("Error: erlang_cookie needs to be specified.")
            if not other_nodes_name:
                raise Exception("Error: other_nodes_name needs to be specified.")
    # end _validate_args

    def _make_backup_dir(self, base_dir, prefix, saved_suffix):
        """Create and return a timestamped backup dir next to *base_dir*."""
        saved_dirname = '%s_%s' % (prefix, saved_suffix)
        saved_dirpath = '%s/%s' % ('/'.join(base_dir.split('/')[:-1]),
                                   saved_dirname)
        local('mkdir %s' % (saved_dirpath))
        return saved_dirpath

    def _join_cassandra(self, saved_suffix):
        """Point cassandra's seed list at the existing nodes and rebootstrap."""
        local('service contrail-database stop')
        saved_dirpath = self._make_backup_dir(CASSANDRA_DIR, 'cassandra',
                                              saved_suffix)
        # backup config file
        local("cp /etc/cassandra/cassandra.yaml %s/" % (saved_dirpath))
        # edit config file: seeds become the comma-separated existing nodes
        quoted_list = '"%s"' % (self._args_obj.other_nodes_ip)
        local("sed -i 's/seeds: .*$/seeds: %s/' /etc/cassandra/cassandra.yaml" % (quoted_list))
        # move old data aside so the node bootstraps from the cluster
        local('mv %s/* %s/' % (CASSANDRA_DIR, saved_dirpath))
        local('service contrail-database start')

    def _join_zookeeper(self, saved_suffix):
        """Rewrite myid/zoo.cfg with the full ensemble and restart zookeeper."""
        # csv to list
        other_nodes_ip = self._args_obj.other_nodes_ip.split(',')
        other_nodes_zk_indexes = self._args_obj.other_nodes_zk_indexes.split(',')
        with settings(warn_only=True):
            local('service zookeeper stop')
        saved_dirpath = self._make_backup_dir(ZK_DIR, 'zookeeper', saved_suffix)
        # backup config file
        local("cp -R /etc/zookeeper/* %s/" % (saved_dirpath))
        # edit config file
        local("echo %s > /etc/zookeeper/conf/myid" % (self._args_obj.my_zk_index))
        # clobber all server entries and generate new list
        local("sed -i 's/^server.*$//' /etc/zookeeper/conf/zoo.cfg")
        local("echo server.%s=%s:2888:3888 >> /etc/zookeeper/conf/zoo.cfg"
              % (self._args_obj.my_zk_index, self._args_obj.my_ip))
        for idx, ip in zip(other_nodes_zk_indexes, other_nodes_ip):
            local("echo server.%s=%s:2888:3888 >> /etc/zookeeper/conf/zoo.cfg"
                  % (idx, ip))
        # backup data directory
        local('mv %s/* %s/' % (ZK_DIR, saved_dirpath))
        local('ln -s /etc/zookeeper/conf/myid %s/myid' % (ZK_DIR))
        local('service zookeeper start')

    def _join_rabbitmq(self, saved_suffix):
        """Share the erlang cookie, fix /etc/hosts if needed, join the cluster."""
        # csv to list
        other_nodes_ip = self._args_obj.other_nodes_ip.split(',')
        other_nodes_name = self._args_obj.other_nodes_name.split(',')
        with settings(warn_only=True):
            local('rabbitmqctl stop_app')
        saved_dirpath = self._make_backup_dir(RABBITMQ_DIR, 'rabbitmq',
                                              saved_suffix)
        # backup cookie file, then overwrite with the cluster-wide cookie.
        # NOTE(review): echo appends a trailing newline to .erlang.cookie —
        # verify the erlang runtime on this platform tolerates that.
        local('cp /var/lib/rabbitmq/.erlang.cookie %s/' % (saved_dirpath))
        local('echo "%s" > /var/lib/rabbitmq/.erlang.cookie' % (self._args_obj.erlang_cookie))
        # create hosts entry if not resolvable
        for ip, name in zip(other_nodes_ip, other_nodes_name):
            with settings(warn_only=True):
                out = local("ping -c 1 %s" % (name), capture=True)
                if not out.succeeded:
                    local("echo %s %s %s-ctrl >> /etc/hosts"
                          % (ip, name, name))
        # NOTE(review): node name is formed as <host>@<host>; confirm the
        # deployment really runs rabbit with that nodename (default would
        # be rabbit@<host>).
        local('rabbitmqctl join_cluster %s@%s' % (other_nodes_name[0],
                                                  other_nodes_name[0]))
        local('rabbitmqctl start_app')

    def request_cluster_join(self):
        """Entry point: join each service whose data directory exists
        locally, using one shared timestamp for all backup directories."""
        saved_suffix = str(datetime.datetime.now()).replace(' ', '_')
        # cassandra join
        if os.path.exists(CASSANDRA_DIR):
            self._join_cassandra(saved_suffix)
        # zookeeper join
        if os.path.exists(ZK_DIR):
            self._join_zookeeper(saved_suffix)
        # rabbitmq join
        if os.path.exists(RABBITMQ_DIR):
            self._join_rabbitmq(saved_suffix)
    # end request_cluster_join
# class JoinRequest
class JoinAdmit(ContrailClusterJoin):
    """Runs on an EXISTING cluster member: appends the new node's server
    entry to the local zookeeper ensemble config (with a backup of config
    and data first) and restarts zookeeper."""

    def _parse_args(self, args_str):
        '''
        Base parser.

        Parses *args_str* into self._args_obj. An optional -c/--conf_file
        supplies defaults from its [DEFAULTS] section; explicit CLI options
        override them.
        '''
        # Source any specified config/ini file
        # Turn off help, so we print all options in response to -h
        conf_parser = argparse.ArgumentParser(add_help=False)
        conf_parser.add_argument("-c", "--conf_file",
                                 help="Specify config file", metavar="FILE")
        args, remaining_argv = conf_parser.parse_known_args(args_str.split())
        defaults = {}
        if args.conf_file:
            # SafeConfigParser was removed in Python 3.12; fall back to the
            # plain ConfigParser class when it is absent.
            parser_cls = getattr(ConfigParser, 'SafeConfigParser',
                                 ConfigParser.ConfigParser)
            config = parser_cls()
            config.read([args.conf_file])
            defaults.update(dict(config.items("DEFAULTS")))
        # Override with CLI options
        # Don't suppress add_help here so it will handle -h
        parser = argparse.ArgumentParser(
            # Inherit options from config_parser
            parents=[conf_parser],
            # print script description with -h/--help
            description=__doc__,
            # Don't mess with format of description
            formatter_class=argparse.RawDescriptionHelpFormatter,
        )
        parser.set_defaults(**defaults)
        parser.add_argument("-i", "--zk_index",
                            help="Zookeeper index of new node joining cluster")
        parser.add_argument("-n", "--new_node",
                            help="IP address of new node joining cluster")
        parser.add_argument("-r", "--roles",
                            help="Comma separated roles of new node joining cluster")
        self._args_obj, remaining_argv = parser.parse_known_args(remaining_argv)
    # end _parse_args

    def _validate_args(self):
        """Ensure new_node and roles were given and, when this member runs
        zookeeper and the new node carries the database role, that a
        zookeeper index was supplied. Raises Exception otherwise."""
        if not self._args_obj.new_node or not self._args_obj.roles:
            raise Exception("Error: New node ip or roles has not been specified.")
        # Bug fix: 'roles' was used below without ever being defined, so
        # the database-role check raised NameError instead of validating.
        roles = self._args_obj.roles.split(',')
        if os.path.exists(ZK_DIR):
            if 'database' in roles and not self._args_obj.zk_index:
                raise Exception("Error: New node has database role but zookeeper index not specified.")
    # end _validate_args

    def admit_to_cluster(self):
        """Back up zookeeper config/data, add the new node's server line to
        zoo.cfg, and restart zookeeper. No-op when ZK_DIR is absent."""
        if os.path.exists(ZK_DIR):
            with settings(warn_only=True):
                local('service zookeeper stop')
            # create a directory for backup
            saved_dirname = 'zookeeper_%s' % (str(datetime.datetime.now()).replace(' ', '_'))
            saved_dirpath = '%s/%s' % ('/'.join(ZK_DIR.split('/')[:-1]),
                                       saved_dirname)
            local('mkdir %s' % (saved_dirpath))
            # backup config file
            local("cp -R /etc/zookeeper/* %s/" % (saved_dirpath))
            # edit config file: register the new ensemble member
            local("echo server.%s=%s:2888:3888 >> /etc/zookeeper/conf/zoo.cfg"
                  % (self._args_obj.zk_index,
                     self._args_obj.new_node))
            # backup data directory (copy, not move: this member keeps its data)
            local('cp -R %s/* %s/' % (ZK_DIR, saved_dirpath))
            local('service zookeeper start')
    # end admit_to_cluster
# class JoinAdmit
def cluster_join_request(args_str=None):
    """Parse *args_str* and run the full cluster-join on this new node."""
    request = JoinRequest(args_str)
    request.request_cluster_join()
# end cluster_join_request
def cluster_join_admit(args_str=None):
    """Parse *args_str* and record the new node on this existing member."""
    admitter = JoinAdmit(args_str)
    admitter.admit_to_cluster()
# end cluster_join_admit
if __name__ == "__main__":
    # NOTE(review): only the join-request path is reachable from the command
    # line; cluster_join_admit has no CLI hook in this script.
    cluster_join_request(' '.join(sys.argv[1:]))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment