jClouds-based Hadoop Instance Provisioning
package util;

import static com.google.common.base.Predicates.not;
import static com.google.common.collect.Iterables.getOnlyElement;
import static org.jclouds.compute.predicates.NodePredicates.TERMINATED;
import static org.jclouds.compute.predicates.NodePredicates.inGroup;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.Writer;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;

import org.jclouds.compute.ComputeService;
import org.jclouds.compute.ComputeServiceContext;
import org.jclouds.compute.ComputeServiceContextFactory;
import org.jclouds.compute.domain.NodeMetadata;
import org.jclouds.compute.domain.Template;
import org.jclouds.compute.domain.TemplateBuilder;
import org.jclouds.openstack.nova.v2_0.compute.options.NovaTemplateOptions;

import com.google.common.base.Predicates;
public class HadoopCluster {

    private static String provider = null;
    private static String identity = null;
    private static String credential = null;
    private static String masterImage = null;
    private static String slaveImage = null;
    private static String secGroup = null;
    private static String serverGrp = null;
    private static String keyPair = null;
    private static int numSlaves = 0;

    public static void main(String[] argv) throws Exception {
        if (argv.length != 2)
            throw new Exception("Usage: java HadoopCluster <propertyFile> <start|stop>");

        // Load the cluster configuration from the property file given on the command line.
        Properties prop = new Properties();
        prop.load(new FileInputStream(argv[0]));
        provider = prop.getProperty("PROVIDER");
        identity = prop.getProperty("IDENTITY");
        credential = prop.getProperty("CREDENTIAL");
        masterImage = prop.getProperty("MASTER_IMAGE_ID");
        slaveImage = prop.getProperty("SLAVE_IMAGE_ID");
        secGroup = prop.getProperty("SECURITY_GROUP");
        serverGrp = prop.getProperty("SERVER_GROUP");
        keyPair = prop.getProperty("KEYPAIR");
        numSlaves = Integer.parseInt(prop.getProperty("NUM_SLAVES"));

        String action = argv[1];
        if (action.equals("start"))
            startCluster();
        else if (action.equals("stop"))
            terminateCluster();
    }
    private static void startCluster() throws Exception {
        System.out.println("Creating Hadoop Master...");
        NodeMetadata node = createNode(masterImage);
        String masterPrivate = node.getPrivateAddresses().iterator().next();
        Iterator<String> it = node.getPublicAddresses().iterator();
        String masterPublic = it.next();
        System.out.println("Master Private IP = " + masterPrivate);
        System.out.println("Master Public IP = " + masterPublic);

        // Collect the addresses in the order expected downstream:
        // "<masterPublic> <masterPrivate> <slave1Private> <slave2Private> ..."
        StringBuilder ipList = new StringBuilder();
        ipList.append(masterPublic).append(" ");
        ipList.append(masterPrivate).append(" ");

        for (int si = 0; si < numSlaves; si++) {
            System.out.println("Creating Hadoop Slave " + si);
            node = createNode(slaveImage);
            String slavePrivate = node.getPrivateAddresses().iterator().next();
            System.out.println("Slave Private IP = " + slavePrivate);
            ipList.append(slavePrivate).append(" ");
        }
        emitIPs(ipList.toString());
    }
    // Writes the space-separated address list to ips.txt in the working directory.
    public static void emitIPs(String ipList) throws Exception {
        File file = new File("ips.txt");
        Writer output = new BufferedWriter(new FileWriter(file));
        try {
            output.write(ipList);
        } finally {
            output.close();
        }
    }
    public static NodeMetadata createNode(String imageId) throws Exception {
        System.out.println("Connecting to " + provider);
        ComputeServiceContext ctx = new ComputeServiceContextFactory().createContext(provider, identity, credential);
        System.out.println("Authenticating...");
        ComputeService compute = ctx.getComputeService();

        // Build a template from the requested image and apply the Nova-specific options:
        // reuse an existing keypair and security group rather than generating new ones.
        TemplateBuilder templateBuilder = compute.templateBuilder();
        System.out.println("Loading Image " + imageId);
        templateBuilder.imageId(imageId);
        Template template = templateBuilder.build();
        System.out.println("Set KeyPair...");
        template.getOptions().as(NovaTemplateOptions.class).generateKeyPair(false);
        System.out.println("Set SecurityGroups...");
        template.getOptions().as(NovaTemplateOptions.class).securityGroupNames(secGroup);
        template.getOptions().as(NovaTemplateOptions.class).keyPairName(keyPair);
        System.out.println("Creating Node...");
        return getOnlyElement(compute.createNodesInGroup(secGroup, 1, template));
    }
    private static void terminateCluster() throws Exception {
        System.out.println("Connecting to " + provider);
        ComputeServiceContext ctx = new ComputeServiceContextFactory().createContext(provider, identity, credential);
        System.out.println("Authenticating...");
        ComputeService compute = ctx.getComputeService();

        // Destroy every node in the server group that is not already terminated.
        System.out.printf("Terminating instances in cluster %s%n", serverGrp);
        Set<? extends NodeMetadata> destroyed = compute.destroyNodesMatching(
                Predicates.<NodeMetadata> and(not(TERMINATED), inGroup(serverGrp)));
        System.out.printf("Terminated Nodes %s%n", destroyed);
    }
}
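
For reference, here is a sketch of the property file that main() expects, assuming an OpenStack Nova cloud (consistent with the NovaTemplateOptions used above). Every value below is a placeholder: the image IDs, keypair, and security group must already exist in the target cloud, and the exact form of the image IDs depends on the provider.

# cluster.properties -- illustrative values only
PROVIDER=openstack-nova
IDENTITY=myTenant:myUser
CREDENTIAL=myPassword
MASTER_IMAGE_ID=RegionOne/hadoop-master-image-id
SLAVE_IMAGE_ID=RegionOne/hadoop-slave-image-id
SECURITY_GROUP=hadoop
SERVER_GROUP=hadoop
KEYPAIR=hadoop-keypair
NUM_SLAVES=4

Note that createNode() passes SECURITY_GROUP as the jclouds node group while terminateCluster() matches nodes with inGroup(SERVER_GROUP), so the two properties should normally carry the same value for teardown to find the nodes it created. With the jclouds jars and their dependencies on the classpath, the cluster is then started or torn down with:

java -cp <jclouds-and-dependency-jars> util.HadoopCluster cluster.properties start
java -cp <jclouds-and-dependency-jars> util.HadoopCluster cluster.properties stop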