Created
March 11, 2014 05:05
-
-
Save jayunit100/9479790 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import groovy.json.JsonSlurper; | |
import java.io.FileNotFoundException; | |
import java.io.FileReader; | |
import java.util.List; | |
import java.util.Map; | |
import org.apache.hadoop.conf.Configuration; | |
import org.apache.hadoop.fs.permission.FsPermission; | |
import org.apache.hadoop.fs.FileSystem; | |
import org.apache.hadoop.fs.Path; | |
import java.io.BufferedReader; | |
import java.io.InputStreamReader; | |
import java.io.OutputStreamWriter; | |
import java.io.Writer; | |
import org.apache.hadoop.fs.Path; | |
import org.apache.commons.logging.Log; | |
import org.apache.commons.logging.LogFactory; | |
import org.apache.hadoop.fs.permission.FsPermission; | |
import org.apache.bigtop.itest.shell.Shell | |
// Script-wide logger (commons-logging) and the usage text printed on bad invocation.
def LOG = LogFactory.getLog(this.getClass());
// NOTE: triple-quoted Groovy string; the leading backslash suppresses the first newline.
def USAGE = """\
USAGE:
This script provisions the skeleton of a hadoop file system.
It takes a single argument: The json schema (a list of lists),
of 4 element tuples. For an example , see the bigtop init-hcfs.json
file. The main elements of the JSON file are:
dir: list of dirs to create with permissions.
user: list of users to setup home dirs with permissions.
root_user: The root owner of distributed FS, to run shell commands.
"""
/** | |
* The HCFS generic provisioning process: | |
* 1) Create a file system skeleton. | |
* 2) Create users with home dirs in /user. | |
**/ | |
// Validate CLI arguments: exactly one argument (path to the JSON schema) is required.
// FIX: the original "if(! args.length == 1)" parses as "(!args.length) == 1" -- a
// boolean-vs-int comparison that is always false in Groovy, so the usage message
// could never be printed. FIX: bare "exit 1" is not defined in a Groovy script;
// System.exit(1) is the correct call.
if (args.length != 1) {
  System.err.println(USAGE);
  System.err.println("EXAMPLE: groovy -classpath /usr/lib/hadoop/hadoop-common-2.0.6-alpha.jar:/root/.m2/repository/org/apache/bigtop/itest/itest-common/0.8.0-SNAPSHOT/itest-common-0.8.0-SNAPSHOT.jar:/usr/lib/hadoop/lib/guava-11.0.2.jar:/etc/hadoop/conf/:/usr/lib/hadoop/lib/commons-configuration-1.6.jar:/usr/lib/hadoop/lib/commons-lang-2.5.jar:/usr/lib/hadoop/hadoop-auth.jar:/usr/lib/hadoop/lib/slf4j-api-1.6.1.jar:/usr/lib/hadoop-hdfs/hadoop-hdfs-2.0.6-alpha.jar:/usr/lib/hadoop-hdfs/lib/protobuf-java-2.4.0a.jar:/root/.m2/repository/org/apache/bigtop/itest/itest-common/0.8.0-SNAPSHOT/itest-common-0.8.0-20140214.022148-33.jar ./provision.groovy init-hcfs.json");
  System.exit(1);
}
else if (!new File(args[0]).exists()) {
  System.err.println("Cannot find file " + args[0]);
  System.exit(1);
}
// Parse the provisioning schema from the JSON file given on the command line.
// FIX: removed an unused duplicate JsonSlurper ("def v = new JsonSlurper()").
def json = args[0];
def jsonParser = new JsonSlurper();
def parsedData = jsonParser.parse(new FileReader(json));
def dirs = (List) parsedData.get("dir");      // [name, mode, user, group] tuples
def users = (List) parsedData.get("user");    // [user, permission, group] tuples
def root_user = parsedData.get("root_user");  // superuser identity for shell commands
// Connect to whatever file system fs.defaultFS points at (HDFS or another HCFS).
Configuration conf = new Configuration();
LOG.info("Provisioning file system for file system from Configuration: " + conf.get("fs.defaultFS"));
FileSystem fs = FileSystem.get(conf);
LOG.info("\nPROVISIONING WITH FILE SYSTEM : " + fs.getClass()+"\n");
// Create each directory from the schema, applying mode and ownership when present.
// FIX: the original built "Path file = new Path(name)" but never used it, and then
// re-constructed new Path(name) three separate times; reuse a single Path instead.
dirs.each() {
  System.out.println("here " + it);
  def (name, mode, user, group) = it;
  Path dirPath = new Path(name);
  LOG.info("mkdirs " + name + " " + mode);
  LOG.info("Owner " + name + " " + user + " " + group);
  if (mode == null) {
    // No mode in the schema: create with the file system's default permission.
    fs.mkdirs(dirPath);
  } else {
    // NOTE(review): assumes the JSON mode coerces to a numeric short (e.g. 0755);
    // confirm against the init-hcfs.json schema.
    fs.mkdirs(dirPath, new FsPermission((short) mode));
  }
  if (user != null) {
    fs.setOwner(dirPath, user, group);
  } else {
    LOG.info("Skipping ... user null");
  }
}
/** | |
* For each user we do some initial setup, create a home directory, etc... | |
*/ | |
/**
 * Provision a home directory under /user for every user tuple in the schema:
 * create the directory, hand it over to the user/group, then apply the
 * permission bits (intended: ALL for owner, RX for group and other).
 */
users.each { tuple ->
  def (username, permission, grp) = tuple;
  LOG.info("current user: " + username);
  def home = new Path("/user/" + username);
  fs.mkdirs(home);
  fs.setOwner(home, username, grp);
  fs.setPermission(home, FsPermission.createImmutable((short) permission));
}
/** | |
* Now some bigtop specific stuff. | |
* This may or may not be done in all hadoop distros | |
* And the semantics are not necessarily yet included in the | |
* HCFS Json file. | |
*/ | |
// The remaining setup shells out as the distributed-FS superuser from the schema.
def sh = new Shell("/bin/bash -s", root_user);
LOG.info("Now running some basic shell commands for setting up oozie shared libraries.");
/** | |
* input: | |
fs (i.e. filesystem ) | |
input dir (i.e. /usr/lib/hadoop-mapreduce/) | |
name filter (i.e. hadoop-distcp) | |
* target dir (/user/oozie/share) | |
*/ | |
/**
 * Copy jar files matching a substring filter from a local directory into a
 * target directory on the distributed file system.
 *
 * FIX: the suffix check used endsWith("jar"), which also accepts names like
 * "foojar"; tightened to ".jar". FIX: File.listFiles returns null when the
 * input is missing or unreadable, which crashed with an NPE; now skipped with
 * a warning instead.
 *
 * @param fs     destination file system
 * @param input  local directory to scan (e.g. /usr/lib/hadoop-mapreduce/)
 * @param jarstr substring each file name must contain ("" matches everything)
 * @param target destination directory path (e.g. /user/oozie/share/lib/...)
 */
public void copyJars(FileSystem fs, File input, String jarstr, Path target) {
  File[] jars = input.listFiles(new FilenameFilter() {
    public boolean accept(File dir, String filename) {
      return filename.contains(jarstr) && filename.endsWith(".jar");
    }
  });
  if (jars == null) {
    // Not a directory, or unreadable: nothing to copy.
    System.err.println("copyJars: skipping " + input + " (not a readable directory)");
    return;
  }
  jars.each { file ->
    System.out.println("copying " + file);
    fs.copyFromLocalFile(new Path(file.getAbsolutePath()), target);
  }
}
/** | |
* Copy shared libraries into oozie. | |
*/ | |
/**
 * Copy shared libraries into the oozie sharelib.
 * FIX: the distcp and pig targets were spelled "/usr/oozie/..." while hive and
 * streaming used "/user/oozie/..."; the oozie sharelib lives under /user/oozie
 * (consistent with the hive/streaming calls), so the "/usr" spellings were typos.
 * FIX: removed a third pig copy that exactly duplicated the /usr/lib/pig/lib
 * copy above (same source directory, same target).
 */
copyJars(
  fs, new File("/usr/lib/hive/lib"),
  "", new Path("/user/oozie/share/lib/hive/"))
copyJars(
  fs, new File("/usr/lib/hadoop-mapreduce/"),
  "hadoop-streaming", new Path("/user/oozie/share/lib/mapreduce-streaming/"))
copyJars(
  fs, new File("/usr/lib/hadoop-mapreduce/"),
  "hadoop-distcp", new Path("/user/oozie/share/lib/distcp"))
copyJars(
  fs, new File("/usr/lib/pig/lib/"),
  "", new Path("/user/oozie/share/lib/pig"))
copyJars(
  fs, new File("/usr/lib/pig/"),
  "", new Path("/user/oozie/share/lib/pig"))
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment