Problem running ./site.sh for ansible-hadoop from analytically: the CDH restart handlers report "service not found" and the namenode format step aborts.
# Kesten Ansible managed: accumulo/templates/accumulo_hosts.j2 modified on 2013-10-14 14:58:02 by kbroughton on mb-kbroughton.local
# Hadoop Nodes
# ============
[zookeepers] # minimal 3, must be odd number of servers
Ubuntu_Cluster_02 zoo_id=2
Ubuntu_Cluster_03 zoo_id=3
Ubuntu_Cluster_04 zoo_id=4
Ubuntu_Cluster_05 zoo_id=5
Ubuntu_Cluster_06 zoo_id=6
# Need at least two
[namenodes]
Ubuntu_Cluster_02 ganglia_accept=true
Ubuntu_Cluster_06 ganglia_accept=true
[journalnodes] # minimal 3, must be odd number of servers
Ubuntu_Cluster_02
Ubuntu_Cluster_03
Ubuntu_Cluster_04
Ubuntu_Cluster_05
Ubuntu_Cluster_06
# Just need one master to be resourcemanager
[resourcemanager]
Ubuntu_Cluster_02
[datanodes]
Ubuntu_Cluster_03
Ubuntu_Cluster_04
Ubuntu_Cluster_05
[nodemanagers:children]
datanodes
# only need one slave for historyserver
[historyserver]
Ubuntu_Cluster_03
# HBase Nodes
# ===========
[hbase_masters:children]
namenodes
[regionservers:children]
datanodes
[monitors]
Ubuntu_Cluster_01
# Groups
# ======
[masters:children]
zookeepers
namenodes
resourcemanager
hbase_masters
[slaves:children]
datanodes
nodemanagers
regionservers
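The :children groups above compose the datanodes into nodemanagers/regionservers and the role groups into masters/slaves, and the zookeepers carry a per-host zoo_id variable. A quick way to sanity-check that the inventory resolves as intended before running ./site.sh is a throwaway play like the sketch below; this is a hypothetical check, not part of the gist, and it assumes the same ansibler user the playbooks further down use.

# hypothetical sanity-check play (not from the gist): confirms group
# membership and the zoo_id host var before the real site.sh run
- hosts: zookeepers
  user: ansibler
  tasks:
    - name: show each zookeeper and its zoo_id
      debug: msg="{{ inventory_hostname }} has zoo_id {{ zoo_id }}"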
+++++++++++++++++++++++++++++++++
NOTIFIED: [cdh_hadoop_config | restart hadoop-hdfs-namenode] ******************
failed: [Ubuntu_Cluster_03] => {"failed": true, "item": ""}
msg: service not found: hadoop-hdfs-namenode
...ignoring
failed: [Ubuntu_Cluster_02] => {"failed": true, "item": ""}
msg: service not found: hadoop-hdfs-namenode
...ignoring
failed: [Ubuntu_Cluster_04] => {"failed": true, "item": ""}
msg: service not found: hadoop-hdfs-namenode
...ignoring
failed: [Ubuntu_Cluster_06] => {"failed": true, "item": ""}
msg: service not found: hadoop-hdfs-namenode
...ignoring
failed: [Ubuntu_Cluster_05] => {"failed": true, "item": ""}
msg: service not found: hadoop-hdfs-namenode
...ignoring
NOTIFIED: [cdh_hadoop_config | restart hadoop-hdfs-datanode] ******************
failed: [Ubuntu_Cluster_03] => {"failed": true, "item": ""}
msg: service not found: hadoop-hdfs-datanode
...ignoring
failed: [Ubuntu_Cluster_04] => {"failed": true, "item": ""}
msg: service not found: hadoop-hdfs-datanode
...ignoring
failed: [Ubuntu_Cluster_02] => {"failed": true, "item": ""}
msg: service not found: hadoop-hdfs-datanode
...ignoring
failed: [Ubuntu_Cluster_06] => {"failed": true, "item": ""}
msg: service not found: hadoop-hdfs-datanode
...ignoring
failed: [Ubuntu_Cluster_05] => {"failed": true, "item": ""}
msg: service not found: hadoop-hdfs-datanode
...ignoring
NOTIFIED: [cdh_hadoop_config | restart hadoop-hdfs-journalnode] ***************
failed: [Ubuntu_Cluster_06] => {"failed": true, "item": ""}
msg: service not found: hadoop-hdfs-journalnode
...ignoring
+++++++++++++++++++++++++++++++
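The "...ignoring" lines above mean the notified handlers are allowed to fail, and "service not found" suggests the hadoop-hdfs-* init scripts were not installed on those hosts at the point the handlers ran. As a rough idea of the shape such a handler usually takes, here is a guess at what cdh_hadoop_config defines; this is a hypothetical sketch, not the role's actual source.

# hypothetical handler shape (the real cdh_hadoop_config role may differ);
# "...ignoring" in the run output implies failures are tolerated here
- name: restart hadoop-hdfs-namenode
  service: name=hadoop-hdfs-namenode state=restarted
  ignore_errors: true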
PLAY [namenodes[0]] ***********************************************************
TASK: [Make sure the /data/dfs/nn directory exists] ***************************
ok: [Ubuntu_Cluster_02]
TASK: [Make sure the namenode is formatted - WILL NOT FORMAT IF /data/dfs/nn/current/VERSION EXISTS TO AVOID DATA LOSS] ***
failed: [Ubuntu_Cluster_02] => {"changed": true, "cmd": " hdfs namenode -format -force ", "delta": "0:00:00.707221", "end": "2013-10-17 14:42:39.876808", "rc": 1, "start": "2013-10-17 14:42:39.169587"}
stderr: 13/10/17 14:42:39 INFO namenode.NameNode: STARTUP_MSG:
/************************************************************
STARTUP_MSG: Starting NameNode
STARTUP_MSG: host = ubuntu/10.0.10.53
STARTUP_MSG: args = [-format, -force]
STARTUP_MSG: version = 2.0.0-cdh4.4.0
STARTUP_MSG: classpath = /etc/hadoop/conf:/usr/lib/hadoop/lib/jettison-1.1.jar:/usr/lib/hadoop/lib/commons-net-3.1.jar:/usr/lib/hadoop/lib/xmlenc-0.52.jar:/usr/lib/hadoop/lib/jsp-api-2.1.jar:/usr/lib/hadoop/lib/junit-4.8.2.jar:/usr/lib/hadoop/lib/servlet-api-2.5.jar:/usr/lib/hadoop/lib/commons-digester-1.8.jar:/usr/lib/hadoop/lib/zookeeper-3.4.5-cdh4.4.0.jar:/usr/lib/hadoop/lib/slf4j-log4j12-1.6.1.jar:/usr/lib/hadoop/lib/jasper-compiler-5.5.23.jar:/usr/lib/hadoop/lib/jsr305-1.3.9.jar:/usr/lib/hadoop/lib/activation-1.1.jar:/usr/lib/hadoop/lib/jline-0.9.94.jar:/usr/lib/hadoop/lib/commons-codec-1.4.jar:/usr/lib/hadoop/lib/stax-api-1.0.1.jar:/usr/lib/hadoop/lib/protobuf-java-2.4.0a.jar:/usr/lib/hadoop/lib/commons-math-2.1.jar:/usr/lib/hadoop/lib/jetty-util-6.1.26.cloudera.2.jar:/usr/lib/hadoop/lib/commons-httpclient-3.1.jar:/usr/lib/hadoop/lib/jersey-server-1.8.jar:/usr/lib/hadoop/lib/commons-lang-2.5.jar:/usr/lib/hadoop/lib/jackson-core-asl-1.8.8.jar:/usr/lib/hadoop/lib/log4j-1.2.17.jar:/usr/lib/hadoop/lib/commons-configuration-1.6.jar:/usr/lib/hadoop/lib/xz-1.0.jar:/usr/lib/hadoop/lib/mockito-all-1.8.5.jar:/usr/lib/hadoop/lib/kfs-0.3.jar:/usr/lib/hadoop/lib/commons-beanutils-core-1.8.0.jar:/usr/lib/hadoop/lib/jersey-core-1.8.jar:/usr/lib/hadoop/lib/jasper-runtime-5.5.23.jar:/usr/lib/hadoop/lib/jsch-0.1.42.jar:/usr/lib/hadoop/lib/commons-el-1.0.jar:/usr/lib/hadoop/lib/commons-collections-3.2.1.jar:/usr/lib/hadoop/lib/jackson-jaxrs-1.8.8.jar:/usr/lib/hadoop/lib/jackson-xc-1.8.8.jar:/usr/lib/hadoop/lib/jaxb-impl-2.2.3-1.jar:/usr/lib/hadoop/lib/commons-logging-1.1.1.jar:/usr/lib/hadoop/lib/commons-compress-1.4.1.jar:/usr/lib/hadoop/lib/jetty-6.1.26.cloudera.2.jar:/usr/lib/hadoop/lib/jaxb-api-2.2.2.jar:/usr/lib/hadoop/lib/jets3t-0.6.1.jar:/usr/lib/hadoop/lib/commons-io-2.1.jar:/usr/lib/hadoop/lib/jersey-json-1.8.jar:/usr/lib/hadoop/lib/asm-3.2.jar:/usr/lib/hadoop/lib/commons-cli-1.2.jar:/usr/lib/hadoop/lib/slf4j-api-1.6.1.jar:/usr/lib/hadoop/lib/snappy-java-1.0.4.1.jar:/usr/lib/hadoop/lib/guava-11.0.2.jar:/usr/lib/hadoop/lib/avro-1.7.4.jar:/usr/lib/hadoop/lib/commons-beanutils-1.7.0.jar:/usr/lib/hadoop/lib/jackson-mapper-asl-1.8.8.jar:/usr/lib/hadoop/lib/paranamer-2.3.jar:/usr/lib/hadoop/.//hadoop-auth.jar:/usr/lib/hadoop/.//hadoop-common-2.0.0-cdh4.4.0-tests.jar:/usr/lib/hadoop/.//hadoop-annotations-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop/.//hadoop-annotations.jar:/usr/lib/hadoop/.//hadoop-common.jar:/usr/lib/hadoop/.//hadoop-auth-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop/.//hadoop-common-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-hdfs/./:/usr/lib/hadoop-hdfs/lib/xmlenc-0.52.jar:/usr/lib/hadoop-hdfs/lib/jsp-api-2.1.jar:/usr/lib/hadoop-hdfs/lib/servlet-api-2.5.jar:/usr/lib/hadoop-hdfs/lib/zookeeper-3.4.5-cdh4.4.0.jar:/usr/lib/hadoop-hdfs/lib/jsr305-1.3.9.jar:/usr/lib/hadoop-hdfs/lib/jline-0.9.94.jar:/usr/lib/hadoop-hdfs/lib/commons-codec-1.4.jar:/usr/lib/hadoop-hdfs/lib/protobuf-java-2.4.0a.jar:/usr/lib/hadoop-hdfs/lib/jetty-util-6.1.26.cloudera.2.jar:/usr/lib/hadoop-hdfs/lib/jersey-server-1.8.jar:/usr/lib/hadoop-hdfs/lib/commons-lang-2.5.jar:/usr/lib/hadoop-hdfs/lib/jackson-core-asl-1.8.8.jar:/usr/lib/hadoop-hdfs/lib/log4j-1.2.17.jar:/usr/lib/hadoop-hdfs/lib/jersey-core-1.8.jar:/usr/lib/hadoop-hdfs/lib/jasper-runtime-5.5.23.jar:/usr/lib/hadoop-hdfs/lib/commons-el-1.0.jar:/usr/lib/hadoop-hdfs/lib/commons-logging-1.1.1.jar:/usr/lib/hadoop-hdfs/lib/jetty-6.1.26.cloudera.2.jar:/usr/lib/hadoop-hdfs/lib/commons-daemon-1.0.3.jar:/usr/lib/hadoop-hdfs/lib/commons-io-2.1.jar:/usr/lib/hadoop-hdfs/lib/asm-3.2.jar:/usr/lib/hadoo
p-hdfs/lib/commons-cli-1.2.jar:/usr/lib/hadoop-hdfs/lib/guava-11.0.2.jar:/usr/lib/hadoop-hdfs/lib/jackson-mapper-asl-1.8.8.jar:/usr/lib/hadoop-hdfs/.//hadoop-hdfs-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-hdfs/.//hadoop-hdfs-2.0.0-cdh4.4.0-tests.jar:/usr/lib/hadoop-hdfs/.//hadoop-hdfs.jar:/usr/lib/hadoop-yarn/lib/javax.inject-1.jar:/usr/lib/hadoop-yarn/lib/protobuf-java-2.4.0a.jar:/usr/lib/hadoop-yarn/lib/jersey-server-1.8.jar:/usr/lib/hadoop-yarn/lib/jackson-core-asl-1.8.8.jar:/usr/lib/hadoop-yarn/lib/log4j-1.2.17.jar:/usr/lib/hadoop-yarn/lib/xz-1.0.jar:/usr/lib/hadoop-yarn/lib/aopalliance-1.0.jar:/usr/lib/hadoop-yarn/lib/jersey-core-1.8.jar:/usr/lib/hadoop-yarn/lib/netty-3.2.4.Final.jar:/usr/lib/hadoop-yarn/lib/jersey-guice-1.8.jar:/usr/lib/hadoop-yarn/lib/guice-3.0.jar:/usr/lib/hadoop-yarn/lib/commons-compress-1.4.1.jar:/usr/lib/hadoop-yarn/lib/commons-io-2.1.jar:/usr/lib/hadoop-yarn/lib/asm-3.2.jar:/usr/lib/hadoop-yarn/lib/snappy-java-1.0.4.1.jar:/usr/lib/hadoop-yarn/lib/avro-1.7.4.jar:/usr/lib/hadoop-yarn/lib/guice-servlet-3.0.jar:/usr/lib/hadoop-yarn/lib/jackson-mapper-asl-1.8.8.jar:/usr/lib/hadoop-yarn/lib/paranamer-2.3.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-nodemanager-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-common.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-client.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-tests.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-applications-distributedshell.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-tests-2.0.0-cdh4.4.0-tests.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-resourcemanager-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-common.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-client-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-common-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-applications-unmanaged-am-launcher.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-applications-distributedshell-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-resourcemanager.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-api.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-web-proxy-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-applications-unmanaged-am-launcher-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-nodemanager.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-api-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-web-proxy.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-site-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-site.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-common-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-tests-2.0.0-cdh4.4.0.jar:/usr/lib/hadoop-0.20-mapreduce/.//* | |
STARTUP_MSG: build = file:///var/lib/jenkins/workspace/generic-package-ubuntu64-12-04/CDH4.4.0-Packaging-Hadoop-2013-09-03_18-48-35/hadoop-2.0.0+1475-1.cdh4.4.0.p0.23~precise/src/hadoop-common-project/hadoop-common -r c0eba6cd38c984557e96a16ccd7356b7de835e79; compiled by 'jenkins' on Tue Sep 3 19:33:54 PDT 2013
STARTUP_MSG: java = 1.6.0_27
************************************************************/
13/10/17 14:42:39 INFO namenode.NameNode: registered UNIX signal handlers for [TERM, HUP, INT]
13/10/17 14:42:39 FATAL namenode.NameNode: Exception in namenode join
java.lang.IllegalArgumentException: Does not contain a valid host:port authority: Ubuntu_Cluster_02:8020
at org.apache.hadoop.net.NetUtils.createSocketAddr(NetUtils.java:210)
at org.apache.hadoop.net.NetUtils.createSocketAddr(NetUtils.java:162)
at org.apache.hadoop.net.NetUtils.createSocketAddr(NetUtils.java:151)
at org.apache.hadoop.hdfs.DFSUtil.getAddressesForNameserviceId(DFSUtil.java:489)
at org.apache.hadoop.hdfs.DFSUtil.getAddresses(DFSUtil.java:471)
at org.apache.hadoop.hdfs.DFSUtil.getHaNnRpcAddresses(DFSUtil.java:533)
at org.apache.hadoop.hdfs.HAUtil.isHAEnabled(HAUtil.java:68)
at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1118)
at org.apache.hadoop.hdfs.server.namenode.NameNode.main(NameNode.java:1233)
13/10/17 14:42:39 INFO util.ExitUtil: Exiting with status 1
13/10/17 14:42:39 INFO namenode.NameNode: SHUTDOWN_MSG:
/************************************************************
SHUTDOWN_MSG: Shutting down NameNode at ubuntu/10.0.10.53
************************************************************/
FATAL: all hosts have already failed -- aborting
PLAY RECAP ********************************************************************
to retry, use: --limit @/Users/kbroughton/site.retry
Ubuntu_Cluster_01 : ok=35 changed=10 unreachable=0 failed=0
Ubuntu_Cluster_02 : ok=49 changed=16 unreachable=0 failed=1
Ubuntu_Cluster_03 : ok=48 changed=16 unreachable=0 failed=0
Ubuntu_Cluster_04 : ok=48 changed=16 unreachable=0 failed=0
Ubuntu_Cluster_05 : ok=48 changed=16 unreachable=0 failed=0
Ubuntu_Cluster_06 : ok=51 changed=22 unreachable=0 failed=0
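The actual abort is the IllegalArgumentException from NetUtils: Hadoop will not accept "Ubuntu_Cluster_02:8020" as a host:port authority. One likely explanation, assuming the rendered core-site.xml/hdfs-site.xml take their hostnames straight from the inventory, is the underscore: underscores are not valid in DNS hostnames, so Java's URI parsing (which NetUtils relies on) returns a null host and the namenode refuses to format. If that is the cause here, renaming the inventory aliases to something DNS-safe should let the format step proceed; a hypothetical sketch of the change, not a verified fix:

# hypothetical reworked inventory entries, assuming the underscore is the
# problem: DNS-safe aliases, with ansible_ssh_host keeping the connection
# pointed at the same machine (10.0.10.53 is the address in STARTUP_MSG)
[namenodes]
ubuntu-cluster-02 ansible_ssh_host=10.0.10.53 ganglia_accept=true
# ...rename the remaining Ubuntu_Cluster_0x entries the same way

Note that the renamed hosts would still need to resolve on the cluster nodes themselves (via DNS or /etc/hosts), since the names end up in the Hadoop configuration files, not just in Ansible.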
- hosts: journalnodes:namenodes:datanodes
  user: ansibler
  accelerate: use_accelerate
  sudo: true
  roles:
    - cdh_hadoop_common
    - cdh_hadoop_config
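Since the run aborts in the namenodes[0] play, one way to iterate faster while debugging is to push the configuration to a single namenode instead of the whole journalnodes:namenodes:datanodes pattern. A hypothetical narrowed variant of the play above (not from the repo), in addition to the --limit retry file suggested in the recap:

# hypothetical debugging variant: apply the CDH config roles to the
# first namenode only (assumption, not part of the original playbooks)
- hosts: namenodes[0]
  user: ansibler
  sudo: true
  roles:
    - cdh_hadoop_common
    - cdh_hadoop_config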