more hadoop woes
[root@rhel1 ~]# netstat -tunalp | grep LISTEN
tcp 0 0 0.0.0.0:111 0.0.0.0:* LISTEN 1259/rpcbind
tcp 0 0 172.16.0.6:19888 0.0.0.0:* LISTEN 1655/java
tcp 0 0 0.0.0.0:60787 0.0.0.0:* LISTEN 1277/rpc.statd
tcp 0 0 172.16.0.6:8020 0.0.0.0:* LISTEN 1541/java
tcp 0 0 0.0.0.0:50070 0.0.0.0:* LISTEN 1541/java
tcp 0 0 0.0.0.0:22 0.0.0.0:* LISTEN 1364/sshd
tcp 0 0 127.0.0.1:631 0.0.0.0:* LISTEN 1331/cupsd
tcp 0 0 127.0.0.1:25 0.0.0.0:* LISTEN 1440/master
tcp 0 0 172.16.0.6:10020 0.0.0.0:* LISTEN 1655/java
tcp 0 0 :::111 :::* LISTEN 1259/rpcbind
tcp 0 0 :::22 :::* LISTEN 1364/sshd
tcp 0 0 ::1:631 :::* LISTEN 1331/cupsd
tcp 0 0 :::11000 :::* LISTEN 1975/java
tcp 0 0 ::ffff:172.16.0.6:8088 :::* LISTEN 1806/java
tcp 0 0 ::ffff:127.0.0.1:11001 :::* LISTEN 1975/java
tcp 0 0 :::48540 :::* LISTEN 1875/java
tcp 0 0 ::ffff:172.16.0.6:8030 :::* LISTEN 1806/java
tcp 0 0 ::ffff:172.16.0.6:8031 :::* LISTEN 1806/java
tcp 0 0 ::ffff:172.16.0.6:8032 :::* LISTEN 1806/java
tcp 0 0 ::ffff:172.16.0.6:8033 :::* LISTEN 1806/java
tcp 0 0 :::53508 :::* LISTEN 1277/rpc.statd
tcp 0 0 :::2181 :::* LISTEN 1875/java
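Note: per the listing above, the NameNode RPC port (8020, pid 1541) is bound only to 172.16.0.6, while the HTTP UI (50070) listens on 0.0.0.0. In CDH4 the RPC address comes from fs.defaultFS (or the older fs.default.name) in core-site.xml; a quick check, assuming the client configuration lives under /etc/hadoop/conf (adjust if alternatives points elsewhere):

grep -A1 -E 'fs.defaultFS|fs.default.name' /etc/hadoop/conf/core-site.xml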
[root@rhel1 ~]# /usr/java/jdk1.7.0_60/bin/jps
1806 ResourceManager
1975 Bootstrap
2644 Jps
1875 QuorumPeerMain
1541 NameNode
1655 JobHistoryServer
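jps confirms pid 1541 is the NameNode, matching the 8020 listener in the netstat output above. If in doubt, the pid-to-port mapping can be cross-checked directly:

netstat -tnlp | grep 1541    # should show 172.16.0.6:8020 and 0.0.0.0:50070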
[root@rhel2 hadoop-hdfs]# telnet 172.16.0.6 22
Trying 172.16.0.6...
Connected to 172.16.0.6.
Escape character is '^]'.
SSH-2.0-OpenSSH_5.3
^]
[root@rhel2 hadoop-hdfs]# telnet 172.16.0.6 8020
Trying 172.16.0.6...
telnet: connect to address 172.16.0.6: No route to host
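Port 22 accepts connections but 8020 returns "No route to host", even though netstat on rhel1 shows a listener on 172.16.0.6:8020. On RHEL 6 that combination usually means the host firewall is rejecting the packet with icmp-host-prohibited rather than an actual routing failure. A rough check on rhel1, assuming the stock iptables service (the exact rules and port list are guesses to adapt):

iptables -L INPUT -n --line-numbers               # look for a REJECT ... icmp-host-prohibited rule above any ACCEPT for 8020
iptables -I INPUT -p tcp --dport 8020 -j ACCEPT   # temporary test rule; persist with 'service iptables save' if it fixes the telnet
# the other Hadoop ports (50010, 50020, 50075, 8030-8033, 8088, ...) likely need the same treatment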
[root@rhel2 hadoop-hdfs]# ping rhel1.local
PING rhel1.local (172.16.0.6) 56(84) bytes of data.
64 bytes from rhel1.local (172.16.0.6): icmp_seq=1 ttl=64 time=0.401 ms
64 bytes from rhel1.local (172.16.0.6): icmp_seq=2 ttl=64 time=0.478 ms
64 bytes from rhel1.local (172.16.0.6): icmp_seq=3 ttl=64 time=0.356 ms
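ICMP reaches rhel1 fine, so the inter-VM route itself works; this again points at a per-port reject (or traffic leaving on an unexpected adapter) rather than a missing route. A quick way to see which interface and source address are actually used, assuming iproute is installed (interface names depend on the VirtualBox NIC order):

ip route get 172.16.0.6    # shows the outgoing interface and source address used to reach rhel1
ip addr show               # confirm 172.16.0.x sits on the intended host-only/internal adapter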
[root@rhel2 hadoop-hdfs]# cat hadoop-hdfs-datanode-rhel2.out
2014-07-22 17:47:00,681 INFO [main] datanode.DataNode (StringUtils.java:startupShutdownMessage(597)) - STARTUP_MSG:
/************************************************************
STARTUP_MSG: Starting DataNode
STARTUP_MSG: host = rhel2.local/172.16.0.2
STARTUP_MSG: args = []
STARTUP_MSG: version = 2.0.0-cdh4.1.3
STARTUP_MSG: classpath = /etc/hadoop/conf:/usr/lib/hadoop/lib/jersey-core-1.8.jar:/usr/lib/hadoop/lib/commons-cli-1.2.jar:/usr/lib/hadoop/lib/jaxb-impl-2.2.3-1.jar:/usr/lib/hadoop/lib/jackson-mapper-asl-1.8.8.jar:/usr/lib/hadoop/lib/jsch-0.1.42.jar:/usr/lib/hadoop/lib/asm-3.2.jar:/usr/lib/hadoop/lib/kfs-0.3.jar:/usr/lib/hadoop/lib/jsr305-1.3.9.jar:/usr/lib/hadoop/lib/commons-logging-1.1.1.jar:/usr/lib/hadoop/lib/jets3t-0.6.1.jar:/usr/lib/hadoop/lib/xmlenc-0.52.jar:/usr/lib/hadoop/lib/jetty-6.1.26.cloudera.2.jar:/usr/lib/hadoop/lib/jsp-api-2.1.jar:/usr/lib/hadoop/lib/commons-collections-3.2.1.jar:/usr/lib/hadoop/lib/jackson-core-asl-1.8.8.jar:/usr/lib/hadoop/lib/guava-11.0.2.jar:/usr/lib/hadoop/lib/zookeeper-3.4.3-cdh4.1.3.jar:/usr/lib/hadoop/lib/servlet-api-2.5.jar:/usr/lib/hadoop/lib/log4j-1.2.17.jar:/usr/lib/hadoop/lib/avro-1.7.1.cloudera.2.jar:/usr/lib/hadoop/lib/jersey-server-1.8.jar:/usr/lib/hadoop/lib/jasper-compiler-5.5.23.jar:/usr/lib/hadoop/lib/jackson-jaxrs-1.8.8.jar:/usr/lib/hadoop/lib/jline-0.9.94.jar:/usr/lib/hadoop/lib/commons-el-1.0.jar:/usr/lib/hadoop/lib/commons-io-2.1.jar:/usr/lib/hadoop/lib/commons-configuration-1.6.jar:/usr/lib/hadoop/lib/commons-net-3.1.jar:/usr/lib/hadoop/lib/jackson-xc-1.8.8.jar:/usr/lib/hadoop/lib/jasper-runtime-5.5.23.jar:/usr/lib/hadoop/lib/commons-digester-1.8.jar:/usr/lib/hadoop/lib/slf4j-api-1.6.1.jar:/usr/lib/hadoop/lib/commons-beanutils-core-1.8.0.jar:/usr/lib/hadoop/lib/commons-httpclient-3.1.jar:/usr/lib/hadoop/lib/activation-1.1.jar:/usr/lib/hadoop/lib/jetty-util-6.1.26.cloudera.2.jar:/usr/lib/hadoop/lib/commons-beanutils-1.7.0.jar:/usr/lib/hadoop/lib/commons-lang-2.5.jar:/usr/lib/hadoop/lib/snappy-java-1.0.4.1.jar:/usr/lib/hadoop/lib/junit-4.8.2.jar:/usr/lib/hadoop/lib/stax-api-1.0.1.jar:/usr/lib/hadoop/lib/commons-math-2.1.jar:/usr/lib/hadoop/lib/mockito-all-1.8.5.jar:/usr/lib/hadoop/lib/jettison-1.1.jar:/usr/lib/hadoop/lib/jaxb-api-2.2.2.jar:/usr/lib/hadoop/lib/paranamer-2.3.jar:/usr/lib/hadoop/lib/protobuf-java-2.4.0a.jar:/usr/lib/hadoop/lib/jersey-json-1.8.jar:/usr/lib/hadoop/lib/slf4j-log4j12-1.6.1.jar:/usr/lib/hadoop/lib/commons-codec-1.4.jar:/usr/lib/hadoop/.//hadoop-annotations-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop/.//hadoop-common.jar:/usr/lib/hadoop/.//hadoop-common-2.0.0-cdh4.1.3-tests.jar:/usr/lib/hadoop/.//hadoop-common-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop/.//hadoop-annotations.jar:/usr/lib/hadoop/.//hadoop-auth-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop/.//hadoop-auth.jar:/usr/lib/hadoop-hdfs/./:/usr/lib/hadoop-hdfs/lib/jersey-core-1.8.jar:/usr/lib/hadoop-hdfs/lib/commons-cli-1.2.jar:/usr/lib/hadoop-hdfs/lib/jackson-mapper-asl-1.8.8.jar:/usr/lib/hadoop-hdfs/lib/asm-3.2.jar:/usr/lib/hadoop-hdfs/lib/jsr305-1.3.9.jar:/usr/lib/hadoop-hdfs/lib/commons-logging-1.1.1.jar:/usr/lib/hadoop-hdfs/lib/xmlenc-0.52.jar:/usr/lib/hadoop-hdfs/lib/jetty-6.1.26.cloudera.2.jar:/usr/lib/hadoop-hdfs/lib/jsp-api-2.1.jar:/usr/lib/hadoop-hdfs/lib/jackson-core-asl-1.8.8.jar:/usr/lib/hadoop-hdfs/lib/guava-11.0.2.jar:/usr/lib/hadoop-hdfs/lib/zookeeper-3.4.3-cdh4.1.3.jar:/usr/lib/hadoop-hdfs/lib/servlet-api-2.5.jar:/usr/lib/hadoop-hdfs/lib/log4j-1.2.17.jar:/usr/lib/hadoop-hdfs/lib/jersey-server-1.8.jar:/usr/lib/hadoop-hdfs/lib/jline-0.9.94.jar:/usr/lib/hadoop-hdfs/lib/commons-el-1.0.jar:/usr/lib/hadoop-hdfs/lib/commons-io-2.1.jar:/usr/lib/hadoop-hdfs/lib/commons-daemon-1.0.3.jar:/usr/lib/hadoop-hdfs/lib/jasper-runtime-5.5.23.jar:/usr/lib/hadoop-hdfs/lib/jetty-util-6.1.26.cloudera.2.jar:/usr/lib/hadoop-hdfs/lib/commons-lang-2.5.jar:/usr/lib/hadoop-hdfs/lib/protobuf-jav
a-2.4.0a.jar:/usr/lib/hadoop-hdfs/lib/commons-codec-1.4.jar:/usr/lib/hadoop-hdfs/.//hadoop-hdfs-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-hdfs/.//hadoop-hdfs-2.0.0-cdh4.1.3-tests.jar:/usr/lib/hadoop-hdfs/.//hadoop-hdfs.jar:/usr/lib/hadoop-yarn/lib/jersey-core-1.8.jar:/usr/lib/hadoop-yarn/lib/jackson-mapper-asl-1.8.8.jar:/usr/lib/hadoop-yarn/lib/asm-3.2.jar:/usr/lib/hadoop-yarn/lib/netty-3.2.4.Final.jar:/usr/lib/hadoop-yarn/lib/jackson-core-asl-1.8.8.jar:/usr/lib/hadoop-yarn/lib/jersey-guice-1.8.jar:/usr/lib/hadoop-yarn/lib/log4j-1.2.17.jar:/usr/lib/hadoop-yarn/lib/avro-1.7.1.cloudera.2.jar:/usr/lib/hadoop-yarn/lib/jersey-server-1.8.jar:/usr/lib/hadoop-yarn/lib/guice-3.0.jar:/usr/lib/hadoop-yarn/lib/commons-io-2.1.jar:/usr/lib/hadoop-yarn/lib/aopalliance-1.0.jar:/usr/lib/hadoop-yarn/lib/javax.inject-1.jar:/usr/lib/hadoop-yarn/lib/snappy-java-1.0.4.1.jar:/usr/lib/hadoop-yarn/lib/guice-servlet-3.0.jar:/usr/lib/hadoop-yarn/lib/paranamer-2.3.jar:/usr/lib/hadoop-yarn/lib/protobuf-java-2.4.0a.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-applications-distributedshell.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-api-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-site-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-common-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-api.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-site.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-nodemanager.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-tests.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-tests-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-common.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-applications-distributedshell-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-web-proxy-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-tests-2.0.0-cdh4.1.3-tests.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-resourcemanager.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-web-proxy.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-resourcemanager-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-common-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-server-nodemanager-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-yarn/.//hadoop-yarn-common.jar:/usr/lib/hadoop-mapreduce/lib/jersey-core-1.8.jar:/usr/lib/hadoop-mapreduce/lib/jackson-mapper-asl-1.8.8.jar:/usr/lib/hadoop-mapreduce/lib/asm-3.2.jar:/usr/lib/hadoop-mapreduce/lib/netty-3.2.4.Final.jar:/usr/lib/hadoop-mapreduce/lib/jackson-core-asl-1.8.8.jar:/usr/lib/hadoop-mapreduce/lib/jersey-guice-1.8.jar:/usr/lib/hadoop-mapreduce/lib/log4j-1.2.17.jar:/usr/lib/hadoop-mapreduce/lib/avro-1.7.1.cloudera.2.jar:/usr/lib/hadoop-mapreduce/lib/jersey-server-1.8.jar:/usr/lib/hadoop-mapreduce/lib/guice-3.0.jar:/usr/lib/hadoop-mapreduce/lib/commons-io-2.1.jar:/usr/lib/hadoop-mapreduce/lib/aopalliance-1.0.jar:/usr/lib/hadoop-mapreduce/lib/javax.inject-1.jar:/usr/lib/hadoop-mapreduce/lib/snappy-java-1.0.4.1.jar:/usr/lib/hadoop-mapreduce/lib/guice-servlet-3.0.jar:/usr/lib/hadoop-mapreduce/lib/paranamer-2.3.jar:/usr/lib/hadoop-mapreduce/lib/protobuf-java-2.4.0a.jar:/usr/lib/hadoop-mapreduce/.//hadoop-gridmix-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-streaming-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-archives-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-distcp.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-core-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-common-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-jobclient-2.0.0-cdh4.1.3-tests.jar:/usr/lib/hadoo
p-mapreduce/.//hadoop-streaming.jar:/usr/lib/hadoop-mapreduce/.//hadoop-archives.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-shuffle-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-app.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-hs.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-examples.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-jobclient.jar:/usr/lib/hadoop-mapreduce/.//hadoop-datajoin-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-shuffle.jar:/usr/lib/hadoop-mapreduce/.//hadoop-gridmix.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-jobclient-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-core.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-common.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-hs-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-datajoin.jar:/usr/lib/hadoop-mapreduce/.//hadoop-extras.jar:/usr/lib/hadoop-mapreduce/.//hadoop-extras-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-client-app-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-rumen.jar:/usr/lib/hadoop-mapreduce/.//hadoop-rumen-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-mapreduce-examples-2.0.0-cdh4.1.3.jar:/usr/lib/hadoop-mapreduce/.//hadoop-distcp-2.0.0-cdh4.1.3.jar | |
STARTUP_MSG: build = file:///data/1/jenkins/workspace/generic-package-rhel64-6-0/topdir/BUILD/hadoop-2.0.0-cdh4.1.3/src/hadoop-common-project/hadoop-common -r dbc7a60f9a798ef63afb7f5b723dc9c02d5321e1; compiled by 'jenkins' on Sat Jan 26 16:46:14 PST 2013
************************************************************/
2014-07-22 17:47:04,417 WARN [main] common.Util (Util.java:stringAsURI(56)) - Path /data1/dfs/dn should be specified as a URI in configuration files. Please update hdfs configuration.
2014-07-22 17:47:16,706 WARN [main] impl.MetricsConfig (MetricsConfig.java:loadFirst(125)) - Cannot locate configuration: tried hadoop-metrics2-datanode.properties,hadoop-metrics2.properties
2014-07-22 17:47:17,589 INFO [main] impl.MetricsSystemImpl (MetricsSystemImpl.java:startTimer(344)) - Scheduled snapshot period at 10 second(s).
2014-07-22 17:47:17,589 INFO [main] impl.MetricsSystemImpl (MetricsSystemImpl.java:start(183)) - DataNode metrics system started
2014-07-22 17:47:17,821 INFO [main] datanode.DataNode (DataNode.java:<init>(315)) - Configured hostname is rhel2.local
2014-07-22 17:47:18,082 INFO [main] datanode.DataNode (DataNode.java:initDataXceiver(538)) - Opened streaming server at /0.0.0.0:50010
2014-07-22 17:47:18,147 INFO [main] datanode.DataNode (DataXceiverServer.java:<init>(77)) - Balancing bandwith is 1048576 bytes/s
2014-07-22 17:47:18,324 INFO [main] mortbay.log (Slf4jLog.java:info(67)) - Logging to org.slf4j.impl.Log4jLoggerAdapter(org.mortbay.log) via org.mortbay.log.Slf4jLog
2014-07-22 17:47:18,548 INFO [main] http.HttpServer (HttpServer.java:addGlobalFilter(504)) - Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer$QuotingInputFilter)
2014-07-22 17:47:18,553 INFO [main] http.HttpServer (HttpServer.java:addFilter(482)) - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context datanode
2014-07-22 17:47:18,553 INFO [main] http.HttpServer (HttpServer.java:addFilter(489)) - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
2014-07-22 17:47:18,553 INFO [main] http.HttpServer (HttpServer.java:addFilter(489)) - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
2014-07-22 17:47:18,560 INFO [main] datanode.DataNode (DataNode.java:startInfoServer(368)) - Opened info server at 0.0.0.0:50075
2014-07-22 17:47:18,642 INFO [main] datanode.DataNode (WebHdfsFileSystem.java:isEnabled(143)) - dfs.webhdfs.enabled = false
2014-07-22 17:47:18,643 INFO [main] http.HttpServer (HttpServer.java:start(662)) - Jetty bound to port 50075
2014-07-22 17:47:18,646 INFO [main] mortbay.log (Slf4jLog.java:info(67)) - jetty-6.1.26.cloudera.2
2014-07-22 17:47:19,697 INFO [main] mortbay.log (Slf4jLog.java:info(67)) - Started [email protected]:50075
2014-07-22 17:47:20,191 INFO [Socket Reader #1 for port 50020] ipc.Server (Server.java:run(483)) - Starting Socket Reader #1 for port 50020
2014-07-22 17:47:20,220 INFO [main] datanode.DataNode (DataNode.java:initIpcServer(434)) - Opened IPC server at /0.0.0.0:50020
2014-07-22 17:47:20,245 INFO [main] datanode.DataNode (BlockPoolManager.java:refreshNamenodes(148)) - Refresh request received for nameservices: null
2014-07-22 17:47:20,301 INFO [main] datanode.DataNode (BlockPoolManager.java:doRefreshNamenodes(193)) - Starting BPOfferServices for nameservices: <default>
2014-07-22 17:47:20,310 WARN [main] common.Util (Util.java:stringAsURI(56)) - Path /data1/dfs/dn should be specified as a URI in configuration files. Please update hdfs configuration.
2014-07-22 17:47:20,339 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] datanode.DataNode (BPServiceActor.java:run(655)) - Block pool <registering> (storage id unknown) service to rhel1.local/172.16.0.6:8020 starting to offer service
2014-07-22 17:47:20,605 INFO [IPC Server listener on 50020] ipc.Server (Server.java:run(607)) - IPC Server listener on 50020: starting
2014-07-22 17:47:20,631 INFO [IPC Server Responder] ipc.Server (Server.java:run(776)) - IPC Server Responder: starting
2014-07-22 17:47:21,797 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:22,798 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:23,799 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:24,801 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:25,802 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:26,804 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:27,805 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:28,806 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:29,808 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:30,810 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:30,815 WARN [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] datanode.DataNode (BPServiceActor.java:retrieveNamespaceInfo(165)) - Problem connecting to server: rhel1.local/172.16.0.6:8020
2014-07-22 17:47:36,817 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:37,819 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:38,820 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:39,822 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:40,823 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:41,824 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:42,826 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:43,827 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
2014-07-22 17:47:44,829 INFO [DataNode: [file:/data1/dfs/dn] heartbeating to rhel1.local/172.16.0.6:8020] ipc.Client (Client.java:handleConnectionFailure(737)) - Retrying connect to server: rhel1.local/172.16.0.6:8020. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
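Two separate things show up in this log: the repeated connect retries to rhel1.local:8020 (the same reachability problem the telnet test exposed) and the warning that /data1/dfs/dn should be given as a URI. The retries should stop on their own once 8020 is reachable; the URI warning is harmless but can be silenced by switching the data directory to a file:// URI. A sketch, assuming the property is set in /etc/hadoop/conf/hdfs-site.xml (dfs.datanode.data.dir in CDH4, dfs.data.dir in older configs):

grep -B1 -A2 -E 'dfs.datanode.data.dir|dfs.data.dir' /etc/hadoop/conf/hdfs-site.xml
# change the value from /data1/dfs/dn to file:///data1/dfs/dn, then restart the datanode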
Another issue with VirtualBox 'internal interfaces'.
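If these VMs sit on a VirtualBox internal network, every guest NIC has to be attached to the same internal network name; a NIC that silently ended up on a different intnet (or on NAT) gives exactly this sort of partial connectivity. A rough check from the VirtualBox host, where "rhel1" and "rhel2" stand in for whatever the VMs are actually named:

VBoxManage showvminfo rhel1 | grep -i nic
VBoxManage showvminfo rhel2 | grep -i nic
# both should report the same attachment, e.g. Attachment: Internal Network 'intnet'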