Created May 21, 2014 12:38
[hdfs@node1 ~]$ hdfs namenode
14/05/21 12:37:13 INFO namenode.NameNode: STARTUP_MSG:
/************************************************************
STARTUP_MSG: Starting NameNode
STARTUP_MSG: host = node1/54.255.200.6
STARTUP_MSG: args = []
STARTUP_MSG: version = 2.2.0
STARTUP_MSG: classpath = /opt/hadoop/etc/hadoop:/opt/hadoop/share/hadoop/common/lib/commons-logging-1.1.1.jar:/opt/hadoop/share/hadoop/common/lib/jetty-util-6.1.26.jar:/opt/hadoop/share/hadoop/common/lib/paranamer-2.3.jar:/opt/hadoop/share/hadoop/common/lib/zookeeper-3.4.5.jar:/opt/hadoop/share/hadoop/common/lib/activation-1.1.jar:/opt/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar:/opt/hadoop/share/hadoop/common/lib/guava-11.0.2.jar:/opt/hadoop/share/hadoop/common/lib/commons-compress-1.4.1.jar:/opt/hadoop/share/hadoop/common/lib/jettison-1.1.jar:/opt/hadoop/share/hadoop/common/lib/commons-digester-1.8.jar:/opt/hadoop/share/hadoop/common/lib/servlet-api-2.5.jar:/opt/hadoop/share/hadoop/common/lib/stax-api-1.0.1.jar:/opt/hadoop/share/hadoop/common/lib/jasper-runtime-5.5.23.jar:/opt/hadoop/share/hadoop/common/lib/avro-1.7.4.jar:/opt/hadoop/share/hadoop/common/lib/junit-4.8.2.jar:/opt/hadoop/share/hadoop/common/lib/jaxb-api-2.2.2.jar:/opt/hadoop/share/hadoop/common/lib/commons-io-2.1.jar:/opt/hadoop/share/hadoop/common/lib/jersey-json-1.9.jar:/opt/hadoop/share/hadoop/common/lib/mockito-all-1.8.5.jar:/opt/hadoop/share/hadoop/common/lib/jackson-mapper-asl-1.8.8.jar:/opt/hadoop/share/hadoop/common/lib/snappy-java-1.0.4.1.jar:/opt/hadoop/share/hadoop/common/lib/jackson-jaxrs-1.8.8.jar:/opt/hadoop/share/hadoop/common/lib/asm-3.2.jar:/opt/hadoop/share/hadoop/common/lib/jaxb-impl-2.2.3-1.jar:/opt/hadoop/share/hadoop/common/lib/hadoop-auth-2.2.0.jar:/opt/hadoop/share/hadoop/common/lib/protobuf-java-2.5.0.jar:/opt/hadoop/share/hadoop/common/lib/commons-math-2.1.jar:/opt/hadoop/share/hadoop/common/lib/commons-lang-2.5.jar:/opt/hadoop/share/hadoop/common/lib/commons-el-1.0.jar:/opt/hadoop/share/hadoop/common/lib/commons-codec-1.4.jar:/opt/hadoop/share/hadoop/common/lib/jersey-core-1.9.jar:/opt/hadoop/share/hadoop/common/lib/jackson-xc-1.8.8.jar:/opt/hadoop/share/hadoop/common/lib/commons-cli-1.2.jar:/opt/hadoop/share/hadoop/common/lib/jsch-0.1.42.jar:/opt/hadoop/share/hadoop/common/lib/log4j-1.2.17.jar:/opt/hadoop/share/hadoop/common/lib/jsr305-1.3.9.jar:/opt/hadoop/share/hadoop/common/lib/netty-3.6.2.Final.jar:/opt/hadoop/share/hadoop/common/lib/jetty-6.1.26.jar:/opt/hadoop/share/hadoop/common/lib/jasper-compiler-5.5.23.jar:/opt/hadoop/share/hadoop/common/lib/commons-beanutils-core-1.8.0.jar:/opt/hadoop/share/hadoop/common/lib/jets3t-0.6.1.jar:/opt/hadoop/share/hadoop/common/lib/xmlenc-0.52.jar:/opt/hadoop/share/hadoop/common/lib/jsp-api-2.1.jar:/opt/hadoop/share/hadoop/common/lib/jersey-server-1.9.jar:/opt/hadoop/share/hadoop/common/lib/jackson-core-asl-1.8.8.jar:/opt/hadoop/share/hadoop/common/lib/slf4j-api-1.7.5.jar:/opt/hadoop/share/hadoop/common/lib/commons-httpclient-3.1.jar:/opt/hadoop/share/hadoop/common/lib/commons-configuration-1.6.jar:/opt/hadoop/share/hadoop/common/lib/commons-net-3.1.jar:/opt/hadoop/share/hadoop/common/lib/commons-collections-3.2.1.jar:/opt/hadoop/share/hadoop/common/lib/hadoop-annotations-2.2.0.jar:/opt/hadoop/share/hadoop/common/lib/commons-beanutils-1.7.0.jar:/opt/hadoop/share/hadoop/common/lib/xz-1.0.jar:/opt/hadoop/share/hadoop/common/hadoop-nfs-2.2.0.jar:/opt/hadoop/share/hadoop/common/hadoop-common-2.2.0-tests.jar:/opt/hadoop/share/hadoop/common/hadoop-common-2.2.0.jar:/opt/hadoop/share/hadoop/hdfs:/opt/hadoop/share/hadoop/hdfs/lib/commons-logging-1.1.1.jar:/opt/hadoop/share/hadoop/hdfs/lib/jetty-util-6.1.26.jar:/opt/hadoop/share/hadoop/hdfs/lib/guava-11.0.2.jar:/opt/hadoop/share/hadoop/hdfs/lib/servlet-api-2.5.jar:/opt/hadoop/share/hadoop/hdfs/lib/jasper-ru
ntime-5.5.23.jar:/opt/hadoop/share/hadoop/hdfs/lib/commons-io-2.1.jar:/opt/hadoop/share/hadoop/hdfs/lib/jackson-mapper-asl-1.8.8.jar:/opt/hadoop/share/hadoop/hdfs/lib/asm-3.2.jar:/opt/hadoop/share/hadoop/hdfs/lib/protobuf-java-2.5.0.jar:/opt/hadoop/share/hadoop/hdfs/lib/commons-lang-2.5.jar:/opt/hadoop/share/hadoop/hdfs/lib/commons-el-1.0.jar:/opt/hadoop/share/hadoop/hdfs/lib/commons-codec-1.4.jar:/opt/hadoop/share/hadoop/hdfs/lib/jersey-core-1.9.jar:/opt/hadoop/share/hadoop/hdfs/lib/commons-cli-1.2.jar:/opt/hadoop/share/hadoop/hdfs/lib/log4j-1.2.17.jar:/opt/hadoop/share/hadoop/hdfs/lib/jsr305-1.3.9.jar:/opt/hadoop/share/hadoop/hdfs/lib/netty-3.6.2.Final.jar:/opt/hadoop/share/hadoop/hdfs/lib/jetty-6.1.26.jar:/opt/hadoop/share/hadoop/hdfs/lib/xmlenc-0.52.jar:/opt/hadoop/share/hadoop/hdfs/lib/jsp-api-2.1.jar:/opt/hadoop/share/hadoop/hdfs/lib/jersey-server-1.9.jar:/opt/hadoop/share/hadoop/hdfs/lib/jackson-core-asl-1.8.8.jar:/opt/hadoop/share/hadoop/hdfs/lib/commons-daemon-1.0.13.jar:/opt/hadoop/share/hadoop/hdfs/hadoop-hdfs-nfs-2.2.0.jar:/opt/hadoop/share/hadoop/hdfs/hadoop-hdfs-2.2.0-tests.jar:/opt/hadoop/share/hadoop/hdfs/hadoop-hdfs-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/lib/paranamer-2.3.jar:/opt/hadoop/share/hadoop/yarn/lib/junit-4.10.jar:/opt/hadoop/share/hadoop/yarn/lib/guice-servlet-3.0.jar:/opt/hadoop/share/hadoop/yarn/lib/commons-compress-1.4.1.jar:/opt/hadoop/share/hadoop/yarn/lib/avro-1.7.4.jar:/opt/hadoop/share/hadoop/yarn/lib/commons-io-2.1.jar:/opt/hadoop/share/hadoop/yarn/lib/jersey-guice-1.9.jar:/opt/hadoop/share/hadoop/yarn/lib/jackson-mapper-asl-1.8.8.jar:/opt/hadoop/share/hadoop/yarn/lib/snappy-java-1.0.4.1.jar:/opt/hadoop/share/hadoop/yarn/lib/asm-3.2.jar:/opt/hadoop/share/hadoop/yarn/lib/javax.inject-1.jar:/opt/hadoop/share/hadoop/yarn/lib/protobuf-java-2.5.0.jar:/opt/hadoop/share/hadoop/yarn/lib/aopalliance-1.0.jar:/opt/hadoop/share/hadoop/yarn/lib/jersey-core-1.9.jar:/opt/hadoop/share/hadoop/yarn/lib/log4j-1.2.17.jar:/opt/hadoop/share/hadoop/yarn/lib/netty-3.6.2.Final.jar:/opt/hadoop/share/hadoop/yarn/lib/guice-3.0.jar:/opt/hadoop/share/hadoop/yarn/lib/jersey-server-1.9.jar:/opt/hadoop/share/hadoop/yarn/lib/jackson-core-asl-1.8.8.jar:/opt/hadoop/share/hadoop/yarn/lib/hamcrest-core-1.1.jar:/opt/hadoop/share/hadoop/yarn/lib/hadoop-annotations-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/lib/xz-1.0.jar:/opt/hadoop/share/hadoop/yarn/hadoop-yarn-applications-distributedshell-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/hadoop-yarn-applications-unmanaged-am-launcher-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/hadoop-yarn-server-resourcemanager-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/hadoop-yarn-common-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/hadoop-yarn-server-common-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/hadoop-yarn-server-web-proxy-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/hadoop-yarn-server-nodemanager-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/hadoop-yarn-server-tests-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/hadoop-yarn-api-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/hadoop-yarn-client-2.2.0.jar:/opt/hadoop/share/hadoop/yarn/hadoop-yarn-site-2.2.0.jar:/opt/hadoop/share/hadoop/mapreduce/lib/paranamer-2.3.jar:/opt/hadoop/share/hadoop/mapreduce/lib/junit-4.10.jar:/opt/hadoop/share/hadoop/mapreduce/lib/guice-servlet-3.0.jar:/opt/hadoop/share/hadoop/mapreduce/lib/commons-compress-1.4.1.jar:/opt/hadoop/share/hadoop/mapreduce/lib/avro-1.7.4.jar:/opt/hadoop/share/hadoop/mapreduce/lib/commons-io-2.1.jar:/opt/hadoop/share/hadoop/mapreduce/lib/jersey-guice-1.9.jar:/opt/hadoop/share/hadoop/mapreduce/lib/ja
ckson-mapper-asl-1.8.8.jar:/opt/hadoop/share/hadoop/mapreduce/lib/snappy-java-1.0.4.1.jar:/opt/hadoop/share/hadoop/mapreduce/lib/asm-3.2.jar:/opt/hadoop/share/hadoop/mapreduce/lib/javax.inject-1.jar:/opt/hadoop/share/hadoop/mapreduce/lib/protobuf-java-2.5.0.jar:/opt/hadoop/share/hadoop/mapreduce/lib/aopalliance-1.0.jar:/opt/hadoop/share/hadoop/mapreduce/lib/jersey-core-1.9.jar:/opt/hadoop/share/hadoop/mapreduce/lib/log4j-1.2.17.jar:/opt/hadoop/share/hadoop/mapreduce/lib/netty-3.6.2.Final.jar:/opt/hadoop/share/hadoop/mapreduce/lib/guice-3.0.jar:/opt/hadoop/share/hadoop/mapreduce/lib/jersey-server-1.9.jar:/opt/hadoop/share/hadoop/mapreduce/lib/jackson-core-asl-1.8.8.jar:/opt/hadoop/share/hadoop/mapreduce/lib/hamcrest-core-1.1.jar:/opt/hadoop/share/hadoop/mapreduce/lib/hadoop-annotations-2.2.0.jar:/opt/hadoop/share/hadoop/mapreduce/lib/xz-1.0.jar:/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-plugins-2.2.0.jar:/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-2.2.0.jar:/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-common-2.2.0.jar:/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.2.0.jar:/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.2.0.jar:/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.2.0.jar:/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.2.0-tests.jar:/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-shuffle-2.2.0.jar:/opt/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-app-2.2.0.jar:/opt/hadoop/contrib/capacity-scheduler/*.jar
STARTUP_MSG: build = https://svn.apache.org/repos/asf/hadoop/common -r 1529768; compiled by 'hortonmu' on 2013-10-07T06:28Z
STARTUP_MSG: java = 1.7.0_45
************************************************************/
14/05/21 12:37:13 INFO namenode.NameNode: registered UNIX signal handlers for [TERM, HUP, INT]
14/05/21 12:37:15 INFO impl.MetricsConfig: loaded properties from hadoop-metrics2.properties
14/05/21 12:37:15 INFO impl.MetricsSystemImpl: Scheduled snapshot period at 10 second(s).
14/05/21 12:37:15 INFO impl.MetricsSystemImpl: NameNode metrics system started
14/05/21 12:37:16 INFO mortbay.log: Logging to org.slf4j.impl.Log4jLoggerAdapter(org.mortbay.log) via org.mortbay.log.Slf4jLog
14/05/21 12:37:16 INFO http.HttpServer: Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer$QuotingInputFilter)
14/05/21 12:37:17 INFO http.HttpServer: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context hdfs
14/05/21 12:37:17 INFO http.HttpServer: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
14/05/21 12:37:17 INFO http.HttpServer: Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context logs
14/05/21 12:37:17 INFO http.HttpServer: dfs.webhdfs.enabled = false
14/05/21 12:37:17 INFO http.HttpServer: Jetty bound to port 50070
14/05/21 12:37:17 INFO mortbay.log: jetty-6.1.26
14/05/21 12:37:18 INFO mortbay.log: Started SelectChannelConnector@0.0.0.0:50070
14/05/21 12:37:18 INFO namenode.NameNode: Web-server up at: 0.0.0.0:50070
14/05/21 12:37:18 WARN namenode.FSNamesystem: Only one image storage directory (dfs.namenode.name.dir) configured. Beware of dataloss due to lack of redundant storage directories!
14/05/21 12:37:18 INFO namenode.HostFileManager: read includes:
HostSet(
)
14/05/21 12:37:18 INFO namenode.HostFileManager: read excludes:
HostSet(
)
14/05/21 12:37:18 INFO blockmanagement.DatanodeManager: dfs.block.invalidate.limit=1000
14/05/21 12:37:18 INFO util.GSet: Computing capacity for map BlocksMap
14/05/21 12:37:18 INFO util.GSet: VM type = 64-bit
14/05/21 12:37:18 INFO util.GSet: 2.0% max memory = 966.7 MB
14/05/21 12:37:18 INFO util.GSet: capacity = 2^21 = 2097152 entries
14/05/21 12:37:18 INFO blockmanagement.BlockManager: dfs.block.access.token.enable=false
14/05/21 12:37:18 INFO blockmanagement.BlockManager: defaultReplication = 3
14/05/21 12:37:18 INFO blockmanagement.BlockManager: maxReplication = 512
14/05/21 12:37:18 INFO blockmanagement.BlockManager: minReplication = 1
14/05/21 12:37:18 INFO blockmanagement.BlockManager: maxReplicationStreams = 2
14/05/21 12:37:18 INFO blockmanagement.BlockManager: shouldCheckForEnoughRacks = false
14/05/21 12:37:18 INFO blockmanagement.BlockManager: replicationRecheckInterval = 3000
14/05/21 12:37:18 INFO blockmanagement.BlockManager: encryptDataTransfer = false
14/05/21 12:37:18 INFO namenode.FSNamesystem: fsOwner = hdfs (auth:SIMPLE)
14/05/21 12:37:18 INFO namenode.FSNamesystem: supergroup = supergroup
14/05/21 12:37:18 INFO namenode.FSNamesystem: isPermissionEnabled = true
14/05/21 12:37:18 INFO namenode.FSNamesystem: Determined nameservice ID: mycluster
14/05/21 12:37:18 INFO namenode.FSNamesystem: HA Enabled: true
14/05/21 12:37:18 INFO namenode.FSNamesystem: Append Enabled: true
14/05/21 12:37:19 INFO util.GSet: Computing capacity for map INodeMap
14/05/21 12:37:19 INFO util.GSet: VM type = 64-bit
14/05/21 12:37:19 INFO util.GSet: 1.0% max memory = 966.7 MB
14/05/21 12:37:19 INFO util.GSet: capacity = 2^20 = 1048576 entries
14/05/21 12:37:19 INFO namenode.NameNode: Caching file names occuring more than 10 times
14/05/21 12:37:19 INFO namenode.FSNamesystem: dfs.namenode.safemode.threshold-pct = 0.9990000128746033
14/05/21 12:37:19 INFO namenode.FSNamesystem: dfs.namenode.safemode.min.datanodes = 0
14/05/21 12:37:19 INFO namenode.FSNamesystem: dfs.namenode.safemode.extension = 30000
14/05/21 12:37:19 INFO namenode.FSNamesystem: Retry cache on namenode is enabled
14/05/21 12:37:19 INFO namenode.FSNamesystem: Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
14/05/21 12:37:19 INFO util.GSet: Computing capacity for map Namenode Retry Cache
14/05/21 12:37:19 INFO util.GSet: VM type = 64-bit
14/05/21 12:37:19 INFO util.GSet: 0.029999999329447746% max memory = 966.7 MB
14/05/21 12:37:19 INFO util.GSet: capacity = 2^15 = 32768 entries
14/05/21 12:37:19 INFO common.Storage: Lock on /tmp/hadoop/hadoop-hdfs/dfs/name/in_use.lock acquired by nodename 3696@node1
14/05/21 12:37:21 INFO namenode.FSImage: No edit log streams selected.
14/05/21 12:37:21 INFO namenode.FSImage: Loading image file /tmp/hadoop/hadoop-hdfs/dfs/name/current/fsimage_0000000000000000000 using no compression
14/05/21 12:37:21 INFO namenode.FSImage: Number of files = 1
14/05/21 12:37:21 INFO namenode.FSImage: Number of files under construction = 0
14/05/21 12:37:21 INFO namenode.FSImage: Image file /tmp/hadoop/hadoop-hdfs/dfs/name/current/fsimage_0000000000000000000 of size 196 bytes loaded in 0 seconds.
14/05/21 12:37:21 INFO namenode.FSImage: Loaded image for txid 0 from /tmp/hadoop/hadoop-hdfs/dfs/name/current/fsimage_0000000000000000000
14/05/21 12:37:21 INFO namenode.NameCache: initialized with 0 entries 0 lookups
14/05/21 12:37:21 INFO namenode.FSNamesystem: Finished loading FSImage in 2220 msecs
14/05/21 12:37:22 INFO namenode.NameNode: RPC server is binding to mycluster:8020
14/05/21 12:37:22 INFO namenode.FSNamesystem: Stopping services started for standby state
14/05/21 12:37:22 INFO namenode.FSNamesystem: Stopping services started for active state
14/05/21 12:37:22 INFO namenode.FSNamesystem: Stopping services started for standby state
14/05/21 12:37:22 INFO mortbay.log: Stopped SelectChannelConnector@0.0.0.0:50070
14/05/21 12:37:22 INFO impl.MetricsSystemImpl: Stopping NameNode metrics system...
14/05/21 12:37:22 INFO impl.MetricsSystemImpl: NameNode metrics system stopped.
14/05/21 12:37:22 INFO impl.MetricsSystemImpl: NameNode metrics system shutdown complete.
14/05/21 12:37:22 FATAL namenode.NameNode: Exception in namenode join
java.io.IOException: Failed on local exception: java.net.SocketException: Unresolved address; Host Details : local host is: "mycluster"; destination host is: (unknown):0;
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:764)
at org.apache.hadoop.ipc.Server.bind(Server.java:422)
at org.apache.hadoop.ipc.Server$Listener.<init>(Server.java:534)
at org.apache.hadoop.ipc.Server.<init>(Server.java:2203)
at org.apache.hadoop.ipc.RPC$Server.<init>(RPC.java:897)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server.<init>(ProtobufRpcEngine.java:505)
at org.apache.hadoop.ipc.ProtobufRpcEngine.getServer(ProtobufRpcEngine.java:480)
at org.apache.hadoop.ipc.RPC$Builder.build(RPC.java:742)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.<init>(NameNodeRpcServer.java:273)
at org.apache.hadoop.hdfs.server.namenode.NameNode.createRpcServer(NameNode.java:510)
at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:493)
at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:684)
at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:669)
at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1254)
at org.apache.hadoop.hdfs.server.namenode.NameNode.main(NameNode.java:1320)
Caused by: java.net.SocketException: Unresolved address
at sun.nio.ch.Net.translateToSocketException(Net.java:157)
at sun.nio.ch.Net.translateException(Net.java:183)
at sun.nio.ch.Net.translateException(Net.java:189)
at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:76)
at org.apache.hadoop.ipc.Server.bind(Server.java:405)
... 13 more
Caused by: java.nio.channels.UnresolvedAddressException
at sun.nio.ch.Net.checkAddress(Net.java:127)
at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:208)
at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
... 14 more
14/05/21 12:37:22 INFO util.ExitUtil: Exiting with status 1
14/05/21 12:37:22 INFO namenode.NameNode: SHUTDOWN_MSG:
/************************************************************
SHUTDOWN_MSG: Shutting down NameNode at node1/54.255.200.6
************************************************************/
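
Note on the failure: HA is enabled for nameservice "mycluster" ("HA Enabled: true" above), yet the RPC server tries to bind to mycluster:8020. "mycluster" is a nameservice ID, not a resolvable hostname, so the bind fails with java.nio.channels.UnresolvedAddressException. A common cause is that the per-NameNode RPC addresses for the nameservice are missing or do not match the local node, leaving the bare nameservice name as the bind address. Below is a minimal hdfs-site.xml sketch of the relevant properties; the NameNode IDs nn1/nn2 and the host node2 are illustrative assumptions, not values taken from this log.

<!-- hdfs-site.xml sketch; nn1, nn2 and node2 are assumed, not from the log -->
<property>
  <name>dfs.nameservices</name>
  <value>mycluster</value>
</property>
<property>
  <!-- logical NameNode IDs within the nameservice (assumed names) -->
  <name>dfs.ha.namenodes.mycluster</name>
  <value>nn1,nn2</value>
</property>
<property>
  <!-- each entry must point at a real, resolvable host, not the nameservice ID -->
  <name>dfs.namenode.rpc-address.mycluster.nn1</name>
  <value>node1:8020</value>
</property>
<property>
  <!-- second NameNode host is assumed for illustration -->
  <name>dfs.namenode.rpc-address.mycluster.nn2</name>
  <value>node2:8020</value>
</property>

With per-NameNode rpc-address entries in place, core-site.xml can keep fs.defaultFS = hdfs://mycluster: clients resolve the active NameNode through dfs.client.failover.proxy.provider.mycluster, while each NameNode binds its RPC server to its own rpc-address instead of the bare nameservice name.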