2012-05-10 23:25:06,585 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: STARTUP_MSG:
/************************************************************
STARTUP_MSG: Starting DataNode
STARTUP_MSG: host = slave/192.168.0.2
STARTUP_MSG: args = []
STARTUP_MSG: version = 0.20.2
STARTUP_MSG: build = https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.20 -r 911707; compiled by 'chrisdo' on Fri Feb 19 08:07:34 UTC 2010
************************************************************/
2012-05-10 23:25:12,240 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Registered FSDatasetStatusMBean
2012-05-10 23:25:12,244 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Opened info server at 50010
2012-05-10 23:25:12,249 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Balancing bandwith is 1048576 bytes/s
2012-05-10 23:25:17,375 INFO org.mortbay.log: Logging to org.slf4j.impl.Log4jLoggerAdapter(org.mortbay.log) via org.mortbay.log.Slf4jLog
2012-05-10 23:25:17,516 INFO org.apache.hadoop.http.HttpServer: Port returned by webServer.getConnectors()[0].getLocalPort() before open() is -1. Opening the listener on 50075
2012-05-10 23:25:17,516 INFO org.apache.hadoop.http.HttpServer: listener.getLocalPort() returned 50075 webServer.getConnectors()[0].getLocalPort() returned 50075
2012-05-10 23:25:17,516 INFO org.apache.hadoop.http.HttpServer: Jetty bound to port 50075
2012-05-10 23:25:17,517 INFO org.mortbay.log: jetty-6.1.14
2012-05-10 23:25:18,173 INFO org.mortbay.log: Started [email protected]:50075
2012-05-10 23:25:18,186 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=DataNode, sessionId=null
2012-05-10 23:25:23,224 INFO org.apache.hadoop.ipc.metrics.RpcMetrics: Initializing RPC Metrics with hostName=DataNode, port=50020
2012-05-10 23:25:23,228 INFO org.apache.hadoop.ipc.Server: IPC Server Responder: starting
2012-05-10 23:25:23,234 INFO org.apache.hadoop.ipc.Server: IPC Server handler 0 on 50020: starting
2012-05-10 23:25:23,235 INFO org.apache.hadoop.ipc.Server: IPC Server handler 1 on 50020: starting
2012-05-10 23:25:23,235 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: dnRegistration = DatanodeRegistration(slave:50010, storageID=DS-391327016-127.0.1.1-50010-1336550556249, infoPort=50075, ipcPort=50020)
2012-05-10 23:25:23,236 INFO org.apache.hadoop.ipc.Server: IPC Server listener on 50020: starting
2012-05-10 23:25:23,240 INFO org.apache.hadoop.ipc.Server: IPC Server handler 2 on 50020: starting
2012-05-10 23:25:23,254 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeRegistration(192.168.0.2:50010, storageID=DS-391327016-127.0.1.1-50010-1336550556249, infoPort=50075, ipcPort=50020)In DataNode.run, data = FSDataset{dirpath='/app/hadoop/tmp/dfs/data/current'}
2012-05-10 23:25:23,255 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: using BLOCKREPORT_INTERVAL of 3600000msec Initial delay: 0msec
2012-05-10 23:25:23,287 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: BlockReport of 7 blocks got processed in 12 msecs
2012-05-10 23:25:23,288 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Starting Periodic block scanner.
2012-05-10 23:25:32,260 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeCommand action: DNA_REGISTER
2012-05-10 23:25:35,261 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeCommand action: DNA_REGISTER
2012-05-10 23:25:38,262 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeCommand action: DNA_REGISTER
2012-05-10 23:25:41,269 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: DataNode is shutting down: org.apache.hadoop.ipc.RemoteException: org.apache.hadoop.hdfs.protocol.UnregisteredDatanodeException: Data node 192.168.0.2:50010 is attempting to report storage ID DS-391327016-127.0.1.1-50010-1336550556249. Node 192.168.0.1:50010 is expected to serve this storage.
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getDatanode(FSNamesystem.java:3920)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.processReport(FSNamesystem.java:2891)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.blockReport(NameNode.java:715)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:508)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:959)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:955)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:953)
	at org.apache.hadoop.ipc.Client.call(Client.java:740)
	at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:220)
	at $Proxy4.blockReport(Unknown Source)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.offerService(DataNode.java:756)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.run(DataNode.java:1186)
	at java.lang.Thread.run(Thread.java:662)
2012-05-10 23:25:41,302 INFO org.apache.hadoop.ipc.Server: Stopping server on 50020
2012-05-10 23:25:41,302 INFO org.apache.hadoop.ipc.Server: IPC Server handler 0 on 50020: exiting
2012-05-10 23:25:41,303 INFO org.apache.hadoop.ipc.Server: IPC Server handler 2 on 50020: exiting
2012-05-10 23:25:41,302 INFO org.apache.hadoop.ipc.Server: IPC Server handler 1 on 50020: exiting
2012-05-10 23:25:41,304 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
2012-05-10 23:25:41,304 INFO org.apache.hadoop.ipc.Server: Stopping IPC Server listener on 50020
2012-05-10 23:25:41,305 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeRegistration(192.168.0.2:50010, storageID=DS-391327016-127.0.1.1-50010-1336550556249, infoPort=50075, ipcPort=50020):DataXceiveServer: java.nio.channels.AsynchronousCloseException
	at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
	at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
	at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
	at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:130)
	at java.lang.Thread.run(Thread.java:662)
2012-05-10 23:25:41,306 INFO org.apache.hadoop.ipc.Server: Stopping IPC Server Responder
2012-05-10 23:25:41,318 INFO org.apache.hadoop.hdfs.server.datanode.DataBlockScanner: Exiting DataBlockScanner thread.
2012-05-10 23:25:42,349 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
2012-05-10 23:25:42,451 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeRegistration(192.168.0.2:50010, storageID=DS-391327016-127.0.1.1-50010-1336550556249, infoPort=50075, ipcPort=50020):Finishing DataNode in: FSDataset{dirpath='/app/hadoop/tmp/dfs/data/current'}
2012-05-10 23:25:42,452 INFO org.apache.hadoop.ipc.Server: Stopping server on 50020
2012-05-10 23:25:42,452 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
2012-05-10 23:25:42,453 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: SHUTDOWN_MSG:
/************************************************************
SHUTDOWN_MSG: Shutting down DataNode at slave/192.168.0.2
************************************************************/
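Note: the UnregisteredDatanodeException above is what shuts this DataNode down. The NameNode already associates storage ID DS-391327016-127.0.1.1-50010-1336550556249 with 192.168.0.1:50010, so when 192.168.0.2:50010 reports the same ID it is rejected. This typically happens when the slave's dfs data directory (here /app/hadoop/tmp/dfs/data, per the FSDataset dirpath in the log) was copied from another node, so both DataNodes carry the same storageID. A minimal sketch of one common remedy follows, assuming HADOOP_HOME points at the Hadoop 0.20 install and that the slave's data directory holds no blocks you need to keep:

    # on the slave (192.168.0.2)
    $HADOOP_HOME/bin/hadoop-daemon.sh stop datanode    # make sure the DataNode is down
    rm -rf /app/hadoop/tmp/dfs/data/*                  # WARNING: discards any blocks stored on this node
    $HADOOP_HOME/bin/hadoop-daemon.sh start datanode   # DataNode re-registers with a fresh storage ID

If the node does hold unique block replicas, an alternative sometimes used is to edit only the storageID line in /app/hadoop/tmp/dfs/data/current/VERSION instead of wiping the directory; treat that as a careful manual workaround rather than a standard procedure.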