HBase checksum failure
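Full TRACE/DEBUG client log of an hdfs dfs -cat against an HBase store file whose block blk_-5555379348506809837_16582860 turns out to be corrupt: the DFSClient hits checksum errors on two replicas in a row, gets an OP_READ_BLOCK error from the third datanode, reports the bad replicas back to the namenode, and finally fails the read with a BlockMissingException.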
$ HADOOP_ROOT_LOGGER="TRACE,console" hdfs dfs -cat /hbase/mytable/67e74c08760b2b9ece01ee80573b2d27/mycf/0749920f2624428c98a3d5e213ded7fc > /dev/null
...
15/03/18 22:52:52 TRACE hdfs.DFSClient: Address /10.96.4.22:50010 is local
15/03/18 22:52:52 INFO hdfs.DFSClient: Successfully connected to /10.96.4.22:50010 for BP-920812741-10.227.207.1-1402337163878:blk_-5555379348506809837_16582860
15/03/18 22:52:52 TRACE datatransfer.PacketReceiver: readNextPacket: dataPlusChecksumLen = 66048 headerLen = 25
15/03/18 22:52:52 TRACE hdfs.RemoteBlockReader2: DFSClient readNextPacket got header PacketHeader with packetLen=66048 header data: offsetInBlock: 63504384
seqno: 0
lastPacketInBlock: false
dataLen: 65536
15/03/18 22:52:52 WARN hdfs.DFSClient: Found Checksum error for BP-920812741-10.227.207.1-1402337163878:blk_-5555379348506809837_16582860 from 10.96.4.22:50010 at 63562752
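The client verifies each packet against the CRC checksums the datanode streams alongside the data, and at byte 63562752 of the block the data no longer matches. A minimal triage sketch for inspecting the replica on the affected datanode; the /data/*/dfs/dn search root is an assumption standing in for whatever dfs.datanode.data.dir is on this cluster:

# On datanode 10.96.4.22: locate the on-disk replica and its checksum file.
# /data/*/dfs/dn is a placeholder for dfs.datanode.data.dir.
$ find /data/*/dfs/dn -name 'blk_-5555379348506809837*'
# Standard HDFS layout yields two files per replica:
#   .../blk_-5555379348506809837                  (block data)
#   .../blk_-5555379348506809837_16582860.meta    (CRC checksums)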
15/03/18 22:52:52 DEBUG hdfs.DFSClient: Connecting to datanode 10.251.34.68:50010
15/03/18 22:52:52 TRACE datatransfer.DataTransferProtocol: Sending DataTransferOp OpReadBlockProto: header {
  baseHeader {
    block {
      poolId: "BP-920812741-10.227.207.1-1402337163878"
      blockId: 12891364725202741779
      generationStamp: 16582860
      numBytes: 67108864
    }
    token {
      identifier: ""
      password: ""
      kind: ""
      service: ""
    }
  }
  clientName: "DFSClient_NONMAPREDUCE_1502279601_1"
}
offset: 63504384
len: 3604480
sendChecksums: true
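Note the retry request to the second datanode starts at the failed packet's offset and asks for exactly the rest of the block: numBytes 67108864 − offset 63504384 = len 3604480.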
15/03/18 22:52:52 TRACE hdfs.DFSClient: Address /10.251.34.68:50010 is not local
15/03/18 22:52:52 TRACE datatransfer.PacketReceiver: readNextPacket: dataPlusChecksumLen = 66048 headerLen = 25
15/03/18 22:52:52 TRACE hdfs.RemoteBlockReader2: DFSClient readNextPacket got header PacketHeader with packetLen=66048 header data: offsetInBlock: 63504384
seqno: 0
lastPacketInBlock: false
dataLen: 65536
15/03/18 22:52:52 WARN hdfs.DFSClient: Found Checksum error for BP-920812741-10.227.207.1-1402337163878:blk_-5555379348506809837_16582860 from 10.251.34.68:50010 at 63562752
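The second replica fails verification at exactly the same offset, 63562752. Independent disk corruption on two nodes at the same byte is unlikely; one plausible reading is that the same bad data (or mismatched checksum) went through the write pipeline to both replicas. On Hadoop 2.7+ a replica's checksum file can be re-verified offline against the block data; a sketch with placeholder paths (substitute the files found above):

# Hadoop 2.7+ only; BLOCK_DIR is an assumed finalized-replica directory.
$ BLOCK_DIR=/data/1/dfs/dn/current/BP-920812741-10.227.207.1-1402337163878/current/finalized
$ hdfs debug verifyMeta -meta "$BLOCK_DIR/blk_-5555379348506809837_16582860.meta" -block "$BLOCK_DIR/blk_-5555379348506809837"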
15/03/18 22:52:52 DEBUG hdfs.DFSClient: Connecting to datanode 10.33.166.104:50010
15/03/18 22:52:52 TRACE datatransfer.DataTransferProtocol: Sending DataTransferOp OpReadBlockProto: header {
  baseHeader {
    block {
      poolId: "BP-920812741-10.227.207.1-1402337163878"
      blockId: 12891364725202741779
      generationStamp: 16582860
      numBytes: 67108864
    }
    token {
      identifier: ""
      password: ""
      kind: ""
      service: ""
    }
  }
  clientName: "DFSClient_NONMAPREDUCE_1502279601_1"
}
offset: 63504384
len: 3604480
sendChecksums: true
15/03/18 22:52:55 DEBUG hdfs.DFSClient: Exception while getting block reader, closing stale NioInetPeer(Socket[addr=/10.33.166.104,port=50010,localport=50586])
java.io.IOException: Got error for OP_READ_BLOCK, self=/10.96.4.22:50586, remote=/10.33.166.104:50010, for file /hbase/mytable/67e74c08760b2b9ece01ee80573b2d27/mycf/0749920f2624428c98a3d5e213ded7fc, for pool BP-920812741-10.227.207.1-1402337163878 block -5555379348506809837_16582860
    at org.apache.hadoop.hdfs.RemoteBlockReader2.checkSuccess(RemoteBlockReader2.java:429)
    at org.apache.hadoop.hdfs.RemoteBlockReader2.newBlockReader(RemoteBlockReader2.java:394)
    at org.apache.hadoop.hdfs.BlockReaderFactory.newBlockReader(BlockReaderFactory.java:137)
    at org.apache.hadoop.hdfs.DFSInputStream.getBlockReader(DFSInputStream.java:1117)
    at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:539)
    at org.apache.hadoop.hdfs.DFSInputStream.seekToNewSource(DFSInputStream.java:1301)
    at org.apache.hadoop.hdfs.DFSInputStream.readBuffer(DFSInputStream.java:714)
    at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:741)
    at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:782)
    at java.io.DataInputStream.read(DataInputStream.java:100)
    at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:84)
    at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:52)
    at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:112)
    at org.apache.hadoop.fs.shell.Display$Cat.printToStdout(Display.java:86)
    at org.apache.hadoop.fs.shell.Display$Cat.processPath(Display.java:81)
    at org.apache.hadoop.fs.shell.Command.processPaths(Command.java:306)
    at org.apache.hadoop.fs.shell.Command.processPathArgument(Command.java:278)
    at org.apache.hadoop.fs.shell.Command.processArgument(Command.java:260)
    at org.apache.hadoop.fs.shell.Command.processArguments(Command.java:244)
    at org.apache.hadoop.fs.shell.Command.processRawArguments(Command.java:190)
    at org.apache.hadoop.fs.shell.Command.run(Command.java:154)
    at org.apache.hadoop.fs.FsShell.run(FsShell.java:254)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:84)
    at org.apache.hadoop.fs.FsShell.main(FsShell.java:304)
15/03/18 22:52:55 WARN hdfs.DFSClient: Failed to connect to /10.33.166.104:50010 for block, add to deadNodes and continue. java.io.IOException: Got error for OP_READ_BLOCK, self=/10.96.4.22:50586, remote=/10.33.166.104:50010, for file /hbase/mytable/67e74c08760b2b9ece01ee80573b2d27/mycf/0749920f2624428c98a3d5e213ded7fc, for pool BP-920812741-10.227.207.1-1402337163878 block -5555379348506809837_16582860
java.io.IOException: Got error for OP_READ_BLOCK, self=/10.96.4.22:50586, remote=/10.33.166.104:50010, for file /hbase/mytable/67e74c08760b2b9ece01ee80573b2d27/mycf/0749920f2624428c98a3d5e213ded7fc, for pool BP-920812741-10.227.207.1-1402337163878 block -5555379348506809837_16582860
    at org.apache.hadoop.hdfs.RemoteBlockReader2.checkSuccess(RemoteBlockReader2.java:429)
    at org.apache.hadoop.hdfs.RemoteBlockReader2.newBlockReader(RemoteBlockReader2.java:394)
    at org.apache.hadoop.hdfs.BlockReaderFactory.newBlockReader(BlockReaderFactory.java:137)
    at org.apache.hadoop.hdfs.DFSInputStream.getBlockReader(DFSInputStream.java:1117)
    at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:539)
    at org.apache.hadoop.hdfs.DFSInputStream.seekToNewSource(DFSInputStream.java:1301)
    at org.apache.hadoop.hdfs.DFSInputStream.readBuffer(DFSInputStream.java:714)
    at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:741)
    at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:782)
    at java.io.DataInputStream.read(DataInputStream.java:100)
    at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:84)
    at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:52)
    at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:112)
    at org.apache.hadoop.fs.shell.Display$Cat.printToStdout(Display.java:86)
    at org.apache.hadoop.fs.shell.Display$Cat.processPath(Display.java:81)
    at org.apache.hadoop.fs.shell.Command.processPaths(Command.java:306)
    at org.apache.hadoop.fs.shell.Command.processPathArgument(Command.java:278)
    at org.apache.hadoop.fs.shell.Command.processArgument(Command.java:260)
    at org.apache.hadoop.fs.shell.Command.processArguments(Command.java:244)
    at org.apache.hadoop.fs.shell.Command.processRawArguments(Command.java:190)
    at org.apache.hadoop.fs.shell.Command.run(Command.java:154)
    at org.apache.hadoop.fs.FsShell.run(FsShell.java:254)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:84)
    at org.apache.hadoop.fs.FsShell.main(FsShell.java:304)
15/03/18 22:52:55 TRACE ipc.ProtobufRpcEngine: 1: Call -> [email protected]/10.80.3.147:8020: reportBadBlocks {blocks { b { poolId: "BP-920812741-10.227.207.1-1402337163878" blockId: 12891364725202741779 generationStamp: 16582860 numBytes: 67108864 } offset: 18446744073709551615 locs { id { ipAddr: "10.96.4.22" hostName: "ip-10-96-4-22.eu-west-1.compute.internal" storageID: "DS-1811035498-10.96.4.22-50010-1420810187338" xferPort: 50010 infoPort: 50075 ipcPort: 50020 } capacity: 1774898511872 dfsUsed: 1679870206589 remaining: 4588204032 blockPoolUsed: 1679870206589 lastUpdate: 1426719144856 xceiverCount: 10 location: "/default-rack" adminState: NORMAL } locs { id { ipAddr: "10.251.34.68" hostName: "ip-10-251-34-68.eu-west-1.compute.internal" storageID: "DS-1413244121-10.251.34.68-50010-1410790190078" xferPort: 50010 infoPort: 50075 ipcPort: 50020 } capacity: 1774898511872 dfsUsed: 1302979904997 remaining: 381348884480 blockPoolUsed: 1302979904997 lastUpdate: 1426719145579 xceiverCount: 21 location: "/default-rack" adminState: NORMAL } corrupt: false blockToken { identifier: "" password: "" kind: "" service: "" } }}
15/03/18 22:52:55 DEBUG ipc.Client: IPC Client (23583540) connection to namenode-hbase.example.net/10.80.3.147:8020 from hdfs sending #14
15/03/18 22:52:55 DEBUG ipc.Client: IPC Client (23583540) connection to namenode-hbase.example.net/10.80.3.147:8020 from hdfs got value #14
15/03/18 22:52:55 DEBUG ipc.ProtobufRpcEngine: Call: reportBadBlocks took 4ms
15/03/18 22:52:55 TRACE ipc.ProtobufRpcEngine: 1: Response <- [email protected]/10.80.3.147:8020: reportBadBlocks {}
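Having failed on all locations, the client calls reportBadBlocks on the namenode (the TRACE RPC above), flagging the corrupt replicas so the namenode can mark them and schedule re-replication if a healthy copy exists. Only 10.96.4.22 and 10.251.34.68 appear in the report's locs: 10.33.166.104 refused the read outright (OP_READ_BLOCK error) rather than serving bad data, so the client simply added it to deadNodes. The namenode's resulting view of the file can be checked with standard fsck flags:

$ hdfs fsck /hbase/mytable/67e74c08760b2b9ece01ee80573b2d27/mycf/0749920f2624428c98a3d5e213ded7fc -files -blocks -locations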
15/03/18 22:52:55 WARN hdfs.DFSClient: DFS Read
org.apache.hadoop.hdfs.BlockMissingException: Could not obtain block: BP-920812741-10.227.207.1-1402337163878:blk_-5555379348506809837_16582860 file=/hbase/mytable/67e74c08760b2b9ece01ee80573b2d27/mycf/0749920f2624428c98a3d5e213ded7fc
    at org.apache.hadoop.hdfs.DFSInputStream.chooseDataNode(DFSInputStream.java:827)
    at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:532)
    at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:738)
    at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:782)
    at java.io.DataInputStream.read(DataInputStream.java:100)
    at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:84)
    at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:52)
    at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:112)
    at org.apache.hadoop.fs.shell.Display$Cat.printToStdout(Display.java:86)
    at org.apache.hadoop.fs.shell.Display$Cat.processPath(Display.java:81)
    at org.apache.hadoop.fs.shell.Command.processPaths(Command.java:306)
    at org.apache.hadoop.fs.shell.Command.processPathArgument(Command.java:278)
    at org.apache.hadoop.fs.shell.Command.processArgument(Command.java:260)
    at org.apache.hadoop.fs.shell.Command.processArguments(Command.java:244)
    at org.apache.hadoop.fs.shell.Command.processRawArguments(Command.java:190)
    at org.apache.hadoop.fs.shell.Command.run(Command.java:154)
    at org.apache.hadoop.fs.FsShell.run(FsShell.java:254)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:84)
    at org.apache.hadoop.fs.FsShell.main(FsShell.java:304)
cat: Could not obtain block: BP-920812741-10.227.207.1-1402337163878:blk_-5555379348506809837_16582860 file=/hbase/mytable/67e74c08760b2b9ece01ee80573b2d27/mycf/0749920f2624428c98a3d5e213ded7fc
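With no replica readable, the read fails and cat exits with BlockMissingException. Two standard follow-ups, sketched below: enumerate corrupt files cluster-wide, and, as a last resort, copy out whatever bytes remain with checksum verification disabled (the region around offset 63562752 will still be garbage):

# List every file the namenode now considers corrupt.
$ hdfs fsck / -list-corruptfileblocks
# Salvage the raw bytes without CRC verification (-ignoreCrc skips client-side checks).
$ hdfs dfs -get -ignoreCrc /hbase/mytable/67e74c08760b2b9ece01ee80573b2d27/mycf/0749920f2624428c98a3d5e213ded7fc /tmp/0749920f2624428c98a3d5e213ded7fc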