-
-
Save GeorgesOatesLarsen/035c17f67158154220db7971e34314b2 to your computer and use it in GitHub Desktop.
read 1.9 chunk using protodef
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Dependencies: filesystem, the 1.8 chunk implementation we convert into,
// vectors for block lookups, and ProtoDef for parsing the 1.9 wire format.
const fs = require("fs");
const Chunk = require('prismarine-chunk')("1.8");
const Vec3 = require("vec3");
const ProtoDef = require('protodef').ProtoDef;

// based on http://wiki.vg/SMP_Map_Format#Format
// testing data from https://download.rom1504.fr/minecraft/chunks/chunks-1.9/
// see http://lunarco.de/minecraft/chunks/ for explanations
// also see https://gist.github.com/Gjum/0375b643ec13a42ab3c0
// and https://github.com/SpockBotMC/SpockBot/blob/0535c31/spockbot/plugins/tools/smpmap.py
// WIP by Romain Beaumont, completed by Georges Oates Larsen
/*
OLD FORMAT:
The first w*l*h*2 bytes are blocks, each of which are shorts.
After that, the first w*l*h*0.5 bytes are block-light-levels, each half-bytes.
Next, the first w*l*h*0.5 bytes are sky-light-levels, each half-bytes.
Finally, the next w*l bytes are biomes.
*/

// Captured packet metadata (JSON, provides the section bitmap) and the raw
// 1.9 chunk payload it describes.
const data = JSON.parse(fs.readFileSync('./packet_-10_-1.data'));
const chunk = fs.readFileSync('./chunk_-10_-1.dump');
// ProtoDef custom-type reader: reads a count expressed in longs (using the
// numeric type in typeArgs.type) and converts it to a byte count — one
// packed long is 8 bytes.
function readLongToByte(buffer, offset, typeArgs) {
  const { value, size } = this.read(buffer, offset, typeArgs.type, {});
  return {
    value: Math.ceil(value * 8),
    size
  };
}
// ProtoDef custom-type writer: converts a byte count back into longs before
// delegating to the underlying numeric writer.
function writeLongToByte(value, buffer, offset, typeArgs) {
  const longs = value / 8;
  return this.write(longs, buffer, offset, typeArgs.type, {});
}
// ProtoDef custom-type sizer: size of the underlying numeric type when the
// byte count is expressed in longs.
function sizeOfLongToByte(value, typeArgs) {
  const longs = value / 8;
  return this.sizeOf(longs, typeArgs.type, {});
}
// Register the custom type as ProtoDef's [read, write, sizeOf] triple.
const longToByteFns = [readLongToByte, writeLongToByte, sizeOfLongToByte];

// 1.9 chunk-section wire layout: bits-per-block, the block-state palette,
// the packed block data (length prefix counted in longs), then the two
// half-byte-per-block light arrays.
const sectionType = ["container", [
  { "name": "bitsPerBlock", "type": "u8" },
  {
    "name": "palette",
    "type": ["array", { "type": "varint", "countType": "varint" }]
  },
  {
    "name": "dataArray",
    "type": ["buffer", { "countType": "longToByte", "countTypeArgs": { "type": "varint" } }]
  },
  { "name": "blockLight", "type": ["buffer", { "count": 16 * 16 * 16 / 2 }] },
  { "name": "skyLight", "type": ["buffer", { "count": 16 * 16 * 16 / 2 }] }
]];

const proto = new ProtoDef();
proto.addType('longToByte', longToByteFns);
proto.addType('section', sectionType);
// Parses one chunk section starting at offset 0 of `section`, annotating
// any ProtoDef error with the field that failed before rethrowing.
function readSection(section) {
  let result;
  try {
    result = proto.read(section, 0, 'section', {});
  } catch (e) {
    e.message = `Read error for ${e.field} : ${e.message}`;
    throw e;
  }
  return result;
}
// Load the converted column into a 1.8-format Chunk, then tally what
// percentage of the column each block type occupies (sanity check).
const testChunk = new Chunk();
const histogram = {};
let total = 0;
testChunk.load(readChunk(chunk, data['bitMap']));
const share = 100.0 / (16 * 16 * 256); // percentage contributed by one block
for (let x = 0; x < 16; x++) {
  for (let z = 0; z < 16; z++) {
    for (let y = 0; y < 256; y++) {
      const blocktype = testChunk.getBlockType(new Vec3(x, y, z));
      if (histogram[blocktype] === undefined) {
        histogram[blocktype] = 0;
      }
      histogram[blocktype] += share;
      total++;
    }
  }
}
console.log(histogram);
console.log(total);
// Converts a 1.9 chunk column into the flat 1.8 layout consumed by
// prismarine-chunk:
//   {blocks as shorts}{block light half-bytes}{sky light half-bytes}{biomes}
// `bitMap` has bit y set when vertical section y is present in `chunk`;
// absent sections are zero-filled because the old format expects all 16.
function readChunk(chunk, bitMap) {
  let offset = 0;
  const blockParts = [];      // byte buffers containing shorts
  const blockLightParts = []; // byte buffers containing half-bytes
  const skyLightParts = [];   // byte buffers containing half-bytes
  for (let y = 0; y < 16; y++) {
    if (((bitMap >> y) & 1) == 1) {
      const { size, value } = readSection(chunk.slice(offset));
      offset += size;
      blockParts.push(eatPackedBlockLongs(value.dataArray, value.palette, value.bitsPerBlock));
      blockLightParts.push(value.blockLight);
      skyLightParts.push(value.skyLight);
    } else {
      // Old format expects *all* blocks to be present, so if the new format
      // omits a section, we must fill with zeroes.
      blockParts.push(Buffer.alloc(16 * 16 * 16 * 2));
      blockLightParts.push(Buffer.alloc(16 * 16 * 16 / 2));
      skyLightParts.push(Buffer.alloc(16 * 16 * 16 / 2));
    }
  }
  // Biomes trail the last present section. Sliced once after the loop: the
  // old per-iteration slice only ever kept its final value anyway.
  // NOTE(review): does this really line up with valid biome data? — confirm.
  const biomes = chunk.slice(offset, offset + 256);
  // Single concat per stream instead of re-concatenating every iteration.
  return Buffer.concat([
    Buffer.concat(blockParts),
    Buffer.concat(blockLightParts),
    Buffer.concat(skyLightParts),
    biomes
  ]);
}
// Unpacks a 1.9 packed block array (`rawBuffer`, bitsPerBlock bits per
// entry) into a buffer of 1.8-style little-endian shorts, resolving each
// packed index through `palette` and re-packing as (blockId << 4) | meta.
function eatPackedBlockLongs(rawBuffer, palette, bitsPerBlock) {
  const blockCount = rawBuffer.length * 8 / bitsPerBlock;
  const resultantBuffer = Buffer.alloc(blockCount * 2);
  for (let block = 0; block < blockCount; block++) {
    // Absolute start-bit for the block, and the byte containing it.
    const bit = block * bitsPerBlock;
    const targetbyte = Math.floor(bit / 8);
    // Build a 32-bit big-endian window starting at targetbyte. The original
    // used readUInt32BE(targetbyte, true): the `noAssert` flag is gone from
    // modern Node, so reads near the end of the buffer threw (or, before
    // that, returned out-of-bounds garbage). Zero-pad past the end instead.
    let datatarget = 0;
    for (let i = 0; i < 4; i++) {
      const byteVal = targetbyte + i < rawBuffer.length ? rawBuffer[targetbyte + i] : 0;
      datatarget = ((datatarget << 8) | byteVal) >>> 0;
    }
    // Start bit local to the 32-bit window.
    const localbit = bit % 8;
    // Chop off uninteresting bits, then shift down to the palette index.
    const paletteid = (datatarget << (32 - localbit - bitsPerBlock)) >>> (32 - bitsPerBlock);
    // Split the global palette entry into 1.8 id/metadata and re-pack.
    const data = palette[paletteid] & 0b1111;
    const id = palette[paletteid] >>> 4;
    resultantBuffer.writeUInt16LE((id << 4) | data, block * 2);
  }
  return resultantBuffer;
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment