package org.jire.swiftfup.packing

import com.displee.cache.CacheLibrary
import com.displee.cache.index.Index317
import io.netty.buffer.Unpooled
import java.io.File
import java.nio.file.Path
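
/**
 * Repacks assets from an OSRS cache (revision 218, per [CACHE_FROM_PATH])
 * into a 317-format server cache at [CACHE_TO_PATH]: models, frames, frame
 * bases, seq/npc/loc/item/underlay/overlay/graphic configs, textures and
 * maps. One-off steps (spec bar models, sounds) and a final rebuild are
 * toggled via the constants below.
 */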
object TarnishPacker {

    private const val CACHE_TO_PATH = "../server/cache/"
    private const val CACHE_FROM_PATH = "data/osrs/cache218/"

    private const val SPEC_BAR_MODELS = false
    private const val SOUNDS = false

    private const val REBUILD = true
    private const val REBUILD_DIRECTORY_NAME = "rebuild"
    private const val REBUILD_DIRECTORY_PATH = "${CACHE_TO_PATH}$REBUILD_DIRECTORY_NAME"
    @JvmStatic
    fun main(args: Array<String>) {
        Index317.addMetaFiles("osrs_sprites_version", "osrs_sprites_crc")
        Index317.addMetaFiles("osrs_sounds_version", "osrs_sounds_crc")

        val cachePath = CACHE_TO_PATH

        if (SPEC_BAR_MODELS) {
            // One-off step: copy the special attack bar models from an old cache.
            val cacheFrom = CacheLibrary.create("../packing/data/tarnishps/old-cache/")
            val cacheTo = CacheLibrary.create(cachePath)
            for (modelId in 18552..18562) {
                val fromFile = cacheFrom.data(1, modelId)!!
                cacheTo.put(1, modelId, fromFile)
                println("packed spec bar model $modelId")
            }
            cacheTo.index(1).update()
            cacheTo.update()
            cacheTo.close()
            cacheFrom.close()
            return
        }

        val cacheFrom = CacheLibrary.create(CACHE_FROM_PATH)
        val cacheTo = CacheLibrary.create(cachePath)

        if (SOUNDS) {
            if (true) { // debug toggle left in the gist: print one sound's size and exit
                println(cacheTo.data(SOUNDS_INDEX, 2720)!!.size)
                return
            }
            sounds(cachePath, cacheTo)
            cacheTo.update()
            cacheTo.close()
            cacheFrom.close()
            return
        }

        if (false) { // disabled: copy source index 8 wholesale into destination index 5
            val toIndexId = 5
            val indexTo = if (cacheTo.exists(toIndexId)) cacheTo.index(toIndexId).apply { clear() }
            else cacheTo.createIndex()

            val indexFrom = cacheFrom.index(8)
            indexFrom.cache()

            for (archive in indexFrom.archives()) {
                if (!archive.containsData()) {
                    println("archive ${archive.id} doesn't contain data! (has ${archive.files().size} files)")
                    continue
                }
                for (file in archive.files()) {
                    val data = file.data!!
                    //println("put ${archive.id}:${file.id} with ${data.size} bytes")
                    cacheTo.put(toIndexId, archive.id, file.id, data)
                }
            }

            indexTo.update()
        }

        models(cacheFrom, cacheTo)
        seq(cacheFrom, cacheTo)
        frames(cacheFrom, cacheTo)
        frameBases(cacheFrom, cacheTo)
        npc(cacheFrom, cacheTo)
        obj(cacheFrom, cacheTo)
        maps(cacheFrom, cacheTo)
        underlays(cacheFrom, cacheTo)
        overlays(cacheFrom, cacheTo)
        textures(cacheFrom, cacheTo)
        items(cacheFrom, cacheTo)
        graphics(cacheFrom, cacheTo)

        cacheTo.update()
        cacheTo.close()
        cacheFrom.close()

        if (REBUILD) {
            rebuild()
        }
    }
    private const val SOUNDS_INDEX = 6
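
    /**
     * Packs every numbered .wav in the sounds directory into index
     * [SOUNDS_INDEX]. A count-prefixed index buffer of sound IDs is also
     * built, but note it is never written anywhere in the original gist.
     */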
    private fun sounds(cachePath: String, cacheTo: CacheLibrary) {
        if (false) { // disabled: one-off index creation / cleanup
            cacheTo.createIndex()
            /*val index = cacheTo.index(SOUNDS_INDEX)
            index.cache()
            for (archive in index.archives()) {
                cacheTo.remove(SOUNDS_INDEX, archive.id)
                println("removed archive ${archive.id}")
            }
            index.update()*/
            return
        }

        val index = cacheTo.index(SOUNDS_INDEX)

        val indexBuf = Unpooled.buffer()
        indexBuf.writeShort(0) // placeholder for the sound count

        var amount = 0
        for (soundFile in File("../packing/data/tarnishps/sounds/").listFiles()!!) {
            if (soundFile.extension != "wav") continue
            val id = soundFile.nameWithoutExtension.toIntOrNull() ?: continue

            val data = soundFile.readBytes()
            indexBuf.writeShort(id)
            cacheTo.put(SOUNDS_INDEX, id, data)

            amount++
        }

        indexBuf.setShort(0, amount)

        // NOTE: this array is built but never packed or written in the original gist.
        val indexBufArray = ByteArray(indexBuf.writerIndex())
        indexBuf.readBytes(indexBufArray)

        index.update()
        println("Packed $amount sounds")
    }
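
    /**
     * Copies every model archive from the OSRS models index (7) into the
     * 317 models index (1), keeping archive IDs intact.
     */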
    private fun models(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        val indexFrom = cacheFrom.index(7)
        indexFrom.cache()

        val indexTo = cacheTo.index(1)
        indexTo.clear()

        var count = 0
        for (archive in indexFrom.archives()) {
            if (!archive.containsData()) continue

            indexTo.add(archive)
            count++
        }

        indexTo.update()
        println("models count $count")
    }
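
    /**
     * Repacks animation frames from the OSRS frames index (0) into index 2.
     * Each destination archive is a container: an unsigned short holding the
     * highest file ID, then per file an unsigned short ID, a 3-byte (medium)
     * length, and the raw frame data.
     */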
    private fun frames(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        val indexFrom = cacheFrom.index(0)
        indexFrom.cache()

        val indexTo = cacheTo.index(2)
        indexTo.clear()

        var count = 0
        for (archive in indexFrom.archives()) {
            if (!archive.containsData()) continue

            val buf = Unpooled.buffer()

            var highestFileId = 0
            buf.writeShort(highestFileId) // placeholder

            for (file in archive.files()) {
                val fileId = file.id
                val data = file.data!!

                buf.writeShort(fileId)
                buf.writeMedium(data.size)
                buf.writeBytes(data)

                if (fileId > highestFileId)
                    highestFileId = fileId
            }

            buf.setShort(0, highestFileId)

            buf.readerIndex(0)
            val array = ByteArray(buf.readableBytes())
            buf.readBytes(array)

            cacheTo.put(2, archive.id, array)
            count++
        }

        indexTo.update()
        println("frames count $count")
    }
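
    /**
     * Flattens the OSRS frame bases index (1) into a single "framebases.dat"
     * file: an unsigned short group count, then per group an unsigned short
     * group ID, an unsigned short length, and the raw data.
     */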
    private fun frameBases(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        val indexFrom = cacheFrom.index(1)
        indexFrom.cache()

        val buf = Unpooled.buffer()

        var count = 0
        buf.writeShort(count) // placeholder

        for (archive in indexFrom.archives()) {
            val groupId = archive.id
            if (!archive.containsData()) throw IllegalStateException("MUST HAVE DATA! $groupId")

            val file = archive.file(0)!!
            val data = file.data!!

            val dataSize = data.size
            if (dataSize >= 65535) throw IllegalStateException("TOO LARGE DATA! $groupId size $dataSize")

            buf.writeShort(groupId)
            buf.writeShort(dataSize)
            buf.writeBytes(data)

            count++
        }

        buf.setShort(0, count)

        buf.readerIndex(0)
        val array = ByteArray(buf.readableBytes())
        buf.readBytes(array)

        cacheTo.put(0, 2, "framebases.dat", array)
        cacheTo.index(0).update()

        println("frame bases count $count and size ${array.size}")
    }
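
    /**
     * Flattens sequence (animation) configs from OSRS config archive 12 into
     * "seq.dat": an unsigned short highest file ID, then per file an unsigned
     * short ID, an unsigned short length, and the raw config data.
     */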
    private fun seq(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        var highestFileId = -1
        var biggestSize = 0

        val buf = Unpooled.buffer()
        buf.writeShort(0) // placeholder for the highest file ID

        val configIndex = cacheFrom.index(2)
        configIndex.cache()

        val fromArchive = configIndex.archive(12)!!
        for (file in fromArchive.files()) {
            val data = file.data!!
            val dataSize = data.size
            if (dataSize < 1) {
                // The original also wrote a -1 short after this throw; that write was unreachable.
                throw IllegalStateException("skipped seq file ${file.id} (no data)")
            } else {
                val fileId = file.id
                buf.writeShort(fileId)

                if (dataSize >= 65535) throw IllegalStateException("TOO LARGE DATA! ${fromArchive.id}:$fileId size $dataSize")
                buf.writeShort(dataSize)
                buf.writeBytes(data)

                if (fileId > highestFileId)
                    highestFileId = fileId
                if (data.size > biggestSize)
                    biggestSize = data.size

                //println("seq $fileId length ${data.size}")
            }
        }

        buf.setShort(0, highestFileId)

        buf.readerIndex(0)
        val array = ByteArray(buf.readableBytes())
        buf.readBytes(array)

        println("seq highest $highestFileId and biggest was $biggestSize (total bytes=${array.size})")

        cacheTo.put(0, 2, "seq.dat", array)
        cacheTo.index(0).update()
    }
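
    /**
     * Packs NPC configs from OSRS config archive 9 into the classic 317
     * "npc.dat"/"npc.idx" pair: the idx holds an unsigned short highest ID,
     * one unsigned short size per ID (0 = missing), and a -1 terminator; the
     * dat is the concatenated config payloads in ID order.
     */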
    private fun npc(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        val idx = Unpooled.buffer()
        idx.writeShort(0)

        val dat = Unpooled.directBuffer()

        val configIndex = cacheFrom.index(2)
        configIndex.cache()

        val fromArchive = configIndex.archive(9)!!
        val highestFileId = fromArchive.fileIds().max()
        for (fileId in 0..highestFileId) {
            val file = fromArchive.file(fileId)
            val data = file?.data
            val dataSize = data?.size ?: 0
            if (dataSize < 1) {
                idx.writeShort(0)
                println("WARNING: NPC skip $fileId")
            } else {
                if (dataSize >= 65535) throw IllegalStateException("TOO LARGE NPC size $dataSize for ID $fileId")
                idx.writeShort(dataSize)
                dat.writeBytes(data)
            }
        }

        idx.writeShort(-1) // EOF
        idx.setShort(0, highestFileId)

        dat.readerIndex(0)
        val datArray = ByteArray(dat.readableBytes())
        dat.readBytes(datArray)

        idx.readerIndex(0)
        val idxArray = ByteArray(idx.readableBytes())
        idx.readBytes(idxArray)

        cacheTo.put(0, 2, "npc.dat", datArray)
        cacheTo.put(0, 2, "npc.idx", idxArray)
        cacheTo.index(0).update()

        println("npc highest $highestFileId")
    }
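
    /**
     * Packs object (loc) configs from OSRS config archive 6 into the 317
     * "loc.dat"/"loc.idx" pair, using the same idx layout as [npc].
     */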
    private fun obj(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        var highestFileId = -1

        val idx = Unpooled.buffer()
        idx.writeShort(0)

        val dat = Unpooled.directBuffer()

        val configIndex = cacheFrom.index(2)
        configIndex.cache()

        val fromArchive = configIndex.archive(6)!!
        for (file in fromArchive.files()) {
            val data = file.data!!
            val dataSize = data.size
            if (dataSize < 1) {
                // The original wrote a zero size to idx before throwing; the write was moot.
                throw IllegalStateException("object skip ${file.id}")
            } else {
                val fileId = file.id
                if (dataSize >= 65535) throw IllegalStateException("TOO LARGE OBJECT SIZE $dataSize for ${fromArchive.id}:$fileId")

                idx.writeShort(dataSize)
                dat.writeBytes(data)

                if (fileId > highestFileId) {
                    highestFileId = fileId
                }
            }
        }

        idx.writeShort(-1) // EOF
        idx.setShort(0, highestFileId)

        dat.readerIndex(0)
        val datArray = ByteArray(dat.readableBytes())
        dat.readBytes(datArray)

        idx.readerIndex(0)
        val idxArray = ByteArray(idx.readableBytes())
        idx.readBytes(idxArray)

        cacheTo.put(0, 2, "loc.dat", datArray)
        cacheTo.put(0, 2, "loc.idx", idxArray)
        cacheTo.index(0).update()

        println("object highest $highestFileId")
    }
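
    /**
     * Flattens underlay configs from OSRS config archive 1 into
     * "underlays.dat" using the same (ID, length, data) layout as [seq];
     * missing files are recorded as a -1 ID.
     */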
    private fun underlays(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        var highestFileId = -1
        var biggestSize = 0

        val buf = Unpooled.buffer()
        buf.writeShort(0)

        val configIndex = cacheFrom.index(2)
        configIndex.cache()

        val fromArchive = configIndex.archive(1)!!
        for (file in fromArchive.files()) {
            val data = file.data
            if (data == null || data.size < 1) {
                println("skipped underlay file ${file.id} (no data)")
                buf.writeShort(-1)
            } else {
                val fileId = file.id
                buf.writeShort(fileId)
                buf.writeShort(data.size)
                buf.writeBytes(data)

                if (fileId > highestFileId)
                    highestFileId = fileId
                if (data.size > biggestSize)
                    biggestSize = data.size
            }
        }

        buf.setShort(0, highestFileId)

        buf.readerIndex(0)
        val array = ByteArray(buf.readableBytes())
        buf.readBytes(array)

        println("underlays highest $highestFileId and biggest was $biggestSize (total bytes=${array.size})")

        cacheTo.put(0, 2, "underlays.dat", array)
        cacheTo.index(0).update()
    }
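
    /**
     * Flattens overlay configs from OSRS config archive 4 into
     * "overlays.dat", mirroring the [underlays] layout.
     */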
    private fun overlays(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        var highestFileId = -1
        var biggestSize = 0

        val buf = Unpooled.buffer()
        buf.writeShort(0)

        val configIndex = cacheFrom.index(2)
        configIndex.cache()

        val fromArchive = configIndex.archive(4)!!
        for (file in fromArchive.files()) {
            val data = file.data
            if (data == null || data.size < 1) {
                println("skipped overlay file ${file.id} (no data)")
                buf.writeShort(-1)
            } else {
                val fileId = file.id
                buf.writeShort(fileId)
                buf.writeShort(data.size)
                buf.writeBytes(data)

                if (fileId > highestFileId)
                    highestFileId = fileId
                if (data.size > biggestSize)
                    biggestSize = data.size
            }
        }

        buf.setShort(0, highestFileId)

        buf.readerIndex(0)
        val array = ByteArray(buf.readableBytes())
        buf.readBytes(array)

        println("overlays highest $highestFileId and biggest was $biggestSize (total bytes=${array.size})")

        cacheTo.put(0, 2, "overlays.dat", array)
        cacheTo.index(0).update()
    }
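
    /**
     * Packs texture definitions from the OSRS textures index (9), archive 0,
     * into "textures.dat": an unsigned short highest file ID, then per file
     * an unsigned short ID, an unsigned short length, and the raw data.
     */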
    private fun textures(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        val fromTexturesIndex = cacheFrom.index(9)
        fromTexturesIndex.cache()

        val idx = Unpooled.buffer()

        var highestFileId = 0
        idx.writeShort(highestFileId) // placeholder

        val archive = fromTexturesIndex.archive(0)!!
        for (file in archive.files()) {
            val id = file.id
            val data = file.data!!

            val dataSize = data.size
            if (dataSize >= 65535)
                throw IllegalStateException("Too large texture data for file $id (size=$dataSize)")

            idx.writeShort(id)
            idx.writeShort(dataSize)
            idx.writeBytes(data)

            //cacheTo.put(0, 6, "${id}.dat", data)

            if (id > highestFileId) {
                highestFileId = id
            }
        }

        idx.setShort(0, highestFileId)

        val idxArray = ByteArray(idx.readableBytes())
        idx.readBytes(idxArray)
        idx.release()

        cacheTo.put(0, 2, "textures.dat", idxArray)
        cacheTo.index(0).update()

        println("packed textures highest=$highestFileId")
    }
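
    /**
     * Packs item configs from OSRS config archive 10 into the 317
     * "obj.dat"/"obj.idx" pair, using the same idx layout as [npc].
     */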
    private fun items(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        var highestFileId = -1

        val idx = Unpooled.buffer()
        idx.writeShort(0)

        val dat = Unpooled.directBuffer()

        val configIndex = cacheFrom.index(2)
        configIndex.cache()

        val fromArchive = configIndex.archive(10)!!
        for (file in fromArchive.files()) {
            val data = file.data!!
            val dataSize = data.size
            if (dataSize < 1) {
                // The original wrote a zero size to idx before throwing; the write was moot.
                throw IllegalStateException("item skip ${file.id}")
            } else {
                val fileId = file.id
                if (dataSize >= 65535) throw IllegalStateException("TOO LARGE ITEM SIZE! $dataSize for ${fromArchive.id}:$fileId")

                idx.writeShort(dataSize)
                dat.writeBytes(data)

                if (fileId > highestFileId) {
                    highestFileId = fileId
                }
            }
        }

        idx.writeShort(-1) // EOF
        idx.setShort(0, highestFileId)

        dat.readerIndex(0)
        val datArray = ByteArray(dat.readableBytes())
        dat.readBytes(datArray)

        idx.readerIndex(0)
        val idxArray = ByteArray(idx.readableBytes())
        idx.readBytes(idxArray)

        cacheTo.put(0, 2, "obj.dat", datArray)
        cacheTo.put(0, 2, "obj.idx", idxArray)
        cacheTo.index(0).update()

        println("item highest $highestFileId")
    }
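
    /**
     * Flattens graphic (spotanim) configs from OSRS config archive 13 into
     * "spotanim.dat" using the [underlays] layout plus a -1 terminator.
     */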
    private fun graphics(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        var highestFileId = -1
        var biggestSize = 0

        val buf = Unpooled.buffer()
        buf.writeShort(0)

        val configIndex = cacheFrom.index(2)
        configIndex.cache()

        val fromArchive = configIndex.archive(13)!!
        for (file in fromArchive.files()) {
            val data = file.data!!
            val dataSize = data.size
            if (dataSize < 1) {
                println("skipped spotanim file ${file.id} (no data)")
                buf.writeShort(-1)
            } else {
                val fileId = file.id
                if (dataSize >= 65535) throw IllegalStateException("TOO LARGE GRAPHIC! ${fromArchive.id}:$fileId size was $dataSize")

                buf.writeShort(fileId)
                buf.writeShort(dataSize)
                buf.writeBytes(data)

                if (fileId > highestFileId)
                    highestFileId = fileId
                if (dataSize > biggestSize)
                    biggestSize = dataSize
            }
        }

        buf.writeShort(-1)
        buf.setShort(0, highestFileId)

        buf.readerIndex(0)
        val array = ByteArray(buf.readableBytes())
        buf.readBytes(array)

        println("spotanim highest $highestFileId and biggest was $biggestSize (total bytes=${array.size})")

        cacheTo.put(0, 2, "spotanim.dat", array)
        cacheTo.index(0).update()
    }
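
    /**
     * Repacks map (m) and landscape (l) files for every region into index 4,
     * decrypting landscapes with XTEA keys from xteas.json (zero keys as a
     * fallback), and writes a "map_index" of (region, mapFile, landFile)
     * unsigned short triples prefixed by the region count. Note that
     * DefaultXteaRepository is assumed to come from the surrounding project;
     * the original gist carries no import for it.
     */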
    private fun maps(cacheFrom: CacheLibrary, cacheTo: CacheLibrary) {
        val idx = Unpooled.buffer()
        idx.writeShort(0)

        var mapCount = 0
        var fileId = 0

        val xteas = DefaultXteaRepository.load(path = Path.of(CACHE_FROM_PATH, "xteas.json"))
        println("Loaded ${xteas.size} xteas")

        val defaultXtea = intArrayOf(0, 0, 0, 0)
        for (region in 0..65535) {
            val x = (region ushr 8) and 0xFF
            val y = region and 0xFF

            val mapName = "m${x}_$y"
            val mapData = cacheFrom.data(5, mapName, 0) ?: continue

            val landName = "l${x}_$y"
            val landData = cacheFrom.data(5, landName, 0, xteas.get(region)?.key ?: defaultXtea) ?: continue

            val mapFileId = fileId++
            val landFileId = fileId++

            cacheTo.remove(4, mapFileId)
            cacheTo.put(4, mapFileId, mapData)

            cacheTo.remove(4, landFileId)
            cacheTo.put(4, landFileId, landData)

            idx.writeShort(region)
            idx.writeShort(mapFileId)
            idx.writeShort(landFileId)

            mapCount++
            println("for region $region ($x,$y) map=$mapFileId and land=$landFileId")
        }

        idx.setShort(0, mapCount)

        val idxArray = ByteArray(idx.writerIndex())
        idx.readBytes(idxArray)

        cacheTo.put(0, 5, "map_index", idxArray)
        cacheTo.index(0).update()
        cacheTo.index(4).update()
    }
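
    /**
     * Empties (or creates) the rebuild directory, then rebuilds the
     * destination cache into it for a clean, compacted copy.
     */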
    private fun rebuild() {
        val rebuildFile = File(REBUILD_DIRECTORY_PATH)
        if (rebuildFile.exists()) {
            rebuildFile.listFiles()?.forEach {
                if (!it.delete()) {
                    throw IllegalStateException("Failed to delete rebuild directory file \"${it.name}\"")
                }
            }
        } else if (!rebuildFile.mkdirs()) {
            throw IllegalStateException("Failed to create rebuild directory \"${REBUILD_DIRECTORY_PATH}\"")
        }

        CacheLibrary.create(CACHE_TO_PATH).rebuild(rebuildFile)
    }
}