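# Repacker for a Super Smash Bros. Ultimate .nus3audio archive: parse the
# chunk sequence (NUS3 header, AUDIINDX, TNID, NMOF, ADOF, TNNM, JUNK, PACK),
# swap in a replacement audio payload, then rebuild the file with recomputed
# sizes and offsets.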
import os, struct, zlib
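
# Padding needed so that, after the pad plus the next 8-byte chunk header,
# data lands on a 16-byte boundary, e.g. get_padding_amount(41) == 15 and
# (41 + 15 + 8) % 16 == 0.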
def get_padding_amount(offset):
    return ((24 - (offset % 16)) % 16)

def intToBytes(number):
    return int(number).to_bytes(4, byteorder="little", signed=False)

nus3file = open(r'D:\Modding\Super Smash Bros. Ultimate\Audio\new_test.nus3audio', "r+b")

info = {
    "HEADER": {},
    "AUDIINDX": {},
    "TNID": {},
    "NMOF": {},
    "ADOF": {},
    "TNNM": {},
    "JUNK": {},
    "PACK": {},
}
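
# --- Parse phase: read each chunk, in file order, into info ---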
#region Header
info["HEADER"]["magic"] = nus3file.read(4).decode()
info["HEADER"]["filesize"] = struct.unpack("<I", nus3file.read(4))[0]
#endregion
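# AUDIINDX stores the track count, which sizes every chunk that follows.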
#region Audiindx
info["AUDIINDX"]["magic"] = nus3file.read(8).decode()
info["AUDIINDX"]["size"] = struct.unpack("<I", nus3file.read(4))[0]
info["AUDIINDX"]["count"] = struct.unpack("<I", nus3file.read(4))[0]
#endregion
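# TNID holds one 32-bit tone ID per track; if the declared size does not
# cover that many IDs, keep the raw bytes instead.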
#region Tone ID
info["TNID"]["magic"] = nus3file.read(4).decode()
info["TNID"]["size"] = struct.unpack("<I", nus3file.read(4))[0]
if info["TNID"]["size"] >= info["AUDIINDX"]["count"] * 4:
    info["TNID"]["tracknumbers"] = []
    for x in range(info["AUDIINDX"]["count"]):
        info["TNID"]["tracknumbers"].append(struct.unpack("<I", nus3file.read(4))[0])
else:
    info["TNID"]["data"] = []
    for x in range(info["TNID"]["size"]):
        info["TNID"]["data"].append(nus3file.read(1))
#endregion
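# NMOF holds the absolute file offset of each track name in TNNM, with the
# same raw-bytes fallback.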
#region NMOF
info["NMOF"]["magic"] = nus3file.read(4).decode()
info["NMOF"]["size"] = struct.unpack("<I", nus3file.read(4))[0]
if info["NMOF"]["size"] >= info["AUDIINDX"]["count"] * 4:
    info["NMOF"]["names"] = []
    for x in range(info["AUDIINDX"]["count"]):
        info["NMOF"]["names"].append(struct.unpack("<I", nus3file.read(4))[0])
else:
    info["NMOF"]["data"] = []
    for x in range(info["NMOF"]["size"]):
        info["NMOF"]["data"].append(nus3file.read(1))
#endregion
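# ADOF holds an (offset, size) pair per track pointing at its audio data in PACK.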
#region ADOF
info["ADOF"]["magic"] = nus3file.read(4).decode()
info["ADOF"]["size"] = struct.unpack("<I", nus3file.read(4))[0]
if info["ADOF"]["size"] >= info["AUDIINDX"]["count"] * 8:
    info["ADOF"]["file_entry"] = []
    for x in range(info["AUDIINDX"]["count"]):
        file_entry = {
            "fileOffset": struct.unpack("<I", nus3file.read(4))[0],
            "fileSize": struct.unpack("<I", nus3file.read(4))[0]
        }
        info["ADOF"]["file_entry"].append(file_entry)
else:
    info["ADOF"]["data"] = []
    for x in range(info["ADOF"]["size"]):
        info["ADOF"]["data"].append(nus3file.read(1))
#endregion
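# TNNM holds the NUL-terminated track names back to back.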
#region TNNM
info["TNNM"]["magic"] = nus3file.read(4).decode()
info["TNNM"]["size"] = struct.unpack("<I", nus3file.read(4))[0]
info["TNNM"]["name_entries"] = []
nus3string = ""
for x in range(info["TNNM"]["size"]):
    res = nus3file.read(1)
    if res != b'\x00':
        nus3string += res.decode()
    else:
        info["TNNM"]["name_entries"].append(nus3string)
        nus3string = ""
#endregion
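# JUNK is alignment padding; its contents are discarded.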
#region JUNK
info["JUNK"]["magic"] = nus3file.read(4).decode()
info["JUNK"]["size"] = struct.unpack("<I", nus3file.read(4))[0]
nus3file.read(info["JUNK"]["size"])  # skip the padding bytes
#endregion
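# PACK holds the raw audio payloads; the actual bytes are pulled out below via ADOF.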
#region PACK
info["PACK"]["magic"] = nus3file.read(4).decode()
info["PACK"]["size"] = struct.unpack("<I", nus3file.read(4))[0]
#endregion
#region Link file data with fileData entry
for x in range(len(info["ADOF"]["file_entry"])):
    nus3file.seek(info["ADOF"]["file_entry"][x]["fileOffset"])
    info["ADOF"]["file_entry"][x]["fileData"] = nus3file.read(info["ADOF"]["file_entry"][x]["fileSize"])
#endregion

# Close original nus3file
nus3file.close()
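# "test.lopus" is read from the current working directory; its contents
# replace the audio of track 0.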
#region Open and Save test.lopus to one of the entries
custom_lopus = open("test.lopus", "rb")
info["ADOF"]["file_entry"][0]["fileData"] = custom_lopus.read()
custom_lopus.close()
#endregion
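
# --- Rebuild phase: recompute every chunk size and offset from scratch ---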
string_offsets = []
file_offsets = []
nus3_size = len("NUS3") + 4
audi_size = len("AUDIINDX") + (4 * 2)
tnid_size = len("TNID") + 4 + (4 * len(info["ADOF"]["file_entry"]))
nmof_size = tnid_size
adof_size = len("ADOF") + 4 + (4 * len(info["ADOF"]["file_entry"]) * 2)
string_section_start = nus3_size + audi_size + tnid_size + nmof_size + adof_size + len("TNNM") + 4
string_section_size = 0
for x in range(len(info["TNNM"]["name_entries"])):
    string_offsets.append(string_section_start + string_section_size)
    string_section_size += len(info["TNNM"]["name_entries"][x]) + 1  # +1 for the trailing NUL
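# JUNK pads the file so the PACK payload (after PACK's own 8-byte header)
# starts on a 16-byte boundary.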
junk_pad = get_padding_amount(string_section_start + string_section_size + len("JUNK") + 4)
junk_size = len("JUNK") + 4 + junk_pad
pack_section_start = string_section_start + string_section_size + junk_size + len("PACK") + 4
pack_section_size_no_pad = 0
pack_section_size = 0
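# Deduplicate payloads by CRC32: identical files are packed once and share one
# (offset, size) pair in ADOF. (CRC32 collisions are assumed not to happen here.)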
existing_files = {}
files_to_pack = []
for x in range(len(info["ADOF"]["file_entry"])):
    data_hash = zlib.crc32(info["ADOF"]["file_entry"][x]["fileData"])
    offset_pair = {}
    if data_hash not in existing_files:
        pair = {
            "fileOffset": pack_section_start + pack_section_size,
            "fileSize": len(info["ADOF"]["file_entry"][x]["fileData"])
        }
        existing_files[data_hash] = pair
        files_to_pack.append(info["ADOF"]["file_entry"][x])
        pack_section_size_no_pad = pack_section_size + len(info["ADOF"]["file_entry"][x]["fileData"])
        pack_section_size += ((len(info["ADOF"]["file_entry"][x]["fileData"]) + 15) // 16) * 16  # round each file up to 16 bytes
        offset_pair = pair
    else:
        offset_pair = existing_files[data_hash]
    file_offsets.append(offset_pair)
if len(info["ADOF"]["file_entry"]) == 1:
    pack_section_size = pack_section_size_no_pad  # a single file is stored unpadded
filesize = pack_section_start + pack_section_size
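
# --- Write phase: emit the rebuilt archive ---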
try:
    os.remove("output.nus3audio")
except FileNotFoundError:
    pass
nus3output = open("output.nus3audio", "x+b")
nus3output.write(b'NUS3')
nus3output.write(intToBytes(filesize - nus3_size))
nus3output.write(b'AUDIINDX')
nus3output.write(intToBytes(4))
nus3output.write(intToBytes(len(info["ADOF"]["file_entry"])))
nus3output.write(b'TNID')
nus3output.write(intToBytes(len(info["ADOF"]["file_entry"]) * 4))
for x in range(len(info["ADOF"]["file_entry"])):
    nus3output.write(intToBytes(x))  # tone IDs are renumbered 0..n-1
nus3output.write(b'NMOF')
nus3output.write(intToBytes(len(info["ADOF"]["file_entry"]) * 4))
for x in string_offsets:
    nus3output.write(intToBytes(x))
nus3output.write(b'ADOF')
nus3output.write(intToBytes(len(info["ADOF"]["file_entry"]) * 8))
for x in file_offsets:
    nus3output.write(intToBytes(x["fileOffset"]))
    nus3output.write(intToBytes(x["fileSize"]))
nus3output.write(b'TNNM')
nus3output.write(intToBytes(string_section_size))
for x in range(len(info["TNNM"]["name_entries"])):
    nus3output.write(info["TNNM"]["name_entries"][x].encode() + b'\x00')
nus3output.write(b'JUNK')
nus3output.write(intToBytes(junk_pad))
nus3output.write(b'\x00' * junk_pad)
nus3output.write(b'PACK')
nus3output.write(intToBytes(pack_section_size))
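# Each packed file is padded out to a 16-byte boundary so the ADOF offsets
# computed above line up.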
for x in files_to_pack:
    nus3output.write(x["fileData"])
    fill = (16 - nus3output.tell() % 16) % 16
    nus3output.write(b'\x00' * fill)
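
# Flush and close the rebuilt archive.
nus3output.close()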
  