Last active
September 5, 2023 03:06
-
-
Save image72/975545dbf905dc539d46dcb6add5d71b to your computer and use it in GitHub Desktop.
Compression utilities (Node zlib and Web CompressionStream variants) with a standard comparison/timing test.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
const zlib = require('zlib'); | |
/**
 * Compress a string with 'deflate' (zlib wrapper) via CompressionStream and
 * return the result as a base64 string.
 *
 * Bug fixes vs. the original:
 * - `compressionStream.pipeTo(writer)` was invalid: CompressionStream is not
 *   a stream itself (use `.readable`/`.writable`), and `pipeTo` takes a
 *   WritableStream, not a writer.
 * - The input was never actually written into the compressor's writable side.
 * - Chunks are Uint8Arrays, not Blobs, so `chunk.arrayBuffer()` would throw.
 *
 * @param {string} data - UTF-8 text to compress.
 * @returns {Promise<string>} base64-encoded deflate stream.
 */
async function deflate(data) {
  const input = new TextEncoder().encode(data);
  const compressionStream = new CompressionStream('deflate');
  // Feed the input into the compressor; close() flushes the final block.
  const writer = compressionStream.writable.getWriter();
  const writeDone = writer.write(input).then(() => writer.close());
  // Drain the compressed side into an array of Uint8Array chunks.
  const deflatedChunks = [];
  await compressionStream.readable.pipeTo(
    new WritableStream({
      write(chunk) {
        deflatedChunks.push(chunk);
      },
    })
  );
  await writeDone;
  // Concatenate the chunks at their accumulated byte offsets.
  const total = deflatedChunks.reduce((acc, chunk) => acc + chunk.byteLength, 0);
  const concatenated = new Uint8Array(total);
  let offset = 0;
  for (const chunk of deflatedChunks) {
    concatenated.set(chunk, offset);
    offset += chunk.byteLength;
  }
  // Binary string -> base64, chunked so fromCharCode never overflows the stack.
  let binary = '';
  for (let i = 0; i < concatenated.length; i += 0x8000) {
    binary += String.fromCharCode(...concatenated.subarray(i, i + 0x8000));
  }
  return btoa(binary);
}
/**
 * Decompress a base64-encoded 'deflate' stream back to a UTF-8 string.
 * @param {string} data - base64 of zlib-deflated bytes.
 * @returns {Promise<string>} the original text.
 */
async function inflate(data) {
  const binary = atob(data);
  const compressed = Uint8Array.from(binary, (ch) => ch.charCodeAt(0));
  // Wrap the bytes in a one-shot ReadableStream and run it through the decompressor.
  const source = new ReadableStream({
    start(controller) {
      controller.enqueue(compressed);
      controller.close();
    },
  });
  const reader = source.pipeThrough(new DecompressionStream('deflate')).getReader();
  const parts = [];
  let total = 0;
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    parts.push(value);
    total += value.length;
  }
  // Stitch the decompressed chunks back together before decoding.
  const merged = new Uint8Array(total);
  let cursor = 0;
  for (const part of parts) {
    merged.set(part, cursor);
    cursor += part.length;
  }
  return new TextDecoder().decode(merged);
}
// zlib-deflate a UTF-8 string (synchronously) and return the bytes as base64.
const deflateAndBase64Encode = (stringVal) => {
  const raw = Buffer.from(stringVal, 'utf8');
  const deflated = zlib.deflateSync(raw);
  return deflated.toString('base64'); // .slice(2, -4)
};
// Base64-decode then zlib-inflate back to a UTF-8 string.
// (Name kept for caller compatibility; it actually *decodes*.)
const inflateAndBase64Encode = (data) => {
  const compressed = Buffer.from(data, 'base64');
  return zlib.inflateSync(compressed).toString('utf8');
};
/**
 * JSON-stringify a value, gzip it via CompressionStream, and return base64.
 * @param {*} data - any JSON-serializable value.
 * @returns {Promise<string>} base64 of the gzipped JSON text.
 */
async function compress(data) {
  const json = JSON.stringify(data);
  const gzipped = new Blob([json], { type: 'application/json' })
    .stream()
    .pipeThrough(new CompressionStream('gzip'));
  // Response is a convenient collector for a ReadableStream.
  const buffer = await new Response(gzipped).arrayBuffer();
  return b64encode(buffer);
}
/* example usage — these refer to the Response-returning variants sketched in
   the commented-out export block below (which take a format argument of
   'gzip' | 'deflate' | 'deflate-raw'), NOT the compress/decompress above:
await compress(body, 'deflate').arrayBuffer()
await compress(body, 'deflate').blob()
await compress(body, 'deflate').body.pipeTo(new WritableStream({ ... }))
await decompress(body, 'deflate').arrayBuffer()
await decompress(body, 'deflate').blob()
await decompress(body, 'deflate-raw').body.pipeTo(dest)
await decompress(body, 'deflate-raw').text()
await decompress(body, 'deflate-raw').json()
*/
/**
 * Reverse of compress(): base64 -> gunzip -> JSON.parse.
 * @param {string} index - base64-encoded gzipped JSON.
 * @returns {Promise<*>} the parsed value.
 */
async function decompress(index) {
  const bytes = b64decode(index);
  const stream = new Blob([bytes], { type: 'application/json' })
    .stream()
    .pipeThrough(new DecompressionStream('gzip'));
  const text = await new Response(stream).text();
  return JSON.parse(text);
}
// Encode an ArrayBuffer (or TypedArray's buffer) as a base64 string.
function b64encode(buffer) {
  const bytes = new Uint8Array(buffer);
  let binary = '';
  for (const byte of bytes) {
    binary += String.fromCharCode(byte);
  }
  return btoa(binary);
}
// Decode a base64 string into a Uint8Array of raw bytes.
function b64decode(str) {
  const binaryString = atob(str);
  return Uint8Array.from(binaryString, (ch) => ch.charCodeAt(0));
}
/**
 * Compress a string with 'deflate' via CompressionStream and resolve with a
 * base64 string. (Name kept for compatibility — it is actually async.)
 *
 * Bug fix: chunks were previously concatenated with `set(chunk, i)` — using
 * the chunk *index* as the byte offset — which corrupted any output larger
 * than one chunk. We now accumulate a running byte offset. Base64 conversion
 * is also chunked so `String.fromCharCode(...)` cannot overflow the stack.
 *
 * @param {string} input - UTF-8 text to compress.
 * @returns {Promise<string>} base64-encoded deflate stream.
 */
function deflateSync(input) {
  const inputBuffer = new TextEncoder().encode(input);
  const deflateStream = new CompressionStream('deflate');
  const writer = deflateStream.writable.getWriter();
  writer.write(inputBuffer);
  writer.close();
  const reader = deflateStream.readable.getReader();
  const chunks = [];
  function pump() {
    return reader.read().then(({ done, value }) => {
      if (!done) {
        chunks.push(value);
        return pump();
      }
      // Concatenate chunks at the accumulated byte offset.
      const totalLength = chunks.reduce((total, arr) => total + arr.length, 0);
      const outputBuffer = new Uint8Array(totalLength);
      let offset = 0;
      for (const chunk of chunks) {
        outputBuffer.set(chunk, offset);
        offset += chunk.length;
      }
      // Bytes -> binary string -> base64.
      let binary = '';
      for (let i = 0; i < outputBuffer.length; i += 0x8000) {
        binary += String.fromCharCode(...outputBuffer.subarray(i, i + 0x8000));
      }
      return btoa(binary);
    });
  }
  return pump();
}
/**
 * Decompress a base64 'deflate' stream and resolve with the UTF-8 text.
 * (Name kept for compatibility — it is actually async.)
 *
 * Bug fixes:
 * - chunks were concatenated with `set(chunk, i)` (chunk index as offset),
 *   corrupting any multi-chunk output; we now track a running byte offset.
 * - DecompressionStream takes only a format string; the previous
 *   `{ raw: true }` second argument was silently ignored and is removed.
 *
 * @param {string} input - base64 of zlib-deflated bytes.
 * @returns {Promise<string>} the decompressed text.
 */
function inflateSync(input) {
  // Base64 decode into raw bytes.
  const binary = atob(input);
  const inputBuffer = Uint8Array.from(binary, (c) => c.charCodeAt(0));
  const inflateStream = new DecompressionStream('deflate');
  const writer = inflateStream.writable.getWriter();
  writer.write(inputBuffer);
  writer.close();
  const reader = inflateStream.readable.getReader();
  const chunks = [];
  function pump() {
    return reader.read().then(({ done, value }) => {
      if (!done) {
        chunks.push(value);
        return pump();
      }
      // Concatenate chunks at the accumulated byte offset.
      const totalLength = chunks.reduce((total, arr) => total + arr.length, 0);
      const outputBuffer = new Uint8Array(totalLength);
      let offset = 0;
      for (const chunk of chunks) {
        outputBuffer.set(chunk, offset);
        offset += chunk.length;
      }
      return new TextDecoder().decode(outputBuffer);
    });
  }
  return pump();
}
/**
 * Compress a string with 'deflate' and return the raw compressed bytes.
 * @param {string} data - UTF-8 text to compress.
 * @returns {Promise<Uint8Array>} the compressed bytes.
 */
const deflateCompress = async (data) => {
  const stream = new CompressionStream('deflate'); // deflate-raw
  const writer = stream.writable.getWriter();
  writer.write(new TextEncoder().encode(data));
  writer.close();
  const pieces = [];
  let size = 0;
  const reader = stream.readable.getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    pieces.push(value);
    size += value.length;
  }
  // Merge the chunks into one contiguous buffer.
  const output = new Uint8Array(size);
  let cursor = 0;
  for (const piece of pieces) {
    output.set(piece, cursor);
    cursor += piece.length;
  }
  return output;
};
/**
 * Decompress raw 'deflate' bytes back to a UTF-8 string.
 * @param {Uint8Array} data - compressed bytes (as produced by deflateCompress).
 * @returns {Promise<string>} the decompressed text.
 */
const deflateDecompress = async (data) => {
  const stream = new DecompressionStream('deflate'); // deflate-raw
  const writer = stream.writable.getWriter();
  writer.write(data);
  writer.close();
  const pieces = [];
  let size = 0;
  const reader = stream.readable.getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    pieces.push(value);
    size += value.length;
  }
  // Merge the chunks, then decode as UTF-8.
  const merged = new Uint8Array(size);
  let cursor = 0;
  for (const piece of pieces) {
    merged.set(piece, cursor);
    cursor += piece.length;
  }
  return new TextDecoder().decode(merged);
};
// var blob = new Blob([Uint8Array.from(atob(text), m => m.codePointAt(0))]) | |
// var stream = blob.stream().pipeThrough(new DecompressionStream("gzip")) | |
// var data = await new Response(stream).json() | |
const str1 = `{a: 1, b: 2}`;
// Compare the zlib-based helpers against the CompressionStream-based ones,
// timing each with console.time/console.timeEnd.
(async () => {
  console.time('deflateAndBase64Encode');
  const encodedString1 = deflateAndBase64Encode(str1);
  console.log('deflateAndBase64Encode-str', encodedString1);
  console.timeEnd('deflateAndBase64Encode');
  console.time('inflateAndBase64Encode');
  const decodedString1 = inflateAndBase64Encode(encodedString1);
  console.log('inflateAndBase64Encode-str', decodedString1);
  console.timeEnd('inflateAndBase64Encode');
  console.time('compress');
  const encodedString3 = await compress(str1);
  console.log('compress-str', encodedString3);
  console.timeEnd('compress');
  console.time('decompress');
  const decodedString3 = await decompress(encodedString3);
  console.log('decompress-str', decodedString3);
  console.timeEnd('decompress');
  // console.time('deflate');
  // const encodedString2 = await deflate(str1);
  // console.log('deflate-str', encodedString2);
  // console.timeEnd('deflate');
  // console.time('inflate');
  // const decodedString2 = await inflate(encodedString2);
  // console.log('inflate-str', decodedString2);
  // console.timeEnd('inflate');
  console.time('deflateSync');
  const encodedString4 = await deflateSync(str1);
  console.log('deflateSync-str', encodedString4);
  console.timeEnd('deflateSync');
  console.time('inflateSync');
  const decodedString4 = await inflateSync(encodedString4);
  console.log('inflateSync-str', decodedString4);
  console.timeEnd('inflateSync');
  // Bug fix: the original also compared encodedString2/decodedString2, which
  // only exist when the commented-out deflate/inflate calls above are
  // enabled, so it threw a ReferenceError before printing any results.
  console.log(str1 === decodedString1); // true
  console.log(str1 === decodedString3); // true
  console.log(str1 === decodedString4); // true
})();
// deflate | |
// deflate: compress a string and resolve with the raw compressed bytes.
// @param {string} data
// @returns {Promise<Uint8Array>}
const asyncDeflate = async (data) => {
  const cs = new CompressionStream('deflate');
  const writer = cs.writable.getWriter();
  writer.write(new TextEncoder().encode(data));
  writer.close();
  // Collect the whole compressed stream via Response.
  const buffer = await new Response(cs.readable).arrayBuffer();
  return new Uint8Array(buffer);
  // return import('pako').then(pako => pako.deflate(data))
};
// inflate | |
// inflate: decompress raw 'deflate' bytes and resolve with the UTF-8 text.
//
// Bug fix: the original passed `new TextDecoder().decode` as an unbound
// callback, which throws a TypeError at runtime because decode() requires
// its TextDecoder `this`. (The pako comment also said `deflate` — wrong
// direction for an inflate helper.)
//
// @param {Uint8Array} data
// @returns {Promise<string>}
const asyncInflate = async (data) => {
  const ds = new DecompressionStream('deflate');
  const writer = ds.writable.getWriter();
  writer.write(data);
  writer.close();
  const buffer = await new Response(ds.readable).arrayBuffer();
  return new TextDecoder().decode(new Uint8Array(buffer));
  // return import('pako').then(pako => pako.inflate(data))
};
// /** @typedef {'gzip' | 'deflate' | 'deflate-raw'} SupportedFormats */ | |
// /** | |
// * @param {*} body | |
// * @param {SupportedFormats} [format] | |
// */ | |
// export var compress = (data, format = 'gzip') => new Response( | |
// new Response(data).body | |
// .pipeThrough(new CompressionStream(format))) | |
// /** | |
// * @param {*} body | |
// * @param {SupportedFormats} [format] | |
// */ | |
// export var decompress = (data, format = 'gzip') => new Response( | |
// new Response(data).body | |
// .pipeThrough(new DecompressionStream(format))) | |
/**
 * Gzip `data` and store it in the Cache Storage under "/data".
 * Browser/Service-Worker only: requires the global `caches` API.
 * @param {Uint8Array} data
 */
async function persistDataCache(data) {
  const compressed = new Blob([data])
    .stream()
    .pipeThrough(new CompressionStream('gzip'));
  const cache = await caches.open("my-cache");
  await cache.put("/data", new Response(compressed));
}
/**
 * Store `data` (a Uint8Array) uncompressed in the Cache Storage under "/data".
 * Browser/Service-Worker only: requires the global `caches` API.
 * @param {Uint8Array} data
 */
async function persistDataCache1(data) {
  const cache = await caches.open("my-cache");
  await cache.put("/data", new Response(data));
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment