Created
May 3, 2021 20:35
-
-
Save sploders101/5bf626e7d2148fdd5347eecbb57b88c6 to your computer and use it in GitHub Desktop.
A Readable stream that splices many different sequential streams into a single combined stream.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import { | |
Readable, | |
} from "stream"; | |
export interface SpliceEntry { | |
size: number; | |
stream: Readable | Buffer; | |
} | |
export class StreamSplicer extends Readable { | |
spliceList: SpliceEntry[]; | |
currentIndex: number = 0; | |
bytesRead: number = 0; | |
constructor(spliceList: SpliceEntry[]) { | |
super(); | |
this.spliceList = spliceList; | |
} | |
async _read(size: number) { | |
let reading = true; | |
while(reading) { | |
// Check if we need to switch or if we made a mistake | |
const currentPiece = this.spliceList[this.currentIndex]; | |
if(!currentPiece) return this.push(null); | |
const toRead = currentPiece.size - this.bytesRead; | |
if(toRead === 0) { | |
this.currentIndex++; | |
this.bytesRead = 0; | |
continue; | |
} else if(toRead < 0) throw new Error("StreamSplicer pushed too much data"); | |
// Keep reading | |
if(Buffer.isBuffer(currentPiece.stream)) { | |
reading = this.push(currentPiece.stream.slice(0, currentPiece.size)); | |
this.bytesRead = currentPiece.size; | |
} else { | |
let data: Buffer | string | null = currentPiece.stream.read(Math.min(size, toRead)); | |
if(data == null) { | |
// Wait for data to become available | |
await new Promise((res) => (currentPiece.stream as Readable).once("readable", res)); | |
} else { | |
data = Buffer.from(data); | |
this.bytesRead += Math.min(toRead, data.length); | |
reading = this.push(data.slice(0, toRead)); | |
} | |
} | |
} | |
} | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Starting in v2.0.0 of asar-async, this is used by the writer to concatenate the header and all included files into the asar archive.