Skip to content

Instantly share code, notes, and snippets.

@mertcanaltin
Last active November 2, 2025 15:37
Show Gist options
  • Save mertcanaltin/40629f16d79f0b60cd10911ffc6f64eb to your computer and use it in GitHub Desktop.
test-streaming.js — a benchmark worker demonstrating the workerd `buffer.copy` performance bottleneck.
import { Buffer } from 'node:buffer';
// 100 chunks × 4KB benchmark
/**
 * Benchmark worker comparing two ways of assembling `numChunks` chunks of
 * `chunkSize` bytes into a single buffer:
 *   - `Buffer.prototype.copy` (the suspected workerd bottleneck), and
 *   - `TypedArray.prototype.set`.
 *
 * GET /benchmark query parameters:
 *   mode       'local' (default) or 'io' — 'io' interleaves real I/O via the
 *              optional TICK service binding; falls back to 'local' without it
 *   reuse      '1' to preallocate the destination buffer once and reuse it
 *   numChunks  number of source chunks (default 100)
 *   chunkSize  bytes per chunk (default 4096)
 *   iterations timing iterations (default 100)
 *   minMs      minimum wall-clock time per local measurement (default 250)
 *
 * Responds with a JSON report (timings, throughput, comparison ratios, and a
 * data-integrity check). Any thrown error becomes a JSON 500 response.
 */
const handler = {
  async fetch(request, env) {
    const url = new URL(request.url);

    // Optional I/O round-trip used by 'io' mode; a no-op when no TICK binding
    // is configured.
    const tick = env?.TICK
      ? async () => { await env.TICK.fetch(new Request('http://tick/')); }
      : async () => {};

    try {
      // Guard clause: anything but /benchmark gets the usage hint.
      if (url.pathname !== '/benchmark') {
        return new Response('Use /benchmark endpoint', { status: 404 });
      }

      const mode = url.searchParams.get('mode') || 'local'; // 'local' | 'io'
      const reuse = url.searchParams.get('reuse') === '1';  // prealloc + reuse

      // Numeric parameter with fallback. Non-numeric input falls back to the
      // default instead of propagating NaN into Buffer.allocUnsafe (which
      // would throw and turn the whole request into a 500).
      const numParam = (name, dflt) => {
        const n = Number(url.searchParams.get(name) ?? dflt);
        return Number.isFinite(n) ? n : dflt;
      };
      const numChunks = numParam('numChunks', 100);
      const chunkSize = numParam('chunkSize', 4096);
      const iterations = numParam('iterations', 100);
      const minMs = numParam('minMs', 250); // grow the workload until it runs at least this long
      const totalSize = numChunks * chunkSize;

      // Test data: chunk i holds bytes (i + j) % 256 so the result can be
      // verified for integrity after the benchmark.
      const chunks = [];
      for (let i = 0; i < numChunks; i++) {
        const b = Buffer.allocUnsafe(chunkSize);
        for (let j = 0; j < chunkSize; j++) b[j] = (i + j) % 256;
        chunks.push(b);
      }

      // Warmup so the JIT optimizes the hot copy loop before we measure it.
      for (let i = 0; i < 100; i++) {
        const result = Buffer.allocUnsafe(totalSize);
        let offset = 0;
        for (const c of chunks) { c.copy(result, offset); offset += c.length; }
      }

      // Time `fn` on the local isolate. Doubles `repeat` until one measured
      // pass lasts at least `minMs` (capped at 2^22) so very short operations
      // are still timed reliably, then subtracts the empty-loop baseline.
      function measureLocal(fn, { iterations, minMs }) {
        let repeat = 1;
        let elapsed = 0;
        // Cost of the bare loop scaffolding at the final repeat count.
        function baseline(rc) {
          const t0 = performance.now();
          for (let it = 0; it < iterations; it++) {
            for (let r = 0; r < rc; r++) { /* empty loop */ }
          }
          return performance.now() - t0;
        }
        while (true) {
          const t0 = performance.now();
          for (let it = 0; it < iterations; it++) {
            for (let r = 0; r < repeat; r++) fn();
          }
          elapsed = performance.now() - t0;
          if (elapsed >= minMs || repeat > (1 << 22)) break;
          repeat <<= 1;
        }
        const base = baseline(repeat);
        const net = Math.max(0, elapsed - base);
        const perOpMs = net / (iterations * repeat);
        return { totalMs: net, perOpMs, repeat, iterations };
      }

      // Time `fn` sandwiched between real I/O round-trips, subtracting the
      // cost of the I/O alone. Only used when the TICK binding is available.
      async function measureIO(fn, { iterations }) {
        // I/O-only baseline.
        const t0 = performance.now();
        for (let i = 0; i < iterations; i++) { await tick(); await tick(); }
        const base = performance.now() - t0;
        // I/O + compute.
        const t1 = performance.now();
        for (let i = 0; i < iterations; i++) { await tick(); fn(); await tick(); }
        const comb = performance.now() - t1;
        const net = Math.max(0, comb - base);
        const perOpMs = net / iterations;
        return { totalMs: net, perOpMs, repeat: 1, iterations };
      }

      // Destination buffer: allocated once when reuse=1, otherwise per call.
      const prealloc = reuse ? Buffer.allocUnsafe(totalSize) : null;
      const copyOnce = () => {
        const result = prealloc ?? Buffer.allocUnsafe(totalSize);
        let offset = 0;
        for (const c of chunks) { c.copy(result, offset); offset += c.length; }
      };
      const setOnce = () => {
        const result = prealloc ?? Buffer.allocUnsafe(totalSize);
        let offset = 0;
        for (const c of chunks) { result.set(c, offset); offset += c.length; }
      };

      // 'io' mode silently falls back to local timing without a TICK binding;
      // the response's `mode` field reports the fallback.
      const runner = (mode === 'io' && env?.TICK) ? measureIO : measureLocal;
      const mCopy = await runner(copyOnce, { iterations, minMs });
      const mSet = await runner(setOnce, { iterations, minMs });

      // Per-operation, per-chunk, and throughput figures.
      const avgTimeCopy = mCopy.perOpMs;
      const avgTimeSet = mSet.perOpMs;
      const perChunkTimeCopy = avgTimeCopy / numChunks;
      const perChunkTimeSet = avgTimeSet / numChunks;
      const throughputCopyMBs = (totalSize / 1024 / 1024) / (avgTimeCopy / 1000);
      const throughputSetMBs = (totalSize / 1024 / 1024) / (avgTimeSet / 1000);

      // Integrity check: rebuild the result once with Buffer.copy and verify
      // every byte matches the (i + j) % 256 pattern used to fill the chunks.
      const testResult = Buffer.allocUnsafe(totalSize);
      {
        let offset = 0;
        for (const c of chunks) { c.copy(testResult, offset); offset += c.length; }
      }
      let correct = true;
      for (let i = 0; i < numChunks && correct; i++) {
        const off = i * chunkSize;
        for (let j = 0; j < chunkSize; j++) {
          if (testResult[off + j] !== ((i + j) % 256)) { correct = false; break; }
        }
      }

      return Response.json({
        runtime: 'workerd',
        mode: (mode === 'io' && !env?.TICK) ? 'local(fallback-no-TICK)' : mode,
        reuse,
        test: 'Streaming Buffer.copy Benchmark',
        testData: {
          numChunks, chunkSize, totalBytes: totalSize,
          iterations, minMs,
          repeat: { copy: mCopy.repeat, set: mSet.repeat }
        },
        results: {
          bufferCopy: {
            avgTimeMs: avgTimeCopy.toFixed(4),
            perChunkUs: (perChunkTimeCopy * 1000).toFixed(2),
            throughputMBs: throughputCopyMBs.toFixed(2)
          },
          typedArraySet: {
            avgTimeMs: avgTimeSet.toFixed(4),
            perChunkUs: (perChunkTimeSet * 1000).toFixed(2),
            throughputMBs: throughputSetMBs.toFixed(2)
          }
        },
        comparison: {
          // Both ratios are copy/set: "set is Nx faster" == "copy is Nx slower".
          speedup: (avgTimeCopy / avgTimeSet).toFixed(2) + 'x',
          bufferCopySlowerBy: (avgTimeCopy / avgTimeSet).toFixed(2) + 'x',
          overhead: (((avgTimeCopy - avgTimeSet) / avgTimeSet) * 100).toFixed(1) + '%'
        },
        correctness: { dataIntegrity: correct ? 'PASS' : 'FAIL' }
      });
    } catch (err) {
      return new Response(
        JSON.stringify({ error: String(err), stack: err?.stack }),
        { status: 500, headers: { 'content-type': 'application/json' } }
      );
    }
  }
};

export default handler;
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment