Created
February 26, 2021 11:20
-
-
Save piglovesyou/537a8157ba691e8e9e023263bfc7838d to your computer and use it in GitHub Desktop.
graphql-let faster "readHash" test
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import { createReadStream, existsSync, readFileSync, promises } from "fs"; | |
import assert from 'assert' | |
// Promise-based readFile for the async variant (fs.promises).
const { readFile } = promises;

// Fixture generated by graphql-let; its first line embeds the content hash.
const filepath =
  '__tests__/.__fixtures/hmr/__generated__/src/viewer.graphql.tsx';

// Benchmark iterations per strategy.
const len = 1000;

// Every generated file starts with this prefix, immediately followed by
// a 40-char hex hash (SHA-1-sized digest).
const leadingStringOfGeneratedContent = '/* ';
const hexHashLength = 40;

// Hash expected for the fixture above.
const expect = 'a569b0aa92ed95d91042bad57795c0af105056a3';
/**
 * Reads the content hash embedded at the top of a generated file using a
 * byte-range read stream, so only the first prefix + hash bytes are read.
 *
 * @param {string} filePath - Path to the generated file.
 * @returns {Promise<string | null>} The 40-char hex hash, or null when the
 *   file is missing, empty, or too short to contain a full hash.
 */
function readHashStream(filePath) {
  if (!existsSync(filePath)) {
    return Promise.resolve(null);
  }
  const wanted = leadingStringOfGeneratedContent.length + hexHashLength;
  return new Promise((resolve, reject) => {
    // Limit the read to exactly the bytes we need (`end` is inclusive).
    // Note: `highWaterMark` alone would only cap the chunk size — it does
    // not guarantee a single chunk holds all wanted bytes.
    const stream = createReadStream(filePath, {
      encoding: 'utf-8',
      start: 0,
      end: wanted - 1,
    });
    let buffered = '';
    stream.on('error', (error) => reject(error));
    stream.on('data', (chunk) => {
      // Chunks may arrive in pieces; accumulate until the stream ends.
      buffered += chunk;
    });
    // 'end' fires even for an empty file, so the promise always settles
    // (the original resolved only on 'data' and hung forever on empty files).
    stream.on('end', () => {
      const hash = buffered.slice(leadingStringOfGeneratedContent.length);
      resolve(hash.length === hexHashLength ? hash : null);
    });
  });
}
/**
 * Synchronously reads the content hash embedded at the top of a generated
 * file.
 *
 * @param {string} filepath - Path to the generated file.
 * @returns {string | null} The 40-char hex hash, or null when the file is
 *   missing, unreadable, or too short to contain a full hash.
 */
function readHashSync(filepath) {
  if (!existsSync(filepath)) return null;
  try {
    const content = readFileSync(filepath, 'utf-8');
    const hash = content.slice(
      leadingStringOfGeneratedContent.length,
      leadingStringOfGeneratedContent.length + hexHashLength,
    );
    // Match readHashStream: a truncated file yields null, not a short string.
    return hash.length === hexHashLength ? hash : null;
  } catch (err) {
    // File may vanish between existsSync and the read (TOCTOU); treat it as
    // absent with an explicit null rather than falling through to undefined.
    return null;
  }
}
/**
 * Asynchronously reads the content hash embedded at the top of a generated
 * file via fs.promises.readFile.
 *
 * @param {string} filepath - Path to the generated file.
 * @returns {Promise<string | null>} The 40-char hex hash, or null when the
 *   file is missing, unreadable, or too short to contain a full hash.
 */
async function readHashAsync(filepath) {
  if (!existsSync(filepath)) return null;
  try {
    const content = await readFile(filepath, 'utf-8');
    const hash = content.slice(
      leadingStringOfGeneratedContent.length,
      leadingStringOfGeneratedContent.length + hexHashLength,
    );
    // Match readHashStream: a truncated file yields null, not a short string.
    return hash.length === hexHashLength ? hash : null;
  } catch (err) {
    // File may vanish between existsSync and the read (TOCTOU); treat it as
    // absent with an explicit null rather than falling through to undefined.
    return null;
  }
}
/**
 * Benchmarks the three readHash implementations (`len` iterations each) and
 * verifies that every variant returns the expected hash for the fixture file.
 */
async function main() {
  let v;

  console.time('Use sync');
  for (let i = 0; i < len; i++) v = readHashSync(filepath);
  console.timeEnd('Use sync');
  assert.strictEqual(v, expect);

  v = '';
  console.time('Use async');
  for (let i = 0; i < len; i++) v = await readHashAsync(filepath);
  console.timeEnd('Use async');
  assert.strictEqual(v, expect);

  v = '';
  console.time('Use stream');
  for (let i = 0; i < len; i++) v = await readHashStream(filepath);
  console.timeEnd('Use stream');
  assert.strictEqual(v, expect);
}

// Don't leave the benchmark promise floating: surface assertion failures
// explicitly and exit non-zero instead of relying on unhandled-rejection
// behavior.
main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
It turned out that loading the whole file content synchronously is the fastest of the three approaches.