Require only when changed
The first version invalidates Node's require cache whenever the file's modification time changes:

const fs = require('fs');

// Assumption: the gist doesn't show how IS_PROD is defined; deriving it
// from NODE_ENV is one common convention.
const IS_PROD = process.env.NODE_ENV === 'production';

function getJSON(filename) {
  if (!IS_PROD) {
    getJSON.lastMtimes = getJSON.lastMtimes || {};
    const filePath = require.resolve(filename);
    const { mtimeMs } = fs.statSync(filePath);
    const lastMtime = getJSON.lastMtimes[filePath];
    if (!lastMtime || lastMtime !== mtimeMs) {
      getJSON.lastMtimes[filePath] = mtimeMs;
      // require caches modules by resolved path; deleting the entry
      // forces a fresh read and parse on the require call below.
      delete require.cache[filePath];
    }
  }
  return require(filename);
}
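In production the check is skipped entirely, so Node's built-in require cache applies and the file is read and parsed at most once per process.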
The fs.statSync approach turned out to lag behind the actual file contents when run in Docker with NFS, so the second version compares the file's bytes directly:

const fs = require('fs');

// Assumption: IS_PROD defined as in the first version.
const IS_PROD = process.env.NODE_ENV === 'production';

function getJSON(filename) {
  if (!IS_PROD) {
    /**
     * With this optimization, we're making sure a large ~20MB file
     * isn't JSON.parse'd unnecessarily. I've tried using fs.statSync,
     * but for some reason it lags behind the actual file contents
     * when used in Docker with NFS.
     */
    getJSON.fileCache = getJSON.fileCache || {};
    const filePath = require.resolve(filename);
    const cache = getJSON.fileCache[filePath];
    const buffer = fs.readFileSync(filePath);
    // Reading the file is cheap next to parsing 20MB of JSON, and
    // Buffer#equals is a byte-for-byte comparison, so we only re-parse
    // when the contents actually changed.
    if (cache && cache.buffer.equals(buffer)) {
      return cache.file;
    }
    const file = JSON.parse(buffer);
    getJSON.fileCache[filePath] = { buffer, file };
    return file;
  }
  return require(filename);
}
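Either version is called like a plain require. A minimal usage sketch, assuming getJSON is exported from a hypothetical local getJSON.js module; an absolute path is passed because require.resolve inside getJSON resolves relative paths against the module that defines it, not against the caller:

const path = require('path');
const getJSON = require('./getJSON'); // hypothetical module exporting getJSON

// config.json is a hypothetical file next to this script. In development,
// edits to it are picked up on the next call without restarting the
// process; in production, require's normal caching applies.
const config = getJSON(path.join(__dirname, 'config.json'));
console.log(config);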