const path = require('path');
const fs = require('fs');

// Regexes for recognizing import/export lines. Several of them capture the
// pieces we need (export names, module paths) in group 1.
const isImportPattern = /^\s*import/;
const isFromPackagePattern = /@domino\/(.+)'|@domino\/(.+)"/;
const isLernaPackageImport = /@domino\/(.*)/;
const isExportPattern = /^\s*export/;
const getExportNameMatcher = /^\s*export\s*{\s*default\s*as\s*(.*)\s*}\s*from\s*.+/;
const getExportNameMultiLineMatcher = /^\s*default\s*as\s*(.*)\s*/;
const getExportPathMatcher = /^\s*export\s*{\s*default\s*as\s*.*\s*}\s*from\s*(.+)/;
const getExportMultiLinePathMatcher = /^\s*}\s*from\s*(.+)/;
const getWildcardExportMatcher = /\s*export\s*(\*)\s*from\s*.*/;
const getWildcardExportPathMatcher = /\s*export\s*\*\s*from\s*(.+)/;
const defaultImportPattern = /\s*import\s*(.*)\s*from.*/;
const defaultImportFromPattern = /\s*import\s*.*\s*from\s*(.*)\s*('|")/;
const middleMultiLineImportPattern = /^\s*(.*)\s*,\s*$/;
const multiNonDefaultImportFromPattern = /\s*}\s*from\s*(.*)\s*('|")/;

// Lines we failed to parse, collected for debugging.
const failedParses = [];
/*
 * Recursively walks `dir`, calling `fileCallback` for each file and
 * `dirCallback` for each directory, threading `state` through the callbacks.
 * Calls `cb` with the merged state once the whole subtree has been visited.
 */
function walk({
    dir = ".",
    depth = 0,
    fileCallback = (fullPath, fileName, depth, state, cb) => cb(state),
    dirCallback = (fullPath, dirName, depth, state, cb) => cb(state),
    state = {},
    ignoreDirs = /node_modules|stories|test|tests|\.git|\.storybook/,
  },
  cb
) {
  fs.readdir(dir, (error, entities) => {
    if (error) {
      // `reject` was not defined in this scope; fail loudly instead
      throw error;
    } else {
      const filesList = entities.filter(e => !ignoreDirs.test(e));
      const promises = filesList.map(file => {
        const nextPath = path.join(dir, file);
        return new Promise(function(resolve, reject) {
          fs.stat(nextPath, (error, stats) => {
            if (error) {
              reject(error);
            } else if (stats.isDirectory()) {
              // recurse, then resolve with the subtree's accumulated state
              dirCallback(nextPath, file, depth, state, (updatedState) => {
                walk({
                  dir: nextPath,
                  depth: depth + 1,
                  fileCallback,
                  dirCallback,
                  state: updatedState,
                },
                stateComingUp => {
                  resolve(stateComingUp);
                });
              });
            } else if (stats.isFile()) {
              fileCallback(nextPath, file, depth, state, (updatedState) => {
                resolve(updatedState);
              });
            } else {
              // neither file nor directory (e.g. a socket); pass the state through
              resolve(state);
            }
          });
        });
      });
      // merge sibling states; earlier keys win on collision
      Promise.all(promises).then(states => {
        cb(states.reduce((acc, next) => Object.assign({}, next, acc), state));
      });
    }
  });
}
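// A minimal sketch of using walk on its own: collect every file path under
// the current directory into one object. Distinct keys merge cleanly, since
// sibling states are combined with Object.assign:
//
//   walk({
//     fileCallback: (fullPath, fileName, depth, state, cb) => cb({ [fullPath]: true }),
//   }, seen => console.log(Object.keys(seen)));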
// Reads a file synchronously; passes the content to `callback` when one is
// given, otherwise returns it.
function getFileContent(filePath, callback) {
  const content = fs.readFileSync(filePath, 'utf-8');
  if (callback) {
    callback(content);
  } else {
    return content;
  }
}

// Trims an import/export name and drops any commas.
function formatImportExportName(name) {
  return name.trim().replace(/,/g, '');
}
/*
 * tries to get the name of an export from a single line
 */
function getExportName(exportLine) {
  const name = exportLine.match(getExportNameMatcher);
  if (name) {
    return formatImportExportName(name[1]);
  }
  const wildcard = exportLine.match(getWildcardExportMatcher);
  if (wildcard) {
    return wildcard[1];
  }
  const multiName = exportLine.match(getExportNameMultiLineMatcher);
  if (multiName) {
    return formatImportExportName(multiName[1]);
  }
  console.warn("couldn't get name for line:", exportLine);
  failedParses.push(exportLine);
  return "";
}
/*
 * tries to get the path to an export from a single line
 */
function getPathToDepFromExport(exportLine) {
  const pathToExport = exportLine.match(getExportPathMatcher);
  if (pathToExport) {
    return pathToExport[1];
  }
  const pathToWildCard = exportLine.match(getWildcardExportPathMatcher);
  if (pathToWildCard) {
    return pathToWildCard[1];
  }
  const multiLinePath = exportLine.match(getExportMultiLinePathMatcher);
  if (multiLinePath) {
    return multiLinePath[1];
  }
  console.warn("couldn't get path from line:", exportLine);
  failedParses.push(exportLine);
  return "";
}
function getExportLines(lines) {
  const tests = [
    // 1. start a new export record when a line begins an export statement
    (line, exportLines) => {
      if (isExportPattern.test(line)) {
        return exportLines.concat([
          { name: "", path: "" }
        ]);
      }
    },
    // 2. fill in the export's name
    (line, exportLines) => {
      const exportName = getExportName(line);
      if (exportName) {
        exportLines[exportLines.length - 1].name = exportName;
        return exportLines;
      }
    },
    // 3. fill in the export's path, stripping quotes and the leading "."
    (line, exportLines) => {
      const pathToDep = getPathToDepFromExport(line);
      if (pathToDep) {
        let path = pathToDep;
        const quotedRelativePath = /'\.(.+)'|"\.(.+)"/;
        const inside = path.match(quotedRelativePath);
        if (inside) {
          // group 1 matches single quotes, group 2 double quotes
          path = inside[1] || inside[2];
        }
        exportLines[exportLines.length - 1].path = path;
        return exportLines;
      }
    },
  ];
  return matchMultiLinePattern(lines, tests);
}
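// For instance, given a (hypothetical) index.js containing
//
//   export { default as Button } from './Button';
//   export * from './constants';
//
// getExportLines returns
//
//   [{ name: "Button", path: "/Button" }, { name: "*", path: "/constants" }]
//
// (the leading "." is stripped so the path can be appended to the package
// directory later).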
/**
 * lines - lines of a file
 * tests - [function]
 *
 * Each test is called with the current line, the matches accumulated so far,
 * and the line index. A test should return the updated matches on a match, or
 * undefined if there is no match. Tests are enabled one at a time, in order;
 * after a match, the next test is enabled (and may already fire on the same
 * line), which lets single-line statements be consumed in one pass. Returning
 * { matches, shouldContinueTest: true } keeps the current test active, which
 * is how multi-line statements are handled.
 */
function matchMultiLinePattern(lines, tests) {
  // clone the tests, enabling only the first one to start
  const orderedTests = tests.map((test, i) => ({
    test,
    shouldTest: i === 0,
  }));
  let i = 0;
  let matches = [];
  while (i < lines.length) {
    orderedTests.forEach((testGroup, testIndex) => {
      if (testGroup.shouldTest) {
        const updated = testGroup.test(lines[i], matches, i);
        if (updated) {
          if (updated.shouldContinueTest) {
            matches = updated.matches;
          } else {
            matches = updated;
            // advance to the next test, wrapping around to the first
            orderedTests[testIndex].shouldTest = false;
            orderedTests[(testIndex + 1) % orderedTests.length].shouldTest = true;
          }
        }
      }
    });
    i += 1;
  }
  return matches;
}
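// A minimal sketch of how matchMultiLinePattern drives a pair of tests; the
// regexes and sample lines are hypothetical, just to show the "enable the
// next test after a match" state machine:
//
//   const sampleLines = ["import {", "  Foo,", "} from '@domino/ui';"];
//   const sampleTests = [
//     (line, matches) =>
//       /^\s*import/.test(line) ? matches.concat([{ names: [] }]) : undefined,
//     (line, matches) => {
//       const m = line.match(/^\s*(\w+),\s*$/);
//       if (m) {
//         matches[matches.length - 1].names.push(m[1]);
//         return { matches, shouldContinueTest: true };
//       }
//       if (/}\s*from/.test(line)) return matches; // done; re-enable test 1
//     },
//   ];
//   matchMultiLinePattern(sampleLines, sampleTests);
//   // => [{ names: ["Foo"] }]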
/*
 * make a map from export name to the path of the file that provides it
 *
 * assumes it is executed at the top level: finds index.js files, parses
 * their export statements, and builds the map
 * { exportName: pathFromPackageLevel }
 */
function buildExportMap() {
  function fileCallback(fullPath, fileName, depth, state, cb) {
    if (fileName === "index.js") {
      getFileContent(fullPath, content => {
        // look for export names
        const lines = content.split("\n");
        const exportLines = getExportLines(lines);
        // drop the file name and the slash before it to get the directory
        const dirPath = fullPath.slice(0, fullPath.length - fileName.length - 1);
        const newState = exportLines.reduce((acc, nextSpec) => {
          acc[nextSpec.name] = dirPath + nextSpec.path;
          return acc;
        }, {});
        cb(newState);
      });
    } else {
      cb({});
    }
  }
  const opts = {
    fileCallback,
  };
  return new Promise(function(resolve) {
    walk(opts, exportMap => {
      resolve(exportMap);
    });
  });
}
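// Illustrative shape of the resolved export map (package and component names
// here are made up):
//
//   {
//     Button: "packages/ui/src/Button",
//     Modal: "packages/ui/src/Modal",
//   }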
/*
 * make a mapping from each file to its uses of lerna packages, detailed by
 * import names, file path, line start, and line end; this maps onto the
 * replacement strings in the export map, and also allows easier replacement
 * of whole import chunks:
 *
 * { filePath: [{ filePath: string, lineStart: #, lineEnd: #, packageName: string, importNames: [string, ...] }, ...] }
 *
 * walks the tree; for each file, runs matchers that fire on import lines
 * from lerna packages and builds the mapping
 */
function makeImportMap() {
  function fileCallback(fullPath, fileName, depth, state, cb) {
    getFileContent(fullPath, content => {
      const lines = content.split("\n");
      const tests = [
        // 1. start a new record when a line begins an import
        (line, matches, lineIndex) => {
          if (isImportPattern.test(line)) {
            return matches.concat([
              {
                filePath: fullPath,
                lineStart: lineIndex,
                lineEnd: undefined,
                importNames: [],
              }
            ]);
          }
        },
        // 2. capture names from a default import or a single-line named import
        (line, matches, lineIndex) => {
          const defaultImportName = line.match(defaultImportPattern);
          if (defaultImportName) {
            matches[matches.length - 1].importNames =
              defaultImportName[1].replace(/{|}/g, ",").split(",").map(x => x.trim()).filter(x => !!x);
            return matches;
          }
          const openingMultiLineImport = /^\s*import\s*{\s*$/;
          if (openingMultiLineImport.test(line)) {
            // beginning of a multi-line import
            return matches;
          }
        },
        // 3. capture names in the middle of a multi-line import, then close
        //    the record when the terminating "from" line arrives
        (line, matches, lineIndex) => {
          const match = line.match(middleMultiLineImportPattern);
          if (match) {
            matches[matches.length - 1].importNames = matches[matches.length - 1].importNames.concat([
              match[1]
            ]);
            // stay on this test until the import statement ends
            return {
              matches,
              shouldContinueTest: true,
            };
          }
          // end of a multi-line import, or the "from" of a single-line one:
          // keep the record if it imports from a lerna package, otherwise drop it
          const importNameMatch = line.match(multiNonDefaultImportFromPattern) || line.match(defaultImportFromPattern);
          if (importNameMatch) {
            const importName = importNameMatch[1];
            const packageNameMatch = importName.match(isLernaPackageImport);
            if (packageNameMatch) {
              const packageName = packageNameMatch[1];
              matches[matches.length - 1].lineEnd = lineIndex;
              matches[matches.length - 1].packageName = packageName;
              return matches;
            } else if (matches[matches.length - 1].lineEnd === undefined) {
              // not a lerna import; discard the record
              matches.pop();
              return matches;
            }
          }
        },
      ];
      const matches = matchMultiLinePattern(lines, tests);
      if (matches.length) {
        cb({ [fullPath]: matches });
      } else {
        cb({});
      }
    });
  }
  return new Promise(function(resolve) {
    walk({
      ignoreDirs: /node_modules|stories|test|tests|\.git|\.storybook/,
      fileCallback,
    }, importMap => resolve(importMap));
  });
}
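// Illustrative shape of the resolved import map (file and package names are
// made up):
//
//   {
//     "apps/web/src/App.js": [
//       {
//         filePath: "apps/web/src/App.js",
//         lineStart: 0,
//         lineEnd: 3,
//         packageName: "ui",
//         importNames: ["Button", "Modal"],
//       },
//     ],
//   }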
/**
 * per file with imports to be transformed, generate the relative import
 * paths:
 *
 * { filePath: [{ lineStart: number, lineEnd: number, paths: [{ name, path }] }, ...] }
 */
function buildNewImports(exportSpecs, importSpecs) {
  const generatedImports = {};
  for (let filePath in importSpecs) {
    if (importSpecs.hasOwnProperty(filePath)) {
      // keep only fully parsed records (those with a recorded end line)
      const imports = importSpecs[filePath].filter(l => l.lineEnd !== undefined);
      // one ".." per directory between the file and the top level
      const depth = filePath.split("/").length - 1;
      const relativePart = Array(depth).fill("..").join("/") + "/";
      const relativeImportPaths = imports.reduce((allImports, {
        importNames,
        lineStart,
        lineEnd,
      }) => {
        const paths = importNames.map(name => ({ name, path: `${relativePart}${exportSpecs[name]}` }));
        return allImports.concat([
          {
            paths,
            lineStart,
            lineEnd,
          }
        ]);
      }, [])
        .sort((a, b) => a.lineStart - b.lineStart); // comparator must return a number, not a boolean
      if (relativeImportPaths.length) {
        generatedImports[filePath] = (generatedImports[filePath] || []).concat(relativeImportPaths);
      }
    }
  }
  return generatedImports;
}
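// For example, a hypothetical file at "apps/web/src/App.js" has depth 3, so
// relativePart is "../../../" and an export mapped to
// "packages/ui/src/Button" becomes "../../../packages/ui/src/Button".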
/**
 * create a tmp dir next to each target file, then write an updated copy of
 * that file into the tmp dir
 *
 * ASSUMES THAT THIS SCRIPT IS EXECUTING IN THE PARENT DIR OF THESE PACKAGES
 */
function generateNewFiles(newImportMap) {
  Object.keys(newImportMap).forEach(filePath => {
    const splitFilePath = filePath.split("/");
    const lastIndex = splitFilePath.length - 1;
    const baseDir = splitFilePath.slice(0, lastIndex).join('/') + '/' + 'tmp';
    if (!fs.existsSync(baseDir)) {
      fs.mkdirSync(baseDir);
    }
    const importChunkToUpdate = newImportMap[filePath];
    const content = getFileContent(filePath);
    const lines = content.split('\n');
    let transformed = [];
    importChunkToUpdate.forEach((p, i) => {
      const {
        lineStart,
        lineEnd,
        paths,
      } = p;
      const imports = paths.map(p => `import ${p.name} from '${p.path}';`);
      // copy the untouched lines before this import chunk...
      if (!transformed.length) {
        transformed = transformed.concat(lines.slice(0, lineStart));
      } else {
        transformed = transformed.concat(lines.slice(importChunkToUpdate[i - 1].lineEnd + 1, lineStart));
      }
      // ...then splice in the rewritten imports
      transformed = transformed.concat(imports);
    });
    // append everything after the last import chunk
    const wholeFile = transformed.concat(
      lines.slice(importChunkToUpdate[importChunkToUpdate.length - 1].lineEnd + 1)
    );
    const newFileContent = wholeFile.join('\n');
    const newFilePath = baseDir + '/' + splitFilePath[lastIndex];
    // write the new file
    fs.writeFileSync(newFilePath, newFileContent, 'utf-8');
  });
}
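// For example (hypothetical line numbers): with a single chunk at
// lineStart 2, lineEnd 4, the output file is lines[0..2), then the rewritten
// import statements, then lines[5..].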
Promise.all([makeImportMap(), buildExportMap()]).then(([importSpecs, exportSpecs]) => {
  const paths = buildNewImports(exportSpecs, importSpecs);
  generateNewFiles(paths);
});
This script generates a map of export names to their definition locations, plus a map of lerna imports recording which file each came from, the lines it started and ended on, and the named imports in that chunk. It then creates a copy of each file that needs to be updated, assembles the relative import paths, and inserts them where the old imports were. After that I manually went to each tmp directory, replaced the old files with the new ones, and fixed bugs by hand where necessary (this script is not perfect).
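To run it, save the gist locally (the filename below is hypothetical) and execute it from the parent directory of the packages:

node rewrite-imports.js

It writes the rewritten copies into tmp/ directories next to the originals rather than editing anything in place.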