|
// Dependencies and run configuration for the static-asset compiler.
// Invoked as: node node-compile <staticRoot> [--minify] [--glob=...] [--test]
//             [--less] [--no-less-compress] [--context=N]
var glob = require('glob'),
    path = require('path'),
    fs = require('fs'),
    uglify = require('uglify-js'),
    CleanCSS = require('clean-css'),
    less = require('less'),
    EventEmitter = require('events').EventEmitter,
    url = require('url'),
    util = require('util'),
    crypto = require('crypto'),
    cluster = require('cluster'),
    http = require('http'),
    numCPUs = require('os').cpus().length,
    staticRoot = process.argv[2],      // root folder to scan (first CLI argument); also used for resolving css url()'s that start with /
    filesGlob = '**/*.{css,less,js}',  // file mask; overridable via --glob / --less
    minifyJs = false,                  // set by --minify
    minifyCss = true,                  // cleared by --no-less-compress
    contextToShow = 10,                // lines of context shown around a cache-break error (--context=N)
    testOnly = false,                  // --test: compile but never write output files
    maxWorkerCount = 8,                // upper bound on forked worker processes
    cacheFileLimit = 200;              // files at or under this many chars go through the master's output cache
|
|
|
// No root folder supplied: print usage with examples and bail out.
if (!staticRoot) {
    [
        'Usage: node node-compile folder mask',
        'Example: node node-compile ../StackOverflow/Content',
        'Example: node node-compile ../StackOverflow/Content --minify',
        'Example: node node-compile ../StackOverflow/Content --glob="Js/*.js" --minify',
        'Example: node node-compile ../StackOverflow/Content --glob="**/*.{css,less}"'
    ].forEach(function(line) { console.log(line); });
    process.exit(-1);
}
|
|
|
// Usage |
|
// Parse the remaining CLI flags into the module-level configuration variables.
process.argv.forEach(function(arg) {
    if (arg.indexOf('--glob') === 0)
        filesGlob = arg.split('=')[1];
    if (arg.indexOf('--minify') === 0)
        minifyJs = true;
    if (arg.indexOf('--test') === 0)
        testOnly = true;
    if (arg.indexOf('--less') === 0)
        filesGlob = '**/*.less';
    if (arg.indexOf('--no-less-compress') === 0)
        minifyCss = false;
    if (arg.indexOf('--context') === 0)
        // parseInt fix: getErrorContext compares this value to a numeric
        // counter with strict equality, so the raw string taken from the
        // command line ('5' !== 5) would never match and the context window
        // silently became unbounded.
        contextToShow = parseInt(arg.split('=')[1], 10);
});
|
|
|
// Site root used when resolving absolute url() paths in CSS.
// Bug fix: this line read `siteRoot = root;` but `root` is never defined at
// module scope (it is only a parameter of findFiles), so the script crashed
// with a ReferenceError here. Use staticRoot, and declare the variable so it
// is not an implicit global.
var siteRoot = staticRoot;

// Shared event bus driving both the master and worker pipelines.
var events = new EventEmitter();

// Validate the target before doing any work (statSync throws if it's missing).
var rootStat = fs.statSync(staticRoot);
if (!(rootStat && rootStat.isDirectory())) {
    // Also fixed: the error message referenced the undefined `root`.
    console.error(staticRoot + ' is not a directory');
    process.exit(-1);
}

// Paths excluded from compilation: partials (leading '_'), already-minified
// output (.min.css/.min.js), less include folders, and PartialJS/third-party/
// _design directories (both Windows and POSIX separators).
var exclude = /(\\|\/)_.*\\|(\.min\.(css|js)$)|((\\|\/)_(.+?)\.less$)|((\\|\/)less(\\|\/).*\.less$)|\\PartialJS\\|\\third-party\\|((\\|\/)_design(\\|\/))/;
|
|
|
// Compile a LESS source string to CSS (optionally compressed) and append
// cache-breaking query strings to any url() references it contains.
function processLess(file, source, done) {
    var opts = {
        paths: [path.dirname(file)],  // resolve @import relative to the file
        compress: minifyCss
    };
    less.render(source, opts, function(e, result) {
        if (e) fileError(file, e); // fileError throws / exits, so no return needed
        done(null, cacheBreakCss(file, result.css));
    });
}
|
|
|
// Shared CleanCSS instance; processCss minifies plain CSS and then
// cache-breaks its url() references.
var cleaner = new CleanCSS();
function processCss(file, source, done) {
    var minified = cleaner.minify(source);
    done(null, cacheBreakCss(file, minified));
}
|
|
|
// Minify a JavaScript source string with uglify-js (parse -> compress ->
// mangle) and report the size reduction via `notes`.
// Throws a descriptive Error (with line/col/pos from uglify) on parse failure.
var compressor = uglify.Compressor({ warnings: false });
function processJs(file, source, done) {
    try {
        var ast = uglify.parse(source),
            out = uglify.OutputStream({ quote_keys: true });
        ast.figure_out_scope();
        ast = ast.transform(compressor);
        // Re-resolve scopes after compression so mangling sees the final tree.
        ast.figure_out_scope();
        ast.compute_char_frequency();
        ast.mangle_names();
        ast.print(out);
        var code = out.get(),
            oldLength = source.length,
            newLength = code.length,
            // Math.round takes a single argument; the original second
            // argument (2) was silently ignored.
            percent = Math.round(newLength / oldLength * 100);
        var notes = 'Old size: ' + oldLength.toLocaleString() + ', New Size: ' + newLength.toLocaleString() + ' (' + percent + '%)';
        done(null, code, notes);
    } catch (e) {
        // uglify parse errors carry line/col/pos; surface them with the file name.
        throw new Error('Error parsing ' + file + ': ' + e.message + '\nLine ' + e.line + ', Col ' + e.col + ', Pos ' + e.pos);
    }
}
|
|
|
// Return the hex-encoded SHA-1 digest of a file's contents
// (used as the cache-breaking version token).
function getHash(file) {
    var hasher = crypto.createHash('sha1');
    hasher.update(fs.readFileSync(file));
    return hasher.digest('hex');
}
|
|
|
// Memoized per-file hashes, plus the master's cache of compiled output
// (keyed by source contents; populated via 'cache-this' IPC messages).
var cacheBreakers = {},
    outputCache = {};

// Return the cached SHA-1 for `file`, computing it on first request.
function getCacheBreaker(file) {
    if (!(file in cacheBreakers)) {
        cacheBreakers[file] = getHash(file);
    }
    return cacheBreakers[file];
}
|
|
|
// True when a CSS url() target is a path on this site (something we can hash
// on disk). Protocol-relative (//host/...), absolute http(s)://, and data:
// URIs are external and must be left untouched.
function isSiteLocalImagePath(file) {
    var externalPrefixes = [/^\/\//, /^http/, /^data:/];
    return !externalPrefixes.some(function(re) { return re.test(file); });
}
|
|
|
// Rewrite every url(...) in `css` to append a content-hash query string
// (?v=<first 12 hex chars of the referenced file's SHA-1>) so browsers
// re-fetch assets when their bytes change. External URLs, data: URIs, and
// already-versioned references pass through untouched.
function cacheBreakCss(file, css) {
    return css.replace(/url\((['"]?)(.+?)\1\)/g, function(match, quote, p1, offset, fullText) {
        // exclude already cache broken things
        if (/\?v=/g.test(match)) {
            return match;
        }
        // anything else isn't a file ref on the web site
        if (!isSiteLocalImagePath(p1)) {
            return match;
        }
        // remove things like IEfix on fonts
        p1 = p1.replace(/\?#?iefix$|#.*$/, '');
        // Bug fix: the original tested `file[0] === '/'` (whether the CSS
        // file's own path is absolute), but the intent is to resolve url()
        // *targets* that start with '/' against the site root, and relative
        // targets against the CSS file's directory.
        var startFrom = p1[0] === '/' ? siteRoot : path.dirname(file);
        var urlpath = path.normalize(path.join(startFrom, p1));
        var hash;
        try {
            hash = getCacheBreaker(urlpath);
            return 'url(' + quote + p1 + '?v=' + hash.substr(0, 12) + quote + ')';
        } catch (e) {
            // Referenced file missing/unreadable: report with surrounding CSS context.
            var errString = 'Error cache-breaking file: ' + match + ' at offset ' + offset + '\n' + e.toString();
            errString += '\nContext in ' + file + ':\n' + getErrorContext(fullText, offset) + '\n';
            fileError(file, errString);
            return ''; // we're going boom globally - no one cares...but at least break the CSS, to be safe.
        }
    });
}
|
|
|
// Return roughly `contextToShow` lines of `text` on either side of `offset`,
// with the offending line highlighted via ANSI colors (yellow context,
// white-on-red for the line itself).
function getErrorContext(text, offset) {
    // Defaults cover texts with fewer than contextToShow surrounding newlines.
    // Previously these could be left undefined, which garbled the substring
    // calls (e.g. single-line CSS produced the wrong spans entirely).
    var contextStart = 0, lineStart = 0, lineEnd = text.length, contextEnd = text.length;
    var i, newlines;
    // Walk backwards: first newline marks the line start, the
    // contextToShow-th marks the start of the context window.
    for (i = offset, newlines = 0; i > 0; i--) {
        if (text.charAt(i) === '\n' || text.charAt(i) === '\r') {
            newlines++;
            if (newlines === 1) {
                lineStart = i;
            }
            if (newlines === contextToShow) {
                contextStart = i;
                break;
            }
        }
    }
    // Walk forwards symmetrically for the line end and the context end.
    for (i = offset, newlines = 0; i < text.length; i++) {
        if (text.charAt(i) === '\n' || text.charAt(i) === '\r') {
            newlines++;
            if (newlines === 1) {
                lineEnd = i;
            }
            if (newlines === contextToShow) {
                contextEnd = i;
                break;
            }
        }
    }
    // '\x1b' is the same byte as the original '\033' (octal escapes are a
    // SyntaxError in strict mode / ES modules).
    return '\x1b[33m' + text.substring(contextStart, lineStart) +
        '\x1b[41m\x1b[37m' + text.substring(lineStart, lineEnd) +
        '\x1b[33m\x1b[40m' + text.substring(lineEnd, contextEnd);
}
|
|
|
// Per-extension pipeline table: maps a file extension to
//   name      -> output filename for a given source filename
//   code      -> transform function (file, source, done(err, output, notes))
//   condition -> optional predicate; when present and false, the file is skipped
var transformers = {
    'less' : {
        name : function (name) { return name.replace(/\.less$/, '.css'); },
        code : processLess
    },
    'css' : {
        name : function (name) { return name; },
        code : processCss,
        // Only process if the less file is also not present
        condition: function(name) { return !fs.existsSync(name.replace(/\.css$/, '.less')); }
    },
    'js': {
        name: function (name) {
            // NOTE(review): when --minify is set this overwrites the source
            // file in place; otherwise it appends '.min', producing
            // '<file>.js.min' (not '<file>.min.js'). That suffix order looks
            // suspicious — confirm intent before changing.
            return minifyJs ? name : (name + '.min');
        },
        code: processJs
    }
};
|
|
|
// Report a fatal per-file failure. In a worker: forward the error to the
// master over IPC and terminate. In the master: throw (the uncaughtException
// handler prints and exits). Never returns normally.
function fileError(filename, e, errorJSON) {
    if (cluster.isWorker) {
        process.send({ msg: 'file-error', filename: filename, e: e, errorJSON: JSON.stringify(e, null, 2) });
        process.exit(-1);
    }
    if (e.constructor === Error) {
        throw e;
    }
    // Wrap non-Error values in red ANSI ('\x1b' is the same byte as '\033').
    throw new Error('\x1b[31mError compiling [' + filename + ']: ' + (e.message || e) + '\x1b[0m');
}
|
|
|
// Return the extension (the text after the final '.') of a filename.
// Throws a TypeError if the name contains no dot, matching the callers'
// assumption that every candidate file has an extension.
function getExtension(filename) {
    return /\.([^.]+)$/.exec(filename)[1];
}
|
|
|
// Return the last path segment of a file path.
// Generalized: the original regex only recognized backslashes, so any
// POSIX-style path (or a bare filename) made match() return null and threw a
// TypeError. Both separators are now handled, and a name with no separator is
// returned unchanged.
function getFilename(filename) {
    var m = filename.match(/([^\\\/]+)$/);
    return m ? m[1] : filename;
}
|
|
|
// Map a source filename to its output filename via the extension's transformer.
function getOutputName(filename) {
    var transformer = transformers[getExtension(filename)];
    return transformer.name(filename);
}
|
|
|
// A file is read unless its transformer defines a condition that rejects it
// (e.g. skip a .css when a sibling .less will regenerate it).
function shouldRead(filename) {
    var condition = transformers[getExtension(filename)].condition;
    if (!condition) {
        return true;
    }
    return condition(filename);
}
|
|
|
// Read a file as UTF-8 and emit 'file-read' with its trimmed contents.
function readFile(filename) {
    fs.readFile(filename, { encoding: 'utf-8' }, function(e, contents) {
        if (e) {
            fileError(filename, e); // throws / exits, so no return needed
        }
        events.emit('file-read', filename, contents.trim());
    });
}
|
|
|
// For short files, like a beta site, ask the master's output cache before
// compiling; larger files go straight to processing.
function cacheCheck(filename, contents) {
    if (contents.length > cacheFileLimit) {
        events.emit('file-ready', filename, contents);
        return;
    }
    process.send({ msg: 'cache-check', filename: filename, contents: contents });
}
|
|
|
// Run the extension-appropriate transformer over `contents`, feed small
// results back to the master's output cache, and emit 'file-processed'.
function processFile(filename, contents) {
    // Bug fix: look the transformer entry up before dereferencing .code — an
    // unknown extension previously crashed with a TypeError on
    // `transformers[ext].code` instead of reaching the explicit error below.
    var entry = transformers[getExtension(filename)];
    if (!entry || !entry.code) throw new Error('Unknown file format: ' + filename);
    entry.code(filename, contents, function(e, output, notes) {
        if (e) {
            fileError(filename, e);
            return;
        }
        // Small inputs are worth caching centrally (keyed by source contents).
        if (contents.length <= cacheFileLimit) {
            process.send({ msg: 'cache-this', filename: filename, contents: contents, output: output });
        }
        events.emit('file-processed', filename, output, notes);
    });
}
|
|
|
// Persist transformed output to its computed filename (unless --test) and
// emit 'file-saved'.
function writeOutputFile(filename, output, notes) {
    var outfilename = getOutputName(filename);
    if (testOnly) {
        // Test mode: report success without touching the disk.
        events.emit('file-saved', filename, outfilename, output, notes);
        return;
    }
    fs.writeFile(outfilename, output, { encoding: 'utf-8' }, function(e) {
        if (e) {
            fileError(filename, e);
        }
        events.emit('file-saved', filename, outfilename, output, notes);
    });
}
|
|
|
// Find all compilable files under `root`: expand the glob, drop excluded
// paths, apply per-extension conditions, sort (js first, then less, then css;
// alphabetical within an extension), and emit 'files-found'.
function findFiles(root) {
    glob(filesGlob, { cwd: root }, function(e, files) {
        if (e) fileError(root, e);

        var ordering = ['js', 'less', 'css'];
        var byTypeThenName = function(a, b) {
            var extA = getExtension(a), extB = getExtension(b);
            if (extA === extB) {
                return a.localeCompare(b);
            }
            return ordering.indexOf(extA) - ordering.indexOf(extB);
        };

        var candidates = files
            .map(function(f) { return path.join(root, f); })  // absolute paths
            .filter(function(f) { return !exclude.test(f); }) // global excludes
            .filter(shouldRead)                               // per-extension condition
            .sort(byTypeThenName);

        events.emit('files-found', candidates);
    });
}
|
|
|
// Fork worker processes and pump files to them one at a time; when the queue
// drains each worker is killed, and once all workers have drained, 'done' is
// emitted.
function processFiles(files) {
    var workerCount = Math.min(files.length, Math.max(numCPUs - 2, 2), maxWorkerCount),
        doneCount = 0;

    if (files.length === 0)
        return;

    // Send the next queued file to `worker`, or retire it when the queue is empty.
    var pump = function(worker) {
        if (files.length > 0) {
            worker.send({ compileFile: files.shift() });
        } else {
            worker.kill();
            doneCount++;
            if (doneCount === workerCount) {
                events.emit('done');
            }
        }
    };

    function startWorker() {
        var worker = cluster.fork();
        worker.on('listening', function(address) {
            // Bug fix: cluster workers expose `id`; `worker.workerID` does not
            // exist and previously printed "Worker undefined Started".
            console.log('Worker ' + worker.id + ' Started, Listening on ' + address.address + ':' + address.port);
            worker.send({ setMinifyJs: minifyJs, setTestOnly: testOnly });
            pump(worker);
        }).on('message', function(msg) { // listening to requests FROM the workers TO the parent process
            switch (msg.msg) {
                case 'cache-check':
                    // Reply with any cached output for identical source contents.
                    worker.send({ cacheResult: { filename: msg.filename, contents: msg.contents, output: outputCache[msg.contents] } });
                    break;
                case 'cache-this':
                    outputCache[msg.contents] = msg.output;
                    break;
                case 'file-error':
                    fileError(msg.filename, msg.e, msg.errorJSON);
                    break;
                case 'completed':
                    if (msg.notes) {
                        console.info(getFilename(msg.file) + ': ' + msg.notes);
                    }
                    pump(worker);
                    break;
            }
        });
    }

    console.log(numCPUs + ' processor(s) detected, utilizing ' + workerCount + ' worker threads for ' + files.length + ' file(s).');

    for (var i = 0; i < workerCount; i++) {
        startWorker();
    }
}
|
|
|
if (cluster.isMaster) {
    // Master: time the whole run, find the files, then farm them out to workers.
    events.on('begin', function() { console.time('Compile'); });
    events.on('begin', findFiles);
    events.on('files-found', function(files) {
        console.log('Found ' + files.length + ' files to process with mask ' + filesGlob + ' in ' + staticRoot);
        processFiles(files);
    });
    events.on('done', function() { console.timeEnd('Compile'); });
    events.emit('begin', staticRoot);
}
|
else if (cluster.isWorker) {
    // Worker pipeline: file-found -> read -> cache-check -> process -> write -> report.
    events.on('file-found', function(filename) {
        console.time('compiled [' + getExtension(filename) + ']: ' + getOutputName(filename));
        readFile(filename);
    })
    .on('file-read', cacheCheck)
    .on('file-ready', processFile)
    .on('file-processed', writeOutputFile)
    .on('file-saved', function(oldname, newname, output, notes) {
        console.timeEnd('compiled [' + getExtension(oldname) + ']: ' + newname);
        // Tell the master this file is finished so it pumps the next one.
        process.send({ msg: 'completed', file: newname, result: output, notes: notes });
    });

    // Minimal HTTP server: binding a port makes the master's 'listening'
    // event fire, which is what kicks off pumping work to this worker.
    // Port 0 picks any free port.
    var server = http.createServer(function(req, res) {
        res.writeHead(200);
        res.end('worker thread ahoy!\n');
    });
    server.listen(0, function() {
        console.log('HTTP IPC server active, listening on port ' + server.address().port + '.');
    });

    // Messages FROM the master: configuration flags, work items, and
    // output-cache replies.
    process.on('message', function(msg) {
        if (msg.setMinifyJs) {
            minifyJs = msg.setMinifyJs;
        }
        if (msg.setTestOnly) {
            testOnly = msg.setTestOnly;
        }
        if (msg.compileFile) {
            events.emit('file-found', msg.compileFile);
        }
        if (msg.cacheResult) {
            var result = msg.cacheResult;
            if (result.output) {
                // Cache hit: skip compilation and write the cached output directly.
                writeOutputFile(result.filename, result.output, result.notes);
            } else {
                // Cache miss: fall through to normal processing.
                events.emit('file-ready', result.filename, result.contents);
            }
        }
    });
}
|
|
|
// Last-resort error handler: print the most useful representation (stack when
// available, the raw value otherwise) and terminate the process.
function handleError(e) {
    console.error(e.stack ? e.stack : e);
    process.exit(-1);
}
|
|
|
// Fatal-error wiring: any uncaught exception (or process 'error') ends the run.
process.on('uncaughtException', handleError);
process.on('error', handleError);
I like the idea of creating SQL helper functions in the first migration that you can then use in later migrations. It inspired me to create similar helper functions for other types of database objects (stored procedures, views, user-defined types, ...). They can be found here: https://github.com/StevenLiekens/sqlhelpers