
GMAB CLI

  • CONSOLIDATE: Whole-directory includes for any text-based file type
  • TEMPLATE: Populate templates with data from a collection of JSON files
  • PRODUCE: Produce multiple files from a single template file

GMAB is a self-contained NodeJS file with no dependencies on outside modules or code libraries.

Using the CLI

In its simplest form, GMAB can be run with no parameters: GMAB.js

When run this way, four things are assumed (see the example layout after this list):

  1. You only want to compile once.
  2. Your input directory is called "GMAB" and is in the current working directory.
  3. The source directory is called "src" and is also in the current working directory.
  4. Your output directory is the current working directory itself.
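
For instance, a project using these defaults might look like the layout below; the project and page names here are hypothetical, only GMAB.js, GMAB, and src come from the defaults.

my-project/
    GMAB.js
    GMAB/           (input directory: includes, templates and JSON data)
    src/
        index.html  (source page containing placeholders)

Running GMAB.js (for example, node GMAB.js) from my-project/ compiles src/index.html and writes the finished index.html into the current working directory.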

Parameters

All parameters are optional.

GMAB.js [listen] [outputPath] [inputPath] [sourceDirectory]
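
For example, to watch for changes, write output to a public directory (an illustrative path, not a default), read includes and templates from the GMAB directory, and compile the pages in src:

GMAB.js true ./public ./GMAB ./src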

Source Parameter

The source parameter is the path to the directory of files that you want to include other files into.

Input Parameter

The input parameter is the path to the directory of templates, JSON data files, and/or files that you want to include into the files in the source directory.

Output Parameter

The output parameter is the path to the directory where your generated/compiled files are placed when done.

Listen Parameter

GMAB can listen for changes and auto-compile in real time. You can enable this by setting the listen parameter to true:

GMAB.js true

If you only want it to run once, specify false:

GMAB.js false

Consolidate

Placing ${directoryName} in a source page tells GMAB to concatenate all files in that directory and replace the placeholder with the result.
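
For example, assuming a directory named includes inside your input directory that holds header.html and footer.html (hypothetical names), a source page containing:

<body>
${includes}
</body>

would have the ${includes} placeholder replaced with the concatenated contents of header.html and footer.html.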

Produce

Placing $~directoryName~ in a source page tells GMAB to get all JSON files from that directory and, for each file, write a copy of the page in which anything wrapped in {{property}} is replaced with the value of that property from the JSON file:

$~directoryName~
{{prop}}
{{anotherProp}}

The name of each file created will be the same as the name of the JSON file it uses, unless you specify a file name by setting FILENAME in the JSON file like so:

{
    "FILENAME":"myfile.html",
    "prop":"my property value",
    "anotherProp":"another value"
}
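
For example (the file names here are hypothetical), if that same directory also contained about.json with no FILENAME property and the source page were an .html file, GMAB would write myfile.html for the file above and about.html for the other, each with its {{prop}} and {{anotherProp}} placeholders filled in from its own JSON file.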

Template

A template produces a collection of copies of a template file, one for each JSON file in the "source" directory specified in its template.json, where each copy has its placeholders (e.g. {{property}}) replaced by the values from the corresponding JSON file. In other words,

  • It loops through each JSON file in a specified directory
  • Creates a copy of the template file for each one and replaces its placeholders with the corresponding values
  • Concatenates them all together
  • And returns the result

A source page pulls a template's output in with the usual directory placeholder:

${test_template}

To create a template, make a folder whose name ends with "_template" (for example, "test_template") inside the templates folder of your input directory (the compiler looks for template directories under templates/). In that folder, create two files: a template file whose extension matches the source pages that use it (for example, template.html) and a template.json file.

The template.json file must contain a "source" property (a directory of JSON data files, relative to the input directory) and may optionally contain a "max" value (the maximum number of files to include):

{
  "source" : "posts",
  "max" : 5
}

The template file itself can contain anything you want; anything wrapped in {{property}} will be replaced by the value of that property for each JSON file.
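
As a minimal worked example (every name other than template.html and template.json is hypothetical), suppose the input directory contains templates/test_template/template.html with the content:

<article>{{title}}</article>

alongside a template.json whose "source" is "posts", and the posts directory holds first.json ({"title": "Hello"}) and second.json ({"title": "World"}). The ${test_template} placeholder in a source page would then be replaced with one filled-in copy of the template per JSON file, concatenated together with no separator:

<article>Hello</article><article>World</article>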

const fs = require("fs");

// Read a file as UTF-8 text.
var read = path => fs.readFileSync(path, 'utf8');

// Default configuration (see "Using the CLI" above).
var config = {
  path: "./GMAB",       // input directory
  out: "./",            // output directory
  listen: false,        // recompile on changes?
  sourcePages: "./src"  // source pages directory
};

if (process.argv[2] == "help") {
  console.log("Usage: GMAB.js [listen] [outputPath] [inputPath] [sourceDirectory]");
  process.exit(-1);
}

// Command line arguments override the defaults.
config.listen = process.argv[2] == "true" ? true : false;
config.out = process.argv[3] || config.out;
config.path = process.argv[4] || config.path;
config.sourcePages = process.argv[5] || config.sourcePages;
// Recursively walk a directory, returning a tree of { name, type, content, path } items.
function crawlDirectory(fs, path) {
  var items = fs.readdirSync(path);
  var output = [];
  items.forEach(function (name) {
    var filePath = path + "/" + name;
    let type = fs.lstatSync(filePath).isDirectory() ? "dir" : "file";
    if (type === "dir") {
      var content = crawlDirectory(fs, filePath);
      output.push({ name, type, content, path: filePath });
    } else {
      output.push({ name, type, path: filePath });
    }
  });
  return output;
}
// Flatten the directory tree into a single list of file items.
function getFilesFromDirectories(path) {
  var contents = crawlDirectory(fs, path);
  var files = [];
  contents.forEach(function (item) {
    if (item.type === "file") {
      files.push(item);
    } else if (item.type === "dir") {
      function crawlForFiles(items) {
        var results = [];
        items.forEach(function (item) {
          if (item.type === "file") {
            results.push(item);
          } else {
            crawlForFiles(item.content).forEach(function (item) {
              results.push(item);
            });
          }
        });
        return results;
      }
      crawlForFiles(item.content).forEach(function (item) {
        files.push(item);
      });
    }
  });
  return files;
}
// Compile every page in sourcePages into the output directory.
function compileSite({path = "./source", out = "./public", listen = true, sourcePages = "/pages"} = {}) {
  if (path.split("")[0] != ".") { path = "." + path; }
  if (out.split("")[0] != ".") { out = "." + out; }
  if (path != "./" && path != "/" && path.replace(/\s/g, "") != "") {
    var pages = getFilesFromDirectories(sourcePages);
    if (!fs.existsSync(out)) {
      fs.mkdirSync(out);
    }
    pages.forEach(function (page) { // For each page
      var html = read(page.path);
      var ext = page.name.substring(page.name.lastIndexOf(".") + 1, page.name.length);
      // Replace every ${directory} placeholder with the files from that directory
      while (html.match(/\$[\{]\w+[^\}]/) != null) {
        let directory = html.match(/\$[\{]\w+[^\}]/)[0].replace(/((\$)|(\{)|(\}))/g, "");
        var postsHTML = "";
        if (fs.existsSync(path + "/" + directory) || fs.existsSync(path + "/templates/" + directory)) { // If the directory exists
          if (directory.includes("_template")) { // If it's a template directory
            if (fs.existsSync(path + "/templates/" + directory + "/template." + ext) &&
                fs.existsSync(path + "/templates/" + directory + "/template.json")) {
              var template = read(path + "/templates/" + directory + "/template." + ext);
              var metaData = JSON.parse(read(path + "/templates/" + directory + "/template.json"));
              if (metaData.source && fs.existsSync(path + "/" + metaData.source)) {
                var posts = getFilesFromDirectories(path + "/" + metaData.source);
                for (var i in posts) {
                  if (metaData.max && i >= metaData.max) {
                    break;
                  }
                  var post = posts[i];
                  if (post.name.includes(".json")) {
                    var data = JSON.parse(read(post.path));
                    var temp = template;
                    if (typeof data == "object") {
                      // Fill in every {{property}} placeholder from this JSON file
                      for (var k in data) {
                        while (temp.includes("{{" + k + "}}")) {
                          temp = temp.replace("{{" + k + "}}", data[k]);
                        }
                      }
                      postsHTML += temp;
                    } else {
                      return;
                    }
                  }
                }
              } else {
                console.log("template source directory is not valid: ", metaData.source, " for " + directory);
              }
            } else {
              console.log("template requires template." + ext + " and template.json to function correctly");
            }
          } else {
            // Plain consolidate: concatenate every file in the directory
            var posts = getFilesFromDirectories(path + "/" + directory);
            posts.forEach(function (post) {
              postsHTML += read(post.path) + "\n";
            });
          }
          html = html.replace("${" + directory + "}", postsHTML);
        } else {
          // Unknown directory: warn and strip the placeholder so the loop can terminate
          console.log("Directory not found for placeholder: " + directory);
          html = html.replace("${" + directory + "}", "");
        }
      }
      if (html.match(/\$[\~]\w+[^\~]/) != null) {
        // Produce: write one copy of this page per JSON file in the directory
        let directory = html.match(/\$[\~]\w+[^\~]/)[0].replace(/((\$)|(\~)|(\~))/g, "");
        html = html.replace(html.match(/\$\~\w+\~/)[0], "");
        if (fs.existsSync(path + "/" + directory)) { // If the directory exists
          var packets = getFilesFromDirectories(path + "/" + directory);
          packets.forEach(function (packet) {
            if (packet.name.includes(".json")) { // Only use JSON files
              // Start from a fresh copy of the page for every JSON file,
              // so each output file gets its own values
              var temphtml = html;
              var data = JSON.parse(read(packet.path));
              if (typeof data == "object") {
                for (var k in data) {
                  while (temphtml.includes("{{" + k + "}}")) {
                    temphtml = temphtml.replace("{{" + k + "}}", data[k]);
                  }
                }
              } else {
                return;
              }
              var newfilename = data.FILENAME || packet.name.replace(".json", "." + ext);
              fs.writeFileSync(out + "/" + newfilename, temphtml);
            }
          });
        } else {
          console.log("Directory not found");
        }
      } else {
        fs.writeFileSync(out + "/" + page.name, html);
      }
    });
    if (listen) {
      // Watch the input directory and recompile whenever a file changes
      fs.watch(path, {recursive: true}, function (event, filename) {
        console.log("Recompiling");
        console.log(filename, event, path);
        compileSite({ path, out, listen: false, sourcePages });
      });
    }
  } else {
    throw new Error("Cannot compile directory containing server files.");
  }
}
compileSite(config);