#!/usr/bin/env python2

'''
Make Me (makeme), a custom static-site generator thingy built by
Don Kuntz <http://don.kuntz.co/> to generate a host of sites.

While the source *is* available for other people, it'll probably
take some tinkering to get it working for your own site. Sorry.
'''

import logging as l
import re
from datetime import datetime
from os import walk, mkdir, makedirs
from os.path import basename, exists, getmtime
from shutil import copy2

import misaka as m
from jinja2 import Environment, PackageLoader
from yaml import safe_load

# logging configuration
l.basicConfig(level=l.INFO)


# sites: maps each source directory to the hostname it is generated for
sites = {
    'whimsy': 'whimsy.kuntz.co',
}
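
# For reference, the source layout this script assumes (inferred from the
# code below rather than documented anywhere):
#
#   <directory>/posts/      dated markdown posts with YAML front matter
#   <directory>/pages/      standalone markdown pages with front matter
#   <directory>/templates/  jinja2 templates (post.html, page.html, ...)
#   <directory>/assets/     static files copied through verbatim
#   <directory>/index.md    the front page source
#
# Everything renders into ./generated/<url>/.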


def generate_site(path, url):
    l.debug("Generating " + url)

    # templates for this site live under <path>/templates
    env = Environment(loader=PackageLoader('makeme', path + '/templates'))

    posts = []
    archives = []
    pages = []

    if not exists('./generated/'):
        l.debug('creating generated directory')
        mkdir("./generated")

    if not exists('./generated/' + url):
        l.debug('creating directory for ' + path)
        mkdir("./generated/" + url)

    if exists("./" + path + "/posts/"):
        l.debug(url + " has blog posts")
        posts = expand_blog(parse_posts(path), env, path, url)
        archives = make_archives(posts, path, url)
    else:
        l.info(url + " is not a blog?")

    if exists("./" + path + "/pages/"):
        l.debug(url + " has pages!")
        pages = make_pages(parse_pages(path), env, path, url)
    else:
        l.info(url + " does not have pages?")

    make_index(posts, archives, pages, env, path, url)
    copy_assets(path, url)


def parse_posts(path):
    # gather every file under <path>/posts and sort newest-first
    posts = []
    post_directory = './' + path + '/posts/'
    for root, dirs, files in walk(post_directory):
        for f in files:
            posts.append(parse_post(root + "/" + f))

    return sorted(posts, key=lambda p: p['timestamp'], reverse=True)


def parse_post(filename):
    l.debug("parsing " + filename)
    post = {}

    with open(filename) as f:
        contents = f.read()

    # split the file into YAML front matter (between the --- fences) and
    # the markdown body that follows it
    frontmatter, bodymatter = re.search(
        r'\A---\s+^(.+?)$\s+---\s*(.*)\Z', contents, re.M | re.S).groups()

    post['content'] = m.html(bodymatter)
    config = safe_load(frontmatter)
    for k, v in config.items():
        post[k] = v

    # the filename carries the post's date (and optionally time) and slug
    name = basename(filename)
    timestamp, slug = parse_timestamp(name)
    post['timestamp'] = timestamp
    post['filename'] = slug
    post['changed'] = getmtime(filename)

    if 'layout' not in post:
        post['layout'] = 'post.html'

    return post
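
# A minimal sketch of what a post source file is expected to look like,
# judging from the regex and YAML handling above (every field except
# 'layout' is illustrative, not required):
#
#   ---
#   title: An Example Post
#   layout: post.html
#   ---
#   The *markdown* body goes here.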


def parse_timestamp(filename):
    # post filenames are expected to look like YYYY-MM-DD[-HH-MM]-slug.md:
    # pull the date (and optional time) off the front, the slug is the rest
    sp = filename.split('-')
    hours = -1
    mins = -1
    try:
        hours = int(sp[3])
        mins = int(sp[4])
    except (IndexError, ValueError):
        l.debug("No hours/minutes")

    num_p = 3
    if hours != -1 and mins != -1:
        d = datetime(int(sp[0]), int(sp[1]), int(sp[2]), hours, mins)
        num_p = 5
    else:
        d = datetime(int(sp[0]), int(sp[1]), int(sp[2]))

    # rejoin the remaining pieces into the slug and strip the extension
    path = "-".join(sp[num_p:]).rsplit(".", 1)[0]

    l.debug(d)
    return (d, path)
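
# Worked examples, assuming the filename scheme described above:
#
#   parse_timestamp('2014-03-07-some-post.md')
#       -> (datetime(2014, 3, 7, 0, 0), 'some-post')
#   parse_timestamp('2014-03-07-16-30-some-post.md')
#       -> (datetime(2014, 3, 7, 16, 30), 'some-post')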


def expand_blog(posts, env, path, url):
    for p in posts:
        # posts render to ./generated/<url>/YYYY/MM/DD/<slug>/index.html
        outPath = './generated/' + url + p['timestamp'].strftime('/%Y/%m/%d/')
        outPath += p['filename'] + '/'

        outFile = outPath + "index.html"

        # the post's public URL is everything past ./generated/<url>
        p['url'] = "http://" + url + outPath[len('./generated/' + url):]

        # is the output already around, and is it newer than the source's
        # last change? if so, skip regenerating this post
        if exists(outPath):
            l.debug('path is here already')
            if exists(outFile) and p['changed'] < getmtime(outFile):
                l.debug('no need to change file')
                continue
        else:
            l.debug('no such path, making it')
            makedirs(outPath)

        template = env.get_template(p['layout'])
        writer = open(outFile, 'w')
        writer.write(template.render(post=p))
        writer.close()

    return posts
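
# For example, a post slugged 'some-post' dated 2014-03-07 in the
# 'whimsy' site lands at
#
#   ./generated/whimsy.kuntz.co/2014/03/07/some-post/index.html
#
# and gets the URL http://whimsy.kuntz.co/2014/03/07/some-post/.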


def make_archives(posts, path, url):
    # bucket posts by year, then by month within the year
    arc = {}

    for p in posts:
        ts = p['timestamp']
        if ts.year not in arc:
            arc[ts.year] = {}
        if ts.month not in arc[ts.year]:
            arc[ts.year][ts.month] = []

        arc[ts.year][ts.month].append(p)

    # this is where creating archive pages *would* occur, if wanted

    return arc
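
# The structure handed back looks like (illustrative):
#
#   {2014: {3: [post, post, ...], 1: [...]}, 2013: {12: [...]}}
#
# with each month's list keeping the newest-first order of `posts`.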


def parse_pages(path):
    pages = []
    for root, dirs, files in walk('./' + path + '/pages/'):
        for f in files:
            pages.append(parse_page(root + '/' + f))
    return pages


def parse_page(filename):
    l.debug("parsing " + filename)
    page = {}

    with open(filename) as f:
        contents = f.read()

    # same front-matter convention as posts: YAML between --- fences,
    # markdown body after them
    frontmatter, bodymatter = re.search(
        r'\A---\s+^(.+?)$\s+---\s*(.*)\Z', contents, re.M | re.S).groups()

    page['content'] = m.html(bodymatter)
    config = safe_load(frontmatter)
    for k, v in config.items():
        page[k] = v

    # a page's URL path is its filename minus the extension
    name = basename(filename)
    page['path'] = name.rsplit('.', 1)[0]
    page['changed'] = getmtime(filename)

    if 'layout' not in page:
        page['layout'] = 'page.html'

    return page


def make_pages(pages, env, path, url):
    for p in pages:
        # pages render to ./generated/<url>/<path>/index.html
        outPath = './generated/' + url + '/' + p['path'] + '/'
        outFile = outPath + "index.html"

        # the page's public URL is everything past ./generated/<url>
        p['url'] = "http://" + url + outPath[len('./generated/' + url):]

        # is the output already around, and is it newer than the source's
        # last change? if so, skip regenerating this page
        if exists(outPath):
            l.debug('path is here already')
            if exists(outFile) and p['changed'] < getmtime(outFile):
                l.debug('no need to change file')
                continue
        else:
            l.debug('no such path, making it')
            makedirs(outPath)

        template = env.get_template(p['layout'])
        writer = open(outFile, 'w')
        writer.write(template.render(page=p))
        writer.close()

    return pages


def make_index(posts, archives, pages, env, path, url):
    if not exists('./' + path + '/index.md'):
        l.info(path + " doesn't have an index")
        return

    with open('./' + path + '/index.md') as f:
        contents = f.read()

    frontmatter, bodymatter = re.search(
        r'\A---\s+^(.+?)$\s+---\s*(.*)\Z', contents, re.M | re.S).groups()
    config = safe_load(frontmatter)

    # note that, unlike posts and pages, the index body is handed to the
    # template as raw markdown rather than rendered through misaka
    template = env.get_template(config['layout'])
    writer = open('./generated/' + url + '/index.html', 'w')
    writer.write(template.render(page=config, content=bodymatter,
                                 posts=posts, archives=archives))
    writer.close()


def copy_assets(path, url):
    if not exists('./' + path + '/assets'):
        l.info('no assets in ' + path)
        return

    newPath = './generated/' + url

    # make sure the top-level assets directory exists in the output;
    # the walk below only creates directories *inside* it
    if not exists(newPath + '/assets'):
        makedirs(newPath + '/assets')

    for root, dirs, files in walk('./' + path + '/assets'):
        # mirror the directory tree under ./generated/<url>/assets
        for d in dirs:
            ndir = root + '/' + d
            ndir = ndir.replace('./' + path, newPath)

            if not exists(ndir):
                makedirs(ndir)

        # copy each file across, skipping any already up to date
        for f in files:
            fPath = root + '/' + f
            nfPath = fPath.replace('./' + path, newPath)

            if exists(nfPath) and getmtime(nfPath) > getmtime(fPath):
                continue

            copy2(fPath, nfPath)


if __name__ == '__main__':
    for directory, url in sites.items():
        generate_site(directory, url)
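
# A minimal run, assuming the source layout sketched near the top of
# the file:
#
#   $ python2 makeme.py
#
# renders every site listed in `sites` into ./generated/.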