Created
August 11, 2025 19:24
-
-
Save vralle/5ad0d01023f8bdce5d047db1eedb09b0 to your computer and use it in GitHub Desktop.
SSG With html-bundler-webpack-plugin
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import matter from "gray-matter"; | |
import limax from "limax"; | |
import { readdir, readFile } from "node:fs/promises"; | |
import path from "node:path"; | |
import YAML from "yaml"; | |
import { z } from "zod"; | |
/** | |
* ----------------------------- | |
* Types & helpers | |
* ----------------------------- | |
*/ | |
/** | |
* Base content entry without enforced filePath for static or loader-sourced data. | |
* @typedef {object} ContentEntry | |
* @property {string} slug | |
* @property {Record<string, unknown>} data | |
* @property {string} body | |
* @property {string} [id] - Usually the filename (if from filesystem) | |
* @property {string} [filePath] - Absolute path to the file (if from filesystem) | |
*/ | |
/** | |
* Fully loaded entry after validation and computed fields. | |
* @typedef {object} LoadedEntry | |
* @property {string} slug | |
* @property {Record<string, unknown>} data | |
* @property {string} body | |
* @property {string} [id] | |
* @property {string} [filePath] | |
*/ | |
/** | |
* Validation error for a specific entry. | |
* @typedef {object} ValidationIssue | |
* @property {string} source - filePath or slug | |
* @property {unknown} error | |
*/ | |
/** | |
* Loader callback for dynamic collection sources. | |
* @callback CollectionLoader | |
* @returns {Promise<ContentEntry[]>|ContentEntry[]} | |
*/ | |
/** | |
* Optional configuration object for a collection. | |
* @typedef {object} CollectionConfig | |
* @property {import("zod").ZodTypeAny | ((ctx: { image: () => import("zod").ZodTypeAny }) => import("zod").ZodTypeAny)} [schema] | |
* Schema to validate frontmatter/data; can be a Zod schema or a factory function. | |
* @property {CollectionLoader} [loader] | |
* Async or sync function returning entries for this collection. | |
* @property {(ctx: { slug: string; id?: string; filePath?: string; data: Record<string, unknown>; body: string }) => Promise<Record<string, unknown>>|Record<string, unknown>} [computed] | |
* Function that returns extra fields to merge into `data` after validation. | |
*/ | |
/** | |
* Collection source type: | |
* - a CollectionConfig object | |
* - a static ContentEntry array | |
* - a static object dictionary { [slug]: data | { data, body? } } | |
* @typedef {CollectionConfig | ContentEntry[] | Record<string, Record<string, unknown> | { data: Record<string, unknown>; body?: string }>} CollectionSource | |
*/ | |
/** | |
* Map of named collections. | |
* @typedef {Record<string, CollectionSource>} CollectionsMap | |
*/ | |
/** | |
* Options for the loader. | |
* @typedef {object} LoaderOptions | |
* @property {(error: unknown, source: string) => void} [onValidateError] | |
* Optional callback when validation fails for an entry. | |
* @property {boolean} [throwOnError] | |
* If true (default), throws if there are validation issues. | |
* @property {import("limax").Options} [slugOptions] | |
* Options for slug generation. | |
* @property {string} [contentDirRoot] | |
* Root directory for filesystem content. Defaults to `<process.cwd()>/src/content`. | |
*/ | |
/** | |
* Final output structure: { [collectionName]: LoadedEntry[] } | |
* @typedef {Record<string, LoadedEntry[]>} LoadedCollections | |
*/ | |
/**
 * Astro-like image() helper — yields a plain Zod string schema
 * (no file resolution is performed here).
 * @returns {import("zod").ZodString}
 */
function image() {
  const imageSchema = z.string();
  return imageSchema;
}
/**
 * Get a human-readable error message from an unknown thrown value.
 * Always returns a string, even for values JSON cannot represent.
 * @param {unknown} err Error (or anything thrown / rejected with)
 * @returns {string} Error message
 */
function getErrorMessage(err) {
  if (err && typeof err === "object" && "message" in err) {
    return String(err.message ?? "Error");
  }
  try {
    // JSON.stringify returns undefined (not a string) for undefined,
    // functions and symbols — fall back to String() so the declared
    // string return type always holds.
    return JSON.stringify(err) ?? String(err);
  } catch {
    // Circular structures / BigInt make stringify throw.
    return String(err);
  }
}
/**
 * Check if a value is a plain object (literal/`new Object()` only).
 * Arrays, class instances and `Object.create(null)` objects all fail
 * the prototype test below.
 * @param {unknown} v Current data
 * @returns {v is Record<string, unknown>}
 */
function isPlainObject(v) {
  if (v === null || typeof v !== "object") {
    return false;
  }
  return Object.getPrototypeOf(v) === Object.prototype;
}
/**
 * Generate a slug from a file path (basename with the extension stripped).
 * NOTE(review): defaults to limax's own option defaults here, whereas
 * readCollectionsFromFs falls back to `{ locale: "en" }` — confirm
 * whether these two call sites should agree.
 * @param {string} filePath
 * @param {import("limax").Options|undefined} slugOptions
 * @returns {string} Entry slug
 */
function toSlugFromPath(filePath, slugOptions) {
  const ext = path.extname(filePath);
  const stem = path.basename(filePath, ext);
  return limax(stem, slugOptions ?? {});
}
/**
 * YAML engine adapter for gray-matter, backed by the `yaml` package.
 * @param {string} src Raw frontmatter text
 * @returns {unknown} Parsed YAML document
 */
function yamlParser(src) {
  const parsed = YAML.parse(src);
  return parsed;
}
/**
 * Normalize static entry source:
 * - array of ContentEntry: returned as-is (with minimal shape check)
 * - dictionary { slug: data | { data, body? } }: converted to ContentEntry[]
 *
 * A dictionary value is treated as the wrapper form only when its `data`
 * property is itself a plain object; this prevents a data dictionary that
 * merely contains a `data` key with a scalar value from being mangled
 * into `{ data: <scalar>, body: "" }`.
 *
 * @param {ContentEntry[] | Record<string, Record<string, unknown> | { data: Record<string, unknown>; body?: string }>} value
 * @returns {ContentEntry[]}
 * @throws {Error} When an entry does not match the expected shape.
 */
function normalizeStaticEntries(value) {
  if (Array.isArray(value)) {
    return value.map((e, i) => {
      if (typeof e?.slug !== "string" || !isPlainObject(e?.data) || typeof e?.body !== "string") {
        throw new Error(`Static array entry at index ${i} must have { slug: string, data: object, body: string }`);
      }
      return e;
    });
  }
  /** @type {ContentEntry[]} */
  const out = [];
  for (const [slug, v] of Object.entries(value)) {
    if (!isPlainObject(v)) {
      throw new Error(`Static object entry for slug "${slug}" must be an object`);
    }
    if ("data" in v && isPlainObject(v.data)) {
      // Wrapper form { data, body? }.
      const item = /** @type {{ data: Record<string, unknown>; body?: string }} */ (v);
      if (item.body !== undefined && typeof item.body !== "string") {
        // Previously a non-string body leaked into the entry unchecked.
        throw new Error(`Static object entry for slug "${slug}" must have a string body`);
      }
      out.push({ slug, data: item.data, body: item.body ?? "" });
    } else {
      // Bare data dictionary — the whole value is the frontmatter data.
      out.push({ slug, data: /** @type {Record<string, unknown>} */ (v), body: "" });
    }
  }
  return out;
}
/**
 * Validate entries using optional schema and add computed fields.
 * - Runs `computed` callbacks concurrently but preserves the input order
 *   of `entries` in the returned items (the previous push-on-resolve
 *   approach could interleave items when `computed` was async).
 * - Records a validation issue for every schema failure, including thrown
 *   falsy values (previously `if (error)` let those pass silently).
 * - Collects issues; optionally throws if `throwOnError !== false`.
 * @param {ContentEntry[]} entries
 * @param {CollectionConfig | undefined} config
 * @param {LoaderOptions | undefined} options
 * @returns {Promise<{ items: LoadedEntry[]; issues: ValidationIssue[] }>}
 */
async function validateAndCompute(entries, config, options) {
  /** @type {ValidationIssue[]} */
  const issues = [];
  // Schema can be supplied directly or as a factory receiving helpers.
  const schema = typeof config?.schema === "function" ? config.schema({ image }) : config?.schema;
  const results = await Promise.all(entries.map(async (raw) => {
    /** @type {Record<string, unknown>} */
    let parsedData = raw.data;
    if (schema) {
      try {
        const s = /** @type {any} */ (schema);
        if (typeof s.safeParse === "function") {
          // Zod-like schema: prefer safeParse so the error object is explicit.
          const parsed = /** @type {{ success: boolean; data?: unknown; error?: unknown }} */ (s.safeParse(parsedData));
          if (!parsed.success) throw parsed.error;
          parsedData = /** @type {Record<string, unknown>} */ (isPlainObject(parsed.data) ? parsed.data : {});
        } else if (typeof s.parse === "function") {
          const out = /** @type {unknown} */ (s.parse(parsedData));
          parsedData = /** @type {Record<string, unknown>} */ (isPlainObject(out) ? out : {});
        }
      } catch (error) {
        // This catch runs before the first await of the map callback,
        // so issues are recorded in entry order.
        const source = raw.filePath ?? raw.slug;
        options?.onValidateError?.(error, source);
        issues.push({ source, error });
        return null; // skip this entry
      }
    }
    const extra = typeof config?.computed === "function"
      ? await config.computed({
        slug: raw.slug,
        id: raw.id,
        filePath: raw.filePath,
        data: parsedData,
        body: raw.body,
      })
      : {};
    // Ignore non-object computed results instead of corrupting `data`.
    const safeExtra = isPlainObject(extra) ? extra : {};
    return {
      slug: raw.slug,
      data: { ...parsedData, ...safeExtra },
      body: raw.body,
      id: raw.id,
      filePath: raw.filePath,
    };
  }));
  /** @type {LoadedEntry[]} */
  const items = results.filter((entry) => entry !== null);
  if (issues.length && (options?.throwOnError ?? true)) {
    const list = issues
      .map((i) => `- ${i.source}: ${getErrorMessage(i.error)}`)
      .join("\n");
    throw new Error(`Validation failed for ${issues.length} entr${issues.length === 1 ? "y" : "ies"}:\n${list}`);
  }
  return { items, issues };
}
/**
 * Recursively collect all files under a directory (depth-first).
 * @param {string} dir
 * @returns {Promise<string[]>} Joined paths of every file found.
 */
async function walk(dir) {
  /** @type {string[]} */
  const collected = [];
  const dirents = await readdir(dir, { withFileTypes: true });
  for (const dirent of dirents) {
    const fullPath = path.join(dir, dirent.name);
    if (dirent.isDirectory()) {
      const nested = await walk(fullPath);
      collected.push(...nested);
    } else {
      collected.push(fullPath);
    }
  }
  return collected;
}
/**
 * Read markdown (.md/.mdx) collections from the filesystem.
 * Returns a map { [collectionName]: ContentEntry[] }.
 *
 * @param {string} rootDir
 * Root directory for collections (can be absolute). Frequently something like `${process.cwd()}/src/content`.
 *
 * @param {Record<string, string>} collections
 * Map of collection name -> directory (absolute or relative to rootDir).
 *
 * @param {LoaderOptions | undefined} options
 * Options (currently only slugOptions is used here).
 *
 * @returns {Promise<Record<string, ContentEntry[]>>}
 */
async function readCollectionsFromFs(rootDir, collections, options) {
  /** @type {Record<string, ContentEntry[]>} */
  const result = {};
  for (const [name, dirSpec] of Object.entries(collections)) {
    const collectionDir = path.isAbsolute(dirSpec) ? dirSpec : path.join(rootDir, dirSpec);
    const all = await walk(collectionDir);
    const filePaths = all.filter((p) => /\.(md|mdx)$/i.test(p));
    // Read and parse files concurrently; Promise.all preserves map() order.
    /** @type {ContentEntry[]} */
    const entries = await Promise.all(filePaths.map(async (filePath) => {
      const raw = await readFile(filePath, "utf8");
      const { data: frontMatter, content } = matter(raw, {
        engines: { yaml: yamlParser },
      });
      const base = path.basename(filePath, path.extname(filePath));
      // Frontmatter slug wins; otherwise slugify the file basename.
      const fmSlug = /** @type {unknown} */ (frontMatter?.slug);
      const slug = typeof fmSlug === "string" && fmSlug.trim().length > 0
        ? fmSlug
        : limax(base, options?.slugOptions ?? { locale: "en" });
      return {
        id: base,
        slug,
        data: isPlainObject(frontMatter) ? frontMatter : {},
        body: content,
        filePath,
      };
    }));
    // Sort timestamp for an entry: data.date when parseable, else epoch 0.
    // Number.isFinite guards against NaN from unparseable date strings,
    // which would otherwise make the comparator return NaN (unspecified
    // sort order).
    const timeOf = (/** @type {ContentEntry} */ entry) => {
      if (!isPlainObject(entry.data)) return 0;
      const d = entry.data.date;
      if (typeof d !== "string" && typeof d !== "number") return 0;
      const t = new Date(d).getTime();
      return Number.isFinite(t) ? t : 0;
    };
    // Optional: sort by date desc, then by slug asc
    entries.sort((a, b) => {
      const da = timeOf(a);
      const db = timeOf(b);
      if (db !== da) return db - da;
      return a.slug.localeCompare(b.slug);
    });
    result[name] = entries;
  }
  return result;
}
// Public API: small helpers (error formatting, type guards, slug/YAML
// utilities), static-source normalization, schema validation with
// computed fields, and the filesystem markdown collection reader.
export {
  getErrorMessage,
  image,
  isPlainObject,
  normalizeStaticEntries,
  readCollectionsFromFs,
  toSlugFromPath,
  validateAndCompute,
  yamlParser,
};
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment