Created
February 18, 2026 21:19
-
-
Save dpaola2/9147a0b759dac9287ca515c271e81a3e to your computer and use it in GitHub Desktop.
Obsidian vault link index — MCP server for Claude Code (TypeScript + SQLite)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import Database from "better-sqlite3"; | |
| import * as path from "path"; | |
| import { ParsedFile, ParsedLink, VaultParseResult } from "./parser.js"; | |
/** One incoming link: a file that links TO the queried file. */
export interface BacklinkResult {
  from: string; // relative path of the linking (source) file
  title: string | null; // source file's title (first `#` heading), if any
  lineNumber: number; // 1-based line in the source file where the link appears
  displayText: string | null; // alias after `|` in the wiki-link, if any
}

/** One outgoing link: a target the queried file links TO. */
export interface ForwardLinkResult {
  to: string; // resolved relative path of the link target
  title: string | null; // target file's title, null when unresolved
  displayText: string | null; // alias after `|` in the wiki-link, if any
  exists: boolean; // true when the target resolved to an indexed file
}

/** Per-file summary with incoming/outgoing link counts. */
export interface FileStats {
  path: string; // vault-relative path
  title: string | null; // first `#` heading, if any
  type: string | null; // `type:` frontmatter field, if any
  incoming: number; // count of links pointing at this file
  outgoing: number; // count of links this file makes
}

/** Whole-vault graph overview returned by getVaultStats(). */
export interface VaultStatsResult {
  totalFiles: number;
  totalLinks: number;
  mostLinkedTo: FileStats[]; // top-N files by incoming link count
  orphans: string[]; // files with no incoming links
  brokenLinks: { from: string; target: string }[]; // links whose target is not indexed
}
| export class VaultIndex { | |
| private db: Database.Database; | |
| constructor(dbPath: string) { | |
| this.db = new Database(dbPath); | |
| this.db.pragma("journal_mode = WAL"); | |
| this.db.pragma("foreign_keys = ON"); | |
| this.initSchema(); | |
| } | |
| private initSchema(): void { | |
| this.db.exec(` | |
| CREATE TABLE IF NOT EXISTS files ( | |
| id INTEGER PRIMARY KEY, | |
| path TEXT UNIQUE NOT NULL, | |
| title TEXT, | |
| type TEXT, | |
| tags TEXT, | |
| last_modified INTEGER NOT NULL | |
| ); | |
| CREATE TABLE IF NOT EXISTS links ( | |
| id INTEGER PRIMARY KEY, | |
| source_file_id INTEGER NOT NULL REFERENCES files(id) ON DELETE CASCADE, | |
| target_path TEXT NOT NULL, | |
| target_file_id INTEGER, | |
| display_text TEXT, | |
| line_number INTEGER, | |
| UNIQUE(source_file_id, target_path, line_number) | |
| ); | |
| CREATE INDEX IF NOT EXISTS idx_links_target ON links(target_file_id); | |
| CREATE INDEX IF NOT EXISTS idx_links_source ON links(source_file_id); | |
| CREATE INDEX IF NOT EXISTS idx_links_target_path ON links(target_path); | |
| CREATE INDEX IF NOT EXISTS idx_files_path ON files(path); | |
| `); | |
| } | |
| rebuild(parseResult: VaultParseResult): { filesIndexed: number; linksIndexed: number } { | |
| const rebuildTx = this.db.transaction(() => { | |
| // Clear existing data | |
| this.db.exec("DELETE FROM links"); | |
| this.db.exec("DELETE FROM files"); | |
| // Insert files | |
| const insertFile = this.db.prepare( | |
| "INSERT INTO files (path, title, type, tags, last_modified) VALUES (?, ?, ?, ?, ?)" | |
| ); | |
| for (const file of parseResult.files) { | |
| insertFile.run( | |
| file.relativePath, | |
| file.title, | |
| file.type, | |
| JSON.stringify(file.tags), | |
| file.lastModified | |
| ); | |
| } | |
| // Build path->id lookup | |
| const fileIdLookup = new Map<string, number>(); | |
| const allFiles = this.db | |
| .prepare("SELECT id, path FROM files") | |
| .all() as { id: number; path: string }[]; | |
| for (const f of allFiles) { | |
| fileIdLookup.set(f.path, f.id); | |
| } | |
| // Insert links | |
| const insertLink = this.db.prepare( | |
| "INSERT OR IGNORE INTO links (source_file_id, target_path, target_file_id, display_text, line_number) VALUES (?, ?, ?, ?, ?)" | |
| ); | |
| let linkCount = 0; | |
| for (const [sourcePath, links] of parseResult.links) { | |
| const sourceId = fileIdLookup.get(sourcePath); | |
| if (!sourceId) continue; | |
| for (const link of links) { | |
| const targetId = fileIdLookup.get(link.targetPath) ?? null; | |
| insertLink.run( | |
| sourceId, | |
| link.targetPath, | |
| targetId, | |
| link.displayText, | |
| link.lineNumber | |
| ); | |
| linkCount++; | |
| } | |
| } | |
| return { filesIndexed: parseResult.files.length, linksIndexed: linkCount }; | |
| }); | |
| return rebuildTx(); | |
| } | |
| getBacklinks(filePath: string): BacklinkResult[] { | |
| // Find the target file ID | |
| const file = this.db | |
| .prepare("SELECT id FROM files WHERE path = ?") | |
| .get(filePath) as { id: number } | undefined; | |
| if (!file) { | |
| // Try matching without .md extension or with different path formats | |
| const fuzzyResults = this.db | |
| .prepare( | |
| `SELECT DISTINCT f.path, f.title, l.line_number, l.display_text | |
| FROM links l | |
| JOIN files f ON f.id = l.source_file_id | |
| WHERE l.target_path = ? OR l.target_path = ?` | |
| ) | |
| .all(filePath, filePath.replace(/\.md$/, "")) as BacklinkResult[]; | |
| return fuzzyResults.map((r: any) => ({ | |
| from: r.path, | |
| title: r.title, | |
| lineNumber: r.line_number, | |
| displayText: r.display_text, | |
| })); | |
| } | |
| const results = this.db | |
| .prepare( | |
| `SELECT f.path, f.title, l.line_number, l.display_text | |
| FROM links l | |
| JOIN files f ON f.id = l.source_file_id | |
| WHERE l.target_file_id = ? | |
| ORDER BY f.path` | |
| ) | |
| .all(file.id) as any[]; | |
| return results.map((r) => ({ | |
| from: r.path, | |
| title: r.title, | |
| lineNumber: r.line_number, | |
| displayText: r.display_text, | |
| })); | |
| } | |
| getForwardLinks(filePath: string): ForwardLinkResult[] { | |
| const file = this.db | |
| .prepare("SELECT id FROM files WHERE path = ?") | |
| .get(filePath) as { id: number } | undefined; | |
| if (!file) return []; | |
| const results = this.db | |
| .prepare( | |
| `SELECT l.target_path, t.title, l.display_text, (t.id IS NOT NULL) as exists_flag | |
| FROM links l | |
| LEFT JOIN files t ON t.id = l.target_file_id | |
| WHERE l.source_file_id = ? | |
| ORDER BY l.line_number` | |
| ) | |
| .all(file.id) as any[]; | |
| return results.map((r) => ({ | |
| to: r.target_path, | |
| title: r.title, | |
| displayText: r.display_text, | |
| exists: !!r.exists_flag, | |
| })); | |
| } | |
| getVaultStats(topN: number = 15): VaultStatsResult { | |
| const totalFiles = ( | |
| this.db.prepare("SELECT COUNT(*) as c FROM files").get() as { c: number } | |
| ).c; | |
| const totalLinks = ( | |
| this.db.prepare("SELECT COUNT(*) as c FROM links").get() as { c: number } | |
| ).c; | |
| // Most linked-to files | |
| const mostLinkedTo = this.db | |
| .prepare( | |
| `SELECT f.path, f.title, f.type, | |
| COUNT(l.id) as incoming, | |
| (SELECT COUNT(*) FROM links l2 WHERE l2.source_file_id = f.id) as outgoing | |
| FROM files f | |
| JOIN links l ON l.target_file_id = f.id | |
| GROUP BY f.id | |
| ORDER BY incoming DESC | |
| LIMIT ?` | |
| ) | |
| .all(topN) as any[]; | |
| // Orphans: files with no incoming links | |
| const orphans = this.db | |
| .prepare( | |
| `SELECT f.path FROM files f | |
| LEFT JOIN links l ON l.target_file_id = f.id | |
| WHERE l.id IS NULL | |
| ORDER BY f.path` | |
| ) | |
| .all() as { path: string }[]; | |
| // Broken links: links where target doesn't resolve to a file | |
| const brokenLinks = this.db | |
| .prepare( | |
| `SELECT f.path as from_path, l.target_path | |
| FROM links l | |
| JOIN files f ON f.id = l.source_file_id | |
| WHERE l.target_file_id IS NULL | |
| ORDER BY f.path` | |
| ) | |
| .all() as { from_path: string; target_path: string }[]; | |
| return { | |
| totalFiles, | |
| totalLinks, | |
| mostLinkedTo: mostLinkedTo.map((r: any) => ({ | |
| path: r.path, | |
| title: r.title, | |
| type: r.type, | |
| incoming: r.incoming, | |
| outgoing: r.outgoing, | |
| })), | |
| orphans: orphans.map((r) => r.path), | |
| brokenLinks: brokenLinks.map((r) => ({ | |
| from: r.from_path, | |
| target: r.target_path, | |
| })), | |
| }; | |
| } | |
| searchFiles(query: string): FileStats[] { | |
| const pattern = `%${query}%`; | |
| const results = this.db | |
| .prepare( | |
| `SELECT f.path, f.title, f.type, | |
| (SELECT COUNT(*) FROM links l WHERE l.target_file_id = f.id) as incoming, | |
| (SELECT COUNT(*) FROM links l2 WHERE l2.source_file_id = f.id) as outgoing | |
| FROM files f | |
| WHERE f.path LIKE ? OR f.title LIKE ? | |
| ORDER BY incoming DESC | |
| LIMIT 20` | |
| ) | |
| .all(pattern, pattern) as any[]; | |
| return results.map((r: any) => ({ | |
| path: r.path, | |
| title: r.title, | |
| type: r.type, | |
| incoming: r.incoming, | |
| outgoing: r.outgoing, | |
| })); | |
| } | |
| close(): void { | |
| this.db.close(); | |
| } | |
| } |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #!/usr/bin/env node | |
| import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; | |
| import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; | |
| import { z } from "zod"; | |
| import * as path from "path"; | |
| import * as fs from "fs"; | |
| import { parseVault } from "./parser.js"; | |
| import { VaultIndex } from "./database.js"; | |
// Vault root and index DB location, overridable via environment variables.
// Defaults assume this server lives three levels below the vault root.
// NOTE(review): `__dirname` only exists under CommonJS emit — confirm the
// tsconfig "module": "Node16" + package.json combination compiles this file
// as CJS, not ESM, where `__dirname` is undefined.
const VAULT_PATH = process.env.VAULT_PATH || path.resolve(__dirname, "../../..");
const DB_PATH = process.env.DB_PATH || path.join(__dirname, "..", "vault-index.sqlite");

// Module-level state: the open index, and the wall-clock time of the last
// rebuild (0 = never rebuilt; forces a rebuild on first use).
let vaultIndex: VaultIndex;
let lastRebuildTime = 0;
| function rebuildIndex(): { filesIndexed: number; linksIndexed: number } { | |
| const parseResult = parseVault(VAULT_PATH); | |
| const stats = vaultIndex.rebuild(parseResult); | |
| lastRebuildTime = Date.now(); | |
| return stats; | |
| } | |
| // Check if any files changed since last rebuild | |
| function needsRebuild(): boolean { | |
| if (lastRebuildTime === 0) return true; | |
| const checkDir = (dir: string): boolean => { | |
| try { | |
| const entries = fs.readdirSync(dir, { withFileTypes: true }); | |
| for (const entry of entries) { | |
| if (entry.name.startsWith(".") || entry.name === "node_modules" || entry.name === "tools" || entry.name === "dist") { | |
| continue; | |
| } | |
| const fullPath = path.join(dir, entry.name); | |
| if (entry.isDirectory()) { | |
| if (checkDir(fullPath)) return true; | |
| } else if (entry.name.endsWith(".md")) { | |
| const stat = fs.statSync(fullPath); | |
| if (stat.mtimeMs > lastRebuildTime) return true; | |
| } | |
| } | |
| } catch { | |
| // Skip unreadable directories | |
| } | |
| return false; | |
| }; | |
| return checkDir(VAULT_PATH); | |
| } | |
| function ensureFreshIndex(): void { | |
| if (needsRebuild()) { | |
| rebuildIndex(); | |
| } | |
| } | |
/**
 * Server entry point: open the index, do an initial full build, register the
 * five vault tools on an MCP server, and serve over stdio.
 *
 * All logging goes to stderr (console.error) because stdout is reserved for
 * the MCP stdio transport's protocol messages.
 */
async function main() {
  // Initialize database and build initial index
  vaultIndex = new VaultIndex(DB_PATH);
  const initialStats = rebuildIndex();
  console.error(
    `[entity-index] Indexed ${initialStats.filesIndexed} files, ${initialStats.linksIndexed} links from ${VAULT_PATH}`
  );
  const server = new McpServer({
    name: "entity-index",
    version: "1.0.0",
  });
  // Tool: vault_backlinks — reverse lookup: who links to this file?
  server.tool(
    "vault_backlinks",
    "Find all files that link TO a given file (reverse link lookup). Returns source files with line numbers.",
    {
      file: z
        .string()
        .describe(
          "Relative path to the file within the vault, e.g. '06-people/Kim.md' or '03-living-docs/Management-Philosophy.md'"
        ),
    },
    async ({ file }) => {
      // Each tool refreshes the index lazily before querying.
      ensureFreshIndex();
      const backlinks = vaultIndex.getBacklinks(file);
      return {
        content: [
          {
            type: "text" as const,
            text: JSON.stringify(
              {
                file,
                backlinks: backlinks.map((b) => ({
                  from: b.from,
                  title: b.title,
                  line: b.lineNumber,
                  display: b.displayText,
                })),
                count: backlinks.length,
              },
              null,
              2
            ),
          },
        ],
      };
    }
  );
  // Tool: vault_forward_links — forward lookup: what does this file link to?
  server.tool(
    "vault_forward_links",
    "Find all files that a given file links TO (forward link lookup). Shows which links resolve to existing files.",
    {
      file: z
        .string()
        .describe(
          "Relative path to the file within the vault, e.g. '06-people/Kim.md'"
        ),
    },
    async ({ file }) => {
      ensureFreshIndex();
      const links = vaultIndex.getForwardLinks(file);
      return {
        content: [
          {
            type: "text" as const,
            text: JSON.stringify(
              {
                file,
                // `exists: false` entries are broken links.
                links_to: links.map((l) => ({
                  to: l.to,
                  title: l.title,
                  display: l.displayText,
                  exists: l.exists,
                })),
                count: links.length,
              },
              null,
              2
            ),
          },
        ],
      };
    }
  );
  // Tool: vault_stats — whole-vault graph overview (no parameters).
  server.tool(
    "vault_stats",
    "Get an overview of the vault graph: total files, total links, most-linked files, orphans (no incoming links), and broken links.",
    {},
    async () => {
      ensureFreshIndex();
      const stats = vaultIndex.getVaultStats();
      return {
        content: [
          {
            type: "text" as const,
            text: JSON.stringify(stats, null, 2),
          },
        ],
      };
    }
  );
  // Tool: vault_search_links — substring search over paths and titles.
  server.tool(
    "vault_search_links",
    "Search for files by name or path fragment. Returns matching files with their incoming and outgoing link counts.",
    {
      query: z
        .string()
        .describe(
          "Search term to match against file paths and titles, e.g. 'Kim' or 'pattern' or 'show-notes'"
        ),
    },
    async ({ query }) => {
      ensureFreshIndex();
      const results = vaultIndex.searchFiles(query);
      return {
        content: [
          {
            type: "text" as const,
            text: JSON.stringify(
              {
                query,
                results: results.map((r) => ({
                  path: r.path,
                  title: r.title,
                  type: r.type,
                  incoming: r.incoming,
                  outgoing: r.outgoing,
                })),
                count: results.length,
              },
              null,
              2
            ),
          },
        ],
      };
    }
  );
  // Tool: vault_rebuild — unconditional full rebuild (skips the staleness check).
  server.tool(
    "vault_rebuild",
    "Force a full rebuild of the vault index. Use this if the index seems stale or after major vault changes.",
    {},
    async () => {
      const stats = rebuildIndex();
      return {
        content: [
          {
            type: "text" as const,
            text: JSON.stringify(
              {
                message: "Index rebuilt successfully",
                filesIndexed: stats.filesIndexed,
                linksIndexed: stats.linksIndexed,
                vaultPath: VAULT_PATH,
              },
              null,
              2
            ),
          },
        ],
      };
    }
  );
  // Connect via stdio transport
  const transport = new StdioServerTransport();
  await server.connect(transport);
  console.error("[entity-index] MCP server running on stdio");
}
// Entry point: any unhandled error during startup or serving is fatal.
// Logged to stderr (stdout belongs to the MCP transport), then exit non-zero.
main().catch((err) => {
  console.error("[entity-index] Fatal error:", err);
  process.exit(1);
});
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| { | |
| "name": "entity-index", | |
| "version": "1.0.0", | |
| "description": "Vault backlink index exposed as an MCP server", | |
| "main": "dist/index.js", | |
| "scripts": { | |
| "build": "tsc", | |
| "start": "node dist/index.js" | |
| }, | |
| "keywords": [], | |
| "author": "", | |
| "license": "ISC", | |
| "dependencies": { | |
| "@modelcontextprotocol/sdk": "^1.26.0", | |
| "better-sqlite3": "^12.6.2" | |
| }, | |
| "devDependencies": { | |
| "@types/better-sqlite3": "^7.6.13", | |
| "@types/node": "^25.2.3", | |
| "typescript": "^5.9.3" | |
| } | |
| } |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import * as fs from "fs"; | |
| import * as path from "path"; | |
/** Metadata extracted from one markdown file in the vault. */
export interface ParsedFile {
  relativePath: string; // path relative to the vault root
  title: string | null; // first `#` heading, if any
  type: string | null; // `type:` field from YAML frontmatter, if any
  tags: string[]; // inline-array `tags: [...]` from frontmatter
  lastModified: number; // file mtime, floored to whole milliseconds
}

/** One wiki-link occurrence inside a source file. */
export interface ParsedLink {
  targetRaw: string; // raw link target as written
  targetPath: string; // resolved relative path (best guess)
  displayText: string | null; // alias after |
  lineNumber: number; // 1-based line where the link appears
}

/** Result of a full vault scan: file metadata plus per-file outgoing links. */
export interface VaultParseResult {
  files: ParsedFile[];
  links: Map<string, ParsedLink[]>; // keyed by source file relativePath
}
// Match [[target]] or [[target|display]] or [[target#heading]] or [[target#heading|display]].
// Group 1 = target (everything up to `#`, `|`, or `]]`); group 2 = display alias.
// The `g` flag makes this regex stateful: callers must reset `lastIndex`
// before scanning each new string.
const WIKI_LINK_RE = /\[\[([^\]|#]+)(?:#[^\]|]*)?(?:\|([^\]]*))?\]\]/g;
| // Extract frontmatter between --- delimiters | |
| function parseFrontmatter(content: string): { type: string | null; tags: string[] } { | |
| const match = content.match(/^---\n([\s\S]*?)\n---/); | |
| if (!match) return { type: null, tags: [] }; | |
| const fm = match[1]; | |
| let type: string | null = null; | |
| let tags: string[] = []; | |
| const typeMatch = fm.match(/^type:\s*(.+)$/m); | |
| if (typeMatch) { | |
| type = typeMatch[1].trim().replace(/^["']|["']$/g, ""); | |
| } | |
| const tagsMatch = fm.match(/^tags:\s*\[([^\]]*)\]/m); | |
| if (tagsMatch) { | |
| tags = tagsMatch[1].split(",").map((t) => t.trim().replace(/^["']|["']$/g, "")).filter(Boolean); | |
| } | |
| return { type, tags }; | |
| } | |
| // Extract first # heading as title | |
| function parseTitle(content: string): string | null { | |
| // Skip frontmatter | |
| let body = content; | |
| if (content.startsWith("---")) { | |
| const endIdx = content.indexOf("\n---", 3); | |
| if (endIdx !== -1) { | |
| body = content.slice(endIdx + 4); | |
| } | |
| } | |
| const match = body.match(/^#\s+(.+)$/m); | |
| return match ? match[1].trim() : null; | |
| } | |
| // Extract all wiki-links from content with line numbers | |
| function parseLinks(content: string): ParsedLink[] { | |
| const links: ParsedLink[] = []; | |
| const lines = content.split("\n"); | |
| for (let i = 0; i < lines.length; i++) { | |
| let match: RegExpExecArray | null; | |
| WIKI_LINK_RE.lastIndex = 0; | |
| while ((match = WIKI_LINK_RE.exec(lines[i])) !== null) { | |
| const targetRaw = match[1].trim(); | |
| const displayText = match[2]?.trim() || null; | |
| links.push({ | |
| targetRaw, | |
| targetPath: targetRaw, // will be resolved later | |
| displayText, | |
| lineNumber: i + 1, | |
| }); | |
| } | |
| } | |
| return links; | |
| } | |
| // Walk directory recursively, collecting .md files | |
| function walkDir(dir: string, rootDir: string): string[] { | |
| const results: string[] = []; | |
| const entries = fs.readdirSync(dir, { withFileTypes: true }); | |
| for (const entry of entries) { | |
| const fullPath = path.join(dir, entry.name); | |
| // Skip hidden directories, node_modules, .obsidian, tools | |
| if (entry.isDirectory()) { | |
| if ( | |
| entry.name.startsWith(".") || | |
| entry.name === "node_modules" || | |
| entry.name === "tools" || | |
| entry.name === "dist" | |
| ) { | |
| continue; | |
| } | |
| results.push(...walkDir(fullPath, rootDir)); | |
| } else if (entry.name.endsWith(".md") && !entry.name.startsWith(".")) { | |
| results.push(fullPath); | |
| } | |
| } | |
| return results; | |
| } | |
| // Build a lookup map: filename stem -> relative paths | |
| function buildFilenameLookup(filePaths: string[], vaultRoot: string): Map<string, string[]> { | |
| const lookup = new Map<string, string[]>(); | |
| for (const fullPath of filePaths) { | |
| const relPath = path.relative(vaultRoot, fullPath); | |
| const stem = path.basename(relPath, ".md"); | |
| const stemLower = stem.toLowerCase(); | |
| const existing = lookup.get(stemLower) || []; | |
| existing.push(relPath); | |
| lookup.set(stemLower, existing); | |
| } | |
| return lookup; | |
| } | |
| // Resolve a wiki-link target to a relative file path | |
| function resolveTarget( | |
| targetRaw: string, | |
| filenameLookup: Map<string, string[]>, | |
| vaultRoot: string | |
| ): string { | |
| // If it contains a /, treat as a path from vault root | |
| if (targetRaw.includes("/")) { | |
| const withExt = targetRaw.endsWith(".md") ? targetRaw : targetRaw + ".md"; | |
| if (fs.existsSync(path.join(vaultRoot, withExt))) { | |
| return withExt; | |
| } | |
| // Try without .md extension in case the raw target already resolves | |
| return withExt; | |
| } | |
| // Otherwise, search by filename stem | |
| const stemLower = targetRaw.toLowerCase(); | |
| const matches = filenameLookup.get(stemLower); | |
| if (matches && matches.length > 0) { | |
| return matches[0]; // first match (ambiguous if multiple, but that's rare) | |
| } | |
| // Unresolved — return as-is with .md | |
| return targetRaw.endsWith(".md") ? targetRaw : targetRaw + ".md"; | |
| } | |
| export function parseVault(vaultRoot: string): VaultParseResult { | |
| const absolutePaths = walkDir(vaultRoot, vaultRoot); | |
| const filenameLookup = buildFilenameLookup(absolutePaths, vaultRoot); | |
| const files: ParsedFile[] = []; | |
| const links = new Map<string, ParsedLink[]>(); | |
| for (const fullPath of absolutePaths) { | |
| const relPath = path.relative(vaultRoot, fullPath); | |
| const stat = fs.statSync(fullPath); | |
| const content = fs.readFileSync(fullPath, "utf-8"); | |
| const { type, tags } = parseFrontmatter(content); | |
| const title = parseTitle(content); | |
| files.push({ | |
| relativePath: relPath, | |
| title, | |
| type, | |
| tags, | |
| lastModified: Math.floor(stat.mtimeMs), | |
| }); | |
| const fileLinks = parseLinks(content); | |
| // Resolve link targets | |
| for (const link of fileLinks) { | |
| link.targetPath = resolveTarget(link.targetRaw, filenameLookup, vaultRoot); | |
| } | |
| if (fileLinks.length > 0) { | |
| links.set(relPath, fileLinks); | |
| } | |
| } | |
| return { files, links }; | |
| } |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Show hidden characters
| { | |
| "compilerOptions": { | |
| "target": "ES2022", | |
| "module": "Node16", | |
| "moduleResolution": "Node16", | |
| "outDir": "./dist", | |
| "rootDir": "./src", | |
| "strict": true, | |
| "esModuleInterop": true, | |
| "skipLibCheck": true, | |
| "forceConsistentCasingInFileNames": true, | |
| "resolveJsonModule": true, | |
| "declaration": true | |
| }, | |
| "include": ["src/**/*"], | |
| "exclude": ["node_modules", "dist"] | |
| } |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment