Last active
February 3, 2025 19:01
-
-
Save Livog/a5157cca56c4cd876e8539b3b2af8b7e to your computer and use it in GitHub Desktop.
A minimal caching utility for Next.js that stores large responses in .next/cache/custom using hashed filenames, auto-serves valid cached responses, and respects revalidation times. It supports forced refresh, helping you work around standard fetch cache size limits while seamlessly integrating with Next.js caching conventions.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import { createHash } from 'crypto'
import fs from 'fs/promises'
import { createWriteStream } from 'fs'
import path from 'path'
import { Readable } from 'stream'
import { finished, pipeline } from 'stream/promises'
import { glob } from 'glob'
const CACHE_DIR = path.join(process.cwd(), '.next/cache/custom') | |
type NextFetchRequestConfig = RequestInit & { | |
next?: { | |
revalidate?: number | false | |
tags?: string[] | |
} | |
} | |
interface CacheOptions extends NextFetchRequestConfig { | |
forceRefresh?: boolean | |
} | |
async function ensureCacheDir() { | |
await fs.mkdir(CACHE_DIR, { recursive: true }) | |
} | |
function hashUrl(url: string): string { | |
return createHash('sha256').update(url).digest('hex').slice(0, 8) | |
} | |
async function findExistingCacheFile(urlHash: string): Promise<string | null> { | |
const pattern = path.join(CACHE_DIR, `*.${urlHash}.*.json`) | |
try { | |
const files = await glob(pattern) | |
if (files.length === 0) return null | |
return files[0] | |
} catch (error) { | |
console.error('Error finding existing cache file:', error) | |
return null | |
} | |
} | |
async function findValidCacheFile(urlHash: string): Promise<string | null> { | |
const file = await findExistingCacheFile(urlHash) | |
if (!file) return null | |
const now = Date.now() | |
const [revalidate, , lastWrite] = path.basename(file).split('.') | |
const timestamp = parseInt(lastWrite) | |
const ttl = parseInt(revalidate) | |
if (isNaN(timestamp) || isNaN(ttl)) return null | |
const expirationTime = timestamp + ttl * 1000 | |
if (expirationTime <= now) return null | |
return file | |
} | |
export async function fetchAndCache<T>(url: string, { forceRefresh = false, ...fetchOptions }: CacheOptions = {}): Promise<T> { | |
const urlHash = hashUrl(url) | |
await ensureCacheDir() | |
const revalidate = fetchOptions?.next?.revalidate ?? 3600 | |
delete fetchOptions.next | |
const now = Date.now() | |
const validCacheFile = !forceRefresh ? await findValidCacheFile(urlHash) : null | |
if (validCacheFile) { | |
const data = await fs.readFile(validCacheFile, 'utf-8') | |
return JSON.parse(data) | |
} | |
console.log(`No valid cache file found for ${url}`) | |
console.log(`Starting fetch of ${url}`) | |
const response = await fetch(url, fetchOptions) | |
if (!response.ok) throw new Error(`Failed to fetch ${url}`) | |
if (!response.body) throw new Error('No response body') | |
console.log(`Finished fetch of ${url}`) | |
const tempFile = path.join(CACHE_DIR, `temp-${Math.random().toString(36).slice(2)}.tmp`) | |
const writeStream = createWriteStream(tempFile) | |
await finished(Readable.fromWeb(response.body as any).pipe(writeStream)) | |
try { | |
const existingFile = await findExistingCacheFile(urlHash) | |
if (existingFile) await fs.unlink(existingFile) | |
const cacheFile = path.join(CACHE_DIR, `${revalidate}.${urlHash}.${now}.json`) | |
await fs.rename(tempFile, cacheFile) | |
const data = await fs.readFile(cacheFile, 'utf-8') | |
return JSON.parse(data) | |
} catch (error) { | |
await fs.unlink(tempFile).catch(() => {}) | |
throw error | |
} | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment