
@arafathusayn
Created June 14, 2022 10:20
TypeScript Next.js Serverless Function API Rate-Limit using LRU Cache
// rate-limit.ts
import { NextApiResponse } from "next";
import LRU from "lru-cache";

const rateLimit = (options: {
  uniqueTokenPerInterval: number;
  interval: number;
}) => {
  // lru-cache v6 options: `max` caps how many unique tokens are tracked,
  // `maxAge` evicts a token's counter once the interval elapses.
  // (lru-cache v7+ renames `maxAge` to `ttl`.)
  const tokenCache = new LRU<string, number[]>({
    max: options.uniqueTokenPerInterval || 500,
    maxAge: options.interval || 60000,
  });

  return {
    check: (res: NextApiResponse, limit: number, token: string) =>
      new Promise<boolean>((resolve, reject) => {
        // The counter lives in a single-element array so the cached
        // reference can be mutated in place on later requests.
        const tokenCount = tokenCache.get(token) || [0];
        if (tokenCount[0] === 0) {
          tokenCache.set(token, tokenCount);
        }
        tokenCount[0] += 1;

        const currentUsage = tokenCount[0];
        const isRateLimited = currentUsage > limit;
        res.setHeader("X-RateLimit-Limit", limit);
        res.setHeader(
          "X-RateLimit-Remaining",
          isRateLimited ? 0 : limit - currentUsage
        );

        return isRateLimited ? reject(false) : resolve(true);
      }),
  };
};

export default rateLimit;
// API route that uses the limiter (e.g. pages/api/example.ts)
import { NextApiRequest, NextApiResponse } from "next";
import rateLimit from "./rate-limit";

const limiter = rateLimit({
  interval: 60 * 1000, // 60 seconds
  uniqueTokenPerInterval: 500, // Max 500 unique tokens (users) per interval
});

export default async function handler(
  _req: NextApiRequest,
  res: NextApiResponse
) {
  try {
    await limiter.check(res, 10, "CACHE_TOKEN"); // 10 requests per minute
  } catch {
    res.status(429).json({ error: "Rate limit exceeded" });
    return;
  }

  res.status(200).json({ success: true });
}
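The handler above passes a single shared token ("CACHE_TOKEN"), so the limit is global across all callers. For a per-client limit, the token can instead be derived from the request. Below is a minimal sketch, assuming the client IP is read from the x-forwarded-for header with a fallback to the socket address; the header handling and fallback are assumptions about the deployment, not part of the original gist.

// Per-client variant (sketch): derive the rate-limit token from the caller's IP.
// The x-forwarded-for parsing below is an assumption, not part of the original gist.
import { NextApiRequest, NextApiResponse } from "next";
import rateLimit from "./rate-limit";

const limiter = rateLimit({
  interval: 60 * 1000,
  uniqueTokenPerInterval: 500,
});

export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  // x-forwarded-for may hold a comma-separated chain of proxies; take the first entry.
  const forwarded = req.headers["x-forwarded-for"];
  const ip =
    (Array.isArray(forwarded) ? forwarded[0] : forwarded)?.split(",")[0].trim() ??
    req.socket.remoteAddress ??
    "unknown";

  try {
    await limiter.check(res, 10, ip); // 10 requests per minute per IP
  } catch {
    res.status(429).json({ error: "Rate limit exceeded" });
    return;
  }

  res.status(200).json({ success: true });
}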