// User agents handled by Prerender
const BOT_AGENTS = [
  // Search Engines
  "googlebot",
  "adsbot-google",
  "apis-google",
  "mediapartners-google",
  "google-safety",
  "feedfetcher-google",
  "googleproducer",
  "google-site-verification",
  "bingbot",
  "yandexbot",
  "yabrowser",
  "yahoo",
  "baiduspider",
  "naver",
  "seznambot",
  "sznprohlizec",
  "qwantbot",
  "ecosia",
  "duckduckbot",
  "duckassistbot",
  "applebot",
  // Social Media
  "facebookexternalhit",
  "facebookcatalog",
  "facebookbot",
  "meta-externalagent",
  "twitterbot",
  "linkedinbot",
  "whatsapp",
  "slackbot",
  "pinterest",
  "pinterestbot",
  "tiktok",
  "tiktokspider",
  "bytespider",
  "discordbot",
  // SEO Tools
  "semrushbot",
  "ahrefsbot",
  "chrome-lighthouse",
  "screaming-frog",
  "oncrawlbot",
  "botifybot",
  "deepcrawl",
  "lumar",
  "rogerbot",
  "dotbot",
  // AI Bots
  "gptbot",
  "chatgpt",
  "oai-searchbot",
  "chatgpt-user",
  "claudebot",
  "google-extended",
  "perplexitybot",
  "perplexity-user",
  "youbot",
  "amazonbot",
  "anthropic-ai",
  "claude-web",
  "ccbot",
  "mistralai-user",
  // Other Known Bots & Crawlers
  "embedly",
  "quora link preview",
  "showyoubot",
  "outbrain",
  "pinterest/0.",
  "developers.google.com/+/web/snippet",
  "vkshare",
  "w3c_validator",
  "redditbot",
  "flipboard",
  "tumblr",
  "bitlybot",
  "skypeuripreview",
  "nuzzel",
  "google page speed",
  "qwantify",
  "bitrix link preview",
  "xing-contenttabreceiver",
  "google-inspectiontool",
  "telegrambot",
  // Testing
  "integration-test"
];

// These are the extensions that the worker will skip prerendering
// even if any other conditions pass.
const IGNORE_EXTENSIONS = [
  ".js",
  ".css",
  ".xml",
  ".less",
  ".png",
  ".jpg",
  ".jpeg",
  ".gif",
  ".pdf",
  ".doc",
  ".txt",
  ".ico",
  ".rss",
  ".zip",
  ".mp3",
  ".rar",
  ".exe",
  ".wmv",
  ".avi",
  ".ppt",
  ".mpg",
  ".mpeg",
  ".tif",
  ".wav",
  ".mov",
  ".psd",
  ".ai",
  ".xls",
  ".mp4",
  ".m4a",
  ".swf",
  ".dat",
  ".dmg",
  ".iso",
  ".flv",
  ".m4v",
  ".torrent",
  ".woff",
  ".ttf",
  ".svg",
  ".webmanifest",
];

export default {
  /**
   * Hooks into the request, and changes origin if needed
   */
  async fetch(request, env) {
    return await handleRequest(request, env).catch(
      (err) => new Response(err.stack, { status: 500 })
    );
  },
};

/**
 * @param {Request} request
 * @param {any} env
 * @returns {Promise<Response>}
 */
async function handleRequest(request, env) {
  const url = new URL(request.url);
  const userAgent = request.headers.get("User-Agent")?.toLowerCase() || "";
  const isPrerender = request.headers.get("X-Prerender");
  const pathName = url.pathname.toLowerCase();
  const lastDot = pathName.lastIndexOf(".");
  const extension = lastDot > -1 ? pathName.substring(lastDot).toLowerCase() : "";

  // Prerender loop protection
  // Non robot user agent
  // Ignore extensions
  if (
    isPrerender ||
    !BOT_AGENTS.some((bot) => userAgent.includes(bot.toLowerCase())) ||
    (extension.length && IGNORE_EXTENSIONS.includes(extension))
  ) {
    return fetch(request);
  }

  // Build Prerender request
  const newURL = `https://service.prerender.io/${request.url}`;
  const newHeaders = new Headers(request.headers);
  newHeaders.set("X-Prerender-Token", env.PRERENDER_TOKEN);
  newHeaders.set("X-Prerender-Int-Type", "CloudFlare");

  return fetch(new Request(newURL, {
    headers: newHeaders,
    redirect: "manual",
  }));
}
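As a quick local sanity check of the bot-matching condition used in handleRequest, a snippet like the following can be run against the BOT_AGENTS array above (this is not part of the worker; the user-agent string is just a sample):

// Same matching logic the worker applies to the User-Agent header
const sampleUA =
  "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)".toLowerCase();
console.log(BOT_AGENTS.some((bot) => sampleUA.includes(bot.toLowerCase()))); // true -> request would be prerendered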
Missing a closing " after "CloudFlare" on line 135.
Thank you, added!
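For anyone reading along, the corrected line in the worker above is:

newHeaders.set("X-Prerender-Int-Type", "CloudFlare");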
return fetch(new Request(newURL, {
headers: newHeaders,
redirect: "manual",
}));
is perfectly fine if the request always succeeds.
But when service.prerender.io returns an error (for example, a rate limit, a timeout, or a 5xx),
Cloudflare will raise “Error: No response!” if the fetch promise rejects.
It's good to wrap the fetch in a try/catch and provide a fallback:
try {
  const prerenderResp = await fetch(new Request(newURL, {
    headers: newHeaders,
    redirect: "manual",
  }));
  if (prerenderResp.ok) return prerenderResp;
  return fetch(request); // fallback to normal site
} catch (err) {
  console.error("Prerender fetch failed:", err);
  return fetch(request);
}
Without this fallback, any Prerender downtime causes “No response!” from your Worker.
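To make that concrete, here is a sketch of how the fallback could replace the final return in the handleRequest function above (same code as the snippet, just placed in context; the ok check and the log message are this suggestion, not part of the original gist):

  // Build Prerender request
  const newURL = `https://service.prerender.io/${request.url}`;
  const newHeaders = new Headers(request.headers);
  newHeaders.set("X-Prerender-Token", env.PRERENDER_TOKEN);
  newHeaders.set("X-Prerender-Int-Type", "CloudFlare");

  try {
    // Ask Prerender for the rendered page
    const prerenderResp = await fetch(new Request(newURL, {
      headers: newHeaders,
      redirect: "manual",
    }));
    if (prerenderResp.ok) return prerenderResp;
    // Non-2xx from Prerender: serve the normal site instead
    return fetch(request);
  } catch (err) {
    // Rejected fetch (network failure etc.): serve the normal site instead
    console.error("Prerender fetch failed:", err);
    return fetch(request);
  }

One caveat with this exact check: because the request uses redirect: "manual", a 3xx from Prerender also fails prerenderResp.ok and falls back to the origin, which may or may not be what you want.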
Unfortunately this didn't work despite following all the steps supplied here: https://docs.prerender.io/docs/cloudflare-integration-v2
We ended up asking Lovable for step-by-step instructions on how to do this and got a working version from there. When I put the code above through a code checker, it highlighted a number of issues, which is presumably why it wouldn't deploy and work for us.