From 4cc32c27deefb1009167f0abb756a8da57f9bde4 Mon Sep 17 00:00:00 2001 From: Gigi Date: Thu, 16 Oct 2025 14:43:29 +0200 Subject: [PATCH] fix(api): detect crawlers and redirect browsers to SPA Browsers get a 302 redirect to /, where the SPA handles routing client-side; note the redirect Location is /, so the original /a/{naddr} path is not carried in the redirect itself and must be recovered client-side. Crawlers/bots get the full HTML with OG meta tags. --- api/article-og.ts | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/api/article-og.ts b/api/article-og.ts index c1d5c969..e8aee1e1 100644 --- a/api/article-og.ts +++ b/api/article-og.ts @@ -159,7 +159,6 @@ function generateHtml(naddr: string, meta: ArticleMetadata | null): string { - @@ -196,6 +195,16 @@ function generateHtml(naddr: string, meta: ArticleMetadata | null): string { ` } +function isCrawler(userAgent: string | undefined): boolean { + if (!userAgent) return false + const crawlers = [ + 'bot', 'crawl', 'spider', 'slurp', 'facebook', 'twitter', 'linkedin', + 'whatsapp', 'telegram', 'slack', 'discord', 'preview' + ] + const ua = userAgent.toLowerCase() + return crawlers.some(crawler => ua.includes(crawler)) +} + export default async function handler(req: VercelRequest, res: VercelResponse) { const naddr = (req.query.naddr as string | undefined)?.trim() @@ -203,7 +212,16 @@ export default async function handler(req: VercelRequest, res: VercelResponse) { return res.status(400).json({ error: 'Missing naddr parameter' }) } - // Check cache + const userAgent = req.headers['user-agent'] as string | undefined + + // If it's a regular browser (not a bot), redirect to index.html + // and let the SPA handle routing client-side + if (!isCrawler(userAgent)) { + res.setHeader('Location', '/') + return res.status(302).send('') + } + + // Check cache for bots/crawlers const now = Date.now() const cached = memoryCache.get(naddr) if (cached && cached.expires > now) {