import type { VercelRequest, VercelResponse } from '@vercel/node'
import { RelayPool } from 'applesauce-relay'
import { nip19 } from 'nostr-tools'
import type { AddressPointer } from 'nostr-tools/nip19'
import type { NostrEvent, Filter } from 'nostr-tools'
import { Helpers } from 'applesauce-core'

const { getArticleTitle, getArticleImage, getArticleSummary } = Helpers

// Relay configuration (from src/config/relays.ts)
const RELAYS = [
  'wss://relay.damus.io',
  'wss://nos.lol',
  'wss://relay.nostr.band',
  'wss://relay.dergigi.com',
  'wss://wot.dergigi.com',
  'wss://relay.snort.social',
  'wss://nostr-pub.wellorder.net',
  'wss://purplepag.es',
  'wss://relay.primal.net'
]

type CacheEntry = {
  html: string
  expires: number
}

const WEEK_MS = 7 * 24 * 60 * 60 * 1000
const memoryCache = new Map<string, CacheEntry>()

function escapeHtml(text: string): string {
  return text
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#039;')
}

function setCacheHeaders(res: VercelResponse, maxAge: number = 86400): void {
  res.setHeader('Cache-Control', `public, max-age=${maxAge}, s-maxage=604800`)
  res.setHeader('Content-Type', 'text/html; charset=utf-8')
}

interface ArticleMetadata {
  title: string
  summary: string
  image: string
  author: string
  published?: number
}

async function fetchEventsFromRelays(
  relayPool: RelayPool,
  relayUrls: string[],
  filter: Filter,
  timeoutMs: number
): Promise<NostrEvent[]> {
  const events: NostrEvent[] = []

  await new Promise<void>((resolve) => {
    const timeout = setTimeout(() => resolve(), timeoutMs)

    // `request` emits NostrEvent objects directly
    relayPool.request(relayUrls, filter).subscribe({
      next: (event) => {
        events.push(event)
      },
      error: () => {
        clearTimeout(timeout)
        resolve()
      },
      complete: () => {
        clearTimeout(timeout)
        resolve()
      }
    })
  })

  // Sort by created_at and return most recent first
  return events.sort((a, b) => b.created_at - a.created_at)
}

async function fetchArticleMetadata(naddr: string): Promise<ArticleMetadata | null> {
  const relayPool = new RelayPool()

  try {
    // Decode naddr
    const decoded = nip19.decode(naddr)
    if (decoded.type !== 'naddr') {
      return null
    }
    const pointer = decoded.data as AddressPointer

    // Determine relay URLs: prefer relay hints embedded in the naddr
    const relayUrls = pointer.relays && pointer.relays.length > 0 ? pointer.relays : RELAYS

    // Fetch article and profile in parallel
    const [articleEvents, profileEvents] = await Promise.all([
      fetchEventsFromRelays(relayPool, relayUrls, {
        kinds: [pointer.kind],
        authors: [pointer.pubkey],
        '#d': [pointer.identifier || '']
      }, 5000),
      fetchEventsFromRelays(relayPool, relayUrls, {
        kinds: [0],
        authors: [pointer.pubkey]
      }, 3000)
    ])

    if (articleEvents.length === 0) {
      return null
    }
    const article = articleEvents[0]

    // Extract article metadata
    const title = getArticleTitle(article) || 'Untitled Article'
    const summary = getArticleSummary(article) || 'Read this article on Boris'
    const image = getArticleImage(article) || '/boris-social-1200.png'

    // Extract author name from profile; fall back to a truncated pubkey
    let authorName = pointer.pubkey.slice(0, 8) + '...'
    if (profileEvents.length > 0) {
      try {
        const profileData = JSON.parse(profileEvents[0].content)
        authorName = profileData.display_name || profileData.name || authorName
      } catch {
        // Use fallback
      }
    }

    return {
      title,
      summary,
      image,
      author: authorName,
      published: article.created_at
    }
  } catch (err) {
    console.error('Failed to fetch article metadata:', err)
    return null
  } finally {
    // No explicit close needed; pool manages connections internally
  }
}

function generateHtml(naddr: string, meta: ArticleMetadata | null): string {
  const baseUrl = 'https://read.withboris.com'
  const articleUrl = `${baseUrl}/a/${naddr}`
  const title = meta?.title || 'Boris – Nostr Bookmarks'
  const description = meta?.summary || 'Your reading list for the Nostr world. A minimal nostr client for bookmark management with highlights.'
  const image = meta?.image?.startsWith('http')
    ? meta.image
    : `${baseUrl}${meta?.image || '/boris-social-1200.png'}`
  const author = meta?.author || 'Boris'

  // The original template markup was lost in extraction. This rebuilds the
  // standard Open Graph / Twitter card head implied by the variables above,
  // plus a small SPA-boot script for human visitors that slip past crawler
  // detection (the /assets/index.js bundle path is an assumption).
  return `<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <title>${escapeHtml(title)}</title>
  <meta name="description" content="${escapeHtml(description)}">
  <meta property="og:type" content="article">
  <meta property="og:site_name" content="Boris">
  <meta property="og:title" content="${escapeHtml(title)}">
  <meta property="og:description" content="${escapeHtml(description)}">
  <meta property="og:image" content="${escapeHtml(image)}">
  <meta property="og:url" content="${articleUrl}">
  <meta property="article:author" content="${escapeHtml(author)}">
  ${meta?.published ? `<meta property="article:published_time" content="${new Date(meta.published * 1000).toISOString()}">` : ''}
  <meta name="twitter:card" content="summary_large_image">
  <meta name="twitter:title" content="${escapeHtml(title)}">
  <meta name="twitter:description" content="${escapeHtml(description)}">
  <meta name="twitter:image" content="${escapeHtml(image)}">
  <link rel="canonical" href="${articleUrl}">
</head>
<body>
  <noscript><a href="${articleUrl}">Read "${escapeHtml(title)}" on Boris</a></noscript>
  <script>
    // Crawlers only read the tags above; anything executing JS boots the SPA
    history.replaceState(null, '', ${JSON.stringify(`/a/${naddr}`)})
  </script>
  <script type="module" src="/assets/index.js"></script>
</body>
</html>`
}

function isCrawler(userAgent: string | undefined): boolean {
  if (!userAgent) return false
  const crawlers = [
    'bot', 'crawl', 'spider', 'slurp', 'facebook', 'twitter',
    'linkedin', 'whatsapp', 'telegram', 'slack', 'discord', 'preview'
  ]
  const ua = userAgent.toLowerCase()
  return crawlers.some(crawler => ua.includes(crawler))
}

export default async function handler(req: VercelRequest, res: VercelResponse) {
  const naddr = (req.query.naddr as string | undefined)?.trim()
  if (!naddr) {
    return res.status(400).json({ error: 'Missing naddr parameter' })
  }

  const userAgent = req.headers['user-agent'] as string | undefined
  const isCrawlerRequest = isCrawler(userAgent)
  const debugEnabled = req.query.debug === '1' || req.headers['x-boris-debug'] === '1'
  if (debugEnabled) {
    res.setHeader('X-Boris-Debug', '1')
  }

  // If it's a regular browser (not a bot), serve HTML that loads the SPA.
  // Use history.replaceState to set the URL before the SPA boots.
  if (!isCrawlerRequest) {
    const articlePath = `/a/${naddr}`

    // Serve minimal HTML that sets up the URL and loads the SPA. The bundle
    // path and the window.__BORIS_DEBUG__ flag are assumptions; the original
    // markup was lost in extraction.
    const html = `<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <title>Boris - Loading Article...</title>
  ${debugEnabled ? `<script>window.__BORIS_DEBUG__ = true</script>` : ''}
  <script>
    // Restore the deep-link URL before the SPA takes over routing
    history.replaceState(null, '', ${JSON.stringify(articlePath)})
  </script>
  <script type="module" src="/assets/index.js"></script>
</head>
<body>
  <div id="root"></div>
</body>
</html>`
    res.setHeader('Content-Type', 'text/html; charset=utf-8')
    res.setHeader('Cache-Control', 'no-cache, no-store, must-revalidate')
    if (debugEnabled) {
      // Debug mode enabled; X-Boris-Debug header was already set above
    }
    return res.status(200).send(html)
  }

  // Check cache for bots/crawlers
  const now = Date.now()
  const cached = memoryCache.get(naddr)
  if (cached && cached.expires > now) {
    setCacheHeaders(res)
    if (debugEnabled) {
      // Debug mode enabled; serving cached HTML
    }
    return res.status(200).send(cached.html)
  }

  try {
    // Fetch metadata
    const meta = await fetchArticleMetadata(naddr)

    // Generate HTML
    const html = generateHtml(naddr, meta)

    // Cache the result
    memoryCache.set(naddr, { html, expires: now + WEEK_MS })

    // Send response
    setCacheHeaders(res)
    if (debugEnabled) {
      // Debug mode enabled; serving freshly generated HTML
    }
    return res.status(200).send(html)
  } catch (err) {
    console.error('Error generating article OG HTML:', err)

    // Fallback to basic HTML with SPA boot, cached for a shorter period
    const html = generateHtml(naddr, null)
    setCacheHeaders(res, 3600)
    if (debugEnabled) {
      // Debug mode enabled; serving fallback HTML
    }
    return res.status(200).send(html)
  }
}
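
/*
 * Usage sketch (illustrative, not part of this module). It assumes the
 * deployment rewrites article deep links to this function via vercel.json;
 * the function path "api/article-og" is hypothetical:
 *
 *   // vercel.json
 *   {
 *     "rewrites": [
 *       { "source": "/a/:naddr", "destination": "/api/article-og?naddr=:naddr" }
 *     ]
 *   }
 *
 * The same URL then answers differently by user agent:
 *
 *   # Crawler ("twitter" matches the isCrawler list): static HTML with OG tags
 *   curl -A "Twitterbot" "https://read.withboris.com/a/<naddr>"
 *
 *   # Browser: bootstrap page that replaceState()s the URL and loads the SPA
 *   curl -A "Mozilla/5.0" "https://read.withboris.com/a/<naddr>"
 */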