mirror of https://github.com/dergigi/boris.git (synced 2025-12-25 18:44:23 +01:00)
Merge pull request #3 from dergigi/local-first
perf: parallel local-first fetching, instant render, and non-wiping refreshes
@@ -111,16 +111,18 @@ export const BookmarkList: React.FC<BookmarkListProps> = ({
isMobile={isMobile}
/>

{loading ? (
<div className="loading">
<FontAwesomeIcon icon={faSpinner} spin />
</div>
) : allIndividualBookmarks.length === 0 ? (
<div className="empty-state">
<p>No bookmarks found.</p>
<p>Add bookmarks using your nostr client to see them here.</p>
<p>If you aren't on nostr yet, start here: <a href="https://nstart.me/" target="_blank" rel="noopener noreferrer">nstart.me</a></p>
</div>
{allIndividualBookmarks.length === 0 ? (
loading ? (
<div className="loading">
<FontAwesomeIcon icon={faSpinner} spin />
</div>
) : (
<div className="empty-state">
<p>No bookmarks found.</p>
<p>Add bookmarks using your nostr client to see them here.</p>
<p>If you aren't on nostr yet, start here: <a href="https://nstart.me/" target="_blank" rel="noopener noreferrer">nstart.me</a></p>
</div>
)
) : (
<div className="bookmarks-list">
<div className={`bookmarks-grid bookmarks-${viewMode}`}>
@@ -7,6 +7,7 @@ import { nip19 } from 'nostr-tools'
import { fetchContacts } from '../services/contactService'
import { fetchBlogPostsFromAuthors, BlogPostPreview } from '../services/exploreService'
import BlogPostCard from './BlogPostCard'
import { getCachedPosts, upsertCachedPost, setCachedPosts } from '../services/exploreCache'

interface ExploreProps {
relayPool: RelayPool
@@ -27,11 +28,59 @@ const Explore: React.FC<ExploreProps> = ({ relayPool }) => {
}

try {
// show spinner but keep existing posts
setLoading(true)
setError(null)

// Seed from in-memory cache if available to avoid empty flash
const cached = getCachedPosts(activeAccount.pubkey)
if (cached && cached.length > 0 && blogPosts.length === 0) {
setBlogPosts(cached)
}

// Fetch the user's contacts (friends)
const contacts = await fetchContacts(relayPool, activeAccount.pubkey)
const contacts = await fetchContacts(
relayPool,
activeAccount.pubkey,
(partial) => {
// When local contacts are available, kick off early posts fetch
if (partial.size > 0) {
const relayUrls = Array.from(relayPool.relays.values()).map(relay => relay.url)
fetchBlogPostsFromAuthors(
relayPool,
Array.from(partial),
relayUrls,
(post) => {
// merge into UI and cache as we stream
setBlogPosts((prev) => {
const exists = prev.some(p => p.event.id === post.event.id)
if (exists) return prev
const next = [...prev, post]
return next.sort((a, b) => {
const timeA = a.published || a.event.created_at
const timeB = b.published || b.event.created_at
return timeB - timeA
})
})
setCachedPosts(activeAccount.pubkey, upsertCachedPost(activeAccount.pubkey, post))
}
).then((all) => {
// Ensure union of streamed + final is displayed
setBlogPosts((prev) => {
const byId = new Map(prev.map(p => [p.event.id, p]))
for (const post of all) byId.set(post.event.id, post)
const merged = Array.from(byId.values()).sort((a, b) => {
const timeA = a.published || a.event.created_at
const timeB = b.published || b.event.created_at
return timeB - timeA
})
setCachedPosts(activeAccount.pubkey, merged)
return merged
})
})
}
}
)

if (contacts.size === 0) {
setError('You are not following anyone yet. Follow some people to see their blog posts!')
@@ -39,21 +88,25 @@ const Explore: React.FC<ExploreProps> = ({ relayPool }) => {
return
}

// Get relay URLs from pool
// After full contacts, do a final pass for completeness
const relayUrls = Array.from(relayPool.relays.values()).map(relay => relay.url)

// Fetch blog posts from friends
const posts = await fetchBlogPostsFromAuthors(
relayPool,
Array.from(contacts),
relayUrls
)
const posts = await fetchBlogPostsFromAuthors(relayPool, Array.from(contacts), relayUrls)

if (posts.length === 0) {
setError('No blog posts found from your friends yet')
}

setBlogPosts(posts)
setBlogPosts((prev) => {
const byId = new Map(prev.map(p => [p.event.id, p]))
for (const post of posts) byId.set(post.event.id, post)
const merged = Array.from(byId.values()).sort((a, b) => {
const timeA = a.published || a.event.created_at
const timeB = b.published || b.event.created_at
return timeB - timeA
})
setCachedPosts(activeAccount.pubkey, merged)
return merged
})
} catch (err) {
console.error('Failed to load blog posts:', err)
setError('Failed to load blog posts. Please try again.')
@@ -63,7 +116,7 @@ const Explore: React.FC<ExploreProps> = ({ relayPool }) => {
}

loadBlogPosts()
}, [relayPool, activeAccount])
}, [relayPool, activeAccount, blogPosts.length])

const getPostUrl = (post: BlogPostPreview) => {
// Get the d-tag identifier
@@ -79,17 +132,6 @@ const Explore: React.FC<ExploreProps> = ({ relayPool }) => {
return `/a/${naddr}`
}

if (loading) {
return (
<div className="explore-container">
<div className="explore-loading">
<FontAwesomeIcon icon={faSpinner} spin size="2x" />
<p>Loading blog posts from your friends...</p>
</div>
</div>
)
}

if (error) {
return (
<div className="explore-container">
@@ -112,6 +154,12 @@ const Explore: React.FC<ExploreProps> = ({ relayPool }) => {
Discover blog posts from your friends on Nostr
</p>
</div>
{loading && (
<div className="explore-loading" style={{ display: 'flex', alignItems: 'center', gap: '0.5rem', padding: '0.5rem 0' }}>
<FontAwesomeIcon icon={faSpinner} spin />
<span>Refreshing posts…</span>
</div>
)}
<div className="explore-grid">
{blogPosts.map((post) => (
<BlogPostCard
@@ -120,6 +168,11 @@ const Explore: React.FC<ExploreProps> = ({ relayPool }) => {
href={getPostUrl(post)}
/>
))}
{!loading && blogPosts.length === 0 && (
<div className="explore-empty" style={{ gridColumn: '1/-1', textAlign: 'center', color: 'var(--text-secondary)' }}>
<p>No blog posts found yet.</p>
</div>
)}
</div>
</div>
)
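Note: the same dedupe-by-event-id plus sort-by-published merge appears three times in this hunk (streaming callback, post-stream union, final pass). A small helper along these lines could factor it out; this is a sketch, not part of this commit, with BlogPostPreview imported as in Explore.tsx above.

import { BlogPostPreview } from '../services/exploreService'

// Merge incoming previews into the existing list, keeping one entry per
// event id and ordering by published date (falling back to created_at).
function mergePosts(prev: BlogPostPreview[], incoming: BlogPostPreview[]): BlogPostPreview[] {
  const byId = new Map(prev.map(p => [p.event.id, p]))
  for (const post of incoming) byId.set(post.event.id, post)
  return Array.from(byId.values()).sort((a, b) => {
    const timeA = a.published || a.event.created_at
    const timeB = b.published || b.event.created_at
    return timeB - timeA
  })
}

// Usage sketch inside the component:
// setBlogPosts(prev => mergePosts(prev, [post]))   // streaming callback
// setBlogPosts(prev => mergePosts(prev, all))      // after the early fetch resolves
// setBlogPosts(prev => mergePosts(prev, posts))    // final pass after full contacts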
@@ -44,10 +44,14 @@ export const useBookmarksData = ({

const handleFetchBookmarks = useCallback(async () => {
if (!relayPool || !activeAccount) return
// don't clear existing bookmarks: we keep UI stable and show spinner unobtrusively
setBookmarksLoading(true)
try {
const fullAccount = accountManager.getActive()
await fetchBookmarks(relayPool, fullAccount || activeAccount, setBookmarks, settings)
// merge-friendly: updater form that preserves visible list until replacement
await fetchBookmarks(relayPool, fullAccount || activeAccount, (next) => {
setBookmarks(() => next)
}, settings)
} finally {
setBookmarksLoading(false)
}
@@ -102,15 +106,21 @@ export const useBookmarksData = ({
}
}, [relayPool, activeAccount, isRefreshing, handleFetchBookmarks, handleFetchHighlights, handleFetchContacts])

// Load initial data
// Load initial data (avoid clearing on route-only changes)
useEffect(() => {
if (!relayPool || !activeAccount) return
// Only (re)fetch bookmarks when account or relayPool changes, not on naddr route changes
handleFetchBookmarks()
}, [relayPool, activeAccount, handleFetchBookmarks])

// Fetch highlights/contacts independently to avoid disturbing bookmarks
useEffect(() => {
if (!relayPool || !activeAccount) return
if (!naddr) {
handleFetchHighlights()
}
handleFetchContacts()
}, [relayPool, activeAccount, naddr, handleFetchBookmarks, handleFetchHighlights, handleFetchContacts])
}, [relayPool, activeAccount, naddr, handleFetchHighlights, handleFetchContacts])

return {
bookmarks,
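Note: because the refresh no longer wipes state, consumers can derive "first load" versus "background refresh" from the loading flag plus the list length. A minimal sketch (names illustrative, not from this commit):

// Decide which spinner to show once refreshes stop clearing the list.
export function loadState<T>(items: T[], loading: boolean): 'initial' | 'refreshing' | 'idle' {
  if (loading && items.length === 0) return 'initial'   // cold start: full-page spinner
  if (loading) return 'refreshing'                       // keep list visible, show inline spinner
  return 'idle'
}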
@@ -11,7 +11,7 @@ interface UseExternalUrlLoaderProps {
setReaderContent: (content: ReadableContent | undefined) => void
setReaderLoading: (loading: boolean) => void
setIsCollapsed: (collapsed: boolean) => void
setHighlights: (highlights: Highlight[]) => void
setHighlights: (highlights: Highlight[] | ((prev: Highlight[]) => Highlight[])) => void
setHighlightsLoading: (loading: boolean) => void
setCurrentArticleCoordinate: (coord: string | undefined) => void
setCurrentArticleEventId: (id: string | undefined) => void
@@ -57,7 +57,21 @@ export function useExternalUrlLoader({

// Check if fetchHighlightsForUrl exists, otherwise skip
if (typeof fetchHighlightsForUrl === 'function') {
const highlightsList = await fetchHighlightsForUrl(relayPool, url)
const seen = new Set<string>()
const highlightsList = await fetchHighlightsForUrl(
relayPool,
url,
(highlight) => {
if (seen.has(highlight.id)) return
seen.add(highlight.id)
setHighlights((prev) => {
if (prev.some(h => h.id === highlight.id)) return prev
const next = [...prev, highlight]
return next.sort((a, b) => b.created_at - a.created_at)
})
}
)
// Ensure final list is sorted and contains all items
setHighlights(highlightsList.sort((a, b) => b.created_at - a.created_at))
console.log(`📌 Found ${highlightsList.length} highlights for URL`)
} else {
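Note: widening setHighlights to accept a functional updater is what makes the streaming callback safe; the callback fires many times per subscription, so each update must be computed from the latest state rather than from a value captured when the fetch started. A self-contained sketch (hook name and import path are illustrative):

import { useState } from 'react'
import { Highlight } from '../types/highlights'

function useStreamedHighlights() {
  const [highlights, setHighlights] = useState<Highlight[]>([])

  const onHighlight = (h: Highlight) => {
    // Stale-closure pitfall: setHighlights([...highlights, h]) would drop
    // everything streamed since the last render.
    setHighlights(prev =>
      prev.some(x => x.id === h.id)
        ? prev
        : [...prev, h].sort((a, b) => b.created_at - a.created_at)
    )
  }

  return { highlights, onHighlight }
}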
@@ -3260,10 +3260,15 @@ body.mobile-sidebar-open {
align-items: center;
justify-content: center;
gap: 1rem;
min-height: 50vh;
color: rgba(255, 255, 255, 0.7);
}

/* Compact, inline loading row for Explore refresh */
.explore-loading {
min-height: 0;
padding: 0.25rem 0;
}

.explore-error {
color: #ff6b6b;
}
@@ -1,10 +1,12 @@
import { RelayPool, completeOnEose } from 'applesauce-relay'
import { lastValueFrom, takeUntil, timer, toArray } from 'rxjs'
import { RelayPool } from 'applesauce-relay'
import { lastValueFrom, take } from 'rxjs'
import { nip19 } from 'nostr-tools'
import { AddressPointer } from 'nostr-tools/nip19'
import { NostrEvent } from 'nostr-tools'
import { Helpers } from 'applesauce-core'
import { RELAYS } from '../config/relays'
import { prioritizeLocalRelays, partitionRelays, createParallelReqStreams } from '../utils/helpers'
import { merge, toArray as rxToArray } from 'rxjs'
import { UserSettings } from './settingsService'
import { rebroadcastEvents } from './rebroadcastService'

@@ -98,9 +100,11 @@ export async function fetchArticleByNaddr(
const pointer = decoded.data as AddressPointer

// Define relays to query - prefer relays from naddr, fallback to configured relays (including local)
const relays = pointer.relays && pointer.relays.length > 0
const baseRelays = pointer.relays && pointer.relays.length > 0
? pointer.relays
: RELAYS
const orderedRelays = prioritizeLocalRelays(baseRelays)
const { local: localRelays, remote: remoteRelays } = partitionRelays(orderedRelays)

// Fetch the article event
const filter = {
@@ -109,12 +113,10 @@ export async function fetchArticleByNaddr(
'#d': [pointer.identifier]
}

// Use applesauce relay pool pattern
const events = await lastValueFrom(
relayPool
.req(relays, filter)
.pipe(completeOnEose(), takeUntil(timer(10000)), toArray())
)
// Parallel local+remote, stream immediate, collect up to first from each
const { local$, remote$ } = createParallelReqStreams(relayPool, localRelays, remoteRelays, filter, 1200, 6000)
const collected = await lastValueFrom(merge(local$.pipe(take(1)), remote$.pipe(take(1))).pipe(rxToArray()))
const events = collected as NostrEvent[]

if (events.length === 0) {
throw new Error('Article not found')
@@ -1,9 +1,11 @@
import { RelayPool, completeOnEose } from 'applesauce-relay'
import { lastValueFrom, takeUntil, timer, toArray } from 'rxjs'
import { RelayPool } from 'applesauce-relay'
import { lastValueFrom, take } from 'rxjs'
import { nip19 } from 'nostr-tools'
import { AddressPointer } from 'nostr-tools/nip19'
import { Helpers } from 'applesauce-core'
import { RELAYS } from '../config/relays'
import { prioritizeLocalRelays, partitionRelays, createParallelReqStreams } from '../utils/helpers'
import { merge, toArray as rxToArray } from 'rxjs'

const { getArticleTitle } = Helpers

@@ -25,9 +27,11 @@ export async function fetchArticleTitle(
const pointer = decoded.data as AddressPointer

// Define relays to query
const relays = pointer.relays && pointer.relays.length > 0
const baseRelays = pointer.relays && pointer.relays.length > 0
? pointer.relays
: RELAYS
const orderedRelays = prioritizeLocalRelays(baseRelays)
const { local: localRelays, remote: remoteRelays } = partitionRelays(orderedRelays)

// Fetch the article event
const filter = {
@@ -36,11 +40,11 @@ export async function fetchArticleTitle(
'#d': [pointer.identifier]
}

// Parallel local+remote: collect up to one event from each
const { local$, remote$ } = createParallelReqStreams(relayPool, localRelays, remoteRelays, filter, 1200, 5000)
const events = await lastValueFrom(
relayPool
.req(relays, filter)
.pipe(completeOnEose(), takeUntil(timer(5000)), toArray())
)
merge(local$.pipe(take(1)), remote$.pipe(take(1))).pipe(rxToArray())
) as unknown as { created_at: number }[]

if (events.length === 0) {
return null
@@ -48,7 +52,7 @@ export async function fetchArticleTitle(

// Sort by created_at and take the most recent
events.sort((a, b) => b.created_at - a.created_at)
const article = events[0]
const article = events[0] as unknown as Parameters<typeof getArticleTitle>[0]

return getArticleTitle(article) || null
} catch (err) {
@@ -1,5 +1,5 @@
import { RelayPool, completeOnEose } from 'applesauce-relay'
import { lastValueFrom, takeUntil, timer, toArray } from 'rxjs'
import { lastValueFrom, merge, Observable, takeUntil, timer, toArray } from 'rxjs'
import {
AccountWithExtension,
NostrEvent,
@@ -16,6 +16,7 @@ import { Bookmark } from '../types/bookmarks'
import { collectBookmarksFromEvents } from './bookmarkProcessing.ts'
import { UserSettings } from './settingsService'
import { rebroadcastEvents } from './rebroadcastService'
import { prioritizeLocalRelays, partitionRelays } from '../utils/helpers'

@@ -31,14 +32,22 @@ export const fetchBookmarks = async (
throw new Error('Invalid account object provided')
}
// Get relay URLs from the pool
const relayUrls = Array.from(relayPool.relays.values()).map(relay => relay.url)
const relayUrls = prioritizeLocalRelays(Array.from(relayPool.relays.values()).map(relay => relay.url))
const { local: localRelays, remote: remoteRelays } = partitionRelays(relayUrls)
// Fetch bookmark events - NIP-51 standards, legacy formats, and web bookmarks (NIP-B0)
console.log('🔍 Fetching bookmark events from relays:', relayUrls)
const rawEvents = await lastValueFrom(
relayPool
.req(relayUrls, { kinds: [10003, 30003, 30001, 39701], authors: [activeAccount.pubkey] })
.pipe(completeOnEose(), takeUntil(timer(20000)), toArray())
)
// Try local-first quickly, then full set fallback
const local$ = localRelays.length > 0
? relayPool
.req(localRelays, { kinds: [10003, 30003, 30001, 39701], authors: [activeAccount.pubkey] })
.pipe(completeOnEose(), takeUntil(timer(1200)))
: new Observable<NostrEvent>((sub) => sub.complete())
const remote$ = remoteRelays.length > 0
? relayPool
.req(remoteRelays, { kinds: [10003, 30003, 30001, 39701], authors: [activeAccount.pubkey] })
.pipe(completeOnEose(), takeUntil(timer(6000)))
: new Observable<NostrEvent>((sub) => sub.complete())
const rawEvents = await lastValueFrom(merge(local$, remote$).pipe(toArray()))
console.log('📊 Raw events fetched:', rawEvents.length, 'events')

// Rebroadcast bookmark events to local/all relays based on settings
@@ -64,7 +73,7 @@ export const fetchBookmarks = async (
const bookmarkListEvents = dedupeNip51Events(rawEvents)
console.log('📋 After deduplication:', bookmarkListEvents.length, 'bookmark events')
if (bookmarkListEvents.length === 0) {
setBookmarks([])
// Keep existing bookmarks visible; do not clear list if nothing new found
return
}
// Aggregate across events
@@ -102,9 +111,14 @@ export const fetchBookmarks = async (
let idToEvent: Map<string, NostrEvent> = new Map()
if (noteIds.length > 0) {
try {
const events = await lastValueFrom(
relayPool.req(relayUrls, { ids: noteIds }).pipe(completeOnEose(), takeUntil(timer(10000)), toArray())
)
const { local: localHydrate, remote: remoteHydrate } = partitionRelays(relayUrls)
const localHydrate$ = localHydrate.length > 0
? relayPool.req(localHydrate, { ids: noteIds }).pipe(completeOnEose(), takeUntil(timer(800)))
: new Observable<NostrEvent>((sub) => sub.complete())
const remoteHydrate$ = remoteHydrate.length > 0
? relayPool.req(remoteHydrate, { ids: noteIds }).pipe(completeOnEose(), takeUntil(timer(2500)))
: new Observable<NostrEvent>((sub) => sub.complete())
const events: NostrEvent[] = await lastValueFrom(merge(localHydrate$, remoteHydrate$).pipe(toArray()))
idToEvent = new Map(events.map((e: NostrEvent) => [e.id, e]))
} catch (error) {
console.warn('Failed to fetch events for hydration:', error)
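Note: the "empty stream" fallback new Observable<NostrEvent>((sub) => sub.complete()) recurs in several of these hunks. rxjs already exports EMPTY, an observable that completes immediately and is typed Observable<never>, so it merges with any event stream without changing the element type. A possible drop-in variant of the local$ declaration above (a sketch, not applied in this commit; surrounding identifiers come from fetchBookmarks):

import { EMPTY } from 'rxjs'

// localRelays, relayPool, activeAccount as in the function above.
const local$ = localRelays.length > 0
  ? relayPool
      .req(localRelays, { kinds: [10003, 30003, 30001, 39701], authors: [activeAccount.pubkey] })
      .pipe(completeOnEose(), takeUntil(timer(1200)))
  : EMPTY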
@@ -1,5 +1,6 @@
import { RelayPool, completeOnEose } from 'applesauce-relay'
import { lastValueFrom, takeUntil, timer, toArray } from 'rxjs'
import { lastValueFrom, merge, Observable, takeUntil, timer, toArray } from 'rxjs'
import { prioritizeLocalRelays } from '../utils/helpers'

/**
* Fetches the contact list (follows) for a specific user
@@ -9,40 +10,49 @@ import { lastValueFrom, takeUntil, timer, toArray } from 'rxjs'
*/
export const fetchContacts = async (
relayPool: RelayPool,
pubkey: string
pubkey: string,
onPartial?: (contacts: Set<string>) => void
): Promise<Set<string>> => {
try {
const relayUrls = Array.from(relayPool.relays.values()).map(relay => relay.url)
const relayUrls = prioritizeLocalRelays(Array.from(relayPool.relays.values()).map(relay => relay.url))

console.log('🔍 Fetching contacts (kind 3) for user:', pubkey)

// Local-first quick attempt
const localRelays = relayUrls.filter(url => url.includes('localhost') || url.includes('127.0.0.1'))
const remoteRelays = relayUrls.filter(url => !url.includes('localhost') && !url.includes('127.0.0.1'))
const local$ = localRelays.length > 0
? relayPool
.req(localRelays, { kinds: [3], authors: [pubkey] })
.pipe(completeOnEose(), takeUntil(timer(1200)))
: new Observable<{ created_at: number; tags: string[][] }>((sub) => sub.complete())
const remote$ = remoteRelays.length > 0
? relayPool
.req(remoteRelays, { kinds: [3], authors: [pubkey] })
.pipe(completeOnEose(), takeUntil(timer(6000)))
: new Observable<{ created_at: number; tags: string[][] }>((sub) => sub.complete())
const events = await lastValueFrom(
relayPool
.req(relayUrls, { kinds: [3], authors: [pubkey] })
.pipe(completeOnEose(), takeUntil(timer(10000)), toArray())
merge(local$, remote$).pipe(toArray())
)
const followed = new Set<string>()
if (events.length > 0) {
// Get the most recent contact list
const sortedEvents = events.sort((a, b) => b.created_at - a.created_at)
const contactList = sortedEvents[0]
// Extract pubkeys from 'p' tags
for (const tag of contactList.tags) {
if (tag[0] === 'p' && tag[1]) {
followed.add(tag[1])
}
}
if (onPartial) onPartial(new Set(followed))
}
// merged already via streams

console.log('📊 Contact events fetched:', events.length)

if (events.length === 0) {
return new Set()
}

// Get the most recent contact list
const sortedEvents = events.sort((a, b) => b.created_at - a.created_at)
const contactList = sortedEvents[0]

// Extract pubkeys from 'p' tags
const followedPubkeys = new Set<string>()
for (const tag of contactList.tags) {
if (tag[0] === 'p' && tag[1]) {
followedPubkeys.add(tag[1])
}
}

console.log('👥 Followed contacts:', followedPubkeys.size)

return followedPubkeys
console.log('👥 Followed contacts:', followed.size)
return followed
} catch (error) {
console.error('Failed to fetch contacts:', error)
return new Set()
src/services/exploreCache.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import { NostrEvent } from 'nostr-tools'

export interface CachedBlogPostPreview {
event: NostrEvent
title: string
summary?: string
image?: string
published?: number
author: string
}

type CacheValue = {
posts: CachedBlogPostPreview[]
timestamp: number
}

const exploreCache = new Map<string, CacheValue>() // key: pubkey

export function getCachedPosts(pubkey: string): CachedBlogPostPreview[] | null {
const entry = exploreCache.get(pubkey)
if (!entry) return null
return entry.posts
}

export function setCachedPosts(pubkey: string, posts: CachedBlogPostPreview[]): void {
exploreCache.set(pubkey, { posts, timestamp: Date.now() })
}

export function upsertCachedPost(pubkey: string, post: CachedBlogPostPreview): CachedBlogPostPreview[] {
const current = exploreCache.get(pubkey)?.posts || []
const byId = new Map(current.map(p => [p.event.id, p]))
byId.set(post.event.id, post)
const merged = Array.from(byId.values()).sort((a, b) => {
const ta = a.published || a.event.created_at
const tb = b.published || b.event.created_at
return tb - ta
})
setCachedPosts(pubkey, merged)
return merged
}
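Note: a usage sketch mirroring the calls made from Explore.tsx above; pubkey stands for the active account's key and post for a streamed preview, both illustrative.

import { getCachedPosts, setCachedPosts, upsertCachedPost } from './exploreCache'

// 1. Seed the UI before any network round trip (avoids the empty flash):
const cached = getCachedPosts(pubkey) // null on a cold start
// 2. While streaming, upsert each post; the helper returns the merged, sorted list:
const merged = upsertCachedPost(pubkey, post)
// 3. After a full fetch completes, overwrite the entry wholesale:
setCachedPosts(pubkey, merged)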
@@ -1,5 +1,6 @@
import { RelayPool, completeOnEose } from 'applesauce-relay'
import { lastValueFrom, takeUntil, timer, toArray } from 'rxjs'
import { lastValueFrom, merge, Observable, takeUntil, timer, toArray } from 'rxjs'
import { prioritizeLocalRelays, partitionRelays } from '../utils/helpers'
import { NostrEvent } from 'nostr-tools'
import { Helpers } from 'applesauce-core'

@@ -24,7 +25,8 @@ export interface BlogPostPreview {
export const fetchBlogPostsFromAuthors = async (
relayPool: RelayPool,
pubkeys: string[],
relayUrls: string[]
relayUrls: string[],
onPost?: (post: BlogPostPreview) => void
): Promise<BlogPostPreview[]> => {
try {
if (pubkeys.length === 0) {
@@ -34,42 +36,65 @@ export const fetchBlogPostsFromAuthors = async (

console.log('📚 Fetching blog posts (kind 30023) from', pubkeys.length, 'authors')

const events = await lastValueFrom(
relayPool
.req(relayUrls, {
kinds: [30023],
authors: pubkeys,
limit: 100 // Fetch up to 100 recent posts
})
.pipe(completeOnEose(), takeUntil(timer(15000)), toArray())
)

console.log('📊 Blog post events fetched:', events.length)

const prioritized = prioritizeLocalRelays(relayUrls)
const { local: localRelays, remote: remoteRelays } = partitionRelays(prioritized)

// Deduplicate replaceable events by keeping the most recent version
// Group by author + d-tag identifier
const uniqueEvents = new Map<string, NostrEvent>()

for (const event of events) {
const dTag = event.tags.find(t => t[0] === 'd')?.[1] || ''
const key = `${event.pubkey}:${dTag}`

const existing = uniqueEvents.get(key)
if (!existing || event.created_at > existing.created_at) {
uniqueEvents.set(key, event)

const processEvents = (incoming: NostrEvent[]) => {
for (const event of incoming) {
const dTag = event.tags.find(t => t[0] === 'd')?.[1] || ''
const key = `${event.pubkey}:${dTag}`
const existing = uniqueEvents.get(key)
if (!existing || event.created_at > existing.created_at) {
uniqueEvents.set(key, event)
// Emit as we incorporate
if (onPost) {
const post: BlogPostPreview = {
event,
title: getArticleTitle(event) || 'Untitled',
summary: getArticleSummary(event),
image: getArticleImage(event),
published: getArticlePublished(event),
author: event.pubkey
}
onPost(post)
}
}
}
}

const local$ = localRelays.length > 0
? relayPool
.req(localRelays, { kinds: [30023], authors: pubkeys, limit: 100 })
.pipe(completeOnEose(), takeUntil(timer(1200)))
: new Observable<NostrEvent>((sub) => sub.complete())
const remote$ = remoteRelays.length > 0
? relayPool
.req(remoteRelays, { kinds: [30023], authors: pubkeys, limit: 100 })
.pipe(completeOnEose(), takeUntil(timer(6000)))
: new Observable<NostrEvent>((sub) => sub.complete())
const events = await lastValueFrom(merge(local$, remote$).pipe(toArray()))
processEvents(events)

console.log('📊 Blog post events fetched (unique):', uniqueEvents.size)

// Convert to blog post previews and sort by published date (most recent first)
const blogPosts: BlogPostPreview[] = Array.from(uniqueEvents.values())
.map(event => ({
event,
title: getArticleTitle(event) || 'Untitled',
summary: getArticleSummary(event),
image: getArticleImage(event),
published: getArticlePublished(event),
author: event.pubkey
}))
.map(event => {
const post: BlogPostPreview = {
event,
title: getArticleTitle(event) || 'Untitled',
summary: getArticleSummary(event),
image: getArticleImage(event),
published: getArticlePublished(event),
author: event.pubkey
}
if (onPost) onPost(post)
return post
})
.sort((a, b) => {
const timeA = a.published || a.event.created_at
const timeB = b.published || b.event.created_at
@@ -1,204 +1,5 @@
|
||||
import { RelayPool, completeOnEose, onlyEvents } from 'applesauce-relay'
|
||||
import { lastValueFrom, takeUntil, timer, tap, toArray } from 'rxjs'
|
||||
import { NostrEvent } from 'nostr-tools'
|
||||
import { Highlight } from '../types/highlights'
|
||||
import { RELAYS } from '../config/relays'
|
||||
import { eventToHighlight, dedupeHighlights, sortHighlights } from './highlightEventProcessor'
|
||||
import { UserSettings } from './settingsService'
|
||||
import { rebroadcastEvents } from './rebroadcastService'
|
||||
export * from './highlights/fetchForArticle'
|
||||
export * from './highlights/fetchForUrl'
|
||||
export * from './highlights/fetchByAuthor'
|
||||
|
||||
/**
|
||||
* Fetches highlights for a specific article by its address coordinate and/or event ID
|
||||
* @param relayPool - The relay pool to query
|
||||
* @param articleCoordinate - The article's address in format "kind:pubkey:identifier" (e.g., "30023:abc...def:my-article")
|
||||
* @param eventId - Optional event ID to also query by 'e' tag
|
||||
* @param onHighlight - Optional callback to receive highlights as they arrive
|
||||
* @param settings - User settings for rebroadcast options
|
||||
*/
|
||||
export const fetchHighlightsForArticle = async (
|
||||
relayPool: RelayPool,
|
||||
articleCoordinate: string,
|
||||
eventId?: string,
|
||||
onHighlight?: (highlight: Highlight) => void,
|
||||
settings?: UserSettings
|
||||
): Promise<Highlight[]> => {
|
||||
try {
|
||||
console.log('🔍 Fetching highlights (kind 9802) for article:', articleCoordinate)
|
||||
console.log('🔍 Event ID:', eventId || 'none')
|
||||
console.log('🔍 From relays (including local):', RELAYS)
|
||||
|
||||
const seenIds = new Set<string>()
|
||||
const processEvent = (event: NostrEvent): Highlight | null => {
|
||||
if (seenIds.has(event.id)) return null
|
||||
seenIds.add(event.id)
|
||||
return eventToHighlight(event)
|
||||
}
|
||||
|
||||
// Query for highlights that reference this article via the 'a' tag
|
||||
const aTagEvents = await lastValueFrom(
|
||||
relayPool
|
||||
.req(RELAYS, { kinds: [9802], '#a': [articleCoordinate] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
const highlight = processEvent(event)
|
||||
if (highlight && onHighlight) {
|
||||
onHighlight(highlight)
|
||||
}
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(10000)),
|
||||
toArray()
|
||||
)
|
||||
)
|
||||
|
||||
console.log('📊 Highlights via a-tag:', aTagEvents.length)
|
||||
|
||||
// If we have an event ID, also query for highlights that reference via the 'e' tag
|
||||
let eTagEvents: NostrEvent[] = []
|
||||
if (eventId) {
|
||||
eTagEvents = await lastValueFrom(
|
||||
relayPool
|
||||
.req(RELAYS, { kinds: [9802], '#e': [eventId] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
const highlight = processEvent(event)
|
||||
if (highlight && onHighlight) {
|
||||
onHighlight(highlight)
|
||||
}
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(10000)),
|
||||
toArray()
|
||||
)
|
||||
)
|
||||
console.log('📊 Highlights via e-tag:', eTagEvents.length)
|
||||
}
|
||||
|
||||
// Combine results from both queries
|
||||
const rawEvents = [...aTagEvents, ...eTagEvents]
|
||||
console.log('📊 Total raw highlight events fetched:', rawEvents.length)
|
||||
|
||||
// Rebroadcast highlight events to local/all relays based on settings
|
||||
await rebroadcastEvents(rawEvents, relayPool, settings)
|
||||
|
||||
if (rawEvents.length > 0) {
|
||||
console.log('📄 Sample highlight tags:', JSON.stringify(rawEvents[0].tags, null, 2))
|
||||
} else {
|
||||
console.log('❌ No highlights found. Article coordinate:', articleCoordinate)
|
||||
console.log('❌ Event ID:', eventId || 'none')
|
||||
console.log('💡 Try checking if there are any highlights on this article at https://highlighter.com')
|
||||
}
|
||||
|
||||
// Deduplicate events by ID
|
||||
const uniqueEvents = dedupeHighlights(rawEvents)
|
||||
console.log('📊 Unique highlight events after deduplication:', uniqueEvents.length)
|
||||
|
||||
const highlights: Highlight[] = uniqueEvents.map(eventToHighlight)
|
||||
return sortHighlights(highlights)
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch highlights for article:', error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches highlights for a specific URL
|
||||
* @param relayPool - The relay pool to query
|
||||
* @param url - The external URL to find highlights for
|
||||
* @param settings - User settings for rebroadcast options
|
||||
*/
|
||||
export const fetchHighlightsForUrl = async (
|
||||
relayPool: RelayPool,
|
||||
url: string,
|
||||
settings?: UserSettings
|
||||
): Promise<Highlight[]> => {
|
||||
try {
|
||||
console.log('🔍 Fetching highlights (kind 9802) for URL:', url)
|
||||
|
||||
const seenIds = new Set<string>()
|
||||
const rawEvents = await lastValueFrom(
|
||||
relayPool
|
||||
.req(RELAYS, { kinds: [9802], '#r': [url] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
seenIds.add(event.id)
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(10000)),
|
||||
toArray()
|
||||
)
|
||||
)
|
||||
|
||||
console.log('📊 Highlights for URL:', rawEvents.length)
|
||||
|
||||
// Rebroadcast highlight events to local/all relays based on settings
|
||||
await rebroadcastEvents(rawEvents, relayPool, settings)
|
||||
|
||||
const uniqueEvents = dedupeHighlights(rawEvents)
|
||||
const highlights: Highlight[] = uniqueEvents.map(eventToHighlight)
|
||||
return sortHighlights(highlights)
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch highlights for URL:', error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches highlights created by a specific user
|
||||
* @param relayPool - The relay pool to query
|
||||
* @param pubkey - The user's public key
|
||||
* @param onHighlight - Optional callback to receive highlights as they arrive
|
||||
* @param settings - User settings for rebroadcast options
|
||||
*/
|
||||
export const fetchHighlights = async (
|
||||
relayPool: RelayPool,
|
||||
pubkey: string,
|
||||
onHighlight?: (highlight: Highlight) => void,
|
||||
settings?: UserSettings
|
||||
): Promise<Highlight[]> => {
|
||||
try {
|
||||
const relayUrls = Array.from(relayPool.relays.values()).map(relay => relay.url)
|
||||
|
||||
console.log('🔍 Fetching highlights (kind 9802) by author:', pubkey)
|
||||
|
||||
const seenIds = new Set<string>()
|
||||
const rawEvents = await lastValueFrom(
|
||||
relayPool
|
||||
.req(relayUrls, { kinds: [9802], authors: [pubkey] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
if (!seenIds.has(event.id)) {
|
||||
seenIds.add(event.id)
|
||||
const highlight = eventToHighlight(event)
|
||||
if (onHighlight) {
|
||||
onHighlight(highlight)
|
||||
}
|
||||
}
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(10000)),
|
||||
toArray()
|
||||
)
|
||||
)
|
||||
|
||||
console.log('📊 Raw highlight events fetched:', rawEvents.length)
|
||||
|
||||
// Rebroadcast highlight events to local/all relays based on settings
|
||||
await rebroadcastEvents(rawEvents, relayPool, settings)
|
||||
|
||||
// Deduplicate and process events
|
||||
const uniqueEvents = dedupeHighlights(rawEvents)
|
||||
console.log('📊 Unique highlight events after deduplication:', uniqueEvents.length)
|
||||
|
||||
const highlights: Highlight[] = uniqueEvents.map(eventToHighlight)
|
||||
return sortHighlights(highlights)
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch highlights by author:', error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
src/services/highlights/fetchByAuthor.ts (new file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
import { RelayPool, completeOnEose, onlyEvents } from 'applesauce-relay'
|
||||
import { lastValueFrom, merge, Observable, takeUntil, timer, tap, toArray } from 'rxjs'
|
||||
import { NostrEvent } from 'nostr-tools'
|
||||
import { Highlight } from '../../types/highlights'
|
||||
import { prioritizeLocalRelays, partitionRelays } from '../../utils/helpers'
|
||||
import { eventToHighlight, dedupeHighlights, sortHighlights } from '../highlightEventProcessor'
|
||||
import { UserSettings } from '../settingsService'
|
||||
import { rebroadcastEvents } from '../rebroadcastService'
|
||||
|
||||
export const fetchHighlights = async (
|
||||
relayPool: RelayPool,
|
||||
pubkey: string,
|
||||
onHighlight?: (highlight: Highlight) => void,
|
||||
settings?: UserSettings
|
||||
): Promise<Highlight[]> => {
|
||||
try {
|
||||
const relayUrls = Array.from(relayPool.relays.values()).map(relay => relay.url)
|
||||
const ordered = prioritizeLocalRelays(relayUrls)
|
||||
const { local: localRelays, remote: remoteRelays } = partitionRelays(ordered)
|
||||
|
||||
const seenIds = new Set<string>()
|
||||
const local$ = localRelays.length > 0
|
||||
? relayPool
|
||||
.req(localRelays, { kinds: [9802], authors: [pubkey] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
if (!seenIds.has(event.id)) {
|
||||
seenIds.add(event.id)
|
||||
if (onHighlight) onHighlight(eventToHighlight(event))
|
||||
}
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(1200))
|
||||
)
|
||||
: new Observable<NostrEvent>((sub) => sub.complete())
|
||||
const remote$ = remoteRelays.length > 0
|
||||
? relayPool
|
||||
.req(remoteRelays, { kinds: [9802], authors: [pubkey] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
if (!seenIds.has(event.id)) {
|
||||
seenIds.add(event.id)
|
||||
if (onHighlight) onHighlight(eventToHighlight(event))
|
||||
}
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(6000))
|
||||
)
|
||||
: new Observable<NostrEvent>((sub) => sub.complete())
|
||||
const rawEvents: NostrEvent[] = await lastValueFrom(merge(local$, remote$).pipe(toArray()))
|
||||
|
||||
await rebroadcastEvents(rawEvents, relayPool, settings)
|
||||
const uniqueEvents = dedupeHighlights(rawEvents)
|
||||
const highlights = uniqueEvents.map(eventToHighlight)
|
||||
return sortHighlights(highlights)
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
src/services/highlights/fetchForArticle.ts (new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
import { RelayPool, completeOnEose, onlyEvents } from 'applesauce-relay'
|
||||
import { lastValueFrom, merge, Observable, takeUntil, timer, tap, toArray } from 'rxjs'
|
||||
import { NostrEvent } from 'nostr-tools'
|
||||
import { Highlight } from '../../types/highlights'
|
||||
import { RELAYS } from '../../config/relays'
|
||||
import { prioritizeLocalRelays, partitionRelays } from '../../utils/helpers'
|
||||
import { eventToHighlight, dedupeHighlights, sortHighlights } from '../highlightEventProcessor'
|
||||
import { UserSettings } from '../settingsService'
|
||||
import { rebroadcastEvents } from '../rebroadcastService'
|
||||
|
||||
export const fetchHighlightsForArticle = async (
|
||||
relayPool: RelayPool,
|
||||
articleCoordinate: string,
|
||||
eventId?: string,
|
||||
onHighlight?: (highlight: Highlight) => void,
|
||||
settings?: UserSettings
|
||||
): Promise<Highlight[]> => {
|
||||
try {
|
||||
const seenIds = new Set<string>()
|
||||
const processEvent = (event: NostrEvent): Highlight | null => {
|
||||
if (seenIds.has(event.id)) return null
|
||||
seenIds.add(event.id)
|
||||
return eventToHighlight(event)
|
||||
}
|
||||
|
||||
const orderedRelays = prioritizeLocalRelays(RELAYS)
|
||||
const { local: localRelays, remote: remoteRelays } = partitionRelays(orderedRelays)
|
||||
|
||||
const aLocal$ = localRelays.length > 0
|
||||
? relayPool
|
||||
.req(localRelays, { kinds: [9802], '#a': [articleCoordinate] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
const highlight = processEvent(event)
|
||||
if (highlight && onHighlight) onHighlight(highlight)
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(1200))
|
||||
)
|
||||
: new Observable<NostrEvent>((sub) => sub.complete())
|
||||
const aRemote$ = remoteRelays.length > 0
|
||||
? relayPool
|
||||
.req(remoteRelays, { kinds: [9802], '#a': [articleCoordinate] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
const highlight = processEvent(event)
|
||||
if (highlight && onHighlight) onHighlight(highlight)
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(6000))
|
||||
)
|
||||
: new Observable<NostrEvent>((sub) => sub.complete())
|
||||
const aTagEvents: NostrEvent[] = await lastValueFrom(merge(aLocal$, aRemote$).pipe(toArray()))
|
||||
|
||||
let eTagEvents: NostrEvent[] = []
|
||||
if (eventId) {
|
||||
const eLocal$ = localRelays.length > 0
|
||||
? relayPool
|
||||
.req(localRelays, { kinds: [9802], '#e': [eventId] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
const highlight = processEvent(event)
|
||||
if (highlight && onHighlight) onHighlight(highlight)
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(1200))
|
||||
)
|
||||
: new Observable<NostrEvent>((sub) => sub.complete())
|
||||
const eRemote$ = remoteRelays.length > 0
|
||||
? relayPool
|
||||
.req(remoteRelays, { kinds: [9802], '#e': [eventId] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
const highlight = processEvent(event)
|
||||
if (highlight && onHighlight) onHighlight(highlight)
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(6000))
|
||||
)
|
||||
: new Observable<NostrEvent>((sub) => sub.complete())
|
||||
eTagEvents = await lastValueFrom(merge(eLocal$, eRemote$).pipe(toArray()))
|
||||
}
|
||||
|
||||
const rawEvents = [...aTagEvents, ...eTagEvents]
|
||||
await rebroadcastEvents(rawEvents, relayPool, settings)
|
||||
const uniqueEvents = dedupeHighlights(rawEvents)
|
||||
const highlights: Highlight[] = uniqueEvents.map(eventToHighlight)
|
||||
return sortHighlights(highlights)
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
src/services/highlights/fetchForUrl.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
import { RelayPool, completeOnEose, onlyEvents } from 'applesauce-relay'
|
||||
import { lastValueFrom, merge, Observable, takeUntil, timer, tap, toArray } from 'rxjs'
|
||||
import { NostrEvent } from 'nostr-tools'
|
||||
import { Highlight } from '../../types/highlights'
|
||||
import { RELAYS } from '../../config/relays'
|
||||
import { prioritizeLocalRelays, partitionRelays } from '../../utils/helpers'
|
||||
import { eventToHighlight, dedupeHighlights, sortHighlights } from '../highlightEventProcessor'
|
||||
import { UserSettings } from '../settingsService'
|
||||
import { rebroadcastEvents } from '../rebroadcastService'
|
||||
|
||||
export const fetchHighlightsForUrl = async (
|
||||
relayPool: RelayPool,
|
||||
url: string,
|
||||
onHighlight?: (highlight: Highlight) => void,
|
||||
settings?: UserSettings
|
||||
): Promise<Highlight[]> => {
|
||||
try {
|
||||
const seenIds = new Set<string>()
|
||||
const orderedRelaysUrl = prioritizeLocalRelays(RELAYS)
|
||||
const { local: localRelaysUrl, remote: remoteRelaysUrl } = partitionRelays(orderedRelaysUrl)
|
||||
const local$ = localRelaysUrl.length > 0
|
||||
? relayPool
|
||||
.req(localRelaysUrl, { kinds: [9802], '#r': [url] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
seenIds.add(event.id)
|
||||
if (onHighlight) onHighlight(eventToHighlight(event))
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(1200))
|
||||
)
|
||||
: new Observable<NostrEvent>((sub) => sub.complete())
|
||||
const remote$ = remoteRelaysUrl.length > 0
|
||||
? relayPool
|
||||
.req(remoteRelaysUrl, { kinds: [9802], '#r': [url] })
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
tap((event: NostrEvent) => {
|
||||
seenIds.add(event.id)
|
||||
if (onHighlight) onHighlight(eventToHighlight(event))
|
||||
}),
|
||||
completeOnEose(),
|
||||
takeUntil(timer(6000))
|
||||
)
|
||||
: new Observable<NostrEvent>((sub) => sub.complete())
|
||||
const rawEvents: NostrEvent[] = await lastValueFrom(merge(local$, remote$).pipe(toArray()))
|
||||
await rebroadcastEvents(rawEvents, relayPool, settings)
|
||||
const uniqueEvents = dedupeHighlights(rawEvents)
|
||||
const highlights: Highlight[] = uniqueEvents.map(eventToHighlight)
|
||||
return sortHighlights(highlights)
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -63,3 +63,58 @@ export const hasRemoteRelay = (relayUrls: string[]): boolean => {
return relayUrls.some(url => !isLocalRelay(url))
}

/**
* Splits relay URLs into local and remote groups
*/
export const partitionRelays = (
relayUrls: string[]
): { local: string[]; remote: string[] } => {
const local: string[] = []
const remote: string[] = []
for (const url of relayUrls) {
if (isLocalRelay(url)) local.push(url)
else remote.push(url)
}
return { local, remote }
}

/**
* Returns relays ordered with local first while keeping uniqueness
*/
export const prioritizeLocalRelays = (relayUrls: string[]): string[] => {
const { local, remote } = partitionRelays(relayUrls)
const seen = new Set<string>()
const out: string[] = []
for (const url of [...local, ...remote]) {
if (!seen.has(url)) {
seen.add(url)
out.push(url)
}
}
return out
}

// Parallel request helper
import { completeOnEose, onlyEvents, RelayPool } from 'applesauce-relay'
import { Observable, takeUntil, timer } from 'rxjs'

export function createParallelReqStreams(
relayPool: RelayPool,
localRelays: string[],
remoteRelays: string[],
// eslint-disable-next-line @typescript-eslint/no-explicit-any
filter: any,
localTimeoutMs = 1200,
remoteTimeoutMs = 6000
): { local$: Observable<unknown>; remote$: Observable<unknown> } {
const local$ = (localRelays.length > 0)
? relayPool.req(localRelays, filter).pipe(onlyEvents(), completeOnEose(), takeUntil(timer(localTimeoutMs)))
: new Observable<unknown>((sub) => { sub.complete() })

const remote$ = (remoteRelays.length > 0)
? relayPool.req(remoteRelays, filter).pipe(onlyEvents(), completeOnEose(), takeUntil(timer(remoteTimeoutMs)))
: new Observable<unknown>((sub) => { sub.complete() })

return { local$, remote$ }
}
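Note: createParallelReqStreams returns two time-bounded streams and leaves the combination policy to the caller; fetchArticleByNaddr above races the first event from each branch, while the list-style fetchers drain both and dedupe afterwards. A usage sketch of the race style (identifiers other than createParallelReqStreams are illustrative):

import { RelayPool } from 'applesauce-relay'
import { lastValueFrom, merge, take, toArray } from 'rxjs'
import { NostrEvent } from 'nostr-tools'
import { createParallelReqStreams } from '../utils/helpers'

// Query local and remote relays in parallel, keep at most one event from
// each branch, and return whatever arrived before the per-branch timeouts.
async function fetchOneLocalFirst(
  relayPool: RelayPool,
  localRelays: string[],
  remoteRelays: string[],
  filter: object
): Promise<NostrEvent[]> {
  const { local$, remote$ } = createParallelReqStreams(relayPool, localRelays, remoteRelays, filter, 1200, 6000)
  const collected = await lastValueFrom(merge(local$.pipe(take(1)), remote$.pipe(take(1))).pipe(toArray()))
  return collected as NostrEvent[]
}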