Mirror of https://github.com/dergigi/boris.git (synced 2026-02-17 04:54:56 +01:00)

Compare commits (19 commits):

- 9ab6847501
- 31afe3792e
- ebe8ecf63b
- c418000a0c
- 15fd19f6a4
- 2a44b4e3c0
- aa7807e3d2
- 359d3d0dd6
- d40b3c0048
- 7b4ca50b16
- 76e001aba4
- 0b42aeb383
- a4554e5176
- 2e844fc26b
- 8c0a4cac16
- c6eccc9589
- 2e5536c331
- fc025b9579
- 88db14c352
@@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

## [0.8.6] - 2025-10-20

### Fixed

- React Hooks violations in NostrMentionLink component
- Fixed useEffect dependency warnings by removing isMounted from dependencies
- Reverted to inline mount tracking with useRef for safer lifecycle handling

## [0.8.4] - 2024-10-20

### Added
@@ -1,6 +1,6 @@
{
  "name": "boris",
  "version": "0.8.6",
  "version": "0.9.0",
  "description": "A minimal nostr client for bookmark management",
  "homepage": "https://read.withboris.com/",
  "type": "module",
178 src/App.tsx
@@ -18,7 +18,8 @@ import { useToast } from './hooks/useToast'
import { useOnlineStatus } from './hooks/useOnlineStatus'
import { RELAYS } from './config/relays'
import { SkeletonThemeProvider } from './components/Skeletons'
import { DebugBus } from './utils/debugBus'
import { loadUserRelayList, loadBlockedRelays, computeRelaySet } from './services/relayListService'
import { applyRelaySetToPool, getActiveRelayUrls, ALWAYS_LOCAL_RELAYS } from './services/relayManager'
import { Bookmark } from './types/bookmarks'
import { bookmarkController } from './services/bookmarkController'
import { contactsController } from './services/contactsController'
@@ -400,6 +401,8 @@ function App() {

// Create a relay group for better event deduplication and management
pool.group(RELAYS)
console.log('[relay-init] Initial pool setup - added RELAYS:', RELAYS.length, 'relays')
console.log('[relay-init] Pool now has:', Array.from(pool.relays.keys()).length, 'relays')

// Load persisted accounts from localStorage
try {
@@ -417,14 +420,10 @@

if (account) {
accounts.setActive(activeId)
} else {
console.warn('[bunker] ⚠️ Active ID found but account not in list')
}
} else {
// No active account ID in localStorage
}
} catch (err) {
console.error('[bunker] ❌ Failed to load accounts from storage:', err)
console.error('Failed to load accounts from storage:', err)
}

// Subscribe to accounts changes and persist to localStorage
@@ -493,61 +492,27 @@

try {
const mergedRelays = Array.from(new Set([...(signerData.relays || []), ...RELAYS]))
recreatedSigner.relays = mergedRelays
} catch (err) { console.warn('[bunker] failed to merge signer relays', err) }
} catch (err) { /* ignore */ }

// Replace the signer on the account
nostrConnectAccount.signer = recreatedSigner

// Debug: log publish/subscription calls made by signer (decrypt/sign requests)
// Fire-and-forget publish for bunker: trigger but don't wait for completion
// IMPORTANT: bind originals to preserve `this` context used internally by the signer
const originalPublish = (recreatedSigner as unknown as { publishMethod: (relays: string[], event: unknown) => unknown }).publishMethod.bind(recreatedSigner)
;(recreatedSigner as unknown as { publishMethod: (relays: string[], event: unknown) => unknown }).publishMethod = (relays: string[], event: unknown) => {
try {
let method: string | undefined
const content = (event as { content?: unknown })?.content
if (typeof content === 'string') {
try {
const parsed = JSON.parse(content) as { method?: string; id?: unknown }
method = parsed?.method
} catch (err) { console.warn('[bunker] failed to parse event content', err) }
}
const summary = {
relays,
kind: (event as { kind?: number })?.kind,
method,
// include tags array for debugging (NIP-46 expects method tag)
tags: (event as { tags?: unknown })?.tags,
contentLength: typeof content === 'string' ? content.length : undefined
}
try { DebugBus.info('bunker', 'publish', summary) } catch (err) { console.warn('[bunker] failed to log to DebugBus', err) }
} catch (err) { console.warn('[bunker] failed to log publish summary', err) }
// Fire-and-forget publish: trigger the publish but do not return the
// Observable/Promise upstream, so callers never await its completion.
const result = originalPublish(relays, event)
if (result && typeof (result as { subscribe?: unknown }).subscribe === 'function') {
// Subscribe to the observable but ignore completion/errors (fire-and-forget)
try { (result as { subscribe: (h: { complete?: () => void; error?: (e: unknown) => void }) => unknown }).subscribe({ complete: () => { /* noop */ }, error: () => { /* noop */ } }) } catch { /* ignore */ }
}
// If it's a Promise, simply ignore it (no await) so it resolves in the background.
// Return a benign object so callers that probe for a "subscribe" property
// (e.g., applesauce makeRequest) won't throw on `"subscribe" in result`.
return {} as unknown as never
}
const originalSubscribe = (recreatedSigner as unknown as { subscriptionMethod: (relays: string[], filters: unknown[]) => unknown }).subscriptionMethod.bind(recreatedSigner)
;(recreatedSigner as unknown as { subscriptionMethod: (relays: string[], filters: unknown[]) => unknown }).subscriptionMethod = (relays: string[], filters: unknown[]) => {
try {
try { DebugBus.info('bunker', 'subscribe', { relays, filters }) } catch (err) { console.warn('[bunker] failed to log subscribe to DebugBus', err) }
} catch (err) { console.warn('[bunker] failed to log subscribe summary', err) }
return originalSubscribe(relays, filters)
}

// Just ensure the signer is listening for responses - don't call connect() again
// The fromBunkerURI already connected with permissions during login
if (!nostrConnectAccount.signer.listening) {
await nostrConnectAccount.signer.open()
} else {
// Signer already listening
}

// Attempt a guarded reconnect to ensure Amber authorizes decrypt operations
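The override above keeps NIP-46 request publishes from blocking on relay acknowledgements. As a standalone illustration of the same pattern, here is a minimal sketch; the type names (Publisher, makeFireAndForget) are invented for this example and are not part of applesauce:

type Handlers = { complete?: () => void; error?: (e: unknown) => void }
type PublishResult = Promise<unknown> | { subscribe: (h: Handlers) => unknown }

interface Publisher {
  publishMethod: (relays: string[], event: unknown) => PublishResult
}

// Wrap publishMethod so callers never wait on relay acknowledgements.
function makeFireAndForget(signer: Publisher): void {
  const original = signer.publishMethod.bind(signer) // bind to keep the signer's `this`
  signer.publishMethod = (relays, event) => {
    const result = original(relays, event)
    if (result && typeof (result as { subscribe?: unknown }).subscribe === 'function') {
      // Observable-like result: start it, but swallow completion and errors.
      try {
        (result as { subscribe: (h: Handlers) => unknown }).subscribe({ complete: () => {}, error: () => {} })
      } catch { /* ignore */ }
    }
    // A Promise result is simply left to settle in the background (no await).
    // Return a benign object so callers that probe for `subscribe` don't throw.
    return {} as PublishResult
  }
}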
@@ -557,7 +522,7 @@

await nostrConnectAccount.signer.connect(undefined, permissions)
}
} catch (e) {
console.warn('[bunker] ⚠️ Guarded connect() failed:', e)
// Ignore reconnect errors
}

// Give the subscription a moment to fully establish before allowing decrypt operations
@@ -597,17 +562,137 @@

// Mark this account as reconnected
reconnectedAccounts.add(account.id)
} catch (error) {
console.error('[bunker] ❌ Failed to open signer:', error)
console.error('Failed to open signer:', error)
}
}
})

// Handle user relay list and blocked relays when account changes
const userRelaysSub = accounts.active$.subscribe((account) => {
console.log('[relay-init] userRelaysSub fired, account:', account ? 'logged in' : 'logged out')
console.log('[relay-init] Pool has', Array.from(pool.relays.keys()).length, 'relays before applying changes')
if (account) {
// User logged in - start with hardcoded relays immediately, then stream user relay list updates
const pubkey = account.pubkey

// Bunker relays (if any)
let bunkerRelays: string[] = []
if (account.type === 'nostr-connect') {
const nostrConnectAccount = account as Accounts.NostrConnectAccount<unknown>
const signerData = nostrConnectAccount.toJSON().signer
bunkerRelays = signerData.relays || []
}
console.log('[relay-init] Bunker relays:', bunkerRelays.length, 'relays', bunkerRelays)

// Start with hardcoded + bunker relays immediately (non-blocking)
const initialRelays = computeRelaySet({
hardcoded: RELAYS,
bunker: bunkerRelays,
userList: [],
blocked: [],
alwaysIncludeLocal: ALWAYS_LOCAL_RELAYS
})
console.log('[relay-init] Initial relay set (hardcoded):', initialRelays.length, 'relays', initialRelays)

// Apply initial set immediately
applyRelaySetToPool(pool, initialRelays)
console.log('[relay-init] After initial applyRelaySetToPool, pool has:', Array.from(pool.relays.keys()).length, 'relays')

// Prepare keep-alive helper
const updateKeepAlive = () => {
const poolWithSub = pool as unknown as { _keepAliveSubscription?: { unsubscribe: () => void } }
if (poolWithSub._keepAliveSubscription) {
poolWithSub._keepAliveSubscription.unsubscribe()
}
const activeRelays = getActiveRelayUrls(pool)
const newKeepAliveSub = pool.subscription(activeRelays, { kinds: [0], limit: 0 }).subscribe({
next: () => {},
error: () => {}
})
poolWithSub._keepAliveSubscription = newKeepAliveSub
}

// Begin loading blocked relays in background
const blockedPromise = loadBlockedRelays(pool, pubkey)

// Stream user relay list; apply immediately on first/updated event
loadUserRelayList(pool, pubkey, {
onUpdate: (userRelays) => {
const interimRelays = computeRelaySet({
hardcoded: [],
bunker: bunkerRelays,
userList: userRelays,
blocked: [],
alwaysIncludeLocal: ALWAYS_LOCAL_RELAYS
})
console.log('[relay-init] Interim relay set from first user list:', interimRelays.length, 'relays', interimRelays)
applyRelaySetToPool(pool, interimRelays)
updateKeepAlive()
}
}).then(async (userRelayList) => {
const blockedRelays = await blockedPromise.catch(() => [])
console.log('[relay-init] User relay list (10002):', userRelayList.length, 'relays', userRelayList.map(r => r.url))
console.log('[relay-init] Blocked relays (10006):', blockedRelays.length, 'relays', blockedRelays)

const finalRelays = computeRelaySet({
hardcoded: userRelayList.length > 0 ? [] : RELAYS,
bunker: bunkerRelays,
userList: userRelayList,
blocked: blockedRelays,
alwaysIncludeLocal: ALWAYS_LOCAL_RELAYS
})
console.log('[relay-init] Final relay set (with user preferences):', finalRelays.length, 'relays', finalRelays)
applyRelaySetToPool(pool, finalRelays)
console.log('[relay-init] After user relay list apply, pool has:', Array.from(pool.relays.keys()).length, 'relays')
console.log('[relay-init] Final relay URLs:', Array.from(pool.relays.keys()))
updateKeepAlive()

// Update address loader with new relays
const activeRelays = getActiveRelayUrls(pool)
const addressLoader = createAddressLoader(pool, {
eventStore: store,
lookupRelays: activeRelays
})
store.addressableLoader = addressLoader
store.replaceableLoader = addressLoader
}).catch((error) => {
console.error('[relay-init] Failed to load user relay list (continuing with initial set):', error)
// Continue with initial relay set on error - no need to change anything
})
} else {
// User logged out - reset to hardcoded relays
console.log('[relay-init] Applying RELAYS for logged out user, RELAYS.length:', RELAYS.length)
applyRelaySetToPool(pool, RELAYS)
console.log('[relay-init] After applyRelaySetToPool (logged out), pool has:', Array.from(pool.relays.keys()).length, 'relays')
console.log('[relay-init] Relay URLs:', Array.from(pool.relays.keys()))

// Update keep-alive subscription
const poolWithSub = pool as unknown as { _keepAliveSubscription?: { unsubscribe: () => void } }
if (poolWithSub._keepAliveSubscription) {
poolWithSub._keepAliveSubscription.unsubscribe()
}
const newKeepAliveSub = pool.subscription(RELAYS, { kinds: [0], limit: 0 }).subscribe({
next: () => {},
error: () => {}
})
poolWithSub._keepAliveSubscription = newKeepAliveSub

// Reset address loader
const addressLoader = createAddressLoader(pool, {
eventStore: store,
lookupRelays: RELAYS
})
store.addressableLoader = addressLoader
store.replaceableLoader = addressLoader
}
})

// Keep all relay connections alive indefinitely by creating a persistent subscription
// This prevents disconnection when no other subscriptions are active
// Create a minimal subscription that never completes to keep connections alive
const keepAliveSub = pool.subscription(RELAYS, { kinds: [0], limit: 0 }).subscribe({
next: () => {}, // No-op, we don't care about events
error: (err) => console.warn('Keep-alive subscription error:', err)
next: () => {},
error: () => {}
})

// Store subscription for cleanup
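One subtlety in the flow above: once a kind-10002 list arrives, the hardcoded RELAYS are dropped entirely (`hardcoded: userRelayList.length > 0 ? [] : RELAYS`), so the user's published preferences take precedence. A small illustration of that precedence using computeRelaySet as defined in src/services/relayListService.ts later in this diff (relay URLs here are placeholders, and the expected results follow directly from the function body):

import { computeRelaySet } from './services/relayListService'
import { ALWAYS_LOCAL_RELAYS } from './services/relayManager'

const HARDCODED = ['wss://relay.damus.io', 'wss://nos.lol']
const userList = [{ url: 'wss://relay.example.com', mode: 'both' as const }]

// Before the user's kind-10002 arrives: hardcoded + local relays.
computeRelaySet({ hardcoded: HARDCODED, userList: [], blocked: [], alwaysIncludeLocal: ALWAYS_LOCAL_RELAYS })
// => ['wss://relay.damus.io', 'wss://nos.lol', 'ws://localhost:10547', 'ws://localhost:4869']

// After it arrives: hardcoded relays are omitted, user relays + local relays remain.
computeRelaySet({ hardcoded: [], userList, blocked: [], alwaysIncludeLocal: ALWAYS_LOCAL_RELAYS })
// => ['wss://relay.example.com', 'ws://localhost:10547', 'ws://localhost:4869']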
@@ -630,6 +715,7 @@ function App() {

accountsSub.unsubscribe()
activeSub.unsubscribe()
bunkerReconnectSub.unsubscribe()
userRelaysSub.unsubscribe()
// Clean up keep-alive subscription if it exists
const poolWithSub = pool as unknown as { _keepAliveSubscription?: { unsubscribe: () => void } }
if (poolWithSub._keepAliveSubscription) {
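The `pool as unknown as { _keepAliveSubscription?: ... }` cast appears in several of the hunks above. A hedged alternative, not what this code does but a sketch of the same idea, is to keep the keep-alive handle outside the pool so the repeated casts disappear:

import { RelayPool } from 'applesauce-relay'

// Minimal shape of an RxJS-style subscription handle.
interface SubscriptionLike { unsubscribe: () => void }

let keepAlive: SubscriptionLike | null = null

// Replace the current keep-alive subscription with one over the given relays.
function resetKeepAlive(pool: RelayPool, relays: string[]): void {
  keepAlive?.unsubscribe()
  keepAlive = pool.subscription(relays, { kinds: [0], limit: 0 }).subscribe({
    next: () => {},   // events are irrelevant; the point is to hold connections open
    error: () => {}
  })
}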
@@ -17,8 +17,8 @@ import { groupIndividualBookmarks, hasContent, getBookmarkSets, getBookmarksWith
|
||||
import { UserSettings } from '../services/settingsService'
|
||||
import AddBookmarkModal from './AddBookmarkModal'
|
||||
import { createWebBookmark } from '../services/webBookmarkService'
|
||||
import { RELAYS } from '../config/relays'
|
||||
import { Hooks } from 'applesauce-react'
|
||||
import { getActiveRelayUrls } from '../services/relayManager'
|
||||
import BookmarkFilters, { BookmarkFilterType } from './BookmarkFilters'
|
||||
import { filterBookmarksByType } from '../utils/bookmarkTypeClassifier'
|
||||
import LoginOptions from './LoginOptions'
|
||||
@@ -125,7 +125,7 @@ export const BookmarkList: React.FC<BookmarkListProps> = ({
|
||||
throw new Error('Please login to create bookmarks')
|
||||
}
|
||||
|
||||
await createWebBookmark(url, title, description, tags, activeAccount, relayPool, RELAYS)
|
||||
await createWebBookmark(url, title, description, tags, activeAccount, relayPool, getActiveRelayUrls(relayPool))
|
||||
}
|
||||
|
||||
// Pull-to-refresh for bookmarks
|
||||
|
||||
@@ -10,8 +10,8 @@ import { faSpinner, faCheckCircle, faEllipsisH, faExternalLinkAlt, faMobileAlt,
|
||||
import { ContentSkeleton } from './Skeletons'
|
||||
import { nip19 } from 'nostr-tools'
|
||||
import { getNostrUrl, getSearchUrl } from '../config/nostrGateways'
|
||||
import { RELAYS } from '../config/relays'
|
||||
import { RelayPool } from 'applesauce-relay'
|
||||
import { getActiveRelayUrls } from '../services/relayManager'
|
||||
import { IAccount } from 'applesauce-accounts'
|
||||
import { NostrEvent } from 'nostr-tools'
|
||||
import { Highlight } from '../types/highlights'
|
||||
@@ -357,7 +357,8 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
|
||||
if (!currentArticle) return null
|
||||
|
||||
const dTag = currentArticle.tags.find(t => t[0] === 'd')?.[1] || ''
|
||||
const relayHints = RELAYS.filter(r =>
|
||||
const activeRelays = relayPool ? getActiveRelayUrls(relayPool) : []
|
||||
const relayHints = activeRelays.filter(r =>
|
||||
!r.includes('localhost') && !r.includes('127.0.0.1')
|
||||
).slice(0, 3)
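For context on what those hints are used for: nostr-tools' nip19 pointer encoders accept an optional relays array, so the filtered, non-local hints can be embedded in a shareable identifier. A hedged sketch (the exact call sites in ContentPanel may differ; the helper name is invented):

import { nip19 } from 'nostr-tools'

// Encode an addressable article (kind 30023) with up to 3 non-local relay hints.
function encodeArticleAddress(pubkey: string, dTag: string, activeRelays: string[]): string {
  const relays = activeRelays
    .filter(r => !r.includes('localhost') && !r.includes('127.0.0.1'))
    .slice(0, 3)
  return nip19.naddrEncode({ kind: 30023, pubkey, identifier: dTag, relays })
}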
|
||||
|
||||
@@ -579,9 +580,8 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
|
||||
try {
|
||||
const naddr = nip19.naddrEncode({ kind: 30023, pubkey: currentArticle.pubkey, identifier: dTag })
|
||||
hasRead = hasRead || archiveController.isMarked(naddr)
|
||||
console.log('[archive][content] check article', { naddr: naddr.slice(0, 24) + '...', hasRead })
|
||||
} catch (e) {
|
||||
console.warn('[archive][content] encode naddr failed', e)
|
||||
// Silently ignore encoding errors
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -593,7 +593,6 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
|
||||
// Also check archiveController
|
||||
const ctrl = archiveController.isMarked(selectedUrl)
|
||||
hasRead = hasRead || ctrl
|
||||
console.log('[archive][content] check url', { url: selectedUrl, hasRead, ctrl })
|
||||
}
|
||||
setIsMarkedAsRead(hasRead)
|
||||
} catch (error) {
|
||||
@@ -674,7 +673,6 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
|
||||
if (dTag) {
|
||||
const naddr = nip19.naddrEncode({ kind: 30023, pubkey: currentArticle.pubkey, identifier: dTag })
|
||||
archiveController.mark(naddr)
|
||||
console.log('[archive][content] optimistic mark article', naddr.slice(0, 24) + '...')
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn('[archive][content] optimistic article mark failed', err)
|
||||
@@ -686,7 +684,6 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
|
||||
relayPool
|
||||
)
|
||||
archiveController.mark(selectedUrl)
|
||||
console.log('[archive][content] optimistic mark url', selectedUrl)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to mark as read:', error)
|
||||
|
||||
@@ -114,6 +114,12 @@ const Debug: React.FC<DebugProps> = ({
|
||||
const [markAsReadReactions, setMarkAsReadReactions] = useState<NostrEvent[]>([])
|
||||
const [tLoadMarkAsRead, setTLoadMarkAsRead] = useState<number | null>(null)
|
||||
const [tFirstMarkAsRead, setTFirstMarkAsRead] = useState<number | null>(null)
|
||||
|
||||
// Relay list loading state
|
||||
const [isLoadingRelayList, setIsLoadingRelayList] = useState(false)
|
||||
const [relayListEvents, setRelayListEvents] = useState<NostrEvent[]>([])
|
||||
const [tLoadRelayList, setTLoadRelayList] = useState<number | null>(null)
|
||||
const [tFirstRelayList, setTFirstRelayList] = useState<number | null>(null)
|
||||
|
||||
// Deduplicated reading progress from controller
|
||||
const [deduplicatedProgressMap, setDeduplicatedProgressMap] = useState<Map<string, number>>(new Map())
|
||||
@@ -127,6 +133,7 @@ const Debug: React.FC<DebugProps> = ({
|
||||
loadHighlights?: { startTime: number }
|
||||
loadReadingProgress?: { startTime: number }
|
||||
loadMarkAsRead?: { startTime: number }
|
||||
loadRelayList?: { startTime: number }
|
||||
}>({})
|
||||
|
||||
// Web of Trust state
|
||||
@@ -886,6 +893,70 @@ const Debug: React.FC<DebugProps> = ({
|
||||
DebugBus.info('debug', 'Cleared mark-as-read reactions data')
|
||||
}
|
||||
|
||||
const handleLoadRelayList = async () => {
|
||||
if (!relayPool || !activeAccount?.pubkey) {
|
||||
DebugBus.warn('debug', 'Please log in to load relay list')
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
setIsLoadingRelayList(true)
|
||||
setRelayListEvents([])
|
||||
setTLoadRelayList(null)
|
||||
setTFirstRelayList(null)
|
||||
DebugBus.info('debug', 'Loading relay list (kind 10002)...')
|
||||
|
||||
const start = performance.now()
|
||||
let firstEventTime: number | null = null
|
||||
setLiveTiming(prev => ({ ...prev, loadRelayList: { startTime: start } }))
|
||||
|
||||
const { queryEvents } = await import('../services/dataFetch')
|
||||
|
||||
// Query for kind:10002 (relay list)
|
||||
const events = await queryEvents(relayPool, {
|
||||
kinds: [10002],
|
||||
authors: [activeAccount.pubkey],
|
||||
limit: 10
|
||||
}, {
|
||||
onEvent: (evt) => {
|
||||
if (firstEventTime === null) {
|
||||
firstEventTime = performance.now() - start
|
||||
setTFirstRelayList(Math.round(firstEventTime))
|
||||
}
|
||||
setRelayListEvents(prev => [...prev, evt])
|
||||
}
|
||||
})
|
||||
|
||||
const elapsed = Math.round(performance.now() - start)
|
||||
setTLoadRelayList(elapsed)
|
||||
setLiveTiming(prev => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars, no-unused-vars
|
||||
const { loadRelayList, ...rest } = prev
|
||||
return rest
|
||||
})
|
||||
|
||||
DebugBus.info('debug', `Loaded ${events.length} relay list events in ${elapsed}ms`)
|
||||
|
||||
// Log details about the events
|
||||
events.forEach((event, index) => {
|
||||
const relayCount = event.tags.filter(tag => tag[0] === 'r').length
|
||||
DebugBus.info('debug', `Event ${index + 1}: ${relayCount} relays, created ${new Date(event.created_at * 1000).toISOString()}`)
|
||||
})
|
||||
} catch (err) {
|
||||
console.error('Failed to load relay list:', err)
|
||||
DebugBus.error('debug', `Failed to load relay list: ${err instanceof Error ? err.message : String(err)}`)
|
||||
} finally {
|
||||
setIsLoadingRelayList(false)
|
||||
}
|
||||
}
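The handler above measures two things: time to the first streamed event and total time until the query resolves. Factored out as a reusable helper, built on the same queryEvents signature used above (the helper name and the filter type are assumptions for this sketch):

import { NostrEvent } from 'nostr-tools'
import { RelayPool } from 'applesauce-relay'
import { queryEvents } from '../services/dataFetch'

interface TimedResult { events: NostrEvent[]; totalMs: number; firstEventMs: number | null }

// Run a streaming query and report first-event and total latency.
async function timeQuery(
  relayPool: RelayPool,
  filter: { kinds: number[]; authors?: string[]; limit?: number }
): Promise<TimedResult> {
  const start = performance.now()
  let firstEventMs: number | null = null
  const events = await queryEvents(relayPool, filter, {
    onEvent: () => {
      if (firstEventMs === null) firstEventMs = Math.round(performance.now() - start)
    }
  })
  return { events, totalMs: Math.round(performance.now() - start), firstEventMs }
}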
|
||||
|
||||
const handleClearRelayList = () => {
|
||||
setRelayListEvents([])
|
||||
setTLoadRelayList(null)
|
||||
setTFirstRelayList(null)
|
||||
DebugBus.info('debug', 'Cleared relay list data')
|
||||
}
|
||||
|
||||
const handleLoadFriendsList = async () => {
|
||||
if (!relayPool || !activeAccount?.pubkey) {
|
||||
DebugBus.warn('debug', 'Please log in to load friends list')
|
||||
@@ -1698,6 +1769,72 @@ const Debug: React.FC<DebugProps> = ({
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Relay List Loading Section */}
|
||||
<div className="settings-section">
|
||||
<h3 className="section-title">Relay List Loading (kind 10002)</h3>
|
||||
<div className="text-sm opacity-70 mb-3">Load your relay list to debug dynamic relay integration:</div>
|
||||
|
||||
<div className="flex gap-2 mb-3 items-center">
|
||||
<button
|
||||
className="btn btn-primary"
|
||||
onClick={handleLoadRelayList}
|
||||
disabled={isLoadingRelayList || !relayPool || !activeAccount}
|
||||
>
|
||||
{isLoadingRelayList ? (
|
||||
<>
|
||||
<FontAwesomeIcon icon={faSpinner} className="animate-spin mr-2" />
|
||||
Loading...
|
||||
</>
|
||||
) : (
|
||||
'Load Relay List'
|
||||
)}
|
||||
</button>
|
||||
<button
|
||||
className="btn btn-secondary ml-auto"
|
||||
onClick={handleClearRelayList}
|
||||
disabled={relayListEvents.length === 0}
|
||||
>
|
||||
Clear
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="flex gap-4 mb-3 text-sm">
|
||||
<Stat label="total" value={tLoadRelayList} />
|
||||
<Stat label="first event" value={tFirstRelayList} />
|
||||
</div>
|
||||
{relayListEvents.length > 0 && (
|
||||
<div className="mb-3">
|
||||
<div className="text-sm opacity-70 mb-2">Loaded Relay List Events ({relayListEvents.length}):</div>
|
||||
<div className="space-y-2 max-h-96 overflow-y-auto">
|
||||
{relayListEvents.map((evt, idx) => {
|
||||
const relayTags = evt.tags?.filter((t: string[]) => t[0] === 'r') || []
|
||||
|
||||
return (
|
||||
<div key={idx} className="font-mono text-xs p-2 bg-gray-100 dark:bg-gray-800 rounded">
|
||||
<div className="font-semibold mb-1">Relay List Event #{idx + 1}</div>
|
||||
<div className="opacity-70 mb-1">
|
||||
<div>Kind: {evt.kind}</div>
|
||||
<div>Author: {evt.pubkey.slice(0, 16)}...</div>
|
||||
<div>Created: {new Date(evt.created_at * 1000).toLocaleString()}</div>
|
||||
<div>Relays: {relayTags.length}</div>
|
||||
</div>
|
||||
<div className="mt-1">
|
||||
<div className="text-[11px] opacity-70 mb-1">Relay URLs:</div>
|
||||
{relayTags.map((tag, tagIdx) => (
|
||||
<div key={tagIdx} className="text-[10px] opacity-60 break-all">
|
||||
{tag[1]} {tag[2] ? `(${tag[2]})` : ''}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
<div className="opacity-50 mt-1 text-[10px] break-all">ID: {evt.id}</div>
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Web of Trust Section */}
|
||||
<div className="settings-section">
|
||||
<h3 className="section-title">Web of Trust</h3>
|
||||
|
||||
@@ -8,8 +8,8 @@ import { Models, IEventStore } from 'applesauce-core'
|
||||
import { RelayPool } from 'applesauce-relay'
|
||||
import { Hooks } from 'applesauce-react'
|
||||
import { onSyncStateChange, isEventSyncing } from '../services/offlineSyncService'
|
||||
import { RELAYS } from '../config/relays'
|
||||
import { areAllRelaysLocal } from '../utils/helpers'
|
||||
import { getActiveRelayUrls } from '../services/relayManager'
|
||||
import { nip19 } from 'nostr-tools'
|
||||
import { formatDateCompact } from '../utils/bookmarkUtils'
|
||||
import { createDeletionRequest } from '../services/deletionService'
|
||||
@@ -150,10 +150,10 @@ export const HighlightItem: React.FC<HighlightItemProps> = ({
|
||||
setShowOfflineIndicator(false)
|
||||
|
||||
// Update the highlight with all relays after successful sync
|
||||
if (onHighlightUpdate && highlight.isLocalOnly) {
|
||||
if (onHighlightUpdate && highlight.isLocalOnly && relayPool) {
|
||||
const updatedHighlight = {
|
||||
...highlight,
|
||||
publishedRelays: RELAYS,
|
||||
publishedRelays: getActiveRelayUrls(relayPool),
|
||||
isLocalOnly: false,
|
||||
isOfflineCreated: false
|
||||
}
|
||||
@@ -164,7 +164,7 @@ export const HighlightItem: React.FC<HighlightItemProps> = ({
|
||||
})
|
||||
|
||||
return unsubscribe
|
||||
}, [highlight, onHighlightUpdate])
|
||||
}, [highlight, onHighlightUpdate, relayPool])
|
||||
|
||||
useEffect(() => {
|
||||
if (isSelected && itemRef.current) {
|
||||
@@ -224,7 +224,8 @@ export const HighlightItem: React.FC<HighlightItemProps> = ({
|
||||
const getHighlightLinks = () => {
|
||||
// Encode the highlight event itself (kind 9802) as a nevent
|
||||
// Get non-local relays for the hint
|
||||
const relayHints = RELAYS.filter(r =>
|
||||
const activeRelays = relayPool ? getActiveRelayUrls(relayPool) : []
|
||||
const relayHints = activeRelays.filter(r =>
|
||||
!r.includes('localhost') && !r.includes('127.0.0.1')
|
||||
).slice(0, 3) // Include up to 3 relay hints
|
||||
|
||||
@@ -260,7 +261,7 @@ export const HighlightItem: React.FC<HighlightItemProps> = ({
|
||||
}
|
||||
|
||||
// Publish to all configured relays - let the relay pool handle connection state
|
||||
const targetRelays = RELAYS
|
||||
const targetRelays = getActiveRelayUrls(relayPool)
|
||||
|
||||
|
||||
await relayPool.publish(targetRelays, event)
|
||||
@@ -328,7 +329,8 @@ export const HighlightItem: React.FC<HighlightItemProps> = ({
|
||||
}
|
||||
|
||||
// Fallback: show all relays we queried (where this was likely fetched from)
|
||||
const relayNames = RELAYS.map(url =>
|
||||
const activeRelays = relayPool ? getActiveRelayUrls(relayPool) : []
|
||||
const relayNames = activeRelays.map(url =>
|
||||
url.replace(/^wss?:\/\//, '').replace(/\/$/, '')
|
||||
)
|
||||
return {
|
||||
|
||||
@@ -564,27 +564,6 @@ const Me: React.FC<MeProps> = ({
|
||||
? buildArchiveOnly(linksWithProgress, { kind: 'external' })
|
||||
: []
|
||||
|
||||
// Debug logs for archive filter issues
|
||||
if (readingProgressFilter === 'archive') {
|
||||
const ids = Array.from(new Set([
|
||||
...archiveController.getMarkedIds(),
|
||||
...readingProgressController.getMarkedAsReadIds()
|
||||
]))
|
||||
const readIds = new Set(reads.map(i => i.id))
|
||||
const matches = ids.filter(id => readIds.has(id))
|
||||
const nonMatches = ids.filter(id => !readIds.has(id)).slice(0, 5)
|
||||
console.log('[archive][me] counts', {
|
||||
reads: reads.length,
|
||||
filteredReads: filteredReads.length,
|
||||
links: links.length,
|
||||
linksWithProgress: linksWithProgress.length,
|
||||
filteredLinks: filteredLinks.length,
|
||||
markedIds: ids.length,
|
||||
sampleMarked: ids.slice(0, 3),
|
||||
matches: matches.length,
|
||||
nonMatches
|
||||
})
|
||||
}
|
||||
const sections: Array<{ key: string; title: string; items: IndividualBookmark[] }> =
|
||||
groupingMode === 'flat'
|
||||
? [{ key: 'all', title: `All Bookmarks (${filteredBookmarks.length})`, items: filteredBookmarks }]
|
||||
|
||||
@@ -8,8 +8,8 @@ import { useNavigate } from 'react-router-dom'
|
||||
import { HighlightItem } from './HighlightItem'
|
||||
import { BlogPostPreview, fetchBlogPostsFromAuthors } from '../services/exploreService'
|
||||
import { fetchHighlights } from '../services/highlightService'
|
||||
import { RELAYS } from '../config/relays'
|
||||
import { KINDS } from '../config/kinds'
|
||||
import { getActiveRelayUrls } from '../services/relayManager'
|
||||
import AuthorCard from './AuthorCard'
|
||||
import BlogPostCard from './BlogPostCard'
|
||||
import { BlogPostSkeleton, HighlightSkeleton } from './Skeletons'
|
||||
@@ -109,7 +109,7 @@ const Profile: React.FC<ProfileProps> = ({
|
||||
})
|
||||
|
||||
// Fetch writings in background (no limit for single user profile)
|
||||
fetchBlogPostsFromAuthors(relayPool, [pubkey], RELAYS, undefined, null)
|
||||
fetchBlogPostsFromAuthors(relayPool, [pubkey], getActiveRelayUrls(relayPool), undefined, null)
|
||||
.then(writings => {
|
||||
writings.forEach(w => eventStore.add(w.event))
|
||||
})
|
||||
|
||||
@@ -11,12 +11,11 @@ export const RELAYS = [
'wss://relay.damus.io',
'wss://nos.lol',
'wss://relay.nostr.band',
'wss://relay.dergigi.com',
'wss://wot.dergigi.com',
'wss://relay.snort.social',
'wss://nostr-pub.wellorder.net',
'wss://purplepag.es',
'wss://relay.primal.net',
'wss://proxy.nostr-relay.app/5d0d38afc49c4b84ca0da951a336affa18438efed302aeedfa92eb8b0d3fcb87'
'wss://proxy.nostr-relay.app/5d0d38afc49c4b84ca0da951a336affa18438efed302aeedfa92eb8b0d3fcb87',
]
@@ -3,7 +3,6 @@ import { IEventStore } from 'applesauce-core'
|
||||
import { NostrEvent } from 'nostr-tools'
|
||||
import { queryEvents } from './dataFetch'
|
||||
import { KINDS } from '../config/kinds'
|
||||
import { RELAYS } from '../config/relays'
|
||||
import { ARCHIVE_EMOJI } from './reactionService'
|
||||
import { nip19 } from 'nostr-tools'
|
||||
|
||||
@@ -35,14 +34,12 @@ class ArchiveController {
|
||||
if (!this.markedIds.has(id)) {
|
||||
this.markedIds.add(id)
|
||||
this.emit()
|
||||
console.log('[archive] mark() added', id.slice(0, 48))
|
||||
}
|
||||
}
|
||||
|
||||
unmark(id: string): void {
|
||||
if (this.markedIds.delete(id)) {
|
||||
this.emit()
|
||||
console.log('[archive] unmark() removed', id.slice(0, 48))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -61,7 +58,7 @@ class ArchiveController {
|
||||
reset(): void {
|
||||
this.generation++
|
||||
if (this.timelineSubscription) {
|
||||
try { this.timelineSubscription.unsubscribe() } catch (e) { console.warn('[archive] timeline unsub error', e) }
|
||||
try { this.timelineSubscription.unsubscribe() } catch { /* ignore */ }
|
||||
this.timelineSubscription = null
|
||||
}
|
||||
this.markedIds = new Set()
|
||||
@@ -80,13 +77,11 @@ class ArchiveController {
|
||||
const startGen = this.generation
|
||||
|
||||
if (!force && this.isLoadedFor(pubkey)) {
|
||||
console.log('[archive] start() skipped - already loaded for pubkey')
|
||||
return
|
||||
}
|
||||
|
||||
// Mark as loaded immediately (fetch runs non-blocking)
|
||||
this.lastLoadedPubkey = pubkey
|
||||
console.log('[archive] start() begin for pubkey:', pubkey.slice(0, 12), '...')
|
||||
|
||||
// Handlers for streaming queries
|
||||
const handleUrlReaction = (evt: NostrEvent) => {
|
||||
@@ -95,7 +90,6 @@ class ArchiveController {
|
||||
if (!rTag) return
|
||||
this.markedIds.add(rTag)
|
||||
this.emit()
|
||||
console.log('[archive] mark url:', rTag)
|
||||
}
|
||||
|
||||
const handleEventReaction = (evt: NostrEvent) => {
|
||||
@@ -110,7 +104,6 @@ class ArchiveController {
|
||||
const naddr = nip19.naddrEncode({ kind, pubkey, identifier })
|
||||
this.markedIds.add(naddr)
|
||||
this.emit()
|
||||
console.log('[archive] mark naddr via a-tag:', naddr.slice(0, 24), '...')
|
||||
return
|
||||
}
|
||||
} catch { /* ignore malformed a-tag */ }
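The a-tag handled above carries an addressable-event coordinate in the `kind:pubkey:identifier` form, which the controller turns into an naddr. A minimal parser for that format (a sketch; the controller's own parsing lines are elided from this hunk):

import { nip19 } from 'nostr-tools'

// Convert a "kind:pubkey:identifier" coordinate into an naddr, or null if malformed.
function coordinateToNaddr(aCoord: string): string | null {
  const [kindStr, pubkey, ...rest] = aCoord.split(':')
  const kind = Number(kindStr)
  const identifier = rest.join(':') // identifiers may themselves contain ':'
  if (!Number.isInteger(kind) || !pubkey) return null
  try {
    return nip19.naddrEncode({ kind, pubkey, identifier })
  } catch {
    return null
  }
}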
|
||||
@@ -118,14 +111,13 @@ class ArchiveController {
|
||||
const eTag = evt.tags.find(t => t[0] === 'e')?.[1]
|
||||
if (!eTag) return
|
||||
this.pendingEventIds.add(eTag)
|
||||
console.log('[archive] pending event id:', eTag)
|
||||
}
|
||||
|
||||
try {
|
||||
// Stream kind:17 and kind:7 in parallel
|
||||
const [kind17, kind7] = await Promise.all([
|
||||
queryEvents(relayPool, { kinds: [17], authors: [pubkey] }, { relayUrls: RELAYS, onEvent: handleUrlReaction }),
|
||||
queryEvents(relayPool, { kinds: [7], authors: [pubkey] }, { relayUrls: RELAYS, onEvent: handleEventReaction })
|
||||
queryEvents(relayPool, { kinds: [17], authors: [pubkey] }, { onEvent: handleUrlReaction }),
|
||||
queryEvents(relayPool, { kinds: [7], authors: [pubkey] }, { onEvent: handleEventReaction })
|
||||
])
|
||||
|
||||
if (startGen !== this.generation) return
|
||||
@@ -133,27 +125,23 @@ class ArchiveController {
|
||||
// Include EOSE events
|
||||
kind17.forEach(handleUrlReaction)
|
||||
kind7.forEach(handleEventReaction)
|
||||
console.log('[archive] EOSE sizes kind17:', kind17.length, 'kind7:', kind7.length, 'pendingEventIds:', this.pendingEventIds.size)
|
||||
|
||||
if (this.pendingEventIds.size > 0) {
|
||||
// Fetch referenced articles (kind:30023) and map event IDs to naddr
|
||||
const ids = Array.from(this.pendingEventIds)
|
||||
const articleEvents = await queryEvents(relayPool, { kinds: [KINDS.BlogPost], ids }, { relayUrls: RELAYS })
|
||||
console.log('[archive] fetched articles for mapping:', articleEvents.length)
|
||||
const articleEvents = await queryEvents(relayPool, { kinds: [KINDS.BlogPost], ids })
|
||||
for (const article of articleEvents) {
|
||||
const dTag = article.tags.find(t => t[0] === 'd')?.[1]
|
||||
if (!dTag) continue
|
||||
try {
|
||||
const naddr = nip19.naddrEncode({ kind: KINDS.BlogPost, pubkey: article.pubkey, identifier: dTag })
|
||||
this.markedIds.add(naddr)
|
||||
console.log('[archive] mark naddr:', naddr.slice(0, 24), '...')
|
||||
} catch {
|
||||
// skip invalid
|
||||
}
|
||||
}
|
||||
this.emit()
|
||||
}
|
||||
console.log('[archive] total marked ids:', this.markedIds.size)
|
||||
|
||||
// Try immediate mapping via eventStore for any still-pending e-ids
|
||||
if (this.pendingEventIds.size > 0) {
|
||||
@@ -167,7 +155,6 @@ class ArchiveController {
|
||||
if (dTag) {
|
||||
const naddr = nip19.naddrEncode({ kind: KINDS.BlogPost, pubkey: evt.pubkey, identifier: dTag })
|
||||
this.markedIds.add(naddr)
|
||||
console.log('[archive] map via eventStore naddr:', naddr.slice(0, 24), '...')
|
||||
}
|
||||
} else {
|
||||
stillPending.add(eId)
|
||||
@@ -178,7 +165,7 @@ class ArchiveController {
|
||||
if (stillPending.size > 0) {
|
||||
// Subscribe to future 30023 arrivals to finalize mapping
|
||||
if (this.timelineSubscription) {
|
||||
try { this.timelineSubscription.unsubscribe() } catch (e) { console.warn('[archive] timeline unsub error', e) }
|
||||
try { this.timelineSubscription.unsubscribe() } catch { /* ignore */ }
|
||||
this.timelineSubscription = null
|
||||
}
|
||||
const sub$ = eventStore.timeline({ kinds: [KINDS.BlogPost] })
|
||||
@@ -193,16 +180,14 @@ class ArchiveController {
|
||||
const naddr = nip19.naddrEncode({ kind: KINDS.BlogPost, pubkey: evt.pubkey, identifier: dTag })
|
||||
this.markedIds.add(naddr)
|
||||
this.pendingEventIds.delete(evt.id)
|
||||
console.log('[archive] map via timeline naddr:', naddr.slice(0, 24), '...')
|
||||
this.emit()
|
||||
} catch (e) { console.warn('[archive] map via timeline encode error', e) }
|
||||
} catch { /* ignore */ }
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// Non-blocking fetch; ignore errors here
|
||||
console.warn('[archive] start() error:', err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -67,15 +67,12 @@ export const processApplesauceBookmarks = (
|
||||
): IndividualBookmark[] => {
|
||||
if (!bookmarks) return []
|
||||
|
||||
console.log('[BOOKMARK_TS] processApplesauceBookmarks called with parentCreatedAt:', parentCreatedAt, 'isPrivate:', isPrivate)
|
||||
|
||||
if (typeof bookmarks === 'object' && bookmarks !== null && !Array.isArray(bookmarks)) {
|
||||
const applesauceBookmarks = bookmarks as ApplesauceBookmarks
|
||||
const allItems: IndividualBookmark[] = []
|
||||
|
||||
// Process notes (EventPointer[])
|
||||
if (applesauceBookmarks.notes) {
|
||||
console.log('[BOOKMARK_TS] Processing', applesauceBookmarks.notes.length, 'notes with timestamp:', parentCreatedAt || 0)
|
||||
applesauceBookmarks.notes.forEach((note: EventPointer) => {
|
||||
allItems.push({
|
||||
id: note.id,
|
||||
@@ -94,7 +91,6 @@ export const processApplesauceBookmarks = (
|
||||
|
||||
// Process articles (AddressPointer[])
|
||||
if (applesauceBookmarks.articles) {
|
||||
console.log('[BOOKMARK_TS] Processing', applesauceBookmarks.articles.length, 'articles with timestamp:', parentCreatedAt || 0)
|
||||
applesauceBookmarks.articles.forEach((article: AddressPointer) => {
|
||||
// Convert AddressPointer to coordinate format: kind:pubkey:identifier
|
||||
const coordinate = `${article.kind}:${article.pubkey}:${article.identifier || ''}`
|
||||
@@ -133,7 +129,6 @@ export const processApplesauceBookmarks = (
|
||||
|
||||
// Process URLs (string[])
|
||||
if (applesauceBookmarks.urls) {
|
||||
console.log('[BOOKMARK_TS] Processing', applesauceBookmarks.urls.length, 'URLs with timestamp:', parentCreatedAt || 0)
|
||||
applesauceBookmarks.urls.forEach((url: string) => {
|
||||
allItems.push({
|
||||
id: `url-${url}`,
|
||||
@@ -202,7 +197,6 @@ export function hydrateItems(
|
||||
.filter(item => {
|
||||
// Filter out bookmark list events (they're containers, not content)
|
||||
const isBookmarkListEvent = item.kind === 10003 || item.kind === 30003 || item.kind === 30001
|
||||
console.log('[BOOKMARK_TS] After hydration - id:', item.id, 'kind:', item.kind, 'isBookmarkListEvent:', isBookmarkListEvent, 'content:', item.content?.substring(0, 50))
|
||||
return !isBookmarkListEvent
|
||||
})
|
||||
}
|
||||
|
||||
@@ -121,7 +121,6 @@ export async function collectBookmarksFromEvents(
|
||||
const decryptJobs: Array<{ evt: NostrEvent; metadata: { dTag?: string; setTitle?: string; setDescription?: string; setImage?: string } }> = []
|
||||
|
||||
for (const evt of bookmarkListEvents) {
|
||||
console.log('[BOOKMARK_TS] Processing bookmark event', evt.id, 'kind:', evt.kind, 'created_at:', evt.created_at)
|
||||
newestCreatedAt = Math.max(newestCreatedAt, evt.created_at || 0)
|
||||
if (!latestContent && evt.content && !Helpers.hasHiddenContent(evt)) latestContent = evt.content
|
||||
if (Array.isArray(evt.tags)) allTags = allTags.concat(evt.tags)
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { RelayPool } from 'applesauce-relay'
|
||||
import { NostrEvent } from 'nostr-tools'
|
||||
import { Helpers } from 'applesauce-core'
|
||||
import { RELAYS } from '../config/relays'
|
||||
import { KINDS } from '../config/kinds'
|
||||
import { ARCHIVE_EMOJI } from './reactionService'
|
||||
import { BlogPostPreview } from './exploreService'
|
||||
@@ -30,8 +29,8 @@ export async function fetchReadArticles(
|
||||
try {
|
||||
// Fetch kind:7 and kind:17 reactions in parallel
|
||||
const [kind7Events, kind17Events] = await Promise.all([
|
||||
queryEvents(relayPool, { kinds: [KINDS.ReactionToEvent], authors: [userPubkey] }, { relayUrls: RELAYS }),
|
||||
queryEvents(relayPool, { kinds: [KINDS.ReactionToUrl], authors: [userPubkey] }, { relayUrls: RELAYS })
|
||||
queryEvents(relayPool, { kinds: [KINDS.ReactionToEvent], authors: [userPubkey] }),
|
||||
queryEvents(relayPool, { kinds: [KINDS.ReactionToUrl], authors: [userPubkey] })
|
||||
])
|
||||
|
||||
const readArticles: ReadArticle[] = []
|
||||
@@ -115,8 +114,7 @@ export async function fetchReadArticlesWithData(
|
||||
|
||||
const articleEvents = await queryEvents(
|
||||
relayPool,
|
||||
{ kinds: [KINDS.BlogPost], ids: eventIds },
|
||||
{ relayUrls: RELAYS }
|
||||
{ kinds: [KINDS.BlogPost], ids: eventIds }
|
||||
)
|
||||
|
||||
// Deduplicate article events by ID
|
||||
|
||||
@@ -2,8 +2,8 @@ import { RelayPool, completeOnEose, onlyEvents } from 'applesauce-relay'
|
||||
import { IAccount } from 'applesauce-accounts'
|
||||
import { NostrEvent } from 'nostr-tools'
|
||||
import { lastValueFrom, takeUntil, timer, toArray } from 'rxjs'
|
||||
import { RELAYS } from '../config/relays'
|
||||
import { EventFactory } from 'applesauce-factory'
|
||||
import { getActiveRelayUrls } from './relayManager'
|
||||
|
||||
const ARCHIVE_EMOJI = '📚'
|
||||
|
||||
@@ -35,7 +35,6 @@ export async function createEventReaction(
|
||||
]
|
||||
if (options?.aCoord) {
|
||||
tags.push(['a', options.aCoord])
|
||||
console.log('[archive] createEventReaction add a-tag:', options.aCoord)
|
||||
}
|
||||
|
||||
const draft = await factory.create(async () => ({
|
||||
@@ -49,7 +48,7 @@ export async function createEventReaction(
|
||||
|
||||
|
||||
// Publish to relays
|
||||
await relayPool.publish(RELAYS, signed)
|
||||
await relayPool.publish(getActiveRelayUrls(relayPool), signed)
|
||||
|
||||
|
||||
return signed
|
||||
@@ -99,7 +98,7 @@ export async function createWebsiteReaction(
|
||||
|
||||
|
||||
// Publish to relays
|
||||
await relayPool.publish(RELAYS, signed)
|
||||
await relayPool.publish(getActiveRelayUrls(relayPool), signed)
|
||||
|
||||
|
||||
return signed
|
||||
@@ -122,7 +121,7 @@ export async function deleteReaction(
|
||||
created_at: Math.floor(Date.now() / 1000)
|
||||
}))
|
||||
const signed = await factory.sign(draft)
|
||||
await relayPool.publish(RELAYS, signed)
|
||||
await relayPool.publish(getActiveRelayUrls(relayPool), signed)
|
||||
return signed
|
||||
}
|
||||
|
||||
@@ -146,7 +145,7 @@ export async function hasMarkedEventAsRead(
|
||||
}
|
||||
|
||||
const events$ = relayPool
|
||||
.req(RELAYS, filter)
|
||||
.req(getActiveRelayUrls(relayPool), filter)
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
completeOnEose(),
|
||||
@@ -199,7 +198,7 @@ export async function hasMarkedWebsiteAsRead(
|
||||
}
|
||||
|
||||
const events$ = relayPool
|
||||
.req(RELAYS, filter)
|
||||
.req(getActiveRelayUrls(relayPool), filter)
|
||||
.pipe(
|
||||
onlyEvents(),
|
||||
completeOnEose(),
|
||||
|
||||
@@ -3,13 +3,11 @@ import { IEventStore } from 'applesauce-core'
|
||||
import { NostrEvent } from 'nostr-tools'
|
||||
import { queryEvents } from './dataFetch'
|
||||
import { KINDS } from '../config/kinds'
|
||||
import { RELAYS } from '../config/relays'
|
||||
import { processReadingProgress } from './readingDataProcessor'
|
||||
import { ReadItem } from './readsService'
|
||||
import { ARCHIVE_EMOJI } from './reactionService'
|
||||
import { nip19 } from 'nostr-tools'
|
||||
|
||||
console.log('[readingProgress] Module loaded')
|
||||
|
||||
type ProgressMapCallback = (progressMap: Map<string, number>) => void
|
||||
type LoadingCallback = (loading: boolean) => void
|
||||
@@ -176,17 +174,14 @@ class ReadingProgressController {
|
||||
const { relayPool, eventStore, pubkey, force = false } = params
|
||||
const startGeneration = this.generation
|
||||
|
||||
console.log('[readingProgress] start() called for pubkey:', pubkey.slice(0, 16), '...', 'force:', force)
|
||||
|
||||
// Skip if already loaded for this pubkey and not forcing
|
||||
if (!force && this.isLoadedFor(pubkey)) {
|
||||
console.log('[readingProgress] Already loaded for pubkey, skipping')
|
||||
return
|
||||
}
|
||||
|
||||
// Prevent concurrent starts
|
||||
if (this.isLoading) {
|
||||
console.log('[readingProgress] Already loading, skipping concurrent start')
|
||||
return
|
||||
}
|
||||
|
||||
@@ -212,7 +207,6 @@ class ReadingProgressController {
|
||||
this.timelineSubscription = null
|
||||
}
|
||||
|
||||
console.log('[readingProgress] Setting up eventStore subscription...')
|
||||
const timeline$ = eventStore.timeline({
|
||||
kinds: [KINDS.ReadingProgress],
|
||||
authors: [pubkey]
|
||||
@@ -223,20 +217,17 @@ class ReadingProgressController {
|
||||
if (!Array.isArray(localEvents) || localEvents.length === 0) return
|
||||
this.processEvents(localEvents)
|
||||
})
|
||||
console.log('[readingProgress] EventStore subscription ready - updates streaming')
|
||||
|
||||
// Mark as loaded immediately - queries run in background non-blocking
|
||||
this.lastLoadedPubkey = pubkey
|
||||
|
||||
// Query reading progress from relays in background (non-blocking, fire-and-forget)
|
||||
console.log('[readingProgress] Starting background relay query for reading progress...')
|
||||
queryEvents(relayPool, {
|
||||
kinds: [KINDS.ReadingProgress],
|
||||
authors: [pubkey]
|
||||
}, { relayUrls: RELAYS })
|
||||
})
|
||||
.then((relayEvents) => {
|
||||
if (startGeneration !== this.generation) return
|
||||
console.log('[readingProgress] Got reading progress from relays:', relayEvents.length)
|
||||
if (relayEvents.length > 0) {
|
||||
relayEvents.forEach(e => eventStore.add(e))
|
||||
this.processEvents(relayEvents)
|
||||
@@ -249,10 +240,8 @@ class ReadingProgressController {
|
||||
})
|
||||
|
||||
// Load mark-as-read reactions in background (non-blocking, streaming)
|
||||
console.log('[readingProgress] Starting background relay query for mark-as-read reactions...')
|
||||
this.loadMarkAsReadReactions(relayPool, eventStore, pubkey, startGeneration)
|
||||
.then(() => {
|
||||
console.log('[readingProgress] Mark-as-read reactions loading complete')
|
||||
})
|
||||
.catch((err) => {
|
||||
console.warn('[readingProgress] Mark-as-read reactions loading failed:', err)
|
||||
@@ -265,9 +254,6 @@ class ReadingProgressController {
|
||||
this.setLoading(false)
|
||||
}
|
||||
this.isLoading = false
|
||||
console.log('[readingProgress] === LOADED ===')
|
||||
console.log('[readingProgress] progressMap keys:', Array.from(this.currentProgressMap.keys()))
|
||||
console.log('[readingProgress] markedAsReadIds:', Array.from(this.markedAsReadIds))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -318,7 +304,6 @@ class ReadingProgressController {
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Stream kind:17 (URL reactions) and kind:7 (event reactions) in parallel
|
||||
console.log('[readingProgress] Querying kind:17 and kind:7 reactions (streaming)...')
|
||||
const seenReactionIds = new Set<string>()
|
||||
|
||||
const handleUrlReaction = (evt: NostrEvent) => {
|
||||
@@ -343,8 +328,8 @@ class ReadingProgressController {
|
||||
|
||||
// Fire queries with onEvent callbacks for streaming behavior
|
||||
const [kind17Events, kind7Events] = await Promise.all([
|
||||
queryEvents(relayPool, { kinds: [17], authors: [pubkey] }, { relayUrls: RELAYS, onEvent: handleUrlReaction }),
|
||||
queryEvents(relayPool, { kinds: [7], authors: [pubkey] }, { relayUrls: RELAYS, onEvent: handleEventReaction })
|
||||
queryEvents(relayPool, { kinds: [17], authors: [pubkey] }, { onEvent: handleUrlReaction }),
|
||||
queryEvents(relayPool, { kinds: [7], authors: [pubkey] }, { onEvent: handleEventReaction })
|
||||
])
|
||||
|
||||
if (generation !== this.generation) return
|
||||
@@ -356,7 +341,7 @@ class ReadingProgressController {
|
||||
if (pendingEventIds.size > 0) {
|
||||
// Fetch referenced 30023 events, streaming not required here
|
||||
const ids = Array.from(pendingEventIds)
|
||||
const articleEvents = await queryEvents(relayPool, { kinds: [KINDS.BlogPost], ids }, { relayUrls: RELAYS })
|
||||
const articleEvents = await queryEvents(relayPool, { kinds: [KINDS.BlogPost], ids })
|
||||
const eventIdToNaddr = new Map<string, string>()
|
||||
for (const article of articleEvents) {
|
||||
const dTag = article.tags.find(t => t[0] === 'd')?.[1]
|
||||
@@ -379,7 +364,6 @@ class ReadingProgressController {
|
||||
this.emitMarkedAsReadChanged()
|
||||
}
|
||||
|
||||
console.log('[readingProgress] Mark-as-read reactions complete. Total:', Array.from(this.markedAsReadIds).length)
|
||||
} catch (err) {
|
||||
console.warn('[readingProgress] Failed to load mark-as-read reactions:', err)
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ import { Helpers } from 'applesauce-core'
|
||||
import { Bookmark } from '../types/bookmarks'
|
||||
import { fetchReadArticles } from './libraryService'
|
||||
import { queryEvents } from './dataFetch'
|
||||
import { RELAYS } from '../config/relays'
|
||||
import { KINDS } from '../config/kinds'
|
||||
import { classifyBookmarkType } from '../utils/bookmarkTypeClassifier'
|
||||
import { nip19 } from 'nostr-tools'
|
||||
@@ -44,7 +43,7 @@ export async function fetchAllReads(
|
||||
try {
|
||||
// Fetch all data sources in parallel
|
||||
const [progressEvents, markedAsReadArticles] = await Promise.all([
|
||||
queryEvents(relayPool, { kinds: [KINDS.ReadingProgress], authors: [userPubkey] }, { relayUrls: RELAYS }),
|
||||
queryEvents(relayPool, { kinds: [KINDS.ReadingProgress], authors: [userPubkey] }),
|
||||
fetchReadArticles(relayPool, userPubkey)
|
||||
])
|
||||
|
||||
@@ -130,8 +129,7 @@ export async function fetchAllReads(
|
||||
|
||||
const events = await queryEvents(
|
||||
relayPool,
|
||||
{ kinds: [KINDS.BlogPost], authors, '#d': identifiers },
|
||||
{ relayUrls: RELAYS }
|
||||
{ kinds: [KINDS.BlogPost], authors, '#d': identifiers }
|
||||
)
|
||||
|
||||
// Merge event data into ReadItems and emit
|
||||
|
||||
194 src/services/relayListService.ts (new file)

@@ -0,0 +1,194 @@
import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { queryEvents } from './dataFetch'

export interface UserRelayInfo {
  url: string
  mode?: 'read' | 'write' | 'both'
}

/**
 * Loads user's relay list from kind 10002 (NIP-65)
 */
export async function loadUserRelayList(
  relayPool: RelayPool,
  pubkey: string,
  options?: {
    onUpdate?: (relays: UserRelayInfo[]) => void
  }
): Promise<UserRelayInfo[]> {
  try {
    console.log('[relayListService] Loading user relay list for pubkey:', pubkey.slice(0, 16) + '...')
    console.log('[relayListService] Available relays:', Array.from(relayPool.relays.keys()))

    console.log('[relayListService] Starting query for kind 10002...')
    const startTime = Date.now()

    // Try querying with streaming callback for faster results
    const events: NostrEvent[] = []
    const eventsMap = new Map<string, NostrEvent>()

    const result = await queryEvents(relayPool, {
      kinds: [10002],
      authors: [pubkey],
      limit: 10
    }, {
      onEvent: (evt) => {
        // Deduplicate by id and keep most recent
        const existing = eventsMap.get(evt.id)
        if (!existing || evt.created_at > existing.created_at) {
          eventsMap.set(evt.id, evt)
          // Update events array with deduplicated events
          events.length = 0
          events.push(...Array.from(eventsMap.values()))

          // Stream immediate updates to caller using the newest event
          if (options?.onUpdate) {
            const tags = evt.tags || []
            const relays: UserRelayInfo[] = []
            for (const tag of tags) {
              if (tag[0] === 'r' && tag[1]) {
                const url = tag[1]
                const mode = (tag[2] as 'read' | 'write' | undefined) || 'both'
                relays.push({ url, mode })
              }
            }
            if (relays.length > 0) {
              options.onUpdate(relays)
            }
          }
        }
      }
    })

    // Use the streaming results if we got any, otherwise fall back to the full result
    const finalEvents = events.length > 0 ? events : result

    const queryTime = Date.now() - startTime
    console.log('[relayListService] Query completed in', queryTime, 'ms')

    // Also try a broader query to see if we get any events at all
    console.log('[relayListService] Trying broader query for any kind 10002 events...')
    const allEvents = await queryEvents(relayPool, {
      kinds: [10002],
      limit: 5
    })
    console.log('[relayListService] Found', allEvents.length, 'total kind 10002 events from any author')

    console.log('[relayListService] Found', finalEvents.length, 'kind 10002 events')
    if (finalEvents.length > 0) {
      console.log('[relayListService] Event details:', finalEvents.map(e => ({ id: e.id, created_at: e.created_at, tags: e.tags.length })))
    }

    if (finalEvents.length === 0) return []

    // Get most recent event
    const sortedEvents = finalEvents.sort((a, b) => b.created_at - a.created_at)
    const relayListEvent = sortedEvents[0]

    const relays: UserRelayInfo[] = []
    for (const tag of relayListEvent.tags) {
      if (tag[0] === 'r' && tag[1]) {
        const url = tag[1]
        const mode = tag[2] as 'read' | 'write' | undefined
        relays.push({
          url,
          mode: mode || 'both'
        })
      }
    }

    console.log('[relayListService] Parsed', relays.length, 'relays from event')
    return relays
  } catch (error) {
    console.error('Failed to load user relay list:', error)
    return []
  }
}

/**
 * Loads blocked relays from kind 10006 (NIP-51 mute list)
 */
export async function loadBlockedRelays(
  relayPool: RelayPool,
  pubkey: string
): Promise<string[]> {
  try {
    const events = await queryEvents(relayPool, {
      kinds: [10006],
      authors: [pubkey]
    })

    if (events.length === 0) return []

    // Get most recent event
    const sortedEvents = events.sort((a, b) => b.created_at - a.created_at)
    const muteListEvent = sortedEvents[0]

    const blocked: string[] = []
    for (const tag of muteListEvent.tags) {
      if (tag[0] === 'r' && tag[1]) {
        blocked.push(tag[1])
      }
    }

    return blocked
  } catch (error) {
    console.error('Failed to load blocked relays:', error)
    return []
  }
}

/**
 * Computes final relay set by merging inputs and removing blocked relays
 */
export function computeRelaySet(params: {
  hardcoded: string[]
  bunker?: string[]
  userList?: UserRelayInfo[]
  blocked?: string[]
  alwaysIncludeLocal: string[]
}): string[] {
  const {
    hardcoded,
    bunker = [],
    userList = [],
    blocked = [],
    alwaysIncludeLocal
  } = params

  const relaySet = new Set<string>()
  const blockedSet = new Set(blocked)

  // Helper to check if relay should be included
  const shouldInclude = (url: string): boolean => {
    // Always include local relays
    if (alwaysIncludeLocal.includes(url)) return true
    // Otherwise check if blocked
    return !blockedSet.has(url)
  }

  // Add hardcoded relays
  for (const url of hardcoded) {
    if (shouldInclude(url)) relaySet.add(url)
  }

  // Add bunker relays
  for (const url of bunker) {
    if (shouldInclude(url)) relaySet.add(url)
  }

  // Add user relays (treating 'both' and 'read' as applicable for queries)
  for (const relay of userList) {
    if (shouldInclude(relay.url)) relaySet.add(relay.url)
  }

  // Always ensure local relays are present
  for (const url of alwaysIncludeLocal) {
    relaySet.add(url)
  }

  return Array.from(relaySet)
}
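Two properties of computeRelaySet worth keeping in mind: blocked relays are dropped from every input list, but a relay listed in alwaysIncludeLocal survives even if it is also blocked. A small illustration (example URLs only; the expected result follows directly from the function body above):

import { computeRelaySet } from './relayListService'

const result = computeRelaySet({
  hardcoded: ['wss://relay.damus.io', 'wss://spam.example.com'],
  bunker: ['wss://bunker.example.com'],
  userList: [{ url: 'wss://relay.example.org', mode: 'read' }],
  blocked: ['wss://spam.example.com', 'ws://localhost:10547'],
  alwaysIncludeLocal: ['ws://localhost:10547']
})
// => ['wss://relay.damus.io', 'wss://bunker.example.com', 'wss://relay.example.org', 'ws://localhost:10547']
// 'wss://spam.example.com' is removed as blocked; the local relay stays even though it is also listed as blocked.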
86 src/services/relayManager.ts (new file)

@@ -0,0 +1,86 @@
import { RelayPool } from 'applesauce-relay'
import { prioritizeLocalRelays } from '../utils/helpers'

/**
 * Local relays that are always included
 */
export const ALWAYS_LOCAL_RELAYS = [
  'ws://localhost:10547',
  'ws://localhost:4869'
]

/**
 * Gets active relay URLs from the relay pool
 */
export function getActiveRelayUrls(relayPool: RelayPool): string[] {
  const urls = Array.from(relayPool.relays.keys())
  return prioritizeLocalRelays(urls)
}

/**
 * Normalizes a relay URL to match what applesauce-relay stores internally
 * Adds trailing slash for URLs without a path
 */
function normalizeRelayUrl(url: string): string {
  try {
    const parsed = new URL(url)
    // If the pathname is empty or just "/", ensure it ends with "/"
    if (parsed.pathname === '' || parsed.pathname === '/') {
      return url.endsWith('/') ? url : url + '/'
    }
    return url
  } catch {
    // If URL parsing fails, return as-is
    return url
  }
}

/**
 * Applies a new relay set to the pool: adds missing relays, removes extras
 */
export function applyRelaySetToPool(
  relayPool: RelayPool,
  finalUrls: string[]
): void {
  // Normalize all URLs to match pool's internal format
  const currentUrls = new Set(Array.from(relayPool.relays.keys()))
  const normalizedTargetUrls = new Set(finalUrls.map(normalizeRelayUrl))

  console.log('[relayManager] applyRelaySetToPool called')
  console.log('[relayManager] Current pool has:', currentUrls.size, 'relays')
  console.log('[relayManager] Target has:', finalUrls.length, 'relays')

  // Add new relays (use original URLs for adding, not normalized)
  const toAdd = finalUrls.filter(url => !currentUrls.has(normalizeRelayUrl(url)))
  console.log('[relayManager] Will add:', toAdd.length, 'relays', toAdd)
  if (toAdd.length > 0) {
    relayPool.group(toAdd)
  }

  // Remove relays not in target (but always keep local relays)
  const toRemove: string[] = []
  for (const url of currentUrls) {
    // Check if this normalized URL is in the target set
    if (!normalizedTargetUrls.has(url)) {
      // Also check if it's a local relay (check both normalized and original forms)
      const isLocal = ALWAYS_LOCAL_RELAYS.some(localUrl =>
        normalizeRelayUrl(localUrl) === url || localUrl === url
      )
      if (!isLocal) {
        toRemove.push(url)
      }
    }
  }
  console.log('[relayManager] Will remove:', toRemove.length, 'relays', toRemove)

  for (const url of toRemove) {
    const relay = relayPool.relays.get(url)
    if (relay) {
      relay.close()
      relayPool.relays.delete(url)
    }
  }

  console.log('[relayManager] After apply, pool has:', relayPool.relays.size, 'relays')
}
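A note on normalizeRelayUrl: it exists because applesauce-relay stores pathless relay URLs with a trailing slash, so set membership checks against pool.relays must compare normalized forms. Expected behaviour, derived from the code above (example URLs only):

// normalizeRelayUrl('wss://relay.damus.io')        -> 'wss://relay.damus.io/'        (no path: slash appended)
// normalizeRelayUrl('wss://relay.damus.io/')       -> 'wss://relay.damus.io/'        (already normalized)
// normalizeRelayUrl('wss://proxy.example.com/abc') -> 'wss://proxy.example.com/abc'  (has a path: unchanged)
// normalizeRelayUrl('not a url')                   -> 'not a url'                    (URL parsing fails: returned as-is)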
@@ -1,9 +1,9 @@
import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { IEventStore } from 'applesauce-core'
import { RELAYS } from '../config/relays'
import { isLocalRelay, areAllRelaysLocal } from '../utils/helpers'
import { markEventAsOfflineCreated } from './offlineSyncService'
import { getActiveRelayUrls } from './relayManager'

/**
 * Unified write helper: add event to EventStore, detect connectivity,

@@ -27,10 +27,13 @@ export async function publishEvent(

const hasRemoteConnection = connectedRelays.some(url => !isLocalRelay(url))

// Get active relay URLs from the pool
const activeRelays = getActiveRelayUrls(relayPool)

// Determine which relays we expect to succeed
const expectedSuccessRelays = hasRemoteConnection
? RELAYS
: RELAYS.filter(isLocalRelay)
? activeRelays
: activeRelays.filter(isLocalRelay)

const isLocalOnly = areAllRelaysLocal(expectedSuccessRelays)

@@ -42,7 +45,7 @@ export async function publishEvent(

}

// Publish to all configured relays in the background (non-blocking)
relayPool.publish(RELAYS, event)
relayPool.publish(activeRelays, event)
.then(() => {
})
.catch((error) => {
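publishEvent narrows the expected-success set to local relays whenever no remote relay is connected. isLocalRelay and areAllRelaysLocal come from ../utils/helpers and are not shown in this diff; plausible stand-ins for illustration only (the real helpers may differ):

// Hypothetical stand-ins for the helpers imported from '../utils/helpers'.
function isLocalRelay(url: string): boolean {
  return url.includes('localhost') || url.includes('127.0.0.1')
}

function areAllRelaysLocal(urls: string[]): boolean {
  return urls.length > 0 && urls.every(isLocalRelay)
}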