mirror of https://github.com/dergigi/boris.git
synced 2026-02-16 20:45:01 +01:00
Compare commits
7 Commits
| Author | SHA1 | Date |
|---|---|---|
| | a08e4fdc24 | |
| | bc7b4ae42d | |
| | 4dc1894ef3 | |
| | f00f26dfe0 | |
| | 2e59bc9375 | |
| | 0d50d05245 | |
| | 90c74a8e9d | |
121 CHANGELOG.md
@@ -7,6 +7,124 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

## [0.7.0] - 2025-10-18

### Added

- Login with Bunker (NIP-46) authentication support
- Support for remote signing via Nostr Connect protocol
- Bunker URI input with validation and error handling
- Automatic reconnection on app restore with proper permissions
- Signer suggestions in error messages (Amber, nsec.app, Nostrum)
- Debug page (`/debug`) for diagnostics and testing
- Interactive NIP-04 and NIP-44 encryption/decryption testing
- Live performance timing with stopwatch display
- Bookmark loading and decryption diagnostics
- Real-time bunker logs with filtering and clearing
- Version and git commit footer
- Progressive bookmark loading with streaming updates
- Non-blocking, progressive bookmark updates via callback pattern
- Batched background hydration using EventLoader and AddressLoader
- Auto-decrypt bookmarks as they arrive from relays
- Individual decrypt buttons for encrypted bookmark events
- Bookmark grouping toggle (grouped by source vs flat chronological)
- Toggle between grouped view and flat chronological list
- Amethyst-style bookmark detection and grouping
- Display bookmarks even when they only have IDs (content loads in background)

### Changed

- Improved login UI with better copy and modern design
- Personable title and nostr-native language
- Highlighted 'your own highlights' in login copy
- Simplified button text to single words (Extension, Signer)
- Hide login button and user icon when logged out
- Hide Extension button when Bunker input is shown
- Auto-load bookmarks on login and page mount
- Enhanced bunker error messages
- Formatted error messages with signer suggestions
- Links to nos2x, Amber, nsec.app, and Nostrum signers
- Better error handling for missing signer extensions
- Centered and constrained bunker input field
- Centralized bookmark loading architecture
- Single shared bookmark controller for consistent loading
- Unified bookmark loading with streaming and auto-decrypt
- Consolidated bookmark loading into single centralized function
- Bookmarks passed as props throughout component tree
- Renamed UI elements for clarity
- "Bunker" button renamed to "Signer"
- Hide bookmark controls when logged out
- Settings version footer improvements
- Separate links for version (to GitHub release) and commit (to commit page)
- Proper spacing around middot separator

### Fixed

- NIP-46 bunker signing and decryption
- NostrConnectSigner properly reconnects with permissions on app restore
- Bunker relays added to relay pool for signing requests
- Proper setup of pool and relays before bunker reconnection
- Expose nip04/nip44 on NostrConnectAccount for bookmark decryption
- Cache wrapped nip04/nip44 objects instead of using getters
- Wait for bunker relay connections before marking signer ready
- Validate bunker URI (remote must differ from user pubkey)
- Accept remote===pubkey for Amber compatibility
- Bookmark loading and decryption
- Bookmarks load and complete properly with streaming
- Auto-decrypt private bookmarks with NIP-04 detection
- Include decrypted private bookmarks in sidebar
- Skip background event fetching when there are too many IDs
- Only build bookmarks from ready events (unencrypted or decrypted)
- Restore Debug page decrypt display via onDecryptComplete callback
- Make controller onEvent non-blocking for queryEvents completion
- Proper timeout handling for bookmark decryption (no hanging)
- Smart encryption detection with consistent padlock display
- Sequential decryption instead of concurrent to avoid queue issues
- Add extraRelays to EventLoader and AddressLoader
- PWA cache limit increased to 3 MiB for larger bundles
- Extension login error messages with nos2x link
- TypeScript and linting errors throughout
- Replace empty catch blocks with warnings
- Fix explicit any types
- Add missing useEffect dependencies
- Resolve all linting issues in App.tsx, Debug.tsx, and async utilities

### Performance

- Non-blocking NIP-46 operations
- Fire-and-forget NIP-46 publish for better UI responsiveness
- Non-blocking bookmark decryption with sequential processing
- Make controller onEvent non-blocking for queryEvents completion
- Optimized bookmark loading
- Batched background hydration using EventLoader and AddressLoader
- Progressive, non-blocking bookmark loading with streaming
- Shorter timeouts for debug page bookmark loading
- Remove artificial delays from bookmark decryption

### Refactored

- Centralized bookmark controller architecture
- Extract bookmark streaming helpers and centralize loading
- Consolidated bookmark loading into single function
- Remove deprecated bookmark service files
- Share bookmark controller between components
- Debug page organization
- Extract VersionFooter component to eliminate duplication
- Structured sections with proper layout and styling
- Apply settings page styling structure
- Simplified bunker implementation following applesauce patterns
- Clean up bunker implementation for better maintainability
- Import RELAYS from central config (DRY principle)
- Update RELAYS list with relay.nsec.app

### Documentation

- Comprehensive Amber.md documentation
- Amethyst-style bookmarks section
- Bunker decrypt investigation summary
- Critical queue disabling requirement
- NIP-46 setup and troubleshooting

## [0.6.24] - 2025-01-16

### Fixed

@@ -1760,7 +1878,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Optimize relay usage following applesauce-relay best practices
- Use applesauce-react event models for better profile handling

[Unreleased]: https://github.com/dergigi/boris/compare/v0.6.24...HEAD
[Unreleased]: https://github.com/dergigi/boris/compare/v0.7.0...HEAD
[0.7.0]: https://github.com/dergigi/boris/compare/v0.6.24...v0.7.0
[0.6.24]: https://github.com/dergigi/boris/compare/v0.6.23...v0.6.24
[0.6.23]: https://github.com/dergigi/boris/compare/v0.6.22...v0.6.23
[0.6.21]: https://github.com/dergigi/boris/compare/v0.6.20...v0.6.21
package.json

@@ -1,6 +1,6 @@
{
  "name": "boris",
  "version": "0.7.0",
  "version": "0.7.1",
  "description": "A minimal nostr client for bookmark management",
  "homepage": "https://read.withboris.com/",
  "type": "module",
@@ -76,16 +76,28 @@ const Debug: React.FC<DebugProps> = ({
  const [bookmarkStats, setBookmarkStats] = useState<{ public: number; private: number } | null>(null)
  const [tLoadBookmarks, setTLoadBookmarks] = useState<number | null>(null)
  const [tDecryptBookmarks, setTDecryptBookmarks] = useState<number | null>(null)
  const [tFirstBookmark, setTFirstBookmark] = useState<number | null>(null)

  // Individual event decryption results
  const [decryptedEvents, setDecryptedEvents] = useState<Map<string, { public: number; private: number }>>(new Map())

  // Highlight loading state
  const [highlightMode, setHighlightMode] = useState<'article' | 'url' | 'author'>('author')
  const [highlightArticleCoord, setHighlightArticleCoord] = useState<string>('')
  const [highlightUrl, setHighlightUrl] = useState<string>('')
  const [highlightAuthor, setHighlightAuthor] = useState<string>('')
  const [isLoadingHighlights, setIsLoadingHighlights] = useState(false)
  const [highlightEvents, setHighlightEvents] = useState<NostrEvent[]>([])
  const [tLoadHighlights, setTLoadHighlights] = useState<number | null>(null)
  const [tFirstHighlight, setTFirstHighlight] = useState<number | null>(null)

  // Live timing state
  const [liveTiming, setLiveTiming] = useState<{
    nip44?: { type: 'encrypt' | 'decrypt'; startTime: number }
    nip04?: { type: 'encrypt' | 'decrypt'; startTime: number }
    loadBookmarks?: { startTime: number }
    decryptBookmarks?: { startTime: number }
    loadHighlights?: { startTime: number }
  }>({})

  useEffect(() => {
@@ -243,10 +255,12 @@ const Debug: React.FC<DebugProps> = ({
    setBookmarkStats(null)
    setBookmarkEvents([]) // Clear existing events
    setDecryptedEvents(new Map())
    setTFirstBookmark(null)
    DebugBus.info('debug', 'Loading bookmark events...')

    // Start timing
    const start = performance.now()
    let firstEventTime: number | null = null
    setLiveTiming(prev => ({ ...prev, loadBookmarks: { startTime: start } }))

    // Import controller at runtime to avoid circular dependencies
@@ -254,6 +268,12 @@ const Debug: React.FC<DebugProps> = ({

    // Subscribe to raw events for Debug UI display
    const unsubscribeRaw = bookmarkController.onRawEvent((evt) => {
      // Track time to first event
      if (firstEventTime === null) {
        firstEventTime = performance.now() - start
        setTFirstBookmark(Math.round(firstEventTime))
      }

      // Add event immediately with live deduplication
      setBookmarkEvents(prev => {
        const key = getEventKey(evt)
@@ -311,10 +331,94 @@ const Debug: React.FC<DebugProps> = ({
    setBookmarkStats(null)
    setTLoadBookmarks(null)
    setTDecryptBookmarks(null)
    setTFirstBookmark(null)
    setDecryptedEvents(new Map())
    DebugBus.info('debug', 'Cleared bookmark data')
  }

  const handleLoadHighlights = async () => {
    if (!relayPool) {
      DebugBus.warn('debug', 'Cannot load highlights: missing relayPool')
      return
    }

    // Default to logged-in user's highlights if no specific query provided
    const getValue = () => {
      if (highlightMode === 'article') return highlightArticleCoord.trim()
      if (highlightMode === 'url') return highlightUrl.trim()
      const authorValue = highlightAuthor.trim()
      return authorValue || pubkey || ''
    }

    const value = getValue()
    if (!value) {
      DebugBus.warn('debug', 'Please provide a value to query or log in')
      return
    }

    try {
      setIsLoadingHighlights(true)
      setHighlightEvents([])
      setTFirstHighlight(null)
      DebugBus.info('debug', `Loading highlights (${highlightMode}: ${value})...`)

      const start = performance.now()
      setLiveTiming(prev => ({ ...prev, loadHighlights: { startTime: start } }))

      let firstEventTime: number | null = null
      const seenIds = new Set<string>()

      // Import highlight services
      const { queryEvents } = await import('../services/dataFetch')
      const { KINDS } = await import('../config/kinds')

      // Build filter based on mode
      let filter: { kinds: number[]; '#a'?: string[]; '#r'?: string[]; authors?: string[] }
      if (highlightMode === 'article') {
        filter = { kinds: [KINDS.Highlights], '#a': [value] }
      } else if (highlightMode === 'url') {
        filter = { kinds: [KINDS.Highlights], '#r': [value] }
      } else {
        filter = { kinds: [KINDS.Highlights], authors: [value] }
      }

      const events = await queryEvents(relayPool, filter, {
        onEvent: (evt) => {
          if (seenIds.has(evt.id)) return
          seenIds.add(evt.id)

          if (firstEventTime === null) {
            firstEventTime = performance.now() - start
            setTFirstHighlight(Math.round(firstEventTime))
          }

          setHighlightEvents(prev => [...prev, evt])
        }
      })

      const elapsed = Math.round(performance.now() - start)
      setTLoadHighlights(elapsed)
      setLiveTiming(prev => {
        const { loadHighlights, ...rest } = prev
        return rest
      })

      DebugBus.info('debug', `Loaded ${events.length} highlight events in ${elapsed}ms`)
    } catch (err) {
      console.error('Failed to load highlights:', err)
      DebugBus.error('debug', `Failed to load highlights: ${err instanceof Error ? err.message : String(err)}`)
    } finally {
      setIsLoadingHighlights(false)
    }
  }

  const handleClearHighlights = () => {
    setHighlightEvents([])
    setTLoadHighlights(null)
    setTFirstHighlight(null)
    DebugBus.info('debug', 'Cleared highlight data')
  }

  const handleBunkerLogin = async () => {
    if (!bunkerUri.trim()) {
      setBunkerError('Please enter a bunker URI')
@@ -376,7 +480,7 @@ const Debug: React.FC<DebugProps> = ({
    return null
  }

  const getBookmarkLiveTiming = (operation: 'loadBookmarks' | 'decryptBookmarks') => {
  const getBookmarkLiveTiming = (operation: 'loadBookmarks' | 'decryptBookmarks' | 'loadHighlights') => {
    const timing = liveTiming[operation]
    if (timing) {
      const elapsed = Math.round(performance.now() - timing.startTime)
@@ -390,7 +494,7 @@ const Debug: React.FC<DebugProps> = ({
    value?: string | number | null;
    mode?: 'nip44' | 'nip04';
    type?: 'encrypt' | 'decrypt';
    bookmarkOp?: 'loadBookmarks' | 'decryptBookmarks';
    bookmarkOp?: 'loadBookmarks' | 'decryptBookmarks' | 'loadHighlights';
  }) => {
    const liveValue = bookmarkOp ? getBookmarkLiveTiming(bookmarkOp) : (mode && type ? getLiveTiming(mode, type) : null)
    const isLive = !!liveValue
@@ -596,7 +700,8 @@ const Debug: React.FC<DebugProps> = ({
        </div>

        <div className="mb-3 flex gap-2 flex-wrap">
          <Stat label="load" value={tLoadBookmarks} bookmarkOp="loadBookmarks" />
          <Stat label="total" value={tLoadBookmarks} bookmarkOp="loadBookmarks" />
          <Stat label="first event" value={tFirstBookmark} />
          <Stat label="decrypt" value={tDecryptBookmarks} bookmarkOp="decryptBookmarks" />
        </div>
@@ -647,6 +752,143 @@ const Debug: React.FC<DebugProps> = ({
        )}
      </div>

      {/* Highlight Loading Section */}
      <div className="settings-section">
        <h3 className="section-title">Highlight Loading</h3>
        <div className="text-sm opacity-70 mb-3">Test highlight loading with EOSE-based queryEvents (kind: 9802). Author mode defaults to your highlights.</div>

        <div className="mb-3">
          <div className="text-sm opacity-70 mb-2">Query Mode:</div>
          <div className="flex gap-2">
            <label className="flex items-center gap-2 cursor-pointer">
              <input
                type="radio"
                checked={highlightMode === 'article'}
                onChange={() => setHighlightMode('article')}
              />
              <span>Article (#a)</span>
            </label>
            <label className="flex items-center gap-2 cursor-pointer">
              <input
                type="radio"
                checked={highlightMode === 'url'}
                onChange={() => setHighlightMode('url')}
              />
              <span>URL (#r)</span>
            </label>
            <label className="flex items-center gap-2 cursor-pointer">
              <input
                type="radio"
                checked={highlightMode === 'author'}
                onChange={() => setHighlightMode('author')}
              />
              <span>Author</span>
            </label>
          </div>
        </div>

        <div className="mb-3">
          {highlightMode === 'article' && (
            <input
              type="text"
              className="input w-full"
              placeholder="30023:pubkey:identifier"
              value={highlightArticleCoord}
              onChange={(e) => setHighlightArticleCoord(e.target.value)}
              disabled={isLoadingHighlights}
            />
          )}
          {highlightMode === 'url' && (
            <input
              type="text"
              className="input w-full"
              placeholder="https://example.com/article"
              value={highlightUrl}
              onChange={(e) => setHighlightUrl(e.target.value)}
              disabled={isLoadingHighlights}
            />
          )}
          {highlightMode === 'author' && (
            <input
              type="text"
              className="input w-full"
              placeholder={pubkey ? `${pubkey.slice(0, 16)}... (logged-in user)` : 'pubkey (hex)'}
              value={highlightAuthor}
              onChange={(e) => setHighlightAuthor(e.target.value)}
              disabled={isLoadingHighlights}
            />
          )}
        </div>

        <div className="flex gap-2 mb-3 items-center">
          <button
            className="btn btn-primary"
            onClick={handleLoadHighlights}
            disabled={isLoadingHighlights || !relayPool}
          >
            {isLoadingHighlights ? (
              <>
                <FontAwesomeIcon icon={faSpinner} className="animate-spin mr-2" />
                Loading...
              </>
            ) : (
              'Load Highlights'
            )}
          </button>
          <button
            className="btn btn-secondary ml-auto"
            onClick={handleClearHighlights}
            disabled={highlightEvents.length === 0}
          >
            Clear
          </button>
        </div>

        <div className="mb-3 flex gap-2 flex-wrap">
          <Stat label="total" value={tLoadHighlights} bookmarkOp="loadHighlights" />
          <Stat label="first event" value={tFirstHighlight} />
        </div>

        {highlightEvents.length > 0 && (
          <div className="mb-3">
            <div className="text-sm opacity-70 mb-2">Loaded Highlights ({highlightEvents.length}):</div>
            <div className="space-y-2 max-h-96 overflow-y-auto">
              {highlightEvents.map((evt, idx) => {
                const content = evt.content || ''
                const shortContent = content.length > 100 ? content.substring(0, 100) + '...' : content
                const aTag = evt.tags?.find((t: string[]) => t[0] === 'a')?.[1]
                const rTag = evt.tags?.find((t: string[]) => t[0] === 'r')?.[1]
                const eTag = evt.tags?.find((t: string[]) => t[0] === 'e')?.[1]
                const contextTag = evt.tags?.find((t: string[]) => t[0] === 'context')?.[1]

                return (
                  <div key={idx} className="font-mono text-xs p-2 bg-gray-100 dark:bg-gray-800 rounded">
                    <div className="font-semibold mb-1">Highlight #{idx + 1}</div>
                    <div className="opacity-70 mb-1">
                      <div>Author: {evt.pubkey.slice(0, 16)}...</div>
                      <div>Created: {new Date(evt.created_at * 1000).toLocaleString()}</div>
                    </div>
                    <div className="mt-1">
                      <div className="font-semibold text-[11px]">Content:</div>
                      <div className="italic">"{shortContent}"</div>
                    </div>
                    {contextTag && (
                      <div className="mt-1 text-[11px] opacity-70">
                        <div>Context: {contextTag.substring(0, 60)}...</div>
                      </div>
                    )}
                    {aTag && <div className="mt-1 text-[11px] opacity-70">#a: {aTag}</div>}
                    {rTag && <div className="mt-1 text-[11px] opacity-70">#r: {rTag}</div>}
                    {eTag && <div className="mt-1 text-[11px] opacity-70">#e: {eTag.slice(0, 16)}...</div>}
                    <div className="opacity-50 mt-1 text-[10px] break-all">ID: {evt.id}</div>
                  </div>
                )
              })}
            </div>
          </div>
        )}
      </div>

      {/* Debug Logs Section */}
      <div className="settings-section">
        <h3 className="section-title">Debug Logs</h3>
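The Debug page above and the highlight services below all call `queryEvents(relayPool, filter, { onEvent })` from `../services/dataFetch`, whose implementation is not part of this diff. A minimal sketch of the shape those call sites assume, pieced together from the applesauce-relay and rxjs operators the replaced code used (the real dataFetch helper may differ in timeouts and relay selection):

```typescript
import { RelayPool, completeOnEose, onlyEvents } from 'applesauce-relay'
import { lastValueFrom, takeUntil, tap, timer, toArray } from 'rxjs'
import { Filter, NostrEvent } from 'nostr-tools'

interface QueryOptions {
  onEvent?: (event: NostrEvent) => void // streamed as events arrive
}

// Hypothetical sketch: request the filter from every relay in the pool,
// stream each event through onEvent, and resolve with the collected array
// after EOSE (or after a hard timeout, whichever comes first).
export const queryEvents = async (
  relayPool: RelayPool,
  filter: Filter,
  options: QueryOptions = {}
): Promise<NostrEvent[]> => {
  const relayUrls = Array.from(relayPool.relays.values()).map(relay => relay.url)
  return lastValueFrom(
    relayPool
      .req(relayUrls, filter)
      .pipe(
        onlyEvents(),
        tap((event: NostrEvent) => options.onEvent?.(event)),
        completeOnEose(),
        takeUntil(timer(6000)), // assumed upper bound; the real helper may use different timeouts
        toArray()
      )
  )
}
```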
96 src/services/highlights/cache.ts Normal file

@@ -0,0 +1,96 @@
import { Highlight } from '../../types/highlights'

interface CacheEntry {
  highlights: Highlight[]
  timestamp: number
}

/**
 * Simple in-memory session cache for highlight queries with TTL
 */
class HighlightCache {
  private cache = new Map<string, CacheEntry>()
  private ttlMs = 60000 // 60 seconds

  /**
   * Generate cache key for article coordinate
   */
  articleKey(coordinate: string): string {
    return `article:${coordinate}`
  }

  /**
   * Generate cache key for URL
   */
  urlKey(url: string): string {
    // Normalize URL for consistent caching
    try {
      const normalized = new URL(url)
      normalized.hash = '' // Remove hash
      return `url:${normalized.toString()}`
    } catch {
      return `url:${url}`
    }
  }

  /**
   * Generate cache key for author pubkey
   */
  authorKey(pubkey: string): string {
    return `author:${pubkey}`
  }

  /**
   * Get cached highlights if not expired
   */
  get(key: string): Highlight[] | null {
    const entry = this.cache.get(key)
    if (!entry) return null

    const now = Date.now()
    if (now - entry.timestamp > this.ttlMs) {
      this.cache.delete(key)
      return null
    }

    return entry.highlights
  }

  /**
   * Store highlights in cache
   */
  set(key: string, highlights: Highlight[]): void {
    this.cache.set(key, {
      highlights,
      timestamp: Date.now()
    })
  }

  /**
   * Clear specific cache entry
   */
  clear(key: string): void {
    this.cache.delete(key)
  }

  /**
   * Clear all cache entries
   */
  clearAll(): void {
    this.cache.clear()
  }

  /**
   * Get cache stats
   */
  stats(): { size: number; keys: string[] } {
    return {
      size: this.cache.size,
      keys: Array.from(this.cache.keys())
    }
  }
}

// Singleton instance
export const highlightCache = new HighlightCache()
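A quick usage sketch of the cache above. The key helpers and the 60-second TTL come straight from the class; the coordinate and highlight values are hypothetical:

```typescript
import { highlightCache } from './cache'
import { Highlight } from '../../types/highlights'

// Hypothetical article coordinate, for illustration only.
const coordinate = '30023:<author-pubkey>:my-article'
const key = highlightCache.articleKey(coordinate) // "article:30023:<author-pubkey>:my-article"

const cached = highlightCache.get(key) // null on first call or after the 60 s TTL expires
if (!cached) {
  const fresh: Highlight[] = [] // ...fetched from relays elsewhere
  highlightCache.set(key, fresh) // subsequent get() calls return this list until it expires
}
```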
@@ -1,61 +1,64 @@
import { RelayPool, completeOnEose, onlyEvents } from 'applesauce-relay'
import { lastValueFrom, merge, Observable, takeUntil, timer, tap, toArray } from 'rxjs'
import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { Highlight } from '../../types/highlights'
import { prioritizeLocalRelays, partitionRelays } from '../../utils/helpers'
import { eventToHighlight, dedupeHighlights, sortHighlights } from '../highlightEventProcessor'
import { UserSettings } from '../settingsService'
import { rebroadcastEvents } from '../rebroadcastService'
import { KINDS } from '../../config/kinds'
import { queryEvents } from '../dataFetch'
import { highlightCache } from './cache'

export const fetchHighlights = async (
  relayPool: RelayPool,
  pubkey: string,
  onHighlight?: (highlight: Highlight) => void,
  settings?: UserSettings
  settings?: UserSettings,
  force = false
): Promise<Highlight[]> => {
  // Check cache first unless force refresh
  if (!force) {
    const cacheKey = highlightCache.authorKey(pubkey)
    const cached = highlightCache.get(cacheKey)
    if (cached) {
      console.log(`📌 Using cached highlights for author (${cached.length} items)`)
      // Stream cached highlights if callback provided
      if (onHighlight) {
        cached.forEach(h => onHighlight(h))
      }
      return cached
    }
  }
  try {
    const relayUrls = Array.from(relayPool.relays.values()).map(relay => relay.url)
    const ordered = prioritizeLocalRelays(relayUrls)
    const { local: localRelays, remote: remoteRelays } = partitionRelays(ordered)

    const seenIds = new Set<string>()
    const local$ = localRelays.length > 0
      ? relayPool
          .req(localRelays, { kinds: [KINDS.Highlights], authors: [pubkey] })
          .pipe(
            onlyEvents(),
            tap((event: NostrEvent) => {
              if (!seenIds.has(event.id)) {
                seenIds.add(event.id)
                if (onHighlight) onHighlight(eventToHighlight(event))
              }
            }),
            completeOnEose(),
            takeUntil(timer(1200))
          )
      : new Observable<NostrEvent>((sub) => sub.complete())
    const remote$ = remoteRelays.length > 0
      ? relayPool
          .req(remoteRelays, { kinds: [KINDS.Highlights], authors: [pubkey] })
          .pipe(
            onlyEvents(),
            tap((event: NostrEvent) => {
              if (!seenIds.has(event.id)) {
                seenIds.add(event.id)
                if (onHighlight) onHighlight(eventToHighlight(event))
              }
            }),
            completeOnEose(),
            takeUntil(timer(6000))
          )
      : new Observable<NostrEvent>((sub) => sub.complete())
    const rawEvents: NostrEvent[] = await lastValueFrom(merge(local$, remote$).pipe(toArray()))
    const rawEvents: NostrEvent[] = await queryEvents(
      relayPool,
      { kinds: [KINDS.Highlights], authors: [pubkey] },
      {
        onEvent: (event: NostrEvent) => {
          if (seenIds.has(event.id)) return
          seenIds.add(event.id)
          if (onHighlight) onHighlight(eventToHighlight(event))
        }
      }
    )

    console.log(`📌 Fetched ${rawEvents.length} highlight events for author:`, pubkey.slice(0, 8))

    try {
      await rebroadcastEvents(rawEvents, relayPool, settings)
    } catch (err) {
      console.warn('Failed to rebroadcast highlight events:', err)
    }

    await rebroadcastEvents(rawEvents, relayPool, settings)
    const uniqueEvents = dedupeHighlights(rawEvents)
    const highlights = uniqueEvents.map(eventToHighlight)
    return sortHighlights(highlights)
    const sorted = sortHighlights(highlights)

    // Cache the results
    const cacheKey = highlightCache.authorKey(pubkey)
    highlightCache.set(cacheKey, sorted)

    return sorted
  } catch {
    return []
  }
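A hedged sketch of how a caller might drive the new signature, streaming highlights into the UI and bypassing the cache on an explicit refresh. The wrapper, its parameter names, and the module path are illustrative, not taken from this diff:

```typescript
import { RelayPool } from 'applesauce-relay'
import { Highlight } from '../../types/highlights'
import { fetchHighlights } from './fetchHighlights' // assumed module name for the file above

// Stream each highlight into some UI callback as it arrives, then return
// the deduped, sorted list once the query settles.
async function loadAuthorHighlights(
  relayPool: RelayPool,
  pubkey: string,
  render: (h: Highlight) => void,
  forceRefresh = false
): Promise<Highlight[]> {
  return fetchHighlights(
    relayPool,
    pubkey,
    render,       // onHighlight: also fires for cached items
    undefined,    // settings: optional rebroadcast configuration
    forceRefresh  // force: skip the 60 s session cache
  )
}
```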
@@ -1,95 +1,68 @@
import { RelayPool, completeOnEose, onlyEvents } from 'applesauce-relay'
import { lastValueFrom, merge, Observable, takeUntil, timer, tap, toArray } from 'rxjs'
import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { Highlight } from '../../types/highlights'
import { RELAYS } from '../../config/relays'
import { prioritizeLocalRelays, partitionRelays } from '../../utils/helpers'
import { KINDS } from '../../config/kinds'
import { eventToHighlight, dedupeHighlights, sortHighlights } from '../highlightEventProcessor'
import { UserSettings } from '../settingsService'
import { rebroadcastEvents } from '../rebroadcastService'
import { queryEvents } from '../dataFetch'
import { highlightCache } from './cache'

export const fetchHighlightsForArticle = async (
  relayPool: RelayPool,
  articleCoordinate: string,
  eventId?: string,
  onHighlight?: (highlight: Highlight) => void,
  settings?: UserSettings
  settings?: UserSettings,
  force = false
): Promise<Highlight[]> => {
  // Check cache first unless force refresh
  if (!force) {
    const cacheKey = highlightCache.articleKey(articleCoordinate)
    const cached = highlightCache.get(cacheKey)
    if (cached) {
      console.log(`📌 Using cached highlights for article (${cached.length} items)`)
      // Stream cached highlights if callback provided
      if (onHighlight) {
        cached.forEach(h => onHighlight(h))
      }
      return cached
    }
  }
  try {
    const seenIds = new Set<string>()
    const processEvent = (event: NostrEvent): Highlight | null => {
      if (seenIds.has(event.id)) return null
    const onEvent = (event: NostrEvent) => {
      if (seenIds.has(event.id)) return
      seenIds.add(event.id)
      return eventToHighlight(event)
      if (onHighlight) onHighlight(eventToHighlight(event))
    }

    const orderedRelays = prioritizeLocalRelays(RELAYS)
    const { local: localRelays, remote: remoteRelays } = partitionRelays(orderedRelays)

    const aLocal$ = localRelays.length > 0
      ? relayPool
          .req(localRelays, { kinds: [9802], '#a': [articleCoordinate] })
          .pipe(
            onlyEvents(),
            tap((event: NostrEvent) => {
              const highlight = processEvent(event)
              if (highlight && onHighlight) onHighlight(highlight)
            }),
            completeOnEose(),
            takeUntil(timer(1200))
          )
      : new Observable<NostrEvent>((sub) => sub.complete())
    const aRemote$ = remoteRelays.length > 0
      ? relayPool
          .req(remoteRelays, { kinds: [9802], '#a': [articleCoordinate] })
          .pipe(
            onlyEvents(),
            tap((event: NostrEvent) => {
              const highlight = processEvent(event)
              if (highlight && onHighlight) onHighlight(highlight)
            }),
            completeOnEose(),
            takeUntil(timer(6000))
          )
      : new Observable<NostrEvent>((sub) => sub.complete())
    const aTagEvents: NostrEvent[] = await lastValueFrom(merge(aLocal$, aRemote$).pipe(toArray()))

    let eTagEvents: NostrEvent[] = []
    if (eventId) {
      const eLocal$ = localRelays.length > 0
        ? relayPool
            .req(localRelays, { kinds: [9802], '#e': [eventId] })
            .pipe(
              onlyEvents(),
              tap((event: NostrEvent) => {
                const highlight = processEvent(event)
                if (highlight && onHighlight) onHighlight(highlight)
              }),
              completeOnEose(),
              takeUntil(timer(1200))
            )
        : new Observable<NostrEvent>((sub) => sub.complete())
      const eRemote$ = remoteRelays.length > 0
        ? relayPool
            .req(remoteRelays, { kinds: [9802], '#e': [eventId] })
            .pipe(
              onlyEvents(),
              tap((event: NostrEvent) => {
                const highlight = processEvent(event)
                if (highlight && onHighlight) onHighlight(highlight)
              }),
              completeOnEose(),
              takeUntil(timer(6000))
            )
        : new Observable<NostrEvent>((sub) => sub.complete())
      eTagEvents = await lastValueFrom(merge(eLocal$, eRemote$).pipe(toArray()))
    }
    // Query for both #a and #e tags in parallel
    const [aTagEvents, eTagEvents] = await Promise.all([
      queryEvents(relayPool, { kinds: [KINDS.Highlights], '#a': [articleCoordinate] }, { onEvent }),
      eventId
        ? queryEvents(relayPool, { kinds: [KINDS.Highlights], '#e': [eventId] }, { onEvent })
        : Promise.resolve([] as NostrEvent[])
    ])

    const rawEvents = [...aTagEvents, ...eTagEvents]
    await rebroadcastEvents(rawEvents, relayPool, settings)
    console.log(`📌 Fetched ${rawEvents.length} highlight events for article:`, articleCoordinate)

    try {
      await rebroadcastEvents(rawEvents, relayPool, settings)
    } catch (err) {
      console.warn('Failed to rebroadcast highlight events:', err)
    }

    const uniqueEvents = dedupeHighlights(rawEvents)
    const highlights: Highlight[] = uniqueEvents.map(eventToHighlight)
    return sortHighlights(highlights)
    const sorted = sortHighlights(highlights)

    // Cache the results
    const cacheKey = highlightCache.articleKey(articleCoordinate)
    highlightCache.set(cacheKey, sorted)

    return sorted
  } catch {
    return []
  }
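The refactor above replaces the literal kind `9802` with `KINDS.Highlights` from `../../config/kinds`. That config file is not part of this diff; a minimal sketch of what these call sites assume it to contain (only the Highlights entry, 9802, is confirmed by the old code and the Debug page copy):

```typescript
// Assumed shape of src/config/kinds.ts; entries other than Highlights
// are not shown anywhere in this diff.
export const KINDS = {
  Highlights: 9802
} as const

export type KindName = keyof typeof KINDS
```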
@@ -1,68 +1,67 @@
import { RelayPool, completeOnEose, onlyEvents } from 'applesauce-relay'
import { lastValueFrom, merge, Observable, takeUntil, timer, tap, toArray } from 'rxjs'
import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { Highlight } from '../../types/highlights'
import { RELAYS } from '../../config/relays'
import { prioritizeLocalRelays, partitionRelays } from '../../utils/helpers'
import { KINDS } from '../../config/kinds'
import { eventToHighlight, dedupeHighlights, sortHighlights } from '../highlightEventProcessor'
import { UserSettings } from '../settingsService'
import { rebroadcastEvents } from '../rebroadcastService'
import { queryEvents } from '../dataFetch'
import { highlightCache } from './cache'

export const fetchHighlightsForUrl = async (
  relayPool: RelayPool,
  url: string,
  onHighlight?: (highlight: Highlight) => void,
  settings?: UserSettings
  settings?: UserSettings,
  force = false
): Promise<Highlight[]> => {
  const seenIds = new Set<string>()
  const orderedRelaysUrl = prioritizeLocalRelays(RELAYS)
  const { local: localRelaysUrl, remote: remoteRelaysUrl } = partitionRelays(orderedRelaysUrl)

  // Check cache first unless force refresh
  if (!force) {
    const cacheKey = highlightCache.urlKey(url)
    const cached = highlightCache.get(cacheKey)
    if (cached) {
      console.log(`📌 Using cached highlights for URL (${cached.length} items)`)
      // Stream cached highlights if callback provided
      if (onHighlight) {
        cached.forEach(h => onHighlight(h))
      }
      return cached
    }
  }
  try {
    const local$ = localRelaysUrl.length > 0
      ? relayPool
          .req(localRelaysUrl, { kinds: [9802], '#r': [url] })
          .pipe(
            onlyEvents(),
            tap((event: NostrEvent) => {
              seenIds.add(event.id)
              if (onHighlight) onHighlight(eventToHighlight(event))
            }),
            completeOnEose(),
            takeUntil(timer(1200))
          )
      : new Observable<NostrEvent>((sub) => sub.complete())
    const remote$ = remoteRelaysUrl.length > 0
      ? relayPool
          .req(remoteRelaysUrl, { kinds: [9802], '#r': [url] })
          .pipe(
            onlyEvents(),
            tap((event: NostrEvent) => {
              seenIds.add(event.id)
              if (onHighlight) onHighlight(eventToHighlight(event))
            }),
            completeOnEose(),
            takeUntil(timer(6000))
          )
      : new Observable<NostrEvent>((sub) => sub.complete())
    const rawEvents: NostrEvent[] = await lastValueFrom(merge(local$, remote$).pipe(toArray()))

    const seenIds = new Set<string>()
    const rawEvents: NostrEvent[] = await queryEvents(
      relayPool,
      { kinds: [KINDS.Highlights], '#r': [url] },
      {
        onEvent: (event: NostrEvent) => {
          if (seenIds.has(event.id)) return
          seenIds.add(event.id)
          if (onHighlight) onHighlight(eventToHighlight(event))
        }
      }
    )

    console.log(`📌 Fetched ${rawEvents.length} highlight events for URL:`, url)

    // Rebroadcast events - but don't let errors here break the highlight display
    try {
      await rebroadcastEvents(rawEvents, relayPool, settings)
    } catch (err) {
      console.warn('Failed to rebroadcast highlight events:', err)
    }

    const uniqueEvents = dedupeHighlights(rawEvents)
    const highlights: Highlight[] = uniqueEvents.map(eventToHighlight)
    return sortHighlights(highlights)
    const sorted = sortHighlights(highlights)

    // Cache the results
    const cacheKey = highlightCache.urlKey(url)
    highlightCache.set(cacheKey, sorted)

    return sorted
  } catch (err) {
    console.error('Error fetching highlights for URL:', err)
    // Return highlights that were already streamed via callback
    // Don't return empty array as that would clear already-displayed highlights
    return []
  }
}
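Because `fetchHighlightsForUrl` now keys its cache with `highlightCache.urlKey`, two URLs that differ only by fragment share one cache entry. A small sketch of that behaviour, following the `urlKey` implementation above (the example URL is illustrative):

```typescript
import { highlightCache } from './cache'

// urlKey strips the #fragment via the URL constructor, so both inputs
// normalize to the same key and therefore hit the same cached list.
const a = highlightCache.urlKey('https://example.com/article#section-2')
const b = highlightCache.urlKey('https://example.com/article')
console.log(a === b) // true -> "url:https://example.com/article"
```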