mirror of
https://github.com/dergigi/boris.git
synced 2026-02-16 12:34:41 +01:00
Compare commits
60 Commits
| SHA1 |
|---|
| 29746f1042 |
| 829ec4bf6e |
| 30ae0d9dfb |
| 8924f1b307 |
| f92fa2cc93 |
| cc70b533e5 |
| 003c439658 |
| 019958073c |
| 3d47dddbd2 |
| cabf897df8 |
| 4801c0d621 |
| ae76d6e4ea |
| a611e99ff6 |
| 1c039e164f |
| ffa4b38106 |
| 3b22cb5c5d |
| 7bc4522be4 |
| 048e0d802b |
| b282bc4972 |
| c1a23c1f8f |
| 8a5aacfe7b |
| 9126910de5 |
| 496bbc36f4 |
| 90f25420b2 |
| 9167134a89 |
| b5717f1ebf |
| 0c8eaaf220 |
| 80b2720838 |
| ea69740fc8 |
| d650997ff9 |
| ba3554b173 |
| 2cc39d0200 |
| 9aa914a704 |
| 497b6fa4be |
| 4c838b0123 |
| d551f66ef1 |
| 34514199ee |
| 228304f68a |
| ba263acdff |
| 5131cbe12c |
| fa8eed4f4e |
| 3ff57c4b67 |
| 51c364ea53 |
| 4d032372dc |
| 48b5aa3a30 |
| d4483a2f91 |
| c62cb21962 |
| 3f7d726ae6 |
| ac0e5eb585 |
| 5a0dd49e4e |
| d067193f21 |
| 774e2ba67c |
| 6f1c31058f |
| 7551a05aee |
| df485b883d |
| 6f428af1bc |
| e821aaf058 |
| a84d439489 |
| 67bf7e017d |
| e47419a0b8 |
CHANGELOG.md (152 changed lines)
@@ -7,6 +7,154 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

### Added

- Comprehensive debug logging for reading position system
  - All position restore, save, and suppression events logged with `[reading-position]` prefix
  - Emoji indicators for easy visual scanning (🎯 restore, 💾 save, 🛡️ suppression, etc.)
  - Detailed metrics for troubleshooting scroll behavior
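The logging convention described above could be wrapped in a small helper along these lines. This is only an illustration of the `[reading-position]` prefix-and-emoji pattern; the helper name, event names, and emoji mapping are assumptions, not the project's actual code.

```typescript
// Hypothetical helper illustrating the `[reading-position]` logging convention.
type RpEvent = 'restore' | 'save' | 'suppress' | 'skip'

const EMOJI: Record<RpEvent, string> = {
  restore: '🎯',
  save: '💾',
  suppress: '🛡️',
  skip: '⏭️'
}

export function rpLog(event: RpEvent, message: string, metrics?: Record<string, unknown>): void {
  const stamp = new Date().toISOString()
  // All entries share the `[reading-position]` prefix so they can be filtered in the console
  console.log(`[reading-position] [${stamp}] ${EMOJI[event]} ${message}`, metrics ?? '')
}

// Usage: rpLog('save', 'Auto-save at 42%', { position: 0.42 })
```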
### Changed

- Reading position auto-save now uses simple 3-second debounce
  - Saves only after 3s of no scrolling (was 15s minimum interval)
  - Much less aggressive, reduces relay traffic
  - Still saves instantly at 100% completion
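A minimal sketch of such a scroll-save debounce is shown below; the actual change lives in the useReadingPosition diff further down, so the names here are purely illustrative.

```typescript
// Illustrative debounce: save only after 3s of no scrolling, but instantly at 100%.
// This mirrors the behaviour described above; it is not the project's exact code.
function createDebouncedSaver(save: (position: number) => void, debounceMs = 3000) {
  let timer: ReturnType<typeof setTimeout> | null = null
  return (position: number) => {
    if (position >= 1) {
      // Completion saves bypass the debounce entirely
      if (timer) clearTimeout(timer)
      timer = null
      save(1)
      return
    }
    if (timer) clearTimeout(timer) // restart the 3s window on every scroll event
    timer = setTimeout(() => {
      save(position)
      timer = null
    }, debounceMs)
  }
}
```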
### Fixed

- Reading position restore no longer causes jumpy scrolling
  - Stabilized position collector buffers updates for ~700ms, then applies best one (newest timestamp, tie-break by highest progress)
  - Auto-saves suppressed for 1.5s after programmatic restore to prevent feedback loops
  - Tiny scroll deltas (<48px or <5%) ignored to avoid unnecessary movement
  - Instant scroll (behavior: auto) instead of smooth animation reduces perceived oscillation
  - Fixes jumpy behavior from conflicting relay updates and save-restore loops
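The "stabilized position collector" mentioned above is not shown in the diffs on this page; the following is a rough sketch of how such a buffer-then-apply step might look, under the selection rule the changelog describes. All names and types here are assumptions.

```typescript
// Hypothetical collector: buffer position updates for ~700ms, then apply the best one.
// "Best" = newest created_at, ties broken by highest progress, per the changelog entry.
interface PositionUpdate {
  position: number   // 0..1 reading progress
  createdAt: number  // unix seconds from the relay event
}

function createStabilizedCollector(apply: (update: PositionUpdate) => void, windowMs = 700) {
  let buffer: PositionUpdate[] = []
  let timer: ReturnType<typeof setTimeout> | null = null

  return (update: PositionUpdate) => {
    buffer.push(update)
    if (timer) return // window already open; keep buffering
    timer = setTimeout(() => {
      const best = buffer.reduce((a, b) =>
        b.createdAt > a.createdAt ||
        (b.createdAt === a.createdAt && b.position > a.position) ? b : a)
      buffer = []
      timer = null
      apply(best)
    }, windowMs)
  }
}
```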
## [0.10.14] - 2025-01-27

### Added

- Third relay education article link in PWA settings
  - Added "Relay Setup 101" article to relay information section
  - Now links to three educational resources about relays

### Changed

- Timestamp links in bookmark cards now navigate within app
  - Articles (kind:30023) open in `/a/{naddr}` route
  - Notes (kind:1) open in `/e/{eventId}` route
  - External URLs open in `/r/{encodedUrl}` route
  - Uses React Router Link for client-side navigation instead of external search
- Relay article links punctuation improved for better readability
  - Changed from "here and here" to "here, here, and here"
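The routing rule behind these bullets is implemented as a `getInternalRoute` helper in the CardView and LargeView diffs further down; condensed, it amounts to roughly the following (function and parameter names in this standalone form are illustrative).

```typescript
import { naddrEncode } from 'nostr-tools/nip19'

// Condensed from the getInternalRoute helpers in the CardView/LargeView diffs below.
function routeForBookmark(
  kind: number,
  pubkey: string,
  id: string,
  tags: string[][],
  firstUrl?: string
): string | null {
  if (kind === 30023) {
    const dTag = tags.find(t => t[0] === 'd')?.[1]
    if (dTag) return `/a/${naddrEncode({ kind, pubkey, identifier: dTag })}` // long-form article
  } else if (kind === 1) {
    return `/e/${id}` // plain note
  } else if (firstUrl) {
    return `/r/${encodeURIComponent(firstUrl)}` // external URL
  }
  return null
}
```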
### Fixed

- Duplicate video embeds and stray HTML artifacts eliminated
  - VideoEmbedProcessor now processes HTML and extracts URLs in single pass
  - Placeholder indices now correctly match collected video URLs
  - Empty HTML parts no longer rendered, preventing stray characters like `">`
- Highlights loading spinner no longer spins forever when article has zero highlights
  - Loading state properly cleared when no highlights exist
  - "No highlights" message displays immediately
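The single-pass idea is shown in full in the VideoEmbedProcessor diff below; in condensed form it is this: replace each video URL with an indexed placeholder while collecting the URLs in the same order, so placeholder N always maps back to `videoUrls[N]`. The helper below is a simplified illustration, not the component's actual code.

```typescript
// Simplified single-pass extraction: processed HTML and the URL list stay in sync
// because both are produced by the same replace() walk.
function extractVideoEmbeds(html: string, videoUrlPattern: RegExp) {
  const videoUrls: string[] = []
  const processedHtml = html.replace(videoUrlPattern, (url) => {
    const index = videoUrls.length
    videoUrls.push(url)
    return `__VIDEO_EMBED_${index}__`
  })
  return { processedHtml, videoUrls }
}

// Example: extractVideoEmbeds(html, /https?:\/\/\S+\.(?:mp4|webm|mov)\b\S*/gi)
```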
## [0.10.13] - 2025-01-27

### Added

- Instant article preview when navigating from blog post cards
  - Title, image, summary, and date display immediately via navigation state
  - No skeleton loading for metadata already visible on cards
  - Article content loads seamlessly in background from eventStore or relays
- Reliable relay fallback for article fetching
  - Queries nostr.band, primal, damus, and nos.lol if initial fetch returns no events
  - Reduces "Article not found" errors
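The preview hand-off works by attaching metadata to React Router's navigation state, as the BlogPostCard and useArticleLoader diffs below show. A minimal sketch of that flow:

```typescript
import { useLocation } from 'react-router-dom'

// Shape of the data the card already renders and passes along.
interface PreviewData {
  title: string
  image?: string
  summary?: string
  published?: number
}

// Sender (see the BlogPostCard diff below):
// <Link to={href} state={{ previewData: { title, image, summary, published } }}>…</Link>

// Receiver: the article loader reads it back and can render instantly, no skeleton.
function usePreviewData(): PreviewData | undefined {
  const location = useLocation()
  return (location.state as { previewData?: PreviewData } | null)?.previewData
}
```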
### Changed

- Article loading now follows local-first controller pattern
  - Uses eventStore and queryEvents for streaming results
  - Emits content immediately on first event from store or local relays
  - Finalizes with newest version after EOSE (no artificial timeouts)
  - Background relay query continues to check for updates
- Service Worker now only registers in production builds
  - Disabled in development to avoid stale cache issues
  - Preserves PWA functionality in production
- Article fetching queries union of naddr relay hints and configured relays
  - Prevents failures when naddr contains stale or unreachable relay hints
  - Maintains fast local/hinted paths with reliable fallback
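The "emit first, finalize after EOSE" flow appears in full in the useArticleLoader diff below; reduced to its essentials it looks roughly like this. `queryEvents` here is a stand-in for the project's streaming helper and its exact signature is an assumption: the sketch assumes it invokes `onEvent` per event and resolves with everything received once the relays signal EOSE.

```typescript
import type { NostrEvent } from 'nostr-tools'

async function loadNewest(
  queryEvents: (onEvent: (e: NostrEvent) => void) => Promise<NostrEvent[]>,
  render: (e: NostrEvent) => void
) {
  let emitted = false
  const events = await queryEvents((evt) => {
    if (!emitted) {
      emitted = true
      render(evt) // show something as soon as the first copy arrives
    }
  })
  // After EOSE, settle on the newest replaceable version (highest created_at)
  const newest = events.sort((a, b) => b.created_at - a.created_at)[0]
  if (newest) render(newest)
}
```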
### Fixed

- Article loading race conditions eliminated
  - Request ID guards prevent stale fetches from overwriting current content
  - Stale highlights from previous articles no longer appear
  - Content/title mismatch when switching articles resolved
- Markdown preview clears immediately on content change
  - Forced re-mount of rendered HTML per article via stable content keys
  - Request guards in external URL loader prevent cross-article bleed
- Article re-fetching on settings changes prevented
  - Settings memoized via ref to avoid triggering effect dependencies
- Explore writings tab now shows skeletons instead of spinner when loading
  - Consistent loading UI across all views
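The request-ID guard used in the loader diffs below reduces to a small pattern: every async load bumps a counter, and late results from an older request are dropped. The hook below is an illustrative generalization, not the hooks' actual code.

```typescript
import { useEffect, useRef } from 'react'

function useGuardedLoad<T>(
  load: () => Promise<T>,
  apply: (value: T) => void,
  deps: unknown[]
) {
  const requestIdRef = useRef(0)
  useEffect(() => {
    const requestId = ++requestIdRef.current
    load().then((value) => {
      if (requestIdRef.current !== requestId) return // a newer request superseded this one
      apply(value)
    })
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, deps)
}
```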
## [0.10.12] - 2025-01-27

### Added

- Person hiking icon (fa-person-hiking) for explore navigation

### Changed

- Explore icon changed from newspaper to person hiking for better semantic meaning
- Settings button moved before explore button in sidebar navigation
- Profile avatar button now uses 44px touch target on mobile (matches other icon buttons)

### Fixed

- Web bookmarks (kind:39701) now properly deduplicate by d-tag
  - Same URL bookmarked multiple times now only appears once
  - Web bookmark IDs use coordinate format (kind:pubkey:d-tag) for consistent deduplication
- Profile avatar button sizing on mobile now matches other IconButton components
- Removed all console.log statements from bookmarkController and bookmarkProcessing
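Deduplicating replaceable web bookmarks (kind:39701) by their coordinate `kind:pubkey:d-tag` while keeping the newest copy could look like the sketch below; the interface and function names are assumptions for illustration.

```typescript
interface BookmarkEvent {
  kind: number
  pubkey: string
  created_at: number
  tags: string[][]
}

function dedupeByCoordinate<T extends BookmarkEvent>(events: T[]): T[] {
  const byCoordinate = new Map<string, T>()
  for (const evt of events) {
    const dTag = evt.tags.find(t => t[0] === 'd')?.[1] ?? ''
    const coordinate = `${evt.kind}:${evt.pubkey}:${dTag}`
    const existing = byCoordinate.get(coordinate)
    // Keep only the newest event per coordinate
    if (!existing || evt.created_at > existing.created_at) byCoordinate.set(coordinate, evt)
  }
  return [...byCoordinate.values()]
}
```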
## [0.10.11] - 2025-01-27

### Added

- Clock icon for chronological bookmark view
- Clickable highlight count to open highlights sidebar
- Dynamic bookmark filter titles based on selected filter
- Profile picture moved to first position (left-aligned) with consistent sizing

### Changed

- Default bookmark view changed to flat chronological list (newest first)
- Bookmark URL changed from `/me/reading-list` to `/me/bookmarks`
  - Router updated to handle `/me/reading-list` → `/me/bookmarks` redirect
- Me.tsx bookmarks tab now uses dynamic filter titles and chronological sorting
- Me.tsx updated to use faClock icon instead of faBars
- Removed bookmark count from section headings for cleaner display
- Hide close/collapse sidebar buttons on mobile for better UX

### Fixed

- Bookmark sorting now uses proper display time (created_at || listUpdatedAt) with nulls last
  - Robust sorting of merged bookmarks with fallback timestamps
- Corrected bookmark timestamp to use bookmark list creation time, not content creation time
  - Preserved content created_at while adding listUpdatedAt for proper sorting
  - Removed synthetic added_at field, now uses created_at from bookmark list event
- Consistent chronological sorting with useMemo optimization
- Removed unused faTimes import
- Bookmark timestamps now show sane dates using created_at fallback to listUpdatedAt
  - Guarded formatters to prevent timestamp display errors
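The "display time with nulls last" ordering described above can be sketched as a comparator that prefers `created_at`, falls back to `listUpdatedAt`, and pushes items with no timestamp to the end. This is an illustration of the described rule, not the project's actual sorting code.

```typescript
interface DisplayBookmark {
  created_at?: number
  listUpdatedAt?: number
}

function sortByDisplayTime<T extends DisplayBookmark>(bookmarks: T[]): T[] {
  const displayTime = (b: DisplayBookmark) => b.created_at ?? b.listUpdatedAt ?? null
  return [...bookmarks].sort((a, b) => {
    const ta = displayTime(a)
    const tb = displayTime(b)
    if (ta === null && tb === null) return 0
    if (ta === null) return 1  // nulls sort last
    if (tb === null) return -1
    return tb - ta             // newest first
  })
}
```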
### Refactored

- Removed excessive debug logging for cleaner console output
- Bookmark timestamp handling never defaults to "now", allows nulls and sorts nulls last
  - Renders empty when timestamp is missing instead of showing invalid dates

## [0.10.10] - 2025-10-22

### Changed

@@ -2442,7 +2590,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- Optimize relay usage following applesauce-relay best practices
- Use applesauce-react event models for better profile handling

[Unreleased]: https://github.com/dergigi/boris/compare/v0.10.10...HEAD
[Unreleased]: https://github.com/dergigi/boris/compare/v0.10.12...HEAD
[0.10.12]: https://github.com/dergigi/boris/compare/v0.10.11...v0.10.12
[0.10.11]: https://github.com/dergigi/boris/compare/v0.10.10...v0.10.11
[0.10.10]: https://github.com/dergigi/boris/compare/v0.10.9...v0.10.10
[0.10.9]: https://github.com/dergigi/boris/compare/v0.10.8...v0.10.9
[0.10.8]: https://github.com/dergigi/boris/compare/v0.10.7...v0.10.8
@@ -1,6 +1,6 @@
{
  "name": "boris",
  "version": "0.10.11",
  "version": "0.10.15",
  "description": "A minimal nostr client for bookmark management",
  "homepage": "https://read.withboris.com/",
  "type": "module",
@@ -50,6 +50,14 @@ const BlogPostCard: React.FC<BlogPostCardProps> = ({ post, href, level, readingP
  return (
    <Link
      to={href}
      state={{
        previewData: {
          title: post.title,
          image: post.image,
          summary: post.summary,
          published: post.published
        }
      }}
      className={`blog-post-card ${level ? `level-${level}` : ''}`}
      style={{ textDecoration: 'none', color: 'inherit' }}
    >
@@ -4,7 +4,7 @@ import { faGlobe, faLink } from '@fortawesome/free-solid-svg-icons'
import { IconDefinition } from '@fortawesome/fontawesome-svg-core'
import { useEventModel } from 'applesauce-react/hooks'
import { Models } from 'applesauce-core'
import { npubEncode, neventEncode } from 'nostr-tools/nip19'
import { npubEncode } from 'nostr-tools/nip19'
import { IndividualBookmark } from '../types/bookmarks'
import { extractUrlsFromContent } from '../services/bookmarkHelpers'
import { classifyUrl } from '../utils/helpers'

@@ -58,8 +58,6 @@ export const BookmarkItem: React.FC<BookmarkItemProps> = ({ bookmark, index, onS
  // Resolve author profile using applesauce
  const authorProfile = useEventModel(Models.ProfileModel, [bookmark.pubkey])
  const authorNpub = npubEncode(bookmark.pubkey)
  const isHexId = /^[0-9a-f]{64}$/i.test(bookmark.id)
  const eventNevent = isHexId ? neventEncode({ id: bookmark.id }) : undefined

  // Get display name for author
  const getAuthorDisplayName = () => {

@@ -135,7 +133,6 @@ export const BookmarkItem: React.FC<BookmarkItemProps> = ({ bookmark, index, onS
    extractedUrls,
    onSelectUrl,
    authorNpub,
    eventNevent,
    getAuthorDisplayName,
    handleReadNow,
    articleImage,

@@ -152,7 +149,6 @@ export const BookmarkItem: React.FC<BookmarkItemProps> = ({ bookmark, index, onS
    extractedUrls,
    onSelectUrl,
    authorNpub,
    eventNevent,
    getAuthorDisplayName,
    handleReadNow,
    articleSummary,
@@ -9,7 +9,7 @@ import RichContent from '../RichContent'
import { classifyUrl } from '../../utils/helpers'
import { useImageCache } from '../../hooks/useImageCache'
import { getPreviewImage, fetchOgImage } from '../../utils/imagePreview'
import { getEventUrl } from '../../config/nostrGateways'
import { naddrEncode } from 'nostr-tools/nip19'

interface CardViewProps {
  bookmark: IndividualBookmark

@@ -18,7 +18,6 @@ interface CardViewProps {
  extractedUrls: string[]
  onSelectUrl?: (url: string, bookmark?: { id: string; kind: number; tags: string[][]; pubkey: string }) => void
  authorNpub: string
  eventNevent?: string
  getAuthorDisplayName: () => string
  handleReadNow: (e: React.MouseEvent<HTMLButtonElement>) => void
  articleImage?: string

@@ -34,7 +33,6 @@ export const CardView: React.FC<CardViewProps> = ({
  extractedUrls,
  onSelectUrl,
  authorNpub,
  eventNevent,
  getAuthorDisplayName,
  handleReadNow,
  articleImage,

@@ -82,6 +80,29 @@ export const CardView: React.FC<CardViewProps> = ({
    }
  }

  // Get internal route for the bookmark
  const getInternalRoute = (): string | null => {
    if (bookmark.kind === 30023) {
      // Nostr-native article - use /a/ route
      const dTag = bookmark.tags.find(t => t[0] === 'd')?.[1]
      if (dTag) {
        const naddr = naddrEncode({
          kind: bookmark.kind,
          pubkey: bookmark.pubkey,
          identifier: dTag
        })
        return `/a/${naddr}`
      }
    } else if (bookmark.kind === 1) {
      // Note - use /e/ route
      return `/e/${bookmark.id}`
    } else if (firstUrl) {
      // External URL - use /r/ route
      return `/r/${encodeURIComponent(firstUrl)}`
    }
    return null
  }

  return (
    <div
      key={`${bookmark.id}-${index}`}

@@ -103,17 +124,15 @@ export const CardView: React.FC<CardViewProps> = ({
        <FontAwesomeIcon icon={contentTypeIcon} className="content-type-icon" />
      </span>

      {eventNevent ? (
        <a
          href={getEventUrl(eventNevent)}
          target="_blank"
          rel="noopener noreferrer"
      {getInternalRoute() ? (
        <Link
          to={getInternalRoute()!}
          className="bookmark-date-link"
          title="Open event in search"
          title="Open in app"
          onClick={(e) => e.stopPropagation()}
        >
          {formatDate(bookmark.created_at ?? bookmark.listUpdatedAt)}
        </a>
        </Link>
      ) : (
        <span className="bookmark-date">{formatDate(bookmark.created_at ?? bookmark.listUpdatedAt)}</span>
      )}
@@ -7,7 +7,7 @@ import { formatDate } from '../../utils/bookmarkUtils'
import RichContent from '../RichContent'
import { IconGetter } from './shared'
import { useImageCache } from '../../hooks/useImageCache'
import { getEventUrl } from '../../config/nostrGateways'
import { naddrEncode } from 'nostr-tools/nip19'

interface LargeViewProps {
  bookmark: IndividualBookmark

@@ -18,7 +18,6 @@ interface LargeViewProps {
  getIconForUrlType: IconGetter
  previewImage: string | null
  authorNpub: string
  eventNevent?: string
  getAuthorDisplayName: () => string
  handleReadNow: (e: React.MouseEvent<HTMLButtonElement>) => void
  articleSummary?: string

@@ -35,7 +34,6 @@ export const LargeView: React.FC<LargeViewProps> = ({
  getIconForUrlType,
  previewImage,
  authorNpub,
  eventNevent,
  getAuthorDisplayName,
  handleReadNow,
  articleSummary,

@@ -63,6 +61,30 @@ export const LargeView: React.FC<LargeViewProps> = ({
    }
  }

  // Get internal route for the bookmark
  const getInternalRoute = (): string | null => {
    const firstUrl = hasUrls ? extractedUrls[0] : null
    if (bookmark.kind === 30023) {
      // Nostr-native article - use /a/ route
      const dTag = bookmark.tags.find(t => t[0] === 'd')?.[1]
      if (dTag) {
        const naddr = naddrEncode({
          kind: bookmark.kind,
          pubkey: bookmark.pubkey,
          identifier: dTag
        })
        return `/a/${naddr}`
      }
    } else if (bookmark.kind === 1) {
      // Note - use /e/ route
      return `/e/${bookmark.id}`
    } else if (firstUrl) {
      // External URL - use /r/ route
      return `/r/${encodeURIComponent(firstUrl)}`
    }
    return null
  }

  return (
    <div
      key={`${bookmark.id}-${index}`}

@@ -136,16 +158,17 @@ export const LargeView: React.FC<LargeViewProps> = ({
        </Link>
      </span>

      {eventNevent && (
        <a
          href={getEventUrl(eventNevent)}
          target="_blank"
          rel="noopener noreferrer"
      {getInternalRoute() ? (
        <Link
          to={getInternalRoute()!}
          className="bookmark-date-link"
          title="Open in app"
          onClick={(e) => e.stopPropagation()}
        >
          {formatDate(bookmark.created_at ?? bookmark.listUpdatedAt)}
        </a>
        </Link>
      ) : (
        <span className="bookmark-date">{formatDate(bookmark.created_at ?? bookmark.listUpdatedAt)}</span>
      )}

      {/* CTA removed */}
@@ -230,6 +230,7 @@ const Bookmarks: React.FC<BookmarksProps> = ({
  useArticleLoader({
    naddr,
    relayPool,
    eventStore,
    setSelectedUrl,
    setReaderContent,
    setReaderLoading,
@@ -43,9 +43,9 @@ import { EventFactory } from 'applesauce-factory'
import { Hooks } from 'applesauce-react'
import {
  generateArticleIdentifier,
  saveReadingPosition,
  startReadingPositionStream
  saveReadingPosition
} from '../services/readingPositionService'
import { readingProgressController } from '../services/readingProgressController'
import TTSControls from './TTSControls'

interface ContentPanelProps {

@@ -134,6 +134,11 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
    currentUserPubkey,
    followedPubkeys
  })
  // Key used to force re-mount of markdown preview/render when content changes
  const contentKey = useMemo(() => {
    // Prefer selectedUrl as a stable per-article key; fallback to title+length
    return selectedUrl || `${title || ''}:${(markdown || html || '').length}`
  }, [selectedUrl, title, markdown, html])

  const { contentRef, handleSelectionEnd } = useHighlightInteractions({
    onHighlightClick,

@@ -145,8 +150,15 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
  // Get event store for reading position service
  const eventStore = Hooks.useEventStore()

  // Reading position tracking - only for text content, not videos
  const isTextContent = !loading && !!(markdown || html) && !selectedUrl?.includes('youtube') && !selectedUrl?.includes('vimeo')
  // Reading position tracking - only for text content that's loaded and long enough
  // Wait for content to load, check it's not a video, and verify it's long enough to track
  const isTextContent = useMemo(() => {
    if (loading) return false
    if (!markdown && !html) return false
    if (selectedUrl?.includes('youtube') || selectedUrl?.includes('vimeo')) return false
    if (!shouldTrackReadingProgress(html, markdown)) return false
    return true
  }, [loading, markdown, html, selectedUrl])

  // Generate article identifier for saving/loading position
  const articleIdentifier = useMemo(() => {

@@ -157,20 +169,24 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
  // Callback to save reading position
  const handleSavePosition = useCallback(async (position: number) => {
    if (!activeAccount || !relayPool || !eventStore || !articleIdentifier) {
      console.log('[reading-position] ❌ Cannot save: missing dependencies')
      return
    }
    if (!settings?.syncReadingPosition) {
      console.log('[reading-position] ⚠️ Save skipped: sync disabled in settings')
      return
    }

    // Check if content is long enough to track reading progress
    if (!shouldTrackReadingProgress(html, markdown)) {
      console.log('[reading-position] ⚠️ Save skipped: content too short')
      return
    }

    const scrollTop = window.pageYOffset || document.documentElement.scrollTop

    try {
      console.log(`[reading-position] [${new Date().toISOString()}] 🚀 Publishing position ${Math.round(position * 100)}% to relays...`)
      const factory = new EventFactory({ signer: activeAccount })
      await saveReadingPosition(
        relayPool,

@@ -183,13 +199,34 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
          scrollTop
        }
      )
      console.log(`[reading-position] [${new Date().toISOString()}] ✅ Position published successfully`)
    } catch (error) {
      console.error('[progress] ❌ ContentPanel: Failed to save reading position:', error)
      console.error(`[reading-position] [${new Date().toISOString()}] ❌ Failed to save reading position:`, error)
    }
  }, [activeAccount, relayPool, eventStore, articleIdentifier, settings?.syncReadingPosition, html, markdown])

  const { progressPercentage, saveNow } = useReadingPosition({
    enabled: isTextContent,
  // Delay enabling position tracking to ensure content is stable
  const [isTrackingEnabled, setIsTrackingEnabled] = useState(false)

  // Reset tracking when article changes
  useEffect(() => {
    setIsTrackingEnabled(false)
  }, [selectedUrl])

  // Enable tracking after content is stable
  useEffect(() => {
    if (isTextContent && !isTrackingEnabled) {
      // Wait 500ms after content loads before enabling tracking
      const timer = setTimeout(() => {
        console.log('[reading-position] ✅ Enabling tracking after stability delay')
        setIsTrackingEnabled(true)
      }, 500)
      return () => clearTimeout(timer)
    }
  }, [isTextContent, isTrackingEnabled])

  const { progressPercentage, suppressSavesFor } = useReadingPosition({
    enabled: isTrackingEnabled,
    syncEnabled: settings?.syncReadingPosition !== false,
    onSave: handleSavePosition,
    onReadingComplete: () => {

@@ -209,48 +246,109 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
  useEffect(() => {
  }, [isTextContent, settings?.syncReadingPosition, activeAccount, relayPool, eventStore, articleIdentifier, progressPercentage])

  // Load saved reading position when article loads (non-blocking, EOSE-driven)
  // Load saved reading position when article loads (using pre-loaded data from controller)
  const suppressSavesForRef = useRef(suppressSavesFor)
  useEffect(() => {
    if (!isTextContent || !activeAccount || !relayPool || !eventStore || !articleIdentifier) {
    suppressSavesForRef.current = suppressSavesFor
  }, [suppressSavesFor])

  // Track if we've successfully started restore for this article + tracking state
  // Use a composite key to ensure we only restore once per article when tracking is enabled
  const restoreKey = `${articleIdentifier}-${isTrackingEnabled}`
  const hasAttemptedRestoreRef = useRef<string | null>(null)

  useEffect(() => {
    console.log('[reading-position] 🔍 Restore effect running:', {
      isTextContent,
      isTrackingEnabled,
      hasAccount: !!activeAccount,
      articleIdentifier,
      restoreKey,
      hasAttempted: hasAttemptedRestoreRef.current
    })

    if (!isTextContent || !activeAccount || !articleIdentifier) {
      console.log('[reading-position] ⏭️ Restore skipped: missing dependencies or not text content')
      return
    }
    if (settings?.syncReadingPosition === false) {
      console.log('[reading-position] ⏭️ Restore skipped: sync disabled in settings')
      return
    }
    if (!isTrackingEnabled) {
      console.log('[reading-position] ⏭️ Restore skipped: tracking not yet enabled (waiting for content stability)')
      return
    }

    const stop = startReadingPositionStream(
      relayPool,
      eventStore,
      activeAccount.pubkey,
      articleIdentifier,
      (savedPosition) => {
        if (savedPosition && savedPosition.position > 0.05 && savedPosition.position < 1) {
          // Wait for content to be fully rendered before scrolling
          setTimeout(() => {
            const documentHeight = document.documentElement.scrollHeight
            const windowHeight = window.innerHeight
            const scrollTop = savedPosition.position * (documentHeight - windowHeight)

            window.scrollTo({
              top: scrollTop,
              behavior: 'smooth'
            })
          }, 500) // Give content time to render
        }
      }
    )

    return () => stop()
  }, [isTextContent, activeAccount, relayPool, eventStore, articleIdentifier, settings?.syncReadingPosition, selectedUrl])

  // Save position before unmounting or changing article
  useEffect(() => {
    return () => {
      if (saveNow) {
        saveNow()
      }
    // Only attempt restore once per article (after tracking is enabled)
    if (hasAttemptedRestoreRef.current === restoreKey) {
      console.log('[reading-position] ⏭️ Restore skipped: already attempted for this article')
      return
    }
  }, [saveNow, selectedUrl])

    console.log('[reading-position] 🔄 Initiating restore for article:', articleIdentifier)
    // Mark as attempted using composite key
    hasAttemptedRestoreRef.current = restoreKey

    // Get the saved position from the controller (already loaded and displayed on card)
    const savedProgress = readingProgressController.getProgress(articleIdentifier)

    if (!savedProgress || savedProgress <= 0.05 || savedProgress >= 1) {
      console.log('[reading-position] ℹ️ No position to restore (progress:', savedProgress, ')')
      return
    }

    console.log('[reading-position] 🎯 Found saved position:', Math.round(savedProgress * 100) + '%')

    // Suppress saves during restore (500ms render + 1000ms animation + 500ms buffer = 2000ms)
    if (suppressSavesForRef.current) {
      suppressSavesForRef.current(2000)
    }

    // Wait for content to be fully rendered
    setTimeout(() => {
      const docH = document.documentElement.scrollHeight
      const winH = window.innerHeight
      const maxScroll = Math.max(0, docH - winH)
      const currentTop = window.pageYOffset || document.documentElement.scrollTop
      const targetTop = savedProgress * maxScroll

      console.log('[reading-position] 📐 Restore calculation:', {
        docHeight: docH,
        winHeight: winH,
        maxScroll,
        currentTop,
        targetTop,
        targetPercent: Math.round(savedProgress * 100) + '%'
      })

      // Skip if delta is too small (< 48px or < 5%)
      const deltaPx = Math.abs(targetTop - currentTop)
      const deltaPct = maxScroll > 0 ? Math.abs((targetTop - currentTop) / maxScroll) : 0
      if (deltaPx < 48 || deltaPct < 0.05) {
        console.log('[reading-position] ⏭️ Restore skipped: delta too small (', deltaPx, 'px,', Math.round(deltaPct * 100) + '%)')
        // Allow saves immediately since no scroll happened
        if (suppressSavesForRef.current) {
          suppressSavesForRef.current(0)
        }
        return
      }

      console.log('[reading-position] 📜 Restoring scroll position (delta:', deltaPx, 'px,', Math.round(deltaPct * 100) + '%)')

      // Perform smooth animated restore
      window.scrollTo({
        top: targetTop,
        behavior: 'smooth'
      })
      console.log('[reading-position] ✅ Scroll restored to', Math.round(savedProgress * 100) + '%')
    }, 500) // Give content time to render
  }, [isTextContent, activeAccount, articleIdentifier, settings?.syncReadingPosition, selectedUrl, isTrackingEnabled, restoreKey])

  // Note: We intentionally do NOT save on unmount because:
  // 1. Browser may scroll to top during back navigation, causing 0% saves
  // 2. The auto-save with 3s debounce already captures position during reading
  // 3. Position state may not reflect actual reading position during navigation

  // Close menu when clicking outside
  useEffect(() => {

@@ -751,7 +849,7 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
    <div className="reader" style={{ '--highlight-rgb': highlightRgb } as React.CSSProperties}>
      {/* Hidden markdown preview to convert markdown to HTML */}
      {markdown && (
        <div ref={markdownPreviewRef} style={{ display: 'none' }}>
        <div ref={markdownPreviewRef} key={`preview:${contentKey}`} style={{ display: 'none' }}>
          <ReactMarkdown
            remarkPlugins={[remarkGfm]}
            rehypePlugins={[rehypeRaw, rehypePrism]}

@@ -872,6 +970,7 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
      {markdown ? (
        renderedMarkdownHtml && finalHtml ? (
          <VideoEmbedProcessor
            key={`content:${contentKey}`}
            ref={contentRef}
            html={finalHtml}
            renderVideoLinksAsEmbeds={settings?.renderVideoLinksAsEmbeds === true && !isExternalVideo}

@@ -888,6 +987,7 @@ const ContentPanel: React.FC<ContentPanelProps> = ({
        )
      ) : (
        <VideoEmbedProcessor
          key={`content:${contentKey}`}
          ref={contentRef}
          html={finalHtml || html || ''}
          renderVideoLinksAsEmbeds={settings?.renderVideoLinksAsEmbeds === true && !isExternalVideo}
@@ -1,6 +1,6 @@
import React, { useState, useEffect, useMemo, useCallback, useRef } from 'react'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { faNewspaper, faHighlighter, faUser, faUserGroup, faNetworkWired, faArrowsRotate, faSpinner } from '@fortawesome/free-solid-svg-icons'
import { faPersonHiking, faNewspaper, faHighlighter, faUser, faUserGroup, faNetworkWired, faArrowsRotate } from '@fortawesome/free-solid-svg-icons'
import IconButton from './IconButton'
import { BlogPostSkeleton, HighlightSkeleton } from './Skeletons'
import { Hooks } from 'applesauce-react'

@@ -523,8 +523,10 @@ const Explore: React.FC<ExploreProps> = ({ relayPool, eventStore, settings, acti
    )
  }
  return filteredBlogPosts.length === 0 ? (
    <div className="explore-loading" style={{ gridColumn: '1/-1', display: 'flex', justifyContent: 'center', alignItems: 'center', padding: '4rem', color: 'var(--text-secondary)' }}>
      <FontAwesomeIcon icon={faSpinner} spin size="2x" />
    <div className="explore-grid">
      {Array.from({ length: 6 }).map((_, i) => (
        <BlogPostSkeleton key={i} />
      ))}
    </div>
  ) : (
    <div className="explore-grid">

@@ -584,7 +586,7 @@ const Explore: React.FC<ExploreProps> = ({ relayPool, eventStore, settings, acti
      />
      <div className="explore-header">
        <h1>
          <FontAwesomeIcon icon={faNewspaper} />
          <FontAwesomeIcon icon={faPersonHiking} />
          Explore
        </h1>
@@ -151,7 +151,7 @@ const PWASettings: React.FC<PWASettingsProps> = ({ settings, onUpdate, onClose }
          >
            here
          </a>
          {' and '}
          {', '}
          <a
            onClick={(e) => {
              e.preventDefault()

@@ -161,6 +161,16 @@ const PWASettings: React.FC<PWASettingsProps> = ({ settings, onUpdate, onClose }
          >
            here
          </a>
          {', and '}
          <a
            onClick={(e) => {
              e.preventDefault()
              handleLinkClick('/a/naddr1qvzqqqr4gupzq3svyhng9ld8sv44950j957j9vchdktj7cxumsep9mvvjthc2pjuqq9hyetvv9uj6um9w36hq9mgjg8')
            }}
            style={{ color: 'var(--accent, #8b5cf6)', cursor: 'pointer' }}
          >
            here
          </a>
          .
        </p>
      </div>
@@ -1,7 +1,7 @@
import React from 'react'
import { useNavigate } from 'react-router-dom'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { faChevronRight, faRightFromBracket, faUserCircle, faGear, faHome, faNewspaper } from '@fortawesome/free-solid-svg-icons'
import { faChevronRight, faRightFromBracket, faUserCircle, faGear, faHome, faPersonHiking } from '@fortawesome/free-solid-svg-icons'
import { Hooks } from 'applesauce-react'
import { useEventModel } from 'applesauce-react/hooks'
import { Models } from 'applesauce-core'

@@ -58,13 +58,6 @@ const SidebarHeader: React.FC<SidebarHeaderProps> = ({ onToggleCollapse, onLogou
        ariaLabel="Home"
        variant="ghost"
      />
      <IconButton
        icon={faNewspaper}
        onClick={() => navigate('/explore')}
        title="Explore"
        ariaLabel="Explore"
        variant="ghost"
      />
      <IconButton
        icon={faGear}
        onClick={onOpenSettings}

@@ -72,6 +65,13 @@ const SidebarHeader: React.FC<SidebarHeaderProps> = ({ onToggleCollapse, onLogou
        ariaLabel="Settings"
        variant="ghost"
      />
      <IconButton
        icon={faPersonHiking}
        onClick={() => navigate('/explore')}
        title="Explore"
        ariaLabel="Explore"
        variant="ghost"
      />
      {activeAccount && (
        <IconButton
          icon={faRightFromBracket}
@@ -21,9 +21,10 @@ const VideoEmbedProcessor = forwardRef<HTMLDivElement, VideoEmbedProcessorProps>
  onMouseUp,
  onTouchEnd
}, ref) => {
  const processedHtml = useMemo(() => {
  // Process HTML and extract video URLs in a single pass to keep them in sync
  const { processedHtml, videoUrls } = useMemo(() => {
    if (!renderVideoLinksAsEmbeds || !html) {
      return html
      return { processedHtml: html, videoUrls: [] }
    }

    // Process HTML in stages: <video> blocks, <img> tags with video src, and bare video URLs

@@ -86,71 +87,19 @@ const VideoEmbedProcessor = forwardRef<HTMLDivElement, VideoEmbedProcessorProps>

    const remainingUrls = [...fileVideoUrls, ...platformVideoUrls].filter(url => !collectedUrls.includes(url))

    let processedHtml = result
    let finalHtml = result
    remainingUrls.forEach((url) => {
      const placeholder = `__VIDEO_EMBED_${placeholderIndex}__`
      processedHtml = processedHtml.replace(new RegExp(url.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'g'), placeholder)
      finalHtml = finalHtml.replace(new RegExp(url.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'g'), placeholder)
      collectedUrls.push(url)
      placeholderIndex++
    })

    // If nothing collected, return original html
    if (collectedUrls.length === 0) {
      return html
    // Return both processed HTML and collected URLs (in the same order as placeholders)
    return {
      processedHtml: collectedUrls.length > 0 ? finalHtml : html,
      videoUrls: collectedUrls
    }

    return processedHtml
  }, [html, renderVideoLinksAsEmbeds])

  const videoUrls = useMemo(() => {
    if (!renderVideoLinksAsEmbeds || !html) {
      return []
    }

    const urls: string[] = []

    // 1) Extract from <video> blocks first (video src or nested source src)
    const videoBlockPattern = /<video[^>]*>[\s\S]*?<\/video>/gi
    const videoBlocks = html.match(videoBlockPattern) || []
    videoBlocks.forEach((block) => {
      let url: string | null = null
      const videoSrcMatch = block.match(/<video[^>]*\s+src=["']?(https?:\/\/[^\s<>"']+\.(mp4|webm|ogg|mov|avi|mkv|m4v)[^\s<>"']*)["']?[^>]*>/i)
      if (videoSrcMatch && videoSrcMatch[1]) {
        url = videoSrcMatch[1]
      } else {
        const sourceSrcMatch = block.match(/<source[^>]*\s+src=["']?(https?:\/\/[^\s<>"']+\.(mp4|webm|ogg|mov|avi|mkv|m4v)[^\s<>"']*)["']?[^>]*>/i)
        if (sourceSrcMatch && sourceSrcMatch[1]) {
          url = sourceSrcMatch[1]
        }
      }
      if (url && !urls.includes(url)) urls.push(url)
    })

    // 2) Extract from <img> tags with video src
    const imgTagPattern = /<img[^>]*>/gi
    const allImgTags = html.match(imgTagPattern) || []
    allImgTags.forEach((imgTag) => {
      const srcMatch = imgTag.match(/src=["']?(https?:\/\/[^\s<>"']+\.(mp4|webm|ogg|mov|avi|mkv|m4v)[^\s<>"']*)["']?/i)
      if (srcMatch && srcMatch[1] && !urls.includes(srcMatch[1])) {
        urls.push(srcMatch[1])
      }
    })

    // 3) Extract remaining direct file URLs and platform-classified video URLs
    const fileVideoPattern = /https?:\/\/[^\s<>"']+\.(mp4|webm|ogg|mov|avi|mkv|m4v)(?:\?[^\s<>"']*)?/gi
    const fileVideoUrls: string[] = html.match(fileVideoPattern) || []
    fileVideoUrls.forEach(u => { if (!urls.includes(u)) urls.push(u) })

    const allUrlPattern = /https?:\/\/[^\s<>"']+(?=\s|>|"|'|$)/gi
    const allUrls: string[] = html.match(allUrlPattern) || []
    allUrls.forEach(u => {
      const classification = classifyUrl(u)
      if (classification.type === 'video' && !urls.includes(u)) {
        urls.push(u)
      }
    })

    return urls
  }, [html, renderVideoLinksAsEmbeds])

  // If no video embedding is enabled, just render the HTML normally

@@ -195,13 +144,16 @@ const VideoEmbedProcessor = forwardRef<HTMLDivElement, VideoEmbedProcessorProps>
          }
        }

        // Regular HTML content
        return (
          <div
            key={index}
            dangerouslySetInnerHTML={{ __html: part }}
          />
        )
        // Regular HTML content - only render if not empty
        if (part.trim()) {
          return (
            <div
              key={index}
              dangerouslySetInnerHTML={{ __html: part }}
            />
          )
        }
        return null
      })}
    </div>
  )
@@ -1,5 +1,11 @@
import { useEffect, useRef, Dispatch, SetStateAction } from 'react'
import { useLocation } from 'react-router-dom'
import { RelayPool } from 'applesauce-relay'
import type { IEventStore } from 'applesauce-core'
import { nip19 } from 'nostr-tools'
import { AddressPointer } from 'nostr-tools/nip19'
import { Helpers } from 'applesauce-core'
import { queryEvents } from '../services/dataFetch'
import { fetchArticleByNaddr } from '../services/articleService'
import { fetchHighlightsForArticle } from '../services/highlightService'
import { ReadableContent } from '../services/readerService'

@@ -7,9 +13,17 @@ import { Highlight } from '../types/highlights'
import { NostrEvent } from 'nostr-tools'
import { UserSettings } from '../services/settingsService'

interface PreviewData {
  title: string
  image?: string
  summary?: string
  published?: number
}

interface UseArticleLoaderProps {
  naddr: string | undefined
  relayPool: RelayPool | null
  eventStore?: IEventStore | null
  setSelectedUrl: (url: string) => void
  setReaderContent: (content: ReadableContent | undefined) => void
  setReaderLoading: (loading: boolean) => void

@@ -25,6 +39,7 @@ interface UseArticleLoaderProps {
export function useArticleLoader({
  naddr,
  relayPool,
  eventStore,
  setSelectedUrl,
  setReaderContent,
  setReaderLoading,

@@ -36,7 +51,18 @@ export function useArticleLoader({
  setCurrentArticle,
  settings
}: UseArticleLoaderProps) {
  const location = useLocation()
  const mountedRef = useRef(true)
  // Hold latest settings without retriggering effect
  const settingsRef = useRef<UserSettings | undefined>(settings)
  useEffect(() => {
    settingsRef.current = settings
  }, [settings])
  // Track in-flight request to prevent stale updates from previous naddr
  const currentRequestIdRef = useRef(0)

  // Extract preview data from navigation state (from blog post cards)
  const previewData = (location.state as { previewData?: PreviewData })?.previewData

  useEffect(() => {
    mountedRef.current = true

@@ -44,67 +70,204 @@ export function useArticleLoader({
    if (!relayPool || !naddr) return

    const loadArticle = async () => {
      const requestId = ++currentRequestIdRef.current
      if (!mountedRef.current) return

      setReaderLoading(true)
      setReaderContent(undefined)
      setSelectedUrl(`nostr:${naddr}`)
      setIsCollapsed(true)

      try {
        const article = await fetchArticleByNaddr(relayPool, naddr, false, settings)

        if (!mountedRef.current) return

      // If we have preview data from navigation, show it immediately (no skeleton!)
      if (previewData) {
        setReaderContent({
          title: article.title,
          markdown: article.markdown,
          image: article.image,
          summary: article.summary,
          published: article.published,
          title: previewData.title,
          markdown: '', // Will be loaded from store or relay
          image: previewData.image,
          summary: previewData.summary,
          published: previewData.published,
          url: `nostr:${naddr}`
        })

        const dTag = article.event.tags.find(t => t[0] === 'd')?.[1] || ''
        const articleCoordinate = `${article.event.kind}:${article.author}:${dTag}`

        setCurrentArticleCoordinate(articleCoordinate)
        setCurrentArticleEventId(article.event.id)
        setCurrentArticle?.(article.event)
        setReaderLoading(false)

        // Fetch highlights asynchronously without blocking article display
        setReaderLoading(false) // Turn off loading immediately - we have the preview!
      } else {
        setReaderLoading(true)
        setReaderContent(undefined)
      }

      try {
        // Decode naddr to filter
        const decoded = nip19.decode(naddr)
        if (decoded.type !== 'naddr') {
          throw new Error('Invalid naddr format')
        }
        const pointer = decoded.data as AddressPointer
        const filter = {
          kinds: [pointer.kind],
          authors: [pointer.pubkey],
          '#d': [pointer.identifier]
        }

        let firstEmitted = false
        let latestEvent: NostrEvent | null = null

        // Check eventStore first for instant load (from bookmark cards, explore, etc.)
        if (eventStore) {
          try {
            const coordinate = `${pointer.kind}:${pointer.pubkey}:${pointer.identifier}`
            const storedEvent = eventStore.getEvent?.(coordinate)
            if (storedEvent) {
              latestEvent = storedEvent as NostrEvent
              firstEmitted = true
              const title = Helpers.getArticleTitle(storedEvent) || 'Untitled Article'
              const image = Helpers.getArticleImage(storedEvent)
              const summary = Helpers.getArticleSummary(storedEvent)
              const published = Helpers.getArticlePublished(storedEvent)
              setReaderContent({
                title,
                markdown: storedEvent.content,
                image,
                summary,
                published,
                url: `nostr:${naddr}`
              })
              const dTag = storedEvent.tags.find(t => t[0] === 'd')?.[1] || ''
              const articleCoordinate = `${storedEvent.kind}:${storedEvent.pubkey}:${dTag}`
              setCurrentArticleCoordinate(articleCoordinate)
              setCurrentArticleEventId(storedEvent.id)
              setCurrentArticle?.(storedEvent)
              setReaderLoading(false)
            }
          } catch (err) {
            // Ignore store errors, fall through to relay query
          }
        }

        // Stream local-first via queryEvents; rely on EOSE (no timeouts)
        const events = await queryEvents(relayPool, filter, {
          onEvent: (evt) => {
            if (!mountedRef.current) return
            if (currentRequestIdRef.current !== requestId) return

            // Store in event store for future local reads
            try {
              // eslint-disable-next-line @typescript-eslint/no-explicit-any
              eventStore?.add?.(evt as unknown as any)
            } catch {
              // Silently ignore store errors
            }

            // Keep latest by created_at
            if (!latestEvent || evt.created_at > latestEvent.created_at) {
              latestEvent = evt
            }

            // Emit immediately on first event
            if (!firstEmitted) {
              firstEmitted = true
              const title = Helpers.getArticleTitle(evt) || 'Untitled Article'
              const image = Helpers.getArticleImage(evt)
              const summary = Helpers.getArticleSummary(evt)
              const published = Helpers.getArticlePublished(evt)
              setReaderContent({
                title,
                markdown: evt.content,
                image,
                summary,
                published,
                url: `nostr:${naddr}`
              })
              const dTag = evt.tags.find(t => t[0] === 'd')?.[1] || ''
              const articleCoordinate = `${evt.kind}:${evt.pubkey}:${dTag}`
              setCurrentArticleCoordinate(articleCoordinate)
              setCurrentArticleEventId(evt.id)
              setCurrentArticle?.(evt)
              setReaderLoading(false)
            }
          }
        })

        if (!mountedRef.current || currentRequestIdRef.current !== requestId) return

        // Finalize with newest version if it's newer than what we first rendered
        const finalEvent = (events.sort((a, b) => b.created_at - a.created_at)[0]) || latestEvent
        if (finalEvent) {
          const title = Helpers.getArticleTitle(finalEvent) || 'Untitled Article'
          const image = Helpers.getArticleImage(finalEvent)
          const summary = Helpers.getArticleSummary(finalEvent)
          const published = Helpers.getArticlePublished(finalEvent)
          setReaderContent({
            title,
            markdown: finalEvent.content,
            image,
            summary,
            published,
            url: `nostr:${naddr}`
          })

          const dTag = finalEvent.tags.find(t => t[0] === 'd')?.[1] || ''
          const articleCoordinate = `${finalEvent.kind}:${finalEvent.pubkey}:${dTag}`
          setCurrentArticleCoordinate(articleCoordinate)
          setCurrentArticleEventId(finalEvent.id)
          setCurrentArticle?.(finalEvent)
        } else {
          // As a last resort, fall back to the legacy helper (which includes cache)
          const article = await fetchArticleByNaddr(relayPool, naddr, false, settingsRef.current)
          if (!mountedRef.current || currentRequestIdRef.current !== requestId) return
          setReaderContent({
            title: article.title,
            markdown: article.markdown,
            image: article.image,
            summary: article.summary,
            published: article.published,
            url: `nostr:${naddr}`
          })
          const dTag = article.event.tags.find(t => t[0] === 'd')?.[1] || ''
          const articleCoordinate = `${article.event.kind}:${article.author}:${dTag}`
          setCurrentArticleCoordinate(articleCoordinate)
          setCurrentArticleEventId(article.event.id)
          setCurrentArticle?.(article.event)
        }

        // Fetch highlights after content is shown
        try {
          if (!mountedRef.current) return

          setHighlightsLoading(true)
          setHighlights([])
          const le = latestEvent as NostrEvent | null
          const dTag = le ? (le.tags.find((t: string[]) => t[0] === 'd')?.[1] || '') : ''
          const coord = le && dTag ? `${le.kind}:${le.pubkey}:${dTag}` : undefined
          const eventId = le ? le.id : undefined

          await fetchHighlightsForArticle(
            relayPool,
            articleCoordinate,
            article.event.id,
            (highlight) => {
              if (!mountedRef.current) return

              setHighlights((prev: Highlight[]) => {
                if (prev.some((h: Highlight) => h.id === highlight.id)) return prev
                const next = [highlight, ...prev]
                return next.sort((a, b) => b.created_at - a.created_at)
              })
            },
            settings
          )
          if (coord && eventId) {
            setHighlightsLoading(true)
            setHighlights([])
            await fetchHighlightsForArticle(
              relayPool,
              coord,
              eventId,
              (highlight) => {
                if (!mountedRef.current) return
                if (currentRequestIdRef.current !== requestId) return
                setHighlights((prev: Highlight[]) => {
                  if (prev.some((h: Highlight) => h.id === highlight.id)) return prev
                  const next = [highlight, ...prev]
                  return next.sort((a, b) => b.created_at - a.created_at)
                })
              },
              settingsRef.current
            )
          } else {
            // No article event to fetch highlights for - clear and don't show loading
            setHighlights([])
            setHighlightsLoading(false)
          }
        } catch (err) {
          console.error('Failed to fetch highlights:', err)
        } finally {
          if (mountedRef.current) {
          if (mountedRef.current && currentRequestIdRef.current === requestId) {
            setHighlightsLoading(false)
          }
        }
      } catch (err) {
        console.error('Failed to load article:', err)
        if (mountedRef.current) {
        if (mountedRef.current && currentRequestIdRef.current === requestId) {
          setReaderContent({
            title: 'Error Loading Article',
            html: `<p>Failed to load article: ${err instanceof Error ? err.message : 'Unknown error'}</p>`,

@@ -123,7 +286,8 @@ export function useArticleLoader({
  }, [
    naddr,
    relayPool,
    settings,
    eventStore,
    previewData,
    setSelectedUrl,
    setReaderContent,
    setReaderLoading,
@@ -49,6 +49,8 @@ export function useExternalUrlLoader({
  setCurrentArticleEventId
}: UseExternalUrlLoaderProps) {
  const mountedRef = useRef(true)
  // Track in-flight request to prevent stale updates when switching quickly
  const currentRequestIdRef = useRef(0)

  // Load cached URL-specific highlights from event store
  const urlFilter = useMemo(() => {

@@ -70,6 +72,7 @@
    if (!relayPool || !url) return

    const loadExternalUrl = async () => {
      const requestId = ++currentRequestIdRef.current
      if (!mountedRef.current) return

      setReaderLoading(true)

@@ -83,6 +86,7 @@
        const content = await fetchReadableContent(url)

        if (!mountedRef.current) return
        if (currentRequestIdRef.current !== requestId) return

        setReaderContent(content)
        setReaderLoading(false)

@@ -114,6 +118,7 @@
            url,
            (highlight) => {
              if (!mountedRef.current) return
              if (currentRequestIdRef.current !== requestId) return

              if (seen.has(highlight.id)) return
              seen.add(highlight.id)

@@ -131,13 +136,13 @@
        } catch (err) {
          console.error('Failed to fetch highlights:', err)
        } finally {
          if (mountedRef.current) {
          if (mountedRef.current && currentRequestIdRef.current === requestId) {
            setHighlightsLoading(false)
          }
        }
      } catch (err) {
        console.error('Failed to load external URL:', err)
        if (mountedRef.current) {
        if (mountedRef.current && currentRequestIdRef.current === requestId) {
          const filename = getFilenameFromUrl(url)
          setReaderContent({
            title: filename,
@@ -20,9 +20,11 @@ export const useMarkdownToHTML = (
  const [processedMarkdown, setProcessedMarkdown] = useState<string>('')

  useEffect(() => {
    // Always clear previous render immediately to avoid showing stale content while processing
    setRenderedHtml('')
    setProcessedMarkdown('')

    if (!markdown) {
      setRenderedHtml('')
      setProcessedMarkdown('')
      return
    }
@@ -7,7 +7,6 @@ interface UseReadingPositionOptions {
|
||||
readingCompleteThreshold?: number // Default 0.95 (95%) - matches filter threshold
|
||||
syncEnabled?: boolean // Whether to sync positions to Nostr
|
||||
onSave?: (position: number) => void // Callback for saving position
|
||||
autoSaveInterval?: number // Auto-save interval in ms (default 5000)
|
||||
completionHoldMs?: number // How long to hold at 100% before firing complete (default 2000)
|
||||
}
|
||||
|
||||
@@ -18,7 +17,6 @@ export const useReadingPosition = ({
|
||||
readingCompleteThreshold = 0.95, // Match filter threshold for consistency
|
||||
syncEnabled = false,
|
||||
onSave,
|
||||
autoSaveInterval = 5000,
|
||||
completionHoldMs = 2000
|
||||
}: UseReadingPositionOptions = {}) => {
|
||||
const [position, setPosition] = useState(0)
|
||||
@@ -30,10 +28,27 @@ export const useReadingPosition = ({
|
||||
const hasSavedOnce = useRef(false)
|
||||
const completionTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null)
|
||||
const lastSavedAtRef = useRef<number>(0)
|
||||
const suppressUntilRef = useRef<number>(0)
|
||||
const syncEnabledRef = useRef(syncEnabled)
|
||||
const onSaveRef = useRef(onSave)
|
||||
const scheduleSaveRef = useRef<((pos: number) => void) | null>(null)
|
||||
|
||||
// Debounced save function
|
||||
// Keep refs in sync with props
|
||||
useEffect(() => {
|
||||
syncEnabledRef.current = syncEnabled
|
||||
onSaveRef.current = onSave
|
||||
}, [syncEnabled, onSave])
|
||||
|
||||
// Suppress auto-saves for a given duration (used after programmatic restore)
|
||||
const suppressSavesFor = useCallback((ms: number) => {
|
||||
const until = Date.now() + ms
|
||||
suppressUntilRef.current = until
|
||||
console.log(`[reading-position] [${new Date().toISOString()}] 🛡️ Suppressing saves for ${ms}ms until ${new Date(until).toISOString()}`)
|
||||
}, [])
|
||||
|
||||
// Debounced save function - simple 2s debounce
|
||||
const scheduleSave = useCallback((currentPosition: number) => {
if (!syncEnabled || !onSave) {
if (!syncEnabledRef.current || !onSaveRef.current) {
return
}

@@ -43,10 +58,11 @@ export const useReadingPosition = ({
clearTimeout(saveTimerRef.current)
saveTimerRef.current = null
}
console.log(`[reading-position] [${new Date().toISOString()}] 💾 Instant save at 100% completion`)
lastSavedPosition.current = 1
hasSavedOnce.current = true
lastSavedAtRef.current = Date.now()
onSave(1)
onSaveRef.current(1)
return
}

@@ -54,62 +70,54 @@ export const useReadingPosition = ({
const MIN_DELTA = 0.05
const hasSignificantChange = Math.abs(currentPosition - lastSavedPosition.current) >= MIN_DELTA

// Enforce a minimum interval between saves (15s) to avoid spamming
const MIN_INTERVAL_MS = 15000
const nowMs = Date.now()
const enoughTimeElapsed = nowMs - lastSavedAtRef.current >= MIN_INTERVAL_MS

// Allow the very first meaningful save (when crossing 5%) regardless of interval
const isFirstMeaningful = !hasSavedOnce.current && currentPosition >= MIN_DELTA

if (!hasSignificantChange && !isFirstMeaningful) {
if (!hasSignificantChange) {
return
}

// If interval hasn't elapsed yet, delay until autoSaveInterval but still cap frequency
if (!enoughTimeElapsed && !isFirstMeaningful) {
// Clear and reschedule within the remaining window, but not sooner than MIN_INTERVAL_MS
if (saveTimerRef.current) {
clearTimeout(saveTimerRef.current)
}
const remaining = Math.max(0, MIN_INTERVAL_MS - (nowMs - lastSavedAtRef.current))
const delay = Math.max(autoSaveInterval, remaining)
saveTimerRef.current = setTimeout(() => {
lastSavedPosition.current = currentPosition
hasSavedOnce.current = true
lastSavedAtRef.current = Date.now()
onSave(currentPosition)
}, delay)
return
}

// Clear existing timer
// Clear any existing timer and schedule new save
if (saveTimerRef.current) {
clearTimeout(saveTimerRef.current)
}

// Schedule new save using the larger of autoSaveInterval and MIN_INTERVAL_MS
const delay = Math.max(autoSaveInterval, MIN_INTERVAL_MS)
const DEBOUNCE_MS = 3000 // Save max every 3 seconds
saveTimerRef.current = setTimeout(() => {
console.log(`[reading-position] [${new Date().toISOString()}] 💾 Auto-save at ${Math.round(currentPosition * 100)}%`)
lastSavedPosition.current = currentPosition
hasSavedOnce.current = true
lastSavedAtRef.current = Date.now()
onSave(currentPosition)
}, delay)
}, [syncEnabled, onSave, autoSaveInterval])
if (onSaveRef.current) {
onSaveRef.current(currentPosition)
}
saveTimerRef.current = null
}, DEBOUNCE_MS)
}, [])

// Store scheduleSave in ref for use in scroll handler
useEffect(() => {
scheduleSaveRef.current = scheduleSave
}, [scheduleSave])

// Immediate save function
const saveNow = useCallback(() => {
if (!syncEnabled || !onSave) return
if (!syncEnabledRef.current || !onSaveRef.current) return

// Check suppression even for saveNow (e.g., during restore)
if (Date.now() < suppressUntilRef.current) {
const remainingMs = suppressUntilRef.current - Date.now()
console.log(`[reading-position] [${new Date().toISOString()}] ⏭️ saveNow() suppressed (${remainingMs}ms remaining) at ${Math.round(positionRef.current * 100)}%`)
return
}

if (saveTimerRef.current) {
clearTimeout(saveTimerRef.current)
saveTimerRef.current = null
}
lastSavedPosition.current = position
console.log(`[reading-position] [${new Date().toISOString()}] 💾 saveNow() called at ${Math.round(positionRef.current * 100)}%`)
lastSavedPosition.current = positionRef.current
hasSavedOnce.current = true
lastSavedAtRef.current = Date.now()
onSave(position)
}, [syncEnabled, onSave, position])
onSaveRef.current(positionRef.current)
}, [])

useEffect(() => {
if (!enabled) return
@@ -123,21 +131,29 @@ export const useReadingPosition = ({
const windowHeight = window.innerHeight
const documentHeight = document.documentElement.scrollHeight

// Ignore if document is too small (likely during page transition)
if (documentHeight < 100) return

// Calculate position based on how much of the content has been scrolled through
// Add a small threshold (5px) to account for rounding and make it easier to reach 100%
const maxScroll = documentHeight - windowHeight
const scrollProgress = maxScroll > 0 ? scrollTop / maxScroll : 0

// If we're within 5px of the bottom, consider it 100%
const isAtBottom = scrollTop + windowHeight >= documentHeight - 5
// Only consider it 100% if we're truly at the bottom AND have scrolled significantly
// This prevents false 100% during page transitions
const isAtBottom = scrollTop + windowHeight >= documentHeight - 5 && scrollTop > 100
const clampedProgress = isAtBottom ? 1 : Math.max(0, Math.min(1, scrollProgress))

setPosition(clampedProgress)
positionRef.current = clampedProgress
onPositionChange?.(clampedProgress)

// Schedule auto-save if sync is enabled
scheduleSave(clampedProgress)
// Schedule auto-save if sync is enabled (unless suppressed)
if (Date.now() >= suppressUntilRef.current) {
scheduleSaveRef.current?.(clampedProgress)
} else {
const remainingMs = suppressUntilRef.current - Date.now()
console.log(`[reading-position] [${new Date().toISOString()}] 🛡️ Save suppressed (${remainingMs}ms remaining) at ${Math.round(clampedProgress * 100)}%`)
}

// Completion detection with 2s hold at 100%
if (!hasTriggeredComplete.current) {
@@ -180,15 +196,24 @@ export const useReadingPosition = ({
window.removeEventListener('scroll', handleScroll)
window.removeEventListener('resize', handleScroll)

// Clear save timer on unmount
if (saveTimerRef.current) {
// Flush pending save before unmount (don't lose progress if navigating away during debounce window)
if (saveTimerRef.current && syncEnabledRef.current && onSaveRef.current) {
clearTimeout(saveTimerRef.current)
saveTimerRef.current = null

// Only flush if we have unsaved progress (position differs from last saved)
const hasUnsavedProgress = Math.abs(positionRef.current - lastSavedPosition.current) >= 0.05
if (hasUnsavedProgress && Date.now() >= suppressUntilRef.current) {
console.log(`[reading-position] [${new Date().toISOString()}] 💾 Flushing pending save on unmount at ${Math.round(positionRef.current * 100)}%`)
onSaveRef.current(positionRef.current)
}
}

if (completionTimerRef.current) {
clearTimeout(completionTimerRef.current)
}
}
}, [enabled, onPositionChange, onReadingComplete, readingCompleteThreshold, scheduleSave, completionHoldMs])
}, [enabled, onPositionChange, onReadingComplete, readingCompleteThreshold, completionHoldMs])

// Reset reading complete state when enabled changes
useEffect(() => {
@@ -208,6 +233,7 @@ export const useReadingPosition = ({
position,
isReadingComplete,
progressPercentage: Math.round(position * 100),
saveNow
saveNow,
suppressSavesFor
}
}

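For orientation, here is a minimal sketch of how a reader view might consume the hook above: restore a synced position, then suppress auto-saves for the 1.5s window described in the changelog so the programmatic scroll does not feed back into a save. The prop and return names (`enabled`, `syncEnabled`, `onSave`, `suppressSavesFor`, `saveNow`) come from the diff; the surrounding wiring is an assumption, not the app's actual component.

// Illustrative sketch only - not part of the diff above.
import { useCallback } from 'react'
import { useReadingPosition } from './useReadingPosition'

function useRestoredReadingPosition(
  savedFraction: number | null,
  onSave: (pos: number) => void
) {
  const { suppressSavesFor, saveNow, progressPercentage } = useReadingPosition({
    enabled: true,
    syncEnabled: true,
    onSave
  })

  const restore = useCallback(() => {
    if (savedFraction == null) return
    // Mirror the hook's suppression window so the restore scroll doesn't trigger a save
    suppressSavesFor(1500)
    // Same progress model as handleScroll: fraction of (scrollHeight - innerHeight)
    const maxScroll = document.documentElement.scrollHeight - window.innerHeight
    window.scrollTo({ top: savedFraction * Math.max(0, maxScroll), behavior: 'auto' })
  }, [savedFraction, suppressSavesFor])

  return { restore, saveNow, progressPercentage }
}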
@@ -5,13 +5,12 @@ import './styles/tailwind.css'
import './index.css'
import 'react-loading-skeleton/dist/skeleton.css'

// Register Service Worker for PWA functionality
if ('serviceWorker' in navigator) {
// Register Service Worker for PWA functionality (production only)
if ('serviceWorker' in navigator && import.meta.env.PROD) {
window.addEventListener('load', () => {
navigator.serviceWorker
.register('/sw.js', { type: 'module' })
.register('/sw.js')
.then(registration => {

// Check for updates periodically
setInterval(() => {
registration.update()
@@ -24,8 +23,6 @@ if ('serviceWorker' in navigator) {
newWorker.addEventListener('statechange', () => {
if (newWorker.state === 'installed' && navigator.serviceWorker.controller) {
// New service worker available

// Optionally show a toast notification
const updateAvailable = new CustomEvent('sw-update-available')
window.dispatchEvent(updateAvailable)
}

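The registration above dispatches a `sw-update-available` CustomEvent once a new worker is installed while the page is still controlled by an older one. A minimal sketch of a listener follows; the `confirm()` prompt is only a stand-in for whatever toast or banner the app actually shows.

// Illustrative sketch only - reacting to the event dispatched above.
window.addEventListener('sw-update-available', () => {
  // A real app would likely show a toast; confirm() keeps the sketch dependency-free.
  if (window.confirm('A new version is available. Reload now?')) {
    window.location.reload()
  }
})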
@@ -97,10 +97,10 @@ export async function fetchArticleByNaddr(

const pointer = decoded.data as AddressPointer

// Define relays to query - prefer relays from naddr, fallback to configured relays (including local)
const baseRelays = pointer.relays && pointer.relays.length > 0
? pointer.relays
: RELAYS
// Define relays to query - use union of relay hints from naddr and configured relays
// This avoids failures when naddr contains stale/unreachable relay hints
const hintedRelays = (pointer.relays && pointer.relays.length > 0) ? pointer.relays : []
const baseRelays = Array.from(new Set<string>([...hintedRelays, ...RELAYS]))
const orderedRelays = prioritizeLocalRelays(baseRelays)
const { local: localRelays, remote: remoteRelays } = partitionRelays(orderedRelays)

@@ -114,7 +114,28 @@ export async function fetchArticleByNaddr(
// Parallel local+remote, stream immediate, collect up to first from each
const { local$, remote$ } = createParallelReqStreams(relayPool, localRelays, remoteRelays, filter, 1200, 6000)
const collected = await lastValueFrom(merge(local$.pipe(take(1)), remote$.pipe(take(1))).pipe(rxToArray()))
const events = collected as NostrEvent[]
let events = collected as NostrEvent[]

// Fallback: if nothing found, try a second round against a set of reliable public relays
if (events.length === 0) {
const reliableRelays = Array.from(new Set<string>([
'wss://relay.nostr.band',
'wss://relay.primal.net',
'wss://relay.damus.io',
'wss://nos.lol',
...remoteRelays // keep any configured remote relays
]))
const { remote$: fallback$ } = createParallelReqStreams(
relayPool,
[], // no local
reliableRelays,
filter,
1500,
12000
)
const fallbackCollected = await lastValueFrom(fallback$.pipe(take(1), rxToArray()))
events = fallbackCollected as NostrEvent[]
}

if (events.length === 0) {
throw new Error('Article not found')

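The relay-selection change above boils down to taking the union of the naddr's relay hints and the app's configured relays, so stale hints alone can never make a lookup fail. A minimal sketch of that idea, assuming nostr-tools' `nip19.decode` and a stand-in for the `RELAYS` constant:

// Illustrative sketch only - union of naddr relay hints and configured relays.
import { nip19 } from 'nostr-tools'

const CONFIGURED_RELAYS = ['wss://relay.nostr.band', 'wss://relay.damus.io'] // stand-in for RELAYS

function relaysForNaddr(naddr: string): string[] {
  const decoded = nip19.decode(naddr)
  if (decoded.type !== 'naddr') throw new Error('Not an naddr')
  const hinted = decoded.data.relays ?? []
  return Array.from(new Set([...hinted, ...CONFIGURED_RELAYS]))
}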
@@ -357,14 +357,6 @@ class BookmarkController
const bTs = (b.created_at ?? b.listUpdatedAt ?? -Infinity)
return bTs - aTs
})

// DEBUG: Show top 5 sorted bookmarks
console.log(`🔍 Top 5 bookmarks after sorting:`)
sortedBookmarks.slice(0, 5).forEach((b, i) => {
const listDate = b.listUpdatedAt ? new Date(b.listUpdatedAt * 1000).toISOString() : 'MISSING'
console.log(` ${i + 1}. listUpdatedAt: ${b.listUpdatedAt} (${listDate})`)
console.log(` content: ${(b.content || '').substring(0, 40)}`)
})

const bookmark: Bookmark = {
id: `${activeAccount.pubkey}-bookmarks`,

@@ -15,28 +15,30 @@ export function dedupeNip51Events(events: NostrEvent[]): NostrEvent[] {
}
const unique = Array.from(byId.values())

// Separate web bookmarks (kind:39701) from list-based bookmarks
const webBookmarks = unique.filter(e => e.kind === 39701)

const bookmarkLists = unique
.filter(e => e.kind === 10003 || e.kind === 30003 || e.kind === 30001)
.sort((a, b) => (b.created_at || 0) - (a.created_at || 0))
const latestBookmarkList = bookmarkLists.find(list => !list.tags?.some((t: string[]) => t[0] === 'd'))

// Deduplicate replaceable events (kind:30003, 30001, 39701) by d-tag
const byD = new Map<string, NostrEvent>()
for (const e of unique) {
if (e.kind === 10003 || e.kind === 30003 || e.kind === 30001) {
if (e.kind === 10003 || e.kind === 30003 || e.kind === 30001 || e.kind === 39701) {
const d = (e.tags || []).find((t: string[]) => t[0] === 'd')?.[1] || ''
const prev = byD.get(d)
if (!prev || (e.created_at || 0) > (prev.created_at || 0)) byD.set(d, e)
}
}

const setsAndNamedLists = Array.from(byD.values())
// Separate web bookmarks from bookmark sets/lists
const allReplaceable = Array.from(byD.values())
const webBookmarks = allReplaceable.filter(e => e.kind === 39701)
const setsAndNamedLists = allReplaceable.filter(e => e.kind !== 39701)

const out: NostrEvent[] = []
if (latestBookmarkList) out.push(latestBookmarkList)
out.push(...setsAndNamedLists)
// Add web bookmarks as individual events
// Add deduplicated web bookmarks as individual events
out.push(...webBookmarks)
return out
}

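The core of the dedup above is "newest revision wins per d identifier" for replaceable bookmark events. A standalone sketch of that rule, with the event type simplified to the fields the rule actually touches:

// Illustrative sketch only - keep the newest revision per d-tag.
type MinimalEvent = { kind: number; created_at: number; tags: string[][] }

function newestPerDTag(events: MinimalEvent[]): MinimalEvent[] {
  const byD = new Map<string, MinimalEvent>()
  for (const e of events) {
    const d = e.tags.find(t => t[0] === 'd')?.[1] ?? ''
    const prev = byD.get(d)
    if (!prev || e.created_at > prev.created_at) byD.set(d, e)
  }
  return Array.from(byD.values())
}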
@@ -133,8 +133,12 @@ export async function collectBookmarksFromEvents(

// Handle web bookmarks (kind:39701) as individual bookmarks
if (evt.kind === 39701) {
// Use coordinate format for web bookmarks to enable proper deduplication
// Web bookmarks are replaceable events (kind:39701:pubkey:d-tag)
const webBookmarkId = dTag ? `${evt.kind}:${evt.pubkey}:${dTag}` : evt.id

publicItemsAll.push({
id: evt.id,
id: webBookmarkId,
content: evt.content || '',
created_at: evt.created_at ?? null,
pubkey: evt.pubkey,
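The `webBookmarkId` above follows the usual addressable-event coordinate convention, `kind:pubkey:d-tag`, so multiple revisions of the same web bookmark collapse to a single bookmark entry. In isolation (field names simplified for the sketch):

// Illustrative sketch only - coordinate id for a replaceable event, falling back to the event id.
function coordinateId(evt: { kind: number; pubkey: string; id: string; tags: string[][] }): string {
  const d = evt.tags.find(t => t[0] === 'd')?.[1]
  return d ? `${evt.kind}:${evt.pubkey}:${d}` : evt.id
}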
@@ -156,13 +160,6 @@
const pub = Helpers.getPublicBookmarks(evt)
const processedPub = processApplesauceBookmarks(pub, activeAccount, false, evt.created_at)

// DEBUG: Check timestamps
if (processedPub.length > 0) {
const first = processedPub[0]
console.log(`📋 Bookmark list event kind:${evt.kind}`)
console.log(` evt.created_at: ${evt.created_at} (${evt.created_at ? new Date(evt.created_at * 1000).toISOString() : 'MISSING'})`)
console.log(` first bookmark listUpdatedAt: ${first.listUpdatedAt} (${first.listUpdatedAt ? new Date(first.listUpdatedAt * 1000).toISOString() : 'MISSING'})`)
}

publicItemsAll.push(
...processedPub.map(i => ({

@@ -178,6 +178,101 @@ export function startReadingPositionStream(
}
}

/**
 * Stabilized reading position collector
 * Collects position updates for a brief window, then emits the best one (newest, then highest progress)
 * @returns Object with stop() to cancel and onStable(cb) to register callback
 */
export function collectReadingPositionsOnce(params: {
relayPool: RelayPool
eventStore: IEventStore
pubkey: string
articleIdentifier: string
windowMs?: number
}): { stop: () => void; onStable: (cb: (pos: ReadingPosition | null) => void) => void } {
const { relayPool, eventStore, pubkey, articleIdentifier, windowMs = 700 } = params

const candidates: ReadingPosition[] = []
let stableCallback: ((pos: ReadingPosition | null) => void) | null = null
let timer: ReturnType<typeof setTimeout> | null = null
let streamStop: (() => void) | null = null
let hasEmitted = false

const emitStable = () => {
if (hasEmitted || !stableCallback) return
hasEmitted = true

if (candidates.length === 0) {
console.log('[reading-position] 📊 No candidates collected during stabilization window')
stableCallback(null)
return
}

console.log('[reading-position] 📊 Collected', candidates.length, 'position candidates:',
candidates.map(c => `${Math.round(c.position * 100)}% @${new Date(c.timestamp * 1000).toLocaleTimeString()}`).join(', '))

// Sort: newest first, then highest progress
candidates.sort((a, b) => {
const timeDiff = b.timestamp - a.timestamp
if (timeDiff !== 0) return timeDiff
return b.position - a.position
})

console.log('[reading-position] ✅ Best position selected:', Math.round(candidates[0].position * 100) + '%',
'from', new Date(candidates[0].timestamp * 1000).toLocaleTimeString())
stableCallback(candidates[0])
}

// Start streaming and collecting
console.log('[reading-position] 🎯 Starting stabilized position collector (window:', windowMs, 'ms)')
streamStop = startReadingPositionStream(
relayPool,
eventStore,
pubkey,
articleIdentifier,
(pos) => {
if (hasEmitted) return
if (!pos) {
console.log('[reading-position] 📥 Received null position')
return
}
if (pos.position <= 0.05 || pos.position >= 1) {
console.log('[reading-position] 🚫 Ignoring position', Math.round(pos.position * 100) + '% (outside 5%-100% range)')
return
}

console.log('[reading-position] 📥 Received position candidate:', Math.round(pos.position * 100) + '%',
'from', new Date(pos.timestamp * 1000).toLocaleTimeString())
candidates.push(pos)

// Schedule one-shot emission if not already scheduled
if (!timer) {
console.log('[reading-position] ⏰ Starting', windowMs, 'ms stabilization timer')
timer = setTimeout(() => {
emitStable()
if (streamStop) streamStop()
}, windowMs)
}
}
)

return {
stop: () => {
if (timer) {
clearTimeout(timer)
timer = null
}
if (streamStop) {
streamStop()
streamStop = null
}
},
onStable: (cb) => {
stableCallback = cb
}
}
}
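A minimal usage sketch for the collector above, assuming `relayPool`, `eventStore`, `pubkey`, and `articleIdentifier` are available from app context; the scroll restore mirrors the progress model used elsewhere in this changeset.

// Illustrative sketch only - consuming collectReadingPositionsOnce from a reader view.
const collector = collectReadingPositionsOnce({
  relayPool,
  eventStore,
  pubkey,
  articleIdentifier,
  windowMs: 700
})

collector.onStable(pos => {
  if (!pos) return
  const maxScroll = document.documentElement.scrollHeight - window.innerHeight
  // Instant scroll; the caller would also suppress auto-saves around this (see useReadingPosition)
  window.scrollTo({ top: pos.position * Math.max(0, maxScroll), behavior: 'auto' })
})

// Later, on unmount or route change:
collector.stop()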

/**
 * Load reading position from Nostr (kind 39802)
 * @deprecated Use startReadingPositionStream for non-blocking behavior

@@ -114,6 +114,16 @@
transition: all 0.2s ease;
}

/* Mobile touch target improvements */
@media (max-width: 768px) {
.profile-avatar-button {
min-width: var(--min-touch-target);
min-height: var(--min-touch-target);
width: var(--min-touch-target);
height: var(--min-touch-target);
}
}

.profile-avatar-button:hover {
background: var(--color-bg-hover);
border-color: var(--color-border);