= ({
)}
+ {/* Reading progress indicator for articles - shown only if there's progress */}
+ {isArticle && readingProgress !== undefined && readingProgress > 0 && (
+
+ )}
+
diff --git a/src/components/Bookmarks.tsx b/src/components/Bookmarks.tsx
index 8aceb36c..21ab6dbf 100644
--- a/src/components/Bookmarks.tsx
+++ b/src/components/Bookmarks.tsx
@@ -52,7 +52,8 @@ const Bookmarks: React.FC = ({ relayPool, onLogout }) => {
const meTab = location.pathname === '/me' ? 'highlights' :
location.pathname === '/me/highlights' ? 'highlights' :
location.pathname === '/me/reading-list' ? 'reading-list' :
- location.pathname === '/me/archive' ? 'archive' :
+ location.pathname.startsWith('/me/reads') ? 'reads' :
+ location.pathname === '/me/links' ? 'links' :
location.pathname === '/me/writings' ? 'writings' : 'highlights'
// Extract tab from profile routes
diff --git a/src/components/Me.tsx b/src/components/Me.tsx
index d3502cea..d35d2a96 100644
--- a/src/components/Me.tsx
+++ b/src/components/Me.tsx
@@ -1,16 +1,17 @@
import React, { useState, useEffect } from 'react'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
-import { faSpinner, faHighlighter, faBookmark, faList, faThLarge, faImage, faPenToSquare } from '@fortawesome/free-solid-svg-icons'
+import { faHighlighter, faBookmark, faList, faThLarge, faImage, faPenToSquare, faLink } from '@fortawesome/free-solid-svg-icons'
import { Hooks } from 'applesauce-react'
import { BlogPostSkeleton, HighlightSkeleton, BookmarkSkeleton } from './Skeletons'
import { RelayPool } from 'applesauce-relay'
import { nip19 } from 'nostr-tools'
-import { useNavigate } from 'react-router-dom'
+import { useNavigate, useParams } from 'react-router-dom'
import { Highlight } from '../types/highlights'
import { HighlightItem } from './HighlightItem'
import { fetchHighlights } from '../services/highlightService'
import { fetchBookmarks } from '../services/bookmarkService'
-import { fetchReadArticlesWithData } from '../services/libraryService'
+import { fetchAllReads, ReadItem } from '../services/readsService'
+import { fetchLinks } from '../services/linksService'
import { BlogPostPreview, fetchBlogPostsFromAuthors } from '../services/exploreService'
import { RELAYS } from '../config/relays'
import { Bookmark, IndividualBookmark } from '../types/bookmarks'
@@ -19,15 +20,18 @@ import BlogPostCard from './BlogPostCard'
import { BookmarkItem } from './BookmarkItem'
import IconButton from './IconButton'
import { ViewMode } from './Bookmarks'
-import { getCachedMeData, setCachedMeData, updateCachedHighlights } from '../services/meCache'
+import { getCachedMeData, updateCachedHighlights } from '../services/meCache'
import { faBooks } from '../icons/customIcons'
import { usePullToRefresh } from 'use-pull-to-refresh'
import RefreshIndicator from './RefreshIndicator'
import { groupIndividualBookmarks, hasContent } from '../utils/bookmarkUtils'
import BookmarkFilters, { BookmarkFilterType } from './BookmarkFilters'
import { filterBookmarksByType } from '../utils/bookmarkTypeClassifier'
-import { generateArticleIdentifier, loadReadingPosition } from '../services/readingPositionService'
-import ArchiveFilters, { ArchiveFilterType } from './ArchiveFilters'
+import ReadingProgressFilters, { ReadingProgressFilterType } from './ReadingProgressFilters'
+import { filterByReadingProgress } from '../utils/readingProgressUtils'
+import { deriveReadsFromBookmarks } from '../utils/readsFromBookmarks'
+import { deriveLinksFromBookmarks } from '../utils/linksFromBookmarks'
+import { mergeReadItem } from '../utils/readItemMerge'
interface MeProps {
relayPool: RelayPool
@@ -35,12 +39,15 @@ interface MeProps {
pubkey?: string // Optional pubkey for viewing other users' profiles
}
-type TabType = 'highlights' | 'reading-list' | 'archive' | 'writings'
+type TabType = 'highlights' | 'reading-list' | 'reads' | 'links' | 'writings'
+
+// Valid reading progress filters
+const VALID_FILTERS: ReadingProgressFilterType[] = ['all', 'unopened', 'started', 'reading', 'completed']
const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: propPubkey }) => {
const activeAccount = Hooks.useActiveAccount()
- const eventStore = Hooks.useEventStore()
const navigate = useNavigate()
+ const { filter: urlFilter } = useParams<{ filter?: string }>()
const [activeTab, setActiveTab] = useState(propActiveTab || 'highlights')
// Use provided pubkey or fall back to active account
@@ -48,14 +55,22 @@ const Me: React.FC = ({ relayPool, activeTab: propActiveTab, pubkey: pr
const isOwnProfile = !propPubkey || (activeAccount?.pubkey === propPubkey)
const [highlights, setHighlights] = useState<Highlight[]>([])
const [bookmarks, setBookmarks] = useState<Bookmark[]>([])
- const [readArticles, setReadArticles] = useState<BlogPostPreview[]>([])
+ const [reads, setReads] = useState<ReadItem[]>([])
+ const [, setReadsMap] = useState<Map<string, ReadItem>>(new Map())
+ const [links, setLinks] = useState<ReadItem[]>([])
+ const [, setLinksMap] = useState<Map<string, ReadItem>>(new Map())
const [writings, setWritings] = useState<BlogPostPreview[]>([])
const [loading, setLoading] = useState(true)
+ const [loadedTabs, setLoadedTabs] = useState<Set<TabType>>(new Set())
const [viewMode, setViewMode] = useState<ViewMode>('cards')
const [refreshTrigger, setRefreshTrigger] = useState(0)
const [bookmarkFilter, setBookmarkFilter] = useState<BookmarkFilterType>('all')
- const [archiveFilter, setArchiveFilter] = useState<ArchiveFilterType>('all')
- const [readingPositions, setReadingPositions] = useState<Map<string, number>>(new Map())
+
+ // Initialize reading progress filter from URL param
+ const initialFilter = urlFilter && VALID_FILTERS.includes(urlFilter as ReadingProgressFilterType)
+ ? (urlFilter as ReadingProgressFilterType)
+ : 'all'
+ const [readingProgressFilter, setReadingProgressFilter] = useState(initialFilter)
// Update local state when prop changes
useEffect(() => {
@@ -64,131 +79,246 @@ const Me: React.FC = ({ relayPool, activeTab: propActiveTab, pubkey: pr
}
}, [propActiveTab])
+ // Sync filter state with URL changes
useEffect(() => {
- const loadData = async () => {
- if (!viewingPubkey) {
- setLoading(false)
- return
- }
+ const filterFromUrl = urlFilter && VALID_FILTERS.includes(urlFilter as ReadingProgressFilterType)
+ ? (urlFilter as ReadingProgressFilterType)
+ : 'all'
+ setReadingProgressFilter(filterFromUrl)
+ }, [urlFilter])
+ // Handler to change reading progress filter and update URL
+ const handleReadingProgressFilterChange = (filter: ReadingProgressFilterType) => {
+ setReadingProgressFilter(filter)
+ if (activeTab === 'reads') {
+ if (filter === 'all') {
+ navigate('/me/reads', { replace: true })
+ } else {
+ navigate(`/me/reads/${filter}`, { replace: true })
+ }
+ }
+ }
+
+ // Tab-specific loading functions
+ const loadHighlightsTab = async () => {
+ if (!viewingPubkey) return
+
+ // Only show loading skeleton if tab hasn't been loaded yet
+ const hasBeenLoaded = loadedTabs.has('highlights')
+
+ try {
+ if (!hasBeenLoaded) setLoading(true)
+ const userHighlights = await fetchHighlights(relayPool, viewingPubkey)
+ setHighlights(userHighlights)
+ setLoadedTabs(prev => new Set(prev).add('highlights'))
+ } catch (err) {
+ console.error('Failed to load highlights:', err)
+ } finally {
+ if (!hasBeenLoaded) setLoading(false)
+ }
+ }
+
+ const loadWritingsTab = async () => {
+ if (!viewingPubkey) return
+
+ const hasBeenLoaded = loadedTabs.has('writings')
+
+ try {
+ if (!hasBeenLoaded) setLoading(true)
+ const userWritings = await fetchBlogPostsFromAuthors(relayPool, [viewingPubkey], RELAYS)
+ setWritings(userWritings)
+ setLoadedTabs(prev => new Set(prev).add('writings'))
+ } catch (err) {
+ console.error('Failed to load writings:', err)
+ } finally {
+ if (!hasBeenLoaded) setLoading(false)
+ }
+ }
+
+ const loadReadingListTab = async () => {
+ if (!viewingPubkey || !isOwnProfile || !activeAccount) return
+
+ const hasBeenLoaded = loadedTabs.has('reading-list')
+
+ try {
+ if (!hasBeenLoaded) setLoading(true)
try {
- setLoading(true)
-
- // Seed from cache if available to avoid empty flash (own profile only)
- if (isOwnProfile) {
- const cached = getCachedMeData(viewingPubkey)
- if (cached) {
- setHighlights(cached.highlights)
- setBookmarks(cached.bookmarks)
- setReadArticles(cached.readArticles)
- }
- }
-
- // Fetch highlights and writings (public data)
- const [userHighlights, userWritings] = await Promise.all([
- fetchHighlights(relayPool, viewingPubkey),
- fetchBlogPostsFromAuthors(relayPool, [viewingPubkey], RELAYS)
- ])
-
- setHighlights(userHighlights)
- setWritings(userWritings)
-
- // Only fetch private data for own profile
- if (isOwnProfile && activeAccount) {
- const userReadArticles = await fetchReadArticlesWithData(relayPool, viewingPubkey)
- setReadArticles(userReadArticles)
-
- // Fetch bookmarks using callback pattern
- let fetchedBookmarks: Bookmark[] = []
- try {
- await fetchBookmarks(relayPool, activeAccount, (newBookmarks) => {
- fetchedBookmarks = newBookmarks
- setBookmarks(newBookmarks)
- })
- } catch (err) {
- console.warn('Failed to load bookmarks:', err)
- setBookmarks([])
- }
-
- // Update cache with all fetched data
- setCachedMeData(viewingPubkey, userHighlights, fetchedBookmarks, userReadArticles)
- } else {
- setBookmarks([])
- setReadArticles([])
- }
+ await fetchBookmarks(relayPool, activeAccount, (newBookmarks) => {
+ setBookmarks(newBookmarks)
+ })
} catch (err) {
- console.error('Failed to load data:', err)
- // No blocking error - user can pull-to-refresh
- } finally {
- setLoading(false)
+ console.warn('Failed to load bookmarks:', err)
+ setBookmarks([])
}
+ setLoadedTabs(prev => new Set(prev).add('reading-list'))
+ } catch (err) {
+ console.error('Failed to load reading list:', err)
+ } finally {
+ if (!hasBeenLoaded) setLoading(false)
}
+ }
- loadData()
- }, [relayPool, viewingPubkey, isOwnProfile, activeAccount, refreshTrigger])
-
- // Load reading positions for read articles (only for own profile)
- useEffect(() => {
- const loadPositions = async () => {
- if (!isOwnProfile || !activeAccount || !relayPool || !eventStore || readArticles.length === 0) {
- console.log('🔍 [Archive] Skipping position load:', {
- isOwnProfile,
- hasAccount: !!activeAccount,
- hasRelayPool: !!relayPool,
- hasEventStore: !!eventStore,
- articlesCount: readArticles.length
- })
- return
+ const loadReadsTab = async () => {
+ if (!viewingPubkey || !isOwnProfile || !activeAccount) return
+
+ const hasBeenLoaded = loadedTabs.has('reads')
+
+ try {
+ if (!hasBeenLoaded) setLoading(true)
+
+ // Ensure bookmarks are loaded
+ let fetchedBookmarks: Bookmark[] = bookmarks
+ if (bookmarks.length === 0) {
+ try {
+ await fetchBookmarks(relayPool, activeAccount, (newBookmarks) => {
+ fetchedBookmarks = newBookmarks
+ setBookmarks(newBookmarks)
+ })
+ } catch (err) {
+ console.warn('Failed to load bookmarks:', err)
+ fetchedBookmarks = []
+ }
}
- console.log('📊 [Archive] Loading reading positions for', readArticles.length, 'articles')
-
- const positions = new Map<string, number>()
-
- // Load positions for all read articles
- await Promise.all(
- readArticles.map(async (post) => {
- try {
- const dTag = post.event.tags.find(t => t[0] === 'd')?.[1] || ''
- const naddr = nip19.naddrEncode({
- kind: 30023,
- pubkey: post.author,
- identifier: dTag
- })
- const articleUrl = `nostr:${naddr}`
- const identifier = generateArticleIdentifier(articleUrl)
-
- console.log('🔍 [Archive] Loading position for:', post.title?.slice(0, 50), 'identifier:', identifier.slice(0, 32))
-
- const savedPosition = await loadReadingPosition(
- relayPool,
- eventStore,
- activeAccount.pubkey,
- identifier
- )
-
- if (savedPosition && savedPosition.position > 0) {
- console.log('✅ [Archive] Found position:', Math.round(savedPosition.position * 100) + '%', 'for', post.title?.slice(0, 50))
- positions.set(post.event.id, savedPosition.position)
- } else {
- console.log('❌ [Archive] No position found for:', post.title?.slice(0, 50))
- }
- } catch (error) {
- console.warn('⚠️ [Archive] Failed to load reading position for article:', error)
+ // Derive reads from bookmarks immediately
+ const initialReads = deriveReadsFromBookmarks(fetchedBookmarks)
+ const initialMap = new Map(initialReads.map(item => [item.id, item]))
+ setReadsMap(initialMap)
+ setReads(initialReads)
+ setLoadedTabs(prev => new Set(prev).add('reads'))
+ if (!hasBeenLoaded) setLoading(false)
+
+ // Background enrichment: merge reading progress and mark-as-read
+ // Only update items that are already in our map
+ fetchAllReads(relayPool, viewingPubkey, fetchedBookmarks, (item) => {
+ console.log('📈 [Reads] Enrichment item received:', {
+ id: item.id.slice(0, 20) + '...',
+ progress: item.readingProgress,
+ hasProgress: item.readingProgress !== undefined && item.readingProgress > 0
+ })
+
+ setReadsMap(prevMap => {
+ // Only update if item exists in our current map
+ if (!prevMap.has(item.id)) {
+ console.log('⚠️ [Reads] Item not in map, skipping:', item.id.slice(0, 20) + '...')
+ return prevMap
}
+
+ const newMap = new Map(prevMap)
+ const merged = mergeReadItem(newMap, item)
+ if (merged) {
+ console.log('✅ [Reads] Merged progress:', item.id.slice(0, 20) + '...', item.readingProgress)
+ // Update reads array after map is updated
+ setReads(Array.from(newMap.values()))
+ return newMap
+ }
+ return prevMap
})
- )
+ }).catch(err => console.warn('Failed to enrich reads:', err))
+
+ } catch (err) {
+ console.error('Failed to load reads:', err)
+ if (!hasBeenLoaded) setLoading(false)
+ }
+ }
- console.log('📊 [Archive] Loaded positions for', positions.size, '/', readArticles.length, 'articles')
- setReadingPositions(positions)
+ const loadLinksTab = async () => {
+ if (!viewingPubkey || !isOwnProfile || !activeAccount) return
+
+ const hasBeenLoaded = loadedTabs.has('links')
+
+ try {
+ if (!hasBeenLoaded) setLoading(true)
+
+ // Ensure bookmarks are loaded
+ let fetchedBookmarks: Bookmark[] = bookmarks
+ if (bookmarks.length === 0) {
+ try {
+ await fetchBookmarks(relayPool, activeAccount, (newBookmarks) => {
+ fetchedBookmarks = newBookmarks
+ setBookmarks(newBookmarks)
+ })
+ } catch (err) {
+ console.warn('Failed to load bookmarks:', err)
+ fetchedBookmarks = []
+ }
+ }
+
+ // Derive links from bookmarks immediately
+ const initialLinks = deriveLinksFromBookmarks(fetchedBookmarks)
+ const initialMap = new Map(initialLinks.map(item => [item.id, item]))
+ setLinksMap(initialMap)
+ setLinks(initialLinks)
+ setLoadedTabs(prev => new Set(prev).add('links'))
+ if (!hasBeenLoaded) setLoading(false)
+
+ // Background enrichment: merge reading progress and mark-as-read
+ // Only update items that are already in our map
+ fetchLinks(relayPool, viewingPubkey, (item) => {
+ setLinksMap(prevMap => {
+ // Only update if item exists in our current map
+ if (!prevMap.has(item.id)) return prevMap
+
+ const newMap = new Map(prevMap)
+ if (mergeReadItem(newMap, item)) {
+ // Update links array after map is updated
+ setLinks(Array.from(newMap.values()))
+ return newMap
+ }
+ return prevMap
+ })
+ }).catch(err => console.warn('Failed to enrich links:', err))
+
+ } catch (err) {
+ console.error('Failed to load links:', err)
+ if (!hasBeenLoaded) setLoading(false)
+ }
+ }
+
+ // Load active tab data
+ useEffect(() => {
+ if (!viewingPubkey || !activeTab) {
+ setLoading(false)
+ return
}
- loadPositions()
- }, [readArticles, isOwnProfile, activeAccount, relayPool, eventStore])
+ // Load cached data immediately if available
+ if (isOwnProfile) {
+ const cached = getCachedMeData(viewingPubkey)
+ if (cached) {
+ setHighlights(cached.highlights)
+ setBookmarks(cached.bookmarks)
+ setReads(cached.reads || [])
+ setLinks(cached.links || [])
+ }
+ }
- // Pull-to-refresh
+ // Load data for active tab (refresh in background if already loaded)
+ switch (activeTab) {
+ case 'highlights':
+ loadHighlightsTab()
+ break
+ case 'writings':
+ loadWritingsTab()
+ break
+ case 'reading-list':
+ loadReadingListTab()
+ break
+ case 'reads':
+ loadReadsTab()
+ break
+ case 'links':
+ loadLinksTab()
+ break
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [activeTab, viewingPubkey, refreshTrigger])
+
+
+ // Pull-to-refresh - reload active tab without clearing state
const { isRefreshing, pullPosition } = usePullToRefresh({
onRefresh: () => {
+ // Just trigger refresh - loaders will merge new data
setRefreshTrigger(prev => prev + 1)
},
maximumPullLength: 240,
@@ -217,6 +347,49 @@ const Me: React.FC = ({ relayPool, activeTab: propActiveTab, pubkey: pr
return `/a/${naddr}`
}
+ const getReadItemUrl = (item: ReadItem) => {
+ if (item.type === 'article') {
+ // ID is already in naddr format
+ return `/a/${item.id}`
+ } else if (item.url) {
+ return `/r/${encodeURIComponent(item.url)}`
+ }
+ return '#'
+ }
+
+ const convertReadItemToBlogPostPreview = (item: ReadItem): BlogPostPreview => {
+ if (item.event) {
+ return {
+ event: item.event,
+ title: item.title || 'Untitled',
+ summary: item.summary,
+ image: item.image,
+ published: item.published,
+ author: item.author || item.event.pubkey
+ }
+ }
+
+ // Create a mock event for external URLs
+ const mockEvent = {
+ id: item.id,
+ pubkey: item.author || '',
+ created_at: item.readingTimestamp || Math.floor(Date.now() / 1000),
+ kind: 1,
+ tags: [] as string[][],
+ content: item.title || item.url || 'Untitled',
+ sig: ''
+ } as const
+
+ return {
+ event: mockEvent as unknown as import('nostr-tools').NostrEvent,
+ title: item.title || item.url || 'Untitled',
+ summary: item.summary,
+ image: item.image,
+ published: item.published,
+ author: item.author || ''
+ }
+ }
+
const handleSelectUrl = (url: string, bookmark?: { id: string; kind: number; tags: string[][]; pubkey: string }) => {
if (bookmark && bookmark.kind === 30023) {
// For kind:30023 articles, navigate to the article route
@@ -245,29 +418,9 @@ const Me: React.FC = ({ relayPool, activeTab: propActiveTab, pubkey: pr
const groups = groupIndividualBookmarks(filteredBookmarks)
- // Apply archive filter
- const filteredReadArticles = readArticles.filter(post => {
- const position = readingPositions.get(post.event.id)
-
- switch (archiveFilter) {
- case 'to-read':
- // No position or 0% progress
- return !position || position === 0
- case 'reading':
- // Has some progress but not completed (0 < position < 1)
- return position !== undefined && position > 0 && position < 0.95
- case 'completed':
- // 95% or more read (we consider 95%+ as completed)
- return position !== undefined && position >= 0.95
- case 'marked':
- // Manually marked as read (in archive but no reading position data)
- // These are articles that were marked via the emoji reaction
- return !position || position === 0
- case 'all':
- default:
- return true
- }
- })
+ // Apply reading progress filter
+ const filteredReads = filterByReadingProgress(reads, readingProgressFilter)
+ const filteredLinks = filterByReadingProgress(links, readingProgressFilter)
const sections: Array<{ key: string; title: string; items: IndividualBookmark[] }> = [
{ key: 'private', title: 'Private Bookmarks', items: groups.privateItems },
{ key: 'public', title: 'Public Bookmarks', items: groups.publicItems },
@@ -276,7 +429,7 @@ const Me: React.FC = ({ relayPool, activeTab: propActiveTab, pubkey: pr
]
// Show content progressively - no blocking error screens
- const hasData = highlights.length > 0 || bookmarks.length > 0 || readArticles.length > 0 || writings.length > 0
+ const hasData = highlights.length > 0 || bookmarks.length > 0 || reads.length > 0 || links.length > 0 || writings.length > 0
const showSkeletons = loading && !hasData
const renderTabContent = () => {
@@ -291,9 +444,9 @@ const Me: React.FC = ({ relayPool, activeTab: propActiveTab, pubkey: pr
)
}
- return highlights.length === 0 ? (
+ return highlights.length === 0 && !loading ? (
-
+ No highlights yet.
) : (
@@ -320,9 +473,9 @@ const Me: React.FC = ({ relayPool, activeTab: propActiveTab, pubkey: pr
)
}
- return allIndividualBookmarks.length === 0 ? (
+ return allIndividualBookmarks.length === 0 && !loading ? (
-
+ No bookmarks yet.
) : (
@@ -386,8 +539,9 @@ const Me: React.FC = ({ relayPool, activeTab: propActiveTab, pubkey: pr
)
- case 'archive':
- if (showSkeletons) {
+ case 'reads':
+ // Show loading skeletons only while initially loading
+ if (loading && !loadedTabs.has('reads')) {
return (
{Array.from({ length: 6 }).map((_, i) => (
@@ -396,32 +550,84 @@ const Me: React.FC = ({ relayPool, activeTab: propActiveTab, pubkey: pr
)
}
- return readArticles.length === 0 ? (
-
-
-
- ) : (
+
+ // Show empty state if loaded but no reads
+ if (reads.length === 0 && loadedTabs.has('reads')) {
+ return (
+
+ No articles read yet.
+
+ )
+ }
+
+ // Show reads with filters
+ return (
<>
- {readArticles.length > 0 && (
-
- )}
- {filteredReadArticles.length === 0 ? (
+
+ {filteredReads.length === 0 ? (
No articles match this filter.
) : (
- {filteredReadArticles.map((post) => (
-
- ))}
+ {filteredReads.map((item) => (
+
+ ))}
+
+ )}
+ >
+ )
+
+ case 'links':
+ // Show loading skeletons only while initially loading
+ if (loading && !loadedTabs.has('links')) {
+ return (
+
+ {Array.from({ length: 6 }).map((_, i) => (
+
+ ))}
+
+ )
+ }
+
+ // Show empty state if loaded but no links
+ if (links.length === 0 && loadedTabs.has('links')) {
+ return (
+
+ No links with reading progress yet.
+
+ )
+ }
+
+ // Show links with filters
+ return (
+ <>
+
+ {filteredLinks.length === 0 ? (
+
+ No links match this filter.
+
+ ) : (
+
+ {filteredLinks.map((item) => (
+
+ ))}
)}
>
@@ -437,9 +643,9 @@ const Me: React.FC = ({ relayPool, activeTab: propActiveTab, pubkey: pr
)
}
- return writings.length === 0 ? (
+ return writings.length === 0 && !loading ? (
-
+ No articles written yet.
) : (
@@ -487,12 +693,20 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
Bookmarks
navigate('/me/archive')}
+ className={`me-tab ${activeTab === 'reads' ? 'active' : ''}`}
+ data-tab="reads"
+ onClick={() => navigate('/me/reads')}
>
- Archive
+ Reads
+
+ navigate('/me/links')}
+ >
+
+ Links
>
)}
diff --git a/src/components/ReadingProgressFilters.tsx b/src/components/ReadingProgressFilters.tsx
new file mode 100644
index 00000000..6d69ccf2
--- /dev/null
+++ b/src/components/ReadingProgressFilters.tsx
@@ -0,0 +1,47 @@
+import React from 'react'
+import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
+import { faBookOpen, faCheckCircle, faAsterisk } from '@fortawesome/free-solid-svg-icons'
+import { faEnvelope, faEnvelopeOpen } from '@fortawesome/free-regular-svg-icons'
+
+export type ReadingProgressFilterType = 'all' | 'unopened' | 'started' | 'reading' | 'completed'
+
+interface ReadingProgressFiltersProps {
+ selectedFilter: ReadingProgressFilterType
+ onFilterChange: (filter: ReadingProgressFilterType) => void
+}
+
+const ReadingProgressFilters: React.FC<ReadingProgressFiltersProps> = ({ selectedFilter, onFilterChange }) => {
+ const filters = [
+ { type: 'all' as const, icon: faAsterisk, label: 'All' },
+ { type: 'unopened' as const, icon: faEnvelope, label: 'Unopened' },
+ { type: 'started' as const, icon: faEnvelopeOpen, label: 'Started' },
+ { type: 'reading' as const, icon: faBookOpen, label: 'Reading' },
+ { type: 'completed' as const, icon: faCheckCircle, label: 'Completed' }
+ ]
+
+ return (
+
+ {filters.map(filter => {
+ const isActive = selectedFilter === filter.type
+ // Only "completed" gets green color, everything else uses default blue
+ const activeStyle = isActive && filter.type === 'completed' ? { color: '#10b981' } : undefined
+
+ return (
+ onFilterChange(filter.type)}
+ className={`filter-btn ${isActive ? 'active' : ''}`}
+ title={filter.label}
+ aria-label={`Filter by ${filter.label}`}
+ style={activeStyle}
+ >
+
+
+ )
+ })}
+
+ )
+}
+
+export default ReadingProgressFilters
+
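// Usage sketch (illustrative, not part of the patch): ReadingProgressFilters is a
// controlled component — the parent owns the selected filter and reacts to changes,
// which is how Me.tsx wires it to handleReadingProgressFilterChange. A minimal host,
// assuming it lives under src/components/:

import React, { useState } from 'react'
import ReadingProgressFilters, { ReadingProgressFilterType } from './ReadingProgressFilters'

const FilterDemo: React.FC = () => {
  // Parent owns the filter state; 'all' mirrors the default used in Me.tsx
  const [filter, setFilter] = useState<ReadingProgressFilterType>('all')

  return (
    <ReadingProgressFilters
      selectedFilter={filter}
      onFilterChange={setFilter}
    />
  )
}

export default FilterDemo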
diff --git a/src/components/ReadingProgressIndicator.tsx b/src/components/ReadingProgressIndicator.tsx
index cd47932a..9aee1ce4 100644
--- a/src/components/ReadingProgressIndicator.tsx
+++ b/src/components/ReadingProgressIndicator.tsx
@@ -19,6 +19,21 @@ export const ReadingProgressIndicator: React.FC =
}) => {
const clampedProgress = Math.min(100, Math.max(0, progress))
+ // Determine reading state based on progress (matching readingProgressUtils.ts logic)
+ const progressDecimal = clampedProgress / 100
+ const isStarted = progressDecimal > 0 && progressDecimal <= 0.10
+
+ // Determine bar color based on state
+ let barColorClass = ''
+ let barColorStyle: string | undefined = 'var(--color-primary)' // Default blue
+
+ if (isComplete) {
+ barColorClass = 'bg-green-500'
+ barColorStyle = undefined
+ } else if (isStarted) {
+ barColorStyle = 'var(--color-text)' // Neutral text color (matches card titles)
+ }
+
// Calculate left and right offsets based on sidebar states (desktop only)
const leftOffset = isSidebarCollapsed
? 'var(--sidebar-collapsed-width)'
@@ -42,14 +57,10 @@ export const ReadingProgressIndicator: React.FC =
style={{ backgroundColor: 'var(--color-border)' }}
>
@@ -60,7 +71,9 @@ export const ReadingProgressIndicator: React.FC =
className={`text-[0.625rem] font-normal min-w-[32px] text-right tabular-nums ${
isComplete ? 'text-green-500' : ''
}`}
- style={{ color: isComplete ? undefined : 'var(--color-text-muted)' }}
+ style={{
+ color: isComplete ? undefined : isStarted ? 'var(--color-text)' : 'var(--color-text-muted)'
+ }}
>
{isComplete ? '✓' : `${clampedProgress}%`}
diff --git a/src/config/kinds.ts b/src/config/kinds.ts
new file mode 100644
index 00000000..4221a07d
--- /dev/null
+++ b/src/config/kinds.ts
@@ -0,0 +1,15 @@
+// Nostr event kinds used throughout the application
+export const KINDS = {
+ Highlights: 9802, // NIP-84 user highlights
+ BlogPost: 30023, // NIP-23 long-form article
+ AppData: 30078, // NIP-78 application data (reading positions)
+ List: 30001, // NIP-51 list (addressable)
+ ListReplaceable: 30003, // NIP-51 replaceable list
+ ListSimple: 10003, // NIP-51 simple list
+ WebBookmark: 39701, // NIP-B0 web bookmark
+ ReactionToEvent: 7, // emoji reaction to event (used for mark-as-read)
+ ReactionToUrl: 17 // emoji reaction to URL (used for mark-as-read)
+} as const
+
+export type KindValue = typeof KINDS[keyof typeof KINDS]
+
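// Usage sketch (illustrative, not part of the patch): KINDS replaces bare kind
// numbers in relay filters so call sites read as intent. The pubkey below is a
// hypothetical placeholder.

import { KINDS } from '../config/kinds'

const authorPubkey = '0000000000000000000000000000000000000000000000000000000000000000'

// Long-form articles by one author, mirroring the filter used in exploreService.ts
const articleFilter = { kinds: [KINDS.BlogPost], authors: [authorPubkey], limit: 100 }

// Mark-as-read reactions (to events and to URLs), mirroring libraryService.ts
const reactionFilter = { kinds: [KINDS.ReactionToEvent, KINDS.ReactionToUrl], authors: [authorPubkey] }

console.log(articleFilter, reactionFilter)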
diff --git a/src/services/bookmarkService.ts b/src/services/bookmarkService.ts
index f53b630b..9d8a594d 100644
--- a/src/services/bookmarkService.ts
+++ b/src/services/bookmarkService.ts
@@ -15,6 +15,7 @@ import { collectBookmarksFromEvents } from './bookmarkProcessing.ts'
import { UserSettings } from './settingsService'
import { rebroadcastEvents } from './rebroadcastService'
import { queryEvents } from './dataFetch'
+import { KINDS } from '../config/kinds'
@@ -34,7 +35,7 @@ export const fetchBookmarks = async (
const rawEvents = await queryEvents(
relayPool,
- { kinds: [10003, 30003, 30001, 39701], authors: [activeAccount.pubkey] },
+ { kinds: [KINDS.ListSimple, KINDS.ListReplaceable, KINDS.List, KINDS.WebBookmark], authors: [activeAccount.pubkey] },
{}
)
console.log('📊 Raw events fetched:', rawEvents.length, 'events')
@@ -71,7 +72,7 @@ export const fetchBookmarks = async (
})
// Check specifically for Primal's "reads" list
- const primalReads = rawEvents.find(e => e.kind === 10003 && e.tags?.find((t: string[]) => t[0] === 'd' && t[1] === 'reads'))
+ const primalReads = rawEvents.find(e => e.kind === KINDS.ListSimple && e.tags?.find((t: string[]) => t[0] === 'd' && t[1] === 'reads'))
if (primalReads) {
console.log('✅ Found Primal reads list:', primalReads.id.slice(0, 8))
} else {
diff --git a/src/services/exploreService.ts b/src/services/exploreService.ts
index b4b4c122..ff486c7a 100644
--- a/src/services/exploreService.ts
+++ b/src/services/exploreService.ts
@@ -2,6 +2,7 @@ import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { Helpers } from 'applesauce-core'
import { queryEvents } from './dataFetch'
+import { KINDS } from '../config/kinds'
const { getArticleTitle, getArticleImage, getArticlePublished, getArticleSummary } = Helpers
@@ -41,7 +42,7 @@ export const fetchBlogPostsFromAuthors = async (
await queryEvents(
relayPool,
- { kinds: [30023], authors: pubkeys, limit: 100 },
+ { kinds: [KINDS.BlogPost], authors: pubkeys, limit: 100 },
{
relayUrls,
onEvent: (event: NostrEvent) => {
diff --git a/src/services/highlights/fetchByAuthor.ts b/src/services/highlights/fetchByAuthor.ts
index 7c6b71c6..011d02eb 100644
--- a/src/services/highlights/fetchByAuthor.ts
+++ b/src/services/highlights/fetchByAuthor.ts
@@ -6,6 +6,7 @@ import { prioritizeLocalRelays, partitionRelays } from '../../utils/helpers'
import { eventToHighlight, dedupeHighlights, sortHighlights } from '../highlightEventProcessor'
import { UserSettings } from '../settingsService'
import { rebroadcastEvents } from '../rebroadcastService'
+import { KINDS } from '../../config/kinds'
export const fetchHighlights = async (
relayPool: RelayPool,
@@ -21,7 +22,7 @@ export const fetchHighlights = async (
const seenIds = new Set<string>()
const local$ = localRelays.length > 0
? relayPool
- .req(localRelays, { kinds: [9802], authors: [pubkey] })
+ .req(localRelays, { kinds: [KINDS.Highlights], authors: [pubkey] })
.pipe(
onlyEvents(),
tap((event: NostrEvent) => {
@@ -36,7 +37,7 @@ export const fetchHighlights = async (
: new Observable((sub) => sub.complete())
const remote$ = remoteRelays.length > 0
? relayPool
- .req(remoteRelays, { kinds: [9802], authors: [pubkey] })
+ .req(remoteRelays, { kinds: [KINDS.Highlights], authors: [pubkey] })
.pipe(
onlyEvents(),
tap((event: NostrEvent) => {
diff --git a/src/services/libraryService.ts b/src/services/libraryService.ts
index 8818b818..d07d0d4f 100644
--- a/src/services/libraryService.ts
+++ b/src/services/libraryService.ts
@@ -2,6 +2,7 @@ import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { Helpers } from 'applesauce-core'
import { RELAYS } from '../config/relays'
+import { KINDS } from '../config/kinds'
import { MARK_AS_READ_EMOJI } from './reactionService'
import { BlogPostPreview } from './exploreService'
import { queryEvents } from './dataFetch'
@@ -29,8 +30,8 @@ export async function fetchReadArticles(
try {
// Fetch kind:7 and kind:17 reactions in parallel
const [kind7Events, kind17Events] = await Promise.all([
- queryEvents(relayPool, { kinds: [7], authors: [userPubkey] }, { relayUrls: RELAYS }),
- queryEvents(relayPool, { kinds: [17], authors: [userPubkey] }, { relayUrls: RELAYS })
+ queryEvents(relayPool, { kinds: [KINDS.ReactionToEvent], authors: [userPubkey] }, { relayUrls: RELAYS }),
+ queryEvents(relayPool, { kinds: [KINDS.ReactionToUrl], authors: [userPubkey] }, { relayUrls: RELAYS })
])
const readArticles: ReadArticle[] = []
@@ -102,7 +103,7 @@ export async function fetchReadArticlesWithData(
// Filter to only nostr-native articles (kind 30023)
const nostrArticles = readArticles.filter(
- article => article.eventKind === 30023 && article.eventId
+ article => article.eventKind === KINDS.BlogPost && article.eventId
)
if (nostrArticles.length === 0) {
@@ -114,7 +115,7 @@ export async function fetchReadArticlesWithData(
const articleEvents = await queryEvents(
relayPool,
- { kinds: [30023], ids: eventIds },
+ { kinds: [KINDS.BlogPost], ids: eventIds },
{ relayUrls: RELAYS }
)
diff --git a/src/services/linksService.ts b/src/services/linksService.ts
new file mode 100644
index 00000000..401cec12
--- /dev/null
+++ b/src/services/linksService.ts
@@ -0,0 +1,90 @@
+import { RelayPool } from 'applesauce-relay'
+import { fetchReadArticles } from './libraryService'
+import { queryEvents } from './dataFetch'
+import { RELAYS } from '../config/relays'
+import { KINDS } from '../config/kinds'
+import { ReadItem } from './readsService'
+import { processReadingPositions, processMarkedAsRead, filterValidItems, sortByReadingActivity } from './readingDataProcessor'
+import { mergeReadItem } from '../utils/readItemMerge'
+
+/**
+ * Fetches external URL links with reading progress from:
+ * - URLs with reading progress (kind:30078)
+ * - Manually marked as read URLs (kind:7, kind:17)
+ */
+export async function fetchLinks(
+ relayPool: RelayPool,
+ userPubkey: string,
+ onItem?: (item: ReadItem) => void
+): Promise<ReadItem[]> {
+ console.log('🔗 [Links] Fetching external links for user:', userPubkey.slice(0, 8))
+
+ const linksMap = new Map<string, ReadItem>()
+
+ // Helper to emit items as they're added/updated
+ const emitItem = (item: ReadItem) => {
+ if (onItem && mergeReadItem(linksMap, item)) {
+ onItem(linksMap.get(item.id)!)
+ } else if (!onItem) {
+ linksMap.set(item.id, item)
+ }
+ }
+
+ try {
+ // Fetch all data sources in parallel
+ const [readingPositionEvents, markedAsReadArticles] = await Promise.all([
+ queryEvents(relayPool, { kinds: [KINDS.AppData], authors: [userPubkey] }, { relayUrls: RELAYS }),
+ fetchReadArticles(relayPool, userPubkey)
+ ])
+
+ console.log('📊 [Links] Data fetched:', {
+ readingPositions: readingPositionEvents.length,
+ markedAsRead: markedAsReadArticles.length
+ })
+
+ // Process reading positions and emit external items
+ processReadingPositions(readingPositionEvents, linksMap)
+ if (onItem) {
+ linksMap.forEach(item => {
+ if (item.type === 'external') {
+ const hasProgress = (item.readingProgress && item.readingProgress > 0) || item.markedAsRead
+ if (hasProgress) emitItem(item)
+ }
+ })
+ }
+
+ // Process marked-as-read and emit external items
+ processMarkedAsRead(markedAsReadArticles, linksMap)
+ if (onItem) {
+ linksMap.forEach(item => {
+ if (item.type === 'external') {
+ const hasProgress = (item.readingProgress && item.readingProgress > 0) || item.markedAsRead
+ if (hasProgress) emitItem(item)
+ }
+ })
+ }
+
+ // Filter for external URLs only with reading progress
+ const links = Array.from(linksMap.values())
+ .filter(item => {
+ // Only external URLs
+ if (item.type !== 'external') return false
+
+ // Only include if there's reading progress or marked as read
+ const hasProgress = (item.readingProgress && item.readingProgress > 0) || item.markedAsRead
+ return hasProgress
+ })
+
+ // Apply common validation and sorting
+ const validLinks = filterValidItems(links)
+ const sortedLinks = sortByReadingActivity(validLinks)
+
+ console.log('✅ [Links] Processed', sortedLinks.length, 'total links')
+ return sortedLinks
+
+ } catch (error) {
+ console.error('Failed to fetch links:', error)
+ return []
+ }
+}
+
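// Usage sketch (illustrative, not part of the patch): fetchLinks resolves with the
// final sorted list, while the optional onItem callback streams enriched external
// links as they are discovered — the pattern Me.tsx uses to merge progress into
// already-rendered cards. relayPool and pubkey are assumed to come from the caller.

import { RelayPool } from 'applesauce-relay'
import { fetchLinks } from './linksService'
import { ReadItem } from './readsService'

async function logLinksWithProgress(relayPool: RelayPool, pubkey: string): Promise<ReadItem[]> {
  const links = await fetchLinks(relayPool, pubkey, (item) => {
    // Streamed item: external URL with reading progress or a marked-as-read flag
    console.log('link update', item.url, item.readingProgress, item.markedAsRead)
  })
  return links // already filtered to external items and sorted by recent activity
}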
diff --git a/src/services/meCache.ts b/src/services/meCache.ts
index 53b59a6a..e703ace9 100644
--- a/src/services/meCache.ts
+++ b/src/services/meCache.ts
@@ -1,11 +1,14 @@
import { Highlight } from '../types/highlights'
import { Bookmark } from '../types/bookmarks'
import { BlogPostPreview } from './exploreService'
+import { ReadItem } from './readsService'
export interface MeCache {
highlights: Highlight[]
bookmarks: Bookmark[]
readArticles: BlogPostPreview[]
+ reads?: ReadItem[]
+ links?: ReadItem[]
timestamp: number
}
diff --git a/src/services/readingDataProcessor.ts b/src/services/readingDataProcessor.ts
new file mode 100644
index 00000000..a61c54ef
--- /dev/null
+++ b/src/services/readingDataProcessor.ts
@@ -0,0 +1,147 @@
+import { NostrEvent } from 'nostr-tools'
+import { ReadItem } from './readsService'
+import { fallbackTitleFromUrl } from '../utils/readItemMerge'
+
+const READING_POSITION_PREFIX = 'boris:reading-position:'
+
+interface ReadArticle {
+ id: string
+ url?: string
+ eventId?: string
+ eventKind?: number
+ markedAt: number
+}
+
+/**
+ * Processes reading position events into ReadItems
+ */
+export function processReadingPositions(
+ events: NostrEvent[],
+ readsMap: Map<string, ReadItem>
+): void {
+ for (const event of events) {
+ const dTag = event.tags.find(t => t[0] === 'd')?.[1]
+ if (!dTag || !dTag.startsWith(READING_POSITION_PREFIX)) continue
+
+ const identifier = dTag.replace(READING_POSITION_PREFIX, '')
+
+ try {
+ const positionData = JSON.parse(event.content)
+ const position = positionData.position
+ const timestamp = positionData.timestamp
+
+ let itemId: string
+ let itemUrl: string | undefined
+ let itemType: 'article' | 'external' = 'external'
+
+ // Check if it's a nostr article (naddr format)
+ if (identifier.startsWith('naddr1')) {
+ itemId = identifier
+ itemType = 'article'
+ } else {
+ // It's a base64url-encoded URL
+ try {
+ itemUrl = atob(identifier.replace(/-/g, '+').replace(/_/g, '/'))
+ itemId = itemUrl
+ itemType = 'external'
+ } catch (e) {
+ console.warn('Failed to decode URL identifier:', identifier)
+ continue
+ }
+ }
+
+ // Add or update the item
+ const existing = readsMap.get(itemId)
+ if (!existing || !existing.readingTimestamp || timestamp > existing.readingTimestamp) {
+ readsMap.set(itemId, {
+ ...existing,
+ id: itemId,
+ source: 'reading-progress',
+ type: itemType,
+ url: itemUrl,
+ readingProgress: position,
+ readingTimestamp: timestamp
+ })
+ }
+ } catch (error) {
+ console.warn('Failed to parse reading position:', error)
+ }
+ }
+}
+
+/**
+ * Processes marked-as-read articles into ReadItems
+ */
+export function processMarkedAsRead(
+ articles: ReadArticle[],
+ readsMap: Map<string, ReadItem>
+): void {
+ for (const article of articles) {
+ const existing = readsMap.get(article.id)
+
+ if (article.eventId && article.eventKind === 30023) {
+ // Nostr article
+ readsMap.set(article.id, {
+ ...existing,
+ id: article.id,
+ source: 'marked-as-read',
+ type: 'article',
+ markedAsRead: true,
+ markedAt: article.markedAt,
+ readingTimestamp: existing?.readingTimestamp || article.markedAt
+ })
+ } else if (article.url) {
+ // External URL
+ readsMap.set(article.id, {
+ ...existing,
+ id: article.id,
+ source: 'marked-as-read',
+ type: 'external',
+ url: article.url,
+ markedAsRead: true,
+ markedAt: article.markedAt,
+ readingTimestamp: existing?.readingTimestamp || article.markedAt
+ })
+ }
+ }
+}
+
+/**
+ * Sorts ReadItems by most recent reading activity
+ */
+export function sortByReadingActivity(items: ReadItem[]): ReadItem[] {
+ return items.sort((a, b) => {
+ const timeA = a.readingTimestamp || a.markedAt || 0
+ const timeB = b.readingTimestamp || b.markedAt || 0
+ return timeB - timeA
+ })
+}
+
+/**
+ * Filters out items without timestamps and enriches external items with fallback titles
+ */
+export function filterValidItems(items: ReadItem[]): ReadItem[] {
+ return items
+ .filter(item => {
+ // Only include items that have a timestamp
+ const hasTimestamp = (item.readingTimestamp && item.readingTimestamp > 0) ||
+ (item.markedAt && item.markedAt > 0)
+ if (!hasTimestamp) return false
+
+ // For Nostr articles, we need the event to be valid
+ if (item.type === 'article' && !item.event) return false
+
+ // For external URLs, we need at least a URL
+ if (item.type === 'external' && !item.url) return false
+
+ return true
+ })
+ .map(item => {
+ // Add fallback title for external URLs without titles
+ if (item.type === 'external' && !item.title && item.url) {
+ return { ...item, title: fallbackTitleFromUrl(item.url) }
+ }
+ return item
+ })
+}
+
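// Usage sketch (illustrative, not part of the patch): the processor helpers are pure
// functions over a shared Map, composed in the same order readsService.ts and
// linksService.ts use them. The inputs are assumed to be fetched elsewhere.

import { NostrEvent } from 'nostr-tools'
import { ReadItem } from './readsService'
import { processReadingPositions, processMarkedAsRead, filterValidItems, sortByReadingActivity } from './readingDataProcessor'

function buildReadItems(
  positionEvents: NostrEvent[],                                                            // kind:30078 app-data events
  marked: { id: string; url?: string; eventId?: string; eventKind?: number; markedAt: number }[] // from fetchReadArticles
): ReadItem[] {
  const map = new Map<string, ReadItem>()
  processReadingPositions(positionEvents, map) // reading progress first
  processMarkedAsRead(marked, map)             // then overlay manual mark-as-read reactions
  return sortByReadingActivity(filterValidItems(Array.from(map.values())))
}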
diff --git a/src/services/readsService.ts b/src/services/readsService.ts
new file mode 100644
index 00000000..f54adc9b
--- /dev/null
+++ b/src/services/readsService.ts
@@ -0,0 +1,197 @@
+import { RelayPool } from 'applesauce-relay'
+import { NostrEvent } from 'nostr-tools'
+import { Helpers } from 'applesauce-core'
+import { Bookmark } from '../types/bookmarks'
+import { fetchReadArticles } from './libraryService'
+import { queryEvents } from './dataFetch'
+import { RELAYS } from '../config/relays'
+import { KINDS } from '../config/kinds'
+import { classifyBookmarkType } from '../utils/bookmarkTypeClassifier'
+import { nip19 } from 'nostr-tools'
+import { processReadingPositions, processMarkedAsRead, filterValidItems, sortByReadingActivity } from './readingDataProcessor'
+import { mergeReadItem } from '../utils/readItemMerge'
+
+const { getArticleTitle, getArticleImage, getArticlePublished, getArticleSummary } = Helpers
+
+export interface ReadItem {
+ id: string // event ID or URL or coordinate
+ source: 'bookmark' | 'reading-progress' | 'marked-as-read'
+ type: 'article' | 'external' // article=kind:30023, external=URL
+
+ // Article data
+ event?: NostrEvent
+ url?: string
+ title?: string
+ summary?: string
+ image?: string
+ published?: number
+ author?: string
+
+ // Reading metadata
+ readingProgress?: number // 0-1
+ readingTimestamp?: number // Unix timestamp of last reading activity
+ markedAsRead?: boolean
+ markedAt?: number
+}
+
+/**
+ * Fetches all reads from multiple sources:
+ * - Bookmarked articles (kind:30023) and article/website URLs
+ * - Articles/URLs with reading progress (kind:30078)
+ * - Manually marked as read articles/URLs (kind:7, kind:17)
+ */
+export async function fetchAllReads(
+ relayPool: RelayPool,
+ userPubkey: string,
+ bookmarks: Bookmark[],
+ onItem?: (item: ReadItem) => void
+): Promise<ReadItem[]> {
+ console.log('📚 [Reads] Fetching all reads for user:', userPubkey.slice(0, 8))
+
+ const readsMap = new Map<string, ReadItem>()
+
+ // Helper to emit items as they're added/updated
+ const emitItem = (item: ReadItem) => {
+ if (onItem && mergeReadItem(readsMap, item)) {
+ onItem(readsMap.get(item.id)!)
+ } else if (!onItem) {
+ readsMap.set(item.id, item)
+ }
+ }
+
+ try {
+ // Fetch all data sources in parallel
+ const [readingPositionEvents, markedAsReadArticles] = await Promise.all([
+ queryEvents(relayPool, { kinds: [KINDS.AppData], authors: [userPubkey] }, { relayUrls: RELAYS }),
+ fetchReadArticles(relayPool, userPubkey)
+ ])
+
+ console.log('📊 [Reads] Data fetched:', {
+ readingPositions: readingPositionEvents.length,
+ markedAsRead: markedAsReadArticles.length,
+ bookmarks: bookmarks.length
+ })
+
+ // Process reading positions and emit items
+ processReadingPositions(readingPositionEvents, readsMap)
+ if (onItem) {
+ readsMap.forEach(item => {
+ if (item.type === 'article') onItem(item)
+ })
+ }
+
+ // Process marked-as-read and emit items
+ processMarkedAsRead(markedAsReadArticles, readsMap)
+ if (onItem) {
+ readsMap.forEach(item => {
+ if (item.type === 'article') onItem(item)
+ })
+ }
+
+ // 3. Process bookmarked articles and article/website URLs
+ const allBookmarks = bookmarks.flatMap(b => b.individualBookmarks || [])
+
+ for (const bookmark of allBookmarks) {
+ const bookmarkType = classifyBookmarkType(bookmark)
+
+ // Only include articles
+ if (bookmarkType === 'article') {
+ // Kind:30023 nostr article
+ const coordinate = bookmark.id // Already in coordinate format
+ const existing = readsMap.get(coordinate)
+
+ if (!existing) {
+ const item: ReadItem = {
+ id: coordinate,
+ source: 'bookmark',
+ type: 'article',
+ readingProgress: 0,
+ readingTimestamp: bookmark.added_at || bookmark.created_at
+ }
+ readsMap.set(coordinate, item)
+ if (onItem) emitItem(item)
+ }
+ }
+ }
+
+ // 4. Fetch full event data for nostr articles
+ const articleCoordinates = Array.from(readsMap.values())
+ .filter(item => item.type === 'article' && !item.event)
+ .map(item => item.id)
+
+ if (articleCoordinates.length > 0) {
+ console.log('📖 [Reads] Fetching article events for', articleCoordinates.length, 'articles')
+
+ // Parse coordinates and fetch events
+ const articlesToFetch: Array<{ pubkey: string; identifier: string }> = []
+
+ for (const coord of articleCoordinates) {
+ try {
+ // Try to decode as naddr
+ if (coord.startsWith('naddr1')) {
+ const decoded = nip19.decode(coord)
+ if (decoded.type === 'naddr' && decoded.data.kind === KINDS.BlogPost) {
+ articlesToFetch.push({
+ pubkey: decoded.data.pubkey,
+ identifier: decoded.data.identifier || ''
+ })
+ }
+ } else {
+ // Try coordinate format (kind:pubkey:identifier)
+ const parts = coord.split(':')
+ if (parts.length === 3 && parseInt(parts[0]) === KINDS.BlogPost) {
+ articlesToFetch.push({
+ pubkey: parts[1],
+ identifier: parts[2]
+ })
+ }
+ }
+ } catch (e) {
+ console.warn('Failed to decode article coordinate:', coord)
+ }
+ }
+
+ if (articlesToFetch.length > 0) {
+ const authors = Array.from(new Set(articlesToFetch.map(a => a.pubkey)))
+ const identifiers = Array.from(new Set(articlesToFetch.map(a => a.identifier)))
+
+ const events = await queryEvents(
+ relayPool,
+ { kinds: [KINDS.BlogPost], authors, '#d': identifiers },
+ { relayUrls: RELAYS }
+ )
+
+ // Merge event data into ReadItems and emit
+ for (const event of events) {
+ const dTag = event.tags.find(t => t[0] === 'd')?.[1] || ''
+ const coordinate = `${KINDS.BlogPost}:${event.pubkey}:${dTag}`
+
+ const item = readsMap.get(coordinate) || readsMap.get(event.id)
+ if (item) {
+ item.event = event
+ item.title = getArticleTitle(event) || 'Untitled'
+ item.summary = getArticleSummary(event)
+ item.image = getArticleImage(event)
+ item.published = getArticlePublished(event)
+ item.author = event.pubkey
+ if (onItem) emitItem(item)
+ }
+ }
+ }
+ }
+
+ // 5. Filter for Nostr articles only and apply common validation/sorting
+ const articles = Array.from(readsMap.values())
+ .filter(item => item.type === 'article')
+
+ const validArticles = filterValidItems(articles)
+ const sortedReads = sortByReadingActivity(validArticles)
+
+ console.log('✅ [Reads] Processed', sortedReads.length, 'total reads')
+ return sortedReads
+
+ } catch (error) {
+ console.error('Failed to fetch all reads:', error)
+ return []
+ }
+}
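// Usage sketch (illustrative, not part of the patch): fetchAllReads is designed to run
// after bookmarks are already available, enriching them with progress and mark-as-read
// data in the background via onItem — mirroring loadReadsTab in Me.tsx.

import { RelayPool } from 'applesauce-relay'
import { Bookmark } from '../types/bookmarks'
import { fetchAllReads, ReadItem } from './readsService'

async function enrichReads(relayPool: RelayPool, pubkey: string, bookmarks: Bookmark[]): Promise<ReadItem[]> {
  return fetchAllReads(relayPool, pubkey, bookmarks, (item) => {
    // Each callback delivers an article-type ReadItem with the freshest merged metadata
    console.log('read update', item.id, item.readingProgress ?? 0, item.markedAsRead ?? false)
  })
}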
diff --git a/src/utils/linksFromBookmarks.ts b/src/utils/linksFromBookmarks.ts
new file mode 100644
index 00000000..bd3912a7
--- /dev/null
+++ b/src/utils/linksFromBookmarks.ts
@@ -0,0 +1,69 @@
+import { Bookmark } from '../types/bookmarks'
+import { ReadItem } from '../services/readsService'
+import { KINDS } from '../config/kinds'
+import { fallbackTitleFromUrl } from './readItemMerge'
+
+/**
+ * Derives ReadItems from bookmarks for external URLs:
+ * - Web bookmarks (kind:39701)
+ * - Any bookmark with http(s) URLs in content or urlReferences
+ */
+export function deriveLinksFromBookmarks(bookmarks: Bookmark[]): ReadItem[] {
+ const linksMap = new Map<string, ReadItem>()
+
+ const allBookmarks = bookmarks.flatMap(b => b.individualBookmarks || [])
+
+ for (const bookmark of allBookmarks) {
+ const urls: string[] = []
+
+ // Web bookmarks (kind:39701) - extract from 'd' tag
+ if (bookmark.kind === KINDS.WebBookmark) {
+ const dTag = bookmark.tags.find(t => t[0] === 'd')?.[1]
+ if (dTag) {
+ const url = dTag.startsWith('http') ? dTag : `https://${dTag}`
+ urls.push(url)
+ }
+ }
+
+ // Extract URLs from content if not already captured
+ if (bookmark.content) {
+ const urlRegex = /(https?:\/\/[^\s]+)/g
+ const matches = bookmark.content.match(urlRegex)
+ if (matches) {
+ urls.push(...matches)
+ }
+ }
+
+ // Extract metadata from tags (for web bookmarks and other types)
+ const title = bookmark.tags.find(t => t[0] === 'title')?.[1]
+ const summary = bookmark.tags.find(t => t[0] === 'summary')?.[1]
+ const image = bookmark.tags.find(t => t[0] === 'image')?.[1]
+
+ // Create ReadItem for each unique URL
+ for (const url of [...new Set(urls)]) {
+ if (!linksMap.has(url)) {
+ const item: ReadItem = {
+ id: url,
+ source: 'bookmark',
+ type: 'external',
+ url,
+ title: title || fallbackTitleFromUrl(url),
+ summary,
+ image,
+ readingProgress: 0,
+ readingTimestamp: bookmark.added_at || bookmark.created_at
+ }
+
+ linksMap.set(url, item)
+ }
+ }
+ }
+
+ // Sort by most recent bookmark activity
+ return Array.from(linksMap.values()).sort((a, b) => {
+ const timeA = a.readingTimestamp || 0
+ const timeB = b.readingTimestamp || 0
+ return timeB - timeA
+ })
+}
+
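// Usage sketch (illustrative, not part of the patch): deriving links is synchronous and
// purely local, which is why Me.tsx can render the Links tab immediately and let
// fetchLinks enrich the items afterwards.

import { Bookmark } from '../types/bookmarks'
import { deriveLinksFromBookmarks } from './linksFromBookmarks'

function initialLinkIds(bookmarks: Bookmark[]): string[] {
  // Each ReadItem id is the URL itself, so ids double as stable React keys
  return deriveLinksFromBookmarks(bookmarks).map(item => item.id)
}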
diff --git a/src/utils/readItemMerge.ts b/src/utils/readItemMerge.ts
new file mode 100644
index 00000000..c9afa587
--- /dev/null
+++ b/src/utils/readItemMerge.ts
@@ -0,0 +1,83 @@
+import { ReadItem } from '../services/readsService'
+
+/**
+ * Merges a ReadItem into a state map, returning whether the state changed.
+ * Uses most recent reading activity to determine precedence.
+ */
+export function mergeReadItem(
+ stateMap: Map<string, ReadItem>,
+ incoming: ReadItem
+): boolean {
+ const existing = stateMap.get(incoming.id)
+
+ if (!existing) {
+ stateMap.set(incoming.id, incoming)
+ return true
+ }
+
+ // Always merge if incoming has reading progress data
+ const hasNewProgress = incoming.readingProgress !== undefined &&
+ (existing.readingProgress === undefined || existing.readingProgress !== incoming.readingProgress)
+
+ const hasNewMarkedAsRead = incoming.markedAsRead !== undefined && existing.markedAsRead === undefined
+
+ // Merge by taking the most recent reading activity
+ const existingTime = existing.readingTimestamp || existing.markedAt || 0
+ const incomingTime = incoming.readingTimestamp || incoming.markedAt || 0
+
+ if (incomingTime > existingTime || hasNewProgress || hasNewMarkedAsRead) {
+ // Keep existing data, but update with newer reading metadata
+ stateMap.set(incoming.id, {
+ ...existing,
+ ...incoming,
+ // Preserve event data if incoming doesn't have it
+ event: incoming.event || existing.event,
+ title: incoming.title || existing.title,
+ summary: incoming.summary || existing.summary,
+ image: incoming.image || existing.image,
+ published: incoming.published || existing.published,
+ author: incoming.author || existing.author,
+ // Always take reading progress if available
+ readingProgress: incoming.readingProgress !== undefined ? incoming.readingProgress : existing.readingProgress,
+ readingTimestamp: incomingTime > existingTime ? incoming.readingTimestamp : existing.readingTimestamp
+ })
+ return true
+ }
+
+ // If timestamps are equal but incoming has additional data, merge it
+ if (incomingTime === existingTime && (!existing.event && incoming.event || !existing.title && incoming.title)) {
+ stateMap.set(incoming.id, {
+ ...existing,
+ ...incoming,
+ event: incoming.event || existing.event,
+ title: incoming.title || existing.title,
+ summary: incoming.summary || existing.summary,
+ image: incoming.image || existing.image,
+ published: incoming.published || existing.published,
+ author: incoming.author || existing.author
+ })
+ return true
+ }
+
+ return false
+}
+
+/**
+ * Extracts a readable title from a URL when no title is available.
+ * Removes protocol, www, and shows domain + path.
+ */
+export function fallbackTitleFromUrl(url: string): string {
+ try {
+ const parsed = new URL(url)
+ let title = parsed.hostname.replace(/^www\./, '')
+ if (parsed.pathname && parsed.pathname !== '/') {
+ const path = parsed.pathname.slice(0, 40)
+ title += path.length < parsed.pathname.length ? path + '...' : path
+ }
+ return title
+ } catch {
+ // If URL parsing fails, just return the URL truncated
+ return url.length > 50 ? url.slice(0, 47) + '...' : url
+ }
+}
+
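// Usage sketch (illustrative, not part of the patch): mergeReadItem mutates the map and
// returns whether anything changed, so callers can skip re-rendering when an enrichment
// event carries no new information. fallbackTitleFromUrl covers links that never had a
// title tag.

import { ReadItem } from '../services/readsService'
import { mergeReadItem, fallbackTitleFromUrl } from './readItemMerge'

const map = new Map<string, ReadItem>()
const url = 'https://example.com/some/long/article-path'

const base: ReadItem = { id: url, source: 'bookmark', type: 'external', url, readingProgress: 0, readingTimestamp: 100 }
mergeReadItem(map, base) // true: first sighting

const update: ReadItem = { id: url, source: 'reading-progress', type: 'external', url, readingProgress: 0.42, readingTimestamp: 200 }
if (mergeReadItem(map, update)) {
  // Changed: progress and timestamp were newer, earlier metadata is preserved
  console.log(fallbackTitleFromUrl(url), map.get(url)?.readingProgress) // "example.com/some/long/article-path" 0.42
}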
diff --git a/src/utils/readingProgressUtils.ts b/src/utils/readingProgressUtils.ts
new file mode 100644
index 00000000..99b6cfda
--- /dev/null
+++ b/src/utils/readingProgressUtils.ts
@@ -0,0 +1,30 @@
+import { ReadItem } from '../services/readsService'
+import { ReadingProgressFilterType } from '../components/ReadingProgressFilters'
+
+/**
+ * Filters ReadItems by reading progress
+ */
+export function filterByReadingProgress(
+ items: ReadItem[],
+ filter: ReadingProgressFilterType
+): ReadItem[] {
+ return items.filter((item) => {
+ const progress = item.readingProgress || 0
+ const isMarked = item.markedAsRead || false
+
+ switch (filter) {
+ case 'unopened':
+ return progress === 0 && !isMarked
+ case 'started':
+ return progress > 0 && progress <= 0.10 && !isMarked
+ case 'reading':
+ return progress > 0.10 && progress < 0.95 && !isMarked
+ case 'completed':
+ return progress >= 0.95 || isMarked
+ case 'all':
+ default:
+ return true
+ }
+ })
+}
+
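// Usage sketch (illustrative, not part of the patch): the thresholds map directly to the
// filter buttons — started is 0–10%, reading is roughly 10–95%, and completed is 95%+ or
// explicitly marked as read.

import { ReadItem } from '../services/readsService'
import { filterByReadingProgress } from './readingProgressUtils'

const items: ReadItem[] = [
  { id: 'a', source: 'bookmark', type: 'article', readingProgress: 0 },
  { id: 'b', source: 'reading-progress', type: 'article', readingProgress: 0.07 },
  { id: 'c', source: 'reading-progress', type: 'article', readingProgress: 0.5 },
  { id: 'd', source: 'marked-as-read', type: 'article', markedAsRead: true }
]

console.log(filterByReadingProgress(items, 'unopened').map(i => i.id))  // ['a']
console.log(filterByReadingProgress(items, 'started').map(i => i.id))   // ['b']
console.log(filterByReadingProgress(items, 'reading').map(i => i.id))   // ['c']
console.log(filterByReadingProgress(items, 'completed').map(i => i.id)) // ['d']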
diff --git a/src/utils/readsFromBookmarks.ts b/src/utils/readsFromBookmarks.ts
new file mode 100644
index 00000000..99e7310b
--- /dev/null
+++ b/src/utils/readsFromBookmarks.ts
@@ -0,0 +1,71 @@
+import { Bookmark } from '../types/bookmarks'
+import { ReadItem } from '../services/readsService'
+import { classifyBookmarkType } from './bookmarkTypeClassifier'
+import { KINDS } from '../config/kinds'
+import { nip19 } from 'nostr-tools'
+
+/**
+ * Derives ReadItems from bookmarks for Nostr articles (kind:30023).
+ * Returns items with type='article', using hydrated data when available.
+ * Note: After hydration, article titles are in bookmark.content, metadata in tags.
+ */
+export function deriveReadsFromBookmarks(bookmarks: Bookmark[]): ReadItem[] {
+ const readsMap = new Map<string, ReadItem>()
+
+ const allBookmarks = bookmarks.flatMap(b => b.individualBookmarks || [])
+
+ for (const bookmark of allBookmarks) {
+ const bookmarkType = classifyBookmarkType(bookmark)
+
+ // Only include articles (kind:30023)
+ if (bookmarkType === 'article' && bookmark.kind === KINDS.BlogPost) {
+ const coordinate = bookmark.id // coordinate format: kind:pubkey:identifier
+
+ // Extract identifier from coordinate
+ const parts = coordinate.split(':')
+ const identifier = parts[2] || ''
+
+ // Convert to naddr format (reading positions use naddr as ID)
+ let naddr: string
+ try {
+ naddr = nip19.naddrEncode({
+ kind: KINDS.BlogPost,
+ pubkey: bookmark.pubkey,
+ identifier
+ })
+ } catch (e) {
+ console.warn('Failed to encode naddr for bookmark:', coordinate)
+ continue
+ }
+
+ // Extract metadata from tags (same as BookmarkItem does)
+ const title = bookmark.content || 'Untitled'
+ const image = bookmark.tags.find(t => t[0] === 'image')?.[1]
+ const summary = bookmark.tags.find(t => t[0] === 'summary')?.[1]
+ const published = bookmark.tags.find(t => t[0] === 'published_at')?.[1]
+
+ const item: ReadItem = {
+ id: naddr, // Use naddr format to match reading positions
+ source: 'bookmark',
+ type: 'article',
+ readingProgress: 0,
+ readingTimestamp: bookmark.added_at || bookmark.created_at,
+ title,
+ summary,
+ image,
+ published: published ? parseInt(published) : undefined,
+ author: bookmark.pubkey
+ }
+
+ readsMap.set(naddr, item)
+ }
+ }
+
+ // Sort by most recent bookmark activity
+ return Array.from(readsMap.values()).sort((a, b) => {
+ const timeA = a.readingTimestamp || 0
+ const timeB = b.readingTimestamp || 0
+ return timeB - timeA
+ })
+}
+
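// Usage sketch (illustrative, not part of the patch): because the derived id is an naddr,
// it lines up with the identifiers used by reading-position events, which is what lets
// mergeReadItem attach progress to bookmarked articles later.

import { Bookmark } from '../types/bookmarks'
import { ReadItem } from '../services/readsService'
import { deriveReadsFromBookmarks } from './readsFromBookmarks'

function seedReadsState(bookmarks: Bookmark[]): Map<string, ReadItem> {
  const reads = deriveReadsFromBookmarks(bookmarks)
  // Keyed by naddr so background enrichment can look items up in O(1)
  return new Map(reads.map(item => [item.id, item]))
}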