feat: add named kind constants, streaming updates, and fix reads/links tabs

- Create src/config/kinds.ts with named Nostr kind constants
- Add streaming support to fetchAllReads and fetchLinks with onItem callbacks
- Update all services to use KINDS constants instead of magic numbers
- Add mergeReadItem utility for DRY state management
- Add fallbackTitleFromUrl for external links without titles
- Relax validation to allow external items without titles
- Update Me.tsx to use streaming with Map-based state for reads/links
- Fix refresh to merge new data instead of clearing state
- Fix empty states for Reads and Links tabs (no more infinite skeletons)
- Services updated: readsService, linksService, libraryService, bookmarkService, exploreService, highlights/fetchByAuthor
Author: Gigi
Date:   2025-10-16 08:27:10 +02:00
Parent: e6876d141f
Commit: fddf79e0c6
10 changed files with 241 additions and 79 deletions
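Before the per-file diffs, here is a minimal sketch of the streaming pattern this commit introduces: fetchAllReads (and fetchLinks) gain an optional onItem callback, and callers merge streamed items into a Map keyed by item id. The wrapper below is illustrative only; loadReadsStreaming and the import paths are assumptions, not part of the commit, while the signatures follow the diffs below.

```ts
// Illustrative wrapper, not part of this commit. It mirrors how Me.tsx (below)
// consumes the new optional onItem callback on fetchAllReads; fetchLinks follows
// the same pattern without the bookmarks argument.
import { RelayPool } from 'applesauce-relay'
import { Bookmark } from './types/bookmarks'
import { fetchAllReads, ReadItem } from './services/readsService'

export async function loadReadsStreaming(
  relayPool: RelayPool,
  pubkey: string,
  bookmarks: Bookmark[],
  onUpdate: (items: ReadItem[]) => void
): Promise<ReadItem[]> {
  const readsMap = new Map<string, ReadItem>()
  return fetchAllReads(relayPool, pubkey, bookmarks, (item) => {
    readsMap.set(item.id, item)              // upsert each item as it streams in
    onUpdate(Array.from(readsMap.values()))  // e.g. setReads(...) in a React component
  })
}
```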

Me.tsx

@@ -49,7 +49,9 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
const [highlights, setHighlights] = useState<Highlight[]>([])
const [bookmarks, setBookmarks] = useState<Bookmark[]>([])
const [reads, setReads] = useState<ReadItem[]>([])
const [readsMap, setReadsMap] = useState<Map<string, ReadItem>>(new Map())
const [links, setLinks] = useState<ReadItem[]>([])
const [linksMap, setLinksMap] = useState<Map<string, ReadItem>>(new Map())
const [writings, setWritings] = useState<BlogPostPreview[]>([])
const [loading, setLoading] = useState(true)
const [loadedTabs, setLoadedTabs] = useState<Set<TabType>>(new Set())
@@ -144,9 +146,14 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
fetchedBookmarks = []
}
// Fetch all reads
const userReads = await fetchAllReads(relayPool, viewingPubkey, fetchedBookmarks)
setReads(userReads)
// Fetch all reads with streaming
const tempMap = new Map(readsMap)
await fetchAllReads(relayPool, viewingPubkey, fetchedBookmarks, (item) => {
tempMap.set(item.id, item)
setReadsMap(new Map(tempMap))
setReads(Array.from(tempMap.values()))
})
setLoadedTabs(prev => new Set(prev).add('reads'))
} catch (err) {
console.error('Failed to load reads:', err)
@@ -163,9 +170,14 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
try {
if (!hasBeenLoaded) setLoading(true)
// Fetch links (external URLs with reading progress)
const userLinks = await fetchLinks(relayPool, viewingPubkey)
setLinks(userLinks)
// Fetch links with streaming
const tempMap = new Map(linksMap)
await fetchLinks(relayPool, viewingPubkey, (item) => {
tempMap.set(item.id, item)
setLinksMap(new Map(tempMap))
setLinks(Array.from(tempMap.values()))
})
setLoadedTabs(prev => new Set(prev).add('links'))
} catch (err) {
console.error('Failed to load links:', err)
@@ -214,15 +226,10 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
}, [activeTab, viewingPubkey, refreshTrigger])
// Pull-to-refresh - only reload active tab
// Pull-to-refresh - reload active tab without clearing state
const { isRefreshing, pullPosition } = usePullToRefresh({
onRefresh: () => {
// Clear the loaded state for current tab to force refresh
setLoadedTabs(prev => {
const newSet = new Set(prev)
newSet.delete(activeTab)
return newSet
})
// Just trigger refresh - loaders will merge new data
setRefreshTrigger(prev => prev + 1)
},
maximumPullLength: 240,
@@ -449,8 +456,8 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
)
case 'reads':
// Show loading skeletons while fetching or if no data
if (reads.length === 0 || (loading && !loadedTabs.has('reads'))) {
// Show loading skeletons only while initially loading
if (loading && !loadedTabs.has('reads')) {
return (
<div className="explore-grid">
{Array.from({ length: 6 }).map((_, i) => (
@@ -460,6 +467,15 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
)
}
// Show empty state if loaded but no reads
if (reads.length === 0 && loadedTabs.has('reads')) {
return (
<div className="explore-loading" style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', padding: '4rem', color: 'var(--text-secondary)' }}>
No articles read yet.
</div>
)
}
// Show reads with filters
return (
<>
@@ -487,8 +503,8 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
)
case 'links':
// Show loading skeletons while fetching or if no data
if (links.length === 0 || (loading && !loadedTabs.has('links'))) {
// Show loading skeletons only while initially loading
if (loading && !loadedTabs.has('links')) {
return (
<div className="explore-grid">
{Array.from({ length: 6 }).map((_, i) => (
@@ -498,6 +514,15 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
)
}
// Show empty state if loaded but no links
if (links.length === 0 && loadedTabs.has('links')) {
return (
<div className="explore-loading" style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', padding: '4rem', color: 'var(--text-secondary)' }}>
No links with reading progress yet.
</div>
)
}
// Show links with filters
return (
<>

src/config/kinds.ts (new file, 15 lines)

@@ -0,0 +1,15 @@
// Nostr event kinds used throughout the application
export const KINDS = {
Highlights: 9802, // NIP-84 highlight
BlogPost: 30023, // NIP-23 long-form article
AppData: 30078, // NIP-78 application data (reading positions)
List: 30001, // NIP-51 categorized list (addressable, legacy)
ListReplaceable: 30003, // NIP-51 bookmark set (addressable)
ListSimple: 10003, // NIP-51 bookmarks list (replaceable)
WebBookmark: 39701, // NIP-B0 web bookmark
ReactionToEvent: 7, // emoji reaction to event (used for mark-as-read)
ReactionToUrl: 17 // emoji reaction to URL (used for mark-as-read)
} as const
export type KindValue = typeof KINDS[keyof typeof KINDS]
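A small usage sketch of the new constants and the KindValue type. The isListKind helper is hypothetical and not part of this commit; it only illustrates how filters can drop magic numbers.

```ts
// Hypothetical helper built on the new constants; isListKind is not in the codebase.
import { KINDS, KindValue } from './kinds'

const LIST_KINDS: KindValue[] = [KINDS.ListSimple, KINDS.ListReplaceable, KINDS.List]

export function isListKind(kind: number): boolean {
  return (LIST_KINDS as readonly number[]).includes(kind)
}

// Filters then read as, e.g.:
// { kinds: [KINDS.BlogPost], authors: [pubkey] }  // instead of { kinds: [30023], ... }
```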

bookmarkService

@@ -15,6 +15,7 @@ import { collectBookmarksFromEvents } from './bookmarkProcessing.ts'
import { UserSettings } from './settingsService'
import { rebroadcastEvents } from './rebroadcastService'
import { queryEvents } from './dataFetch'
import { KINDS } from '../config/kinds'
@@ -34,7 +35,7 @@ export const fetchBookmarks = async (
const rawEvents = await queryEvents(
relayPool,
{ kinds: [10003, 30003, 30001, 39701], authors: [activeAccount.pubkey] },
{ kinds: [KINDS.ListSimple, KINDS.ListReplaceable, KINDS.List, KINDS.WebBookmark], authors: [activeAccount.pubkey] },
{}
)
console.log('📊 Raw events fetched:', rawEvents.length, 'events')
@@ -71,7 +72,7 @@ export const fetchBookmarks = async (
})
// Check specifically for Primal's "reads" list
const primalReads = rawEvents.find(e => e.kind === 10003 && e.tags?.find((t: string[]) => t[0] === 'd' && t[1] === 'reads'))
const primalReads = rawEvents.find(e => e.kind === KINDS.ListSimple && e.tags?.find((t: string[]) => t[0] === 'd' && t[1] === 'reads'))
if (primalReads) {
console.log('✅ Found Primal reads list:', primalReads.id.slice(0, 8))
} else {

exploreService

@@ -2,6 +2,7 @@ import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { Helpers } from 'applesauce-core'
import { queryEvents } from './dataFetch'
import { KINDS } from '../config/kinds'
const { getArticleTitle, getArticleImage, getArticlePublished, getArticleSummary } = Helpers
@@ -41,7 +42,7 @@ export const fetchBlogPostsFromAuthors = async (
await queryEvents(
relayPool,
{ kinds: [30023], authors: pubkeys, limit: 100 },
{ kinds: [KINDS.BlogPost], authors: pubkeys, limit: 100 },
{
relayUrls,
onEvent: (event: NostrEvent) => {

highlights/fetchByAuthor

@@ -6,6 +6,7 @@ import { prioritizeLocalRelays, partitionRelays } from '../../utils/helpers'
import { eventToHighlight, dedupeHighlights, sortHighlights } from '../highlightEventProcessor'
import { UserSettings } from '../settingsService'
import { rebroadcastEvents } from '../rebroadcastService'
import { KINDS } from '../../config/kinds'
export const fetchHighlights = async (
relayPool: RelayPool,
@@ -21,7 +22,7 @@ export const fetchHighlights = async (
const seenIds = new Set<string>()
const local$ = localRelays.length > 0
? relayPool
.req(localRelays, { kinds: [9802], authors: [pubkey] })
.req(localRelays, { kinds: [KINDS.Highlights], authors: [pubkey] })
.pipe(
onlyEvents(),
tap((event: NostrEvent) => {
@@ -36,7 +37,7 @@ export const fetchHighlights = async (
: new Observable<NostrEvent>((sub) => sub.complete())
const remote$ = remoteRelays.length > 0
? relayPool
.req(remoteRelays, { kinds: [9802], authors: [pubkey] })
.req(remoteRelays, { kinds: [KINDS.Highlights], authors: [pubkey] })
.pipe(
onlyEvents(),
tap((event: NostrEvent) => {

libraryService

@@ -2,6 +2,7 @@ import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { Helpers } from 'applesauce-core'
import { RELAYS } from '../config/relays'
import { KINDS } from '../config/kinds'
import { MARK_AS_READ_EMOJI } from './reactionService'
import { BlogPostPreview } from './exploreService'
import { queryEvents } from './dataFetch'
@@ -29,8 +30,8 @@ export async function fetchReadArticles(
try {
// Fetch kind:7 and kind:17 reactions in parallel
const [kind7Events, kind17Events] = await Promise.all([
queryEvents(relayPool, { kinds: [7], authors: [userPubkey] }, { relayUrls: RELAYS }),
queryEvents(relayPool, { kinds: [17], authors: [userPubkey] }, { relayUrls: RELAYS })
queryEvents(relayPool, { kinds: [KINDS.ReactionToEvent], authors: [userPubkey] }, { relayUrls: RELAYS }),
queryEvents(relayPool, { kinds: [KINDS.ReactionToUrl], authors: [userPubkey] }, { relayUrls: RELAYS })
])
const readArticles: ReadArticle[] = []
@@ -102,7 +103,7 @@ export async function fetchReadArticlesWithData(
// Filter to only nostr-native articles (kind 30023)
const nostrArticles = readArticles.filter(
article => article.eventKind === 30023 && article.eventId
article => article.eventKind === KINDS.BlogPost && article.eventId
)
if (nostrArticles.length === 0) {
@@ -114,7 +115,7 @@ export async function fetchReadArticlesWithData(
const articleEvents = await queryEvents(
relayPool,
{ kinds: [30023], ids: eventIds },
{ kinds: [KINDS.BlogPost], ids: eventIds },
{ relayUrls: RELAYS }
)

linksService

@@ -2,10 +2,10 @@ import { RelayPool } from 'applesauce-relay'
import { fetchReadArticles } from './libraryService'
import { queryEvents } from './dataFetch'
import { RELAYS } from '../config/relays'
import { KINDS } from '../config/kinds'
import { ReadItem } from './readsService'
import { processReadingPositions, processMarkedAsRead, filterValidItems, sortByReadingActivity } from './readingDataProcessor'
const APP_DATA_KIND = 30078 // NIP-78 Application Data
import { mergeReadItem } from '../utils/readItemMerge'
/**
* Fetches external URL links with reading progress from:
@@ -14,14 +14,26 @@ const APP_DATA_KIND = 30078 // NIP-78 Application Data
*/
export async function fetchLinks(
relayPool: RelayPool,
userPubkey: string
userPubkey: string,
onItem?: (item: ReadItem) => void
): Promise<ReadItem[]> {
console.log('🔗 [Links] Fetching external links for user:', userPubkey.slice(0, 8))
const linksMap = new Map<string, ReadItem>()
// Helper to emit items as they're added/updated
const emitItem = (item: ReadItem) => {
if (onItem && mergeReadItem(linksMap, item)) {
onItem(linksMap.get(item.id)!)
} else if (!onItem) {
linksMap.set(item.id, item)
}
}
try {
// Fetch all data sources in parallel
const [readingPositionEvents, markedAsReadArticles] = await Promise.all([
queryEvents(relayPool, { kinds: [APP_DATA_KIND], authors: [userPubkey] }, { relayUrls: RELAYS }),
queryEvents(relayPool, { kinds: [KINDS.AppData], authors: [userPubkey] }, { relayUrls: RELAYS }),
fetchReadArticles(relayPool, userPubkey)
])
@@ -30,10 +42,27 @@ export async function fetchLinks(
markedAsRead: markedAsReadArticles.length
})
// Process data using shared utilities
const linksMap = new Map<string, ReadItem>()
// Process reading positions and emit external items
processReadingPositions(readingPositionEvents, linksMap)
if (onItem) {
linksMap.forEach(item => {
if (item.type === 'external') {
const hasProgress = (item.readingProgress && item.readingProgress > 0) || item.markedAsRead
if (hasProgress) emitItem(item)
}
})
}
// Process marked-as-read and emit external items
processMarkedAsRead(markedAsReadArticles, linksMap)
if (onItem) {
linksMap.forEach(item => {
if (item.type === 'external') {
const hasProgress = (item.readingProgress && item.readingProgress > 0) || item.markedAsRead
if (hasProgress) emitItem(item)
}
})
}
// Filter for external URLs only with reading progress
const links = Array.from(linksMap.values())

readingDataProcessor

@@ -1,5 +1,6 @@
import { NostrEvent } from 'nostr-tools'
import { ReadItem } from './readsService'
import { fallbackTitleFromUrl } from '../utils/readItemMerge'
const READING_POSITION_PREFIX = 'boris:reading-position:'
@@ -117,24 +118,30 @@ export function sortByReadingActivity(items: ReadItem[]): ReadItem[] {
}
/**
* Filters out items without timestamps or proper titles
* Filters out items without timestamps and enriches external items with fallback titles
*/
export function filterValidItems(items: ReadItem[]): ReadItem[] {
return items.filter(item => {
// Only include items that have a timestamp
const hasTimestamp = (item.readingTimestamp && item.readingTimestamp > 0) ||
(item.markedAt && item.markedAt > 0)
if (!hasTimestamp) return false
// Filter out items without titles
if (!item.title || item.title === 'Untitled') {
// For Nostr articles, we need the title from the event
return items
.filter(item => {
// Only include items that have a timestamp
const hasTimestamp = (item.readingTimestamp && item.readingTimestamp > 0) ||
(item.markedAt && item.markedAt > 0)
if (!hasTimestamp) return false
// For Nostr articles, we need the event to be valid
if (item.type === 'article' && !item.event) return false
// For external URLs, we need a proper title
if (item.type === 'external' && !item.title) return false
}
return true
})
// For external URLs, we need at least a URL
if (item.type === 'external' && !item.url) return false
return true
})
.map(item => {
// Add fallback title for external URLs without titles
if (item.type === 'external' && !item.title && item.url) {
return { ...item, title: fallbackTitleFromUrl(item.url) }
}
return item
})
}
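A quick test-style sketch of the new filterValidItems behaviour, derived from the implementation above. The asItem cast helper and the import paths are assumptions made only to keep the example short, since just part of the ReadItem interface appears in this diff.

```ts
import { filterValidItems } from './readingDataProcessor'
import { ReadItem } from './readsService'

// Sketch-only shortcut; real items carry the full ReadItem shape.
const asItem = (partial: Partial<ReadItem>) => partial as ReadItem

const items = [
  // external link with progress but no title -> kept, title filled from the URL
  asItem({ id: 'https://example.com/post', source: 'reading-progress', type: 'external',
           url: 'https://example.com/post', readingProgress: 0.4, readingTimestamp: 1729060000 }),
  // nostr article without its event -> dropped
  asItem({ id: '30023:pubkey:post', source: 'bookmark', type: 'article', readingTimestamp: 1729060000 })
]

const valid = filterValidItems(items)
// valid.length === 1, valid[0].title === 'example.com/post'
```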

readsService

@@ -5,14 +5,14 @@ import { Bookmark, IndividualBookmark } from '../types/bookmarks'
import { fetchReadArticles } from './libraryService'
import { queryEvents } from './dataFetch'
import { RELAYS } from '../config/relays'
import { KINDS } from '../config/kinds'
import { classifyBookmarkType } from '../utils/bookmarkTypeClassifier'
import { nip19 } from 'nostr-tools'
import { processReadingPositions, processMarkedAsRead, filterValidItems, sortByReadingActivity } from './readingDataProcessor'
import { mergeReadItem } from '../utils/readItemMerge'
const { getArticleTitle, getArticleImage, getArticlePublished, getArticleSummary } = Helpers
const APP_DATA_KIND = 30078 // NIP-78 Application Data
export interface ReadItem {
id: string // event ID or URL or coordinate
source: 'bookmark' | 'reading-progress' | 'marked-as-read'
@@ -43,14 +43,26 @@ export interface ReadItem {
export async function fetchAllReads(
relayPool: RelayPool,
userPubkey: string,
bookmarks: Bookmark[]
bookmarks: Bookmark[],
onItem?: (item: ReadItem) => void
): Promise<ReadItem[]> {
console.log('📚 [Reads] Fetching all reads for user:', userPubkey.slice(0, 8))
const readsMap = new Map<string, ReadItem>()
// Helper to emit items as they're added/updated
const emitItem = (item: ReadItem) => {
if (onItem && mergeReadItem(readsMap, item)) {
onItem(readsMap.get(item.id)!)
} else if (!onItem) {
readsMap.set(item.id, item)
}
}
try {
// Fetch all data sources in parallel
const [readingPositionEvents, markedAsReadArticles] = await Promise.all([
queryEvents(relayPool, { kinds: [APP_DATA_KIND], authors: [userPubkey] }, { relayUrls: RELAYS }),
queryEvents(relayPool, { kinds: [KINDS.AppData], authors: [userPubkey] }, { relayUrls: RELAYS }),
fetchReadArticles(relayPool, userPubkey)
])
@@ -60,10 +72,21 @@ export async function fetchAllReads(
bookmarks: bookmarks.length
})
// Process data using shared utilities
const readsMap = new Map<string, ReadItem>()
// Process reading positions and emit items
processReadingPositions(readingPositionEvents, readsMap)
if (onItem) {
readsMap.forEach(item => {
if (item.type === 'article') emitItem(item)
})
}
// Process marked-as-read and emit items
processMarkedAsRead(markedAsReadArticles, readsMap)
if (onItem) {
readsMap.forEach(item => {
if (item.type === 'article') emitItem(item)
})
}
// 3. Process bookmarked articles and article/website URLs
const allBookmarks = bookmarks.flatMap(b => b.individualBookmarks || [])
@@ -71,38 +94,22 @@ export async function fetchAllReads(
for (const bookmark of allBookmarks) {
const bookmarkType = classifyBookmarkType(bookmark)
// Only include articles and external article/website bookmarks
// Only include articles
if (bookmarkType === 'article') {
// Kind:30023 nostr article
const coordinate = bookmark.id // Already in coordinate format
const existing = readsMap.get(coordinate)
if (!existing) {
readsMap.set(coordinate, {
const item: ReadItem = {
id: coordinate,
source: 'bookmark',
type: 'article',
readingProgress: 0,
readingTimestamp: bookmark.added_at || bookmark.created_at
})
}
} else if (bookmarkType === 'external') {
// External article URL
const urls = extractUrlFromBookmark(bookmark)
if (urls.length > 0) {
const url = urls[0]
const existing = readsMap.get(url)
if (!existing) {
readsMap.set(url, {
id: url,
source: 'bookmark',
type: 'external',
url,
readingProgress: 0,
readingTimestamp: bookmark.added_at || bookmark.created_at
})
}
readsMap.set(coordinate, item)
if (onItem) emitItem(item)
}
}
}
@@ -123,7 +130,7 @@ export async function fetchAllReads(
// Try to decode as naddr
if (coord.startsWith('naddr1')) {
const decoded = nip19.decode(coord)
if (decoded.type === 'naddr' && decoded.data.kind === 30023) {
if (decoded.type === 'naddr' && decoded.data.kind === KINDS.BlogPost) {
articlesToFetch.push({
pubkey: decoded.data.pubkey,
identifier: decoded.data.identifier || ''
@@ -132,7 +139,7 @@ export async function fetchAllReads(
} else {
// Try coordinate format (kind:pubkey:identifier)
const parts = coord.split(':')
if (parts.length === 3 && parts[0] === '30023') {
if (parts.length === 3 && parseInt(parts[0]) === KINDS.BlogPost) {
articlesToFetch.push({
pubkey: parts[1],
identifier: parts[2]
@@ -150,14 +157,14 @@ export async function fetchAllReads(
const events = await queryEvents(
relayPool,
{ kinds: [30023], authors, '#d': identifiers },
{ kinds: [KINDS.BlogPost], authors, '#d': identifiers },
{ relayUrls: RELAYS }
)
// Merge event data into ReadItems
// Merge event data into ReadItems and emit
for (const event of events) {
const dTag = event.tags.find(t => t[0] === 'd')?.[1] || ''
const coordinate = `30023:${event.pubkey}:${dTag}`
const coordinate = `${KINDS.BlogPost}:${event.pubkey}:${dTag}`
const item = readsMap.get(coordinate) || readsMap.get(event.id)
if (item) {
@@ -167,6 +174,7 @@ export async function fetchAllReads(
item.image = getArticleImage(event)
item.published = getArticlePublished(event)
item.author = event.pubkey
if (onItem) emitItem(item)
}
}
}

src/utils/readItemMerge.ts (new file)

@@ -0,0 +1,74 @@
import { ReadItem } from '../services/readsService'
/**
* Merges a ReadItem into a state map, returning whether the state changed.
* Uses most recent reading activity to determine precedence.
*/
export function mergeReadItem(
stateMap: Map<string, ReadItem>,
incoming: ReadItem
): boolean {
const existing = stateMap.get(incoming.id)
if (!existing) {
stateMap.set(incoming.id, incoming)
return true
}
// Merge by taking the most recent reading activity
const existingTime = existing.readingTimestamp || existing.markedAt || 0
const incomingTime = incoming.readingTimestamp || incoming.markedAt || 0
if (incomingTime > existingTime) {
// Keep existing data, but update with newer reading metadata
stateMap.set(incoming.id, {
...existing,
...incoming,
// Preserve event data if incoming doesn't have it
event: incoming.event || existing.event,
title: incoming.title || existing.title,
summary: incoming.summary || existing.summary,
image: incoming.image || existing.image,
published: incoming.published || existing.published,
author: incoming.author || existing.author
})
return true
}
// If timestamps are equal but incoming has additional data, merge it
if (incomingTime === existingTime && (!existing.event && incoming.event || !existing.title && incoming.title)) {
stateMap.set(incoming.id, {
...existing,
...incoming,
event: incoming.event || existing.event,
title: incoming.title || existing.title,
summary: incoming.summary || existing.summary,
image: incoming.image || existing.image,
published: incoming.published || existing.published,
author: incoming.author || existing.author
})
return true
}
return false
}
/**
* Extracts a readable title from a URL when no title is available.
* Removes protocol, www, and shows domain + path.
*/
export function fallbackTitleFromUrl(url: string): string {
try {
const parsed = new URL(url)
let title = parsed.hostname.replace(/^www\./, '')
if (parsed.pathname && parsed.pathname !== '/') {
const path = parsed.pathname.slice(0, 40)
title += path.length < parsed.pathname.length ? path + '...' : path
}
return title
} catch {
// If URL parsing fails, just return the URL truncated
return url.length > 50 ? url.slice(0, 47) + '...' : url
}
}
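Expected behaviour of the two utilities, sketched from the implementations above. The asItem helper, the sample values, and the import paths are illustrative assumptions.

```ts
import { ReadItem } from '../services/readsService'
import { mergeReadItem, fallbackTitleFromUrl } from './readItemMerge'

const asItem = (partial: Partial<ReadItem>) => partial as ReadItem  // sketch-only shortcut

const state = new Map<string, ReadItem>()
const older = asItem({ id: 'a', type: 'article', readingTimestamp: 100 })
const newer = asItem({ id: 'a', type: 'article', readingTimestamp: 200, title: 'Hello Nostr' })

mergeReadItem(state, older)  // true  - first sighting, inserted
mergeReadItem(state, newer)  // true  - newer activity wins, title merged in
mergeReadItem(state, older)  // false - stale data, state untouched

fallbackTitleFromUrl('https://www.example.com/posts/2024/streaming-reads')
// -> 'example.com/posts/2024/streaming-reads'
```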