Compare commits

...

220 Commits

Author SHA1 Message Date
Gigi
2c3aff0407 perf(bunker): make NIP-46 publish non-blocking at app wiring level; resolve immediately and let responses drive timing/results 2025-10-17 13:09:42 +02:00
Gigi
aad35d41db fix(debug): return benign object from fire-and-forget publish so timing UI remains stable 2025-10-17 13:06:17 +02:00
Gigi
cc6189a5d9 perf(bunker): fire-and-forget NIP-46 publish in app wrapper so UI isn’t blocked waiting on relay publish; encryption/decryption results now display immediately on /debug 2025-10-17 13:04:59 +02:00
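The three commits above describe the same change from different angles. A minimal sketch of such a fire-and-forget wrapper, assuming a Promise-returning `(relays, event)` publish function (names and signature are illustrative, not the exact applesauce API):

```typescript
// Minimal event shape for illustration; the app uses nostr-tools' event type
type NostrEvent = { kind: number; content: string; tags: string[][]; created_at: number }
type PublishFn = (relays: string[], event: NostrEvent) => Promise<unknown>

// Wrap an existing publish function so callers resolve immediately;
// NIP-46 responses (not relay acks) then drive timing and results.
function fireAndForgetPublish(publish: PublishFn): PublishFn {
  return (relays, event) => {
    publish(relays, event).catch((err) => console.warn('[bunker] publish failed', err))
    // Return a benign object right away so the /debug timing UI stays stable
    return Promise.resolve({ ok: true })
  }
}

// Hypothetical wiring at the app level:
// NostrConnectSigner.publishMethod = fireAndForgetPublish(pool.publish.bind(pool))
```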
Gigi
18bf8f9a2c ui(debug): use existing color pattern for red disconnect button with proper styling and hover effects 2025-10-17 12:56:47 +02:00
Gigi
37f3a32a1c fix(debug): use inline red styling for disconnect button since btn-danger class doesn't exist 2025-10-17 12:56:06 +02:00
Gigi
c9678564a5 ui(debug): change disconnect button to red (btn-danger) for better visual indication 2025-10-17 12:54:26 +02:00
Gigi
721c18c509 ui(debug): add Reset button to restore default payload text 2025-10-17 12:53:44 +02:00
Gigi
9e30fe683b ui(debug): left-align encrypt and decrypt buttons in both NIP-44 and NIP-04 sections 2025-10-17 12:53:20 +02:00
Gigi
7fff50c146 ui(debug): move Encrypt/Decrypt buttons above Encrypted text in both NIP-44 and NIP-04 sections 2025-10-17 12:52:40 +02:00
Gigi
fc1c845b67 ui(debug): change 'cipher' labels to 'Encrypted:' for better clarity 2025-10-17 12:52:12 +02:00
Gigi
c2ec1f3677 ui(debug): move Clear logs button below Show all checkbox 2025-10-17 12:51:37 +02:00
Gigi
0cbd357856 ui(debug): right-align all buttons using justify-end 2025-10-17 12:51:21 +02:00
Gigi
26ea9ed547 fix(lint): remove unused global variable declarations from Debug component 2025-10-17 12:50:49 +02:00
Gigi
9cbbecb32c ui(debug): increase debug logs height from max-h-96 to max-h-192 (2x taller) 2025-10-17 12:49:59 +02:00
Gigi
db12c89731 ui(debug): add character-wrap (break-all) to ciphertext textboxes 2025-10-17 12:49:28 +02:00
Gigi
6f413deb90 ui(debug): increase ciphertext textarea height to 5 lines (h-20) 2025-10-17 12:48:57 +02:00
Gigi
0127e2dc86 ui(debug): change page title from 'Bunker Debug' to 'Debug' 2025-10-17 12:48:25 +02:00
Gigi
7743928702 ui(debug): increase log area height from max-h-64 to max-h-96 (3x taller) 2025-10-17 12:48:01 +02:00
Gigi
bf76150fc1 ui(debug): show spinner in place of millisecond number during measurement 2025-10-17 12:47:36 +02:00
Gigi
c62107172b ui(debug): make ciphertext and plaintext fields multiline with proper whitespace handling 2025-10-17 12:47:13 +02:00
Gigi
a253587dfa ui(debug): add subtle background to payload textarea for better editability indication 2025-10-17 12:46:57 +02:00
Gigi
1938533d53 ui(debug): replace animated timing with spinner during measurement 2025-10-17 12:46:43 +02:00
Gigi
28943c55bd style(debug): update ciphertext and plaintext display to match logs textbox style 2025-10-17 12:46:21 +02:00
Gigi
791bbb68b6 fix(debug): implement proper stopwatch timing that counts up from 0ms in real-time 2025-10-17 12:44:29 +02:00
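A small sketch of a count-up stopwatch hook along the lines this commit describes (hook and state names are illustrative):

```typescript
import { useEffect, useState } from 'react'

// Counts up from 0 ms while `running` is true, e.g. while an encrypt/decrypt call is in flight
function useStopwatch(running: boolean): number {
  const [elapsedMs, setElapsedMs] = useState(0)

  useEffect(() => {
    if (!running) return
    setElapsedMs(0)
    const startedAt = performance.now()
    const id = setInterval(() => setElapsedMs(Math.round(performance.now() - startedAt)), 50)
    return () => clearInterval(id)
  }, [running])

  return elapsedMs
}
```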
Gigi
ec8adcc794 refactor(debug): move plaintext display below buttons for better visual flow 2025-10-17 12:43:06 +02:00
Gigi
68058e7661 refactor(debug): move encrypt buttons next to decrypt buttons for better UX 2025-10-17 12:42:22 +02:00
Gigi
416c62369c refactor: extract VersionFooter component to eliminate duplication between debug and settings 2025-10-17 12:41:39 +02:00
Gigi
a19dd53423 feat(debug): add live performance timing with digital stopwatch display 2025-10-17 12:40:22 +02:00
Gigi
79ec33b79a style(debug): format NIP specifications as NIP-44 and NIP-04 2025-10-17 12:37:59 +02:00
Gigi
be881b957c feat(debug): update log description to 'Recent bunker logs:' 2025-10-17 12:36:50 +02:00
Gigi
244872e9f2 style(debug): move debug logs controls below the log output 2025-10-17 12:36:36 +02:00
Gigi
1397f7f0f4 style(debug): apply settings page styling structure and layout 2025-10-17 12:36:10 +02:00
Gigi
96424dd65c fix: resolve all linting issues - replace empty catch blocks and fix explicit any types 2025-10-17 12:33:53 +02:00
Gigi
9efc5459fb feat(debug): replace debug logs button with proper HTML checkbox element 2025-10-17 12:32:53 +02:00
Gigi
7e02168e54 feat(debug): make debug logs button show toggleable checkmark (✓/☐) 2025-10-17 12:32:29 +02:00
Gigi
f8e6b3e828 refactor(debug): move time measurements to dedicated Performance Timing section 2025-10-17 12:32:12 +02:00
Gigi
c06176bfc9 feat(debug): add bunker login section as first section of debug page 2025-10-17 12:31:31 +02:00
Gigi
e2a1701000 refactor(debug): move debug logs section to end with improved layout 2025-10-17 12:30:14 +02:00
Gigi
d7703ceef4 style(debug): use regular HTML checkmark instead of FontAwesome icon 2025-10-17 12:29:09 +02:00
Gigi
93daabc673 style(debug): improve cipher text wrapping with overflowWrap anywhere 2025-10-17 12:28:43 +02:00
Gigi
9264245944 style(debug): make Clear logs button a proper secondary button 2025-10-17 12:28:14 +02:00
Gigi
f56423040b feat(debug): add checkmark icon to debug logs button when enabled 2025-10-17 12:28:04 +02:00
Gigi
4b91504a50 feat(debug): clarify button text to 'Show all applesauce debug logs' 2025-10-17 12:27:45 +02:00
Gigi
1f0f7fef5e feat(debug): update title to 'Bunker Debug' for clarity 2025-10-17 12:27:25 +02:00
Gigi
6aced653fb feat(debug): add clock icon to time measurements for better visual clarity 2025-10-17 12:27:14 +02:00
Gigi
0899482869 style(debug): make Encrypt (nip04) and Clear buttons proper secondary buttons 2025-10-17 12:26:51 +02:00
Gigi
1bdfa1e6e1 style(debug): apply same max-width as reading view to debug page 2025-10-17 12:26:31 +02:00
Gigi
f22a8f15c0 style(debug): improve debug page styling and layout consistency 2025-10-17 12:22:31 +02:00
Gigi
bf6394fc7d feat(debug): add version and git commit footer to /debug page 2025-10-17 12:20:43 +02:00
Gigi
6f08586e8f feat(debug): improve layout/readability with sections, code boxes, and stats badges 2025-10-17 12:19:09 +02:00
Gigi
d60a4a24ad feat(debug): show encrypt/decrypt durations for nip04/nip44 on /debug page 2025-10-17 12:14:59 +02:00
Gigi
51069f3623 feat(debug): add debug toggle and clear logs; disable account queueing for nostr-connect 2025-10-17 12:12:25 +02:00
Gigi
1407af22e3 feat(debug): interactive /debug page (manual nip04/nip44 encrypt/decrypt, live logs); add DebugBus and wire signer logs 2025-10-17 10:50:20 +02:00
Gigi
ea6220277d feat(debug): add /debug page with NIP-46 encrypt→decrypt probes for nip04/nip44 2025-10-17 10:37:45 +02:00
Gigi
fbffa03dad docs(amber): summarize bunker decrypt investigation, evidence, and next steps 2025-10-17 09:48:11 +02:00
Gigi
a74760d804 chore(bunker): increase decrypt timeouts (probe 10s, bookmark decrypt 30s) 2025-10-17 09:36:13 +02:00
Gigi
c4b0a712d2 chore(bunker): log NIP-46 method from event content to debug decrypt calls 2025-10-17 09:34:31 +02:00
Gigi
1fecf9c7f4 fix(bunker): accept remote===pubkey for Amber; remove invalid-state warning 2025-10-17 01:26:32 +02:00
Gigi
7be21203d9 chore(types): cast through unknown for protected publish/subscription access in debug wrappers 2025-10-17 01:25:21 +02:00
Gigi
f65f2c6597 chore(lint): remove explicit any types, add deps for useEffect, and type relay logging 2025-10-17 01:24:41 +02:00
Gigi
227def4328 chore(lint): replace empty catch blocks with warnings; keep strict rules 2025-10-17 01:22:53 +02:00
Gigi
b506624f57 fix(bunker): use encrypt→decrypt roundtrip for nip44/nip04 probe to avoid false timeouts 2025-10-17 01:19:37 +02:00
Gigi
fbb6a0a153 fix(bunker): merge signer.relays with app RELAYS to include local Amber relays 2025-10-17 01:13:03 +02:00
Gigi
528de32689 fix(bunker): wire NostrConnectSigner to RelayPool publish/subscription statics for NIP-46 responses 2025-10-17 01:07:35 +02:00
Gigi
230e5380ca chore(bunker): expand debug logs for NIP-46 publish/subscribe (tags, content length) 2025-10-17 01:05:13 +02:00
Gigi
349237d097 fix(bunker): preserve signer context when wrapping publish/subscription for decrypt responses 2025-10-17 01:01:44 +02:00
Gigi
d4df9f0424 chore: commit pending changes to App and LoginOptions 2025-10-17 00:55:47 +02:00
Gigi
2f68e84002 debug(bunker): log NIP-46 request body preview (method, params, content slice)
- Helps align our request shape with Amber's expected BunkerRequest format
2025-10-17 00:53:58 +02:00
Gigi
b18dcc29cd revert: do not block when remote === user pubkey
- Amber may legally use user pubkey as remote id
- Remove validation and warning that caused false negatives
2025-10-17 00:45:39 +02:00
Gigi
680169e312 fix(bunker): validate bunker URI - remote must differ from user pubkey
- Prevents invalid state where Amber remote equals user pubkey
- Show actionable error to generate fresh connect link in Amber
2025-10-17 00:42:14 +02:00
Gigi
11753c4515 debug(bunker): add post-connect decrypt probe (nip04/nip44) with timeout
- Verifies Amber responds to NIP-46 decrypt after connect
- Logs probe results under [bunker]; non-blocking to UX
2025-10-17 00:29:52 +02:00
Gigi
bd29dfd65f chore(bunker): warn if remote pubkey equals user pubkey (invalid state)
- Add sanity check and toast guidance to reconnect via Amber
- Helps catch misconfigured bunker URIs that would never respond to requests
2025-10-17 00:26:54 +02:00
Gigi
4b1ae838e5 chore: add Amber to .gitignore 2025-10-17 00:23:58 +02:00
Gigi
85599d3103 fix(bunker): guarded connect with explicit permissions on restore
- Pass getDefaultBunkerPermissions() to connect() to ensure decrypt perms
- Keeps existing reconnection safeguards and logging
- Aims to make Amber accept decrypt requests after restore
2025-10-17 00:21:46 +02:00
Gigi
4603c5a258 fix(bunker): guarded connect after subscription to enable decrypt
- After opening subscription, call connect() once per session if remote is present
- Helps Amber authorize decrypt ops; safe-guarded and logged
- Keep isConnected=true for subsequent requireConnection() paths
2025-10-17 00:19:21 +02:00
Gigi
ec45fbc5e8 debug(bunker): log signer publish/subscribe calls and relay connectivity
- Wrap NostrConnectSigner publish/subscription to log relays and filters
- Log relayPool connectivity snapshot before bookmark decryption
- Helps diagnose decrypt requests not reaching Amber
2025-10-17 00:17:00 +02:00
Gigi
53400334b2 Revert "fix: skip bookmark decryption for bunker signers"
This reverts commit af4ff7081a.
2025-10-17 00:12:20 +02:00
Gigi
af4ff7081a fix: skip bookmark decryption for bunker signers
- Bunker (NIP-46) signers don't reliably support async decrypt operations
- Skip attempting to decrypt private bookmarks when using bunker
- Users can still see all public bookmarks
- Use extension signer for access to encrypted private bookmarks
- Prevents 15+ second hangs waiting for decrypt responses that won't come
2025-10-17 00:11:20 +02:00
Gigi
7f21b8ed76 fix: add startup delay to allow bunker subscription to fully establish
- Small 100ms delay after opening signer subscription
- Ensures the subscription is ready to receive decrypt responses
- May fix timeout issues with bunker decrypt operations
2025-10-17 00:09:27 +02:00
Gigi
55e44dcc9c debug: increase decrypt timeout to 15 seconds
- Give bunker operations more time to respond
- Will help determine if this is a timing issue or a fundamental limitation
- Still logging timeout errors for visibility
2025-10-17 00:05:53 +02:00
Gigi
59dac947ab fix: actually reorder bunker relay addition before signer recreation
- Previous commit had wrong message, code wasn't actually changed
- Now properly add relays to pool before creating NostrConnectSigner
- Ensures publishMethod/subscriptionMethod have full relay list available
2025-10-17 00:00:57 +02:00
Gigi
7d33c3c024 fix: add bunker relays to pool BEFORE recreating signer
- Bunker relays must be in pool when signer sets up publishMethod/subscriptionMethod
- Previously added after signer recreation, leaving pool incomplete
- This should fix decrypt operations that rely on publishMethod being set up correctly
- Same fix pattern as we used for signing
2025-10-16 23:59:14 +02:00
Gigi
38a014ef84 debug: verify subscriptionMethod and publishMethod on recreated signer
- Check if recreated NostrConnectSigner has methods needed for decrypt operations
- This will help identify if the issue is missing publishMethod for sending decrypt requests
- Or missing subscriptionMethod for receiving responses
2025-10-16 23:57:32 +02:00
Gigi
f451348430 debug: add logging to bookmark decrypt error handling
- Log nip04/nip44 decrypt errors instead of silently ignoring
- Will help identify why bookmark decryption is timing out with bunker
- Timeout errors will now be visible in console
2025-10-16 23:55:30 +02:00
Gigi
685aaf43b0 fix: add timeout to bookmark decryption to prevent hanging
- Wrap nip04/nip44 decrypt calls with 5 second timeout
- Prevents UI from hanging if decrypt request doesn't receive response
- Allows graceful degradation instead of infinite wait
- With bunker, decrypt responses may not arrive if perms/relay issues
2025-10-16 23:54:31 +02:00
Gigi
d6a20b5272 debug: add [bunker] prefix to bookmark decryption logging
- Better filtering of bunker-related logs
- Track when signer candidate is being selected
2025-10-16 23:50:16 +02:00
Gigi
d8d7a19fa1 fix: pass account.signer to EventFactory instead of full account
- EventFactory expects an EventSigner interface with signEvent method
- account.signer is the actual NostrConnectSigner instance
- Add debug logging to trace signer type
- This should fix signing hanging when using bunker
2025-10-16 23:46:25 +02:00
Gigi
63626fae3a fix: recreate NostrConnectSigner with pool on account restore
- Restored signers from JSON don't have pool context
- Recreate signer with pool passed explicitly to fix subscriptionMethod binding
- This ensures signing requests are properly sent/received through the pool
- Fixes hanging on signing after page reload
2025-10-16 23:44:43 +02:00
Gigi
de09ef2935 fix: avoid adding duplicate bunker relays to pool
- Only add bunker relays that aren't already in the pool
- Prevents duplicate subscriptions that could cause signing hangs
- Improves stability when account is reconnected
2025-10-16 23:43:03 +02:00
Gigi
bcb28a63a7 refactor: cleanup after bunker signing implementation
- Remove reconnectBunkerSigner function, inline logic into App.tsx for better control
- Clean up try-catch wrapper in highlightCreationService, signing now works reliably
- Remove extra logging from signing process (already has [bunker] prefix logs)
- Simplify nostrConnect.ts to just export permissions helper
- Update api/article-og.ts to use local relay config instead of import
- All bunker signing tests now passing 
2025-10-16 23:39:31 +02:00
Gigi
a479903ce3 debug: log signer state before signing 2025-10-16 23:34:59 +02:00
Gigi
567d105261 fix: restore isConnected = true so signing doesn't hang
- Without this, requireConnection() tries to connect() again
- That breaks the entire signing flow
- Mark signer as connected after opening subscription
2025-10-16 23:33:31 +02:00
Gigi
83743c5a9f fix: remove decrypt queue that was blocking highlight signing
- The global decrypt queue in bookmarkProcessing was getting stuck
- Caused all NIP-46 operations to hang indefinitely
- Decrypt already has per-call timeouts; queue was unnecessary
- Highlights should now sign immediately without waiting for bookmarks
2025-10-16 23:30:18 +02:00
Gigi
0b8f88ea1d revert(highlight): avoid pre-connect; rely on requireConnection during sign
- Remove manual connect/open in highlight flow
- Prevent side-effects that may interfere with pending requests
2025-10-16 23:28:06 +02:00
Gigi
fadc755930 fix(highlight): ensure NIP-46 signer is open/connected before signing
- Pre-open subscription and connect() if bunker signer present
- Restores reliable highlight signing with Amber (NIP-46)
2025-10-16 23:26:28 +02:00
Gigi
f67f171e64 fix(bookmarks): serialize decrypt/unlock NIP-46 operations
- Queue decrypt/unlock to avoid overlapping requests hanging the provider
- Keep timeouts and detailed [bunker] logs
- Should stop decrypt flood from blocking highlight signing
2025-10-16 23:21:52 +02:00
Gigi
449c59015e refactor(api): import RELAYS from central config to keep DRY
- Remove duplicated relay array from api/article-og.ts
- Import from src/config/relays.ts instead
2025-10-16 23:20:57 +02:00
Gigi
4d697e6a79 chore(relays): update RELAYS list (include relay.nsec.app early)
- Aligns app relay set with commonly used relays
- May improve connectivity and latency for NIP-46 roundtrips
2025-10-16 23:20:05 +02:00
Gigi
04ae70873a fix: restore direct pool bindings for NIP-46 methods
- Revert logging wrappers around subscription/publish
- Use pool.subscription.bind(pool) and pool.publish.bind(pool)
- Avoid any side effects interfering with signer requests
2025-10-16 23:18:37 +02:00
Gigi
2f8a64826a debug: restore [bunker] logs around highlight signing
- Log before/after factory.sign for highlights
- Surface errors to console for fast diagnosis
2025-10-16 23:16:59 +02:00
Gigi
11cb3542ee fix: revert forced connect on reconnection to restore signing
- Remove connect(undefined, permissions) on restore
- Let requireConnection() trigger connect per op
- Keeps highlights signing working as before while we debug decrypt
2025-10-16 23:11:08 +02:00
Gigi
905296621c fix: pass permissions on reconnect to ensure decrypt allowed
- Call signer.connect(undefined, permissions) when restoring account
- Ensures bunker re-grants decrypt (nip04/nip44) if needed
- Keeps implementation aligned with applesauce examples
2025-10-16 23:06:06 +02:00
Gigi
769484bc0d debug: log NIP-46 subscribe/publish traffic
- Wrap subscriptionMethod/publishMethod to log relays, filters, responses
- Helps confirm decrypt/sign requests are actually sent and on which relays
- Continue using applesauce-recommended binding pattern
2025-10-16 22:58:41 +02:00
Gigi
27ff4cef22 fix: properly connect NostrConnectSigner on reconnection
- Call signer.connect() instead of forcing isConnected
- Add [bunker] logs for connect lifecycle
- Should unblock nip44/nip04 decrypt calls that were timing out
2025-10-16 22:55:17 +02:00
Gigi
a352e2616e fix: prevent decrypt hangs with timeout + fallback
- Wrap nip44/nip04 decrypt and unlockHiddenTags in timeouts
- Fallback nip44->nip04 if nip44 hangs/fails
- Add detailed [bunker] logs for each stage
- Keeps UI responsive while debugging bunker responses
2025-10-16 22:51:58 +02:00
Gigi
77cbb9394f refactor: simplify bunker implementation following applesauce patterns
- Remove bunkerFixVersion migration logic
- Simplify account loading to match applesauce examples
- Simplify reconnectBunkerSigner (no waiting, no complex logging)
- Direct nip04/nip44 exposure from signer (like ExtensionAccount)
- Clean up bookmark service account checking
- Keep debug logs for now until verified working
2025-10-16 22:48:46 +02:00
Gigi
39c8b3dfe4 fix: auto-clear old bunker accounts that were created with wrong setup
- Old bunker accounts were created before proper method binding
- Add version check to clear nostr-connect accounts once
- Preserves extension accounts
- Users will need to reconnect bunker (one-time migration)
2025-10-16 22:45:56 +02:00
Gigi
7bd11e695e fix: use proper NostrConnectSigner setup per applesauce examples
- Was setting NostrConnectSigner.pool (wrong approach)
- Should set subscriptionMethod and publishMethod directly
- Follows the pattern from applesauce/packages/examples/src/examples/signers/bunker.tsx
- This is the correct way to wire up the signer with the relay pool
2025-10-16 22:44:56 +02:00
Gigi
a76b703d36 fix: cache wrapped nip04/nip44 objects instead of using getters
- Getters were returning new objects each time
- Code was getting reference then calling decrypt on it
- Now assign wrapped objects directly as properties
- This ensures our logging wrappers are actually used
2025-10-16 22:42:47 +02:00
Gigi
df51173405 debug: wrap nip04/nip44 methods with [bunker] logging
- Log when decrypt/encrypt methods are called
- Log when they complete or fail
- Show pubkey and ciphertext/plaintext lengths
- This will tell us if decrypt is hanging in the signer or never returning
2025-10-16 22:41:04 +02:00
Gigi
a79d7f9eaf debug: enable NostrConnectSigner logging to diagnose decrypt hang
- Add detailed logging for signer subscription opening
- Enable debug logs for NostrConnectSigner via localStorage
- This will show if requests are being sent and responses received
- Helps diagnose why decrypt requests hang indefinitely
2025-10-16 22:40:00 +02:00
Gigi
1032a46456 fix: wait for bunker relay connections before marking signer ready
- Decryption was hanging because relay connections weren't established
- NostrConnectSigner sends requests via relays but pool wasn't connected
- Now wait for at least one bunker relay to be connected (5s timeout)
- Prevents decrypt/sign requests from being sent to unconnected relays
- Adds detailed logging for connection status
2025-10-16 22:37:45 +02:00
Gigi
ae997758ab debug: add detailed [bunker] logs for bookmark decryption
- Log account properties and nip04/nip44 availability
- Log signer fallback logic
- Log each decryption attempt (nip44 and nip04)
- Log success/failure for hidden tags and content decryption
- Helps diagnose why bunker decryption isn't working
2025-10-16 22:36:00 +02:00
Gigi
91a827324d fix: expose nip04/nip44 on NostrConnectAccount for bookmark decryption
- NostrConnectSigner has nip04/nip44 but not exposed at account level
- ExtensionAccount exposes these via getters, NostrConnectAccount didn't
- Add properties dynamically during reconnection for compatibility
- Enables private bookmark decryption with bunker accounts
2025-10-16 22:34:18 +02:00
Gigi
bf849c9faa refactor: clean up bunker implementation for better maintainability
- Extract reconnectBunkerSigner into reusable helper function
- Reduce excessive debug logging in App.tsx (90+ lines → 30 lines)
- Simplify account restoration logic with cleaner conditionals
- Remove verbose signing logs from highlightCreationService
- Keep only essential error logs for debugging
- Follows DRY principles and applesauce patterns
2025-10-16 22:32:06 +02:00
Gigi
118ab46ac0 fix: add bunker relays to relay pool for signing requests
- NostrConnectSigner uses its own relay list for signing requests
- Pool must be connected to bunker relays to send/receive requests
- Add bunker relays to pool when reconnecting after page load
- This fixes signing hanging indefinitely
2025-10-16 22:28:54 +02:00
Gigi
d2f2b689f9 fix: create and setup pool BEFORE loading accounts from localStorage
- NostrConnectAccount.fromJSON needs NostrConnectSigner.pool to be set
- Move pool creation and setup before accounts.fromJSON()
- This fixes 'Missing subscriptionMethod' error on page reload
- Now bunker accounts can be properly restored from localStorage
2025-10-16 22:25:15 +02:00
Gigi
5229e45566 fix: remove unused getDefaultBunkerPermissions import from App.tsx
- Import was no longer needed after removing connect() call
- Fixes eslint no-unused-vars error
- All linter and type checks now pass
2025-10-16 22:22:16 +02:00
Gigi
b17043e85d debug: add detailed logging for account restoration from localStorage
- Log raw accounts JSON from localStorage
- Log parsed account count and types
- Log active ID lookup and restoration steps
- This will help diagnose why accounts aren't persisting across refresh
2025-10-16 22:21:05 +02:00
Gigi
19ca909ef5 fix: setup pool and relays BEFORE bunker reconnection subscription
- Move NostrConnectSigner.pool assignment before active account subscription
- Move pool.group(RELAYS) before subscription
- This ensures pool is ready when bunker signer tries to send requests
- The subscription can fire immediately, so pool must be configured first
- Add log to confirm pool assignment
2025-10-16 22:17:48 +02:00
Gigi
f7ff309b6e fix: set isConnected=true after opening restored bunker signer
- After page reload, signer is restored with isConnected=false
- When signing, requireConnection() would call connect() again without permissions
- Now we set isConnected=true after open() to prevent re-connection
- The bunker remembers permissions from initial connection
- This ensures signing works after page refresh
2025-10-16 22:16:06 +02:00
Gigi
ea5a8486b9 fix: don't call connect() again on restored bunker signer
- fromBunkerURI() already calls connect() with permissions during login
- Calling connect() again breaks the connection state
- Just call open() to ensure subscription is active
- This matches the pattern in applesauce examples which don't reconnect
- Log final signer status including relays for debugging
2025-10-16 22:15:02 +02:00
Gigi
58897b3436 fix: prevent double reconnection and add status checks after connect
- Track reconnected accounts to avoid double-connecting
- Log signer status after open() and connect() to verify state
- This should prevent the double reconnection issue
- Will help diagnose if connection is being lost immediately
2025-10-16 22:14:12 +02:00
Gigi
6a59ecfa47 debug: prefix all bunker logs with [bunker] for easy filtering
- Update App.tsx reconnection logs
- Update highlightCreationService signing logs
- Update LoginOptions error logs
- Makes it easy to filter console with 'bunker' keyword
2025-10-16 22:12:56 +02:00
Gigi
272066c6e0 debug: add comprehensive logging for bunker reconnection and signing
- Add detailed logs for active account changes and bunker detection
- Log signer status (listening, isConnected, hasRemote)
- Log each step of reconnection process
- Add signing attempt logs in highlightCreationService
- This will help diagnose where the signing process hangs
2025-10-16 22:08:14 +02:00
Gigi
0426c9d3b0 fix: correct Accounts import in App.tsx
- Import Accounts from 'applesauce-accounts' instead of 'applesauce-accounts/accounts'
- Fixes TypeScript error TS2305
- All linter and type checks now pass
2025-10-16 21:58:08 +02:00
Gigi
c22419ba0e fix: ensure bunker signer reconnects with permissions on app restore
- Create centralized getDefaultBunkerPermissions() in nostrConnect service
- Update LoginOptions to use centralized permissions
- Add bunker reconnection logic in App.tsx on active account change
- Reconnect bunker signer with open() and connect() when restored from localStorage
- Surface permission errors to users via toast in useHighlightCreation
- Ensures highlights, reactions, settings, and bookmarks work after page reload with bunker
2025-10-16 21:56:31 +02:00
Gigi
8278fed2fb fix: request NIP-46 permissions for bunker signing
- Add explicit signing permissions for event kinds: 5, 7, 17, 9802, 30078, 39701, 0
- Add encryption/decryption permissions: nip04_encrypt/decrypt, nip44_encrypt/decrypt
- Improve error messages when bunker permissions are missing or denied
- Add debug logging hint for bunker permission issues in write service
- This ensures highlights, reactions, settings, reading positions, and web bookmarks all work with bunker
2025-10-16 21:47:59 +02:00
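A sketch of a centralized permissions helper like the `getDefaultBunkerPermissions()` introduced in the following commit, using the conventional NIP-46 permission strings; the exact format expected by applesauce and Amber should be verified:

```typescript
// "sign_event:<kind>" plus the four encryption scopes follows the common NIP-46 permission convention
export function getDefaultBunkerPermissions(): string[] {
  const kinds = [5, 7, 17, 9802, 30078, 39701, 0] // event kinds listed in the commit above
  return [
    ...kinds.map((kind) => `sign_event:${kind}`),
    'nip04_encrypt',
    'nip04_decrypt',
    'nip44_encrypt',
    'nip44_decrypt',
  ]
}
```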
Gigi
b24a65b490 feat: add Login with Bunker authentication option
- Wire NostrConnectSigner to RelayPool in App.tsx
- Create LoginOptions component with Extension and Bunker login flows
- Show LoginOptions in BookmarkList when user is logged out
- Add applesauce-accounts and applesauce-signers to vite optimizeDeps
- Support NIP-46 bunker:// URI authentication alongside extension login
2025-10-16 21:17:34 +02:00
Gigi
fb509fabd8 style(settings): add proper spacing around middot separator between version and commit 2025-10-16 20:59:27 +02:00
Gigi
d21285123f feat(settings): separate version and commit links - version links to release, commit links to commit 2025-10-16 20:59:09 +02:00
Gigi
1029b6be0c feat(settings): link version to GitHub release page instead of commit 2025-10-16 20:57:57 +02:00
Gigi
3fff9455a1 docs: update CHANGELOG.md for v0.6.24 2025-10-16 20:00:22 +02:00
Gigi
8c6232e029 chore(release): bump version to 0.6.24 2025-10-16 19:59:48 +02:00
Gigi
f6c562e9be fix(types): add global declarations for build-time defines and fix eslint issues 2025-10-16 19:58:57 +02:00
Gigi
a92b14e877 docs: update CHANGELOG.md for v0.6.23 2025-10-16 19:57:11 +02:00
Gigi
b69a956247 chore(release): bump version to 0.6.23 2025-10-16 19:54:35 +02:00
Gigi
82a8dcf6eb chore(settings): link short commit hash to GitHub and remove timestamp/branch 2025-10-16 19:35:20 +02:00
Gigi
8e19e22289 feat(settings): display app version and git commit in settings footer 2025-10-16 19:32:18 +02:00
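A sketch of how build-time defines like these are typically injected via Vite; the macro names match the changelog entries below, while the git commands are an assumption about this repo's setup:

```typescript
// vite.config.ts (sketch)
import { defineConfig } from 'vite'
import { execSync } from 'node:child_process'
import { version } from './package.json'

const git = (cmd: string): string => execSync(cmd).toString().trim()

export default defineConfig({
  define: {
    __APP_VERSION__: JSON.stringify(version),
    __GIT_COMMIT__: JSON.stringify(git('git rev-parse --short HEAD')),
    __GIT_BRANCH__: JSON.stringify(git('git rev-parse --abbrev-ref HEAD')),
  },
})
```

Components can then reference `__APP_VERSION__` and `__GIT_COMMIT__` directly, given the global type declarations added in f6c562e9be.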
Gigi
e167b57810 fix(api): align article-og relay usage to RelayPool.request and remove open/close 2025-10-16 19:20:54 +02:00
Gigi
ba3b82e6b5 chore(app): add RouteDebug gated by ?debug=1 to log route state 2025-10-16 19:19:33 +02:00
Gigi
b5edfbb2c9 chore(api): add structured debug logs to article-og handler with ?debug=1 2025-10-16 19:17:12 +02:00
Gigi
48048f877a fix(vercel): limit /a/:naddr rewrite to bots 2025-10-16 19:16:29 +02:00
Gigi
bd1afc54c3 docs: update CHANGELOG.md for v0.6.22 2025-10-16 16:02:02 +02:00
Gigi
a2c4bed0f5 chore: bump version to 0.6.22 2025-10-16 16:01:19 +02:00
Gigi
9bad49fe5f feat(vercel): add rewrite rule for article OG endpoint
Route /a/:naddr requests to /api/article-og for dynamic social preview tags.
2025-10-16 16:00:36 +02:00
Gigi
2aa6536496 Merge pull request #17 from dergigi/social-preview
Add dynamic social preview for article deep-links
2025-10-16 15:58:52 +02:00
Gigi
bd6d8a0342 chore(api): remove debug logging from article-og endpoint 2025-10-16 15:50:00 +02:00
Gigi
dc8e86bc57 fix(api): use history.replaceState before redirecting to SPA
Set the browser history to /a/{naddr} before redirecting to /, so when the SPA loads it sees the correct URL path.
2025-10-16 15:41:22 +02:00
Gigi
32b843908e debug: add logging and debug endpoint to article-og
Add console logging for debugging and ?debug=1 query param to see request details in browser.
2025-10-16 15:34:50 +02:00
Gigi
5a71480459 fix(api): add base tag for proper asset loading
Use named parameter syntax in Vercel rewrite and add <base href="/"> tag to ensure assets load correctly from root when serving index.html through the API.
2025-10-16 15:27:13 +02:00
Gigi
17455aa47b fix(api): serve index.html to browsers with preserved URL
Instead of redirecting, serve the static index.html file directly. The Vercel rewrite preserves the /a/{naddr} URL, allowing client-side SPA routing to work correctly.
2025-10-16 15:20:10 +02:00
Gigi
4cc32c27de fix(api): detect crawlers and redirect browsers to SPA
Browsers get 302 redirect to / where the SPA handles routing client-side with the original /a/{naddr} URL preserved. Crawlers/bots get the full HTML with OG meta tags.
2025-10-16 14:43:29 +02:00
Gigi
99bfe209a5 fix(api): use meta refresh instead of SPA boot in OG endpoint
Browsers will immediately redirect to / and load the SPA client-side, while crawlers/bots ignore meta refresh and only see the OG meta tags.
2025-10-16 14:38:17 +02:00
Gigi
0a28bfbd50 fix(api): replace any type with Filter from nostr-tools 2025-10-16 14:32:35 +02:00
Gigi
ba9fb109f6 refactor(api): DRY improvements for article OG endpoint
- Extract fetchEventsFromRelays helper to eliminate duplication
- Add setCacheHeaders helper for consistent header setting
- Parallelize article and profile fetching for faster response
- Move relayPool.close() to finally block to prevent leaks
- Remove redundant cacheKey variable and sorting
2025-10-16 14:31:39 +02:00
Gigi
ec9d2fcb49 chore(meta): add social preview image to homepage OG tags 2025-10-16 14:23:44 +02:00
Gigi
f841043e03 chore(assets): add default social preview image (1200x630) 2025-10-16 14:22:04 +02:00
Gigi
94dc95e1f0 feat(api): dynamic OG HTML for /a/{naddr} using relay metadata 2025-10-16 14:21:49 +02:00
Gigi
32a5145d8f chore(vercel): route /a/* to article OG handler 2025-10-16 14:20:58 +02:00
Gigi
a856e8ca26 docs: update CHANGELOG.md for v0.6.21 2025-10-16 09:57:13 +02:00
Gigi
d54306cf92 chore: bump version to 0.6.21 2025-10-16 09:56:06 +02:00
Gigi
9fdb96b64e Merge pull request #16 from dergigi/reading-progress-filters-part-two
feat: add reading progress filters and reads/links tabs
2025-10-16 09:55:32 +02:00
Gigi
c50aa3a243 fix: resolve TypeScript errors from merge
- Remove unused readingPositions and markedAsReadIds from useBookmarksData
- Remove eventStore parameter from useBookmarksData call
- Add reads and links fields to MeCache interface
2025-10-16 09:53:20 +02:00
Gigi
adef1a922c chore: remove completed plan file 2025-10-16 09:49:43 +02:00
Gigi
99df4d6761 chore: merge master into reading-progress-filters-part-two
Resolved conflicts by keeping feature branch changes:
- Kept /me/reads and /me/links routes (not /me/archive)
- Kept ReadingProgressFilters component and readingProgressUtils
- Kept readsService, linksService, and readingDataProcessor
- Restored files that were renamed/deleted in master
2025-10-16 09:49:13 +02:00
Gigi
5f6a414953 fix: resolve all linter errors and type issues
- Remove unused state variables (readsMap, linksMap) by using only setters
- Move VALID_FILTERS constant outside component to fix exhaustive-deps warning
- Remove unused isReading variable in ReadingProgressIndicator
- Remove unused extractUrlFromBookmark function and IndividualBookmark import
- Fix type errors in linksFromBookmarks by extracting metadata from tags instead of non-existent properties
2025-10-16 09:36:17 +02:00
Gigi
ed17a68986 refactor: simplify filter icon colors to blue (except green for completed) 2025-10-16 09:33:04 +02:00
Gigi
bedf3daed1 feat: add URL routing for reading progress filters 2025-10-16 09:32:30 +02:00
Gigi
2c913cf7e8 feat: color reading progress filter icons when active 2025-10-16 09:30:16 +02:00
Gigi
aff5bff03b refactor: use neutral text color for 'started' reading progress state 2025-10-16 09:29:41 +02:00
Gigi
e90f902f0b feat: add amber color for 'started' reading progress state (0-10%) 2025-10-16 09:28:06 +02:00
Gigi
d763aa5f15 fix: merge reading progress even when timestamp is older than bookmark 2025-10-16 09:20:24 +02:00
Gigi
9d6b1f6f84 fix: call onItem callback directly for items already in reads map 2025-10-16 09:18:32 +02:00
Gigi
9eb2f35dbf debug: add console logging to trace reading progress enrichment 2025-10-16 09:13:34 +02:00
Gigi
5f33ad3ba0 fix(reads): use setState callback pattern for background enrichment
- Replace closure over tempMap with setState callback pattern
- Ensures we always work with latest state when merging progress
- Prevents stale closure issues that block state updates
- Apply same fix to both reads and links tabs
- Fixes reading progress not updating in UI
2025-10-16 09:13:19 +02:00
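A sketch of the setState-callback merge this commit describes, in the spirit of the `mergeReadItem` utility mentioned elsewhere in this log (types and names are illustrative):

```typescript
import type { Dispatch, SetStateAction } from 'react'

// Illustrative item shape; the real ReadItem carries more fields
type ReadItem = { id: string; readingProgress?: number }

// Merge a background-enriched item into the latest Map state, never a captured snapshot,
// so enrichment results can't be lost to stale closures.
function mergeReadItem(
  setReads: Dispatch<SetStateAction<Map<string, ReadItem>>>,
  item: ReadItem,
) {
  setReads((prev) => {
    const next = new Map(prev)
    next.set(item.id, { ...prev.get(item.id), ...item })
    return next
  })
}
```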
Gigi
3db4855532 fix(reads): use naddr format for IDs to match reading positions
- Convert bookmark coordinates to naddr format in deriveReadsFromBookmarks
- Reading positions store progress with naddr as ID
- Using naddr format enables proper merging of reading progress data
- Simplify getReadItemUrl to use item.id directly (already naddr)
- Fixes reading progress not showing in /me/reads tab
2025-10-16 09:11:21 +02:00
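A sketch of the coordinate-to-naddr conversion this commit describes, using nostr-tools' nip19 helpers (error handling omitted):

```typescript
import { nip19 } from 'nostr-tools'

// "30023:<pubkey>:<identifier>" → "naddr1...", matching the IDs reading positions are stored under
function coordinateToNaddr(coordinate: string): string {
  const [kind, pubkey, ...rest] = coordinate.split(':')
  return nip19.naddrEncode({ kind: Number(kind), pubkey, identifier: rest.join(':') })
}
```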
Gigi
3305be1da5 feat(reads): extract image, summary, and published date from bookmark tags
- Extract metadata from tags same way BookmarkItem does (DRY)
- Add image tag extraction for article images
- Add summary tag extraction for article summaries
- Add published_at tag extraction for publish dates
- Images and summaries now display in /me/reads tab
2025-10-16 09:08:57 +02:00
Gigi
fe55e87496 fix: remove unused import from readsFromBookmarks 2025-10-16 09:06:06 +02:00
Gigi
f78f1a3460 fix(reads): use bookmark.content for article titles
- IndividualBookmark doesn't have separate title/event fields
- After hydration, article titles are stored in content field
- Simplified extraction logic to just use bookmark.content
2025-10-16 09:06:00 +02:00
Gigi
e73d89739b fix(reads): extract article titles from events using applesauce helpers
- Use getArticleTitle, getArticleSummary, getArticleImage, getArticlePublished from Helpers
- Extract metadata from bookmark.event when available
- Fallback to bookmark fields if event not hydrated
- Fixes 'Untitled' articles in Reads tab
2025-10-16 09:01:51 +02:00
Gigi
7e2b4b46c9 feat(me): populate reads/links from bookmarks instantly
- Add deriveReadsFromBookmarks helper to convert 30023 bookmarks to ReadItems
- Add deriveLinksFromBookmarks helper for web bookmarks (39701) and URLs
- Update loadReadsTab to show bookmarked articles immediately, enrich in background
- Update loadLinksTab to show bookmarked links immediately, enrich in background
- Background enrichment merges reading progress only for displayed items
- Preserve existing pull-to-refresh and empty state logic
2025-10-16 08:45:31 +02:00
Gigi
fddf79e0c6 feat: add named kind constants, streaming updates, and fix reads/links tabs
- Create src/config/kinds.ts with named Nostr kind constants
- Add streaming support to fetchAllReads and fetchLinks with onItem callbacks
- Update all services to use KINDS constants instead of magic numbers
- Add mergeReadItem utility for DRY state management
- Add fallbackTitleFromUrl for external links without titles
- Relax validation to allow external items without titles
- Update Me.tsx to use streaming with Map-based state for reads/links
- Fix refresh to merge new data instead of clearing state
- Fix empty states for Reads and Links tabs (no more infinite skeletons)
- Services updated: readsService, linksService, libraryService, bookmarkService, exploreService, highlights/fetchByAuthor
2025-10-16 08:27:10 +02:00
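A sketch of what a named-kinds config like the `src/config/kinds.ts` described above might contain; the kind numbers are the standard Nostr kinds referenced throughout this log, the constant names are illustrative:

```typescript
// src/config/kinds.ts (sketch)
export const KINDS = {
  PROFILE: 0,           // kind:0 profile metadata
  REACTION: 7,          // kind:7 reactions
  HIGHLIGHT: 9802,      // kind:9802 highlights
  ARTICLE: 30023,       // kind:30023 long-form articles
  APP_DATA: 30078,      // kind:30078 (NIP-78) app data, used here for reading positions
  WEB_BOOKMARK: 39701,  // kind:39701 web bookmarks
} as const

export type Kind = (typeof KINDS)[keyof typeof KINDS]
```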
Gigi
cf2098a723 Merge pull request #15 from dergigi/revert-14-reading-progress-filters
Revert "Add reading progress filters and split Reads/Links tabs"
2025-10-16 08:06:06 +02:00
Gigi
5568437663 Revert "Add reading progress filters and split Reads/Links tabs" 2025-10-16 08:05:20 +02:00
Gigi
7bfd7fdf6c Merge pull request #14 from dergigi/reading-progress-filters
Add reading progress filters and split Reads/Links tabs
2025-10-16 01:46:32 +02:00
Gigi
e6876d141f fix: show skeletons during initial tab load for reads/links 2025-10-16 01:43:36 +02:00
Gigi
5bb81b3c22 fix: always show skeletons for reads/links when no data
Removed empty state messages like "No articles in your reads" and
"No links yet" - now just show loading skeletons until data arrives.

This is simpler and prevents showing empty states while data is still
being fetched in the background.

Users will only see:
- Skeletons when no data (loading or truly empty)
- "No articles/links match this filter" when filtered out
- Actual content when data is available
2025-10-16 01:40:37 +02:00
Gigi
1e8e58fa05 fix: show loading skeletons correctly for reads and links tabs
The bug was that showSkeletons checked if ANY tab had data, so if you
had highlights or bookmarks, it would never show skeletons for reads/links
even while they were still loading.

Fix: Each tab now checks its own loading state (loading && tabData.length === 0)
instead of using the shared showSkeletons variable.

This makes the logic simple and clear:
1. If loading AND no data → show skeletons
2. If not loading AND no data → show empty state
3. If has data but filtered out → show no match message
4. Otherwise → show content
2025-10-16 01:39:03 +02:00
Gigi
f44e36e4bf refactor: make code more DRY by extracting shared utilities
- Create readingProgressUtils.ts with filterByReadingProgress function
- Create readingDataProcessor.ts with shared processing functions:
  - processReadingPositions
  - processMarkedAsRead
  - filterValidItems
  - sortByReadingActivity
- Refactor readsService.ts to use shared utilities
- Refactor linksService.ts to use shared utilities
- Eliminate 100+ lines of duplicated code
- Simplify Me.tsx filter logic to 2 lines

Benefits:
- Single source of truth for reading progress filtering
- Easier to maintain and modify
- Less code duplication across services
- More testable with isolated utility functions
2025-10-16 01:36:28 +02:00
Gigi
11c7564f8c feat: split Reads tab into Reads and Links
- Reads: Only Nostr-native articles (kind:30023)
- Links: Only external URLs with reading progress
- Create linksService.ts for fetching external URL links
- Update readsService to filter only Nostr articles
- Add Links tab between Reads and Writings with same filtering
- Add /me/links route
- Update meCache to include links field
- Both tabs support reading progress filters
- Lazy loading for both tabs

This provides clear separation between native Nostr content and external web links.
2025-10-16 01:33:04 +02:00
Gigi
a064376bd8 fix: filter out 'Untitled' items from Reads tab
- Exclude Nostr articles without event data (can't fetch title)
- Exclude external URLs without proper titles
- Prevents cluttering Reads with items that have no meaningful title
- Only shows items we can properly identify and display
2025-10-16 01:25:31 +02:00
Gigi
292e8e9bda fix: only show external URLs in Reads if they have reading progress
- External URLs with 0% progress are now filtered out
- External URLs only appear if readingProgress > 0 OR marked as read
- Nostr articles still show even at 0% (bookmarked articles)
- Keeps Reads tab focused on actual reading activity for external links
2025-10-16 01:24:50 +02:00
Gigi
951a3699ca fix: replace spinners with skeleton placeholders in Me tabs
- Replace spinner in highlights tab with 'No highlights yet' message
- Replace spinner in reading-list tab with 'No bookmarks yet' message
- Only show these messages when loading is complete and arrays are empty
- Remove unused faSpinner import
- Consistent with skeleton placeholder pattern used elsewhere
2025-10-16 01:21:31 +02:00
Gigi
860ec70b1c feat: implement lazy loading for Me component tabs
- Add loadedTabs state to track which tabs have been loaded
- Create tab-specific loading functions (loadHighlightsTab, loadWritingsTab, loadReadingListTab, loadReadsTab)
- Only load data for active tab on mount and tab switches
- Show cached data immediately, refresh in background when revisiting tabs
- Update pull-to-refresh to only reload the active tab
- Show loading skeletons only on first load of each tab
- Works for both /me (own profile) and /p/ (other profiles)

This reduces initial load time from 30+ seconds to 2-5 seconds by only fetching data for the active tab.
2025-10-16 01:19:06 +02:00
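A sketch of the lazy per-tab loading this commit describes, assuming one loader function per tab (hook and type names are illustrative):

```typescript
import { useEffect, useState } from 'react'

type MeTab = 'highlights' | 'writings' | 'reading-list' | 'reads' | 'links'

// Load only the active tab; first visit fetches fresh, revisits show cached data
// immediately while the loader refreshes in the background.
function useLazyTabs(activeTab: MeTab, loaders: Record<MeTab, () => Promise<void>>) {
  const [loadedTabs, setLoadedTabs] = useState<Set<MeTab>>(new Set())

  useEffect(() => {
    void loaders[activeTab]().then(() =>
      setLoadedTabs((prev) => new Set(prev).add(activeTab)),
    )
  }, [activeTab, loaders])

  return loadedTabs // tabs in this set have loaded at least once (skip skeletons)
}
```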
Gigi
2b69c72939 refactor: simplify loading state to use unified logic
- Remove separate loadingReads state
- Keep single loading state true until ALL data is loaded
- Matches existing pattern used in other tabs
- Keeps code DRY and simple
2025-10-16 01:08:56 +02:00
Gigi
b98d774cbf fix: filter out reads without timestamps
- Exclude items without readingTimestamp or markedAt from reads
- Prevents 'Just Now' items from appearing in the reads list
- Only show reads with valid activity timestamps
2025-10-16 01:06:27 +02:00
Gigi
8972571a18 fix: keep showing skeletons while reads are loading
- Add separate loadingReads state to track reads fetching
- Show skeletons during the entire reads loading period
- Set loading=false after public data (highlights/writings) completes
- Prevents showing 'No articles match this filter' while reads are being fetched
2025-10-16 01:05:42 +02:00
Gigi
ab5d5dca58 debug: add logging to reads filtering 2025-10-16 00:59:28 +02:00
Gigi
e383356af1 feat: rename Archive to Reads and expand functionality
- Create new readsService to aggregate all read content from multiple sources
- Include bookmarked articles, reading progress tracked articles, and manually marked-as-read items
- Update Me component to use new reads service
- Update routes from /me/archive to /me/reads
- Update meCache to use ReadItem[] instead of BlogPostPreview[]
- Update filter logic to use actual reading progress data
- Support both Nostr-native articles and external URLs in reads
- Fetch and display article metadata from multiple sources
- Sort by most recent reading activity
2025-10-16 00:45:16 +02:00
Gigi
165d10c49b feat: split 'To read' filter into 'Unopened' and 'Started'
- Add 'unopened' filter (no progress, 0%) - uses fa-envelope icon
- Add 'started' filter (0-10% progress) - uses fa-envelope-open icon
- Remove 'to-read' filter
- Use classic/regular variant for envelope icons
- Update filter logic in BookmarkList and Me components
- New filter ranges:
  - Unopened: 0% (never opened)
  - Started: 0-10% (opened but not read far)
  - Reading: 11-94%
  - Completed: 95-100%
2025-10-16 00:13:34 +02:00
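A sketch of the filter predicate implied by the ranges above, approximating the `filterByReadingProgress` utility mentioned elsewhere in this log (progress is 0–100; marked-as-read counts as completed):

```typescript
type ReadingProgressFilterType = 'all' | 'unopened' | 'started' | 'reading' | 'completed'

function matchesReadingProgress(
  filter: ReadingProgressFilterType,
  progress: number,
  markedAsRead: boolean,
): boolean {
  switch (filter) {
    case 'unopened':
      return progress === 0 && !markedAsRead       // never opened
    case 'started':
      return progress > 0 && progress <= 10        // opened but not read far
    case 'reading':
      return progress > 10 && progress < 95
    case 'completed':
      return progress >= 95 || markedAsRead        // finished or manually marked as read
    default:
      return true                                  // 'all'
  }
}
```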
Gigi
e0869c436b fix: adjust 'Reading' filter to 11-94% range
- Change 'reading' filter from 10-95% to 11-94%
- Creates clearer boundaries between filters:
  - To read: 0-10%
  - Reading: 11-94%
  - Completed: 95-100%
2025-10-16 00:10:20 +02:00
Gigi
95432fc276 fix: reading position filters now work correctly in bookmarks
- Match marked-as-read event IDs to bookmark coordinate IDs
- Use eventStore to lookup events and build coordinates from them
- Add both event ID and coordinate format to markedAsReadIds set
- This fixes filtering of bookmarked articles by reading progress
- Apply same fix to both Bookmarks and Explore components
2025-10-15 23:54:44 +02:00
Gigi
1982d25fa8 feat: add fancy animation to Mark as Read button
- Icon spins 360° with bounce effect (scale up during spin)
- Button background changes to vibrant green gradient (#10b981)
- Green pulsing box-shadow effect on activation
- Button scales up slightly on click for emphasis
- Holds green state for 1.5 seconds
- Smoothly fades to gray after animation
- Final state is gray button to indicate marked status
- Uses cubic-bezier easing for modern, smooth feel
- Total animation duration: 2.5 seconds
- Prevents interaction during animation
2025-10-15 23:39:14 +02:00
Gigi
2fc64b6028 feat: change 'To read' filter to show 0-10% progress
- Update 'to-read' filter range from 0-5% to 0-10%
- Update 'reading' filter to start at 10% instead of 5%
- Adjust filter comments to reflect new ranges
2025-10-15 23:37:59 +02:00
Gigi
6e8686a49d feat: treat marked-as-read articles as 100% progress
- Fetch marked-as-read articles in useBookmarksData and Explore
- Pass markedAsReadIds through component chain (Bookmarks -> ThreePaneLayout -> BookmarkList)
- Display 100% progress for marked articles in all views (Archive, Bookmarks, Explore)
- Update filter logic to treat marked articles as completed
- Marked articles show green 100% progress bar
- Marked articles only appear in 'completed' or 'all' filters
- Remove reading position tracking from Me.tsx (not needed when all are marked)
- Clean up unused imports and variables
2025-10-15 23:36:05 +02:00
Gigi
fd5ce80a06 feat: add auto-mark as read at 100% reading progress
- Add autoMarkAsReadAt100 setting (default: false)
- Add checkbox in Layout & Behavior settings
- Automatically mark article as read after 2 seconds at 100% progress
- Trigger same animation as manual mark as read button
- Move isNostrArticle computation earlier for useCallback deps
- Move handleMarkAsRead to useCallback for use in auto-mark effect
2025-10-15 23:28:50 +02:00
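A sketch of the auto-mark effect this commit describes, assuming the setting, progress value, and mark handler are already available in the reader component (names follow the commit, the hook shape is illustrative):

```typescript
import { useEffect } from 'react'

// Mark the article as read after holding 100% progress for 2 seconds,
// reusing the same handler (and animation) as the manual button.
function useAutoMarkAsRead(
  autoMarkAsReadAt100: boolean,   // the setting (default: false)
  progress: number,               // 0–100 reading progress
  isNostrArticle: boolean,
  handleMarkAsRead: () => void,   // stable via useCallback, per the commit above
) {
  useEffect(() => {
    if (!autoMarkAsReadAt100 || !isNostrArticle || progress < 100) return
    const id = setTimeout(handleMarkAsRead, 2000)
    return () => clearTimeout(id)
  }, [autoMarkAsReadAt100, isNostrArticle, progress, handleMarkAsRead])
}
```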
Gigi
ac4185e2cc feat: merge 'Completed' and 'Marked as Read' filters into one
- Remove 'marked' filter type from ReadingProgressFilterType
- Update ReadingProgressFilters component to show only 4 filters
- Keep checkmark icon for unified 'Completed' filter
- Completed filter now shows both:
  - Articles with 95%+ reading progress
  - Articles manually marked as read (no position data or 0%)
- Remove unused faBooks icon import
- Update filter logic in BookmarkList and Me components
2025-10-15 23:22:40 +02:00
Gigi
9217077283 fix: replace spinners with skeletons during refresh in archive/writings tabs
- Changed spinner to empty state message only when not loading
- During refresh, keeps showing cached content or skeletons
- Archive: shows 'No articles in your archive' only when done loading
- Writings: shows 'No articles written yet' only when done loading
- Prevents jarring transition from skeletons to spinner during refresh
2025-10-15 23:20:54 +02:00
Gigi
b7c14b5c7c fix: restore top padding to reading progress filters
- Remove padding-top: 0 override
- Now has equal spacing top and bottom (0.5rem)
2025-10-15 23:18:31 +02:00
Gigi
9b3cc41770 refactor: rename ArchiveFilters to ReadingProgressFilters
- More accurate naming: filters are based on reading progress/position
- Renamed component: ArchiveFilters -> ReadingProgressFilters
- Renamed type: ArchiveFilterType -> ReadingProgressFilterType
- Renamed variables: archiveFilter -> readingProgressFilter
- Renamed CSS class: archive-filters-wrapper -> reading-progress-filters-wrapper
- Updated all imports and references in BookmarkList and Me components
- Updated comments to reflect reading progress filtering
2025-10-15 23:17:55 +02:00
Gigi
4c4bd2214c feat: add top border to archive filters in bookmarks sidebar
- Matches the style of bookmark type filters at top
- Visually separates archive filters from bookmarks content
2025-10-15 23:14:56 +02:00
Gigi
93c31650f4 fix: remove double border between archive filters and view controls
- Add archive-filters-wrapper class
- Remove border-bottom from bookmark-filters in wrapper
- Prevents double border (bookmark-filters border-bottom + view-mode-controls border-top)
2025-10-15 23:14:20 +02:00
Gigi
7f0d99fc29 fix: remove duplicate border between archive filters and view controls
- Remove borderTop from archive filters div
- Keep only the border from view-mode-controls CSS
2025-10-15 23:12:26 +02:00
Gigi
eb6dbe1644 feat: add archive filters to bookmarks sidebar
- Add ArchiveFilters component to bookmarks sidebar
- Filter buttons shown above view-mode-controls row
- Filters: All, To Read (0-5%), Reading (5-95%), Completed (95%+), Marked
- Only shown when kind:30023 articles are present
- Filters only apply to kind:30023 articles
- Other bookmark types (videos, notes, web) remain visible
2025-10-15 23:10:31 +02:00
Gigi
474da25f77 fix: add autoScrollToPosition to useEffect dependency array
- Fixes react-hooks/exhaustive-deps warning
- Ensures effect reruns when auto-scroll setting changes
2025-10-15 23:08:21 +02:00
Gigi
02eaa1c8f8 feat: show reading progress in Explore and Bookmarks sidebar
- Add reading position loading to Explore component
- Add reading position loading to useBookmarksData hook
- Display progress bars in Explore tab blog posts
- Display progress bars in Bookmarks large preview view
- Progress shown as colored bar (green for completed, orange for in-progress)
- Only shown for kind:30023 articles with saved reading positions
- Requires syncReadingPosition setting to be enabled
2025-10-15 23:07:18 +02:00
Gigi
8800791723 feat: add auto-scroll to reading position setting
- Add autoScrollToPosition setting (default: true)
- Add checkbox in Layout & Behavior settings
- Only auto-scroll when setting is enabled
- Allows users to disable auto-scrolling while keeping sync enabled
2025-10-15 22:53:47 +02:00
Gigi
6758b9678b fix: update 'To Read' filter to show 0-5% progress articles
- Filter now shows articles with 0-5% reading progress
- Excludes manually marked as read articles (those without position data)
- Updates comment to reflect new logic
2025-10-15 22:51:40 +02:00
Gigi
85649ae283 Merge pull request #13 from dergigi/sync-reading-position
Add reading position sync and archive enhancements
2025-10-15 22:45:13 +02:00
44 changed files with 2883 additions and 243 deletions

.gitignore (vendored, 1 line changed)

@@ -11,4 +11,5 @@ dist
# Reference Projects
applesauce
primal-web-app
Amber

Amber.md (new file, 77 lines)

@@ -0,0 +1,77 @@
## Boris ↔ Amber bunker: current findings
- **Environment**
- Client: Boris (web) using `applesauce` stack (`NostrConnectSigner`, `RelayPool`).
- Bunker: Amber (mobile).
- We restored a `nostr-connect` account from localStorage and re-wired the signer to the app `RelayPool` before use.
## What we changed client-side
- **Signer wiring**
- Bound `NostrConnectSigner.subscriptionMethod/publishMethod` to the app `RelayPool` at startup.
- After deserialization, recreated the signer with pool context and merged its relays with app `RELAYS` (includes local relays).
- Opened the signer subscription and performed a guarded `connect()` with default permissions including `nip04_encrypt/decrypt` and `nip44_encrypt/decrypt`.
- **Probes and timeouts**
- Initial probe tried `decrypt('invalid-ciphertext')` → timed out.
- Switched to roundtrip probes: `encrypt(self, ... )` then `decrypt(self, cipher)` for both nip-44 and nip-04.
- Increased probe timeout from 3s → 10s; increased bookmark decrypt timeout from 15s → 30s.
- **Logging**
- Added logs for publish/subscribe and parsed the NIP-46 request content length.
- Confirmed NIP-46 request events are kind `24133` with a single `p` tag (expected). The method is inside the encrypted content, so it prints as `method: undefined` (expected).
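A condensed sketch of the wiring and roundtrip probe described above, assuming the applesauce `NostrConnectSigner` exposes static `subscriptionMethod`/`publishMethod` hooks and per-instance `nip44`/`nip04` encryptors as the commits indicate (package names and signatures are illustrative):

```typescript
import { RelayPool } from 'applesauce-relay'             // package name assumed
import { NostrConnectSigner } from 'applesauce-signers'

const pool = new RelayPool()

// Bind the signer's NIP-46 transport to the app relay pool (same binding pattern as the commits)
NostrConnectSigner.subscriptionMethod = pool.subscription.bind(pool)
NostrConnectSigner.publishMethod = pool.publish.bind(pool)

// Helper: reject if the bunker doesn't answer within `ms`
const withTimeout = <T>(p: Promise<T>, ms: number) =>
  Promise.race<T>([
    p,
    new Promise<never>((_, reject) =>
      setTimeout(() => reject(new Error(`probe timeout after ${ms}ms`)), ms),
    ),
  ])

// Roundtrip probe: encrypt to self, then ask the bunker to decrypt the result (10 s per step)
async function probeNip44(signer: NostrConnectSigner, selfPubkey: string): Promise<boolean> {
  const cipher = await withTimeout(signer.nip44.encrypt(selfPubkey, 'probe'), 10_000)
  const plain = await withTimeout(signer.nip44.decrypt(selfPubkey, cipher), 10_000)
  return plain === 'probe'
}
```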
## Evidence from logs (client)
```
[bunker] ✅ Wired NostrConnectSigner to RelayPool publish/subscription
[bunker] 🔗 Signer relays merged with app RELAYS: (19) [...]
[bunker] subscribe via signer: { relays: [...], filters: [...] }
[bunker] ✅ Signer subscription opened
[bunker] publish via signer: { relays: [...], kind: 24133, tags: [['p', <remote>]], contentLength: 260|304|54704 }
[bunker] 🔎 Probe nip44 roundtrip (encrypt→decrypt)… → probe timeout after 10000ms
[bunker] 🔎 Probe nip04 roundtrip (encrypt→decrypt)… → probe timeout after 10000ms
bookmarkProcessing.ts: ❌ nip44.decrypt failed: Decrypt timeout after 30000ms
bookmarkProcessing.ts: ❌ nip04.decrypt failed: Decrypt timeout after 30000ms
```
Notes:
- Final signer status shows `listening: true`, `isConnected: true`, and requests are published to 19 relays (includes Amber's).
## Evidence from Amber (device)
- Activity screen shows multiple entries for: “Encrypt data using nip 4” and “Encrypt data using nip 44” with green checkmarks.
- No entries for “Decrypt data using nip 4” or “Decrypt data using nip 44”.
## Interpretation
- Transport and publish paths are working: Boris is publishing NIP-46 requests (kind 24133) and Amber receives them (ENCRYPT activity visible).
- The persistent failure is specific to DECRYPT handling: Amber does not show any DECRYPT activity and Boris receives no decrypt responses within 10–30s windows.
- Client-side wiring is likely correct (subscription open, permissions requested, relays merged). The remaining issue appears to be provider-side, in Amber's NIP-46 decrypt handling or permission gating.
## Repro steps (quick)
1) Revoke Boris in Amber.
2) Reconnect with a fresh bunker URI; approve signing and both encrypt/decrypt scopes for nip04 and nip44.
3) Keep Amber unlocked and foregrounded.
4) Reload Boris; observe:
- Logs showing `publish via signer` for kind 24133.
- In Amber, activity should include “Decrypt data using nip 4/44”.
If DECRYPT entries still don't appear:
- This points to Amber's NIP-46 provider not executing/authorizing `nip04_decrypt`/`nip44_decrypt` methods, or not publishing responses.
## Suggestions for Amber-side debugging
- Verify permission gating allows `nip04_decrypt` and `nip44_decrypt` (not just encrypt).
- Confirm the provider recognizes the NIP-46 methods `nip04_decrypt` and `nip44_decrypt` in the decrypted payload and routes them to decrypt routines.
- Ensure the response event is published back to the same relays and correctly addressed to the client (`p` tag set and content encrypted back to the client pubkey); see the sketch after this list.
- Add activity logging for “Decrypt …” attempts and failures to surface denial/exception states.
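For reference, a sketch of the response the client expects back, following the NIP-46 wire format (kind 24133, encrypted JSON `{id, result, error}` addressed to the client); the encryption helper here is a placeholder, not Amber's code:

```typescript
// Hypothetical provider-side response construction, shown in TypeScript for illustration
async function buildDecryptResponse(
  requestId: string,
  clientPubkey: string,
  plaintext: string,
  encryptToClient: (pubkey: string, payload: string) => Promise<string>, // placeholder
) {
  const payload = JSON.stringify({ id: requestId, result: plaintext })
  return {
    kind: 24133,                          // same NIP-46 wrapper kind as the request
    tags: [['p', clientPubkey]],          // addressed back to the requesting client
    content: await encryptToClient(clientPubkey, payload),
    created_at: Math.floor(Date.now() / 1000),
    // ...then sign with the bunker key and publish to the relays the request arrived on
  }
}
```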
## Current conclusion
- The client is configured and publishing requests correctly; encryption proves the end-to-end path is alive.
- The missing DECRYPT activity in Amber is the blocker. Fixing Amber's NIP-46 decrypt handling should resolve bookmark decryption in Boris without further client changes.

CHANGELOG.md

@@ -7,6 +7,125 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
## [0.6.24] - 2025-10-16
### Fixed
- TypeScript global declarations for build-time defines
- Added proper type declarations for `__APP_VERSION__`, `__GIT_COMMIT__`, `__GIT_BRANCH__`, `__BUILD_TIME__`, and `__GIT_COMMIT_URL__`
- Resolved ESLint no-undef errors for build-time injected variables
- Added Node.js environment hint to Vite configuration
## [0.6.23] - 2025-10-16
### Fixed
- Deep-link refresh redirect issue for nostr-native articles
- Limited `/a/:naddr` rewrite to bot user-agents only in Vercel configuration
- Real browsers now hit the SPA directly, preventing redirect to root path
- Bot crawlers still receive proper OpenGraph metadata for social sharing
### Added
- Version and git commit information in Settings footer
- Displays app version and short commit hash with link to GitHub
- Build-time metadata injection via Vite configuration
- Subtle footer styling with selectable text
### Changed
- Article OG handler now uses proper RelayPool.request() API
- Aligned with applesauce RelayPool interface
- Removed deprecated open/close methods
- Fixed TypeScript linting errors
### Technical
- Added debug logging for route state and article OG handler
- Gated by `?debug=1` query parameter for production testing
- Structured logging for troubleshooting deep-link issues
- Temporary debug components for validation
## [0.6.22] - 2025-10-16
### Added
- Dynamic OpenGraph and Twitter Card meta tags for article deep-links
- Social media platforms display article title, author, cover image, and summary when sharing `/a/{naddr}` links
- Serverless endpoint fetches article metadata from Nostr relays (kind:30023) and author profiles (kind:0)
- User-agent detection serves appropriate content to crawlers vs browsers
- Falls back to default social preview image when articles have no cover image
- Social preview image for homepage and article links
- Added `boris-social-1200.png` as default OpenGraph image (1200x630)
- Homepage now includes social preview image in meta tags
### Changed
- Article deep-links now properly preserve URL when loading in browser
- Uses `history.replaceState()` to maintain correct article path
- Browser navigation works correctly on refresh and new tab opens
### Fixed
- Vercel rewrite configuration for article routes
- Routes `/a/:naddr` to serverless OG endpoint for dynamic meta tags
- Regular SPA routing preserved for browser navigation
## [0.6.21] - 2025-10-16
### Added
- Reading position sync across devices using Nostr Kind 30078 (NIP-78)
- Automatically saves and syncs reading position as you scroll
- Visual reading progress indicator on article cards
- Reading progress shown in Explore and Bookmarks sidebar
- Auto-scroll to last reading position setting (configurable in Settings)
- Reading position displayed as colored progress bar on cards
- Reading progress filters for organizing articles
- Filter by reading state: Unopened, Started (0-10%), Reading (11-94%), Completed (95-100% or marked as read)
- Filter icons colored when active (blue for most, green for completed)
- URL routing support for reading progress filters
- Reading progress filters available in Archive tab and bookmarks sidebar
- Reads and Links tabs on `/me` page
- Reads tab shows nostr-native articles with reading progress
- Links tab shows external URLs with reading progress
- Both tabs populate instantly from bookmarks for fast loading
- Lazy loading for improved performance
- Auto-mark as read at 100% reading progress
- Articles automatically marked as read when scrolled to end
- Marked-as-read articles treated as 100% progress
- Fancy checkmark animation on Mark as Read button
- Click-to-open article navigation on highlights
- Clicking highlights in Explore and Me pages opens the source article
- Automatically scrolls to highlighted text position
### Changed
- Renamed Archive to Reads with expanded functionality
- Merged 'Completed' and 'Marked as Read' filters into one unified filter
- Simplified filter icon colors to blue (except green for completed)
- Started reading progress state (0-10%) uses neutral text color
- Replace spinners with skeleton placeholders during refresh in Archive/Reads/Links tabs
- Removed unused IEventStore import in ContentPanel
### Fixed
- Reading position calculation now accurately reaches 100%
- Reading position filters work correctly in bookmarks sidebar
- Filter out reads without timestamps or 'Untitled' items
- Show skeleton placeholders correctly during initial tab load
- External URLs in Reads tab only shown if they have reading progress
- Reading progress merges even when timestamp is older than bookmark
- Resolved all linter errors and TypeScript type issues
### Refactored
- Renamed ArchiveFilters component to ReadingProgressFilters
- Extracted shared utilities from readsFromBookmarks for DRY code
- Use setState callback pattern for background enrichment
- Use naddr format for article IDs to match reading positions
- Extract article titles, images, summaries from bookmark tags using applesauce helpers
## [0.6.20] - 2025-10-15
### Added
@@ -1641,7 +1760,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Optimize relay usage following applesauce-relay best practices
- Use applesauce-react event models for better profile handling
[Unreleased]: https://github.com/dergigi/boris/compare/v0.6.20...HEAD
[Unreleased]: https://github.com/dergigi/boris/compare/v0.6.24...HEAD
[0.6.24]: https://github.com/dergigi/boris/compare/v0.6.23...v0.6.24
[0.6.23]: https://github.com/dergigi/boris/compare/v0.6.22...v0.6.23
[0.6.21]: https://github.com/dergigi/boris/compare/v0.6.20...v0.6.21
[0.6.20]: https://github.com/dergigi/boris/compare/v0.6.19...v0.6.20
[0.6.19]: https://github.com/dergigi/boris/compare/v0.6.18...v0.6.19
[0.6.18]: https://github.com/dergigi/boris/compare/v0.6.17...v0.6.18

api/article-og.ts (new file, 304 lines)
View File

@@ -0,0 +1,304 @@
import type { VercelRequest, VercelResponse } from '@vercel/node'
import { RelayPool } from 'applesauce-relay'
import { nip19 } from 'nostr-tools'
import { AddressPointer } from 'nostr-tools/nip19'
import { NostrEvent, Filter } from 'nostr-tools'
import { Helpers } from 'applesauce-core'
const { getArticleTitle, getArticleImage, getArticleSummary } = Helpers
// Relay configuration (from src/config/relays.ts)
const RELAYS = [
'wss://relay.damus.io',
'wss://nos.lol',
'wss://relay.nostr.band',
'wss://relay.dergigi.com',
'wss://wot.dergigi.com',
'wss://relay.snort.social',
'wss://relay.current.fyi',
'wss://nostr-pub.wellorder.net',
'wss://purplepag.es',
'wss://relay.primal.net'
]
type CacheEntry = {
html: string
expires: number
}
const WEEK_MS = 7 * 24 * 60 * 60 * 1000
const memoryCache = new Map<string, CacheEntry>()
function escapeHtml(text: string): string {
return text
.replace(/&/g, '&amp;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
.replace(/"/g, '&quot;')
.replace(/'/g, '&#039;')
}
function setCacheHeaders(res: VercelResponse, maxAge: number = 86400): void {
res.setHeader('Cache-Control', `public, max-age=${maxAge}, s-maxage=604800`)
res.setHeader('Content-Type', 'text/html; charset=utf-8')
}
interface ArticleMetadata {
title: string
summary: string
image: string
author: string
published?: number
}
async function fetchEventsFromRelays(
relayPool: RelayPool,
relayUrls: string[],
filter: Filter,
timeoutMs: number
): Promise<NostrEvent[]> {
const events: NostrEvent[] = []
await new Promise<void>((resolve) => {
const timeout = setTimeout(() => resolve(), timeoutMs)
// `request` emits NostrEvent objects directly
relayPool.request(relayUrls, filter).subscribe({
next: (event) => {
events.push(event)
},
error: () => resolve(),
complete: () => {
clearTimeout(timeout)
resolve()
}
})
})
// Sort by created_at and return most recent first
return events.sort((a, b) => b.created_at - a.created_at)
}
async function fetchArticleMetadata(naddr: string): Promise<ArticleMetadata | null> {
const relayPool = new RelayPool()
try {
// Decode naddr
const decoded = nip19.decode(naddr)
if (decoded.type !== 'naddr') {
return null
}
const pointer = decoded.data as AddressPointer
// Determine relay URLs
const relayUrls = pointer.relays && pointer.relays.length > 0 ? pointer.relays : RELAYS
// Fetch article and profile in parallel
const [articleEvents, profileEvents] = await Promise.all([
fetchEventsFromRelays(relayPool, relayUrls, {
kinds: [pointer.kind],
authors: [pointer.pubkey],
'#d': [pointer.identifier || '']
}, 5000),
fetchEventsFromRelays(relayPool, relayUrls, {
kinds: [0],
authors: [pointer.pubkey]
}, 3000)
])
if (articleEvents.length === 0) {
return null
}
const article = articleEvents[0]
// Extract article metadata
const title = getArticleTitle(article) || 'Untitled Article'
const summary = getArticleSummary(article) || 'Read this article on Boris'
const image = getArticleImage(article) || '/boris-social-1200.png'
// Extract author name from profile
let authorName = pointer.pubkey.slice(0, 8) + '...'
if (profileEvents.length > 0) {
try {
const profileData = JSON.parse(profileEvents[0].content)
authorName = profileData.display_name || profileData.name || authorName
} catch {
// Use fallback
}
}
return {
title,
summary,
image,
author: authorName,
published: article.created_at
}
} catch (err) {
console.error('Failed to fetch article metadata:', err)
return null
} finally {
// No explicit close needed; pool manages connections internally
}
}
function generateHtml(naddr: string, meta: ArticleMetadata | null): string {
const baseUrl = 'https://read.withboris.com'
const articleUrl = `${baseUrl}/a/${naddr}`
const title = meta?.title || 'Boris Nostr Bookmarks'
const description = meta?.summary || 'Your reading list for the Nostr world. A minimal nostr client for bookmark management with highlights.'
const image = meta?.image?.startsWith('http') ? meta.image : `${baseUrl}${meta?.image || '/boris-social-1200.png'}`
const author = meta?.author || 'Boris'
return `<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/x-icon" href="/favicon.ico" />
<link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png" />
<link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png" />
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1, viewport-fit=cover" />
<meta name="theme-color" content="#0f172a" />
<link rel="manifest" href="/manifest.webmanifest" />
<title>${escapeHtml(title)}</title>
<meta name="description" content="${escapeHtml(description)}" />
<link rel="canonical" href="${articleUrl}" />
<!-- Open Graph / Social Media -->
<meta property="og:type" content="article" />
<meta property="og:url" content="${articleUrl}" />
<meta property="og:title" content="${escapeHtml(title)}" />
<meta property="og:description" content="${escapeHtml(description)}" />
<meta property="og:image" content="${escapeHtml(image)}" />
<meta property="og:site_name" content="Boris" />
${meta?.published ? `<meta property="article:published_time" content="${new Date(meta.published * 1000).toISOString()}" />` : ''}
<meta property="article:author" content="${escapeHtml(author)}" />
<!-- Twitter Card -->
<meta name="twitter:card" content="summary_large_image" />
<meta name="twitter:url" content="${articleUrl}" />
<meta name="twitter:title" content="${escapeHtml(title)}" />
<meta name="twitter:description" content="${escapeHtml(description)}" />
<meta name="twitter:image" content="${escapeHtml(image)}" />
</head>
<body>
<noscript>
<p>Redirecting to <a href="/">Boris</a>...</p>
</noscript>
</body>
</html>`
}
function isCrawler(userAgent: string | undefined): boolean {
if (!userAgent) return false
const crawlers = [
'bot', 'crawl', 'spider', 'slurp', 'facebook', 'twitter', 'linkedin',
'whatsapp', 'telegram', 'slack', 'discord', 'preview'
]
const ua = userAgent.toLowerCase()
return crawlers.some(crawler => ua.includes(crawler))
}
export default async function handler(req: VercelRequest, res: VercelResponse) {
const naddr = (req.query.naddr as string | undefined)?.trim()
if (!naddr) {
return res.status(400).json({ error: 'Missing naddr parameter' })
}
const userAgent = req.headers['user-agent'] as string | undefined
const isCrawlerRequest = isCrawler(userAgent)
const debugEnabled = req.query.debug === '1' || req.headers['x-boris-debug'] === '1'
if (debugEnabled) {
console.log('[article-og] request', JSON.stringify({
naddr,
ua: userAgent || null,
isCrawlerRequest,
path: req.url || null
}))
res.setHeader('X-Boris-Debug', '1')
}
// If it's a regular browser (not a bot), serve HTML that loads SPA
// Use history.replaceState to set the URL before the SPA boots
if (!isCrawlerRequest) {
const articlePath = `/a/${naddr}`
// Serve a minimal HTML that sets up the URL and loads the SPA
const html = `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<link rel="icon" type="image/x-icon" href="/favicon.ico">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Boris - Loading Article...</title>
<script>
// Set the URL to the article path before SPA loads
if (window.location.pathname !== '${articlePath}') {
history.replaceState(null, '', '${articlePath}');
}
</script>
${debugEnabled ? `<script>console.debug('article-og', { mode: 'browser', naddr: '${naddr}', path: location.pathname, referrer: document.referrer });</script>` : ''}
<script>
// Redirect to index.html which will load the SPA
// The history state is already set, so SPA will see the correct URL
window.location.replace('/');
</script>
</head>
<body>
<div id="root"></div>
</body>
</html>`
res.setHeader('Content-Type', 'text/html; charset=utf-8')
res.setHeader('Cache-Control', 'no-cache, no-store, must-revalidate')
if (debugEnabled) {
console.log('[article-og] response', JSON.stringify({ mode: 'browser', naddr }))
}
return res.status(200).send(html)
}
// Check cache for bots/crawlers
const now = Date.now()
const cached = memoryCache.get(naddr)
if (cached && cached.expires > now) {
setCacheHeaders(res)
if (debugEnabled) {
console.log('[article-og] response', JSON.stringify({ mode: 'bot', naddr, cache: true }))
}
return res.status(200).send(cached.html)
}
try {
// Fetch metadata
const meta = await fetchArticleMetadata(naddr)
// Generate HTML
const html = generateHtml(naddr, meta)
// Cache the result
memoryCache.set(naddr, { html, expires: now + WEEK_MS })
// Send response
setCacheHeaders(res)
if (debugEnabled) {
console.log('[article-og] response', JSON.stringify({ mode: 'bot', naddr, cache: false }))
}
return res.status(200).send(html)
} catch (err) {
console.error('Error generating article OG HTML:', err)
// Fallback to basic HTML with SPA boot
const html = generateHtml(naddr, null)
setCacheHeaders(res, 3600)
if (debugEnabled) {
console.log('[article-og] response', JSON.stringify({ mode: 'bot-fallback', naddr }))
}
return res.status(200).send(html)
}
}

View File

@@ -18,6 +18,7 @@
<meta property="og:url" content="https://read.withboris.com/" />
<meta property="og:title" content="Boris - Nostr Bookmarks" />
<meta property="og:description" content="Your reading list for the Nostr world. A minimal nostr client for bookmark management with highlights." />
<meta property="og:image" content="https://read.withboris.com/boris-social-1200.png" />
<meta property="og:site_name" content="Boris" />
<!-- Twitter Card -->
@@ -25,6 +26,7 @@
<meta name="twitter:url" content="https://read.withboris.com/" />
<meta name="twitter:title" content="Boris - Nostr Bookmarks" />
<meta name="twitter:description" content="Your reading list for the Nostr world. A minimal nostr client for bookmark management with highlights." />
<meta name="twitter:image" content="https://read.withboris.com/boris-social-1200.png" />
<!-- Default to system theme until settings load from Nostr -->
<script>

View File

@@ -1,6 +1,6 @@
{
"name": "boris",
"version": "0.6.20",
"version": "0.6.24",
"description": "A minimal nostr client for bookmark management",
"homepage": "https://read.withboris.com/",
"type": "module",

Binary file not shown (new image, 819 KiB).

View File

@@ -4,16 +4,21 @@ import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { faSpinner } from '@fortawesome/free-solid-svg-icons'
import { EventStoreProvider, AccountsProvider, Hooks } from 'applesauce-react'
import { EventStore } from 'applesauce-core'
import { AccountManager } from 'applesauce-accounts'
import { AccountManager, Accounts } from 'applesauce-accounts'
import { registerCommonAccountTypes } from 'applesauce-accounts/accounts'
import { RelayPool } from 'applesauce-relay'
import { NostrConnectSigner } from 'applesauce-signers'
import { getDefaultBunkerPermissions } from './services/nostrConnect'
import { createAddressLoader } from 'applesauce-loaders/loaders'
import Debug from './components/Debug'
import Bookmarks from './components/Bookmarks'
import RouteDebug from './components/RouteDebug'
import Toast from './components/Toast'
import { useToast } from './hooks/useToast'
import { useOnlineStatus } from './hooks/useOnlineStatus'
import { RELAYS } from './config/relays'
import { SkeletonThemeProvider } from './components/Skeletons'
import { DebugBus } from './utils/debugBus'
const DEFAULT_ARTICLE = import.meta.env.VITE_DEFAULT_ARTICLE_NADDR ||
'naddr1qvzqqqr4gupzqmjxss3dld622uu8q25gywum9qtg4w4cv4064jmg20xsac2aam5nqqxnzd3cxqmrzv3exgmr2wfesgsmew'
@@ -112,7 +117,25 @@ function AppRoutes({
}
/>
<Route
path="/me/archive"
path="/me/reads"
element={
<Bookmarks
relayPool={relayPool}
onLogout={handleLogout}
/>
}
/>
<Route
path="/me/reads/:filter"
element={
<Bookmarks
relayPool={relayPool}
onLogout={handleLogout}
/>
}
/>
<Route
path="/me/links"
element={
<Bookmarks
relayPool={relayPool}
@@ -147,6 +170,7 @@ function AppRoutes({
/>
}
/>
<Route path="/debug" element={<Debug />} />
<Route path="/" element={<Navigate to={`/a/${DEFAULT_ARTICLE}`} replace />} />
</Routes>
)
@@ -168,20 +192,57 @@ function App() {
// Register common account types (needed for deserialization)
registerCommonAccountTypes(accounts)
// Create relay pool and set it up BEFORE loading accounts
// NostrConnectAccount.fromJSON needs this to restore the signer
const pool = new RelayPool()
// Wire the signer to use this pool; make publish non-blocking so callers don't
// wait for every relay send to finish. Responses still resolve the pending request.
NostrConnectSigner.subscriptionMethod = pool.subscription.bind(pool)
NostrConnectSigner.publishMethod = (relays: string[], event: unknown) => {
const result: any = pool.publish(relays, event as any)
if (result && typeof (result as any).subscribe === 'function') {
try { (result as any).subscribe({ complete: () => {}, error: () => {} }) } catch {}
}
// Return an already-resolved promise so upstream await finishes immediately
return Promise.resolve()
}
console.log('[bunker] ✅ Wired NostrConnectSigner to RelayPool publish/subscription (before account load)')
// Create a relay group for better event deduplication and management
pool.group(RELAYS)
console.log('[bunker] Created relay group with', RELAYS.length, 'relays (including local)')
// Load persisted accounts from localStorage
try {
const json = JSON.parse(localStorage.getItem('accounts') || '[]')
const accountsJson = localStorage.getItem('accounts')
console.log('[bunker] Raw accounts from localStorage:', accountsJson)
const json = JSON.parse(accountsJson || '[]')
console.log('[bunker] Parsed accounts:', json.length, 'accounts')
await accounts.fromJSON(json)
console.log('Loaded', accounts.accounts.length, 'accounts from storage')
console.log('[bunker] Loaded', accounts.accounts.length, 'accounts from storage')
console.log('[bunker] Account types:', accounts.accounts.map(a => ({ id: a.id, type: a.type })))
// Load active account from storage
const activeId = localStorage.getItem('active')
if (activeId && accounts.getAccount(activeId)) {
accounts.setActive(activeId)
console.log('Restored active account:', activeId)
console.log('[bunker] Active ID from localStorage:', activeId)
if (activeId) {
const account = accounts.getAccount(activeId)
console.log('[bunker] Found account for ID?', !!account, account?.type)
if (account) {
accounts.setActive(activeId)
console.log('[bunker] ✅ Restored active account:', activeId, 'type:', account.type)
} else {
console.warn('[bunker] ⚠️ Active ID found but account not in list')
}
} else {
console.log('[bunker] No active account ID in localStorage')
}
} catch (err) {
console.error('Failed to load accounts from storage:', err)
console.error('[bunker] ❌ Failed to load accounts from storage:', err)
}
// Subscribe to accounts changes and persist to localStorage
@@ -198,12 +259,197 @@ function App() {
}
})
const pool = new RelayPool()
// Reconnect bunker signers when active account changes
// Keep track of which accounts we've already reconnected to avoid double-connecting
const reconnectedAccounts = new Set<string>()
// Create a relay group for better event deduplication and management
pool.group(RELAYS)
console.log('Created relay group with', RELAYS.length, 'relays (including local)')
console.log('Relay URLs:', RELAYS)
const bunkerReconnectSub = accounts.active$.subscribe(async (account) => {
console.log('[bunker] Active account changed:', {
hasAccount: !!account,
type: account?.type,
id: account?.id
})
if (account && account.type === 'nostr-connect') {
const nostrConnectAccount = account as Accounts.NostrConnectAccount<unknown>
// Disable applesauce account queueing so decrypt requests aren't serialized behind earlier ops
try {
if (!(nostrConnectAccount as unknown as { disableQueue?: boolean }).disableQueue) {
(nostrConnectAccount as unknown as { disableQueue?: boolean }).disableQueue = true
console.log('[bunker] ⚙️ Disabled account request queueing for nostr-connect')
}
} catch (err) { console.warn('[bunker] failed to disable queue', err) }
// Note: for Amber bunker, the remote signer pubkey is the user's pubkey. This is expected.
// Skip if we've already reconnected this account
if (reconnectedAccounts.has(account.id)) {
console.log('[bunker] ⏭️ Already reconnected this account, skipping')
return
}
console.log('[bunker] Account detected. Status:', {
listening: nostrConnectAccount.signer.listening,
isConnected: nostrConnectAccount.signer.isConnected,
hasRemote: !!nostrConnectAccount.signer.remote,
bunkerRelays: nostrConnectAccount.signer.relays
})
try {
// For restored signers, ensure they have the pool's subscription methods
// The signer was created in fromJSON without pool context, so we need to recreate it
const signerData = nostrConnectAccount.toJSON().signer
// Add bunker's relays to the pool BEFORE recreating the signer
// This ensures the pool has all relays when the signer sets up its methods
const bunkerRelays = signerData.relays || []
const existingRelayUrls = new Set(Array.from(pool.relays.keys()))
const newBunkerRelays = bunkerRelays.filter(url => !existingRelayUrls.has(url))
if (newBunkerRelays.length > 0) {
console.log('[bunker] Adding bunker relays to pool BEFORE signer recreation:', newBunkerRelays)
pool.group(newBunkerRelays)
} else {
console.log('[bunker] Bunker relays already in pool')
}
const recreatedSigner = new NostrConnectSigner({
relays: signerData.relays,
pubkey: nostrConnectAccount.pubkey,
remote: signerData.remote,
signer: nostrConnectAccount.signer.signer, // Use the existing SimpleSigner
pool: pool
})
// Ensure local relays are included for NIP-46 request/response traffic (e.g., Amber bunker)
try {
const mergedRelays = Array.from(new Set([...(signerData.relays || []), ...RELAYS]))
recreatedSigner.relays = mergedRelays
console.log('[bunker] 🔗 Signer relays merged with app RELAYS:', mergedRelays)
} catch (err) { console.warn('[bunker] failed to merge signer relays', err) }
// Replace the signer on the account
nostrConnectAccount.signer = recreatedSigner
console.log('[bunker] ✅ Signer recreated with pool context')
// Debug: log publish/subscription calls made by signer (decrypt/sign requests)
// IMPORTANT: bind originals to preserve `this` context used internally by the signer
const originalPublish = (recreatedSigner as unknown as { publishMethod: (relays: string[], event: unknown) => unknown }).publishMethod.bind(recreatedSigner)
;(recreatedSigner as unknown as { publishMethod: (relays: string[], event: unknown) => unknown }).publishMethod = (relays: string[], event: unknown) => {
try {
let method: string | undefined
const content = (event as { content?: unknown })?.content
if (typeof content === 'string') {
try {
const parsed = JSON.parse(content) as { method?: string; id?: unknown }
method = parsed?.method
} catch (err) { console.warn('[bunker] failed to parse event content', err) }
}
const summary = {
relays,
kind: (event as { kind?: number })?.kind,
method,
// include tags array for debugging (NIP-46 expects method tag)
tags: (event as { tags?: unknown })?.tags,
contentLength: typeof content === 'string' ? content.length : undefined
}
console.log('[bunker] publish via signer:', summary)
try { DebugBus.info('bunker', 'publish', summary) } catch (err) { console.warn('[bunker] failed to log to DebugBus', err) }
} catch (err) { console.warn('[bunker] failed to log publish summary', err) }
// Fire-and-forget publish: trigger the publish but do not return the
// Observable/Promise to upstream to avoid their awaiting of completion.
const result = originalPublish(relays, event)
if (result && typeof (result as { subscribe?: unknown }).subscribe === 'function') {
try { (result as { subscribe: (h: { complete?: () => void; error?: (e: unknown) => void }) => unknown }).subscribe({ complete: () => {}, error: () => {} }) } catch {}
}
// If it's a Promise, simply ignore it (no await) so it resolves in the background.
// Return a benign object so callers that probe for a "subscribe" property
// (e.g., applesauce makeRequest) won't throw on `"subscribe" in result`.
return {} as unknown as never
}
const originalSubscribe = (recreatedSigner as unknown as { subscriptionMethod: (relays: string[], filters: unknown[]) => unknown }).subscriptionMethod.bind(recreatedSigner)
;(recreatedSigner as unknown as { subscriptionMethod: (relays: string[], filters: unknown[]) => unknown }).subscriptionMethod = (relays: string[], filters: unknown[]) => {
try {
console.log('[bunker] subscribe via signer:', { relays, filters })
try { DebugBus.info('bunker', 'subscribe', { relays, filters }) } catch (err) { console.warn('[bunker] failed to log subscribe to DebugBus', err) }
} catch (err) { console.warn('[bunker] failed to log subscribe summary', err) }
return originalSubscribe(relays, filters)
}
// Just ensure the signer is listening for responses - don't call connect() again
// The fromBunkerURI already connected with permissions during login
if (!nostrConnectAccount.signer.listening) {
console.log('[bunker] Opening signer subscription...')
await nostrConnectAccount.signer.open()
console.log('[bunker] ✅ Signer subscription opened')
} else {
console.log('[bunker] ✅ Signer already listening')
}
// Attempt a guarded reconnect to ensure Amber authorizes decrypt operations
try {
if (nostrConnectAccount.signer.remote && !reconnectedAccounts.has(account.id)) {
const permissions = getDefaultBunkerPermissions()
console.log('[bunker] Attempting guarded connect() with permissions to ensure decrypt perms', { count: permissions.length })
await nostrConnectAccount.signer.connect(undefined, permissions)
console.log('[bunker] ✅ Guarded connect() succeeded with permissions')
}
} catch (e) {
console.warn('[bunker] ⚠️ Guarded connect() failed:', e)
}
// Give the subscription a moment to fully establish before allowing decrypt operations
// This ensures the signer is ready to handle and receive responses
await new Promise(resolve => setTimeout(resolve, 100))
console.log("[bunker] Subscription ready after startup delay")
// Fire-and-forget: probe decrypt path to verify Amber responds to NIP-46 decrypt
try {
const withTimeout = async <T,>(p: Promise<T>, ms = 10000): Promise<T> => {
return await Promise.race([
p,
new Promise<T>((_, rej) => setTimeout(() => rej(new Error(`probe timeout after ${ms}ms`)), ms)),
])
}
setTimeout(async () => {
const self = nostrConnectAccount.pubkey
// Try a roundtrip so the bunker can respond successfully
try {
console.log('[bunker] 🔎 Probe nip44 roundtrip (encrypt→decrypt)…')
const cipher44 = await withTimeout(nostrConnectAccount.signer.nip44!.encrypt(self, 'probe-nip44'))
const plain44 = await withTimeout(nostrConnectAccount.signer.nip44!.decrypt(self, cipher44))
console.log('[bunker] 🔎 Probe nip44 responded:', typeof plain44 === 'string' ? plain44 : typeof plain44)
} catch (err) {
console.log('[bunker] 🔎 Probe nip44 result:', err instanceof Error ? err.message : err)
}
try {
console.log('[bunker] 🔎 Probe nip04 roundtrip (encrypt→decrypt)…')
const cipher04 = await withTimeout(nostrConnectAccount.signer.nip04!.encrypt(self, 'probe-nip04'))
const plain04 = await withTimeout(nostrConnectAccount.signer.nip04!.decrypt(self, cipher04))
console.log('[bunker] 🔎 Probe nip04 responded:', typeof plain04 === 'string' ? plain04 : typeof plain04)
} catch (err) {
console.log('[bunker] 🔎 Probe nip04 result:', err instanceof Error ? err.message : err)
}
}, 0)
} catch (err) {
console.log('[bunker] 🔎 Probe setup failed:', err)
}
// The bunker remembers the permissions from the initial connection
nostrConnectAccount.signer.isConnected = true
console.log('[bunker] Final signer status:', {
listening: nostrConnectAccount.signer.listening,
isConnected: nostrConnectAccount.signer.isConnected,
remote: nostrConnectAccount.signer.remote,
relays: nostrConnectAccount.signer.relays
})
// Mark this account as reconnected
reconnectedAccounts.add(account.id)
console.log('[bunker] 🎉 Signer ready for signing')
} catch (error) {
console.error('[bunker] ❌ Failed to open signer:', error)
}
}
})
// Keep all relay connections alive indefinitely by creating a persistent subscription
// This prevents disconnection when no other subscriptions are active
@@ -233,6 +479,7 @@ function App() {
return () => {
accountsSub.unsubscribe()
activeSub.unsubscribe()
bunkerReconnectSub.unsubscribe()
// Clean up keep-alive subscription if it exists
const poolWithSub = pool as unknown as { _keepAliveSubscription?: { unsubscribe: () => void } }
if (poolWithSub._keepAliveSubscription) {
@@ -249,7 +496,7 @@ function App() {
return () => {
if (cleanup) cleanup()
}
}, [])
}, [isOnline, showToast])
// Monitor online/offline status
useEffect(() => {
@@ -285,6 +532,7 @@ function App() {
<BrowserRouter>
<div className="min-h-screen p-0 max-w-none m-0 relative">
<AppRoutes relayPool={relayPool} showToast={showToast} />
<RouteDebug />
</div>
</BrowserRouter>
{toastMessage && (

View File

@@ -1,7 +1,6 @@
import React from 'react'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { faBookOpen, faCheckCircle, faAsterisk } from '@fortawesome/free-solid-svg-icons'
import { faBookmark } from '@fortawesome/free-regular-svg-icons'
import { faBookOpen, faBookmark, faCheckCircle, faAsterisk } from '@fortawesome/free-solid-svg-icons'
import { faBooks } from '../icons/customIcons'
export type ArchiveFilterType = 'all' | 'to-read' | 'reading' | 'completed' | 'marked'
@@ -22,17 +21,24 @@ const ArchiveFilters: React.FC<ArchiveFiltersProps> = ({ selectedFilter, onFilte
return (
<div className="bookmark-filters">
{filters.map(filter => (
<button
key={filter.type}
onClick={() => onFilterChange(filter.type)}
className={`filter-btn ${selectedFilter === filter.type ? 'active' : ''}`}
title={filter.label}
aria-label={`Filter by ${filter.label}`}
>
<FontAwesomeIcon icon={filter.icon} />
</button>
))}
{filters.map(filter => {
const isActive = selectedFilter === filter.type
// Only "completed" gets green color, everything else uses default blue
const activeStyle = isActive && filter.type === 'completed' ? { color: '#10b981' } : undefined
return (
<button
key={filter.type}
onClick={() => onFilterChange(filter.type)}
className={`filter-btn ${isActive ? 'active' : ''}`}
title={filter.label}
aria-label={`Filter by ${filter.label}`}
style={activeStyle}
>
<FontAwesomeIcon icon={filter.icon} />
</button>
)
})}
</div>
)
}

View File

@@ -24,9 +24,15 @@ const BlogPostCard: React.FC<BlogPostCardProps> = ({ post, href, level, readingP
addSuffix: true
})
// Calculate progress percentage and determine color
// Calculate progress percentage and determine color (matching readingProgressUtils.ts logic)
const progressPercent = readingProgress ? Math.round(readingProgress * 100) : 0
const progressColor = progressPercent >= 95 ? '#10b981' : '#6366f1' // green if >=95%, blue otherwise
let progressColor = '#6366f1' // Default blue (reading)
if (readingProgress && readingProgress >= 0.95) {
progressColor = '#10b981' // Green (completed)
} else if (readingProgress && readingProgress > 0 && readingProgress <= 0.10) {
progressColor = 'var(--color-text)' // Neutral text color (started)
}
return (
<Link

View File

@@ -21,6 +21,7 @@ import { RELAYS } from '../config/relays'
import { Hooks } from 'applesauce-react'
import BookmarkFilters, { BookmarkFilterType } from './BookmarkFilters'
import { filterBookmarksByType } from '../utils/bookmarkTypeClassifier'
import LoginOptions from './LoginOptions'
interface BookmarkListProps {
bookmarks: Bookmark[]
@@ -153,7 +154,9 @@ export const BookmarkList: React.FC<BookmarkListProps> = ({
/>
)}
{filteredBookmarks.length === 0 && allIndividualBookmarks.length > 0 ? (
{!activeAccount ? (
<LoginOptions />
) : filteredBookmarks.length === 0 && allIndividualBookmarks.length > 0 ? (
<div className="empty-state">
<p>No bookmarks match this filter.</p>
</div>
@@ -170,7 +173,6 @@ export const BookmarkList: React.FC<BookmarkListProps> = ({
<div className="empty-state">
<p>No bookmarks found.</p>
<p>Add bookmarks using your nostr client to see them here.</p>
<p>If you aren't on nostr yet, start here: <a href="https://nstart.me/" target="_blank" rel="noopener noreferrer">nstart.me</a></p>
</div>
)
) : (

View File

@@ -23,6 +23,7 @@ interface LargeViewProps {
handleReadNow: (e: React.MouseEvent<HTMLButtonElement>) => void
articleSummary?: string
contentTypeIcon: IconDefinition
readingProgress?: number // 0-1 reading progress (optional)
}
export const LargeView: React.FC<LargeViewProps> = ({
@@ -38,11 +39,22 @@ export const LargeView: React.FC<LargeViewProps> = ({
getAuthorDisplayName,
handleReadNow,
articleSummary,
contentTypeIcon
contentTypeIcon,
readingProgress
}) => {
const cachedImage = useImageCache(previewImage || undefined)
const isArticle = bookmark.kind === 30023
// Calculate progress display (matching readingProgressUtils.ts logic)
const progressPercent = readingProgress ? Math.round(readingProgress * 100) : 0
let progressColor = '#6366f1' // Default blue (reading)
if (readingProgress && readingProgress >= 0.95) {
progressColor = '#10b981' // Green (completed)
} else if (readingProgress && readingProgress > 0 && readingProgress <= 0.10) {
progressColor = 'var(--color-text)' // Neutral text color (started)
}
const triggerOpen = () => handleReadNow({ preventDefault: () => {} } as React.MouseEvent<HTMLButtonElement>)
const handleKeyDown: React.KeyboardEventHandler<HTMLDivElement> = (e) => {
if (e.key === 'Enter' || e.key === ' ') {
@@ -92,6 +104,28 @@ export const LargeView: React.FC<LargeViewProps> = ({
</div>
)}
{/* Reading progress indicator for articles - shown only if there's progress */}
{isArticle && readingProgress !== undefined && readingProgress > 0 && (
<div
style={{
height: '3px',
width: '100%',
background: 'var(--color-border)',
overflow: 'hidden',
marginTop: '0.75rem'
}}
>
<div
style={{
height: '100%',
width: `${progressPercent}%`,
background: progressColor,
transition: 'width 0.3s ease, background 0.3s ease'
}}
/>
</div>
)}
<div className="large-footer">
<span className="bookmark-type-large">
<FontAwesomeIcon icon={contentTypeIcon} className="content-type-icon" />

View File

@@ -52,7 +52,8 @@ const Bookmarks: React.FC<BookmarksProps> = ({ relayPool, onLogout }) => {
const meTab = location.pathname === '/me' ? 'highlights' :
location.pathname === '/me/highlights' ? 'highlights' :
location.pathname === '/me/reading-list' ? 'reading-list' :
location.pathname === '/me/archive' ? 'archive' :
location.pathname.startsWith('/me/reads') ? 'reads' :
location.pathname === '/me/links' ? 'links' :
location.pathname === '/me/writings' ? 'writings' : 'highlights'
// Extract tab from profile routes

src/components/Debug.tsx (new file, 395 lines)
View File

@@ -0,0 +1,395 @@
import React, { useEffect, useMemo, useState } from 'react'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { faClock, faSpinner } from '@fortawesome/free-solid-svg-icons'
import { Hooks } from 'applesauce-react'
import { Accounts } from 'applesauce-accounts'
import { NostrConnectSigner } from 'applesauce-signers'
import { getDefaultBunkerPermissions } from '../services/nostrConnect'
import { DebugBus, type DebugLogEntry } from '../utils/debugBus'
import VersionFooter from './VersionFooter'
const defaultPayload = 'The quick brown fox jumps over the lazy dog.'
const Debug: React.FC = () => {
const activeAccount = Hooks.useActiveAccount()
const accountManager = Hooks.useAccountManager()
const [payload, setPayload] = useState<string>(defaultPayload)
const [cipher44, setCipher44] = useState<string>('')
const [cipher04, setCipher04] = useState<string>('')
const [plain44, setPlain44] = useState<string>('')
const [plain04, setPlain04] = useState<string>('')
const [tEncrypt44, setTEncrypt44] = useState<number | null>(null)
const [tEncrypt04, setTEncrypt04] = useState<number | null>(null)
const [tDecrypt44, setTDecrypt44] = useState<number | null>(null)
const [tDecrypt04, setTDecrypt04] = useState<number | null>(null)
const [logs, setLogs] = useState<DebugLogEntry[]>(DebugBus.snapshot())
const [debugEnabled, setDebugEnabled] = useState<boolean>(() => localStorage.getItem('debug') === '*')
// Bunker login state
const [bunkerUri, setBunkerUri] = useState<string>('')
const [isBunkerLoading, setIsBunkerLoading] = useState<boolean>(false)
const [bunkerError, setBunkerError] = useState<string | null>(null)
// Live timing state
const [liveTiming, setLiveTiming] = useState<{
nip44?: { type: 'encrypt' | 'decrypt'; startTime: number }
nip04?: { type: 'encrypt' | 'decrypt'; startTime: number }
}>({})
useEffect(() => {
return DebugBus.subscribe((e) => setLogs(prev => [...prev, e].slice(-300)))
}, [])
// Live timer effect - triggers re-renders for live timing updates
useEffect(() => {
const interval = setInterval(() => {
// Spread into a new object so React actually re-renders; returning the same
// reference (prev => prev) would bail out and skip the live timing update
setLiveTiming(prev => ({ ...prev }))
}, 16) // ~60fps for smooth updates
return () => clearInterval(interval)
}, [])
const signer = useMemo(() => (activeAccount as unknown as { signer?: unknown })?.signer, [activeAccount])
const pubkey = (activeAccount as unknown as { pubkey?: string })?.pubkey
const hasNip04 = typeof (signer as { nip04?: { encrypt?: unknown; decrypt?: unknown } } | undefined)?.nip04?.encrypt === 'function'
const hasNip44 = typeof (signer as { nip44?: { encrypt?: unknown; decrypt?: unknown } } | undefined)?.nip44?.encrypt === 'function'
const doEncrypt = async (mode: 'nip44' | 'nip04') => {
if (!signer || !pubkey) return
try {
const api = (signer as { [key: string]: { encrypt: (pubkey: string, message: string) => Promise<string> } })[mode]
DebugBus.info('debug', `encrypt start ${mode}`, { pubkey, len: payload.length })
// Start live timing
const start = performance.now()
setLiveTiming(prev => ({ ...prev, [mode]: { type: 'encrypt', startTime: start } }))
const cipher = await api.encrypt(pubkey, payload)
const ms = Math.round(performance.now() - start)
// Stop live timing
setLiveTiming(prev => ({ ...prev, [mode]: undefined }))
DebugBus.info('debug', `encrypt done ${mode}`, { len: typeof cipher === 'string' ? cipher.length : -1, ms })
if (mode === 'nip44') setCipher44(cipher)
else setCipher04(cipher)
if (mode === 'nip44') setTEncrypt44(ms)
else setTEncrypt04(ms)
} catch (e) {
// Stop live timing on error
setLiveTiming(prev => ({ ...prev, [mode]: undefined }))
DebugBus.error('debug', `encrypt error ${mode}`, e instanceof Error ? e.message : String(e))
}
}
const doDecrypt = async (mode: 'nip44' | 'nip04') => {
if (!signer || !pubkey) return
try {
const api = (signer as { [key: string]: { decrypt: (pubkey: string, ciphertext: string) => Promise<string> } })[mode]
const cipher = mode === 'nip44' ? cipher44 : cipher04
if (!cipher) {
DebugBus.warn('debug', `no cipher to decrypt for ${mode}`)
return
}
DebugBus.info('debug', `decrypt start ${mode}`, { len: cipher.length })
// Start live timing
const start = performance.now()
setLiveTiming(prev => ({ ...prev, [mode]: { type: 'decrypt', startTime: start } }))
const plain = await api.decrypt(pubkey, cipher)
const ms = Math.round(performance.now() - start)
// Stop live timing
setLiveTiming(prev => ({ ...prev, [mode]: undefined }))
DebugBus.info('debug', `decrypt done ${mode}`, { len: typeof plain === 'string' ? plain.length : -1, ms })
if (mode === 'nip44') setPlain44(String(plain))
else setPlain04(String(plain))
if (mode === 'nip44') setTDecrypt44(ms)
else setTDecrypt04(ms)
} catch (e) {
// Stop live timing on error
setLiveTiming(prev => ({ ...prev, [mode]: undefined }))
DebugBus.error('debug', `decrypt error ${mode}`, e instanceof Error ? e.message : String(e))
}
}
const toggleDebug = () => {
const next = !debugEnabled
setDebugEnabled(next)
if (next) localStorage.setItem('debug', '*')
else localStorage.removeItem('debug')
}
const handleBunkerLogin = async () => {
if (!bunkerUri.trim()) {
setBunkerError('Please enter a bunker URI')
return
}
if (!bunkerUri.startsWith('bunker://')) {
setBunkerError('Invalid bunker URI. Must start with bunker://')
return
}
try {
setIsBunkerLoading(true)
setBunkerError(null)
// Create signer from bunker URI with default permissions
const permissions = getDefaultBunkerPermissions()
const signer = await NostrConnectSigner.fromBunkerURI(bunkerUri, { permissions })
// Get pubkey from signer
const pubkey = await signer.getPublicKey()
// Create account from signer
const account = new Accounts.NostrConnectAccount(pubkey, signer)
// Add to account manager and set active
accountManager.addAccount(account)
accountManager.setActive(account)
// Clear input on success
setBunkerUri('')
} catch (err) {
console.error('[bunker] Login failed:', err)
const errorMessage = err instanceof Error ? err.message : 'Failed to connect to bunker'
// Check for permission-related errors
if (errorMessage.toLowerCase().includes('permission') || errorMessage.toLowerCase().includes('unauthorized')) {
setBunkerError('Your bunker connection is missing signing permissions. Reconnect and approve signing.')
} else {
setBunkerError(errorMessage)
}
} finally {
setIsBunkerLoading(false)
}
}
const CodeBox = ({ value }: { value: string }) => (
<div className="h-20 overflow-y-auto font-mono text-xs leading-relaxed p-2 bg-gray-100 dark:bg-gray-800 rounded whitespace-pre-wrap break-all">
{value || '—'}
</div>
)
const getLiveTiming = (mode: 'nip44' | 'nip04', type: 'encrypt' | 'decrypt') => {
const timing = liveTiming[mode]
if (timing && timing.type === type) {
const elapsed = Math.round(performance.now() - timing.startTime)
return elapsed
}
return null
}
const Stat = ({ label, value, mode, type }: {
label: string;
value?: string | number | null;
mode?: 'nip44' | 'nip04';
type?: 'encrypt' | 'decrypt';
}) => {
const liveValue = mode && type ? getLiveTiming(mode, type) : null
const isLive = !!liveValue
let displayValue: string
if (isLive) {
displayValue = ''
} else if (value !== null && value !== undefined) {
displayValue = `${value}ms`
} else {
displayValue = '—'
}
return (
<span className="badge" style={{ marginRight: 8 }}>
<FontAwesomeIcon icon={faClock} style={{ marginRight: 4, fontSize: '0.8em' }} />
{label}: {isLive ? (
<FontAwesomeIcon icon={faSpinner} className="animate-spin" style={{ fontSize: '0.8em' }} />
) : (
displayValue
)}
</span>
)
}
return (
<div className="settings-view">
<div className="settings-header">
<h2>Debug</h2>
<div className="settings-header-actions">
<span className="opacity-70">Active pubkey:</span> <code className="text-sm">{pubkey || 'none'}</code>
</div>
</div>
<div className="settings-content">
{/* Bunker Login Section */}
<div className="settings-section">
<h3 className="section-title">Bunker Connection</h3>
{!activeAccount ? (
<div>
<div className="text-sm opacity-70 mb-3">Connect to your bunker (Nostr Connect signer) to enable encryption/decryption testing</div>
<div className="flex gap-2 mb-3">
<input
type="text"
className="input flex-1"
placeholder="bunker://..."
value={bunkerUri}
onChange={(e) => setBunkerUri(e.target.value)}
disabled={isBunkerLoading}
/>
<button
className="btn btn-primary"
onClick={handleBunkerLogin}
disabled={isBunkerLoading || !bunkerUri.trim()}
>
{isBunkerLoading ? 'Connecting...' : 'Connect'}
</button>
</div>
{bunkerError && (
<div className="text-sm text-red-600 dark:text-red-400 mb-2">{bunkerError}</div>
)}
</div>
) : (
<div className="flex items-center justify-between">
<div>
<div className="text-sm opacity-70">Connected to bunker</div>
<div className="text-sm font-mono">{pubkey}</div>
</div>
<button
className="btn"
style={{
background: 'rgb(220 38 38)',
color: 'white',
border: '1px solid rgb(220 38 38)',
padding: '0.75rem 1.5rem',
borderRadius: '6px',
fontSize: '1rem',
cursor: 'pointer',
transition: 'background-color 0.2s'
}}
onMouseEnter={(e) => e.currentTarget.style.background = 'rgb(185 28 28)'}
onMouseLeave={(e) => e.currentTarget.style.background = 'rgb(220 38 38)'}
onClick={() => accountManager.removeAccount(activeAccount)}
>
Disconnect
</button>
</div>
)}
</div>
{/* Encryption Tools Section */}
<div className="settings-section">
<h3 className="section-title">Encryption Tools</h3>
<div className="setting-group">
<label className="setting-label">Payload</label>
<textarea
className="textarea w-full bg-gray-50 dark:bg-gray-900 border border-gray-200 dark:border-gray-700"
value={payload}
onChange={e => setPayload(e.target.value)}
rows={3}
/>
<div className="flex gap-2 mt-3 justify-end">
<button className="btn btn-secondary" onClick={() => setPayload(defaultPayload)}>Reset</button>
<button className="btn btn-secondary" onClick={() => { setCipher44(''); setCipher04(''); setPlain44(''); setPlain04(''); setTEncrypt44(null); setTEncrypt04(null); setTDecrypt44(null); setTDecrypt04(null) }}>Clear</button>
</div>
</div>
<div className="grid" style={{ gap: 12, gridTemplateColumns: 'minmax(0,1fr) minmax(0,1fr)' }}>
<div className="setting-group">
<label className="setting-label">NIP-44</label>
<div className="flex gap-2 mb-3">
<button className="btn btn-primary" onClick={() => doEncrypt('nip44')} disabled={!hasNip44}>Encrypt</button>
<button className="btn btn-secondary" onClick={() => doDecrypt('nip44')} disabled={!cipher44}>Decrypt</button>
</div>
<label className="block text-sm opacity-70 mb-2">Encrypted:</label>
<CodeBox value={cipher44} />
<div className="mt-3">
<span className="text-sm opacity-70">Plain:</span>
<CodeBox value={plain44} />
</div>
</div>
<div className="setting-group">
<label className="setting-label">NIP-04</label>
<div className="flex gap-2 mb-3">
<button className="btn btn-primary" onClick={() => doEncrypt('nip04')} disabled={!hasNip04}>Encrypt</button>
<button className="btn btn-secondary" onClick={() => doDecrypt('nip04')} disabled={!cipher04}>Decrypt</button>
</div>
<label className="block text-sm opacity-70 mb-2">Encrypted:</label>
<CodeBox value={cipher04} />
<div className="mt-3">
<span className="text-sm opacity-70">Plain:</span>
<CodeBox value={plain04} />
</div>
</div>
</div>
</div>
{/* Performance Timing Section */}
<div className="settings-section">
<h3 className="section-title">Performance Timing</h3>
<div className="text-sm opacity-70 mb-3">Encryption and decryption operation durations</div>
<div className="grid" style={{ gap: 12, gridTemplateColumns: 'minmax(0,1fr) minmax(0,1fr)' }}>
<div className="setting-group">
<label className="setting-label">NIP-44</label>
<div className="flex flex-wrap items-center gap-2">
<Stat label="enc" value={tEncrypt44} mode="nip44" type="encrypt" />
<Stat label="dec" value={tDecrypt44} mode="nip44" type="decrypt" />
</div>
</div>
<div className="setting-group">
<label className="setting-label">NIP-04</label>
<div className="flex flex-wrap items-center gap-2">
<Stat label="enc" value={tEncrypt04} mode="nip04" type="encrypt" />
<Stat label="dec" value={tDecrypt04} mode="nip04" type="decrypt" />
</div>
</div>
</div>
</div>
{/* Debug Logs Section */}
<div className="settings-section">
<h3 className="section-title">Debug Logs</h3>
<div className="text-sm opacity-70 mb-3">Recent bunker logs:</div>
<div className="max-h-192 overflow-y-auto font-mono text-xs leading-relaxed">
{logs.length === 0 ? (
<div className="text-sm opacity-50 italic">No logs yet</div>
) : (
logs.slice(-200).map((l, i) => (
<div key={i} className="mb-1 p-2 bg-gray-100 dark:bg-gray-800 rounded">
<span className="opacity-70">[{new Date(l.ts).toLocaleTimeString()}]</span> <span className="font-semibold">{l.level.toUpperCase()}</span> {l.source}: {l.message}
{l.data !== undefined && (
<span className="opacity-70"> {typeof l.data === 'string' ? l.data : JSON.stringify(l.data)}</span>
)}
</div>
))
)}
</div>
<div className="mt-3">
<div className="flex justify-end mb-2">
<label className="flex items-center gap-2 cursor-pointer">
<input
type="checkbox"
checked={debugEnabled}
onChange={toggleDebug}
className="checkbox"
/>
<span className="text-sm">Show all applesauce debug logs</span>
</label>
</div>
<div className="flex justify-end">
<button className="btn btn-secondary" onClick={() => setLogs([])}>Clear logs</button>
</div>
</div>
</div>
</div>
<VersionFooter />
</div>
)
}
export default Debug

View File

@@ -0,0 +1,179 @@
import React, { useState } from 'react'
import { Hooks } from 'applesauce-react'
import { Accounts } from 'applesauce-accounts'
import { NostrConnectSigner } from 'applesauce-signers'
import { getDefaultBunkerPermissions } from '../services/nostrConnect'
const LoginOptions: React.FC = () => {
const accountManager = Hooks.useAccountManager()
const [showBunkerInput, setShowBunkerInput] = useState(false)
const [bunkerUri, setBunkerUri] = useState('')
const [isLoading, setIsLoading] = useState(false)
const [error, setError] = useState<string | null>(null)
const handleExtensionLogin = async () => {
try {
setIsLoading(true)
setError(null)
const account = await Accounts.ExtensionAccount.fromExtension()
accountManager.addAccount(account)
accountManager.setActive(account)
} catch (err) {
console.error('Extension login failed:', err)
setError('Login failed. Please install a nostr browser extension and try again.')
} finally {
setIsLoading(false)
}
}
const handleBunkerLogin = async () => {
if (!bunkerUri.trim()) {
setError('Please enter a bunker URI')
return
}
if (!bunkerUri.startsWith('bunker://')) {
setError('Invalid bunker URI. Must start with bunker://')
return
}
try {
setIsLoading(true)
setError(null)
// Create signer from bunker URI with default permissions
const permissions = getDefaultBunkerPermissions()
const signer = await NostrConnectSigner.fromBunkerURI(bunkerUri, { permissions })
// Get pubkey from signer
const pubkey = await signer.getPublicKey()
// Create account from signer
const account = new Accounts.NostrConnectAccount(pubkey, signer)
// Add to account manager and set active
accountManager.addAccount(account)
accountManager.setActive(account)
// Clear input on success
setBunkerUri('')
setShowBunkerInput(false)
} catch (err) {
console.error('[bunker] Login failed:', err)
const errorMessage = err instanceof Error ? err.message : 'Failed to connect to bunker'
// Check for permission-related errors
if (errorMessage.toLowerCase().includes('permission') || errorMessage.toLowerCase().includes('unauthorized')) {
setError('Your bunker connection is missing signing permissions. Reconnect and approve signing.')
} else {
setError(errorMessage)
}
} finally {
setIsLoading(false)
}
}
return (
<div className="empty-state">
<p style={{ marginBottom: '1rem' }}>Login with:</p>
<div style={{ display: 'flex', flexDirection: 'column', gap: '0.75rem', maxWidth: '300px', margin: '0 auto' }}>
<button
onClick={handleExtensionLogin}
disabled={isLoading}
style={{
padding: '0.75rem 1.5rem',
fontSize: '1rem',
cursor: isLoading ? 'wait' : 'pointer',
opacity: isLoading ? 0.6 : 1
}}
>
{isLoading && !showBunkerInput ? 'Connecting...' : 'Extension'}
</button>
{!showBunkerInput ? (
<button
onClick={() => setShowBunkerInput(true)}
disabled={isLoading}
style={{
padding: '0.75rem 1.5rem',
fontSize: '1rem',
cursor: isLoading ? 'wait' : 'pointer',
opacity: isLoading ? 0.6 : 1
}}
>
Bunker
</button>
) : (
<div style={{ display: 'flex', flexDirection: 'column', gap: '0.5rem' }}>
<input
type="text"
placeholder="bunker://..."
value={bunkerUri}
onChange={(e) => setBunkerUri(e.target.value)}
disabled={isLoading}
style={{
padding: '0.75rem',
fontSize: '0.9rem',
width: '100%',
boxSizing: 'border-box'
}}
onKeyDown={(e) => {
if (e.key === 'Enter') {
handleBunkerLogin()
}
}}
/>
<div style={{ display: 'flex', gap: '0.5rem' }}>
<button
onClick={handleBunkerLogin}
disabled={isLoading || !bunkerUri.trim()}
style={{
padding: '0.5rem 1rem',
fontSize: '0.9rem',
flex: 1,
cursor: isLoading || !bunkerUri.trim() ? 'not-allowed' : 'pointer',
opacity: isLoading || !bunkerUri.trim() ? 0.6 : 1
}}
>
{isLoading && showBunkerInput ? 'Connecting...' : 'Connect'}
</button>
<button
onClick={() => {
setShowBunkerInput(false)
setBunkerUri('')
setError(null)
}}
disabled={isLoading}
style={{
padding: '0.5rem 1rem',
fontSize: '0.9rem',
cursor: isLoading ? 'not-allowed' : 'pointer',
opacity: isLoading ? 0.6 : 1
}}
>
Cancel
</button>
</div>
</div>
)}
</div>
{error && (
<p style={{ color: 'var(--color-error, #ef4444)', marginTop: '1rem', fontSize: '0.9rem' }}>
{error}
</p>
)}
<p style={{ marginTop: '1.5rem', fontSize: '0.9rem' }}>
If you aren't on nostr yet, start here:{' '}
<a href="https://nstart.me/" target="_blank" rel="noopener noreferrer">
nstart.me
</a>
</p>
</div>
)
}
export default LoginOptions

View File

@@ -1,16 +1,17 @@
import React, { useState, useEffect } from 'react'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { faSpinner, faHighlighter, faBookmark, faList, faThLarge, faImage, faPenToSquare } from '@fortawesome/free-solid-svg-icons'
import { faHighlighter, faBookmark, faList, faThLarge, faImage, faPenToSquare, faLink } from '@fortawesome/free-solid-svg-icons'
import { Hooks } from 'applesauce-react'
import { BlogPostSkeleton, HighlightSkeleton, BookmarkSkeleton } from './Skeletons'
import { RelayPool } from 'applesauce-relay'
import { nip19 } from 'nostr-tools'
import { useNavigate } from 'react-router-dom'
import { useNavigate, useParams } from 'react-router-dom'
import { Highlight } from '../types/highlights'
import { HighlightItem } from './HighlightItem'
import { fetchHighlights } from '../services/highlightService'
import { fetchBookmarks } from '../services/bookmarkService'
import { fetchReadArticlesWithData } from '../services/libraryService'
import { fetchAllReads, ReadItem } from '../services/readsService'
import { fetchLinks } from '../services/linksService'
import { BlogPostPreview, fetchBlogPostsFromAuthors } from '../services/exploreService'
import { RELAYS } from '../config/relays'
import { Bookmark, IndividualBookmark } from '../types/bookmarks'
@@ -19,15 +20,18 @@ import BlogPostCard from './BlogPostCard'
import { BookmarkItem } from './BookmarkItem'
import IconButton from './IconButton'
import { ViewMode } from './Bookmarks'
import { getCachedMeData, setCachedMeData, updateCachedHighlights } from '../services/meCache'
import { getCachedMeData, updateCachedHighlights } from '../services/meCache'
import { faBooks } from '../icons/customIcons'
import { usePullToRefresh } from 'use-pull-to-refresh'
import RefreshIndicator from './RefreshIndicator'
import { groupIndividualBookmarks, hasContent } from '../utils/bookmarkUtils'
import BookmarkFilters, { BookmarkFilterType } from './BookmarkFilters'
import { filterBookmarksByType } from '../utils/bookmarkTypeClassifier'
import { generateArticleIdentifier, loadReadingPosition } from '../services/readingPositionService'
import ArchiveFilters, { ArchiveFilterType } from './ArchiveFilters'
import ReadingProgressFilters, { ReadingProgressFilterType } from './ReadingProgressFilters'
import { filterByReadingProgress } from '../utils/readingProgressUtils'
import { deriveReadsFromBookmarks } from '../utils/readsFromBookmarks'
import { deriveLinksFromBookmarks } from '../utils/linksFromBookmarks'
import { mergeReadItem } from '../utils/readItemMerge'
interface MeProps {
relayPool: RelayPool
@@ -35,12 +39,15 @@ interface MeProps {
pubkey?: string // Optional pubkey for viewing other users' profiles
}
type TabType = 'highlights' | 'reading-list' | 'archive' | 'writings'
type TabType = 'highlights' | 'reading-list' | 'reads' | 'links' | 'writings'
// Valid reading progress filters
const VALID_FILTERS: ReadingProgressFilterType[] = ['all', 'unopened', 'started', 'reading', 'completed']
const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: propPubkey }) => {
const activeAccount = Hooks.useActiveAccount()
const eventStore = Hooks.useEventStore()
const navigate = useNavigate()
const { filter: urlFilter } = useParams<{ filter?: string }>()
const [activeTab, setActiveTab] = useState<TabType>(propActiveTab || 'highlights')
// Use provided pubkey or fall back to active account
@@ -48,14 +55,22 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
const isOwnProfile = !propPubkey || (activeAccount?.pubkey === propPubkey)
const [highlights, setHighlights] = useState<Highlight[]>([])
const [bookmarks, setBookmarks] = useState<Bookmark[]>([])
const [readArticles, setReadArticles] = useState<BlogPostPreview[]>([])
const [reads, setReads] = useState<ReadItem[]>([])
const [, setReadsMap] = useState<Map<string, ReadItem>>(new Map())
const [links, setLinks] = useState<ReadItem[]>([])
const [, setLinksMap] = useState<Map<string, ReadItem>>(new Map())
const [writings, setWritings] = useState<BlogPostPreview[]>([])
const [loading, setLoading] = useState(true)
const [loadedTabs, setLoadedTabs] = useState<Set<TabType>>(new Set())
const [viewMode, setViewMode] = useState<ViewMode>('cards')
const [refreshTrigger, setRefreshTrigger] = useState(0)
const [bookmarkFilter, setBookmarkFilter] = useState<BookmarkFilterType>('all')
const [archiveFilter, setArchiveFilter] = useState<ArchiveFilterType>('all')
const [readingPositions, setReadingPositions] = useState<Map<string, number>>(new Map())
// Initialize reading progress filter from URL param
const initialFilter = urlFilter && VALID_FILTERS.includes(urlFilter as ReadingProgressFilterType)
? (urlFilter as ReadingProgressFilterType)
: 'all'
const [readingProgressFilter, setReadingProgressFilter] = useState<ReadingProgressFilterType>(initialFilter)
// Update local state when prop changes
useEffect(() => {
@@ -64,131 +79,246 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
}
}, [propActiveTab])
// Sync filter state with URL changes
useEffect(() => {
const loadData = async () => {
if (!viewingPubkey) {
setLoading(false)
return
}
const filterFromUrl = urlFilter && VALID_FILTERS.includes(urlFilter as ReadingProgressFilterType)
? (urlFilter as ReadingProgressFilterType)
: 'all'
setReadingProgressFilter(filterFromUrl)
}, [urlFilter])
// Handler to change reading progress filter and update URL
const handleReadingProgressFilterChange = (filter: ReadingProgressFilterType) => {
setReadingProgressFilter(filter)
if (activeTab === 'reads') {
if (filter === 'all') {
navigate('/me/reads', { replace: true })
} else {
navigate(`/me/reads/${filter}`, { replace: true })
}
}
}
// Tab-specific loading functions
const loadHighlightsTab = async () => {
if (!viewingPubkey) return
// Only show loading skeleton if tab hasn't been loaded yet
const hasBeenLoaded = loadedTabs.has('highlights')
try {
if (!hasBeenLoaded) setLoading(true)
const userHighlights = await fetchHighlights(relayPool, viewingPubkey)
setHighlights(userHighlights)
setLoadedTabs(prev => new Set(prev).add('highlights'))
} catch (err) {
console.error('Failed to load highlights:', err)
} finally {
if (!hasBeenLoaded) setLoading(false)
}
}
const loadWritingsTab = async () => {
if (!viewingPubkey) return
const hasBeenLoaded = loadedTabs.has('writings')
try {
if (!hasBeenLoaded) setLoading(true)
const userWritings = await fetchBlogPostsFromAuthors(relayPool, [viewingPubkey], RELAYS)
setWritings(userWritings)
setLoadedTabs(prev => new Set(prev).add('writings'))
} catch (err) {
console.error('Failed to load writings:', err)
} finally {
if (!hasBeenLoaded) setLoading(false)
}
}
const loadReadingListTab = async () => {
if (!viewingPubkey || !isOwnProfile || !activeAccount) return
const hasBeenLoaded = loadedTabs.has('reading-list')
try {
if (!hasBeenLoaded) setLoading(true)
try {
setLoading(true)
// Seed from cache if available to avoid empty flash (own profile only)
if (isOwnProfile) {
const cached = getCachedMeData(viewingPubkey)
if (cached) {
setHighlights(cached.highlights)
setBookmarks(cached.bookmarks)
setReadArticles(cached.readArticles)
}
}
// Fetch highlights and writings (public data)
const [userHighlights, userWritings] = await Promise.all([
fetchHighlights(relayPool, viewingPubkey),
fetchBlogPostsFromAuthors(relayPool, [viewingPubkey], RELAYS)
])
setHighlights(userHighlights)
setWritings(userWritings)
// Only fetch private data for own profile
if (isOwnProfile && activeAccount) {
const userReadArticles = await fetchReadArticlesWithData(relayPool, viewingPubkey)
setReadArticles(userReadArticles)
// Fetch bookmarks using callback pattern
let fetchedBookmarks: Bookmark[] = []
try {
await fetchBookmarks(relayPool, activeAccount, (newBookmarks) => {
fetchedBookmarks = newBookmarks
setBookmarks(newBookmarks)
})
} catch (err) {
console.warn('Failed to load bookmarks:', err)
setBookmarks([])
}
// Update cache with all fetched data
setCachedMeData(viewingPubkey, userHighlights, fetchedBookmarks, userReadArticles)
} else {
setBookmarks([])
setReadArticles([])
}
await fetchBookmarks(relayPool, activeAccount, (newBookmarks) => {
setBookmarks(newBookmarks)
})
} catch (err) {
console.error('Failed to load data:', err)
// No blocking error - user can pull-to-refresh
} finally {
setLoading(false)
console.warn('Failed to load bookmarks:', err)
setBookmarks([])
}
setLoadedTabs(prev => new Set(prev).add('reading-list'))
} catch (err) {
console.error('Failed to load reading list:', err)
} finally {
if (!hasBeenLoaded) setLoading(false)
}
}
loadData()
}, [relayPool, viewingPubkey, isOwnProfile, activeAccount, refreshTrigger])
// Load reading positions for read articles (only for own profile)
useEffect(() => {
const loadPositions = async () => {
if (!isOwnProfile || !activeAccount || !relayPool || !eventStore || readArticles.length === 0) {
console.log('🔍 [Archive] Skipping position load:', {
isOwnProfile,
hasAccount: !!activeAccount,
hasRelayPool: !!relayPool,
hasEventStore: !!eventStore,
articlesCount: readArticles.length
})
return
const loadReadsTab = async () => {
if (!viewingPubkey || !isOwnProfile || !activeAccount) return
const hasBeenLoaded = loadedTabs.has('reads')
try {
if (!hasBeenLoaded) setLoading(true)
// Ensure bookmarks are loaded
let fetchedBookmarks: Bookmark[] = bookmarks
if (bookmarks.length === 0) {
try {
await fetchBookmarks(relayPool, activeAccount, (newBookmarks) => {
fetchedBookmarks = newBookmarks
setBookmarks(newBookmarks)
})
} catch (err) {
console.warn('Failed to load bookmarks:', err)
fetchedBookmarks = []
}
}
console.log('📊 [Archive] Loading reading positions for', readArticles.length, 'articles')
const positions = new Map<string, number>()
// Load positions for all read articles
await Promise.all(
readArticles.map(async (post) => {
try {
const dTag = post.event.tags.find(t => t[0] === 'd')?.[1] || ''
const naddr = nip19.naddrEncode({
kind: 30023,
pubkey: post.author,
identifier: dTag
})
const articleUrl = `nostr:${naddr}`
const identifier = generateArticleIdentifier(articleUrl)
console.log('🔍 [Archive] Loading position for:', post.title?.slice(0, 50), 'identifier:', identifier.slice(0, 32))
const savedPosition = await loadReadingPosition(
relayPool,
eventStore,
activeAccount.pubkey,
identifier
)
if (savedPosition && savedPosition.position > 0) {
console.log('✅ [Archive] Found position:', Math.round(savedPosition.position * 100) + '%', 'for', post.title?.slice(0, 50))
positions.set(post.event.id, savedPosition.position)
} else {
console.log('❌ [Archive] No position found for:', post.title?.slice(0, 50))
}
} catch (error) {
console.warn('⚠️ [Archive] Failed to load reading position for article:', error)
// Derive reads from bookmarks immediately
const initialReads = deriveReadsFromBookmarks(fetchedBookmarks)
const initialMap = new Map(initialReads.map(item => [item.id, item]))
setReadsMap(initialMap)
setReads(initialReads)
setLoadedTabs(prev => new Set(prev).add('reads'))
if (!hasBeenLoaded) setLoading(false)
// Background enrichment: merge reading progress and mark-as-read
// Only update items that are already in our map
fetchAllReads(relayPool, viewingPubkey, fetchedBookmarks, (item) => {
console.log('📈 [Reads] Enrichment item received:', {
id: item.id.slice(0, 20) + '...',
progress: item.readingProgress,
hasProgress: item.readingProgress !== undefined && item.readingProgress > 0
})
setReadsMap(prevMap => {
// Only update if item exists in our current map
if (!prevMap.has(item.id)) {
console.log('⚠️ [Reads] Item not in map, skipping:', item.id.slice(0, 20) + '...')
return prevMap
}
const newMap = new Map(prevMap)
const merged = mergeReadItem(newMap, item)
if (merged) {
console.log('✅ [Reads] Merged progress:', item.id.slice(0, 20) + '...', item.readingProgress)
// Update reads array after map is updated
setReads(Array.from(newMap.values()))
return newMap
}
return prevMap
})
)
}).catch(err => console.warn('Failed to enrich reads:', err))
} catch (err) {
console.error('Failed to load reads:', err)
if (!hasBeenLoaded) setLoading(false)
}
}
console.log('📊 [Archive] Loaded positions for', positions.size, '/', readArticles.length, 'articles')
setReadingPositions(positions)
const loadLinksTab = async () => {
if (!viewingPubkey || !isOwnProfile || !activeAccount) return
const hasBeenLoaded = loadedTabs.has('links')
try {
if (!hasBeenLoaded) setLoading(true)
// Ensure bookmarks are loaded
let fetchedBookmarks: Bookmark[] = bookmarks
if (bookmarks.length === 0) {
try {
await fetchBookmarks(relayPool, activeAccount, (newBookmarks) => {
fetchedBookmarks = newBookmarks
setBookmarks(newBookmarks)
})
} catch (err) {
console.warn('Failed to load bookmarks:', err)
fetchedBookmarks = []
}
}
// Derive links from bookmarks immediately
const initialLinks = deriveLinksFromBookmarks(fetchedBookmarks)
const initialMap = new Map(initialLinks.map(item => [item.id, item]))
setLinksMap(initialMap)
setLinks(initialLinks)
setLoadedTabs(prev => new Set(prev).add('links'))
if (!hasBeenLoaded) setLoading(false)
// Background enrichment: merge reading progress and mark-as-read
// Only update items that are already in our map
fetchLinks(relayPool, viewingPubkey, (item) => {
setLinksMap(prevMap => {
// Only update if item exists in our current map
if (!prevMap.has(item.id)) return prevMap
const newMap = new Map(prevMap)
if (mergeReadItem(newMap, item)) {
// Update links array after map is updated
setLinks(Array.from(newMap.values()))
return newMap
}
return prevMap
})
}).catch(err => console.warn('Failed to enrich links:', err))
} catch (err) {
console.error('Failed to load links:', err)
if (!hasBeenLoaded) setLoading(false)
}
}
// Load active tab data
useEffect(() => {
if (!viewingPubkey || !activeTab) {
setLoading(false)
return
}
loadPositions()
}, [readArticles, isOwnProfile, activeAccount, relayPool, eventStore])
// Load cached data immediately if available
if (isOwnProfile) {
const cached = getCachedMeData(viewingPubkey)
if (cached) {
setHighlights(cached.highlights)
setBookmarks(cached.bookmarks)
setReads(cached.reads || [])
setLinks(cached.links || [])
}
}
// Pull-to-refresh
// Load data for active tab (refresh in background if already loaded)
switch (activeTab) {
case 'highlights':
loadHighlightsTab()
break
case 'writings':
loadWritingsTab()
break
case 'reading-list':
loadReadingListTab()
break
case 'reads':
loadReadsTab()
break
case 'links':
loadLinksTab()
break
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [activeTab, viewingPubkey, refreshTrigger])
// Pull-to-refresh - reload active tab without clearing state
const { isRefreshing, pullPosition } = usePullToRefresh({
onRefresh: () => {
// Just trigger refresh - loaders will merge new data
setRefreshTrigger(prev => prev + 1)
},
maximumPullLength: 240,
@@ -217,6 +347,49 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
return `/a/${naddr}`
}
const getReadItemUrl = (item: ReadItem) => {
if (item.type === 'article') {
// ID is already in naddr format
return `/a/${item.id}`
} else if (item.url) {
return `/r/${encodeURIComponent(item.url)}`
}
return '#'
}
const convertReadItemToBlogPostPreview = (item: ReadItem): BlogPostPreview => {
if (item.event) {
return {
event: item.event,
title: item.title || 'Untitled',
summary: item.summary,
image: item.image,
published: item.published,
author: item.author || item.event.pubkey
}
}
// Create a mock event for external URLs
const mockEvent = {
id: item.id,
pubkey: item.author || '',
created_at: item.readingTimestamp || Math.floor(Date.now() / 1000),
kind: 1,
tags: [] as string[][],
content: item.title || item.url || 'Untitled',
sig: ''
} as const
return {
event: mockEvent as unknown as import('nostr-tools').NostrEvent,
title: item.title || item.url || 'Untitled',
summary: item.summary,
image: item.image,
published: item.published,
author: item.author || ''
}
}
const handleSelectUrl = (url: string, bookmark?: { id: string; kind: number; tags: string[][]; pubkey: string }) => {
if (bookmark && bookmark.kind === 30023) {
// For kind:30023 articles, navigate to the article route
@@ -245,29 +418,9 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
const groups = groupIndividualBookmarks(filteredBookmarks)
// Apply archive filter
const filteredReadArticles = readArticles.filter(post => {
const position = readingPositions.get(post.event.id)
switch (archiveFilter) {
case 'to-read':
// No position or 0% progress
return !position || position === 0
case 'reading':
// Has some progress but not completed (0 < position < 1)
return position !== undefined && position > 0 && position < 0.95
case 'completed':
// 95% or more read (we consider 95%+ as completed)
return position !== undefined && position >= 0.95
case 'marked':
// Manually marked as read (in archive but no reading position data)
// These are articles that were marked via the emoji reaction
return !position || position === 0
case 'all':
default:
return true
}
})
// Apply reading progress filter
const filteredReads = filterByReadingProgress(reads, readingProgressFilter)
const filteredLinks = filterByReadingProgress(links, readingProgressFilter)
const sections: Array<{ key: string; title: string; items: IndividualBookmark[] }> = [
{ key: 'private', title: 'Private Bookmarks', items: groups.privateItems },
{ key: 'public', title: 'Public Bookmarks', items: groups.publicItems },
@@ -276,7 +429,7 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
]
// Show content progressively - no blocking error screens
const hasData = highlights.length > 0 || bookmarks.length > 0 || readArticles.length > 0 || writings.length > 0
const hasData = highlights.length > 0 || bookmarks.length > 0 || reads.length > 0 || links.length > 0 || writings.length > 0
const showSkeletons = loading && !hasData
const renderTabContent = () => {
@@ -291,9 +444,9 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
</div>
)
}
return highlights.length === 0 ? (
return highlights.length === 0 && !loading ? (
<div className="explore-loading" style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', padding: '4rem', color: 'var(--text-secondary)' }}>
<FontAwesomeIcon icon={faSpinner} spin size="2x" />
No highlights yet.
</div>
) : (
<div className="highlights-list me-highlights-list">
@@ -320,9 +473,9 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
</div>
)
}
return allIndividualBookmarks.length === 0 ? (
return allIndividualBookmarks.length === 0 && !loading ? (
<div className="explore-loading" style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', padding: '4rem', color: 'var(--text-secondary)' }}>
<FontAwesomeIcon icon={faSpinner} spin size="2x" />
No bookmarks yet.
</div>
) : (
<div className="bookmarks-list">
@@ -386,8 +539,9 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
</div>
)
case 'archive':
if (showSkeletons) {
case 'reads':
// Show loading skeletons only while initially loading
if (loading && !loadedTabs.has('reads')) {
return (
<div className="explore-grid">
{Array.from({ length: 6 }).map((_, i) => (
@@ -396,32 +550,84 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
</div>
)
}
return readArticles.length === 0 ? (
<div className="explore-loading" style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', padding: '4rem', color: 'var(--text-secondary)' }}>
<FontAwesomeIcon icon={faSpinner} spin size="2x" />
</div>
) : (
// Show empty state if loaded but no reads
if (reads.length === 0 && loadedTabs.has('reads')) {
return (
<div className="explore-loading" style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', padding: '4rem', color: 'var(--text-secondary)' }}>
No articles read yet.
</div>
)
}
// Show reads with filters
return (
<>
{readArticles.length > 0 && (
<ArchiveFilters
selectedFilter={archiveFilter}
onFilterChange={setArchiveFilter}
/>
)}
{filteredReadArticles.length === 0 ? (
<ReadingProgressFilters
selectedFilter={readingProgressFilter}
onFilterChange={handleReadingProgressFilterChange}
/>
{filteredReads.length === 0 ? (
<div className="explore-loading" style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', padding: '4rem', color: 'var(--text-secondary)' }}>
No articles match this filter.
</div>
) : (
<div className="explore-grid">
{filteredReadArticles.map((post) => (
<BlogPostCard
key={post.event.id}
post={post}
href={getPostUrl(post)}
readingProgress={readingPositions.get(post.event.id)}
/>
))}
{filteredReads.map((item) => (
<BlogPostCard
key={item.id}
post={convertReadItemToBlogPostPreview(item)}
href={getReadItemUrl(item)}
readingProgress={item.readingProgress}
/>
))}
</div>
)}
</>
)
case 'links':
// Show loading skeletons only while initially loading
if (loading && !loadedTabs.has('links')) {
return (
<div className="explore-grid">
{Array.from({ length: 6 }).map((_, i) => (
<BlogPostSkeleton key={i} />
))}
</div>
)
}
// Show empty state if loaded but no links
if (links.length === 0 && loadedTabs.has('links')) {
return (
<div className="explore-loading" style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', padding: '4rem', color: 'var(--text-secondary)' }}>
No links with reading progress yet.
</div>
)
}
// Show links with filters
return (
<>
<ReadingProgressFilters
selectedFilter={readingProgressFilter}
onFilterChange={handleReadingProgressFilterChange}
/>
{filteredLinks.length === 0 ? (
<div className="explore-loading" style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', padding: '4rem', color: 'var(--text-secondary)' }}>
No links match this filter.
</div>
) : (
<div className="explore-grid">
{filteredLinks.map((item) => (
<BlogPostCard
key={item.id}
post={convertReadItemToBlogPostPreview(item)}
href={getReadItemUrl(item)}
readingProgress={item.readingProgress}
/>
))}
</div>
)}
</>
@@ -437,9 +643,9 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
</div>
)
}
return writings.length === 0 ? (
return writings.length === 0 && !loading ? (
<div className="explore-loading" style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', padding: '4rem', color: 'var(--text-secondary)' }}>
<FontAwesomeIcon icon={faSpinner} spin size="2x" />
No articles written yet.
</div>
) : (
<div className="explore-grid">
@@ -487,12 +693,20 @@ const Me: React.FC<MeProps> = ({ relayPool, activeTab: propActiveTab, pubkey: pr
<span className="tab-label">Bookmarks</span>
</button>
<button
className={`me-tab ${activeTab === 'archive' ? 'active' : ''}`}
data-tab="archive"
onClick={() => navigate('/me/archive')}
className={`me-tab ${activeTab === 'reads' ? 'active' : ''}`}
data-tab="reads"
onClick={() => navigate('/me/reads')}
>
<FontAwesomeIcon icon={faBooks} />
<span className="tab-label">Archive</span>
<span className="tab-label">Reads</span>
</button>
<button
className={`me-tab ${activeTab === 'links' ? 'active' : ''}`}
data-tab="links"
onClick={() => navigate('/me/links')}
>
<FontAwesomeIcon icon={faLink} />
<span className="tab-label">Links</span>
</button>
</>
)}

View File

@@ -0,0 +1,47 @@
import React from 'react'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { faBookOpen, faCheckCircle, faAsterisk } from '@fortawesome/free-solid-svg-icons'
import { faEnvelope, faEnvelopeOpen } from '@fortawesome/free-regular-svg-icons'
export type ReadingProgressFilterType = 'all' | 'unopened' | 'started' | 'reading' | 'completed'
interface ReadingProgressFiltersProps {
selectedFilter: ReadingProgressFilterType
onFilterChange: (filter: ReadingProgressFilterType) => void
}
const ReadingProgressFilters: React.FC<ReadingProgressFiltersProps> = ({ selectedFilter, onFilterChange }) => {
const filters = [
{ type: 'all' as const, icon: faAsterisk, label: 'All' },
{ type: 'unopened' as const, icon: faEnvelope, label: 'Unopened' },
{ type: 'started' as const, icon: faEnvelopeOpen, label: 'Started' },
{ type: 'reading' as const, icon: faBookOpen, label: 'Reading' },
{ type: 'completed' as const, icon: faCheckCircle, label: 'Completed' }
]
return (
<div className="bookmark-filters">
{filters.map(filter => {
const isActive = selectedFilter === filter.type
// Only "completed" gets green color, everything else uses default blue
const activeStyle = isActive && filter.type === 'completed' ? { color: '#10b981' } : undefined
return (
<button
key={filter.type}
onClick={() => onFilterChange(filter.type)}
className={`filter-btn ${isActive ? 'active' : ''}`}
title={filter.label}
aria-label={`Filter by ${filter.label}`}
style={activeStyle}
>
<FontAwesomeIcon icon={filter.icon} />
</button>
)
})}
</div>
)
}
export default ReadingProgressFilters
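Illustrative sketch, not part of the changeset: the component above is fully controlled, so a parent owns the selected filter and passes both props down (component path assumed from the imports in Me.tsx; in the real app, Me.tsx drives it via readingProgressFilter / handleReadingProgressFilterChange).
import React, { useState } from 'react'
import ReadingProgressFilters, { ReadingProgressFilterType } from './ReadingProgressFilters'
// Hypothetical host component for demonstration only.
const FilterDemo: React.FC = () => {
  const [filter, setFilter] = useState<ReadingProgressFilterType>('all')
  return <ReadingProgressFilters selectedFilter={filter} onFilterChange={setFilter} />
}
export default FilterDemo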

View File

@@ -19,6 +19,21 @@ export const ReadingProgressIndicator: React.FC<ReadingProgressIndicatorProps> =
}) => {
const clampedProgress = Math.min(100, Math.max(0, progress))
// Determine reading state based on progress (matching readingProgressUtils.ts logic)
const progressDecimal = clampedProgress / 100
const isStarted = progressDecimal > 0 && progressDecimal <= 0.10
// Determine bar color based on state
let barColorClass = ''
let barColorStyle: string | undefined = 'var(--color-primary)' // Default blue
if (isComplete) {
barColorClass = 'bg-green-500'
barColorStyle = undefined
} else if (isStarted) {
barColorStyle = 'var(--color-text)' // Neutral text color (matches card titles)
}
// Calculate left and right offsets based on sidebar states (desktop only)
const leftOffset = isSidebarCollapsed
? 'var(--sidebar-collapsed-width)'
@@ -42,14 +57,10 @@ export const ReadingProgressIndicator: React.FC<ReadingProgressIndicatorProps> =
style={{ backgroundColor: 'var(--color-border)' }}
>
<div
className={`h-full rounded-full transition-all duration-300 relative ${
isComplete
? 'bg-green-500'
: ''
}`}
className={`h-full rounded-full transition-all duration-300 relative ${barColorClass}`}
style={{
width: `${clampedProgress}%`,
backgroundColor: isComplete ? undefined : 'var(--color-primary)'
backgroundColor: barColorStyle
}}
>
<div className="absolute inset-0 bg-gradient-to-r from-transparent via-white/30 to-transparent animate-[shimmer_2s_infinite]" />
@@ -60,7 +71,9 @@ export const ReadingProgressIndicator: React.FC<ReadingProgressIndicatorProps> =
className={`text-[0.625rem] font-normal min-w-[32px] text-right tabular-nums ${
isComplete ? 'text-green-500' : ''
}`}
style={{ color: isComplete ? undefined : 'var(--color-text-muted)' }}
style={{
color: isComplete ? undefined : isStarted ? 'var(--color-text)' : 'var(--color-text-muted)'
}}
>
{isComplete ? '✓' : `${clampedProgress}%`}
</div>

View File

@@ -0,0 +1,30 @@
import { useEffect } from 'react'
import { useLocation, useMatch } from 'react-router-dom'
export default function RouteDebug() {
const location = useLocation()
const matchArticle = useMatch('/a/:naddr')
useEffect(() => {
const params = new URLSearchParams(location.search)
if (params.get('debug') !== '1') return
const info: Record<string, unknown> = {
pathname: location.pathname,
search: location.search || null,
matchedArticleRoute: Boolean(matchArticle),
referrer: document.referrer || null
}
if (location.pathname === '/') {
// Unexpected during deep-link refresh tests
console.warn('[RouteDebug] unexpected root redirect', info)
} else {
console.debug('[RouteDebug]', info)
}
}, [location, matchArticle])
return null
}

View File

@@ -11,6 +11,7 @@ import ZapSettings from './Settings/ZapSettings'
import RelaySettings from './Settings/RelaySettings'
import PWASettings from './Settings/PWASettings'
import { useRelayStatus } from '../hooks/useRelayStatus'
import VersionFooter from './VersionFooter'
const DEFAULT_SETTINGS: UserSettings = {
collapseOnArticleOpen: true,
@@ -167,6 +168,7 @@ const Settings: React.FC<SettingsProps> = ({ settings, onSave, onClose, relayPoo
<PWASettings settings={localSettings} onUpdate={handleUpdate} onClose={onClose} />
<RelaySettings relayStatuses={relayStatuses} onClose={onClose} />
</div>
<VersionFooter />
</div>
)
}

View File

@@ -0,0 +1,32 @@
/* global __APP_VERSION__, __GIT_COMMIT__, __GIT_COMMIT_URL__, __RELEASE_URL__ */
import React from 'react'
const VersionFooter: React.FC = () => {
return (
<div className="text-xs opacity-60 mt-4 px-4 pb-3 select-text">
<span>
{typeof __RELEASE_URL__ !== 'undefined' && __RELEASE_URL__ ? (
<a href={__RELEASE_URL__} target="_blank" rel="noopener noreferrer">
Version {typeof __APP_VERSION__ !== 'undefined' ? __APP_VERSION__ : 'dev'}
</a>
) : (
`Version ${typeof __APP_VERSION__ !== 'undefined' ? __APP_VERSION__ : 'dev'}`
)}
</span>
{typeof __GIT_COMMIT__ !== 'undefined' && __GIT_COMMIT__ ? (
<span>
{' '}·{' '}
{typeof __GIT_COMMIT_URL__ !== 'undefined' && __GIT_COMMIT_URL__ ? (
<a href={__GIT_COMMIT_URL__} target="_blank" rel="noopener noreferrer">
<code>{__GIT_COMMIT__.slice(0, 7)}</code>
</a>
) : (
<code>{__GIT_COMMIT__.slice(0, 7)}</code>
)}
</span>
) : null}
</div>
)
}
export default VersionFooter

src/config/kinds.ts Normal file
View File

@@ -0,0 +1,15 @@
// Nostr event kinds used throughout the application
export const KINDS = {
Highlights: 9802, // NIP-84 highlight
BlogPost: 30023, // NIP-23 long-form article
AppData: 30078, // NIP-78 application data (reading positions)
List: 30001, // NIP-51 generic list (deprecated, addressable; still published by some clients)
ListReplaceable: 30003, // NIP-51 bookmark set (addressable)
ListSimple: 10003, // NIP-51 bookmarks list (standard replaceable list)
WebBookmark: 39701, // NIP-B0 web bookmark
ReactionToEvent: 7, // emoji reaction to event (used for mark-as-read)
ReactionToUrl: 17 // emoji reaction to URL (used for mark-as-read)
} as const
export type KindValue = typeof KINDS[keyof typeof KINDS]

View File

@@ -7,6 +7,7 @@
export const RELAYS = [
'ws://localhost:10547',
'ws://localhost:4869',
'wss://relay.nsec.app',
'wss://relay.damus.io',
'wss://nos.lol',
'wss://relay.nostr.band',

View File

@@ -9,6 +9,7 @@ import { ReadableContent } from '../services/readerService'
import { createHighlight } from '../services/highlightCreationService'
import { HighlightButtonRef } from '../components/HighlightButton'
import { UserSettings } from '../services/settingsService'
import { useToast } from './useToast'
interface UseHighlightCreationParams {
activeAccount: IAccount | undefined
@@ -32,6 +33,7 @@ export const useHighlightCreation = ({
settings
}: UseHighlightCreationParams) => {
const highlightButtonRef = useRef<HighlightButtonRef>(null)
const { showToast } = useToast()
const handleTextSelection = useCallback((text: string) => {
highlightButtonRef.current?.updateSelection(text)
@@ -92,10 +94,19 @@ export const useHighlightCreation = ({
})
} catch (error) {
console.error('❌ Failed to create highlight:', error)
// Show user-friendly error messages
const errorMessage = error instanceof Error ? error.message : 'Failed to create highlight'
if (errorMessage.toLowerCase().includes('permission') || errorMessage.toLowerCase().includes('unauthorized')) {
showToast('Reconnect bunker and approve signing permissions to create highlights')
} else {
showToast(`Failed to create highlight: ${errorMessage}`)
}
// Re-throw to allow parent to handle
throw error
}
}, [activeAccount, relayPool, eventStore, currentArticle, selectedUrl, readerContent, onHighlightCreated, settings])
}, [activeAccount, relayPool, eventStore, currentArticle, selectedUrl, readerContent, onHighlightCreated, settings, showToast])
return {
highlightButtonRef,

View File

@@ -11,6 +11,18 @@ type UnlockHiddenTagsFn = typeof Helpers.unlockHiddenTags
type HiddenContentSigner = Parameters<UnlockHiddenTagsFn>[1]
type UnlockMode = Parameters<UnlockHiddenTagsFn>[2]
/**
* Wrap a decrypt promise with a timeout so a slow or stalled bunker signer cannot hang bookmark processing (defaults to 30s)
*/
function withDecryptTimeout<T>(promise: Promise<T>, timeoutMs = 30000): Promise<T> {
return Promise.race([
promise,
new Promise<T>((_, reject) =>
setTimeout(() => reject(new Error(`Decrypt timeout after ${timeoutMs}ms`)), timeoutMs)
)
])
}
export async function collectBookmarksFromEvents(
bookmarkListEvents: NostrEvent[],
activeAccount: ActiveAccount,
@@ -80,7 +92,8 @@ export async function collectBookmarksFromEvents(
} catch {
try {
await Helpers.unlockHiddenTags(evt, signerCandidate as HiddenContentSigner, 'nip44' as UnlockMode)
} catch {
} catch (err) {
console.log("[bunker] ❌ nip44.decrypt failed:", err instanceof Error ? err.message : String(err))
// ignore
}
}
@@ -88,24 +101,26 @@ export async function collectBookmarksFromEvents(
let decryptedContent: string | undefined
try {
if (hasNip44Decrypt(signerCandidate)) {
decryptedContent = await (signerCandidate as { nip44: { decrypt: DecryptFn } }).nip44.decrypt(
decryptedContent = await withDecryptTimeout((signerCandidate as { nip44: { decrypt: DecryptFn } }).nip44.decrypt(
evt.pubkey,
evt.content
)
))
}
} catch {
} catch (err) {
console.log("[bunker] ❌ nip44.decrypt failed:", err instanceof Error ? err.message : String(err))
// ignore
}
if (!decryptedContent) {
try {
if (hasNip04Decrypt(signerCandidate)) {
decryptedContent = await (signerCandidate as { nip04: { decrypt: DecryptFn } }).nip04.decrypt(
decryptedContent = await withDecryptTimeout((signerCandidate as { nip04: { decrypt: DecryptFn } }).nip04.decrypt(
evt.pubkey,
evt.content
)
))
}
} catch {
} catch (err) {
console.log("[bunker] ❌ nip04.decrypt failed:", err instanceof Error ? err.message : String(err))
// ignore
}
}
@@ -127,7 +142,7 @@ export async function collectBookmarksFromEvents(
Reflect.set(evt, BookmarkHiddenSymbol, manualPrivate)
Reflect.set(evt, 'EncryptedContentSymbol', decryptedContent)
// Don't set latestContent to decrypted JSON - it's not user-facing content
} catch {
} catch (err) {
// ignore
}
}

View File

@@ -15,6 +15,7 @@ import { collectBookmarksFromEvents } from './bookmarkProcessing.ts'
import { UserSettings } from './settingsService'
import { rebroadcastEvents } from './rebroadcastService'
import { queryEvents } from './dataFetch'
import { KINDS } from '../config/kinds'
@@ -34,7 +35,7 @@ export const fetchBookmarks = async (
const rawEvents = await queryEvents(
relayPool,
{ kinds: [10003, 30003, 30001, 39701], authors: [activeAccount.pubkey] },
{ kinds: [KINDS.ListSimple, KINDS.ListReplaceable, KINDS.List, KINDS.WebBookmark], authors: [activeAccount.pubkey] },
{}
)
console.log('📊 Raw events fetched:', rawEvents.length, 'events')
@@ -71,7 +72,7 @@ export const fetchBookmarks = async (
})
// Check specifically for Primal's "reads" list
const primalReads = rawEvents.find(e => e.kind === 10003 && e.tags?.find((t: string[]) => t[0] === 'd' && t[1] === 'reads'))
const primalReads = rawEvents.find(e => e.kind === KINDS.ListSimple && e.tags?.find((t: string[]) => t[0] === 'd' && t[1] === 'reads'))
if (primalReads) {
console.log('✅ Found Primal reads list:', primalReads.id.slice(0, 8))
} else {
@@ -84,7 +85,7 @@ export const fetchBookmarks = async (
}
// Aggregate across events
const maybeAccount = activeAccount as AccountWithExtension
console.log('🔐 Account object:', {
console.log('[bunker] 🔐 Account object:', {
hasSignEvent: typeof maybeAccount?.signEvent === 'function',
hasSigner: !!maybeAccount?.signer,
accountType: typeof maybeAccount,
@@ -101,12 +102,19 @@ export const fetchBookmarks = async (
signerCandidate = maybeAccount.signer
}
console.log('🔑 Signer candidate:', !!signerCandidate, typeof signerCandidate)
console.log('[bunker] 🔑 Signer candidate:', !!signerCandidate, typeof signerCandidate)
if (signerCandidate) {
console.log('🔑 Signer has nip04:', hasNip04Decrypt(signerCandidate))
console.log('🔑 Signer has nip44:', hasNip44Decrypt(signerCandidate))
console.log('[bunker] 🔑 Signer has nip04:', hasNip04Decrypt(signerCandidate))
console.log('[bunker] 🔑 Signer has nip44:', hasNip44Decrypt(signerCandidate))
}
const { publicItemsAll, privateItemsAll, newestCreatedAt, latestContent, allTags } = await collectBookmarksFromEvents(
// Debug relay connectivity for bunker relays
try {
const urls = Array.from(relayPool.relays.values()).map(r => ({ url: r.url, connected: (r as unknown as { connected?: boolean }).connected }))
console.log('[bunker] Relay connections:', urls)
} catch (err) { console.warn('[bunker] Failed to read relay connections', err) }
const { publicItemsAll, privateItemsAll, newestCreatedAt, latestContent, allTags } = await collectBookmarksFromEvents(
bookmarkListEvents,
activeAccount,
signerCandidate

View File

@@ -2,6 +2,7 @@ import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { Helpers } from 'applesauce-core'
import { queryEvents } from './dataFetch'
import { KINDS } from '../config/kinds'
const { getArticleTitle, getArticleImage, getArticlePublished, getArticleSummary } = Helpers
@@ -41,7 +42,7 @@ export const fetchBlogPostsFromAuthors = async (
await queryEvents(
relayPool,
{ kinds: [30023], authors: pubkeys, limit: 100 },
{ kinds: [KINDS.BlogPost], authors: pubkeys, limit: 100 },
{
relayUrls,
onEvent: (event: NostrEvent) => {

View File

@@ -46,7 +46,8 @@ export async function createHighlight(
}
// Create EventFactory with the account as signer
const factory = new EventFactory({ signer: account })
console.log("[bunker] Creating EventFactory with signer:", { signerType: account.signer?.constructor?.name })
const factory = new EventFactory({ signer: account.signer })
let blueprintSource: NostrEvent | AddressPointer | string
let context: string | undefined
@@ -116,7 +117,9 @@ export async function createHighlight(
}
// Sign the event
console.log('[bunker] Signing highlight event...', { kind: highlightEvent.kind, tags: highlightEvent.tags.length })
const signedEvent = await factory.sign(highlightEvent)
console.log('[bunker] ✅ Highlight signed successfully!', { id: signedEvent.id.slice(0, 8) })
// Use unified write service to store and publish
await publishEvent(relayPool, eventStore, signedEvent)

View File

@@ -6,6 +6,7 @@ import { prioritizeLocalRelays, partitionRelays } from '../../utils/helpers'
import { eventToHighlight, dedupeHighlights, sortHighlights } from '../highlightEventProcessor'
import { UserSettings } from '../settingsService'
import { rebroadcastEvents } from '../rebroadcastService'
import { KINDS } from '../../config/kinds'
export const fetchHighlights = async (
relayPool: RelayPool,
@@ -21,7 +22,7 @@ export const fetchHighlights = async (
const seenIds = new Set<string>()
const local$ = localRelays.length > 0
? relayPool
.req(localRelays, { kinds: [9802], authors: [pubkey] })
.req(localRelays, { kinds: [KINDS.Highlights], authors: [pubkey] })
.pipe(
onlyEvents(),
tap((event: NostrEvent) => {
@@ -36,7 +37,7 @@ export const fetchHighlights = async (
: new Observable<NostrEvent>((sub) => sub.complete())
const remote$ = remoteRelays.length > 0
? relayPool
.req(remoteRelays, { kinds: [9802], authors: [pubkey] })
.req(remoteRelays, { kinds: [KINDS.Highlights], authors: [pubkey] })
.pipe(
onlyEvents(),
tap((event: NostrEvent) => {

View File

@@ -2,6 +2,7 @@ import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { Helpers } from 'applesauce-core'
import { RELAYS } from '../config/relays'
import { KINDS } from '../config/kinds'
import { MARK_AS_READ_EMOJI } from './reactionService'
import { BlogPostPreview } from './exploreService'
import { queryEvents } from './dataFetch'
@@ -29,8 +30,8 @@ export async function fetchReadArticles(
try {
// Fetch kind:7 and kind:17 reactions in parallel
const [kind7Events, kind17Events] = await Promise.all([
queryEvents(relayPool, { kinds: [7], authors: [userPubkey] }, { relayUrls: RELAYS }),
queryEvents(relayPool, { kinds: [17], authors: [userPubkey] }, { relayUrls: RELAYS })
queryEvents(relayPool, { kinds: [KINDS.ReactionToEvent], authors: [userPubkey] }, { relayUrls: RELAYS }),
queryEvents(relayPool, { kinds: [KINDS.ReactionToUrl], authors: [userPubkey] }, { relayUrls: RELAYS })
])
const readArticles: ReadArticle[] = []
@@ -102,7 +103,7 @@ export async function fetchReadArticlesWithData(
// Filter to only nostr-native articles (kind 30023)
const nostrArticles = readArticles.filter(
article => article.eventKind === 30023 && article.eventId
article => article.eventKind === KINDS.BlogPost && article.eventId
)
if (nostrArticles.length === 0) {
@@ -114,7 +115,7 @@ export async function fetchReadArticlesWithData(
const articleEvents = await queryEvents(
relayPool,
{ kinds: [30023], ids: eventIds },
{ kinds: [KINDS.BlogPost], ids: eventIds },
{ relayUrls: RELAYS }
)

View File

@@ -0,0 +1,90 @@
import { RelayPool } from 'applesauce-relay'
import { fetchReadArticles } from './libraryService'
import { queryEvents } from './dataFetch'
import { RELAYS } from '../config/relays'
import { KINDS } from '../config/kinds'
import { ReadItem } from './readsService'
import { processReadingPositions, processMarkedAsRead, filterValidItems, sortByReadingActivity } from './readingDataProcessor'
import { mergeReadItem } from '../utils/readItemMerge'
/**
* Fetches external URL links with reading progress from:
* - URLs with reading progress (kind:30078)
* - Manually marked as read URLs (kind:7, kind:17)
*/
export async function fetchLinks(
relayPool: RelayPool,
userPubkey: string,
onItem?: (item: ReadItem) => void
): Promise<ReadItem[]> {
console.log('🔗 [Links] Fetching external links for user:', userPubkey.slice(0, 8))
const linksMap = new Map<string, ReadItem>()
// Helper to emit items as they're added/updated
const emitItem = (item: ReadItem) => {
if (onItem && mergeReadItem(linksMap, item)) {
onItem(linksMap.get(item.id)!)
} else if (!onItem) {
linksMap.set(item.id, item)
}
}
try {
// Fetch all data sources in parallel
const [readingPositionEvents, markedAsReadArticles] = await Promise.all([
queryEvents(relayPool, { kinds: [KINDS.AppData], authors: [userPubkey] }, { relayUrls: RELAYS }),
fetchReadArticles(relayPool, userPubkey)
])
console.log('📊 [Links] Data fetched:', {
readingPositions: readingPositionEvents.length,
markedAsRead: markedAsReadArticles.length
})
// Process reading positions and emit external items
processReadingPositions(readingPositionEvents, linksMap)
if (onItem) {
linksMap.forEach(item => {
if (item.type === 'external') {
const hasProgress = (item.readingProgress && item.readingProgress > 0) || item.markedAsRead
if (hasProgress) emitItem(item)
}
})
}
// Process marked-as-read and emit external items
processMarkedAsRead(markedAsReadArticles, linksMap)
if (onItem) {
linksMap.forEach(item => {
if (item.type === 'external') {
const hasProgress = (item.readingProgress && item.readingProgress > 0) || item.markedAsRead
if (hasProgress) emitItem(item)
}
})
}
// Filter for external URLs only with reading progress
const links = Array.from(linksMap.values())
.filter(item => {
// Only external URLs
if (item.type !== 'external') return false
// Only include if there's reading progress or marked as read
const hasProgress = (item.readingProgress && item.readingProgress > 0) || item.markedAsRead
return hasProgress
})
// Apply common validation and sorting
const validLinks = filterValidItems(links)
const sortedLinks = sortByReadingActivity(validLinks)
console.log('✅ [Links] Processed', sortedLinks.length, 'total links')
return sortedLinks
} catch (error) {
console.error('Failed to fetch links:', error)
return []
}
}

View File

@@ -1,11 +1,14 @@
import { Highlight } from '../types/highlights'
import { Bookmark } from '../types/bookmarks'
import { BlogPostPreview } from './exploreService'
import { ReadItem } from './readsService'
export interface MeCache {
highlights: Highlight[]
bookmarks: Bookmark[]
readArticles: BlogPostPreview[]
reads?: ReadItem[]
links?: ReadItem[]
timestamp: number
}

View File

@@ -0,0 +1,26 @@
import { NostrConnectSigner } from 'applesauce-signers'
/**
* Get default NIP-46 permissions for bunker connections
* These permissions cover all event kinds and encryption/decryption operations Boris needs
*/
export function getDefaultBunkerPermissions(): string[] {
return [
// Signing permissions for event kinds we create
...NostrConnectSigner.buildSigningPermissions([
0, // Profile metadata
5, // Event deletion
7, // Reactions (nostr events)
17, // Reactions (websites)
9802, // Highlights
30078, // Settings & reading positions
39701, // Web bookmarks
]),
// Encryption/decryption for hidden content
'nip04_encrypt',
'nip04_decrypt',
'nip44_encrypt',
'nip44_decrypt',
]
}
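Illustrative sketch, not part of the changeset and hedged: assuming applesauce's buildSigningPermissions emits NIP-46 style "sign_event:&lt;kind&gt;" strings, the returned array would look roughly like the comment below (the import path is hypothetical, since the new file's path isn't shown in this view).
import { getDefaultBunkerPermissions } from './nip46Permissions' // hypothetical path
const perms = getDefaultBunkerPermissions()
console.log(perms)
// Assumed shape:
// ['sign_event:0', 'sign_event:5', 'sign_event:7', 'sign_event:17',
//  'sign_event:9802', 'sign_event:30078', 'sign_event:39701',
//  'nip04_encrypt', 'nip04_decrypt', 'nip44_encrypt', 'nip44_decrypt']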

View File

@@ -0,0 +1,147 @@
import { NostrEvent } from 'nostr-tools'
import { ReadItem } from './readsService'
import { fallbackTitleFromUrl } from '../utils/readItemMerge'
const READING_POSITION_PREFIX = 'boris:reading-position:'
interface ReadArticle {
id: string
url?: string
eventId?: string
eventKind?: number
markedAt: number
}
/**
* Processes reading position events into ReadItems
*/
export function processReadingPositions(
events: NostrEvent[],
readsMap: Map<string, ReadItem>
): void {
for (const event of events) {
const dTag = event.tags.find(t => t[0] === 'd')?.[1]
if (!dTag || !dTag.startsWith(READING_POSITION_PREFIX)) continue
const identifier = dTag.replace(READING_POSITION_PREFIX, '')
try {
const positionData = JSON.parse(event.content)
const position = positionData.position
const timestamp = positionData.timestamp
let itemId: string
let itemUrl: string | undefined
let itemType: 'article' | 'external' = 'external'
// Check if it's a nostr article (naddr format)
if (identifier.startsWith('naddr1')) {
itemId = identifier
itemType = 'article'
} else {
// It's a base64url-encoded URL
try {
itemUrl = atob(identifier.replace(/-/g, '+').replace(/_/g, '/'))
itemId = itemUrl
itemType = 'external'
} catch (e) {
console.warn('Failed to decode URL identifier:', identifier)
continue
}
}
// Add or update the item
const existing = readsMap.get(itemId)
if (!existing || !existing.readingTimestamp || timestamp > existing.readingTimestamp) {
readsMap.set(itemId, {
...existing,
id: itemId,
source: 'reading-progress',
type: itemType,
url: itemUrl,
readingProgress: position,
readingTimestamp: timestamp
})
}
} catch (error) {
console.warn('Failed to parse reading position:', error)
}
}
}
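Illustrative sketch, not part of the changeset: a minimal decode of the d-tag format handled above, under the assumptions visible in the code (content is JSON with position/timestamp, article identifiers are bare naddr strings, external-URL identifiers are base64url-encoded, possibly without padding).
const PREFIX = 'boris:reading-position:'
function decodeIdentifier(dTag: string): { type: 'article' | 'external'; id: string } | null {
  if (!dTag.startsWith(PREFIX)) return null
  const identifier = dTag.slice(PREFIX.length)
  if (identifier.startsWith('naddr1')) return { type: 'article', id: identifier }
  // base64url -> base64, then decode back to the original URL
  return { type: 'external', id: atob(identifier.replace(/-/g, '+').replace(/_/g, '/')) }
}
// decodeIdentifier('boris:reading-position:aHR0cHM6Ly9leGFtcGxlLmNvbQ')
//   → { type: 'external', id: 'https://example.com' }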
/**
* Processes marked-as-read articles into ReadItems
*/
export function processMarkedAsRead(
articles: ReadArticle[],
readsMap: Map<string, ReadItem>
): void {
for (const article of articles) {
const existing = readsMap.get(article.id)
if (article.eventId && article.eventKind === 30023) {
// Nostr article
readsMap.set(article.id, {
...existing,
id: article.id,
source: 'marked-as-read',
type: 'article',
markedAsRead: true,
markedAt: article.markedAt,
readingTimestamp: existing?.readingTimestamp || article.markedAt
})
} else if (article.url) {
// External URL
readsMap.set(article.id, {
...existing,
id: article.id,
source: 'marked-as-read',
type: 'external',
url: article.url,
markedAsRead: true,
markedAt: article.markedAt,
readingTimestamp: existing?.readingTimestamp || article.markedAt
})
}
}
}
/**
* Sorts ReadItems by most recent reading activity, newest first (note: sorts the array in place)
*/
export function sortByReadingActivity(items: ReadItem[]): ReadItem[] {
return items.sort((a, b) => {
const timeA = a.readingTimestamp || a.markedAt || 0
const timeB = b.readingTimestamp || b.markedAt || 0
return timeB - timeA
})
}
/**
* Filters out items without timestamps and enriches external items with fallback titles
*/
export function filterValidItems(items: ReadItem[]): ReadItem[] {
return items
.filter(item => {
// Only include items that have a timestamp
const hasTimestamp = (item.readingTimestamp && item.readingTimestamp > 0) ||
(item.markedAt && item.markedAt > 0)
if (!hasTimestamp) return false
// For Nostr articles, we need the event to be valid
if (item.type === 'article' && !item.event) return false
// For external URLs, we need at least a URL
if (item.type === 'external' && !item.url) return false
return true
})
.map(item => {
// Add fallback title for external URLs without titles
if (item.type === 'external' && !item.title && item.url) {
return { ...item, title: fallbackTitleFromUrl(item.url) }
}
return item
})
}

View File

@@ -0,0 +1,197 @@
import { RelayPool } from 'applesauce-relay'
import { NostrEvent } from 'nostr-tools'
import { Helpers } from 'applesauce-core'
import { Bookmark } from '../types/bookmarks'
import { fetchReadArticles } from './libraryService'
import { queryEvents } from './dataFetch'
import { RELAYS } from '../config/relays'
import { KINDS } from '../config/kinds'
import { classifyBookmarkType } from '../utils/bookmarkTypeClassifier'
import { nip19 } from 'nostr-tools'
import { processReadingPositions, processMarkedAsRead, filterValidItems, sortByReadingActivity } from './readingDataProcessor'
import { mergeReadItem } from '../utils/readItemMerge'
const { getArticleTitle, getArticleImage, getArticlePublished, getArticleSummary } = Helpers
export interface ReadItem {
id: string // event ID or URL or coordinate
source: 'bookmark' | 'reading-progress' | 'marked-as-read'
type: 'article' | 'external' // article=kind:30023, external=URL
// Article data
event?: NostrEvent
url?: string
title?: string
summary?: string
image?: string
published?: number
author?: string
// Reading metadata
readingProgress?: number // 0-1
readingTimestamp?: number // Unix timestamp of last reading activity
markedAsRead?: boolean
markedAt?: number
}
/**
* Fetches all reads from multiple sources:
* - Bookmarked articles (kind:30023) and article/website URLs
* - Articles/URLs with reading progress (kind:30078)
* - Manually marked as read articles/URLs (kind:7, kind:17)
*/
export async function fetchAllReads(
relayPool: RelayPool,
userPubkey: string,
bookmarks: Bookmark[],
onItem?: (item: ReadItem) => void
): Promise<ReadItem[]> {
console.log('📚 [Reads] Fetching all reads for user:', userPubkey.slice(0, 8))
const readsMap = new Map<string, ReadItem>()
// Helper to emit items as they're added/updated
const emitItem = (item: ReadItem) => {
if (onItem && mergeReadItem(readsMap, item)) {
onItem(readsMap.get(item.id)!)
} else if (!onItem) {
readsMap.set(item.id, item)
}
}
try {
// Fetch all data sources in parallel
const [readingPositionEvents, markedAsReadArticles] = await Promise.all([
queryEvents(relayPool, { kinds: [KINDS.AppData], authors: [userPubkey] }, { relayUrls: RELAYS }),
fetchReadArticles(relayPool, userPubkey)
])
console.log('📊 [Reads] Data fetched:', {
readingPositions: readingPositionEvents.length,
markedAsRead: markedAsReadArticles.length,
bookmarks: bookmarks.length
})
// Process reading positions and emit items
processReadingPositions(readingPositionEvents, readsMap)
if (onItem) {
readsMap.forEach(item => {
if (item.type === 'article') onItem(item)
})
}
// Process marked-as-read and emit items
processMarkedAsRead(markedAsReadArticles, readsMap)
if (onItem) {
readsMap.forEach(item => {
if (item.type === 'article') onItem(item)
})
}
// 3. Process bookmarked articles and article/website URLs
const allBookmarks = bookmarks.flatMap(b => b.individualBookmarks || [])
for (const bookmark of allBookmarks) {
const bookmarkType = classifyBookmarkType(bookmark)
// Only include articles
if (bookmarkType === 'article') {
// Kind:30023 nostr article
const coordinate = bookmark.id // Already in coordinate format
const existing = readsMap.get(coordinate)
if (!existing) {
const item: ReadItem = {
id: coordinate,
source: 'bookmark',
type: 'article',
readingProgress: 0,
readingTimestamp: bookmark.added_at || bookmark.created_at
}
readsMap.set(coordinate, item)
if (onItem) emitItem(item)
}
}
}
// 4. Fetch full event data for nostr articles
const articleCoordinates = Array.from(readsMap.values())
.filter(item => item.type === 'article' && !item.event)
.map(item => item.id)
if (articleCoordinates.length > 0) {
console.log('📖 [Reads] Fetching article events for', articleCoordinates.length, 'articles')
// Parse coordinates and fetch events
const articlesToFetch: Array<{ pubkey: string; identifier: string }> = []
for (const coord of articleCoordinates) {
try {
// Try to decode as naddr
if (coord.startsWith('naddr1')) {
const decoded = nip19.decode(coord)
if (decoded.type === 'naddr' && decoded.data.kind === KINDS.BlogPost) {
articlesToFetch.push({
pubkey: decoded.data.pubkey,
identifier: decoded.data.identifier || ''
})
}
} else {
// Try coordinate format (kind:pubkey:identifier)
const parts = coord.split(':')
if (parts.length === 3 && parseInt(parts[0]) === KINDS.BlogPost) {
articlesToFetch.push({
pubkey: parts[1],
identifier: parts[2]
})
}
}
} catch (e) {
console.warn('Failed to decode article coordinate:', coord)
}
}
if (articlesToFetch.length > 0) {
const authors = Array.from(new Set(articlesToFetch.map(a => a.pubkey)))
const identifiers = Array.from(new Set(articlesToFetch.map(a => a.identifier)))
const events = await queryEvents(
relayPool,
{ kinds: [KINDS.BlogPost], authors, '#d': identifiers },
{ relayUrls: RELAYS }
)
// Merge event data into ReadItems and emit
for (const event of events) {
const dTag = event.tags.find(t => t[0] === 'd')?.[1] || ''
const coordinate = `${KINDS.BlogPost}:${event.pubkey}:${dTag}`
const item = readsMap.get(coordinate) || readsMap.get(event.id)
if (item) {
item.event = event
item.title = getArticleTitle(event) || 'Untitled'
item.summary = getArticleSummary(event)
item.image = getArticleImage(event)
item.published = getArticlePublished(event)
item.author = event.pubkey
if (onItem) emitItem(item)
}
}
}
}
// 5. Filter for Nostr articles only and apply common validation/sorting
const articles = Array.from(readsMap.values())
.filter(item => item.type === 'article')
const validArticles = filterValidItems(articles)
const sortedReads = sortByReadingActivity(validArticles)
console.log('✅ [Reads] Processed', sortedReads.length, 'total reads')
return sortedReads
} catch (error) {
console.error('Failed to fetch all reads:', error)
return []
}
}

View File

@@ -52,6 +52,11 @@ export async function publishEvent(
})
.catch((error) => {
console.warn('⚠️ Failed to publish event to relays (event still saved locally):', error)
// Surface common bunker signing errors for debugging
if (error instanceof Error && error.message.includes('permission')) {
console.warn('💡 Hint: This may be a bunker permission issue. Ensure your bunker connection has signing permissions.')
}
})
}

src/utils/debugBus.ts Normal file
View File

@@ -0,0 +1,36 @@
export type DebugLevel = 'info' | 'warn' | 'error'
export interface DebugLogEntry {
ts: number
level: DebugLevel
source: string
message: string
data?: unknown
}
type Listener = (entry: DebugLogEntry) => void
const listeners = new Set<Listener>()
const buffer: DebugLogEntry[] = []
const MAX_BUFFER = 300
export const DebugBus = {
log(level: DebugLevel, source: string, message: string, data?: unknown): void {
const entry: DebugLogEntry = { ts: Date.now(), level, source, message, data }
buffer.push(entry)
if (buffer.length > MAX_BUFFER) buffer.shift()
listeners.forEach(l => {
try { l(entry) } catch (err) { console.warn('[DebugBus] listener error:', err) }
})
},
info(source: string, message: string, data?: unknown): void { this.log('info', source, message, data) },
warn(source: string, message: string, data?: unknown): void { this.log('warn', source, message, data) },
error(source: string, message: string, data?: unknown): void { this.log('error', source, message, data) },
subscribe(listener: Listener): () => void {
listeners.add(listener)
return () => listeners.delete(listener)
},
snapshot(): DebugLogEntry[] { return buffer.slice() }
}
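Illustrative sketch, not part of the changeset: minimal usage of the bus above; the /debug page presumably consumes it the same way (snapshot for history, subscribe for live entries).
import { DebugBus, DebugLogEntry } from './debugBus'
const unsubscribe = DebugBus.subscribe((entry: DebugLogEntry) => {
  console.log(`[${entry.level}] ${entry.source}: ${entry.message}`, entry.data ?? '')
})
DebugBus.info('bunker', 'connect requested')   // fanned out to subscribers and buffered
console.log(DebugBus.snapshot().length)        // buffered entries, capped at 300
unsubscribe()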

View File

@@ -0,0 +1,69 @@
import { Bookmark } from '../types/bookmarks'
import { ReadItem } from '../services/readsService'
import { KINDS } from '../config/kinds'
import { fallbackTitleFromUrl } from './readItemMerge'
/**
* Derives ReadItems from bookmarks for external URLs:
* - Web bookmarks (kind:39701)
* - Any bookmark with http(s) URLs in content or urlReferences
*/
export function deriveLinksFromBookmarks(bookmarks: Bookmark[]): ReadItem[] {
const linksMap = new Map<string, ReadItem>()
const allBookmarks = bookmarks.flatMap(b => b.individualBookmarks || [])
for (const bookmark of allBookmarks) {
const urls: string[] = []
// Web bookmarks (kind:39701) - extract from 'd' tag
if (bookmark.kind === KINDS.WebBookmark) {
const dTag = bookmark.tags.find(t => t[0] === 'd')?.[1]
if (dTag) {
const url = dTag.startsWith('http') ? dTag : `https://${dTag}`
urls.push(url)
}
}
// Extract URLs from content if not already captured
if (bookmark.content) {
const urlRegex = /(https?:\/\/[^\s]+)/g
const matches = bookmark.content.match(urlRegex)
if (matches) {
urls.push(...matches)
}
}
// Extract metadata from tags (for web bookmarks and other types)
const title = bookmark.tags.find(t => t[0] === 'title')?.[1]
const summary = bookmark.tags.find(t => t[0] === 'summary')?.[1]
const image = bookmark.tags.find(t => t[0] === 'image')?.[1]
// Create ReadItem for each unique URL
for (const url of [...new Set(urls)]) {
if (!linksMap.has(url)) {
const item: ReadItem = {
id: url,
source: 'bookmark',
type: 'external',
url,
title: title || fallbackTitleFromUrl(url),
summary,
image,
readingProgress: 0,
readingTimestamp: bookmark.added_at || bookmark.created_at
}
linksMap.set(url, item)
}
}
}
// Sort by most recent bookmark activity
return Array.from(linksMap.values()).sort((a, b) => {
const timeA = a.readingTimestamp || 0
const timeB = b.readingTimestamp || 0
return timeB - timeA
})
}

View File

@@ -0,0 +1,83 @@
import { ReadItem } from '../services/readsService'
/**
* Merges a ReadItem into a state map, returning whether the state changed.
* Uses most recent reading activity to determine precedence.
*/
export function mergeReadItem(
stateMap: Map<string, ReadItem>,
incoming: ReadItem
): boolean {
const existing = stateMap.get(incoming.id)
if (!existing) {
stateMap.set(incoming.id, incoming)
return true
}
// Always merge if incoming has reading progress data
const hasNewProgress = incoming.readingProgress !== undefined &&
(existing.readingProgress === undefined || existing.readingProgress !== incoming.readingProgress)
const hasNewMarkedAsRead = incoming.markedAsRead !== undefined && existing.markedAsRead === undefined
// Merge by taking the most recent reading activity
const existingTime = existing.readingTimestamp || existing.markedAt || 0
const incomingTime = incoming.readingTimestamp || incoming.markedAt || 0
if (incomingTime > existingTime || hasNewProgress || hasNewMarkedAsRead) {
// Keep existing data, but update with newer reading metadata
stateMap.set(incoming.id, {
...existing,
...incoming,
// Preserve event data if incoming doesn't have it
event: incoming.event || existing.event,
title: incoming.title || existing.title,
summary: incoming.summary || existing.summary,
image: incoming.image || existing.image,
published: incoming.published || existing.published,
author: incoming.author || existing.author,
// Always take reading progress if available
readingProgress: incoming.readingProgress !== undefined ? incoming.readingProgress : existing.readingProgress,
readingTimestamp: incomingTime > existingTime ? incoming.readingTimestamp : existing.readingTimestamp
})
return true
}
// If timestamps are equal but incoming has additional data, merge it
if (incomingTime === existingTime && ((!existing.event && incoming.event) || (!existing.title && incoming.title))) {
stateMap.set(incoming.id, {
...existing,
...incoming,
event: incoming.event || existing.event,
title: incoming.title || existing.title,
summary: incoming.summary || existing.summary,
image: incoming.image || existing.image,
published: incoming.published || existing.published,
author: incoming.author || existing.author
})
return true
}
return false
}
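Illustrative sketch, not part of the changeset: a small worked example of the merge rules above, where a bookmark-derived stub is later enriched by a reading-progress item without losing its metadata.
import { mergeReadItem } from './readItemMerge'
import { ReadItem } from '../services/readsService'
const map = new Map<string, ReadItem>()
mergeReadItem(map, {
  id: 'https://example.com/post', source: 'bookmark', type: 'external',
  url: 'https://example.com/post', title: 'Post', readingProgress: 0, readingTimestamp: 100
}) // → true (new entry)
mergeReadItem(map, {
  id: 'https://example.com/post', source: 'reading-progress', type: 'external',
  readingProgress: 0.4, readingTimestamp: 200
}) // → true (newer timestamp and new progress)
// The merged entry keeps title 'Post' and now carries readingProgress 0.4.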
/**
* Extracts a readable title from a URL when no title is available.
* Removes protocol, www, and shows domain + path.
*/
export function fallbackTitleFromUrl(url: string): string {
try {
const parsed = new URL(url)
let title = parsed.hostname.replace(/^www\./, '')
if (parsed.pathname && parsed.pathname !== '/') {
const path = parsed.pathname.slice(0, 40)
title += path.length < parsed.pathname.length ? path + '...' : path
}
return title
} catch {
// If URL parsing fails, just return the URL truncated
return url.length > 50 ? url.slice(0, 47) + '...' : url
}
}
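Illustrative sketch, not part of the changeset: example outputs of the helper above.
import { fallbackTitleFromUrl } from './readItemMerge'
console.log(fallbackTitleFromUrl('https://www.example.com/guides/nostr-reading'))
// → 'example.com/guides/nostr-reading'
console.log(fallbackTitleFromUrl('not a url but a string that is clearly longer than fifty characters'))
// → first 47 characters followed by '...'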

View File

@@ -0,0 +1,30 @@
import { ReadItem } from '../services/readsService'
import { ReadingProgressFilterType } from '../components/ReadingProgressFilters'

/**
 * Filters ReadItems by reading progress
 */
export function filterByReadingProgress(
  items: ReadItem[],
  filter: ReadingProgressFilterType
): ReadItem[] {
  return items.filter((item) => {
    const progress = item.readingProgress || 0
    const isMarked = item.markedAsRead || false

    switch (filter) {
      case 'unopened':
        return progress === 0 && !isMarked
      case 'started':
        return progress > 0 && progress <= 0.10 && !isMarked
      case 'reading':
        return progress > 0.10 && progress <= 0.94 && !isMarked
      case 'completed':
        return progress >= 0.95 || isMarked
      case 'all':
      default:
        return true
    }
  })
}
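
For reference, a quick sketch of how the thresholds above bucket sample items (values are illustrative; the cast to ReadItem[] sidesteps fields that don't matter here, and the './readingProgressFilter' path is hypothetical):

import { ReadItem } from '../services/readsService'
import { filterByReadingProgress } from './readingProgressFilter' // hypothetical path

const items = [
  { id: 'a', readingProgress: 0 },                      // unopened
  { id: 'b', readingProgress: 0.05 },                   // started (0 < p <= 0.10)
  { id: 'c', readingProgress: 0.5 },                    // reading (0.10 < p <= 0.94)
  { id: 'd', readingProgress: 0.97 },                   // completed (p >= 0.95)
  { id: 'e', readingProgress: 0.3, markedAsRead: true } // completed via markedAsRead
] as ReadItem[]

filterByReadingProgress(items, 'reading').map(i => i.id)   // ['c']
filterByReadingProgress(items, 'completed').map(i => i.id) // ['d', 'e']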

View File

@@ -0,0 +1,71 @@
import { Bookmark } from '../types/bookmarks'
import { ReadItem } from '../services/readsService'
import { classifyBookmarkType } from './bookmarkTypeClassifier'
import { KINDS } from '../config/kinds'
import { nip19 } from 'nostr-tools'

/**
 * Derives ReadItems from bookmarks for Nostr articles (kind:30023).
 * Returns items with type='article', using hydrated data when available.
 * Note: After hydration, article titles are in bookmark.content, metadata in tags.
 */
export function deriveReadsFromBookmarks(bookmarks: Bookmark[]): ReadItem[] {
  const readsMap = new Map<string, ReadItem>()
  const allBookmarks = bookmarks.flatMap(b => b.individualBookmarks || [])

  for (const bookmark of allBookmarks) {
    const bookmarkType = classifyBookmarkType(bookmark)

    // Only include articles (kind:30023)
    if (bookmarkType === 'article' && bookmark.kind === KINDS.BlogPost) {
      const coordinate = bookmark.id // coordinate format: kind:pubkey:identifier

      // Extract identifier from coordinate
      const parts = coordinate.split(':')
      const identifier = parts[2] || ''

      // Convert to naddr format (reading positions use naddr as ID)
      let naddr: string
      try {
        naddr = nip19.naddrEncode({
          kind: KINDS.BlogPost,
          pubkey: bookmark.pubkey,
          identifier
        })
      } catch {
        console.warn('Failed to encode naddr for bookmark:', coordinate)
        continue
      }

      // Extract metadata from tags (same as BookmarkItem does)
      const title = bookmark.content || 'Untitled'
      const image = bookmark.tags.find(t => t[0] === 'image')?.[1]
      const summary = bookmark.tags.find(t => t[0] === 'summary')?.[1]
      const published = bookmark.tags.find(t => t[0] === 'published_at')?.[1]

      const item: ReadItem = {
        id: naddr, // Use naddr format to match reading positions
        source: 'bookmark',
        type: 'article',
        readingProgress: 0,
        readingTimestamp: bookmark.added_at || bookmark.created_at,
        title,
        summary,
        image,
        published: published ? parseInt(published, 10) : undefined,
        author: bookmark.pubkey
      }

      readsMap.set(naddr, item)
    }
  }

  // Sort by most recent bookmark activity
  return Array.from(readsMap.values()).sort((a, b) => {
    const timeA = a.readingTimestamp || 0
    const timeB = b.readingTimestamp || 0
    return timeB - timeA
  })
}
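
The coordinate-to-naddr conversion is the non-obvious step here; stripped down, it amounts to the following (the pubkey below is a placeholder 64-character hex string):

import { nip19 } from 'nostr-tools'

const coordinate = '30023:' + 'ab'.repeat(32) + ':my-article-slug' // kind:pubkey:identifier
const [kind, pubkey, identifier] = coordinate.split(':')
const naddr = nip19.naddrEncode({ kind: Number(kind), pubkey, identifier })
// naddr is a bech32 string starting with 'naddr1', matching the id used by reading-position events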

src/vite-env.d.ts
View File

@@ -8,3 +8,11 @@ declare module '*.svg?raw' {
  const content: string
  export default content
}

// Build-time defines injected by Vite in vite.config.ts
declare const __APP_VERSION__: string
declare const __GIT_COMMIT__: string
declare const __GIT_BRANCH__: string
declare const __BUILD_TIME__: string
declare const __GIT_COMMIT_URL__: string
declare const __RELEASE_URL__: string
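
With these declarations in place, components can read the injected constants directly; a minimal sketch (the component name and markup are illustrative, not the actual footer in this repo):

// VersionInfo.tsx (illustrative)
export function VersionInfo() {
  return (
    <div className="text-xs opacity-60">
      v{__APP_VERSION__} · {__GIT_COMMIT__.slice(0, 7)} ({__GIT_BRANCH__}) · built {__BUILD_TIME__}
    </div>
  )
}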

View File

@@ -1,5 +1,16 @@
{
  "rewrites": [
    {
      "source": "/a/:naddr",
      "has": [
        {
          "type": "header",
          "key": "user-agent",
          "value": ".*(bot|crawl|spider|slurp|facebook|twitter|linkedin|whatsapp|telegram|slack|discord|preview).*"
        }
      ],
      "destination": "/api/article-og?naddr=:naddr"
    },
    {
      "source": "/(.*)",
      "destination": "/index.html"

View File

@@ -1,8 +1,101 @@
/* eslint-env node */
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'
import { VitePWA } from 'vite-plugin-pwa'
import { readFileSync } from 'node:fs'
import { execSync } from 'node:child_process'

function getGitMetadata() {
  const envSha = process.env.VERCEL_GIT_COMMIT_SHA || ''
  const envRef = process.env.VERCEL_GIT_COMMIT_REF || ''
  let commit = envSha
  let branch = envRef
  try {
    if (!commit) commit = execSync('git rev-parse HEAD', { stdio: ['ignore', 'pipe', 'ignore'] }).toString().trim()
  } catch {
    // ignore
  }
  try {
    if (!branch) branch = execSync('git rev-parse --abbrev-ref HEAD', { stdio: ['ignore', 'pipe', 'ignore'] }).toString().trim()
  } catch {
    // ignore
  }
  return { commit, branch }
}

function getPackageVersion() {
  try {
    const pkg = JSON.parse(readFileSync(new URL('./package.json', import.meta.url)).toString())
    return pkg.version as string
  } catch {
    return '0.0.0'
  }
}

const { commit, branch } = getGitMetadata()
const version = getPackageVersion()
const buildTime = new Date().toISOString()

function getReleaseUrl(version: string): string {
  if (!version) return ''
  const provider = process.env.VERCEL_GIT_PROVIDER || ''
  const owner = process.env.VERCEL_GIT_REPO_OWNER || ''
  const slug = process.env.VERCEL_GIT_REPO_SLUG || ''
  if (provider.toLowerCase() === 'github' && owner && slug) {
    return `https://github.com/${owner}/${slug}/releases/tag/v${version}`
  }
  try {
    const remote = execSync('git config --get remote.origin.url', { stdio: ['ignore', 'pipe', 'ignore'] }).toString().trim()
    if (remote.includes('github.com')) {
      // git@github.com:owner/repo.git or https://github.com/owner/repo.git
      const https = remote.startsWith('git@')
        ? `https://github.com/${remote.split(':')[1]}`
        : remote
      const cleaned = https.replace(/\.git$/, '')
      return `${cleaned}/releases/tag/v${version}`
    }
  } catch {
    // ignore
  }
  return ''
}

function getCommitUrl(commit: string): string {
  if (!commit) return ''
  const provider = process.env.VERCEL_GIT_PROVIDER || ''
  const owner = process.env.VERCEL_GIT_REPO_OWNER || ''
  const slug = process.env.VERCEL_GIT_REPO_SLUG || ''
  if (provider.toLowerCase() === 'github' && owner && slug) {
    return `https://github.com/${owner}/${slug}/commit/${commit}`
  }
  try {
    const remote = execSync('git config --get remote.origin.url', { stdio: ['ignore', 'pipe', 'ignore'] }).toString().trim()
    if (remote.includes('github.com')) {
      // git@github.com:owner/repo.git or https://github.com/owner/repo.git
      const https = remote.startsWith('git@')
        ? `https://github.com/${remote.split(':')[1]}`
        : remote
      const cleaned = https.replace(/\.git$/, '')
      return `${cleaned}/commit/${commit}`
    }
  } catch {
    // ignore
  }
  return ''
}

const releaseUrl = getReleaseUrl(version)
const commitUrl = getCommitUrl(commit)

export default defineConfig({
  define: {
    __APP_VERSION__: JSON.stringify(version),
    __GIT_COMMIT__: JSON.stringify(commit),
    __GIT_BRANCH__: JSON.stringify(branch),
    __BUILD_TIME__: JSON.stringify(buildTime),
    __GIT_COMMIT_URL__: JSON.stringify(commitUrl),
    __RELEASE_URL__: JSON.stringify(releaseUrl)
  },
  plugins: [
    react(),
    VitePWA({
@@ -48,7 +141,7 @@ export default defineConfig({
    mainFields: ['module', 'jsnext:main', 'jsnext', 'main']
  },
  optimizeDeps: {
    include: ['applesauce-core', 'applesauce-factory', 'applesauce-relay', 'applesauce-react'],
    include: ['applesauce-core', 'applesauce-factory', 'applesauce-relay', 'applesauce-react', 'applesauce-accounts', 'applesauce-signers'],
    esbuildOptions: {
      resolveExtensions: ['.js', '.ts', '.tsx', '.json']
    }
@@ -65,7 +158,7 @@ export default defineConfig({
    }
  },
  ssr: {
    noExternal: ['applesauce-core', 'applesauce-factory', 'applesauce-relay']
    noExternal: ['applesauce-core', 'applesauce-factory', 'applesauce-relay', 'applesauce-accounts', 'applesauce-signers']
  }
})
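
The remote-URL handling in getReleaseUrl/getCommitUrl reduces to one normalization; as a standalone sketch (the helper name is hypothetical, the logic mirrors the string handling above):

function normalizeGitHubRemote(remote: string): string {
  // 'git@github.com:owner/repo.git'     -> 'https://github.com/owner/repo'
  // 'https://github.com/owner/repo.git' -> 'https://github.com/owner/repo'
  const https = remote.startsWith('git@')
    ? `https://github.com/${remote.split(':')[1]}`
    : remote
  return https.replace(/\.git$/, '')
}

normalizeGitHubRemote('git@github.com:owner/repo.git') + '/commit/abc1234'
// 'https://github.com/owner/repo/commit/abc1234'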