Mirror of https://github.com/aljazceru/notedeck.git (synced 2025-12-17 08:44:20 +01:00)
Switch to unified timeline cache via TimelineKinds
This is a fairly large rewrite that unifies our threads, timelines, and profiles. All timelines now have a MultiSubscriber, and they can be added to and removed from columns just like threads and profiles.

Signed-off-by: William Casarin <jb55@jb55.com>
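As a rough mental model of the change (a minimal sketch, not the actual notedeck types beyond the names TimelineKind and TimelineCache): columns stop owning timelines and instead route to a TimelineKind, while a single app-wide cache owns one timeline per kind and reference-counts how many views have it open.

use std::collections::HashMap;

// Illustrative stand-ins only; the real TimelineKind/TimelineCache carry
// filters, tabs and a MultiSubscriber rather than a bare counter.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
enum TimelineKind {
    Contacts([u8; 32]),
    Profile([u8; 32]),
}

#[derive(Default)]
struct TimelineCache {
    timelines: HashMap<TimelineKind, u32>, // kind -> subscriber count
}

impl TimelineCache {
    // First open creates the timeline (and would start its subscriptions);
    // later opens just bump the count.
    fn open(&mut self, kind: &TimelineKind) {
        *self.timelines.entry(kind.clone()).or_insert(0) += 1;
    }

    // Pop decrements; the last pop removes the timeline (and would tear the
    // subscriptions down).
    fn pop(&mut self, kind: &TimelineKind) {
        if let Some(count) = self.timelines.get_mut(kind) {
            *count -= 1;
            if *count == 0 {
                self.timelines.remove(kind);
            }
        }
    }
}

fn main() {
    let mut cache = TimelineCache::default();
    let kind = TimelineKind::Profile([0u8; 32]);
    cache.open(&kind); // column 1
    cache.open(&kind); // column 2 shares the same timeline
    cache.pop(&kind);
    cache.pop(&kind); // timeline dropped after the last column closes
    assert!(cache.timelines.is_empty());
}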

.gitignore (vendored): 2 lines changed

@@ -1,6 +1,8 @@
 .buildcmd
 build.log
 perf.data
+rusty-tags.vi
+notedeck-settings
 perf.data.old
 crates/notedeck_chrome/android/app/build
 .privenv

Cargo.lock (generated): 3 lines changed

@@ -4531,6 +4531,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 [[package]]
 name = "tokenator"
 version = "0.1.0"
+dependencies = [
+ "hex",
+]
 
 [[package]]
 name = "tokio"

Makefile: 2 lines changed

@@ -7,7 +7,7 @@ check:
	cargo check

tags: fake
-	find . -type d -name target -prune -o -type f -name '*.rs' -print | xargs ctags
+	rusty-tags vi

jni: fake
	cargo ndk --target arm64-v8a -o $(ANDROID_DIR)/app/src/main/jniLibs/ build --profile release

@@ -9,7 +9,7 @@ pub struct NoteId([u8; 32]);
 
 impl fmt::Debug for NoteId {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.hex())
+        write!(f, "NoteId({})", self.hex())
     }
 }
 

@@ -414,6 +414,12 @@ impl Accounts {
             .or_else(|| self.accounts.iter().find_map(|a| a.to_full()))
     }
 
+    /// Get the selected account's pubkey as bytes. Common operation so
+    /// we make it a helper here.
+    pub fn selected_account_pubkey_bytes(&self) -> Option<&[u8; 32]> {
+        self.get_selected_account().map(|kp| kp.pubkey.bytes())
+    }
+
     pub fn get_selected_account(&self) -> Option<&UserAccount> {
         if let Some(account_index) = self.currently_selected_account {
             if let Some(account) = self.get_account(account_index) {

@@ -35,6 +35,9 @@ impl From<String> for Error {
 pub enum FilterError {
     #[error("empty contact list")]
     EmptyContactList,
+
+    #[error("filter not ready")]
+    FilterNotReady,
 }
 
 #[derive(Debug, Eq, PartialEq, Copy, Clone, thiserror::Error)]

@@ -1,6 +1,5 @@
 use crate::error::{Error, FilterError};
 use crate::note::NoteRef;
-use crate::Result;
 use nostrdb::{Filter, FilterBuilder, Note, Subscription};
 use std::collections::HashMap;
 use tracing::{debug, warn};

@@ -24,7 +23,7 @@ pub struct FilterStates {
 }
 
 impl FilterStates {
-    pub fn get(&mut self, relay: &str) -> &FilterState {
+    pub fn get_mut(&mut self, relay: &str) -> &FilterState {
         // if our initial state is ready, then just use that
         if let FilterState::Ready(_) = self.initial_state {
             &self.initial_state

@@ -195,7 +194,7 @@ pub fn last_n_per_pubkey_from_tags(
     note: &Note,
     kind: u64,
     notes_per_pubkey: u64,
-) -> Result<Vec<Filter>> {
+) -> Result<Vec<Filter>, Error> {
     let mut filters: Vec<Filter> = vec![];
 
     for tag in note.tags() {

@@ -250,7 +249,7 @@ pub fn filter_from_tags(
     note: &Note,
     add_pubkey: Option<&[u8; 32]>,
     with_hashtags: bool,
-) -> Result<FilteredTags> {
+) -> Result<FilteredTags, Error> {
     let mut author_filter = Filter::new();
     let mut hashtag_filter = Filter::new();
     let mut author_res: Option<FilterBuilder> = None;

@@ -338,3 +337,11 @@ pub fn filter_from_tags(
         hashtags: hashtag_res,
     })
 }
+
+pub fn make_filters_since(raw: &[Filter], since: u64) -> Vec<Filter> {
+    let mut filters = Vec::with_capacity(raw.len());
+    for builder in raw {
+        filters.push(Filter::copy_from(builder).since(since).build());
+    }
+    filters
+}
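The new make_filters_since helper above is what a since-optimized re-subscription can lean on: copy each existing filter and stamp a `since` onto it so relays only return notes newer than what is already stored locally. A hedged fragment, with the surrounding bindings assumed rather than shown:

// Assumed context: `filters: &[Filter]` for this timeline, `latest` is the
// newest created_at we already have, `pool` is the RelayPool and `subid` a
// previously generated subscription id.
let since_filters = make_filters_since(filters, latest + 1);
pool.subscribe(subid.clone(), since_filters);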

@@ -3,6 +3,7 @@ use enostr::NoteId;
 use nostrdb::{Ndb, Note, NoteKey, QueryResult, Transaction};
 use std::borrow::Borrow;
 use std::cmp::Ordering;
+use std::fmt;
 
 #[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)]
 pub struct NoteRef {

@@ -10,9 +11,15 @@ pub struct NoteRef {
     pub created_at: u64,
 }
 
-#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
+#[derive(Clone, Copy, Eq, PartialEq, Hash)]
 pub struct RootNoteIdBuf([u8; 32]);
 
+impl fmt::Debug for RootNoteIdBuf {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "RootNoteIdBuf({})", self.hex())
+    }
+}
+
 #[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
 pub struct RootNoteId<'a>(&'a [u8; 32]);
 

@@ -183,21 +183,20 @@ mod tests {
             .column(0)
             .router()
             .top()
-            .timeline_id();
+            .timeline_id()
+            .unwrap();
 
         let tl2 = app
             .columns(app_ctx.accounts)
             .column(1)
             .router()
             .top()
-            .timeline_id();
+            .timeline_id()
+            .unwrap();
 
-        assert_eq!(tl1.is_some(), true);
-        assert_eq!(tl2.is_some(), true);
-        let timelines = app.columns(app_ctx.accounts).timelines();
-        assert!(timelines[0].kind.is_notifications());
-        assert!(timelines[1].kind.is_contacts());
+        let timelines = app.timeline_cache.timelines.len() == 2;
+        assert!(app.timeline_cache.timelines.get(&tl1).is_some());
+        assert!(app.timeline_cache.timelines.get(&tl2).is_some());
 
         rmrf(tmpdir);
     }

@@ -1,7 +1,7 @@
 use crate::{
     column::Columns,
     route::{Route, Router},
-    timeline::{TimelineCache, TimelineCacheKey},
+    timeline::{ThreadSelection, TimelineCache, TimelineKind},
 };
 
 use enostr::{NoteId, Pubkey, RelayPool};

@@ -17,13 +17,13 @@ pub enum NoteAction {
     OpenProfile(Pubkey),
 }
 
-pub struct NewNotes<'a> {
-    pub id: TimelineCacheKey<'a>,
+pub struct NewNotes {
+    pub id: TimelineKind,
     pub notes: Vec<NoteKey>,
 }
 
-pub enum TimelineOpenResult<'a> {
-    NewNotes(NewNotes<'a>),
+pub enum TimelineOpenResult {
+    NewNotes(NewNotes),
 }
 
 /// open_thread is called when a note is selected and we need to navigate

@@ -32,16 +32,18 @@ pub enum TimelineOpenResult<'a> {
 /// the thread view. We don't have a concept of model/view/controller etc
 /// in egui, but this is the closest thing to that.
 #[allow(clippy::too_many_arguments)]
-fn open_thread<'txn>(
+fn open_thread(
     ndb: &Ndb,
-    txn: &'txn Transaction,
+    txn: &Transaction,
     router: &mut Router<Route>,
     note_cache: &mut NoteCache,
     pool: &mut RelayPool,
     timeline_cache: &mut TimelineCache,
-    selected_note: &'txn [u8; 32],
-) -> Option<TimelineOpenResult<'txn>> {
-    router.route_to(Route::thread(NoteId::new(selected_note.to_owned())));
+    selected_note: &[u8; 32],
+) -> Option<TimelineOpenResult> {
+    router.route_to(Route::thread(
+        ThreadSelection::from_note_id(ndb, note_cache, txn, NoteId::new(*selected_note)).ok()?,
+    ));
 
     match root_note_id_from_selected_id(ndb, note_cache, txn, selected_note) {
         Ok(root_id) => timeline_cache.open(

@@ -49,7 +51,7 @@ fn open_thread<'txn>(
             note_cache,
             txn,
             pool,
-            TimelineCacheKey::thread(root_id),
+            &TimelineKind::Thread(ThreadSelection::from_root_id(root_id.to_owned())),
         ),
 
         Err(RootIdError::NoteNotFound) => {

@@ -72,18 +74,15 @@ fn open_thread<'txn>(
 
 impl NoteAction {
     #[allow(clippy::too_many_arguments)]
-    pub fn execute<'txn, 'a>(
-        &'a self,
+    pub fn execute(
+        &self,
         ndb: &Ndb,
         router: &mut Router<Route>,
         timeline_cache: &mut TimelineCache,
         note_cache: &mut NoteCache,
         pool: &mut RelayPool,
-        txn: &'txn Transaction,
-    ) -> Option<TimelineOpenResult<'txn>>
-    where
-        'a: 'txn,
-    {
+        txn: &Transaction,
+    ) -> Option<TimelineOpenResult> {
         match self {
             NoteAction::Reply(note_id) => {
                 router.route_to(Route::reply(*note_id));

@@ -102,13 +101,7 @@ impl NoteAction {
 
             NoteAction::OpenProfile(pubkey) => {
                 router.route_to(Route::profile(*pubkey));
-                timeline_cache.open(
-                    ndb,
-                    note_cache,
-                    txn,
-                    pool,
-                    TimelineCacheKey::profile(pubkey.as_ref()),
-                )
+                timeline_cache.open(ndb, note_cache, txn, pool, &TimelineKind::Profile(*pubkey))
             }
 
             NoteAction::Quote(note_id) => {

@@ -138,8 +131,8 @@ impl NoteAction {
     }
 }
 
-impl<'a> TimelineOpenResult<'a> {
-    pub fn new_notes(notes: Vec<NoteKey>, id: TimelineCacheKey<'a>) -> Self {
+impl TimelineOpenResult {
+    pub fn new_notes(notes: Vec<NoteKey>, id: TimelineKind) -> Self {
         Self::NewNotes(NewNotes::new(notes, id))
     }
 

@@ -160,8 +153,8 @@ impl<'a> TimelineOpenResult<'a> {
     }
 }
 
-impl<'a> NewNotes<'a> {
-    pub fn new(notes: Vec<NoteKey>, id: TimelineCacheKey<'a>) -> Self {
+impl NewNotes {
+    pub fn new(notes: Vec<NoteKey>, id: TimelineKind) -> Self {
         NewNotes { notes, id }
     }
 

@@ -175,46 +168,18 @@ impl<'a> NewNotes<'a> {
         unknown_ids: &mut UnknownIds,
         note_cache: &mut NoteCache,
     ) {
-        match self.id {
-            TimelineCacheKey::Profile(pubkey) => {
-                let profile = if let Some(profile) = timeline_cache.profiles.get_mut(pubkey.bytes())
-                {
-                    profile
-                } else {
-                    return;
-                };
-
-                let reversed = false;
-                if let Err(err) = profile.timeline.insert(
-                    &self.notes,
-                    ndb,
-                    txn,
-                    unknown_ids,
-                    note_cache,
-                    reversed,
-                ) {
-                    error!("error inserting notes into profile timeline: {err}")
-                }
-            }
-
-            TimelineCacheKey::Thread(root_id) => {
-                // threads are chronological, ie reversed from reverse-chronological, the default.
-                let reversed = true;
-                let thread = if let Some(thread) = timeline_cache.threads.get_mut(root_id.bytes()) {
-                    thread
-                } else {
-                    return;
-                };
-
-                if let Err(err) =
-                    thread
-                        .timeline
-                        .insert(&self.notes, ndb, txn, unknown_ids, note_cache, reversed)
-                {
-                    error!("error inserting notes into thread timeline: {err}")
-                }
-            }
+        let reversed = matches!(&self.id, TimelineKind::Thread(_));
+
+        let timeline = if let Some(profile) = timeline_cache.timelines.get_mut(&self.id) {
+            profile
+        } else {
+            error!("NewNotes: could not get timeline for key {}", self.id);
+            return;
+        };
+
+        if let Err(err) = timeline.insert(&self.notes, ndb, txn, unknown_ids, note_cache, reversed)
+        {
+            error!("error inserting notes into profile timeline: {err}")
         }
     }
 }
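Dropping the 'txn lifetime from TimelineOpenResult is what lets callers hold the result past the transaction borrow. The pattern used elsewhere in this commit looks roughly like the fragment below, with `ndb`, `note_cache`, `pool`, `unknown_ids`, `txn` and `kind` assumed to be in scope:

// Open (or ref-count) the timeline for `kind`, then merge any locally known
// notes into it; `open` returns an owned TimelineOpenResult, so borrowing
// timeline_cache mutably a second time for process() is fine.
if let Some(result) = timeline_cache.open(ndb, note_cache, &txn, pool, kind) {
    result.process(ndb, note_cache, &txn, &mut timeline_cache, unknown_ids);
}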

@@ -111,7 +111,7 @@ fn try_process_event(
             timeline::send_initial_timeline_filters(
                 app_ctx.ndb,
                 damus.since_optimize,
-                get_active_columns_mut(app_ctx.accounts, &mut damus.decks_cache),
+                &mut damus.timeline_cache,
                 &mut damus.subscriptions,
                 app_ctx.pool,
                 &ev.relay,

@@ -127,30 +127,16 @@ fn try_process_event(
         }
     }
 
-    let current_columns = get_active_columns_mut(app_ctx.accounts, &mut damus.decks_cache);
-    let n_timelines = current_columns.timelines().len();
-    for timeline_ind in 0..n_timelines {
-        let is_ready = {
-            let timeline = &mut current_columns.timelines[timeline_ind];
-            timeline::is_timeline_ready(
-                app_ctx.ndb,
-                app_ctx.pool,
-                app_ctx.note_cache,
-                timeline,
-                app_ctx
-                    .accounts
-                    .get_selected_account()
-                    .as_ref()
-                    .map(|sa| &sa.pubkey),
-            )
-        };
+    for (_kind, timeline) in damus.timeline_cache.timelines.iter_mut() {
+        let is_ready =
+            timeline::is_timeline_ready(app_ctx.ndb, app_ctx.pool, app_ctx.note_cache, timeline);
 
         if is_ready {
             let txn = Transaction::new(app_ctx.ndb).expect("txn");
             // only thread timelines are reversed
             let reversed = false;
 
-            if let Err(err) = current_columns.timelines_mut()[timeline_ind].poll_notes_into_view(
+            if let Err(err) = timeline.poll_notes_into_view(
                 app_ctx.ndb,
                 &txn,
                 app_ctx.unknown_ids,

@@ -193,7 +179,7 @@ fn update_damus(damus: &mut Damus, app_ctx: &mut AppContext<'_>, ctx: &egui::Con
         if let Err(err) = timeline::setup_initial_nostrdb_subs(
             app_ctx.ndb,
             app_ctx.note_cache,
-            &mut damus.decks_cache,
+            &mut damus.timeline_cache,
         ) {
             warn!("update_damus init: {err}");
         }

@@ -208,15 +194,16 @@ fn update_damus(damus: &mut Damus, app_ctx: &mut AppContext<'_>, ctx: &egui::Con
 }
 
 fn handle_eose(
-    damus: &mut Damus,
+    subscriptions: &Subscriptions,
+    timeline_cache: &mut TimelineCache,
     ctx: &mut AppContext<'_>,
     subid: &str,
     relay_url: &str,
 ) -> Result<()> {
-    let sub_kind = if let Some(sub_kind) = damus.subscriptions().get(subid) {
+    let sub_kind = if let Some(sub_kind) = subscriptions.subs.get(subid) {
         sub_kind
     } else {
-        let n_subids = damus.subscriptions().len();
+        let n_subids = subscriptions.subs.len();
         warn!(
             "got unknown eose subid {}, {} tracked subscriptions",
             subid, n_subids

@@ -224,7 +211,7 @@ fn handle_eose(
         return Ok(());
     };
 
-    match *sub_kind {
+    match sub_kind {
         SubKind::Timeline(_) => {
             // eose on timeline? whatevs
         }

@@ -233,7 +220,7 @@ fn handle_eose(
             unknowns::update_from_columns(
                 &txn,
                 ctx.unknown_ids,
-                get_active_columns(ctx.accounts, &damus.decks_cache),
+                timeline_cache,
                 ctx.ndb,
                 ctx.note_cache,
             );

@@ -250,10 +237,7 @@ fn handle_eose(
         }
 
         SubKind::FetchingContactList(timeline_uid) => {
-            let timeline = if let Some(tl) =
-                get_active_columns_mut(ctx.accounts, &mut damus.decks_cache)
-                    .find_timeline_mut(timeline_uid)
-            {
+            let timeline = if let Some(tl) = timeline_cache.timelines.get_mut(timeline_uid) {
                 tl
             } else {
                 error!(

@@ -263,7 +247,7 @@ fn handle_eose(
                 return Ok(());
             };
 
-            let filter_state = timeline.filter.get(relay_url);
+            let filter_state = timeline.filter.get_mut(relay_url);
 
             // If this request was fetching a contact list, our filter
             // state should be "FetchingRemote". We look at the local

@@ -325,7 +309,13 @@ fn process_message(damus: &mut Damus, ctx: &mut AppContext<'_>, relay: &str, msg
         RelayMessage::Notice(msg) => warn!("Notice from {}: {}", relay, msg),
         RelayMessage::OK(cr) => info!("OK {:?}", cr),
         RelayMessage::Eose(sid) => {
-            if let Err(err) = handle_eose(damus, ctx, sid, relay) {
+            if let Err(err) = handle_eose(
+                &damus.subscriptions,
+                &mut damus.timeline_cache,
+                ctx,
+                sid,
+                relay,
+            ) {
                 error!("error handling eose: {}", err);
             }
         }

@@ -367,39 +357,58 @@ impl Damus {
     pub fn new(ctx: &mut AppContext<'_>, args: &[String]) -> Self {
         // arg parsing
 
-        let parsed_args = ColumnsArgs::parse(args);
+        let parsed_args = ColumnsArgs::parse(
+            args,
+            ctx.accounts
+                .get_selected_account()
+                .as_ref()
+                .map(|kp| &kp.pubkey),
+        );
 
         let account = ctx
             .accounts
             .get_selected_account()
             .as_ref()
             .map(|a| a.pubkey.bytes());
 
+        let mut timeline_cache = TimelineCache::default();
         let tmp_columns = !parsed_args.columns.is_empty();
         let decks_cache = if tmp_columns {
             info!("DecksCache: loading from command line arguments");
             let mut columns: Columns = Columns::new();
+            let txn = Transaction::new(ctx.ndb).unwrap();
             for col in parsed_args.columns {
-                if let Some(timeline) = col.into_timeline(ctx.ndb, account) {
-                    columns.add_new_timeline_column(timeline);
+                let timeline_kind = col.into_timeline_kind();
+                if let Some(add_result) = columns.add_new_timeline_column(
+                    &mut timeline_cache,
+                    &txn,
+                    ctx.ndb,
+                    ctx.note_cache,
+                    ctx.pool,
+                    &timeline_kind,
+                ) {
+                    add_result.process(
+                        ctx.ndb,
+                        ctx.note_cache,
+                        &txn,
+                        &mut timeline_cache,
+                        ctx.unknown_ids,
+                    );
                 }
             }
 
             columns_to_decks_cache(columns, account)
-        } else if let Some(decks_cache) = crate::storage::load_decks_cache(ctx.path, ctx.ndb) {
+        } else if let Some(decks_cache) =
+            crate::storage::load_decks_cache(ctx.path, ctx.ndb, &mut timeline_cache)
+        {
             info!(
                 "DecksCache: loading from disk {}",
                 crate::storage::DECKS_CACHE_FILE
             );
             decks_cache
-        } else if let Some(cols) = storage::deserialize_columns(ctx.path, ctx.ndb, account) {
-            info!(
-                "DecksCache: loading from disk at depreciated location {}",
-                crate::storage::COLUMNS_FILE
-            );
-            columns_to_decks_cache(cols, account)
         } else {
             info!("DecksCache: creating new with demo configuration");
-            let mut cache = DecksCache::new_with_demo_config(ctx.ndb);
+            let mut cache = DecksCache::new_with_demo_config(&mut timeline_cache, ctx);
             for account in ctx.accounts.get_accounts() {
                 cache.add_deck_default(account.pubkey);
             }

@@ -414,7 +423,7 @@ impl Damus {
         Self {
             subscriptions: Subscriptions::default(),
             since_optimize: parsed_args.since_optimize,
-            timeline_cache: TimelineCache::default(),
+            timeline_cache,
             drafts: Drafts::default(),
             state: DamusState::Initializing,
             textmode: parsed_args.textmode,

@@ -565,7 +574,8 @@ fn timelines_view(ui: &mut egui::Ui, sizes: Size, app: &mut Damus, ctx: &mut App
 
     let mut save_cols = false;
     if let Some(action) = side_panel_action {
-        save_cols = save_cols || action.process(&mut app.decks_cache, ctx);
+        save_cols =
+            save_cols || action.process(&mut app.timeline_cache, &mut app.decks_cache, ctx);
     }
 
     let num_cols = app.columns(ctx.accounts).num_columns();

@@ -1,8 +1,5 @@
-use notedeck::FilterState;
-
-use crate::timeline::{PubkeySource, Timeline, TimelineKind, TimelineTab};
+use crate::timeline::TimelineKind;
 use enostr::{Filter, Pubkey};
-use nostrdb::Ndb;
 use tracing::{debug, error, info};
 
 pub struct ColumnsArgs {

@@ -12,7 +9,7 @@ pub struct ColumnsArgs {
 }
 
 impl ColumnsArgs {
-    pub fn parse(args: &[String]) -> Self {
+    pub fn parse(args: &[String], deck_author: Option<&Pubkey>) -> Self {
         let mut res = Self {
             columns: vec![],
             since_optimize: true,

@@ -55,40 +52,48 @@ impl ColumnsArgs {
                 if let Ok(pubkey) = Pubkey::parse(rest) {
                     info!("contact column for user {}", pubkey.hex());
                     res.columns
-                        .push(ArgColumn::Timeline(TimelineKind::contact_list(
-                            PubkeySource::Explicit(pubkey),
-                        )))
+                        .push(ArgColumn::Timeline(TimelineKind::contact_list(pubkey)))
                 } else {
                     error!("error parsing contacts pubkey {}", rest);
                     continue;
                 }
             } else if column_name == "contacts" {
-                res.columns
-                    .push(ArgColumn::Timeline(TimelineKind::contact_list(
-                        PubkeySource::DeckAuthor,
-                    )))
+                if let Some(deck_author) = deck_author {
+                    res.columns
+                        .push(ArgColumn::Timeline(TimelineKind::contact_list(
+                            deck_author.to_owned(),
+                        )))
+                } else {
+                    panic!("No accounts available, could not handle implicit pubkey contacts column")
+                }
             } else if let Some(notif_pk_str) = column_name.strip_prefix("notifications:") {
                 if let Ok(pubkey) = Pubkey::parse(notif_pk_str) {
                     info!("got notifications column for user {}", pubkey.hex());
                     res.columns
-                        .push(ArgColumn::Timeline(TimelineKind::notifications(
-                            PubkeySource::Explicit(pubkey),
-                        )))
+                        .push(ArgColumn::Timeline(TimelineKind::notifications(pubkey)))
                 } else {
                     error!("error parsing notifications pubkey {}", notif_pk_str);
                     continue;
                 }
             } else if column_name == "notifications" {
                 debug!("got notification column for default user");
-                res.columns
-                    .push(ArgColumn::Timeline(TimelineKind::notifications(
-                        PubkeySource::DeckAuthor,
-                    )))
+                if let Some(deck_author) = deck_author {
+                    res.columns
+                        .push(ArgColumn::Timeline(TimelineKind::notifications(
+                            deck_author.to_owned(),
+                        )));
+                } else {
+                    panic!("Tried to push notifications timeline with no available users");
+                }
             } else if column_name == "profile" {
                 debug!("got profile column for default user");
-                res.columns.push(ArgColumn::Timeline(TimelineKind::profile(
-                    PubkeySource::DeckAuthor,
-                )))
+                if let Some(deck_author) = deck_author {
+                    res.columns.push(ArgColumn::Timeline(TimelineKind::profile(
+                        deck_author.to_owned(),
+                    )));
+                } else {
+                    panic!("Tried to push profile timeline with no available users");
+                }
             } else if column_name == "universe" {
                 debug!("got universe column");
                 res.columns

@@ -96,9 +101,8 @@ impl ColumnsArgs {
             } else if let Some(profile_pk_str) = column_name.strip_prefix("profile:") {
                 if let Ok(pubkey) = Pubkey::parse(profile_pk_str) {
                     info!("got profile column for user {}", pubkey.hex());
-                    res.columns.push(ArgColumn::Timeline(TimelineKind::profile(
-                        PubkeySource::Explicit(pubkey),
-                    )))
+                    res.columns
+                        .push(ArgColumn::Timeline(TimelineKind::profile(pubkey)))
                 } else {
                     error!("error parsing profile pubkey {}", profile_pk_str);
                     continue;

@@ -146,14 +150,13 @@ pub enum ArgColumn {
 }
 
 impl ArgColumn {
-    pub fn into_timeline(self, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option<Timeline> {
+    pub fn into_timeline_kind(self) -> TimelineKind {
         match self {
-            ArgColumn::Generic(filters) => Some(Timeline::new(
-                TimelineKind::Generic,
-                FilterState::ready(filters),
-                TimelineTab::full_tabs(),
-            )),
-            ArgColumn::Timeline(tk) => tk.into_timeline(ndb, user),
+            ArgColumn::Generic(_filters) => {
+                // TODO: fix generic filters by referencing some filter map
+                TimelineKind::Generic(0)
+            }
+            ArgColumn::Timeline(tk) => tk,
         }
     }
 }

@@ -1,8 +1,12 @@
-use crate::route::{Route, Router};
-use crate::timeline::{Timeline, TimelineId};
-use indexmap::IndexMap;
+use crate::{
+    actionbar::TimelineOpenResult,
+    route::{Route, Router},
+    timeline::{Timeline, TimelineCache, TimelineKind},
+};
+use enostr::RelayPool;
+use nostrdb::{Ndb, Transaction};
+use notedeck::NoteCache;
 use std::iter::Iterator;
-use std::sync::atomic::{AtomicU32, Ordering};
 use tracing::warn;
 
 #[derive(Clone)]

@@ -28,36 +32,29 @@ impl Column {
 #[derive(Default)]
 pub struct Columns {
     /// Columns are simply routers into settings, timelines, etc
-    columns: IndexMap<u32, Column>,
-
-    /// Timeline state is not tied to routing logic separately, so that
-    /// different columns can navigate to and from settings to timelines,
-    /// etc.
-    pub timelines: IndexMap<u32, Timeline>,
+    columns: Vec<Column>,
 
     /// The selected column for key navigation
     selected: i32,
 }
-static UIDS: AtomicU32 = AtomicU32::new(0);
 
 impl Columns {
     pub fn new() -> Self {
         Columns::default()
     }
 
-    pub fn add_new_timeline_column(&mut self, timeline: Timeline) {
-        let id = Self::get_new_id();
-        let routes = vec![Route::timeline(timeline.id)];
-        self.timelines.insert(id, timeline);
-        self.columns.insert(id, Column::new(routes));
-    }
-
-    pub fn add_timeline_to_column(&mut self, col: usize, timeline: Timeline) {
-        let col_id = self.get_column_id_at_index(col);
-        self.column_mut(col)
-            .router_mut()
-            .route_to_replaced(Route::timeline(timeline.id));
-        self.timelines.insert(col_id, timeline);
+    pub fn add_new_timeline_column(
+        &mut self,
+        timeline_cache: &mut TimelineCache,
+        txn: &Transaction,
+        ndb: &Ndb,
+        note_cache: &mut NoteCache,
+        pool: &mut RelayPool,
+        kind: &TimelineKind,
+    ) -> Option<TimelineOpenResult> {
+        self.columns
+            .push(Column::new(vec![Route::timeline(kind.to_owned())]));
+        timeline_cache.open(ndb, note_cache, txn, pool, kind)
     }
 
     pub fn new_column_picker(&mut self) {

@@ -66,38 +63,38 @@ impl Columns {
         )]));
     }
 
-    pub fn insert_intermediary_routes(&mut self, intermediary_routes: Vec<IntermediaryRoute>) {
-        let id = Self::get_new_id();
+    pub fn insert_intermediary_routes(
+        &mut self,
+        timeline_cache: &mut TimelineCache,
+        intermediary_routes: Vec<IntermediaryRoute>,
+    ) {
         let routes = intermediary_routes
             .into_iter()
             .map(|r| match r {
                 IntermediaryRoute::Timeline(timeline) => {
-                    let route = Route::timeline(timeline.id);
-                    self.timelines.insert(id, timeline);
+                    let route = Route::timeline(timeline.kind.clone());
+                    timeline_cache
+                        .timelines
+                        .insert(timeline.kind.clone(), timeline);
                     route
                 }
                 IntermediaryRoute::Route(route) => route,
             })
             .collect();
 
-        self.columns.insert(id, Column::new(routes));
-    }
-
-    fn get_new_id() -> u32 {
-        UIDS.fetch_add(1, Ordering::Relaxed)
+        self.columns.push(Column::new(routes));
     }
 
     pub fn add_column_at(&mut self, column: Column, index: u32) {
-        self.columns.insert(index, column);
+        self.columns.insert(index as usize, column);
     }
 
     pub fn add_column(&mut self, column: Column) {
-        self.columns.insert(Self::get_new_id(), column);
+        self.columns.push(column);
    }
 
-    pub fn columns_mut(&mut self) -> Vec<&mut Column> {
-        self.columns.values_mut().collect()
+    pub fn columns_mut(&mut self) -> &mut Vec<Column> {
+        &mut self.columns
     }
 
     pub fn num_columns(&self) -> usize {

@@ -110,72 +107,23 @@ impl Columns {
         if self.columns.is_empty() {
             self.new_column_picker();
         }
-        self.columns
-            .get_index_mut(0)
-            .expect("There should be at least one column")
-            .1
-            .router_mut()
-    }
-
-    pub fn timeline_mut(&mut self, timeline_ind: usize) -> &mut Timeline {
-        self.timelines
-            .get_index_mut(timeline_ind)
-            .expect("expected index to be in bounds")
-            .1
+        self.columns[0].router_mut()
     }
 
     pub fn column(&self, ind: usize) -> &Column {
-        self.columns
-            .get_index(ind)
-            .expect("Expected index to be in bounds")
-            .1
+        &self.columns[ind]
     }
 
-    pub fn columns(&self) -> Vec<&Column> {
-        self.columns.values().collect()
-    }
-
-    pub fn get_column_id_at_index(&self, ind: usize) -> u32 {
-        *self
-            .columns
-            .get_index(ind)
-            .expect("expected index to be within bounds")
-            .0
+    pub fn columns(&self) -> &[Column] {
+        &self.columns
     }
 
     pub fn selected(&mut self) -> &mut Column {
-        self.columns
-            .get_index_mut(self.selected as usize)
-            .expect("Expected selected index to be in bounds")
-            .1
-    }
-
-    pub fn timelines_mut(&mut self) -> Vec<&mut Timeline> {
-        self.timelines.values_mut().collect()
-    }
-
-    pub fn timelines(&self) -> Vec<&Timeline> {
-        self.timelines.values().collect()
-    }
-
-    pub fn find_timeline_mut(&mut self, id: TimelineId) -> Option<&mut Timeline> {
-        self.timelines_mut().into_iter().find(|tl| tl.id == id)
-    }
-
-    pub fn find_timeline(&self, id: TimelineId) -> Option<&Timeline> {
-        self.timelines().into_iter().find(|tl| tl.id == id)
+        &mut self.columns[self.selected as usize]
     }
 
     pub fn column_mut(&mut self, ind: usize) -> &mut Column {
-        self.columns
-            .get_index_mut(ind)
-            .expect("Expected index to be in bounds")
-            .1
-    }
-
-    pub fn find_timeline_for_column_index(&self, ind: usize) -> Option<&Timeline> {
-        let col_id = self.get_column_id_at_index(ind);
-        self.timelines.get(&col_id)
+        &mut self.columns[ind]
    }
 
     pub fn select_down(&mut self) {

@@ -200,16 +148,22 @@ impl Columns {
         self.selected += 1;
     }
 
-    pub fn delete_column(&mut self, index: usize) {
-        if let Some((key, _)) = self.columns.get_index_mut(index) {
-            self.timelines.shift_remove(key);
+    #[must_use = "you must call timeline_cache.pop() for each returned value"]
+    pub fn delete_column(&mut self, index: usize) -> Vec<TimelineKind> {
+        let mut kinds_to_pop: Vec<TimelineKind> = vec![];
+        for route in self.columns[index].router().routes() {
+            if let Route::Timeline(kind) = route {
+                kinds_to_pop.push(kind.clone());
+            }
         }
 
-        self.columns.shift_remove_index(index);
+        self.columns.remove(index);
 
         if self.columns.is_empty() {
             self.new_column_picker();
         }
+
+        kinds_to_pop
     }
 
     pub fn move_col(&mut self, from_index: usize, to_index: usize) {

@@ -220,15 +174,7 @@ impl Columns {
             return;
         }
 
-        if from_index < to_index {
-            for i in from_index..to_index {
-                self.columns.swap_indices(i, i + 1);
-            }
-        } else {
-            for i in (to_index..from_index).rev() {
-                self.columns.swap_indices(i, i + 1);
-            }
-        }
+        self.columns.swap(from_index, to_index);
     }
 }
 
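The #[must_use] on delete_column encodes the new ownership split: Columns only knows which TimelineKind routes it dropped, and the caller has to pop each one from the shared TimelineCache so the underlying subscriptions are released. The call sites in this commit follow this shape (fragment; `columns`, `timeline_cache`, `ndb` and `pool` are assumed to be in scope):

// Delete the column, then release every timeline it was routing to.
let kinds_to_pop = columns.delete_column(index);
for kind in &kinds_to_pop {
    if let Err(err) = timeline_cache.pop(kind, ndb, pool) {
        error!("error popping timeline: {err}");
    }
}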

@@ -1,14 +1,15 @@
 use std::collections::{hash_map::ValuesMut, HashMap};
 
 use enostr::Pubkey;
-use nostrdb::Ndb;
+use nostrdb::Transaction;
+use notedeck::AppContext;
 use tracing::{error, info};
 
 use crate::{
     accounts::AccountsRoute,
     column::{Column, Columns},
     route::Route,
-    timeline::{self, Timeline, TimelineKind},
+    timeline::{TimelineCache, TimelineKind},
     ui::{add_column::AddColumnRoute, configure_deck::ConfigureDeckResponse},
 };

@@ -44,10 +45,13 @@ impl DecksCache {
         }
     }
 
-    pub fn new_with_demo_config(ndb: &Ndb) -> Self {
+    pub fn new_with_demo_config(timeline_cache: &mut TimelineCache, ctx: &mut AppContext) -> Self {
         let mut account_to_decks: HashMap<Pubkey, Decks> = Default::default();
         let fallback_pubkey = FALLBACK_PUBKEY();
-        account_to_decks.insert(fallback_pubkey, demo_decks(fallback_pubkey, ndb));
+        account_to_decks.insert(
+            fallback_pubkey,
+            demo_decks(fallback_pubkey, timeline_cache, ctx),
+        );
         DecksCache::new(account_to_decks)
     }
 

@@ -298,7 +302,11 @@ impl Deck {
     }
 }
 
-pub fn demo_decks(demo_pubkey: Pubkey, ndb: &Ndb) -> Decks {
+pub fn demo_decks(
+    demo_pubkey: Pubkey,
+    timeline_cache: &mut TimelineCache,
+    ctx: &mut AppContext,
+) -> Decks {
     let deck = {
         let mut columns = Columns::default();
         columns.add_column(Column::new(vec![

@@ -306,14 +314,27 @@ pub fn demo_decks(demo_pubkey: Pubkey, ndb: &Ndb) -> Decks {
             Route::Accounts(AccountsRoute::Accounts),
         ]));
 
-        if let Some(timeline) =
-            TimelineKind::contact_list(timeline::PubkeySource::Explicit(demo_pubkey))
-                .into_timeline(ndb, Some(demo_pubkey.bytes()))
-        {
-            columns.add_new_timeline_column(timeline);
+        let kind = TimelineKind::contact_list(demo_pubkey);
+        let txn = Transaction::new(ctx.ndb).unwrap();
+
+        if let Some(results) = columns.add_new_timeline_column(
+            timeline_cache,
+            &txn,
+            ctx.ndb,
+            ctx.note_cache,
+            ctx.pool,
+            &kind,
+        ) {
+            results.process(
+                ctx.ndb,
+                ctx.note_cache,
+                &txn,
+                timeline_cache,
+                ctx.unknown_ids,
+            );
         }
 
-        columns.add_new_timeline_column(Timeline::hashtag("introductions".to_string()));
+        //columns.add_new_timeline_column(Timeline::hashtag("introductions".to_string()));
 
         Deck {
             icon: '🇩',

@@ -5,6 +5,9 @@ pub enum Error {
     #[error("timeline not found")]
     TimelineNotFound,
 
+    #[error("timeline is missing a subscription")]
+    MissingSubscription,
+
     #[error("load failed")]
     LoadFailed,
 

@@ -29,7 +29,6 @@ mod route;
 mod subscriptions;
 mod support;
 mod test_data;
-mod thread;
 mod timeline;
 pub mod ui;
 mod unknowns;

@@ -1,107 +1,145 @@
 use enostr::{Filter, RelayPool};
-use nostrdb::Ndb;
+use nostrdb::{Ndb, Subscription};
 use tracing::{error, info};
 use uuid::Uuid;
 
-use notedeck::UnifiedSubscription;
-
+#[derive(Debug)]
 pub struct MultiSubscriber {
-    filters: Vec<Filter>,
-    pub sub: Option<UnifiedSubscription>,
-    subscribers: u32,
+    pub filters: Vec<Filter>,
+    pub local_subid: Option<Subscription>,
+    pub remote_subid: Option<String>,
+    local_subscribers: u32,
+    remote_subscribers: u32,
 }
 
 impl MultiSubscriber {
+    /// Create a MultiSubscriber with an initial local subscription.
+    pub fn with_initial_local_sub(sub: Subscription, filters: Vec<Filter>) -> Self {
+        let mut msub = MultiSubscriber::new(filters);
+        msub.local_subid = Some(sub);
+        msub.local_subscribers = 1;
+        msub
+    }
+
     pub fn new(filters: Vec<Filter>) -> Self {
         Self {
             filters,
-            sub: None,
-            subscribers: 0,
+            local_subid: None,
+            remote_subid: None,
+            local_subscribers: 0,
+            remote_subscribers: 0,
         }
     }
 
-    fn real_subscribe(
-        ndb: &Ndb,
-        pool: &mut RelayPool,
-        filters: Vec<Filter>,
-    ) -> Option<UnifiedSubscription> {
-        let subid = Uuid::new_v4().to_string();
-        let sub = ndb.subscribe(&filters).ok()?;
-
-        pool.subscribe(subid.clone(), filters);
-
-        Some(UnifiedSubscription {
-            local: sub,
-            remote: subid,
-        })
-    }
-
-    pub fn unsubscribe(&mut self, ndb: &mut Ndb, pool: &mut RelayPool) {
-        if self.subscribers == 0 {
-            error!("No subscribers to unsubscribe from");
-            return;
-        }
-
-        self.subscribers -= 1;
-        if self.subscribers == 0 {
-            let sub = match self.sub {
-                Some(ref sub) => sub,
-                None => {
-                    error!("No remote subscription to unsubscribe from");
-                    return;
-                }
-            };
-            let local_sub = &sub.local;
-            if let Err(e) = ndb.unsubscribe(*local_sub) {
-                error!(
-                    "failed to unsubscribe from object: {e}, subid:{}, {} active subscriptions",
-                    local_sub.id(),
-                    ndb.subscription_count()
-                );
-            } else {
-                info!(
-                    "Unsubscribed from object subid:{}. {} active subscriptions",
-                    local_sub.id(),
-                    ndb.subscription_count()
-                );
-            }
-
-            // unsub from remote
-            pool.unsubscribe(sub.remote.clone());
-            self.sub = None;
+    fn unsubscribe_remote(&mut self, ndb: &Ndb, pool: &mut RelayPool) {
+        let remote_subid = if let Some(remote_subid) = &self.remote_subid {
+            remote_subid
         } else {
-            info!(
-                "Locally unsubscribing. {} active ndb subscriptions. {} active subscriptions for this object",
-                ndb.subscription_count(),
-                self.subscribers,
-            );
+            self.err_log(ndb, "unsubscribe_remote: nothing to unsubscribe from?");
+            return;
+        };
+
+        pool.unsubscribe(remote_subid.clone());
+        self.remote_subid = None;
+    }
+
+    /// Locally unsubscribe if we have one
+    fn unsubscribe_local(&mut self, ndb: &mut Ndb) {
+        let local_sub = if let Some(local_sub) = self.local_subid {
+            local_sub
+        } else {
+            self.err_log(ndb, "unsubscribe_local: nothing to unsubscribe from?");
+            return;
+        };
+
+        match ndb.unsubscribe(local_sub) {
+            Err(e) => {
+                self.err_log(ndb, &format!("Failed to unsubscribe: {e}"));
+            }
+            Ok(_) => {
+                self.local_subid = None;
+            }
         }
     }
 
+    pub fn unsubscribe(&mut self, ndb: &mut Ndb, pool: &mut RelayPool) -> bool {
+        if self.local_subscribers == 0 && self.remote_subscribers == 0 {
+            self.err_log(
+                ndb,
+                "Called multi_subscriber unsubscribe when both sub counts are 0",
+            );
+            return false;
+        }
+
+        self.local_subscribers = self.local_subscribers.saturating_sub(1);
+        self.remote_subscribers = self.remote_subscribers.saturating_sub(1);
+
+        if self.local_subscribers == 0 && self.remote_subscribers == 0 {
+            self.info_log(ndb, "Locally unsubscribing");
+            self.unsubscribe_local(ndb);
+            self.unsubscribe_remote(ndb, pool);
+            self.local_subscribers = 0;
+            self.remote_subscribers = 0;
+            true
+        } else {
+            false
+        }
+    }
+
+    fn info_log(&self, ndb: &Ndb, msg: &str) {
+        info!(
+            "{msg}. {}/{}/{} active ndb/local/remote subscriptions.",
+            ndb.subscription_count(),
+            self.local_subscribers,
+            self.remote_subscribers,
+        );
+    }
+
+    fn err_log(&self, ndb: &Ndb, msg: &str) {
+        error!(
+            "{msg}. {}/{}/{} active ndb/local/remote subscriptions.",
+            ndb.subscription_count(),
+            self.local_subscribers,
+            self.remote_subscribers,
+        );
+    }
+
     pub fn subscribe(&mut self, ndb: &Ndb, pool: &mut RelayPool) {
-        self.subscribers += 1;
-        if self.subscribers == 1 {
-            if self.sub.is_some() {
-                error!("Object is first subscriber, but it already had remote subscription");
+        self.local_subscribers += 1;
+        self.remote_subscribers += 1;
+
+        if self.remote_subscribers == 1 {
+            if self.remote_subid.is_some() {
+                self.err_log(
+                    ndb,
+                    "Object is first subscriber, but it already had a subscription",
+                );
+                return;
+            } else {
+                let subid = Uuid::new_v4().to_string();
+                pool.subscribe(subid.clone(), self.filters.clone());
+                self.info_log(ndb, "First remote subscription");
+                self.remote_subid = Some(subid);
+            }
+        }
+
+        if self.local_subscribers == 1 {
+            if self.local_subid.is_some() {
+                self.err_log(ndb, "Should not have a local subscription already");
                 return;
             }
 
-            self.sub = Self::real_subscribe(ndb, pool, self.filters.clone());
-            info!(
-                "Remotely subscribing to object. {} total active subscriptions, {} on this object",
-                ndb.subscription_count(),
-                self.subscribers,
-            );
-
-            if self.sub.is_none() {
-                error!("Error subscribing remotely to object");
+            match ndb.subscribe(&self.filters) {
+                Ok(sub) => {
+                    self.info_log(ndb, "First local subscription");
+                    self.local_subid = Some(sub);
+                }
+
+                Err(err) => {
+                    error!("multi_subscriber: error subscribing locally: '{err}'")
+                }
             }
-        } else {
-            info!(
-                "Locally subscribing. {} total active subscriptions, {} for this object",
-                ndb.subscription_count(),
-                self.subscribers,
-            )
         }
     }
 }
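The reworked MultiSubscriber keeps separate local (nostrdb) and remote (relay pool) reference counts instead of a single counter plus a UnifiedSubscription. Intended usage, sketched under the assumption that `ndb`, `pool` and `filters` are already set up:

// Two views sharing one subscriber: only the first subscribe() creates the
// local ndb subscription and the remote relay subscription, and only the
// final unsubscribe() (both counts back at zero) tears them down.
let mut msub = MultiSubscriber::new(filters.clone());
msub.subscribe(&ndb, &mut pool); // first view
msub.subscribe(&ndb, &mut pool); // second view: counters only
assert!(!msub.unsubscribe(&mut ndb, &mut pool)); // one view still open
assert!(msub.unsubscribe(&mut ndb, &mut pool)); // last view: fully unsubscribed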

@@ -1,7 +1,7 @@
 use crate::{
     accounts::render_accounts_route,
     actionbar::NoteAction,
-    app::{get_active_columns, get_active_columns_mut, get_decks_mut},
+    app::{get_active_columns_mut, get_decks_mut},
     column::ColumnsAction,
     deck_state::DeckState,
     decks::{Deck, DecksAction, DecksCache},

@@ -9,10 +9,7 @@ use crate::{
     profile_state::ProfileState,
     relay_pool_manager::RelayPoolManager,
     route::Route,
-    timeline::{
-        route::{render_timeline_route, TimelineRoute},
-        Timeline,
-    },
+    timeline::{route::render_timeline_route, TimelineCache},
     ui::{
         self,
         add_column::render_add_column_routes,

@@ -27,11 +24,10 @@ use crate::{
     Damus,
 };
 
-use notedeck::{AccountsAction, AppContext, RootIdError};
-
 use egui_nav::{Nav, NavAction, NavResponse, NavUiType};
-use nostrdb::{Ndb, Transaction};
-use tracing::{error, info};
+use nostrdb::Transaction;
+use notedeck::{AccountsAction, AppContext};
+use tracing::error;
 
 #[allow(clippy::enum_variant_names)]
 pub enum RenderNavAction {

@@ -51,7 +47,12 @@ pub enum SwitchingAction {
 
 impl SwitchingAction {
     /// process the action, and return whether switching occured
-    pub fn process(&self, decks_cache: &mut DecksCache, ctx: &mut AppContext<'_>) -> bool {
+    pub fn process(
+        &self,
+        timeline_cache: &mut TimelineCache,
+        decks_cache: &mut DecksCache,
+        ctx: &mut AppContext<'_>,
+    ) -> bool {
         match &self {
             SwitchingAction::Accounts(account_action) => match account_action {
                 AccountsAction::Switch(switch_action) => {

@@ -68,8 +69,15 @@ impl SwitchingAction {
             },
             SwitchingAction::Columns(columns_action) => match *columns_action {
                 ColumnsAction::Remove(index) => {
-                    get_active_columns_mut(ctx.accounts, decks_cache).delete_column(index)
+                    let kinds_to_pop =
+                        get_active_columns_mut(ctx.accounts, decks_cache).delete_column(index);
+                    for kind in &kinds_to_pop {
+                        if let Err(err) = timeline_cache.pop(kind, ctx.ndb, ctx.pool) {
+                            error!("error popping timeline: {err}");
+                        }
+                    }
                 }
 
                 ColumnsAction::Switch(from, to) => {
                     get_active_columns_mut(ctx.accounts, decks_cache).move_col(from, to);
                 }

@@ -133,14 +141,14 @@ impl RenderNavResponse {
                 }
 
                 RenderNavAction::RemoveColumn => {
-                    let tl = app
-                        .columns(ctx.accounts)
-                        .find_timeline_for_column_index(col);
-                    if let Some(timeline) = tl {
-                        unsubscribe_timeline(ctx.ndb, timeline);
+                    let kinds_to_pop = app.columns_mut(ctx.accounts).delete_column(col);
+
+                    for kind in &kinds_to_pop {
+                        if let Err(err) = app.timeline_cache.pop(kind, ctx.ndb, ctx.pool) {
+                            error!("error popping timeline: {err}");
+                        }
                     }
 
-                    app.columns_mut(ctx.accounts).delete_column(col);
                     switching_occured = true;
                 }
 

@@ -169,7 +177,11 @@ impl RenderNavResponse {
                 }
 
                 RenderNavAction::SwitchingAction(switching_action) => {
-                    switching_occured = switching_action.process(&mut app.decks_cache, ctx);
+                    switching_occured = switching_action.process(
+                        &mut app.timeline_cache,
+                        &mut app.decks_cache,
+                        ctx,
+                    );
                 }
                 RenderNavAction::ProfileAction(profile_action) => {
                     profile_action.process(

@@ -192,40 +204,12 @@ impl RenderNavResponse {
                     .column_mut(col)
                     .router_mut()
                     .pop();
-                let txn = Transaction::new(ctx.ndb).expect("txn");
 
-                if let Some(Route::Timeline(TimelineRoute::Thread(id))) = r {
-                    match notedeck::note::root_note_id_from_selected_id(
-                        ctx.ndb,
-                        ctx.note_cache,
-                        &txn,
-                        id.bytes(),
-                    ) {
-                        Ok(root_id) => {
-                            if let Some(thread) =
-                                app.timeline_cache.threads.get_mut(root_id.bytes())
-                            {
-                                if let Some(sub) = &mut thread.subscription {
-                                    sub.unsubscribe(ctx.ndb, ctx.pool);
-                                }
-                            }
-                        }
-
-                        Err(RootIdError::NoteNotFound) => {
-                            error!("thread returned: note not found for unsub??: {}", id.hex())
-                        }
-
-                        Err(RootIdError::NoRootId) => {
-                            error!("thread returned: note not found for unsub??: {}", id.hex())
-                        }
+                if let Some(Route::Timeline(kind)) = &r {
+                    if let Err(err) = app.timeline_cache.pop(kind, ctx.ndb, ctx.pool) {
+                        error!("popping timeline had an error: {err} for {:?}", kind);
                     }
-                } else if let Some(Route::Timeline(TimelineRoute::Profile(pubkey))) = r {
-                    if let Some(profile) = app.timeline_cache.profiles.get_mut(pubkey.bytes()) {
-                        if let Some(sub) = &mut profile.subscription {
-                            sub.unsubscribe(ctx.ndb, ctx.pool);
-                        }
-                    }
-                }
+                };
 
                 switching_occured = true;
             }

@@ -255,21 +239,21 @@ fn render_nav_body(
     app: &mut Damus,
     ctx: &mut AppContext<'_>,
     top: &Route,
+    depth: usize,
     col: usize,
 ) -> Option<RenderNavAction> {
     match top {
-        Route::Timeline(tlr) => render_timeline_route(
+        Route::Timeline(kind) => render_timeline_route(
             ctx.ndb,
-            get_active_columns_mut(ctx.accounts, &mut app.decks_cache),
-            &mut app.drafts,
             ctx.img_cache,
             ctx.unknown_ids,
             ctx.note_cache,
             &mut app.timeline_cache,
             ctx.accounts,
-            *tlr,
+            kind,
             col,
             app.textmode,
+            depth,
             ui,
         ),
         Route::Accounts(amr) => {

@@ -294,6 +278,78 @@ fn render_nav_body(
             RelayView::new(ctx.accounts, manager, &mut app.view_state.id_string_map).ui(ui);
             None
         }
+
+        Route::Reply(id) => {
||||||
|
let txn = if let Ok(txn) = Transaction::new(ctx.ndb) {
|
||||||
|
txn
|
||||||
|
} else {
|
||||||
|
ui.label("Reply to unknown note");
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
|
||||||
|
let note = if let Ok(note) = ctx.ndb.get_note_by_id(&txn, id.bytes()) {
|
||||||
|
note
|
||||||
|
} else {
|
||||||
|
ui.label("Reply to unknown note");
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
|
||||||
|
let id = egui::Id::new(("post", col, note.key().unwrap()));
|
||||||
|
let poster = ctx.accounts.selected_or_first_nsec()?;
|
||||||
|
|
||||||
|
let action = {
|
||||||
|
let draft = app.drafts.reply_mut(note.id());
|
||||||
|
|
||||||
|
let response = egui::ScrollArea::vertical().show(ui, |ui| {
|
||||||
|
ui::PostReplyView::new(
|
||||||
|
ctx.ndb,
|
||||||
|
poster,
|
||||||
|
draft,
|
||||||
|
ctx.note_cache,
|
||||||
|
ctx.img_cache,
|
||||||
|
¬e,
|
||||||
|
)
|
||||||
|
.id_source(id)
|
||||||
|
.show(ui)
|
||||||
|
});
|
||||||
|
|
||||||
|
response.inner.action
|
||||||
|
};
|
||||||
|
|
||||||
|
action.map(Into::into)
|
||||||
|
}
|
||||||
|
|
||||||
|
Route::Quote(id) => {
|
||||||
|
let txn = Transaction::new(ctx.ndb).expect("txn");
|
||||||
|
|
||||||
|
let note = if let Ok(note) = ctx.ndb.get_note_by_id(&txn, id.bytes()) {
|
||||||
|
note
|
||||||
|
} else {
|
||||||
|
ui.label("Quote of unknown note");
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
|
||||||
|
let id = egui::Id::new(("post", col, note.key().unwrap()));
|
||||||
|
|
||||||
|
let poster = ctx.accounts.selected_or_first_nsec()?;
|
||||||
|
let draft = app.drafts.quote_mut(note.id());
|
||||||
|
|
||||||
|
let response = egui::ScrollArea::vertical().show(ui, |ui| {
|
||||||
|
crate::ui::note::QuoteRepostView::new(
|
||||||
|
ctx.ndb,
|
||||||
|
poster,
|
||||||
|
ctx.note_cache,
|
||||||
|
ctx.img_cache,
|
||||||
|
draft,
|
||||||
|
¬e,
|
||||||
|
)
|
||||||
|
.id_source(id)
|
||||||
|
.show(ui)
|
||||||
|
});
|
||||||
|
|
||||||
|
response.inner.action.map(Into::into)
|
||||||
|
}
|
||||||
|
|
||||||
Route::ComposeNote => {
|
Route::ComposeNote => {
|
||||||
let kp = ctx.accounts.get_selected_account()?.to_full()?;
|
let kp = ctx.accounts.get_selected_account()?.to_full()?;
|
||||||
let draft = app.drafts.compose_mut();
|
let draft = app.drafts.compose_mut();
|
||||||
@@ -421,9 +477,6 @@ pub fn render_nav(
|
|||||||
ctx: &mut AppContext<'_>,
|
ctx: &mut AppContext<'_>,
|
||||||
ui: &mut egui::Ui,
|
ui: &mut egui::Ui,
|
||||||
) -> RenderNavResponse {
|
) -> RenderNavResponse {
|
||||||
let col_id = get_active_columns(ctx.accounts, &app.decks_cache).get_column_id_at_index(col);
|
|
||||||
// TODO(jb55): clean up this router_mut mess by using Router<R> in egui-nav directly
|
|
||||||
|
|
||||||
let nav_response = Nav::new(
|
let nav_response = Nav::new(
|
||||||
&app.columns(ctx.accounts)
|
&app.columns(ctx.accounts)
|
||||||
.column(col)
|
.column(col)
|
||||||
@@ -443,33 +496,24 @@ pub fn render_nav(
|
|||||||
.router_mut()
|
.router_mut()
|
||||||
.returning,
|
.returning,
|
||||||
)
|
)
|
||||||
.id_source(egui::Id::new(col_id))
|
.id_source(egui::Id::new(("nav", col)))
|
||||||
.show_mut(ui, |ui, render_type, nav| match render_type {
|
.show_mut(ui, |ui, render_type, nav| match render_type {
|
||||||
NavUiType::Title => NavTitle::new(
|
NavUiType::Title => NavTitle::new(
|
||||||
ctx.ndb,
|
ctx.ndb,
|
||||||
ctx.img_cache,
|
ctx.img_cache,
|
||||||
get_active_columns_mut(ctx.accounts, &mut app.decks_cache),
|
get_active_columns_mut(ctx.accounts, &mut app.decks_cache),
|
||||||
ctx.accounts.get_selected_account().map(|a| &a.pubkey),
|
|
||||||
nav.routes(),
|
nav.routes(),
|
||||||
col,
|
col,
|
||||||
)
|
)
|
||||||
.show(ui),
|
.show(ui),
|
||||||
NavUiType::Body => render_nav_body(ui, app, ctx, nav.routes().last().expect("top"), col),
|
NavUiType::Body => {
|
||||||
|
if let Some(top) = nav.routes().last() {
|
||||||
|
render_nav_body(ui, app, ctx, top, nav.routes().len(), col)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
RenderNavResponse::new(col, nav_response)
|
RenderNavResponse::new(col, nav_response)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn unsubscribe_timeline(ndb: &mut Ndb, timeline: &Timeline) {
|
|
||||||
if let Some(sub_id) = timeline.subscription {
|
|
||||||
if let Err(e) = ndb.unsubscribe(sub_id) {
|
|
||||||
error!("unsubscribe error: {}", e);
|
|
||||||
} else {
|
|
||||||
info!(
|
|
||||||
"successfully unsubscribed from timeline {} with sub id {}",
|
|
||||||
timeline.id,
|
|
||||||
sub_id.id()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
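For orientation, a rough sketch of the column-removal flow the hunks above introduce: deleting a column now returns the `TimelineKind`s it owned, and each one is handed back to the shared `TimelineCache` so its `MultiSubscriber` can be dropped. This is a sketch only; the free-standing helper name `remove_column` is an illustrative assumption, not code from this commit.

```rust
// Sketch: mirrors the pattern used in RenderNavAction::RemoveColumn above.
fn remove_column(col: usize, app: &mut Damus, ctx: &mut AppContext<'_>) {
    // delete_column returns the timeline kinds that no longer back any column
    let kinds_to_pop = app.columns_mut(ctx.accounts).delete_column(col);

    for kind in &kinds_to_pop {
        // pop() releases the kind's subscription once no column uses it anymore
        if let Err(err) = app.timeline_cache.pop(kind, ctx.ndb, ctx.pool) {
            tracing::error!("error popping timeline: {err}");
        }
    }
}
```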
@@ -1,16 +1,13 @@
 use std::collections::HashMap;

-use enostr::{Filter, FullKeypair, Pubkey, PubkeyRef, RelayPool};
+use enostr::{FullKeypair, Pubkey, RelayPool};
-use nostrdb::{FilterBuilder, Ndb, Note, NoteBuildOptions, NoteBuilder, ProfileRecord};
+use nostrdb::{Ndb, Note, NoteBuildOptions, NoteBuilder, ProfileRecord};

-use notedeck::{filter::default_limit, FilterState};
 use tracing::info;

 use crate::{
-    multi_subscriber::MultiSubscriber,
     profile_state::ProfileState,
     route::{Route, Router},
-    timeline::{PubkeySource, Timeline, TimelineKind, TimelineTab},
 };

 pub struct NostrName<'a> {
@@ -75,33 +72,6 @@ pub fn get_display_name<'a>(record: Option<&ProfileRecord<'a>>) -> NostrName<'a>
     }
 }

-pub struct Profile {
-    pub timeline: Timeline,
-    pub subscription: Option<MultiSubscriber>,
-}
-
-impl Profile {
-    pub fn new(source: PubkeySource, filters: Vec<Filter>) -> Self {
-        let timeline = Timeline::new(
-            TimelineKind::profile(source),
-            FilterState::ready(filters),
-            TimelineTab::full_tabs(),
-        );
-
-        Profile {
-            timeline,
-            subscription: None,
-        }
-    }
-
-    pub fn filters_raw(pk: PubkeyRef<'_>) -> Vec<FilterBuilder> {
-        vec![Filter::new()
-            .authors([pk.bytes()])
-            .kinds([1])
-            .limit(default_limit())]
-    }
-}
-
 pub struct SaveProfileChanges {
     pub kp: FullKeypair,
     pub state: ProfileState,
@@ -3,18 +3,22 @@ use std::fmt::{self};

 use crate::{
     accounts::AccountsRoute,
-    column::Columns,
+    timeline::{
-    timeline::{kind::ColumnTitle, TimelineId, TimelineRoute},
+        kind::{AlgoTimeline, ColumnTitle, ListKind},
+        ThreadSelection, TimelineKind,
+    },
     ui::add_column::{AddAlgoRoute, AddColumnRoute},
 };

 use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};

 /// App routing. These describe different places you can go inside Notedeck.
-#[derive(Clone, Copy, Eq, PartialEq, Debug)]
+#[derive(Clone, Eq, PartialEq, Debug)]
 pub enum Route {
-    Timeline(TimelineRoute),
+    Timeline(TimelineKind),
     Accounts(AccountsRoute),
+    Reply(NoteId),
+    Quote(NoteId),
     Relays,
     ComposeNote,
     AddColumn(AddColumnRoute),
@@ -24,12 +28,60 @@ pub enum Route {
     EditDeck(usize),
 }

-impl TokenSerializable for Route {
+impl Route {
-    fn serialize_tokens(&self, writer: &mut TokenWriter) {
+    pub fn timeline(timeline_kind: TimelineKind) -> Self {
+        Route::Timeline(timeline_kind)
+    }
+
+    pub fn timeline_id(&self) -> Option<&TimelineKind> {
+        if let Route::Timeline(tid) = self {
+            Some(tid)
+        } else {
+            None
+        }
+    }
+
+    pub fn relays() -> Self {
+        Route::Relays
+    }
+
+    pub fn thread(thread_selection: ThreadSelection) -> Self {
+        Route::Timeline(TimelineKind::Thread(thread_selection))
+    }
+
+    pub fn profile(pubkey: Pubkey) -> Self {
+        Route::Timeline(TimelineKind::profile(pubkey))
+    }
+
+    pub fn reply(replying_to: NoteId) -> Self {
+        Route::Reply(replying_to)
+    }
+
+    pub fn quote(quoting: NoteId) -> Self {
+        Route::Quote(quoting)
+    }
+
+    pub fn accounts() -> Self {
+        Route::Accounts(AccountsRoute::Accounts)
+    }
+
+    pub fn add_account() -> Self {
+        Route::Accounts(AccountsRoute::AddAccount)
+    }
+
+    pub fn serialize_tokens(&self, writer: &mut TokenWriter) {
         match self {
-            Route::Timeline(routes) => routes.serialize_tokens(writer),
+            Route::Timeline(timeline_kind) => timeline_kind.serialize_tokens(writer),
             Route::Accounts(routes) => routes.serialize_tokens(writer),
             Route::AddColumn(routes) => routes.serialize_tokens(writer),
+            Route::Reply(note_id) => {
+                writer.write_token("reply");
+                writer.write_token(&note_id.hex());
+            }
+            Route::Quote(note_id) => {
+                writer.write_token("quote");
+                writer.write_token(&note_id.hex());
+            }
             Route::EditDeck(ind) => {
                 writer.write_token("deck");
                 writer.write_token("edit");
@@ -56,11 +108,20 @@ impl TokenSerializable for Route {
         }
     }

-    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+    pub fn parse<'a>(
+        parser: &mut TokenParser<'a>,
+        deck_author: &Pubkey,
+    ) -> Result<Self, ParseError<'a>> {
+        let tlkind =
+            parser.try_parse(|p| Ok(Route::Timeline(TimelineKind::parse(p, deck_author)?)));
+
+        if tlkind.is_ok() {
+            return tlkind;
+        }
+
         TokenParser::alt(
             parser,
             &[
-                |p| Ok(Route::Timeline(TimelineRoute::parse_from_tokens(p)?)),
                 |p| Ok(Route::Accounts(AccountsRoute::parse_from_tokens(p)?)),
                 |p| Ok(Route::AddColumn(AddColumnRoute::parse_from_tokens(p)?)),
                 |p| {
@@ -89,6 +150,18 @@ impl TokenSerializable for Route {
                         Ok(Route::Relays)
                     })
                 },
+                |p| {
+                    p.parse_all(|p| {
+                        p.parse_token("quote")?;
+                        Ok(Route::Quote(NoteId::new(tokenator::parse_hex_id(p)?)))
+                    })
+                },
+                |p| {
+                    p.parse_all(|p| {
+                        p.parse_token("reply")?;
+                        Ok(Route::Reply(NoteId::new(tokenator::parse_hex_id(p)?)))
+                    })
+                },
                 |p| {
                     p.parse_all(|p| {
                         p.parse_token("compose")?;
@@ -111,64 +184,13 @@ impl TokenSerializable for Route {
             ],
         )
     }
-}
-
-impl Route {
+    pub fn title(&self) -> ColumnTitle<'_> {
-    pub fn timeline(timeline_id: TimelineId) -> Self {
-        Route::Timeline(TimelineRoute::Timeline(timeline_id))
-    }
-
-    pub fn timeline_id(&self) -> Option<&TimelineId> {
-        if let Route::Timeline(TimelineRoute::Timeline(tid)) = self {
-            Some(tid)
-        } else {
-            None
-        }
-    }
-
-    pub fn relays() -> Self {
-        Route::Relays
-    }
-
-    pub fn thread(thread_root: NoteId) -> Self {
-        Route::Timeline(TimelineRoute::Thread(thread_root))
-    }
-
-    pub fn profile(pubkey: Pubkey) -> Self {
-        Route::Timeline(TimelineRoute::Profile(pubkey))
-    }
-
-    pub fn reply(replying_to: NoteId) -> Self {
-        Route::Timeline(TimelineRoute::Reply(replying_to))
-    }
-
-    pub fn quote(quoting: NoteId) -> Self {
-        Route::Timeline(TimelineRoute::Quote(quoting))
-    }
-
-    pub fn accounts() -> Self {
-        Route::Accounts(AccountsRoute::Accounts)
-    }
-
-    pub fn add_account() -> Self {
-        Route::Accounts(AccountsRoute::AddAccount)
-    }
-
-    pub fn title<'a>(&self, columns: &'a Columns) -> ColumnTitle<'a> {
         match self {
-            Route::Timeline(tlr) => match tlr {
+            Route::Timeline(kind) => kind.to_title(),
-                TimelineRoute::Timeline(id) => {
-                    if let Some(timeline) = columns.find_timeline(*id) {
+            Route::Reply(_id) => ColumnTitle::simple("Reply"),
-                        timeline.kind.to_title()
+            Route::Quote(_id) => ColumnTitle::simple("Quote"),
-                    } else {
-                        ColumnTitle::simple("Unknown")
-                    }
-                }
-                TimelineRoute::Thread(_id) => ColumnTitle::simple("Thread"),
-                TimelineRoute::Reply(_id) => ColumnTitle::simple("Reply"),
-                TimelineRoute::Quote(_id) => ColumnTitle::simple("Quote"),
-                TimelineRoute::Profile(_pubkey) => ColumnTitle::simple("Profile"),
-            },

             Route::Relays => ColumnTitle::simple("Relays"),
@@ -292,14 +314,22 @@ impl<R: Clone> Router<R> {
 impl fmt::Display for Route {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            Route::Timeline(tlr) => match tlr {
+            Route::Timeline(kind) => match kind {
-                TimelineRoute::Timeline(name) => write!(f, "{}", name),
+                TimelineKind::List(ListKind::Contact(_pk)) => write!(f, "Contacts"),
-                TimelineRoute::Thread(_id) => write!(f, "Thread"),
+                TimelineKind::Algo(AlgoTimeline::LastPerPubkey(ListKind::Contact(_))) => {
-                TimelineRoute::Profile(_id) => write!(f, "Profile"),
+                    write!(f, "Last Per Pubkey (Contact)")
-                TimelineRoute::Reply(_id) => write!(f, "Reply"),
+                }
-                TimelineRoute::Quote(_id) => write!(f, "Quote"),
+                TimelineKind::Notifications(_) => write!(f, "Notifications"),
+                TimelineKind::Universe => write!(f, "Universe"),
+                TimelineKind::Generic(_) => write!(f, "Custom"),
+                TimelineKind::Hashtag(ht) => write!(f, "Hashtag ({})", ht),
+                TimelineKind::Thread(_id) => write!(f, "Thread"),
+                TimelineKind::Profile(_id) => write!(f, "Profile"),
             },

+            Route::Reply(_id) => write!(f, "Reply"),
+            Route::Quote(_id) => write!(f, "Quote"),
+
             Route::Relays => write!(f, "Relays"),

             Route::Accounts(amr) => match amr {
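A minimal sketch of how the reworked route token (de)serialization might be exercised: routes are written as `:`-separated tokens, and parsing now needs the deck author's pubkey so deck-relative timeline kinds can be resolved. The `TokenWriter::default()` constructor and `writer.str()` accessor are assumptions; only `serialize_tokens`, `TokenParser::new`, and `Route::parse` appear in this diff.

```rust
// Sketch: serialize a Route to tokens and parse it back against a deck author.
fn roundtrip(route: &Route, deck_author: &Pubkey) -> Result<Route, String> {
    let mut writer = TokenWriter::default(); // assumed constructor
    route.serialize_tokens(&mut writer);

    let serialized = writer.str().to_string(); // assumed accessor
    let tokens: Vec<&str> = serialized.split(':').collect();
    let mut parser = TokenParser::new(&tokens);

    Route::parse(&mut parser, deck_author).map_err(|e| format!("{e:?}"))
}
```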
@@ -1,7 +1,7 @@
 use std::{collections::HashMap, fmt, str::FromStr};

 use enostr::Pubkey;
-use nostrdb::Ndb;
+use nostrdb::{Ndb, Transaction};
 use serde::{Deserialize, Serialize};
 use tracing::{error, info};

@@ -9,16 +9,20 @@ use crate::{
     column::{Columns, IntermediaryRoute},
     decks::{Deck, Decks, DecksCache},
     route::Route,
-    timeline::TimelineKind,
+    timeline::{TimelineCache, TimelineKind},
     Error,
 };

 use notedeck::{storage, DataPath, DataPathType, Directory};
-use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
+use tokenator::{ParseError, TokenParser, TokenWriter};

 pub static DECKS_CACHE_FILE: &str = "decks_cache.json";

-pub fn load_decks_cache(path: &DataPath, ndb: &Ndb) -> Option<DecksCache> {
+pub fn load_decks_cache(
+    path: &DataPath,
+    ndb: &Ndb,
+    timeline_cache: &mut TimelineCache,
+) -> Option<DecksCache> {
     let data_path = path.path(DataPathType::Setting);

     let decks_cache_str = match Directory::new(data_path).get_file(DECKS_CACHE_FILE.to_owned()) {
@@ -35,7 +39,9 @@ pub fn load_decks_cache(path: &DataPath, ndb: &Ndb) -> Option<DecksCache> {
     let serializable_decks_cache =
         serde_json::from_str::<SerializableDecksCache>(&decks_cache_str).ok()?;

-    serializable_decks_cache.decks_cache(ndb).ok()
+    serializable_decks_cache
+        .decks_cache(ndb, timeline_cache)
+        .ok()
 }

 pub fn save_decks_cache(path: &DataPath, decks_cache: &DecksCache) {
@@ -81,14 +87,17 @@ impl SerializableDecksCache {
         }
     }

-    pub fn decks_cache(self, ndb: &Ndb) -> Result<DecksCache, Error> {
+    pub fn decks_cache(
+        self,
+        ndb: &Ndb,
+        timeline_cache: &mut TimelineCache,
+    ) -> Result<DecksCache, Error> {
         let account_to_decks = self
             .decks_cache
             .into_iter()
             .map(|(pubkey, serializable_decks)| {
-                let deck_key = pubkey.bytes();
                 serializable_decks
-                    .decks(ndb, deck_key)
+                    .decks(ndb, timeline_cache, &pubkey)
                     .map(|decks| (pubkey, decks))
             })
             .collect::<Result<HashMap<Pubkey, Decks>, Error>>()?;
@@ -142,12 +151,17 @@ impl SerializableDecks {
         }
     }

-    fn decks(self, ndb: &Ndb, deck_key: &[u8; 32]) -> Result<Decks, Error> {
+    fn decks(
+        self,
+        ndb: &Ndb,
+        timeline_cache: &mut TimelineCache,
+        deck_key: &Pubkey,
+    ) -> Result<Decks, Error> {
         Ok(Decks::from_decks(
             self.active_deck,
             self.decks
                 .into_iter()
-                .map(|d| d.deck(ndb, deck_key))
+                .map(|d| d.deck(ndb, timeline_cache, deck_key))
                 .collect::<Result<_, _>>()?,
         ))
     }
@@ -252,8 +266,13 @@ impl SerializableDeck {
         SerializableDeck { metadata, columns }
     }

-    pub fn deck(self, ndb: &Ndb, deck_user: &[u8; 32]) -> Result<Deck, Error> {
+    pub fn deck(
-        let columns = deserialize_columns(ndb, deck_user, self.columns);
+        self,
+        ndb: &Ndb,
+        timeline_cache: &mut TimelineCache,
+        deck_user: &Pubkey,
+    ) -> Result<Deck, Error> {
+        let columns = deserialize_columns(ndb, timeline_cache, deck_user, self.columns);
         let deserialized_metadata = deserialize_metadata(self.metadata)
             .ok_or(Error::Generic("Could not deserialize metadata".to_owned()))?;

@@ -292,7 +311,12 @@ fn serialize_columns(columns: &Columns) -> Vec<Vec<String>> {
     cols_serialized
 }

-fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], columns: Vec<Vec<String>>) -> Columns {
+fn deserialize_columns(
+    ndb: &Ndb,
+    timeline_cache: &mut TimelineCache,
+    deck_user: &Pubkey,
+    columns: Vec<Vec<String>>,
+) -> Columns {
     let mut cols = Columns::new();
     for column in columns {
         let mut cur_routes = Vec::new();
@@ -301,11 +325,9 @@ fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], columns: Vec<Vec<String>
             let tokens: Vec<&str> = route.split(":").collect();
             let mut parser = TokenParser::new(&tokens);

-            match CleanIntermediaryRoute::parse_from_tokens(&mut parser) {
+            match CleanIntermediaryRoute::parse(&mut parser, deck_user) {
                 Ok(route_intermediary) => {
-                    if let Some(ir) =
+                    if let Some(ir) = route_intermediary.into_intermediary_route(ndb) {
-                        route_intermediary.into_intermediary_route(ndb, Some(deck_user))
-                    {
                         cur_routes.push(ir);
                     }
                 }
@@ -316,7 +338,7 @@ fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], columns: Vec<Vec<String>
         }

         if !cur_routes.is_empty() {
-            cols.insert_intermediary_routes(cur_routes);
+            cols.insert_intermediary_routes(timeline_cache, cur_routes);
         }
     }

@@ -329,48 +351,38 @@ enum CleanIntermediaryRoute {
 }

 impl CleanIntermediaryRoute {
-    fn into_intermediary_route(
+    fn into_intermediary_route(self, ndb: &Ndb) -> Option<IntermediaryRoute> {
-        self,
-        ndb: &Ndb,
-        user: Option<&[u8; 32]>,
-    ) -> Option<IntermediaryRoute> {
         match self {
-            CleanIntermediaryRoute::ToTimeline(timeline_kind) => Some(IntermediaryRoute::Timeline(
+            CleanIntermediaryRoute::ToTimeline(timeline_kind) => {
-                timeline_kind.into_timeline(ndb, user)?,
+                let txn = Transaction::new(ndb).unwrap();
-            )),
+                Some(IntermediaryRoute::Timeline(
+                    timeline_kind.into_timeline(&txn, ndb)?,
+                ))
+            }
             CleanIntermediaryRoute::ToRoute(route) => Some(IntermediaryRoute::Route(route)),
         }
     }
-}

-impl TokenSerializable for CleanIntermediaryRoute {
+    fn parse<'a>(
-    fn serialize_tokens(&self, writer: &mut TokenWriter) {
+        parser: &mut TokenParser<'a>,
-        match self {
+        deck_author: &Pubkey,
-            CleanIntermediaryRoute::ToTimeline(tlk) => {
+    ) -> Result<Self, ParseError<'a>> {
-                tlk.serialize_tokens(writer);
+        let timeline = parser.try_parse(|p| {
-            }
+            Ok(CleanIntermediaryRoute::ToTimeline(TimelineKind::parse(
-            CleanIntermediaryRoute::ToRoute(route) => {
+                p,
-                route.serialize_tokens(writer);
+                deck_author,
-            }
+            )?))
+        });
+        if timeline.is_ok() {
+            return timeline;
         }
-    }

-    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+        parser.try_parse(|p| {
-        TokenParser::alt(
+            Ok(CleanIntermediaryRoute::ToRoute(Route::parse(
-            parser,
+                p,
-            &[
+                deck_author,
-                |p| {
+            )?))
-                    Ok(CleanIntermediaryRoute::ToTimeline(
+        })
-                        TimelineKind::parse_from_tokens(p)?,
-                    ))
-                },
-                |p| {
-                    Ok(CleanIntermediaryRoute::ToRoute(Route::parse_from_tokens(
-                        p,
-                    )?))
-                },
-            ],
-        )
     }
 }

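Callers of deck loading now have to thread the shared `TimelineCache` through, since deserialized timeline columns are registered there instead of living inside `Columns`. A hedged sketch of such a call site; the surrounding setup and the `DecksCache::default()` fallback are assumptions, not code from this diff.

```rust
// Sketch: loading decks at startup with the unified timeline cache.
let mut timeline_cache = TimelineCache::default();
let decks_cache = load_decks_cache(&data_path, &ndb, &mut timeline_cache)
    .unwrap_or_default(); // fallback behavior is an assumption
```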
@@ -1,697 +0,0 @@
-use enostr::{NoteId, Pubkey};
-use nostrdb::Ndb;
-use serde::{Deserialize, Deserializer};
-use tracing::error;
-
-use crate::{
-    accounts::AccountsRoute,
-    column::{Columns, IntermediaryRoute},
-    route::Route,
-    timeline::{kind::ListKind, PubkeySource, Timeline, TimelineId, TimelineKind, TimelineRoute},
-    ui::add_column::AddColumnRoute,
-    Result,
-};
-
-use notedeck::{DataPath, DataPathType, Directory};
-
-pub static COLUMNS_FILE: &str = "columns.json";
-
-fn columns_json(path: &DataPath) -> Option<String> {
-    let data_path = path.path(DataPathType::Setting);
-    Directory::new(data_path)
-        .get_file(COLUMNS_FILE.to_string())
-        .ok()
-}
-
-#[derive(Deserialize, Debug, PartialEq)]
-enum MigrationTimelineRoute {
-    Timeline(u32),
-    Thread(String),
-    Profile(String),
-    Reply(String),
-    Quote(String),
-}
-
-impl MigrationTimelineRoute {
-    fn timeline_route(self) -> Option<TimelineRoute> {
-        match self {
-            MigrationTimelineRoute::Timeline(id) => {
-                Some(TimelineRoute::Timeline(TimelineId::new(id)))
-            }
-            MigrationTimelineRoute::Thread(note_id_hex) => {
-                Some(TimelineRoute::Thread(NoteId::from_hex(&note_id_hex).ok()?))
-            }
-            MigrationTimelineRoute::Profile(pubkey_hex) => {
-                Some(TimelineRoute::Profile(Pubkey::from_hex(&pubkey_hex).ok()?))
-            }
-            MigrationTimelineRoute::Reply(note_id_hex) => {
-                Some(TimelineRoute::Reply(NoteId::from_hex(&note_id_hex).ok()?))
-            }
-            MigrationTimelineRoute::Quote(note_id_hex) => {
-                Some(TimelineRoute::Quote(NoteId::from_hex(&note_id_hex).ok()?))
-            }
-        }
-    }
-}
-
-#[derive(Deserialize, Debug, PartialEq)]
-enum MigrationRoute {
-    Timeline(MigrationTimelineRoute),
-    Accounts(MigrationAccountsRoute),
-    Relays,
-    ComposeNote,
-    AddColumn(MigrationAddColumnRoute),
-    Support,
-}
-
-impl MigrationRoute {
-    fn route(self) -> Option<Route> {
-        match self {
-            MigrationRoute::Timeline(migration_timeline_route) => {
-                Some(Route::Timeline(migration_timeline_route.timeline_route()?))
-            }
-            MigrationRoute::Accounts(migration_accounts_route) => {
-                Some(Route::Accounts(migration_accounts_route.accounts_route()))
-            }
-            MigrationRoute::Relays => Some(Route::Relays),
-            MigrationRoute::ComposeNote => Some(Route::ComposeNote),
-            MigrationRoute::AddColumn(migration_add_column_route) => Some(Route::AddColumn(
-                migration_add_column_route.add_column_route(),
-            )),
-            MigrationRoute::Support => Some(Route::Support),
-        }
-    }
-}
-
-#[derive(Deserialize, Debug, PartialEq)]
-enum MigrationAccountsRoute {
-    Accounts,
-    AddAccount,
-}
-
-impl MigrationAccountsRoute {
-    fn accounts_route(self) -> AccountsRoute {
-        match self {
-            MigrationAccountsRoute::Accounts => AccountsRoute::Accounts,
-            MigrationAccountsRoute::AddAccount => AccountsRoute::AddAccount,
-        }
-    }
-}
-
-#[derive(Deserialize, Debug, PartialEq)]
-enum MigrationAddColumnRoute {
-    Base,
-    UndecidedNotification,
-    ExternalNotification,
-    Hashtag,
-}
-
-impl MigrationAddColumnRoute {
-    fn add_column_route(self) -> AddColumnRoute {
-        match self {
-            MigrationAddColumnRoute::Base => AddColumnRoute::Base,
-            MigrationAddColumnRoute::UndecidedNotification => AddColumnRoute::UndecidedNotification,
-            MigrationAddColumnRoute::ExternalNotification => AddColumnRoute::ExternalNotification,
-            MigrationAddColumnRoute::Hashtag => AddColumnRoute::Hashtag,
-        }
-    }
-}
-
-#[derive(Debug, PartialEq)]
-struct MigrationColumn {
-    routes: Vec<MigrationRoute>,
-}
-
-impl<'de> Deserialize<'de> for MigrationColumn {
-    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        let routes = Vec::<MigrationRoute>::deserialize(deserializer)?;
-
-        Ok(MigrationColumn { routes })
-    }
-}
-
-#[derive(Deserialize, Debug)]
-struct MigrationColumns {
-    columns: Vec<MigrationColumn>,
-    timelines: Vec<MigrationTimeline>,
-}
-
-#[derive(Deserialize, Debug, Clone, PartialEq)]
-struct MigrationTimeline {
-    id: u32,
-    kind: MigrationTimelineKind,
-}
-
-impl MigrationTimeline {
-    fn into_timeline(self, ndb: &Ndb, deck_user_pubkey: Option<&[u8; 32]>) -> Option<Timeline> {
-        self.kind
-            .into_timeline_kind()?
-            .into_timeline(ndb, deck_user_pubkey)
-    }
-}
-
-#[derive(Deserialize, Clone, Debug, PartialEq)]
-enum MigrationListKind {
-    Contact(MigrationPubkeySource),
-}
-
-impl MigrationListKind {
-    fn list_kind(self) -> Option<ListKind> {
-        match self {
-            MigrationListKind::Contact(migration_pubkey_source) => {
-                Some(ListKind::Contact(migration_pubkey_source.pubkey_source()?))
-            }
-        }
-    }
-}
-
-#[derive(Deserialize, Clone, Debug, PartialEq)]
-enum MigrationPubkeySource {
-    Explicit(String),
-    DeckAuthor,
-}
-
-impl MigrationPubkeySource {
-    fn pubkey_source(self) -> Option<PubkeySource> {
-        match self {
-            MigrationPubkeySource::Explicit(hex) => {
-                Some(PubkeySource::Explicit(Pubkey::from_hex(hex.as_str()).ok()?))
-            }
-            MigrationPubkeySource::DeckAuthor => Some(PubkeySource::DeckAuthor),
-        }
-    }
-}
-
-#[derive(Deserialize, Clone, Debug, PartialEq)]
-enum MigrationTimelineKind {
-    List(MigrationListKind),
-    Notifications(MigrationPubkeySource),
-    Profile(MigrationPubkeySource),
-    Universe,
-    Generic,
-    Hashtag(String),
-}
-
-impl MigrationTimelineKind {
-    fn into_timeline_kind(self) -> Option<TimelineKind> {
-        match self {
-            MigrationTimelineKind::List(migration_list_kind) => {
-                Some(TimelineKind::List(migration_list_kind.list_kind()?))
-            }
-            MigrationTimelineKind::Notifications(migration_pubkey_source) => Some(
-                TimelineKind::Notifications(migration_pubkey_source.pubkey_source()?),
-            ),
-            MigrationTimelineKind::Profile(migration_pubkey_source) => Some(TimelineKind::Profile(
-                migration_pubkey_source.pubkey_source()?,
-            )),
-            MigrationTimelineKind::Universe => Some(TimelineKind::Universe),
-            MigrationTimelineKind::Generic => Some(TimelineKind::Generic),
-            MigrationTimelineKind::Hashtag(hashtag) => Some(TimelineKind::Hashtag(hashtag)),
-        }
-    }
-}
-
-impl MigrationColumns {
-    fn into_columns(self, ndb: &Ndb, deck_pubkey: Option<&[u8; 32]>) -> Columns {
-        let mut columns = Columns::default();
-
-        for column in self.columns {
-            let mut cur_routes = Vec::new();
-            for route in column.routes {
-                match route {
-                    MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(timeline_id)) => {
-                        if let Some(migration_tl) =
-                            self.timelines.iter().find(|tl| tl.id == timeline_id)
-                        {
-                            let tl = migration_tl.clone().into_timeline(ndb, deck_pubkey);
-                            if let Some(tl) = tl {
-                                cur_routes.push(IntermediaryRoute::Timeline(tl));
-                            } else {
-                                error!("Problem deserializing timeline {:?}", migration_tl);
-                            }
-                        }
-                    }
-                    MigrationRoute::Timeline(MigrationTimelineRoute::Thread(_thread)) => {}
-                    _ => {
-                        if let Some(route) = route.route() {
-                            cur_routes.push(IntermediaryRoute::Route(route));
-                        }
-                    }
-                }
-            }
-            if !cur_routes.is_empty() {
-                columns.insert_intermediary_routes(cur_routes);
-            }
-        }
-        columns
-    }
-}
-
-fn string_to_columns(
-    serialized_columns: String,
-    ndb: &Ndb,
-    user: Option<&[u8; 32]>,
-) -> Option<Columns> {
-    Some(
-        deserialize_columns_string(serialized_columns)
-            .ok()?
-            .into_columns(ndb, user),
-    )
-}
-
-pub fn deserialize_columns(path: &DataPath, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option<Columns> {
-    string_to_columns(columns_json(path)?, ndb, user)
-}
-
-fn deserialize_columns_string(serialized_columns: String) -> Result<MigrationColumns> {
-    Ok(
-        serde_json::from_str::<MigrationColumns>(&serialized_columns)
-            .map_err(notedeck::Error::Json)?,
-    )
-}
-
-#[cfg(test)]
-mod tests {
-    use crate::storage::migration::{
-        MigrationColumn, MigrationListKind, MigrationPubkeySource, MigrationRoute,
-        MigrationTimeline, MigrationTimelineKind, MigrationTimelineRoute,
-    };
-
-    impl MigrationColumn {
-        fn from_route(route: MigrationRoute) -> Self {
-            Self {
-                routes: vec![route],
-            }
-        }
-
-        fn from_routes(routes: Vec<MigrationRoute>) -> Self {
-            Self { routes }
-        }
-    }
-
-    impl MigrationTimeline {
-        fn new(id: u32, kind: MigrationTimelineKind) -> Self {
-            Self { id, kind }
-        }
-    }
-
-    use super::*;
-
-    #[test]
-    fn multi_column() {
-        let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}],[{"Timeline":{"Timeline":0}}],[{"Timeline":{"Timeline":1}}]],"timelines":[{"id":0,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}},{"id":1,"kind":{"Hashtag":"introductions"}},{"id":2,"kind":"Universe"}]}"#; // Multi-column
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-
-        assert_eq!(migration_cols.columns.len(), 3);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_route(MigrationRoute::Timeline(
-                MigrationTimelineRoute::Timeline(2)
-            ))
-        );
-
-        assert_eq!(
-            *migration_cols.columns.get(1).unwrap(),
-            MigrationColumn::from_route(MigrationRoute::Timeline(
-                MigrationTimelineRoute::Timeline(0)
-            ))
-        );
-
-        assert_eq!(
-            *migration_cols.columns.get(2).unwrap(),
-            MigrationColumn::from_route(MigrationRoute::Timeline(
-                MigrationTimelineRoute::Timeline(1)
-            ))
-        );
-
-        assert_eq!(migration_cols.timelines.len(), 3);
-        assert_eq!(
-            *migration_cols.timelines.first().unwrap(),
-            MigrationTimeline::new(
-                0,
-                MigrationTimelineKind::List(MigrationListKind::Contact(
-                    MigrationPubkeySource::Explicit(
-                        "aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"
-                            .to_owned()
-                    )
-                ))
-            )
-        );
-        assert_eq!(
-            *migration_cols.timelines.get(1).unwrap(),
-            MigrationTimeline::new(
-                1,
-                MigrationTimelineKind::Hashtag("introductions".to_owned())
-            )
-        );
-
-        assert_eq!(
-            *migration_cols.timelines.get(2).unwrap(),
-            MigrationTimeline::new(2, MigrationTimelineKind::Universe)
-        )
-    }
-
-    #[test]
-    fn base() {
-        let route = r#"{"columns":[[{"AddColumn":"Base"}]],"timelines":[]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_route(MigrationRoute::AddColumn(MigrationAddColumnRoute::Base))
-        );
-
-        assert!(migration_cols.timelines.is_empty());
-    }
-
-    #[test]
-    fn universe() {
-        let route = r#"{"columns":[[{"Timeline":{"Timeline":0}}]],"timelines":[{"id":0,"kind":"Universe"}]}"#;
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_route(MigrationRoute::Timeline(
-                MigrationTimelineRoute::Timeline(0)
-            ))
-        );
-
-        assert_eq!(migration_cols.timelines.len(), 1);
-        assert_eq!(
-            *migration_cols.timelines.first().unwrap(),
-            MigrationTimeline::new(0, MigrationTimelineKind::Universe)
-        )
-    }
-
-    #[test]
-    fn home() {
-        let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}]],"timelines":[{"id":2,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}}]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_route(MigrationRoute::Timeline(
-                MigrationTimelineRoute::Timeline(2)
-            ))
-        );
-
-        assert_eq!(migration_cols.timelines.len(), 1);
-        assert_eq!(
-            *migration_cols.timelines.first().unwrap(),
-            MigrationTimeline::new(
-                2,
-                MigrationTimelineKind::List(MigrationListKind::Contact(
-                    MigrationPubkeySource::Explicit(
-                        "aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"
-                            .to_owned()
-                    )
-                ))
-            )
-        )
-    }
-
-    #[test]
-    fn thread() {
-        let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Thread":"fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_routes(vec![
-                MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7),),
-                MigrationRoute::Timeline(MigrationTimelineRoute::Thread(
-                    "fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25".to_owned()
-                )),
-            ])
-        );
-
-        assert_eq!(migration_cols.timelines.len(), 1);
-        assert_eq!(
-            *migration_cols.timelines.first().unwrap(),
-            MigrationTimeline::new(
-                7,
-                MigrationTimelineKind::List(MigrationListKind::Contact(
-                    MigrationPubkeySource::Explicit(
-                        "4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"
-                            .to_owned()
-                    )
-                ))
-            )
-        )
-    }
-
-    #[test]
-    fn profile() {
-        let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Profile":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_routes(vec![
-                MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7),),
-                MigrationRoute::Timeline(MigrationTimelineRoute::Profile(
-                    "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned()
-                )),
-            ])
-        );
-
-        assert_eq!(migration_cols.timelines.len(), 1);
-        assert_eq!(
-            *migration_cols.timelines.first().unwrap(),
-            MigrationTimeline::new(
-                7,
-                MigrationTimelineKind::List(MigrationListKind::Contact(
-                    MigrationPubkeySource::Explicit(
-                        "4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"
-                            .to_owned()
-                    )
-                ))
-            )
-        )
-    }
-
-    #[test]
-    fn your_notifs() {
-        let route = r#"{"columns":[[{"Timeline":{"Timeline":5}}]],"timelines":[{"id":5,"kind":{"Notifications":"DeckAuthor"}}]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_route(MigrationRoute::Timeline(
-                MigrationTimelineRoute::Timeline(5)
-            ))
-        );
-
-        assert_eq!(migration_cols.timelines.len(), 1);
-        assert_eq!(
-            *migration_cols.timelines.first().unwrap(),
-            MigrationTimeline::new(
-                5,
-                MigrationTimelineKind::Notifications(MigrationPubkeySource::DeckAuthor)
-            )
-        )
-    }
-
-    #[test]
-    fn undecided_notifs() {
-        let route = r#"{"columns":[[{"AddColumn":"Base"},{"AddColumn":"UndecidedNotification"}]],"timelines":[]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_routes(vec![
-                MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
-                MigrationRoute::AddColumn(MigrationAddColumnRoute::UndecidedNotification),
-            ])
-        );
-
-        assert!(migration_cols.timelines.is_empty());
-    }
-
-    #[test]
-    fn extern_notifs() {
-        let route = r#"{"columns":[[{"Timeline":{"Timeline":4}}]],"timelines":[{"id":4,"kind":{"Notifications":{"Explicit":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}}]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_route(MigrationRoute::Timeline(
-                MigrationTimelineRoute::Timeline(4)
-            ))
-        );
-
-        assert_eq!(migration_cols.timelines.len(), 1);
-        assert_eq!(
-            *migration_cols.timelines.first().unwrap(),
-            MigrationTimeline::new(
-                4,
-                MigrationTimelineKind::Notifications(MigrationPubkeySource::Explicit(
-                    "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned()
-                ))
-            )
-        )
-    }
-
-    #[test]
-    fn hashtag() {
-        let route = r#"{"columns":[[{"Timeline":{"Timeline":6}}]],"timelines":[{"id":6,"kind":{"Hashtag":"notedeck"}}]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_route(MigrationRoute::Timeline(
-                MigrationTimelineRoute::Timeline(6)
-            ))
-        );
-
-        assert_eq!(migration_cols.timelines.len(), 1);
-        assert_eq!(
-            *migration_cols.timelines.first().unwrap(),
-            MigrationTimeline::new(6, MigrationTimelineKind::Hashtag("notedeck".to_owned()))
-        )
-    }
-
-    #[test]
-    fn support() {
-        let route = r#"{"columns":[[{"AddColumn":"Base"},"Support"]],"timelines":[]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_routes(vec![
-                MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
-                MigrationRoute::Support
-            ])
-        );
-
-        assert!(migration_cols.timelines.is_empty());
-    }
-
-    #[test]
-    fn post() {
-        let route = r#"{"columns":[[{"AddColumn":"Base"},"ComposeNote"]],"timelines":[]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_routes(vec![
-                MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
-                MigrationRoute::ComposeNote
-            ])
-        );
-
-        assert!(migration_cols.timelines.is_empty());
-    }
-
-    #[test]
-    fn relay() {
-        let route = r#"{"columns":[[{"AddColumn":"Base"},"Relays"]],"timelines":[]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_routes(vec![
-                MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
-                MigrationRoute::Relays
-            ])
-        );
-
-        assert!(migration_cols.timelines.is_empty());
-    }
-
-    #[test]
-    fn accounts() {
-        let route =
-            r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"}]],"timelines":[]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_routes(vec![
-                MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
-                MigrationRoute::Accounts(MigrationAccountsRoute::Accounts),
-            ])
-        );
-
-        assert!(migration_cols.timelines.is_empty());
-    }
-
-    #[test]
-    fn login() {
-        let route = r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"},{"Accounts":"AddAccount"}]],"timelines":[]}"#;
-
-        let deserialized_columns = deserialize_columns_string(route.to_string());
-        assert!(deserialized_columns.is_ok());
-
-        let migration_cols = deserialized_columns.unwrap();
-        assert_eq!(migration_cols.columns.len(), 1);
-        assert_eq!(
-            *migration_cols.columns.first().unwrap(),
-            MigrationColumn::from_routes(vec![
-                MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
-                MigrationRoute::Accounts(MigrationAccountsRoute::Accounts),
-                MigrationRoute::Accounts(MigrationAccountsRoute::AddAccount),
-            ])
-        );
-
-        assert!(migration_cols.timelines.is_empty());
-    }
-}
@@ -1,5 +1,3 @@
 mod decks;
-mod migration;

 pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE};
-pub use migration::{deserialize_columns, COLUMNS_FILE};
@@ -1,4 +1,4 @@
-use crate::timeline::{TimelineId, TimelineKind};
+use crate::timeline::TimelineKind;
 use std::collections::HashMap;
 use uuid::Uuid;

@@ -16,7 +16,7 @@ pub enum SubKind {
     /// We are fetching a contact list so that we can use it for our follows
     /// Filter.
     // TODO: generalize this to any list?
-    FetchingContactList(TimelineId),
+    FetchingContactList(TimelineKind),
 }

 /// Subscriptions that need to be tracked at various stages. Sometimes we
@@ -1,27 +0,0 @@
-use crate::{multi_subscriber::MultiSubscriber, timeline::Timeline};
-
-use nostrdb::FilterBuilder;
-use notedeck::{RootNoteId, RootNoteIdBuf};
-
-pub struct Thread {
-    pub timeline: Timeline,
-    pub subscription: Option<MultiSubscriber>,
-}
-
-impl Thread {
-    pub fn new(root_id: RootNoteIdBuf) -> Self {
-        let timeline = Timeline::thread(root_id);
-
-        Thread {
-            timeline,
-            subscription: None,
-        }
-    }
-
-    pub fn filters_raw(root_id: RootNoteId<'_>) -> Vec<FilterBuilder> {
-        vec![
-            nostrdb::Filter::new().kinds([1]).event(root_id.bytes()),
-            nostrdb::Filter::new().ids([root_id.bytes()]).limit(1),
-        ]
-    }
-}
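The removed `Thread` holder above is replaced by the unified timeline cache in the next file; a minimal sketch of the resulting call pattern (the surrounding bindings such as `ndb`, `pool`, `note_cache`, `timeline_cache` and `root_id` are illustrative assumptions, not code from this commit):

```rust
// Sketch only: a thread is now just another TimelineKind in the shared cache.
let kind = TimelineKind::Thread(ThreadSelection::from_root_id(root_id));

{
    // Opening bumps the timeline's MultiSubscriber (or creates the subscription).
    let txn = Transaction::new(&ndb).expect("txn");
    let _result = timeline_cache.open(&ndb, &mut note_cache, &txn, &mut pool, &kind);
}

// Closing the last column for this kind unsubscribes and evicts the timeline.
let _ = timeline_cache.pop(&kind, &mut ndb, &mut pool);
```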
@@ -1,23 +1,21 @@
 use crate::{
     actionbar::TimelineOpenResult,
+    error::Error,
     multi_subscriber::MultiSubscriber,
-    profile::Profile,
-    thread::Thread,
     //subscriptions::SubRefs,
-    timeline::{PubkeySource, Timeline},
+    timeline::{Timeline, TimelineKind},
 };
 
-use notedeck::{NoteCache, NoteRef, RootNoteId, RootNoteIdBuf};
+use notedeck::{filter, FilterState, NoteCache, NoteRef};
 
-use enostr::{Pubkey, PubkeyRef, RelayPool};
-use nostrdb::{Filter, FilterBuilder, Ndb, Transaction};
+use enostr::RelayPool;
+use nostrdb::{Filter, Ndb, Transaction};
 use std::collections::HashMap;
-use tracing::{debug, info, warn};
+use tracing::{debug, error, info, warn};
 
 #[derive(Default)]
 pub struct TimelineCache {
-    pub threads: HashMap<RootNoteIdBuf, Thread>,
-    pub profiles: HashMap<Pubkey, Profile>,
+    pub timelines: HashMap<TimelineKind, Timeline>,
 }
 
 pub enum Vitality<'a, M> {
@@ -41,102 +39,64 @@ impl<'a, M> Vitality<'a, M> {
     }
 }
 
-#[derive(Hash, Debug, Copy, Clone)]
-pub enum TimelineCacheKey<'a> {
-    Profile(PubkeyRef<'a>),
-    Thread(RootNoteId<'a>),
-}
-
-impl<'a> TimelineCacheKey<'a> {
-    pub fn profile(pubkey: PubkeyRef<'a>) -> Self {
-        Self::Profile(pubkey)
-    }
-
-    pub fn thread(root_id: RootNoteId<'a>) -> Self {
-        Self::Thread(root_id)
-    }
-
-    pub fn bytes(&self) -> &[u8; 32] {
-        match self {
-            Self::Profile(pk) => pk.bytes(),
-            Self::Thread(root_id) => root_id.bytes(),
-        }
-    }
-
-    /// The filters used to update our timeline cache
-    pub fn filters_raw(&self) -> Vec<FilterBuilder> {
-        match self {
-            TimelineCacheKey::Thread(root_id) => Thread::filters_raw(*root_id),
-
-            TimelineCacheKey::Profile(pubkey) => vec![Filter::new()
-                .authors([pubkey.bytes()])
-                .kinds([1])
-                .limit(notedeck::filter::default_limit())],
-        }
-    }
-
-    pub fn filters_since(&self, since: u64) -> Vec<Filter> {
-        self.filters_raw()
-            .into_iter()
-            .map(|fb| fb.since(since).build())
-            .collect()
-    }
-
-    pub fn filters(&self) -> Vec<Filter> {
-        self.filters_raw()
-            .into_iter()
-            .map(|mut fb| fb.build())
-            .collect()
-    }
-}
-
 impl TimelineCache {
-    fn contains_key(&self, key: TimelineCacheKey<'_>) -> bool {
-        match key {
-            TimelineCacheKey::Profile(pubkey) => self.profiles.contains_key(pubkey.bytes()),
-            TimelineCacheKey::Thread(root_id) => self.threads.contains_key(root_id.bytes()),
+    /// Pop a timeline from the timeline cache. This only removes the timeline
+    /// if it has reached 0 subscribers, meaning it was the last one to be
+    /// removed
+    pub fn pop(
+        &mut self,
+        id: &TimelineKind,
+        ndb: &mut Ndb,
+        pool: &mut RelayPool,
+    ) -> Result<(), Error> {
+        let timeline = if let Some(timeline) = self.timelines.get_mut(id) {
+            timeline
+        } else {
+            return Err(Error::TimelineNotFound);
+        };
+
+        if let Some(sub) = &mut timeline.subscription {
+            // if this is the last subscriber, remove the timeline from cache
+            if sub.unsubscribe(ndb, pool) {
+                debug!(
+                    "popped last timeline {:?}, removing from timeline cache",
+                    id
+                );
+                self.timelines.remove(id);
+            }
+
+            Ok(())
+        } else {
+            Err(Error::MissingSubscription)
         }
     }
 
-    fn get_expected_mut(&mut self, key: TimelineCacheKey<'_>) -> &mut Timeline {
-        match key {
-            TimelineCacheKey::Profile(pubkey) => self
-                .profiles
-                .get_mut(pubkey.bytes())
-                .map(|p| &mut p.timeline),
-            TimelineCacheKey::Thread(root_id) => self
-                .threads
-                .get_mut(root_id.bytes())
-                .map(|t| &mut t.timeline),
-        }
-        .expect("expected notes in timline cache")
+    fn get_expected_mut(&mut self, key: &TimelineKind) -> &mut Timeline {
+        self.timelines
+            .get_mut(key)
+            .expect("expected notes in timline cache")
     }
 
-    /// Insert a new profile or thread into the cache, based on the TimelineCacheKey
+    /// Insert a new timeline into the cache, based on the TimelineKind
    #[allow(clippy::too_many_arguments)]
     fn insert_new(
         &mut self,
-        id: TimelineCacheKey<'_>,
+        id: TimelineKind,
         txn: &Transaction,
         ndb: &Ndb,
         notes: &[NoteRef],
         note_cache: &mut NoteCache,
-        filters: Vec<Filter>,
     ) {
-        match id {
-            TimelineCacheKey::Profile(pubkey) => {
-                let mut profile = Profile::new(PubkeySource::Explicit(pubkey.to_owned()), filters);
-                // insert initial notes into timeline
-                profile.timeline.insert_new(txn, ndb, note_cache, notes);
-                self.profiles.insert(pubkey.to_owned(), profile);
-            }
+        let mut timeline = if let Some(timeline) = id.clone().into_timeline(txn, ndb) {
+            timeline
+        } else {
+            error!("Error creating timeline from {:?}", &id);
+            return;
+        };
 
-            TimelineCacheKey::Thread(root_id) => {
-                let mut thread = Thread::new(root_id.to_owned());
-                thread.timeline.insert_new(txn, ndb, note_cache, notes);
-                self.threads.insert(root_id.to_owned(), thread);
-            }
-        }
+        // insert initial notes into timeline
+        timeline.insert_new(txn, ndb, note_cache, notes);
+        self.timelines.insert(id, timeline);
     }
 
     /// Get and/or update the notes associated with this timeline
@@ -145,24 +105,28 @@ impl TimelineCache {
         ndb: &Ndb,
         note_cache: &mut NoteCache,
         txn: &Transaction,
-        id: TimelineCacheKey<'a>,
+        id: &TimelineKind,
     ) -> Vitality<'a, Timeline> {
         // we can't use the naive hashmap entry API here because lookups
         // require a copy, wait until we have a raw entry api. We could
         // also use hashbrown?
 
-        if self.contains_key(id) {
+        if self.timelines.contains_key(id) {
             return Vitality::Stale(self.get_expected_mut(id));
         }
 
-        let filters = id.filters();
-        let notes = if let Ok(results) = ndb.query(txn, &filters, 1000) {
+        let notes = if let FilterState::Ready(filters) = id.filters(txn, ndb) {
+            if let Ok(results) = ndb.query(txn, &filters, 1000) {
                 results
                     .into_iter()
                     .map(NoteRef::from_query_result)
                     .collect()
+            } else {
+                debug!("got no results from TimelineCache lookup for {:?}", id);
+                vec![]
+            }
         } else {
-            debug!("got no results from TimelineCache lookup for {:?}", id);
+            // filter is not ready yet
             vec![]
         };
 
@@ -172,44 +136,37 @@ impl TimelineCache {
             info!("found NotesHolder with {} notes", notes.len());
         }
 
-        self.insert_new(id, txn, ndb, &notes, note_cache, filters);
+        self.insert_new(id.to_owned(), txn, ndb, &notes, note_cache);
 
         Vitality::Fresh(self.get_expected_mut(id))
     }
 
-    pub fn subscription(
-        &mut self,
-        id: TimelineCacheKey<'_>,
-    ) -> Option<&mut Option<MultiSubscriber>> {
-        match id {
-            TimelineCacheKey::Profile(pubkey) => self
-                .profiles
-                .get_mut(pubkey.bytes())
-                .map(|p| &mut p.subscription),
-            TimelineCacheKey::Thread(root_id) => self
-                .threads
-                .get_mut(root_id.bytes())
-                .map(|t| &mut t.subscription),
-        }
-    }
-
-    pub fn open<'a>(
+    /// Open a timeline, this is another way of saying insert a timeline
+    /// into the timeline cache. If there exists a timeline already, we
+    /// bump its subscription reference count. If it's new we start a new
+    /// subscription
+    pub fn open(
         &mut self,
         ndb: &Ndb,
         note_cache: &mut NoteCache,
         txn: &Transaction,
         pool: &mut RelayPool,
-        id: TimelineCacheKey<'a>,
-    ) -> Option<TimelineOpenResult<'a>> {
-        let result = match self.notes(ndb, note_cache, txn, id) {
+        id: &TimelineKind,
+    ) -> Option<TimelineOpenResult> {
+        let (open_result, timeline) = match self.notes(ndb, note_cache, txn, id) {
             Vitality::Stale(timeline) => {
                 // The timeline cache is stale, let's update it
-                let notes = find_new_notes(timeline.all_or_any_notes(), id, txn, ndb);
-                let cached_timeline_result = if notes.is_empty() {
+                let notes = find_new_notes(
+                    timeline.all_or_any_notes(),
+                    timeline.subscription.as_ref().map(|s| &s.filters)?,
+                    txn,
+                    ndb,
+                );
+                let open_result = if notes.is_empty() {
                     None
                 } else {
                     let new_notes = notes.iter().map(|n| n.key).collect();
-                    Some(TimelineOpenResult::new_notes(new_notes, id))
+                    Some(TimelineOpenResult::new_notes(new_notes, id.clone()))
                 };
 
                 // we can't insert and update the VirtualList now, because we
@@ -217,42 +174,36 @@ impl TimelineCache {
                 // result instead
                 //
                 // holder.get_view().insert(&notes); <-- no
-                cached_timeline_result
+                (open_result, timeline)
             }
 
-            Vitality::Fresh(_timeline) => None,
+            Vitality::Fresh(timeline) => (None, timeline),
         };
 
-        let sub_id = if let Some(sub) = self.subscription(id) {
-            if let Some(multi_subscriber) = sub {
-                multi_subscriber.subscribe(ndb, pool);
-                multi_subscriber.sub.as_ref().map(|s| s.local)
-            } else {
-                let mut multi_sub = MultiSubscriber::new(id.filters());
-                multi_sub.subscribe(ndb, pool);
-                let sub_id = multi_sub.sub.as_ref().map(|s| s.local);
-                *sub = Some(multi_sub);
-                sub_id
-            }
+        if let Some(multi_sub) = &mut timeline.subscription {
+            debug!("got open with *old* subscription for {:?}", &timeline.kind);
+            multi_sub.subscribe(ndb, pool);
+        } else if let Some(filter) = timeline.filter.get_any_ready() {
+            debug!("got open with *new* subscription for {:?}", &timeline.kind);
+            let mut multi_sub = MultiSubscriber::new(filter.clone());
+            multi_sub.subscribe(ndb, pool);
+            timeline.subscription = Some(multi_sub);
         } else {
-            None
+            // This should never happen reasoning, self.notes would have
+            // failed above if the filter wasn't ready
+            error!(
+                "open: filter not ready, so could not setup subscription. this should never happen"
+            );
         };
 
-        let timeline = self.get_expected_mut(id);
-        if let Some(sub_id) = sub_id {
-            timeline.subscription = Some(sub_id);
-        }
-
-        // TODO: We have subscription ids tracked in different places. Fix this
-
-        result
+        open_result
     }
 }
 
 /// Look for new thread notes since our last fetch
 fn find_new_notes(
     notes: &[NoteRef],
-    id: TimelineCacheKey<'_>,
+    filters: &[Filter],
     txn: &Transaction,
     ndb: &Ndb,
 ) -> Vec<NoteRef> {
@@ -261,7 +212,7 @@ fn find_new_notes(
     }
 
     let last_note = notes[0];
-    let filters = id.filters_since(last_note.created_at + 1);
+    let filters = filter::make_filters_since(filters, last_note.created_at + 1);
 
     if let Ok(results) = ndb.query(txn, &filters, 1000) {
         debug!("got {} results from NotesHolder update", results.len());
@@ -1,23 +1,35 @@
 use crate::error::Error;
 use crate::timeline::{Timeline, TimelineTab};
-use enostr::{Filter, Pubkey};
+use enostr::{Filter, NoteId, Pubkey};
 use nostrdb::{Ndb, Transaction};
-use notedeck::{filter::default_limit, FilterError, FilterState, RootNoteIdBuf};
+use notedeck::{
+    filter::{self, default_limit},
+    FilterError, FilterState, NoteCache, RootIdError, RootNoteIdBuf,
+};
 use serde::{Deserialize, Serialize};
+use std::hash::{Hash, Hasher};
 use std::{borrow::Cow, fmt::Display};
 use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
 use tracing::{error, warn};
 
-#[derive(Clone, Default, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Clone, Hash, Copy, Default, Debug, PartialEq, Eq, Serialize, Deserialize)]
 pub enum PubkeySource {
     Explicit(Pubkey),
     #[default]
     DeckAuthor,
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)]
 pub enum ListKind {
-    Contact(PubkeySource),
+    Contact(Pubkey),
+}
+
+impl ListKind {
+    pub fn pubkey(&self) -> Option<&Pubkey> {
+        match self {
+            Self::Contact(pk) => Some(pk),
+        }
+    }
 }
 
 impl PubkeySource {
@@ -31,13 +43,6 @@ impl PubkeySource {
             PubkeySource::DeckAuthor => deck_author,
         }
     }
-
-    pub fn to_pubkey_bytes<'a>(&'a self, deck_author: &'a [u8; 32]) -> &'a [u8; 32] {
-        match self {
-            PubkeySource::Explicit(pk) => pk.bytes(),
-            PubkeySource::DeckAuthor => deck_author,
-        }
-    }
 }
 
 impl TokenSerializable for PubkeySource {
@@ -77,32 +82,18 @@ impl TokenSerializable for PubkeySource {
 }
 
 impl ListKind {
-    pub fn contact_list(pk_src: PubkeySource) -> Self {
-        ListKind::Contact(pk_src)
+    pub fn contact_list(pk: Pubkey) -> Self {
+        ListKind::Contact(pk)
     }
 
-    pub fn pubkey_source(&self) -> Option<&PubkeySource> {
-        match self {
-            ListKind::Contact(pk_src) => Some(pk_src),
-        }
-    }
-}
-
-impl TokenSerializable for ListKind {
-    fn serialize_tokens(&self, writer: &mut TokenWriter) {
-        match self {
-            ListKind::Contact(pk_src) => {
-                writer.write_token("contact");
-                pk_src.serialize_tokens(writer);
-            }
-        }
-    }
-
-    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+    pub fn parse<'a>(
+        parser: &mut TokenParser<'a>,
+        deck_author: &Pubkey,
+    ) -> Result<Self, ParseError<'a>> {
         parser.parse_all(|p| {
             p.parse_token("contact")?;
             let pk_src = PubkeySource::parse_from_tokens(p)?;
-            Ok(ListKind::Contact(pk_src))
+            Ok(ListKind::Contact(*pk_src.to_pubkey(deck_author)))
        })

        /* here for u when you need more things to parse
@@ -120,8 +111,80 @@ impl TokenSerializable for ListKind {
        )
        */
     }
 
+    pub fn serialize_tokens(&self, writer: &mut TokenWriter) {
+        match self {
+            ListKind::Contact(pk) => {
+                writer.write_token("contact");
+                PubkeySource::pubkey(*pk).serialize_tokens(writer);
+            }
+        }
+    }
 }
 
+/// Thread selection hashing is done in a specific way. For TimelineCache
+/// lookups, we want to only let the root_id influence thread selection.
+/// This way Thread TimelineKinds always map to the same cached timeline
+/// for now (we will likely have to rework this since threads aren't
+/// *really* timelines)
+#[derive(Debug, Clone)]
+pub struct ThreadSelection {
+    pub root_id: RootNoteIdBuf,
+
+    /// The selected note, if different than the root_id. None here
+    /// means the root is selected
+    pub selected_note: Option<NoteId>,
+}
+
+impl ThreadSelection {
+    pub fn selected_or_root(&self) -> &[u8; 32] {
+        self.selected_note
+            .as_ref()
+            .map(|sn| sn.bytes())
+            .unwrap_or(self.root_id.bytes())
+    }
+
+    pub fn from_root_id(root_id: RootNoteIdBuf) -> Self {
+        Self {
+            root_id,
+            selected_note: None,
+        }
+    }
+
+    pub fn from_note_id(
+        ndb: &Ndb,
+        note_cache: &mut NoteCache,
+        txn: &Transaction,
+        note_id: NoteId,
+    ) -> Result<Self, RootIdError> {
+        let root_id = RootNoteIdBuf::new(ndb, note_cache, txn, note_id.bytes())?;
+        Ok(if root_id.bytes() == note_id.bytes() {
+            Self::from_root_id(root_id)
+        } else {
+            Self {
+                root_id,
+                selected_note: Some(note_id),
+            }
+        })
+    }
+}
+
+impl Hash for ThreadSelection {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        // only hash the root id for thread selection
+        self.root_id.hash(state)
+    }
+}
+
+// need this to only match root_id or else hash lookups will fail
+impl PartialEq for ThreadSelection {
+    fn eq(&self, other: &Self) -> bool {
+        self.root_id == other.root_id
+    }
+}
+
+impl Eq for ThreadSelection {}
+
 ///
 /// What kind of timeline is it?
 ///   - Follow List
@@ -130,24 +193,23 @@ impl TokenSerializable for ListKind {
 ///   - filter
 ///   - ... etc
 ///
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum TimelineKind {
     List(ListKind),
 
     /// The last not per pubkey
     Algo(AlgoTimeline),
 
-    Notifications(PubkeySource),
+    Notifications(Pubkey),
 
-    Profile(PubkeySource),
+    Profile(Pubkey),
 
-    /// This could be any note id, doesn't need to be the root id
-    Thread(RootNoteIdBuf),
+    Thread(ThreadSelection),
 
     Universe,
 
-    /// Generic filter
-    Generic,
+    /// Generic filter, references a hash of a filter
+    Generic(u64),
 
     Hashtag(String),
 }
@@ -155,86 +217,8 @@ pub enum TimelineKind {
 const NOTIFS_TOKEN_DEPRECATED: &str = "notifs";
 const NOTIFS_TOKEN: &str = "notifications";
 
-fn parse_hex_id<'a>(parser: &mut TokenParser<'a>) -> Result<[u8; 32], ParseError<'a>> {
-    let hex = parser.pull_token()?;
-    hex::decode(hex)
-        .map_err(|_| ParseError::HexDecodeFailed)?
-        .as_slice()
-        .try_into()
-        .map_err(|_| ParseError::HexDecodeFailed)
-}
-
-impl TokenSerializable for TimelineKind {
-    fn serialize_tokens(&self, writer: &mut TokenWriter) {
-        match self {
-            TimelineKind::List(list_kind) => list_kind.serialize_tokens(writer),
-            TimelineKind::Algo(algo_timeline) => algo_timeline.serialize_tokens(writer),
-            TimelineKind::Notifications(pk_src) => {
-                writer.write_token(NOTIFS_TOKEN);
-                pk_src.serialize_tokens(writer);
-            }
-            TimelineKind::Profile(pk_src) => {
-                writer.write_token("profile");
-                pk_src.serialize_tokens(writer);
-            }
-            TimelineKind::Thread(root_note_id) => {
-                writer.write_token("thread");
-                writer.write_token(&root_note_id.hex());
-            }
-            TimelineKind::Universe => {
-                writer.write_token("universe");
-            }
-            TimelineKind::Generic => {
-                writer.write_token("generic");
-            }
-            TimelineKind::Hashtag(ht) => {
-                writer.write_token("hashtag");
-                writer.write_token(ht);
-            }
-        }
-    }
-
-    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
-        TokenParser::alt(
-            parser,
-            &[
-                |p| Ok(TimelineKind::List(ListKind::parse_from_tokens(p)?)),
-                |p| Ok(TimelineKind::Algo(AlgoTimeline::parse_from_tokens(p)?)),
-                |p| {
-                    // still handle deprecated form (notifs)
-                    p.parse_any_token(&[NOTIFS_TOKEN, NOTIFS_TOKEN_DEPRECATED])?;
-                    Ok(TimelineKind::Notifications(
-                        PubkeySource::parse_from_tokens(p)?,
-                    ))
-                },
-                |p| {
-                    p.parse_token("profile")?;
-                    Ok(TimelineKind::Profile(PubkeySource::parse_from_tokens(p)?))
-                },
-                |p| {
-                    p.parse_token("thread")?;
-                    let note_id = RootNoteIdBuf::new_unsafe(parse_hex_id(p)?);
-                    Ok(TimelineKind::Thread(note_id))
-                },
-                |p| {
-                    p.parse_token("universe")?;
-                    Ok(TimelineKind::Universe)
-                },
-                |p| {
-                    p.parse_token("generic")?;
-                    Ok(TimelineKind::Generic)
-                },
-                |p| {
-                    p.parse_token("hashtag")?;
-                    Ok(TimelineKind::Hashtag(p.pull_token()?.to_string()))
-                },
-            ],
-        )
-    }
-}
-
 /// Hardcoded algo timelines
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
 pub enum AlgoTimeline {
     /// LastPerPubkey: a special nostr query that fetches the last N
     /// notes for each pubkey on the list
@@ -244,8 +228,8 @@ pub enum AlgoTimeline {
 /// The identifier for our last per pubkey algo
 const LAST_PER_PUBKEY_TOKEN: &str = "last_per_pubkey";
 
-impl TokenSerializable for AlgoTimeline {
-    fn serialize_tokens(&self, writer: &mut TokenWriter) {
+impl AlgoTimeline {
+    pub fn serialize_tokens(&self, writer: &mut TokenWriter) {
         match self {
             AlgoTimeline::LastPerPubkey(list_kind) => {
                 writer.write_token(LAST_PER_PUBKEY_TOKEN);
@@ -254,16 +238,17 @@ impl TokenSerializable for AlgoTimeline {
         }
     }
 
-    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
-        TokenParser::alt(
-            parser,
-            &[|p| {
-                p.parse_all(|p| {
-                    p.parse_token(LAST_PER_PUBKEY_TOKEN)?;
-                    Ok(AlgoTimeline::LastPerPubkey(ListKind::parse_from_tokens(p)?))
-                })
-            }],
-        )
+    pub fn parse<'a>(
+        parser: &mut TokenParser<'a>,
+        deck_author: &Pubkey,
+    ) -> Result<Self, ParseError<'a>> {
+        parser.parse_all(|p| {
+            p.parse_token(LAST_PER_PUBKEY_TOKEN)?;
+            Ok(AlgoTimeline::LastPerPubkey(ListKind::parse(
+                p,
+                deck_author,
+            )?))
+        })
     }
 }
 
@@ -272,7 +257,7 @@ impl Display for TimelineKind {
         match self {
             TimelineKind::List(ListKind::Contact(_src)) => f.write_str("Contacts"),
             TimelineKind::Algo(AlgoTimeline::LastPerPubkey(_lk)) => f.write_str("Last Notes"),
-            TimelineKind::Generic => f.write_str("Timeline"),
+            TimelineKind::Generic(_) => f.write_str("Timeline"),
             TimelineKind::Notifications(_) => f.write_str("Notifications"),
             TimelineKind::Profile(_) => f.write_str("Profile"),
             TimelineKind::Universe => f.write_str("Universe"),
@@ -283,14 +268,14 @@ impl Display for TimelineKind {
 }
 
 impl TimelineKind {
-    pub fn pubkey_source(&self) -> Option<&PubkeySource> {
+    pub fn pubkey(&self) -> Option<&Pubkey> {
         match self {
-            TimelineKind::List(list_kind) => list_kind.pubkey_source(),
-            TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => list_kind.pubkey_source(),
-            TimelineKind::Notifications(pk_src) => Some(pk_src),
-            TimelineKind::Profile(pk_src) => Some(pk_src),
+            TimelineKind::List(list_kind) => list_kind.pubkey(),
+            TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => list_kind.pubkey(),
+            TimelineKind::Notifications(pk) => Some(pk),
+            TimelineKind::Profile(pk) => Some(pk),
             TimelineKind::Universe => None,
-            TimelineKind::Generic => None,
+            TimelineKind::Generic(_) => None,
             TimelineKind::Hashtag(_ht) => None,
             TimelineKind::Thread(_ht) => None,
         }
@@ -305,17 +290,108 @@ impl TimelineKind {
             TimelineKind::Notifications(_pk_src) => true,
             TimelineKind::Profile(_pk_src) => true,
             TimelineKind::Universe => true,
-            TimelineKind::Generic => true,
+            TimelineKind::Generic(_) => true,
             TimelineKind::Hashtag(_ht) => true,
             TimelineKind::Thread(_ht) => true,
         }
     }
 
+    pub fn serialize_tokens(&self, writer: &mut TokenWriter) {
+        match self {
+            TimelineKind::List(list_kind) => list_kind.serialize_tokens(writer),
+            TimelineKind::Algo(algo_timeline) => algo_timeline.serialize_tokens(writer),
+            TimelineKind::Notifications(pk) => {
+                writer.write_token(NOTIFS_TOKEN);
+                PubkeySource::pubkey(*pk).serialize_tokens(writer);
+            }
+            TimelineKind::Profile(pk) => {
+                writer.write_token("profile");
+                PubkeySource::pubkey(*pk).serialize_tokens(writer);
+            }
+            TimelineKind::Thread(root_note_id) => {
+                writer.write_token("thread");
+                writer.write_token(&root_note_id.root_id.hex());
+            }
+            TimelineKind::Universe => {
+                writer.write_token("universe");
+            }
+            TimelineKind::Generic(_usize) => {
+                // TODO: lookup filter and then serialize
+                writer.write_token("generic");
+            }
+            TimelineKind::Hashtag(ht) => {
+                writer.write_token("hashtag");
+                writer.write_token(ht);
+            }
+        }
+    }
+
+    pub fn parse<'a>(
+        parser: &mut TokenParser<'a>,
+        deck_author: &Pubkey,
+    ) -> Result<Self, ParseError<'a>> {
+        let profile = parser.try_parse(|p| {
+            p.parse_token("profile")?;
+            let pk_src = PubkeySource::parse_from_tokens(p)?;
+            Ok(TimelineKind::Profile(*pk_src.to_pubkey(deck_author)))
+        });
+        if profile.is_ok() {
+            return profile;
+        }
+
+        let notifications = parser.try_parse(|p| {
+            // still handle deprecated form (notifs)
+            p.parse_any_token(&[NOTIFS_TOKEN, NOTIFS_TOKEN_DEPRECATED])?;
+            let pk_src = PubkeySource::parse_from_tokens(p)?;
+            Ok(TimelineKind::Notifications(*pk_src.to_pubkey(deck_author)))
+        });
+        if notifications.is_ok() {
+            return notifications;
+        }
+
+        let list_tl =
+            parser.try_parse(|p| Ok(TimelineKind::List(ListKind::parse(p, deck_author)?)));
+        if list_tl.is_ok() {
+            return list_tl;
+        }
+
+        let algo_tl =
+            parser.try_parse(|p| Ok(TimelineKind::Algo(AlgoTimeline::parse(p, deck_author)?)));
+        if algo_tl.is_ok() {
+            return algo_tl;
+        }
+
+        TokenParser::alt(
+            parser,
+            &[
+                |p| {
+                    p.parse_token("thread")?;
+                    Ok(TimelineKind::Thread(ThreadSelection::from_root_id(
+                        RootNoteIdBuf::new_unsafe(tokenator::parse_hex_id(p)?),
+                    )))
+                },
+                |p| {
+                    p.parse_token("universe")?;
+                    Ok(TimelineKind::Universe)
+                },
+                |p| {
+                    p.parse_token("generic")?;
+                    // TODO: generic filter serialization
+                    Ok(TimelineKind::Generic(0))
+                },
+                |p| {
+                    p.parse_token("hashtag")?;
+                    Ok(TimelineKind::Hashtag(p.pull_token()?.to_string()))
+                },
+            ],
+        )
+    }
+
     pub fn last_per_pubkey(list_kind: ListKind) -> Self {
         TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind))
     }
 
-    pub fn contact_list(pk: PubkeySource) -> Self {
+    pub fn contact_list(pk: Pubkey) -> Self {
         TimelineKind::List(ListKind::contact_list(pk))
     }
 
@@ -323,51 +399,98 @@ impl TimelineKind {
         matches!(self, TimelineKind::List(ListKind::Contact(_)))
     }
 
-    pub fn profile(pk: PubkeySource) -> Self {
+    pub fn profile(pk: Pubkey) -> Self {
         TimelineKind::Profile(pk)
     }
 
-    pub fn thread(root_id: RootNoteIdBuf) -> Self {
-        TimelineKind::Thread(root_id)
+    pub fn thread(selected_note: ThreadSelection) -> Self {
+        TimelineKind::Thread(selected_note)
     }
 
     pub fn is_notifications(&self) -> bool {
         matches!(self, TimelineKind::Notifications(_))
     }
 
-    pub fn notifications(pk: PubkeySource) -> Self {
+    pub fn notifications(pk: Pubkey) -> Self {
         TimelineKind::Notifications(pk)
     }
 
-    pub fn into_timeline(self, ndb: &Ndb, default_user: Option<&[u8; 32]>) -> Option<Timeline> {
+    // TODO: probably should set default limit here
+    pub fn filters(&self, txn: &Transaction, ndb: &Ndb) -> FilterState {
+        match self {
+            TimelineKind::Universe => FilterState::ready(universe_filter()),
+
+            TimelineKind::List(list_k) => match list_k {
+                ListKind::Contact(pubkey) => contact_filter_state(txn, ndb, pubkey),
+            },
+
+            // TODO: still need to update this to fetch likes, zaps, etc
+            TimelineKind::Notifications(pubkey) => FilterState::ready(vec![Filter::new()
+                .pubkeys([pubkey.bytes()])
+                .kinds([1])
+                .limit(default_limit())
+                .build()]),
+
+            TimelineKind::Hashtag(hashtag) => FilterState::ready(vec![Filter::new()
+                .kinds([1])
+                .limit(filter::default_limit())
+                .tags([hashtag.clone()], 't')
+                .build()]),
+
+            TimelineKind::Algo(algo_timeline) => match algo_timeline {
+                AlgoTimeline::LastPerPubkey(list_k) => match list_k {
+                    ListKind::Contact(pubkey) => last_per_pubkey_filter_state(ndb, pubkey),
+                },
+            },
+
+            TimelineKind::Generic(_) => {
+                todo!("implement generic filter lookups")
+            }
+
+            TimelineKind::Thread(selection) => FilterState::ready(vec![
+                nostrdb::Filter::new()
+                    .kinds([1])
+                    .event(selection.root_id.bytes())
+                    .build(),
+                nostrdb::Filter::new()
+                    .ids([selection.root_id.bytes()])
+                    .limit(1)
+                    .build(),
+            ]),
+
+            TimelineKind::Profile(pk) => FilterState::ready(vec![Filter::new()
+                .authors([pk.bytes()])
+                .kinds([1])
+                .limit(default_limit())
+                .build()]),
+        }
+    }
+
+    pub fn into_timeline(self, txn: &Transaction, ndb: &Ndb) -> Option<Timeline> {
         match self {
             TimelineKind::Universe => Some(Timeline::new(
                 TimelineKind::Universe,
-                FilterState::ready(vec![Filter::new()
-                    .kinds([1])
-                    .limit(default_limit())
-                    .build()]),
+                FilterState::ready(universe_filter()),
                 TimelineTab::no_replies(),
             )),
 
             TimelineKind::Thread(root_id) => Some(Timeline::thread(root_id)),
 
-            TimelineKind::Generic => {
+            TimelineKind::Generic(_filter_id) => {
                 warn!("you can't convert a TimelineKind::Generic to a Timeline");
+                // TODO: you actually can! just need to look up the filter id
                 None
             }
 
-            TimelineKind::Algo(AlgoTimeline::LastPerPubkey(ListKind::Contact(pk_src))) => {
-                let pk = match &pk_src {
-                    PubkeySource::DeckAuthor => default_user?,
-                    PubkeySource::Explicit(pk) => pk.bytes(),
-                };
-
-                let contact_filter = Filter::new().authors([pk]).kinds([3]).limit(1).build();
-
-                let txn = Transaction::new(ndb).expect("txn");
+            TimelineKind::Algo(AlgoTimeline::LastPerPubkey(ListKind::Contact(pk))) => {
+                let contact_filter = Filter::new()
+                    .authors([pk.bytes()])
+                    .kinds([3])
+                    .limit(1)
+                    .build();
+
                 let results = ndb
-                    .query(&txn, &[contact_filter.clone()], 1)
+                    .query(txn, &[contact_filter.clone()], 1)
                     .expect("contact query failed?");
 
                 let kind_fn = TimelineKind::last_per_pubkey;
@@ -375,13 +498,13 @@ impl TimelineKind {
 
                 if results.is_empty() {
                     return Some(Timeline::new(
-                        kind_fn(ListKind::contact_list(pk_src)),
+                        kind_fn(ListKind::contact_list(pk)),
                         FilterState::needs_remote(vec![contact_filter.clone()]),
                         tabs,
                     ));
                 }
 
-                let list_kind = ListKind::contact_list(pk_src);
+                let list_kind = ListKind::contact_list(pk);
 
                 match Timeline::last_per_pubkey(&results[0].note, &list_kind) {
                     Err(Error::App(notedeck::Error::Filter(FilterError::EmptyContactList))) => {
@@ -399,39 +522,29 @@ impl TimelineKind {
                 }
             }
 
-            TimelineKind::Profile(pk_src) => {
-                let pk = match &pk_src {
-                    PubkeySource::DeckAuthor => default_user?,
-                    PubkeySource::Explicit(pk) => pk.bytes(),
-                };
-
+            TimelineKind::Profile(pk) => {
                 let filter = Filter::new()
-                    .authors([pk])
+                    .authors([pk.bytes()])
                     .kinds([1])
                     .limit(default_limit())
                     .build();
 
                 Some(Timeline::new(
-                    TimelineKind::profile(pk_src),
+                    TimelineKind::profile(pk),
                     FilterState::ready(vec![filter]),
                     TimelineTab::full_tabs(),
                 ))
             }
 
-            TimelineKind::Notifications(pk_src) => {
-                let pk = match &pk_src {
-                    PubkeySource::DeckAuthor => default_user?,
-                    PubkeySource::Explicit(pk) => pk.bytes(),
-                };
-
+            TimelineKind::Notifications(pk) => {
                 let notifications_filter = Filter::new()
-                    .pubkeys([pk])
+                    .pubkeys([pk.bytes()])
                    .kinds([1])
                     .limit(default_limit())
                     .build();
 
                 Some(Timeline::new(
-                    TimelineKind::notifications(pk_src),
+                    TimelineKind::notifications(pk),
                     FilterState::ready(vec![notifications_filter]),
                     TimelineTab::only_notes_and_replies(),
                 ))
@@ -439,42 +552,11 @@ impl TimelineKind {
 
             TimelineKind::Hashtag(hashtag) => Some(Timeline::hashtag(hashtag)),
 
-            TimelineKind::List(ListKind::Contact(pk_src)) => {
-                let pk = match &pk_src {
-                    PubkeySource::DeckAuthor => default_user?,
-                    PubkeySource::Explicit(pk) => pk.bytes(),
-                };
-
-                let contact_filter = Filter::new().authors([pk]).kinds([3]).limit(1).build();
-
-                let txn = Transaction::new(ndb).expect("txn");
-                let results = ndb
-                    .query(&txn, &[contact_filter.clone()], 1)
-                    .expect("contact query failed?");
-
-                if results.is_empty() {
-                    return Some(Timeline::new(
-                        TimelineKind::contact_list(pk_src),
-                        FilterState::needs_remote(vec![contact_filter.clone()]),
-                        TimelineTab::full_tabs(),
-                    ));
-                }
-
-                match Timeline::contact_list(&results[0].note, pk_src.clone(), default_user) {
-                    Err(Error::App(notedeck::Error::Filter(FilterError::EmptyContactList))) => {
-                        Some(Timeline::new(
-                            TimelineKind::contact_list(pk_src),
-                            FilterState::needs_remote(vec![contact_filter]),
-                            TimelineTab::full_tabs(),
-                        ))
-                    }
-                    Err(e) => {
-                        error!("Unexpected error: {e}");
-                        None
-                    }
-                    Ok(tl) => Some(tl),
-                }
-            }
+            TimelineKind::List(ListKind::Contact(pk)) => Some(Timeline::new(
+                TimelineKind::contact_list(pk),
+                contact_filter_state(txn, ndb, &pk),
+                TimelineTab::full_tabs(),
+            )),
         }
     }
 
@@ -490,7 +572,7 @@ impl TimelineKind {
             TimelineKind::Profile(_pubkey_source) => ColumnTitle::needs_db(self),
             TimelineKind::Thread(_root_id) => ColumnTitle::simple("Thread"),
             TimelineKind::Universe => ColumnTitle::simple("Universe"),
-            TimelineKind::Generic => ColumnTitle::simple("Custom"),
+            TimelineKind::Generic(_) => ColumnTitle::simple("Custom"),
             TimelineKind::Hashtag(hashtag) => ColumnTitle::formatted(hashtag.to_string()),
         }
     }
@@ -506,26 +588,15 @@ impl<'a> TitleNeedsDb<'a> {
         TitleNeedsDb { kind }
     }
 
-    pub fn title<'txn>(
-        &self,
-        txn: &'txn Transaction,
-        ndb: &Ndb,
-        deck_author: Option<&Pubkey>,
-    ) -> &'txn str {
-        if let TimelineKind::Profile(pubkey_source) = self.kind {
-            if let Some(deck_author) = deck_author {
-                let pubkey = pubkey_source.to_pubkey(deck_author);
-                let profile = ndb.get_profile_by_pubkey(txn, pubkey);
-                let m_name = profile
-                    .as_ref()
-                    .ok()
-                    .map(|p| crate::profile::get_display_name(Some(p)).name());
+    pub fn title<'txn>(&self, txn: &'txn Transaction, ndb: &Ndb) -> &'txn str {
+        if let TimelineKind::Profile(pubkey) = self.kind {
+            let profile = ndb.get_profile_by_pubkey(txn, pubkey);
+            let m_name = profile
+                .as_ref()
+                .ok()
+                .map(|p| crate::profile::get_display_name(Some(p)).name());
 
-                m_name.unwrap_or("Profile")
-            } else {
-                // why would be there be no deck author? weird
-                "nostrich"
-            }
+            m_name.unwrap_or("Profile")
         } else {
             "Unknown"
         }
@@ -553,3 +624,65 @@ impl<'a> ColumnTitle<'a> {
         Self::NeedsDb(TitleNeedsDb::new(kind))
     }
 }
+
+fn contact_filter_state(txn: &Transaction, ndb: &Ndb, pk: &Pubkey) -> FilterState {
+    let contact_filter = Filter::new()
+        .authors([pk.bytes()])
+        .kinds([3])
+        .limit(1)
+        .build();
+
+    let results = ndb
+        .query(txn, &[contact_filter.clone()], 1)
+        .expect("contact query failed?");
+
+    if results.is_empty() {
+        FilterState::needs_remote(vec![contact_filter.clone()])
+    } else {
+        let with_hashtags = false;
+        match filter::filter_from_tags(&results[0].note, Some(pk.bytes()), with_hashtags) {
+            Err(notedeck::Error::Filter(FilterError::EmptyContactList)) => {
+                FilterState::needs_remote(vec![contact_filter])
+            }
+            Err(err) => {
+                error!("Error getting contact filter state: {err}");
+                FilterState::Broken(FilterError::EmptyContactList)
+            }
+            Ok(filter) => FilterState::ready(filter.into_follow_filter()),
+        }
+    }
+}
+
+fn last_per_pubkey_filter_state(ndb: &Ndb, pk: &Pubkey) -> FilterState {
+    let contact_filter = Filter::new()
+        .authors([pk.bytes()])
+        .kinds([3])
+        .limit(1)
+        .build();
+
+    let txn = Transaction::new(ndb).expect("txn");
+    let results = ndb
+        .query(&txn, &[contact_filter.clone()], 1)
+        .expect("contact query failed?");
+
+    if results.is_empty() {
+        FilterState::needs_remote(vec![contact_filter])
+    } else {
+        let kind = 1;
+        let notes_per_pk = 1;
+        match filter::last_n_per_pubkey_from_tags(&results[0].note, kind, notes_per_pk) {
+            Err(notedeck::Error::Filter(FilterError::EmptyContactList)) => {
+                FilterState::needs_remote(vec![contact_filter])
+            }
+            Err(err) => {
+                error!("Error getting contact filter state: {err}");
+                FilterState::Broken(FilterError::EmptyContactList)
+            }
+            Ok(filter) => FilterState::ready(filter),
+        }
+    }
+}
+
+fn universe_filter() -> Vec<Filter> {
+    vec![Filter::new().kinds([1]).limit(default_limit()).build()]
+}
@@ -1,26 +1,19 @@
 use crate::{
-    column::Columns,
-    decks::DecksCache,
     error::Error,
+    multi_subscriber::MultiSubscriber,
     subscriptions::{self, SubKind, Subscriptions},
-    thread::Thread,
     timeline::kind::ListKind,
     Result,
 };
 
 use notedeck::{
-    filter, CachedNote, FilterError, FilterState, FilterStates, NoteCache, NoteRef, RootNoteIdBuf,
-    UnknownIds,
+    filter, CachedNote, FilterError, FilterState, FilterStates, NoteCache, NoteRef, UnknownIds,
 };
 
-use std::fmt;
-use std::sync::atomic::{AtomicU32, Ordering};
-
 use egui_virtual_list::VirtualList;
 use enostr::{PoolRelay, Pubkey, RelayPool};
-use nostrdb::{Filter, Ndb, Note, NoteKey, Subscription, Transaction};
+use nostrdb::{Filter, Ndb, Note, NoteKey, Transaction};
 use std::cell::RefCell;
-use std::hash::Hash;
 use std::rc::Rc;
 
 use tracing::{debug, error, info, warn};
@@ -29,17 +22,26 @@ pub mod cache;
 pub mod kind;
 pub mod route;
 
-pub use cache::{TimelineCache, TimelineCacheKey};
-pub use kind::{ColumnTitle, PubkeySource, TimelineKind};
-pub use route::TimelineRoute;
+pub use cache::TimelineCache;
+pub use kind::{ColumnTitle, PubkeySource, ThreadSelection, TimelineKind};
 
-#[derive(Debug, Hash, Copy, Clone, Eq, PartialEq)]
-pub struct TimelineId(u32);
+//#[derive(Debug, Hash, Clone, Eq, PartialEq)]
+//pub type TimelineId = TimelineKind;
+
+/*
 
 impl TimelineId {
-    pub fn new(id: u32) -> Self {
+    pub fn kind(&self) -> &TimelineKind {
+        &self.kind
+    }
+
+    pub fn new(id: TimelineKind) -> Self {
         TimelineId(id)
     }
+
+    pub fn profile(pubkey: Pubkey) -> Self {
+        TimelineId::new(TimelineKind::Profile(PubkeySource::pubkey(pubkey)))
+    }
 }
 
 impl fmt::Display for TimelineId {
@@ -47,6 +49,7 @@ impl fmt::Display for TimelineId {
         write!(f, "TimelineId({})", self.0)
     }
 }
+*/
 
 #[derive(Copy, Clone, Eq, PartialEq, Debug, Default)]
 pub enum ViewFilter {
@@ -186,7 +189,6 @@ impl TimelineTab {
 /// A column in a deck. Holds navigation state, loaded notes, column kind, etc.
 #[derive(Debug)]
 pub struct Timeline {
-    pub id: TimelineId,
     pub kind: TimelineKind,
     // We may not have the filter loaded yet, so let's make it an option so
     // that codepaths have to explicitly handle it
@@ -194,35 +196,36 @@ pub struct Timeline {
     pub views: Vec<TimelineTab>,
     pub selected_view: usize,
 
-    pub subscription: Option<Subscription>,
+    pub subscription: Option<MultiSubscriber>,
 }
 
 impl Timeline {
     /// Create a timeline from a contact list
-    pub fn contact_list(
-        contact_list: &Note,
-        pk_src: PubkeySource,
-        deck_author: Option<&[u8; 32]>,
-    ) -> Result<Self> {
-        let our_pubkey = deck_author.map(|da| pk_src.to_pubkey_bytes(da));
+    pub fn contact_list(contact_list: &Note, pubkey: &[u8; 32]) -> Result<Self> {
         let with_hashtags = false;
-        let filter =
-            filter::filter_from_tags(contact_list, our_pubkey, with_hashtags)?.into_follow_filter();
+        let filter = filter::filter_from_tags(contact_list, Some(pubkey), with_hashtags)?
+            .into_follow_filter();
 
         Ok(Timeline::new(
-            TimelineKind::contact_list(pk_src),
+            TimelineKind::contact_list(Pubkey::new(*pubkey)),
             FilterState::ready(filter),
             TimelineTab::full_tabs(),
         ))
     }
 
-    pub fn thread(note_id: RootNoteIdBuf) -> Self {
-        let filter = Thread::filters_raw(note_id.borrow())
-            .iter_mut()
-            .map(|fb| fb.build())
-            .collect();
+    pub fn thread(selection: ThreadSelection) -> Self {
+        let filter = vec![
+            nostrdb::Filter::new()
+                .kinds([1])
+                .event(selection.root_id.bytes())
+                .build(),
+            nostrdb::Filter::new()
+                .ids([selection.root_id.bytes()])
+                .limit(1)
+                .build(),
+        ];
         Timeline::new(
-            TimelineKind::Thread(note_id),
+            TimelineKind::Thread(selection),
             FilterState::ready(filter),
             TimelineTab::only_notes_and_replies(),
         )
@@ -234,7 +237,7 @@ impl Timeline {
         let filter = filter::last_n_per_pubkey_from_tags(list, kind, notes_per_pk)?;
 
         Ok(Timeline::new(
-            TimelineKind::last_per_pubkey(list_kind.clone()),
+            TimelineKind::last_per_pubkey(*list_kind),
             FilterState::ready(filter),
             TimelineTab::only_notes_and_replies(),
         ))
@@ -254,25 +257,20 @@ impl Timeline {
         )
     }
 
-    pub fn make_view_id(id: TimelineId, selected_view: usize) -> egui::Id {
+    pub fn make_view_id(id: &TimelineKind, selected_view: usize) -> egui::Id {
         egui::Id::new((id, selected_view))
     }
 
     pub fn view_id(&self) -> egui::Id {
-        Timeline::make_view_id(self.id, self.selected_view)
+        Timeline::make_view_id(&self.kind, self.selected_view)
     }
 
     pub fn new(kind: TimelineKind, filter_state: FilterState, views: Vec<TimelineTab>) -> Self {
-        // global unique id for all new timelines
-        static UIDS: AtomicU32 = AtomicU32::new(0);
-
         let filter = FilterStates::new(filter_state);
-        let subscription: Option<Subscription> = None;
+        let subscription: Option<MultiSubscriber> = None;
         let selected_view = 0;
-        let id = TimelineId::new(UIDS.fetch_add(1, Ordering::Relaxed));
 
         Timeline {
-            id,
             kind,
             filter,
             views,
@@ -417,6 +415,8 @@ impl Timeline {
 
         let sub = self
             .subscription
+            .as_ref()
+            .and_then(|s| s.local_subid)
            .ok_or(Error::App(notedeck::Error::no_active_sub()))?;
 
         let new_note_ids = ndb.poll_for_notes(sub, 500);
@@ -484,10 +484,9 @@ pub fn setup_new_timeline(
     pool: &mut RelayPool,
     note_cache: &mut NoteCache,
     since_optimize: bool,
-    our_pk: Option<&Pubkey>,
 ) {
     // if we're ready, setup local subs
-    if is_timeline_ready(ndb, pool, note_cache, timeline, our_pk) {
+    if is_timeline_ready(ndb, pool, note_cache, timeline) {
         if let Err(err) = setup_timeline_nostrdb_sub(ndb, note_cache, timeline) {
             error!("setup_new_timeline: {err}");
         }
@@ -505,7 +504,7 @@ pub fn setup_new_timeline(
 pub fn send_initial_timeline_filters(
     ndb: &Ndb,
     since_optimize: bool,
-    columns: &mut Columns,
+    timeline_cache: &mut TimelineCache,
     subs: &mut Subscriptions,
     pool: &mut RelayPool,
     relay_id: &str,
@@ -513,7 +512,7 @@ pub fn send_initial_timeline_filters(
     info!("Sending initial filters to {}", relay_id);
     let relay = &mut pool.relays.iter_mut().find(|r| r.url() == relay_id)?;
 
-    for timeline in columns.timelines_mut() {
+    for (_kind, timeline) in timeline_cache.timelines.iter_mut() {
         send_initial_timeline_filter(ndb, since_optimize, subs, relay, timeline);
     }
 
@@ -527,7 +526,7 @@ pub fn send_initial_timeline_filter(
     relay: &mut PoolRelay,
     timeline: &mut Timeline,
 ) {
-    let filter_state = timeline.filter.get(relay.url());
+    let filter_state = timeline.filter.get_mut(relay.url());
 
     match filter_state {
         FilterState::Broken(err) => {
@@ -567,7 +566,7 @@ pub fn send_initial_timeline_filter(
                 if can_since_optimize && filter::should_since_optimize(lim, notes.len()) {
                     filter = filter::since_optimize_filter(filter, notes);
                 } else {
-                    warn!("Skipping since optimization for {:?}: number of local notes is less than limit, attempting to backfill.", filter);
+                    warn!("Skipping since optimization for {:?}: number of local notes is less than limit, attempting to backfill.", &timeline.kind);
                 }
 
                 filter
@@ -596,7 +595,7 @@ fn fetch_contact_list(
     relay: &mut PoolRelay,
     timeline: &mut Timeline,
 ) {
-    let sub_kind = SubKind::FetchingContactList(timeline.id);
+    let sub_kind = SubKind::FetchingContactList(timeline.kind.clone());
     let sub_id = subscriptions::new_sub_id();
     let local_sub = ndb.subscribe(&filter).expect("sub");
 
@@ -621,9 +620,21 @@ fn setup_initial_timeline(
 ) -> Result<()> {
     // some timelines are one-shot and a refreshed, like last_per_pubkey algo feed
     if timeline.kind.should_subscribe_locally() {
-        timeline.subscription = Some(ndb.subscribe(filters)?);
+        let local_sub = ndb.subscribe(filters)?;
+        match &mut timeline.subscription {
+            None => {
+                timeline.subscription = Some(MultiSubscriber::with_initial_local_sub(
+                    local_sub,
+                    filters.to_vec(),
+                ));
+            }
+
+            Some(msub) => {
+                msub.local_subid = Some(local_sub);
+            }
+        };
     }
-    let txn = Transaction::new(ndb)?;
     debug!(
         "querying nostrdb sub {:?} {:?}",
         timeline.subscription, timeline.filter
@@ -634,6 +645,7 @@ fn setup_initial_timeline(
         lim += filter.limit().unwrap_or(1) as i32;
|
lim += filter.limit().unwrap_or(1) as i32;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let txn = Transaction::new(ndb)?;
|
||||||
let notes: Vec<NoteRef> = ndb
|
let notes: Vec<NoteRef> = ndb
|
||||||
.query(&txn, filters, lim)?
|
.query(&txn, filters, lim)?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
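The setup hunk above wires a fresh local nostrdb subscription into an existing MultiSubscriber when one is already attached, and otherwise creates a new one seeded with that subscription. A sketch of the bookkeeping pattern, with simplified stand-in types (the real MultiSubscriber and Subscription live in the crates this diff touches):

// Hypothetical, simplified shapes for illustration only.
#[derive(Debug, Clone, Copy)]
struct Subscription(u64);

#[derive(Debug, Default)]
struct MultiSubscriber {
    local_subid: Option<Subscription>,
    filters: Vec<String>, // stand-in for nostrdb filters
}

impl MultiSubscriber {
    fn with_initial_local_sub(sub: Subscription, filters: Vec<String>) -> Self {
        Self { local_subid: Some(sub), filters }
    }
}

fn attach_local_sub(slot: &mut Option<MultiSubscriber>, sub: Subscription, filters: &[String]) {
    match slot {
        // no subscriber yet: create one that starts with this local sub
        None => *slot = Some(MultiSubscriber::with_initial_local_sub(sub, filters.to_vec())),
        // already have one (e.g. a remote sub exists): just record the local id
        Some(msub) => msub.local_subid = Some(sub),
    }
}

fn main() {
    let mut slot: Option<MultiSubscriber> = None;
    attach_local_sub(&mut slot, Subscription(1), &["kind:1".to_string()]);
    assert!(slot.as_ref().and_then(|m| m.local_subid).is_some());
}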
@@ -648,15 +660,11 @@ fn setup_initial_timeline(
 pub fn setup_initial_nostrdb_subs(
     ndb: &Ndb,
     note_cache: &mut NoteCache,
-    decks_cache: &mut DecksCache,
+    timeline_cache: &mut TimelineCache,
 ) -> Result<()> {
-    for decks in decks_cache.get_all_decks_mut() {
-        for deck in decks.decks_mut() {
-            for timeline in deck.columns_mut().timelines_mut() {
-                if let Err(err) = setup_timeline_nostrdb_sub(ndb, note_cache, timeline) {
-                    error!("setup_initial_nostrdb_subs: {err}");
-                }
-            }
+    for (_kind, timeline) in timeline_cache.timelines.iter_mut() {
+        if let Err(err) = setup_timeline_nostrdb_sub(ndb, note_cache, timeline) {
+            error!("setup_initial_nostrdb_subs: {err}");
         }
     }

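With the unified cache, the startup loop walks every cached timeline directly instead of drilling through decks, columns, and timelines. A toy version of a TimelineKind-keyed cache, assuming simplified types that are not the crate's real definitions:

use std::collections::HashMap;

// Hypothetical mini version of the unified cache: timelines keyed by their kind.
#[derive(Hash, PartialEq, Eq, Clone, Debug)]
enum TimelineKind {
    Universe,
    Hashtag(String),
}

#[derive(Default, Debug)]
struct Timeline {
    notes: Vec<u64>,
}

#[derive(Default)]
struct TimelineCache {
    timelines: HashMap<TimelineKind, Timeline>,
}

fn main() {
    let mut cache = TimelineCache::default();
    cache
        .timelines
        .insert(TimelineKind::Hashtag("nostr".into()), Timeline::default());
    cache.timelines.insert(TimelineKind::Universe, Timeline::default());

    // the setup loop now visits every cached timeline in one pass
    for (kind, timeline) in cache.timelines.iter_mut() {
        timeline.notes.push(1);
        println!("set up {:?} ({} notes)", kind, timeline.notes.len());
    }
}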
@@ -688,7 +696,6 @@ pub fn is_timeline_ready(
     pool: &mut RelayPool,
     note_cache: &mut NoteCache,
     timeline: &mut Timeline,
-    our_pk: Option<&Pubkey>,
 ) -> bool {
     // TODO: we should debounce the filter states a bit to make sure we have
     // seen all of the different contact lists from each relay

@@ -721,11 +728,7 @@ pub fn is_timeline_ready(
     let filter = {
         let txn = Transaction::new(ndb).expect("txn");
         let note = ndb.get_note_by_key(&txn, note_key).expect("note");
-        let add_pk = timeline
-            .kind
-            .pubkey_source()
-            .as_ref()
-            .and_then(|pk_src| our_pk.map(|pk| pk_src.to_pubkey_bytes(pk)));
+        let add_pk = timeline.kind.pubkey().map(|pk| pk.bytes());
         filter::filter_from_tags(&note, add_pk, with_hashtags).map(|f| f.into_follow_filter())
     };

@@ -1,124 +1,44 @@
 use crate::{
-    column::Columns,
-    draft::Drafts,
     nav::RenderNavAction,
     profile::ProfileAction,
-    timeline::{TimelineCache, TimelineId, TimelineKind},
-    ui::{
-        self,
-        note::{NoteOptions, QuoteRepostView},
-        profile::ProfileView,
-    },
+    timeline::{TimelineCache, TimelineKind},
+    ui::{self, note::NoteOptions, profile::ProfileView},
 };

-use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
-use enostr::{NoteId, Pubkey};
-use nostrdb::{Ndb, Transaction};
+use enostr::Pubkey;
+use nostrdb::Ndb;
 use notedeck::{Accounts, ImageCache, MuteFun, NoteCache, UnknownIds};

-#[derive(Debug, Eq, PartialEq, Clone, Copy)]
-pub enum TimelineRoute {
-    Timeline(TimelineId),
-    Thread(NoteId),
-    Profile(Pubkey),
-    Reply(NoteId),
-    Quote(NoteId),
-}
-
-fn parse_pubkey<'a>(parser: &mut TokenParser<'a>) -> Result<Pubkey, ParseError<'a>> {
-    let hex = parser.pull_token()?;
-    Pubkey::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed)
-}
-
-fn parse_note_id<'a>(parser: &mut TokenParser<'a>) -> Result<NoteId, ParseError<'a>> {
-    let hex = parser.pull_token()?;
-    NoteId::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed)
-}
-
-impl TokenSerializable for TimelineRoute {
-    fn serialize_tokens(&self, writer: &mut TokenWriter) {
-        match self {
-            TimelineRoute::Profile(pk) => {
-                writer.write_token("profile");
-                writer.write_token(&pk.hex());
-            }
-            TimelineRoute::Thread(note_id) => {
-                writer.write_token("thread");
-                writer.write_token(&note_id.hex());
-            }
-            TimelineRoute::Reply(note_id) => {
-                writer.write_token("reply");
-                writer.write_token(&note_id.hex());
-            }
-            TimelineRoute::Quote(note_id) => {
-                writer.write_token("quote");
-                writer.write_token(&note_id.hex());
-            }
-            TimelineRoute::Timeline(_tlid) => {
-                todo!("tlid")
-            }
-        }
-    }
-
-    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
-        TokenParser::alt(
-            parser,
-            &[
-                |p| {
-                    p.parse_token("profile")?;
-                    Ok(TimelineRoute::Profile(parse_pubkey(p)?))
-                },
-                |p| {
-                    p.parse_token("thread")?;
-                    Ok(TimelineRoute::Thread(parse_note_id(p)?))
-                },
-                |p| {
-                    p.parse_token("reply")?;
-                    Ok(TimelineRoute::Reply(parse_note_id(p)?))
-                },
-                |p| {
-                    p.parse_token("quote")?;
-                    Ok(TimelineRoute::Quote(parse_note_id(p)?))
-                },
-                |_p| todo!("handle timeline parsing"),
-            ],
-        )
-    }
-}
-
 #[allow(clippy::too_many_arguments)]
 pub fn render_timeline_route(
     ndb: &Ndb,
-    columns: &mut Columns,
-    drafts: &mut Drafts,
     img_cache: &mut ImageCache,
     unknown_ids: &mut UnknownIds,
     note_cache: &mut NoteCache,
     timeline_cache: &mut TimelineCache,
     accounts: &mut Accounts,
-    route: TimelineRoute,
+    kind: &TimelineKind,
     col: usize,
     textmode: bool,
+    depth: usize,
     ui: &mut egui::Ui,
 ) -> Option<RenderNavAction> {
-    match route {
-        TimelineRoute::Timeline(timeline_id) => {
-            let note_options = {
-                let is_universe = if let Some(timeline) = columns.find_timeline(timeline_id) {
-                    timeline.kind == TimelineKind::Universe
-                } else {
-                    false
-                };
-
-                let mut options = NoteOptions::new(is_universe);
-                options.set_textmode(textmode);
-                options
-            };
+    let note_options = {
+        let mut options = NoteOptions::new(kind == &TimelineKind::Universe);
+        options.set_textmode(textmode);
+        options
+    };

+    match kind {
+        TimelineKind::List(_)
+        | TimelineKind::Algo(_)
+        | TimelineKind::Notifications(_)
+        | TimelineKind::Universe
+        | TimelineKind::Hashtag(_)
+        | TimelineKind::Generic(_) => {
             let note_action = ui::TimelineView::new(
-                timeline_id,
-                columns,
+                kind,
+                timeline_cache,
                 ndb,
                 note_cache,
                 img_cache,
@@ -130,89 +50,50 @@ pub fn render_timeline_route(
             note_action.map(RenderNavAction::NoteAction)
         }

-        TimelineRoute::Thread(id) => ui::ThreadView::new(
+        TimelineKind::Profile(pubkey) => {
+            if depth > 1 {
+                render_profile_route(
+                    pubkey,
+                    accounts,
+                    ndb,
+                    timeline_cache,
+                    img_cache,
+                    note_cache,
+                    unknown_ids,
+                    col,
+                    ui,
+                    &accounts.mutefun(),
+                )
+            } else {
+                // we render profiles like timelines if they are at the root
+                let note_action = ui::TimelineView::new(
+                    kind,
+                    timeline_cache,
+                    ndb,
+                    note_cache,
+                    img_cache,
+                    note_options,
+                    &accounts.mutefun(),
+                )
+                .ui(ui);
+
+                note_action.map(RenderNavAction::NoteAction)
+            }
+        }
+
+        TimelineKind::Thread(id) => ui::ThreadView::new(
             timeline_cache,
             ndb,
             note_cache,
             unknown_ids,
             img_cache,
-            id.bytes(),
+            id.selected_or_root(),
             textmode,
             &accounts.mutefun(),
         )
         .id_source(egui::Id::new(("threadscroll", col)))
         .ui(ui)
         .map(Into::into),

-        TimelineRoute::Reply(id) => {
-            let txn = if let Ok(txn) = Transaction::new(ndb) {
-                txn
-            } else {
-                ui.label("Reply to unknown note");
-                return None;
-            };
-
-            let note = if let Ok(note) = ndb.get_note_by_id(&txn, id.bytes()) {
-                note
-            } else {
-                ui.label("Reply to unknown note");
-                return None;
-            };
-
-            let id = egui::Id::new(("post", col, note.key().unwrap()));
-            let poster = accounts.selected_or_first_nsec()?;
-
-            let action = {
-                let draft = drafts.reply_mut(note.id());
-
-                let response = egui::ScrollArea::vertical().show(ui, |ui| {
-                    ui::PostReplyView::new(ndb, poster, draft, note_cache, img_cache, &note)
-                        .id_source(id)
-                        .show(ui)
-                });
-
-                response.inner.action
-            };
-
-            action.map(Into::into)
-        }
-
-        TimelineRoute::Profile(pubkey) => render_profile_route(
-            &pubkey,
-            accounts,
-            ndb,
-            timeline_cache,
-            img_cache,
-            note_cache,
-            unknown_ids,
-            col,
-            ui,
-            &accounts.mutefun(),
-        ),
-
-        TimelineRoute::Quote(id) => {
-            let txn = Transaction::new(ndb).expect("txn");
-
-            let note = if let Ok(note) = ndb.get_note_by_id(&txn, id.bytes()) {
-                note
-            } else {
-                ui.label("Quote of unknown note");
-                return None;
-            };
-
-            let id = egui::Id::new(("post", col, note.key().unwrap()));
-
-            let poster = accounts.selected_or_first_nsec()?;
-            let draft = drafts.quote_mut(note.id());
-
-            let response = egui::ScrollArea::vertical().show(ui, |ui| {
-                QuoteRepostView::new(ndb, poster, note_cache, img_cache, draft, &note)
-                    .id_source(id)
-                    .show(ui)
-            });
-
-            response.inner.action.map(Into::into)
-        }
     }
 }

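Routing now dispatches on the TimelineKind itself, and the new depth parameter decides how a profile route renders: at the column root it draws like a timeline, while a pushed profile view shows the profile screen. A toy dispatch that mirrors only that depth check, with hypothetical stand-in types:

// Stand-ins for illustration; not the crate's enums or render functions.
#[derive(Debug)]
enum Kind {
    Universe,
    Profile([u8; 32]),
}

fn render(kind: &Kind, depth: usize) -> &'static str {
    match kind {
        Kind::Universe => "timeline view",
        Kind::Profile(_pk) => {
            if depth > 1 {
                "profile view" // pushed on top of a column
            } else {
                "timeline view" // profile at the column root renders like a timeline
            }
        }
    }
}

fn main() {
    assert_eq!(render(&Kind::Profile([0; 32]), 1), "timeline view");
    assert_eq!(render(&Kind::Profile([0; 32]), 2), "profile view");
    println!("{}", render(&Kind::Universe, 1));
}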
@@ -262,22 +143,26 @@ mod tests {
     use enostr::NoteId;
     use tokenator::{TokenParser, TokenSerializable, TokenWriter};

+    use crate::timeline::{ThreadSelection, TimelineKind};
+    use enostr::Pubkey;
+    use notedeck::RootNoteIdBuf;
+
     #[test]
     fn test_timeline_route_serialize() {
-        use super::TimelineRoute;
+        use super::TimelineKind;

-        {
-            let note_id_hex = "1c54e5b0c386425f7e017d9e068ddef8962eb2ce1bb08ed27e24b93411c12e60";
-            let note_id = NoteId::from_hex(note_id_hex).unwrap();
-            let data_str = format!("thread:{}", note_id_hex);
-            let data = &data_str.split(":").collect::<Vec<&str>>();
-            let mut token_writer = TokenWriter::default();
-            let mut parser = TokenParser::new(&data);
-            let parsed = TimelineRoute::parse_from_tokens(&mut parser).unwrap();
-            let expected = TimelineRoute::Thread(note_id);
-            parsed.serialize_tokens(&mut token_writer);
-            assert_eq!(expected, parsed);
-            assert_eq!(token_writer.str(), data_str);
-        }
+        let note_id_hex = "1c54e5b0c386425f7e017d9e068ddef8962eb2ce1bb08ed27e24b93411c12e60";
+        let note_id = NoteId::from_hex(note_id_hex).unwrap();
+        let data_str = format!("thread:{}", note_id_hex);
+        let data = &data_str.split(":").collect::<Vec<&str>>();
+        let mut token_writer = TokenWriter::default();
+        let mut parser = TokenParser::new(&data);
+        let parsed = TimelineKind::parse(&mut parser, &Pubkey::new(*note_id.bytes())).unwrap();
+        let expected = TimelineKind::Thread(ThreadSelection::from_root_id(
+            RootNoteIdBuf::new_unsafe(*note_id.bytes()),
+        ));
+        parsed.serialize_tokens(&mut token_writer);
+        assert_eq!(expected, parsed);
+        assert_eq!(token_writer.str(), data_str);
     }
 }

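The updated test round-trips a "thread:<hex>" token string through TimelineKind::parse and serialize_tokens. A small self-contained sketch of the same colon-token round trip, using a toy parser rather than the tokenator crate's API, and assuming the hex crate as a dependency:

// Toy illustration only; Route, parse and serialize are hypothetical names.
#[derive(Debug, PartialEq)]
enum Route {
    Thread([u8; 32]),
}

fn parse(tokens: &[&str]) -> Option<Route> {
    match tokens {
        [kind, hex_str] if *kind == "thread" => {
            let bytes = hex::decode(hex_str).ok()?;
            Some(Route::Thread(bytes.as_slice().try_into().ok()?))
        }
        _ => None,
    }
}

fn serialize(route: &Route) -> String {
    match route {
        Route::Thread(id) => format!("thread:{}", hex::encode(id)),
    }
}

fn main() {
    let s = "thread:1c54e5b0c386425f7e017d9e068ddef8962eb2ce1bb08ed27e24b93411c12e60";
    let tokens: Vec<&str> = s.split(':').collect();
    let parsed = parse(&tokens).unwrap();
    assert_eq!(serialize(&parsed), s); // round trip back to the same token string
}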
@@ -143,32 +143,39 @@ impl AddColumnOption {
         ndb: &Ndb,
         cur_account: Option<&UserAccount>,
     ) -> Option<AddColumnResponse> {
+        let txn = Transaction::new(ndb).unwrap();
         match self {
            AddColumnOption::Algo(algo_option) => Some(AddColumnResponse::Algo(algo_option)),
            AddColumnOption::Universe => TimelineKind::Universe
-                .into_timeline(ndb, None)
-                .map(AddColumnResponse::Timeline),
-            AddColumnOption::Notification(pubkey) => TimelineKind::Notifications(pubkey)
-                .into_timeline(ndb, cur_account.map(|a| a.pubkey.bytes()))
+                .into_timeline(&txn, ndb)
                 .map(AddColumnResponse::Timeline),
+            AddColumnOption::Notification(pubkey) => {
+                TimelineKind::Notifications(*pubkey.to_pubkey(&cur_account.map(|kp| kp.pubkey)?))
+                    .into_timeline(&txn, ndb)
+                    .map(AddColumnResponse::Timeline)
+            }
            AddColumnOption::UndecidedNotification => {
                Some(AddColumnResponse::UndecidedNotification)
            }
-            AddColumnOption::Contacts(pubkey) => {
-                let tlk = TimelineKind::contact_list(pubkey);
-                tlk.into_timeline(ndb, cur_account.map(|a| a.pubkey.bytes()))
+            AddColumnOption::Contacts(pk_src) => {
+                let tlk = TimelineKind::contact_list(
+                    *pk_src.to_pubkey(&cur_account.map(|kp| kp.pubkey)?),
+                );
+                tlk.into_timeline(&txn, ndb)
                     .map(AddColumnResponse::Timeline)
            }
            AddColumnOption::ExternalNotification => Some(AddColumnResponse::ExternalNotification),
            AddColumnOption::UndecidedHashtag => Some(AddColumnResponse::Hashtag),
            AddColumnOption::Hashtag(hashtag) => TimelineKind::Hashtag(hashtag)
-                .into_timeline(ndb, None)
+                .into_timeline(&txn, ndb)
                 .map(AddColumnResponse::Timeline),
            AddColumnOption::UndecidedIndividual => Some(AddColumnResponse::UndecidedIndividual),
            AddColumnOption::ExternalIndividual => Some(AddColumnResponse::ExternalIndividual),
            AddColumnOption::Individual(pubkey_source) => {
-                let tlk = TimelineKind::profile(pubkey_source);
-                tlk.into_timeline(ndb, cur_account.map(|a| a.pubkey.bytes()))
+                let tlk = TimelineKind::profile(
+                    *pubkey_source.to_pubkey(&cur_account.map(|kp| kp.pubkey)?),
+                );
+                tlk.into_timeline(&txn, ndb)
                     .map(AddColumnResponse::Timeline)
            }
        }
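The column options above now resolve a PubkeySource against the currently selected account before building a concrete TimelineKind. A rough sketch of that resolution step, with hypothetical simplified types rather than the enostr/notedeck definitions:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Pubkey([u8; 32]);

// A column option can name an explicit pubkey or defer to the deck author.
#[derive(Clone, Copy)]
enum PubkeySource {
    Explicit(Pubkey),
    DeckAuthor,
}

impl PubkeySource {
    fn to_pubkey<'a>(&'a self, deck_author: &'a Pubkey) -> &'a Pubkey {
        match self {
            PubkeySource::Explicit(pk) => pk,
            PubkeySource::DeckAuthor => deck_author,
        }
    }
}

fn main() {
    let me = Pubkey([7; 32]);
    // mirrors the pattern `*pk_src.to_pubkey(&cur_account.map(|kp| kp.pubkey)?)`
    let resolved = *PubkeySource::DeckAuthor.to_pubkey(&me);
    assert_eq!(resolved, me);

    let other = Pubkey([9; 32]);
    assert_eq!(*PubkeySource::Explicit(other).to_pubkey(&me), other);
}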
@@ -232,13 +239,17 @@ impl<'a> AddColumnView<'a> {
         })
     }

-    fn algo_last_per_pk_ui(&mut self, ui: &mut Ui) -> Option<AddColumnResponse> {
+    fn algo_last_per_pk_ui(
+        &mut self,
+        ui: &mut Ui,
+        deck_author: Pubkey,
+    ) -> Option<AddColumnResponse> {
         let algo_option = ColumnOptionData {
             title: "Contact List",
             description: "Source the last note for each user in your contact list",
             icon: egui::include_image!("../../../../assets/icons/home_icon_dark_4x.png"),
             option: AddColumnOption::Algo(AlgoOption::LastPerPubkey(Decision::Decided(
-                ListKind::contact_list(PubkeySource::DeckAuthor),
+                ListKind::contact_list(deck_author),
             ))),
         };

@@ -319,18 +330,22 @@ impl<'a> AddColumnView<'a> {
         }

         let resp = if let Some(keypair) = key_state.get_login_keypair() {
-            let txn = Transaction::new(self.ndb).expect("txn");
-            if let Ok(profile) = self.ndb.get_profile_by_pubkey(&txn, keypair.pubkey.bytes()) {
-                egui::Frame::window(ui.style())
-                    .outer_margin(Margin {
-                        left: 4.0,
-                        right: 4.0,
-                        top: 12.0,
-                        bottom: 32.0,
-                    })
-                    .show(ui, |ui| {
-                        ProfilePreview::new(&profile, self.img_cache).ui(ui);
-                    });
+            {
+                let txn = Transaction::new(self.ndb).expect("txn");
+                if let Ok(profile) =
+                    self.ndb.get_profile_by_pubkey(&txn, keypair.pubkey.bytes())
+                {
+                    egui::Frame::window(ui.style())
+                        .outer_margin(Margin {
+                            left: 4.0,
+                            right: 4.0,
+                            top: 12.0,
+                            bottom: 32.0,
+                        })
+                        .show(ui, |ui| {
+                            ProfilePreview::new(&profile, self.img_cache).ui(ui);
+                        });
+                }
             }

             if ui.add(add_column_button()).clicked() {

@@ -470,7 +485,7 @@ impl<'a> AddColumnView<'a> {
             title: "Contacts",
             description: "See notes from your contacts",
             icon: egui::include_image!("../../../../assets/icons/home_icon_dark_4x.png"),
-            option: AddColumnOption::Contacts(source.clone()),
+            option: AddColumnOption::Contacts(source),
         });
     }
     vec.push(ColumnOptionData {

@@ -609,7 +624,13 @@ pub fn render_add_column_routes(
         AddColumnRoute::Base => add_column_view.ui(ui),
         AddColumnRoute::Algo(r) => match r {
             AddAlgoRoute::Base => add_column_view.algo_ui(ui),
-            AddAlgoRoute::LastPerPubkey => add_column_view.algo_last_per_pk_ui(ui),
+            AddAlgoRoute::LastPerPubkey => {
+                if let Some(deck_author) = ctx.accounts.get_selected_account() {
+                    add_column_view.algo_last_per_pk_ui(ui, deck_author.pubkey)
+                } else {
+                    None
+                }
+            }
         },
         AddColumnRoute::UndecidedNotification => add_column_view.notifications_ui(ui),
         AddColumnRoute::ExternalNotification => add_column_view.external_notification_ui(ui),

@@ -628,13 +649,16 @@ pub fn render_add_column_routes(
                 ctx.pool,
                 ctx.note_cache,
                 app.since_optimize,
-                ctx.accounts
-                    .get_selected_account()
-                    .as_ref()
-                    .map(|sa| &sa.pubkey),
             );

             app.columns_mut(ctx.accounts)
-                .add_timeline_to_column(col, timeline);
+                .column_mut(col)
+                .router_mut()
+                .route_to_replaced(Route::timeline(timeline.kind.clone()));
+
+            app.timeline_cache
+                .timelines
+                .insert(timeline.kind.clone(), timeline);
         }

         AddColumnResponse::Algo(algo_option) => match algo_option {

@@ -654,14 +678,8 @@ pub fn render_add_column_routes(
             // add it to our list of timelines
             AlgoOption::LastPerPubkey(Decision::Decided(list_kind)) => {
                 let maybe_timeline = {
-                    let default_user = ctx
-                        .accounts
-                        .get_selected_account()
-                        .as_ref()
-                        .map(|sa| sa.pubkey.bytes());
-
-                    TimelineKind::last_per_pubkey(list_kind.clone())
-                        .into_timeline(ctx.ndb, default_user)
+                    let txn = Transaction::new(ctx.ndb).unwrap();
+                    TimelineKind::last_per_pubkey(list_kind).into_timeline(&txn, ctx.ndb)
                 };

                 if let Some(mut timeline) = maybe_timeline {

@@ -672,14 +690,16 @@ pub fn render_add_column_routes(
                         ctx.pool,
                         ctx.note_cache,
                         app.since_optimize,
-                        ctx.accounts
-                            .get_selected_account()
-                            .as_ref()
-                            .map(|sa| &sa.pubkey),
                     );

                    app.columns_mut(ctx.accounts)
-                        .add_timeline_to_column(col, timeline);
+                        .column_mut(col)
+                        .router_mut()
+                        .route_to_replaced(Route::timeline(timeline.kind.clone()));
+
+                    app.timeline_cache
+                        .timelines
+                        .insert(timeline.kind.clone(), timeline);
                 } else {
                     // we couldn't fetch the timeline yet... let's let
                     // the user know ?
@@ -5,7 +5,7 @@ use crate::nav::SwitchingAction;
 use crate::{
     column::Columns,
     route::Route,
-    timeline::{ColumnTitle, TimelineId, TimelineKind, TimelineRoute},
+    timeline::{ColumnTitle, TimelineKind},
     ui::{
         self,
         anim::{AnimationHelper, ICON_EXPANSION_MULTIPLE},

@@ -22,7 +22,6 @@ pub struct NavTitle<'a> {
     ndb: &'a Ndb,
     img_cache: &'a mut ImageCache,
     columns: &'a Columns,
-    deck_author: Option<&'a Pubkey>,
     routes: &'a [Route],
     col_id: usize,
 }

@@ -32,7 +31,6 @@ impl<'a> NavTitle<'a> {
         ndb: &'a Ndb,
         img_cache: &'a mut ImageCache,
         columns: &'a Columns,
-        deck_author: Option<&'a Pubkey>,
         routes: &'a [Route],
         col_id: usize,
     ) -> Self {

@@ -40,7 +38,6 @@ impl<'a> NavTitle<'a> {
             ndb,
             img_cache,
             columns,
-            deck_author,
             routes,
             col_id,
         }

@@ -123,14 +120,14 @@ impl<'a> NavTitle<'a> {
         // not it looks cool
         self.title_pfp(ui, prev, 32.0);

-        let column_title = prev.title(self.columns);
+        let column_title = prev.title();

         let back_resp = match &column_title {
             ColumnTitle::Simple(title) => ui.add(Self::back_label(title, color)),

             ColumnTitle::NeedsDb(need_db) => {
                 let txn = Transaction::new(self.ndb).unwrap();
-                let title = need_db.title(&txn, self.ndb, self.deck_author);
+                let title = need_db.title(&txn, self.ndb);
                 ui.add(Self::back_label(title, color))
             }
         };

@@ -402,14 +399,11 @@ impl<'a> NavTitle<'a> {
         })
     }

-    fn timeline_pfp(&mut self, ui: &mut egui::Ui, id: TimelineId, pfp_size: f32) {
+    fn timeline_pfp(&mut self, ui: &mut egui::Ui, id: &TimelineKind, pfp_size: f32) {
         let txn = Transaction::new(self.ndb).unwrap();

-        if let Some(pfp) = self
-            .columns
-            .find_timeline(id)
-            .and_then(|tl| tl.kind.pubkey_source())
-            .and_then(|pksrc| self.deck_author.map(|da| pksrc.to_pubkey(da)))
+        if let Some(pfp) = id
+            .pubkey()
             .and_then(|pk| self.pubkey_pfp(&txn, pk.bytes(), pfp_size))
         {
             ui.add(pfp);

@@ -422,34 +416,35 @@ impl<'a> NavTitle<'a> {

     fn title_pfp(&mut self, ui: &mut egui::Ui, top: &Route, pfp_size: f32) {
         match top {
-            Route::Timeline(tlr) => match tlr {
-                TimelineRoute::Timeline(tlid) => {
-                    let is_hashtag = self
-                        .columns
-                        .find_timeline(*tlid)
-                        .is_some_and(|tl| matches!(tl.kind, TimelineKind::Hashtag(_)));
-
-                    if is_hashtag {
-                        ui.add(
-                            egui::Image::new(egui::include_image!(
-                                "../../../../../assets/icons/hashtag_icon_4x.png"
-                            ))
-                            .fit_to_exact_size(egui::vec2(pfp_size, pfp_size)),
-                        );
-                    } else {
-                        self.timeline_pfp(ui, *tlid, pfp_size);
-                    }
+            Route::Timeline(kind) => match kind {
+                TimelineKind::Hashtag(_ht) => {
+                    ui.add(
+                        egui::Image::new(egui::include_image!(
+                            "../../../../../assets/icons/hashtag_icon_4x.png"
+                        ))
+                        .fit_to_exact_size(egui::vec2(pfp_size, pfp_size)),
+                    );
                 }

-                TimelineRoute::Thread(_note_id) => {}
-                TimelineRoute::Reply(_note_id) => {}
-                TimelineRoute::Quote(_note_id) => {}
-
-                TimelineRoute::Profile(pubkey) => {
+                TimelineKind::Profile(pubkey) => {
                     self.show_profile(ui, pubkey, pfp_size);
                 }

+                TimelineKind::Thread(_) => {
+                    // no pfp for threads
+                }
+
+                TimelineKind::Universe
+                | TimelineKind::Algo(_)
+                | TimelineKind::Notifications(_)
+                | TimelineKind::Generic(_)
+                | TimelineKind::List(_) => {
+                    self.timeline_pfp(ui, kind, pfp_size);
+                }
             },

+            Route::Reply(_) => {}
+            Route::Quote(_) => {}
             Route::Accounts(_as) => {}
             Route::ComposeNote => {}
             Route::AddColumn(_add_col_route) => {}

@@ -480,7 +475,7 @@ impl<'a> NavTitle<'a> {
     }

     fn title_label(&self, ui: &mut egui::Ui, top: &Route) {
-        let column_title = top.title(self.columns);
+        let column_title = top.title();

         match &column_title {
             ColumnTitle::Simple(title) => {

@@ -489,7 +484,7 @@ impl<'a> NavTitle<'a> {

             ColumnTitle::NeedsDb(need_db) => {
                 let txn = Transaction::new(self.ndb).unwrap();
-                let title = need_db.title(&txn, self.ndb, self.deck_author);
+                let title = need_db.title(&txn, self.ndb);
                 ui.add(Self::title_label_value(title));
             }
         };
@@ -5,7 +5,7 @@ pub mod preview;
 pub use edit::EditProfileView;
 use egui::load::TexturePoll;
 use egui::{vec2, Color32, Label, Layout, Rect, RichText, Rounding, ScrollArea, Sense, Stroke};
-use enostr::{Pubkey, PubkeyRef};
+use enostr::Pubkey;
 use nostrdb::{Ndb, ProfileRecord, Transaction};
 pub use picture::ProfilePic;
 pub use preview::ProfilePreview;

@@ -15,7 +15,7 @@ use crate::{
     actionbar::NoteAction,
     colors, images,
     profile::get_display_name,
-    timeline::{TimelineCache, TimelineCacheKey},
+    timeline::{TimelineCache, TimelineKind},
     ui::{
         note::NoteOptions,
         timeline::{tabs_ui, TimelineTabView},

@@ -90,7 +90,7 @@ impl<'a> ProfileView<'a> {
                 self.ndb,
                 self.note_cache,
                 &txn,
-                TimelineCacheKey::Profile(PubkeyRef::new(self.pubkey.bytes())),
+                &TimelineKind::Profile(*self.pubkey),
             )
             .get_ptr();

@@ -288,7 +288,7 @@ impl<'a> DesktopSidePanel<'a> {
                 if router
                     .routes()
                     .iter()
-                    .any(|&r| r == Route::Accounts(AccountsRoute::Accounts))
+                    .any(|r| r == &Route::Accounts(AccountsRoute::Accounts))
                 {
                     // return if we are already routing to accounts
                     router.go_back();

@@ -297,7 +297,7 @@ impl<'a> DesktopSidePanel<'a> {
                 }
             }
             SidePanelAction::Settings => {
-                if router.routes().iter().any(|&r| r == Route::Relays) {
+                if router.routes().iter().any(|r| r == &Route::Relays) {
                     // return if we are already routing to accounts
                     router.go_back();
                 } else {

@@ -308,7 +308,7 @@ impl<'a> DesktopSidePanel<'a> {
                 if router
                     .routes()
                     .iter()
-                    .any(|&r| matches!(r, Route::AddColumn(_)))
+                    .any(|r| matches!(r, Route::AddColumn(_)))
                 {
                     router.go_back();
                 } else {

@@ -316,7 +316,7 @@ impl<'a> DesktopSidePanel<'a> {
                 }
             }
             SidePanelAction::ComposeNote => {
-                if router.routes().iter().any(|&r| r == Route::ComposeNote) {
+                if router.routes().iter().any(|r| r == &Route::ComposeNote) {
                     router.go_back();
                 } else {
                     router.route_to(Route::ComposeNote);

@@ -331,7 +331,7 @@ impl<'a> DesktopSidePanel<'a> {
                 info!("Clicked expand side panel button");
             }
             SidePanelAction::Support => {
-                if router.routes().iter().any(|&r| r == Route::Support) {
+                if router.routes().iter().any(|r| r == &Route::Support) {
                     router.go_back();
                 } else {
                     support.refresh();

@@ -339,7 +339,7 @@ impl<'a> DesktopSidePanel<'a> {
                 }
             }
             SidePanelAction::NewDeck => {
-                if router.routes().iter().any(|&r| r == Route::NewDeck) {
+                if router.routes().iter().any(|r| r == &Route::NewDeck) {
                     router.go_back();
                 } else {
                     router.route_to(Route::NewDeck);

@@ -351,7 +351,7 @@ impl<'a> DesktopSidePanel<'a> {
                 )))
             }
             SidePanelAction::EditDeck(index) => {
-                if router.routes().iter().any(|&r| r == Route::EditDeck(index)) {
+                if router.routes().iter().any(|r| r == &Route::EditDeck(index)) {
                     router.go_back();
                 } else {
                     switching_response = Some(crate::nav::SwitchingAction::Decks(
@@ -1,6 +1,6 @@
 use crate::{
     actionbar::NoteAction,
-    timeline::{TimelineCache, TimelineCacheKey},
+    timeline::{ThreadSelection, TimelineCache, TimelineKind},
     ui::note::NoteOptions,
 };

@@ -83,7 +83,7 @@ impl<'a> ThreadView<'a> {
                 self.ndb,
                 self.note_cache,
                 &txn,
-                TimelineCacheKey::Thread(root_id),
+                &TimelineKind::Thread(ThreadSelection::from_root_id(root_id.to_owned())),
             )
             .get_ptr();

@@ -3,8 +3,7 @@ use std::f32::consts::PI;
 use crate::actionbar::NoteAction;
 use crate::timeline::TimelineTab;
 use crate::{
-    column::Columns,
-    timeline::{TimelineId, ViewFilter},
+    timeline::{TimelineCache, TimelineKind, ViewFilter},
     ui,
     ui::note::NoteOptions,
 };

@@ -19,8 +18,8 @@ use tracing::{error, warn};
 use super::anim::{AnimationHelper, ICON_EXPANSION_MULTIPLE};

 pub struct TimelineView<'a> {
-    timeline_id: TimelineId,
-    columns: &'a mut Columns,
+    timeline_id: &'a TimelineKind,
+    timeline_cache: &'a mut TimelineCache,
     ndb: &'a Ndb,
     note_cache: &'a mut NoteCache,
     img_cache: &'a mut ImageCache,

@@ -31,8 +30,8 @@ pub struct TimelineView<'a> {

 impl<'a> TimelineView<'a> {
     pub fn new(
-        timeline_id: TimelineId,
-        columns: &'a mut Columns,
+        timeline_id: &'a TimelineKind,
+        timeline_cache: &'a mut TimelineCache,
         ndb: &'a Ndb,
         note_cache: &'a mut NoteCache,
         img_cache: &'a mut ImageCache,

@@ -43,7 +42,7 @@ impl<'a> TimelineView<'a> {
         TimelineView {
             ndb,
             timeline_id,
-            columns,
+            timeline_cache,
             note_cache,
             img_cache,
             reverse,

@@ -57,7 +56,7 @@ impl<'a> TimelineView<'a> {
             ui,
             self.ndb,
             self.timeline_id,
-            self.columns,
+            self.timeline_cache,
             self.note_cache,
             self.img_cache,
             self.reverse,

@@ -76,8 +75,8 @@ impl<'a> TimelineView<'a> {
 fn timeline_ui(
     ui: &mut egui::Ui,
     ndb: &Ndb,
-    timeline_id: TimelineId,
-    columns: &mut Columns,
+    timeline_id: &TimelineKind,
+    timeline_cache: &mut TimelineCache,
     note_cache: &mut NoteCache,
     img_cache: &mut ImageCache,
     reversed: bool,

@@ -92,7 +91,7 @@ fn timeline_ui(
     */

     let scroll_id = {
-        let timeline = if let Some(timeline) = columns.find_timeline_mut(timeline_id) {
+        let timeline = if let Some(timeline) = timeline_cache.timelines.get_mut(timeline_id) {
             timeline
         } else {
             error!("tried to render timeline in column, but timeline was missing");

@@ -142,7 +141,7 @@ fn timeline_ui(
     }

     let scroll_output = scroll_area.show(ui, |ui| {
-        let timeline = if let Some(timeline) = columns.find_timeline_mut(timeline_id) {
+        let timeline = if let Some(timeline) = timeline_cache.timelines.get(timeline_id) {
             timeline
         } else {
             error!("tried to render timeline in column, but timeline was missing");
@@ -1,4 +1,4 @@
-use crate::{column::Columns, Result};
+use crate::{timeline::TimelineCache, Result};
 use nostrdb::{Ndb, NoteKey, Transaction};
 use notedeck::{CachedNote, NoteCache, UnknownIds};
 use tracing::error;

@@ -6,12 +6,12 @@ use tracing::error;
 pub fn update_from_columns(
     txn: &Transaction,
     unknown_ids: &mut UnknownIds,
-    columns: &Columns,
+    timeline_cache: &TimelineCache,
     ndb: &Ndb,
     note_cache: &mut NoteCache,
 ) -> bool {
     let before = unknown_ids.ids().len();
-    if let Err(e) = get_unknown_ids(txn, unknown_ids, columns, ndb, note_cache) {
+    if let Err(e) = get_unknown_ids(txn, unknown_ids, timeline_cache, ndb, note_cache) {
         error!("UnknownIds::update {e}");
     }
     let after = unknown_ids.ids().len();

@@ -27,7 +27,7 @@ pub fn update_from_columns(
 pub fn get_unknown_ids(
     txn: &Transaction,
     unknown_ids: &mut UnknownIds,
-    columns: &Columns,
+    timeline_cache: &TimelineCache,
     ndb: &Ndb,
     note_cache: &mut NoteCache,
 ) -> Result<()> {

@@ -36,7 +36,7 @@ pub fn get_unknown_ids(

     let mut new_cached_notes: Vec<(NoteKey, CachedNote)> = vec![];

-    for timeline in columns.timelines() {
+    for (_kind, timeline) in timeline_cache.timelines.iter() {
         for noteref in timeline.all_or_any_notes() {
             let note = ndb.get_note_by_key(txn, noteref.key)?;
             let note_key = note.key().unwrap();
@@ -5,3 +5,4 @@
 description = "A simple library for parsing a serializing string tokens"

 [dependencies]
+hex = { workspace = true }
@@ -218,3 +218,15 @@ pub trait TokenSerializable: Sized {
     fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>>;
     fn serialize_tokens(&self, writer: &mut TokenWriter);
 }
+
+/// Parse a 32 byte hex string
+pub fn parse_hex_id<'a>(parser: &mut TokenParser<'a>) -> Result<[u8; 32], ParseError<'a>> {
+    use hex;
+
+    let hexid = parser.pull_token()?;
+    hex::decode(hexid)
+        .map_err(|_| ParseError::HexDecodeFailed)?
+        .as_slice()
+        .try_into()
+        .map_err(|_| ParseError::HexDecodeFailed)
+}
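A rough usage sketch for the new parse_hex_id helper: pull one token and decode it into a 32 byte id. The TokenParser and ParseError below are minimal stand-ins so the snippet is self-contained (the real ones live in the tokenator crate and carry a lifetime on the error), and the hex crate is assumed as a dependency:

#[derive(Debug)]
enum ParseError {
    HexDecodeFailed,
    EndOfTokens,
}

struct TokenParser<'a> {
    tokens: &'a [&'a str],
    pos: usize,
}

impl<'a> TokenParser<'a> {
    fn new(tokens: &'a [&'a str]) -> Self {
        Self { tokens, pos: 0 }
    }

    // hand one token to the caller and advance
    fn pull_token(&mut self) -> Result<&'a str, ParseError> {
        let tok = *self.tokens.get(self.pos).ok_or(ParseError::EndOfTokens)?;
        self.pos += 1;
        Ok(tok)
    }
}

// Same shape as the new tokenator helper: pull one token, decode 32 hex bytes.
fn parse_hex_id(parser: &mut TokenParser<'_>) -> Result<[u8; 32], ParseError> {
    let hexid = parser.pull_token()?;
    hex::decode(hexid)
        .map_err(|_| ParseError::HexDecodeFailed)?
        .as_slice()
        .try_into()
        .map_err(|_| ParseError::HexDecodeFailed)
}

fn main() {
    let toks = ["1c54e5b0c386425f7e017d9e068ddef8962eb2ce1bb08ed27e24b93411c12e60"];
    let mut parser = TokenParser::new(&toks);
    let id = parse_hex_id(&mut parser).expect("32-byte id");
    assert_eq!(id.len(), 32);
}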