Notes holder refactor #377

Closed
wants to merge 2 commits
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -18,7 +18,7 @@ eframe = { git = "https://github.com/emilk/egui", rev = "fcb7764e48ce00f8f8e58da
egui_extras = { git = "https://github.com/emilk/egui", rev = "fcb7764e48ce00f8f8e58da10f937410d65b0bfb", package = "egui_extras", features = ["all_loaders"] }
ehttp = "0.2.0"
egui_tabs = { git = "https://github.com/damus-io/egui-tabs", branch = "egui-0.28" }
egui_nav = { git = "https://github.com/damus-io/egui-nav", rev = "6ba42de2bae384d10e35c532f3856b81d2e9f645" }
egui_nav = { git = "https://github.com/damus-io/egui-nav", rev = "956338a90e09c7cda951d554626483e0cdbc7825" }
egui_virtual_list = { git = "https://github.com/jb55/hello_egui", branch = "egui-0.28", package = "egui_virtual_list" }
reqwest = { version = "0.12.4", default-features = false, features = [ "rustls-tls-native-roots" ] }
image = { version = "0.25", features = ["jpeg", "png", "webp"] }
1 change: 1 addition & 0 deletions enostr/src/relay/pool.rs
@@ -82,6 +82,7 @@ impl RelayPool {
}

pub fn unsubscribe(&mut self, subid: String) {
// TODO(jb55): switch to &str
for relay in &mut self.relays {
relay.relay.send(&ClientMessage::close(subid.clone()));
}
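The `TODO(jb55)` comment above suggests taking `&str` instead of an owned `String`. A minimal sketch of that follow-up, assuming `ClientMessage::close` keeps an owned-`String` argument as at the current call site (the exact `ClientMessage` API is not shown in this diff):

```rust
// Hypothetical follow-up for the TODO above: borrow the subscription id
// and only allocate where the outgoing message needs an owned String.
pub fn unsubscribe(&mut self, subid: &str) {
    for relay in &mut self.relays {
        relay.relay.send(&ClientMessage::close(subid.to_string()));
    }
}
```

Callers would then pass `&sub_id` rather than cloning the id once per call.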
77 changes: 47 additions & 30 deletions src/actionbar.rs
@@ -1,9 +1,8 @@
use crate::{
note::NoteRef,
note::{NoteRef, RootNoteId},
notecache::NoteCache,
notes_holder::{NotesHolder, NotesHolderStorage},
route::{Route, Router},
thread::Thread,
timeline::{CachedTimeline, TimelineCache, TimelineCacheKey},
};
use enostr::{NoteId, Pubkey, RelayPool};
use nostrdb::{Ndb, Transaction};
@@ -22,11 +21,11 @@ pub struct NoteActionResponse {
}

pub struct NewNotes {
pub id: [u8; 32],
pub id: TimelineCacheKey,
pub notes: Vec<NoteRef>,
}

pub enum NotesHolderResult {
pub enum BarResult {
NewNotes(NewNotes),
}

@@ -41,13 +40,22 @@ fn open_thread(
router: &mut Router<Route>,
note_cache: &mut NoteCache,
pool: &mut RelayPool,
threads: &mut NotesHolderStorage<Thread>,
timeline_cache: &mut TimelineCache,
selected_note: &[u8; 32],
) -> Option<NotesHolderResult> {
router.route_to(Route::thread(NoteId::new(selected_note.to_owned())));
) -> Option<BarResult> {
let root_id_raw =
crate::note::root_note_id_from_selected_id(ndb, note_cache, txn, selected_note);
let root_id = RootNoteId::new_unsafe(root_id_raw);

let root_id = crate::note::root_note_id_from_selected_id(ndb, note_cache, txn, selected_note);
Thread::open(ndb, note_cache, txn, pool, threads, root_id)
router.route_to(Route::thread(root_id.clone()));

timeline_cache.open(
ndb,
note_cache,
txn,
pool,
&TimelineCacheKey::thread(root_id),
)
}

impl BarAction {
@@ -56,21 +64,27 @@ impl BarAction {
self,
ndb: &Ndb,
router: &mut Router<Route>,
threads: &mut NotesHolderStorage<Thread>,
timeline_cache: &mut TimelineCache,
note_cache: &mut NoteCache,
pool: &mut RelayPool,
txn: &Transaction,
) -> Option<NotesHolderResult> {
) -> Option<BarResult> {
match self {
BarAction::Reply(note_id) => {
router.route_to(Route::reply(note_id));
router.navigating = true;
None
}

BarAction::OpenThread(note_id) => {
open_thread(ndb, txn, router, note_cache, pool, threads, note_id.bytes())
}
BarAction::OpenThread(note_id) => open_thread(
ndb,
txn,
router,
note_cache,
pool,
timeline_cache,
note_id.bytes(),
),

BarAction::Quote(note_id) => {
router.route_to(Route::quote(note_id));
@@ -85,51 +99,54 @@
self,
ndb: &Ndb,
router: &mut Router<Route>,
threads: &mut NotesHolderStorage<Thread>,
timeline_cache: &mut TimelineCache,
note_cache: &mut NoteCache,
pool: &mut RelayPool,
txn: &Transaction,
) {
if let Some(br) = self.execute(ndb, router, threads, note_cache, pool, txn) {
br.process(ndb, note_cache, txn, threads);
if let Some(br) = self.execute(ndb, router, timeline_cache, note_cache, pool, txn) {
br.process(ndb, note_cache, txn, timeline_cache);
}
}
}

impl NotesHolderResult {
pub fn new_notes(notes: Vec<NoteRef>, id: [u8; 32]) -> Self {
NotesHolderResult::NewNotes(NewNotes::new(notes, id))
impl BarResult {
pub fn new_notes(notes: Vec<NoteRef>, id: TimelineCacheKey) -> Self {
Self::NewNotes(NewNotes::new(notes, id))
}

pub fn process<N: NotesHolder>(
pub fn process(
&self,
ndb: &Ndb,
note_cache: &mut NoteCache,
txn: &Transaction,
storage: &mut NotesHolderStorage<N>,
timeline_cache: &mut TimelineCache,
) {
match self {
// update the thread for next render if we have new notes
NotesHolderResult::NewNotes(new_notes) => {
let holder = storage
.notes_holder_mutated(ndb, note_cache, txn, &new_notes.id)
Self::NewNotes(new_notes) => {
let notes = timeline_cache
.notes(ndb, note_cache, txn, &new_notes.id)
.get_ptr();
new_notes.process(holder);
new_notes.process(notes);
}
}
}
}

impl NewNotes {
pub fn new(notes: Vec<NoteRef>, id: [u8; 32]) -> Self {
pub fn new(notes: Vec<NoteRef>, id: TimelineCacheKey) -> Self {
NewNotes { notes, id }
}

/// Simple helper for processing a NewNotes result. It simply
/// inserts/merges the notes into the timeline cache
pub fn process<N: NotesHolder>(&self, thread: &mut N) {
pub fn process(&self, thread: &mut CachedTimeline) {
// threads are chronological, i.e. reversed from the default reverse-chronological order.
let reversed = true;
thread.get_view().insert(&self.notes, reversed);
thread
.timeline()
.get_current_view_mut()
.insert(&self.notes, reversed);
}
}
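For orientation, here is a rough sketch of how a caller might drive the refactored action path, using the `execute_and_process` signature from this diff; the surrounding `app` fields and the per-column `router` variable are assumptions based on the struct changes in `src/app.rs` below, not code from this PR:

```rust
// Hypothetical call site: the old NotesHolderStorage<Thread> argument is
// replaced by the shared TimelineCache, and any BarResult is folded back
// into that cache by execute_and_process.
let txn = Transaction::new(&app.ndb).expect("transaction");
BarAction::OpenThread(note_id).execute_and_process(
    &app.ndb,
    router,                   // &mut Router<Route> for the active column
    &mut app.timeline_cache,  // was: &mut app.threads
    &mut app.note_cache,
    &mut app.pool,
    &txn,
);
```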
58 changes: 13 additions & 45 deletions src/app.rs
@@ -13,13 +13,13 @@
nav,
note::NoteRef,
notecache::{CachedNote, NoteCache},
notes_holder::NotesHolderStorage,

Check failure on line 16 in src/app.rs (GitHub Actions: Check, Clippy, Test Suite): unresolved imports `crate::notes_holder`, `crate::profile::Profile`, `crate::thread`
profile::Profile,
storage::{self, DataPath, DataPathType, Directory, FileKeyStorage, KeyStorageType},
subscriptions::{SubKind, Subscriptions},
support::Support,
thread::Thread,
timeline::{Timeline, TimelineId, TimelineKind, ViewFilter},
timeline::{Timeline, TimelineCache, TimelineId, TimelineKind, ViewFilter},
ui::{self, DesktopSidePanel},
unknowns::UnknownIds,
view_state::ViewState,
@@ -43,7 +43,6 @@
pub enum DamusState {
Initializing,
Initialized,
NewTimelineSub(TimelineId),
}

/// We derive Deserialize/Serialize so we can persist app state on shutdown.
@@ -57,13 +56,12 @@
pub view_state: ViewState,
pub unknown_ids: UnknownIds,
pub drafts: Drafts,
pub threads: NotesHolderStorage<Thread>,
pub profiles: NotesHolderStorage<Profile>,
pub timeline_cache: TimelineCache,
pub img_cache: ImageCache,
pub accounts: AccountManager,
pub subscriptions: Subscriptions,
pub app_rect_handler: AppSizeHandler,
pub support: Support,
pub subscriptions: Subscriptions,

frame_history: crate::frame_history::FrameHistory,

@@ -102,7 +100,7 @@
fn send_initial_timeline_filter(
ndb: &Ndb,
can_since_optimize: bool,
subs: &mut Subscriptions,
subs: &mut RemoteSubscriptions,

Check failure on line 103 in src/app.rs (GitHub Actions: Check, Clippy, Test Suite): cannot find type `RemoteSubscriptions` in this scope
pool: &mut RelayPool,
timeline: &mut Timeline,
to: &str,
@@ -156,6 +154,7 @@
//let sub_id = damus.gen_subid(&SubKind::Initial);
let sub_id = Uuid::new_v4().to_string();
subs.subs.insert(sub_id.clone(), SubKind::Initial);
timeline.subscription.set_remote_subid(sub_id.clone());

let cmd = ClientMessage::req(sub_id, new_filters);
pool.send_to(&cmd, to);
@@ -362,7 +361,7 @@
note_cache: &mut NoteCache,
filters: &[Filter],
) -> Result<()> {
timeline.subscription = Some(ndb.subscribe(filters)?);
timeline.subscription.subscribe_local();
let txn = Transaction::new(ndb)?;
debug!(
"querying nostrdb sub {:?} {:?}",
@@ -478,33 +477,6 @@
.expect("home subscription failed");
}

DamusState::NewTimelineSub(new_timeline_id) => {
info!("adding new timeline {}", new_timeline_id);
setup_new_nostrdb_sub(
&damus.ndb,
&mut damus.note_cache,
&mut damus.columns,
new_timeline_id,
)
.expect("new timeline subscription failed");

if let Some(filter) = {
let timeline = damus
.columns
.find_timeline(new_timeline_id)
.expect("timeline");
match &timeline.filter {
FilterState::Ready(filters) => Some(filters.clone()),
_ => None,
}
} {
let subid = Uuid::new_v4().to_string();
damus.pool.subscribe(subid, filter);

damus.state = DamusState::Initialized;
}
}

DamusState::Initialized => (),
};

@@ -755,10 +727,9 @@
pool,
debug,
unknown_ids: UnknownIds::default(),
subscriptions: Subscriptions::default(),
subscriptions: RemoteSubscriptions::default(),
since_optimize: parsed_args.since_optimize,
threads: NotesHolderStorage::default(),
profiles: NotesHolderStorage::default(),
timeline_cache: TimelineCache::default(),
drafts: Drafts::default(),
state: DamusState::Initializing,
img_cache: ImageCache::new(imgcache_dir),
@@ -819,10 +790,6 @@
}
}

pub fn subscribe_new_timeline(&mut self, timeline_id: TimelineId) {
self.state = DamusState::NewTimelineSub(timeline_id);
}

pub fn mock<P: AsRef<Path>>(data_path: P) -> Self {
let mut columns = Columns::new();
let filter = Filter::from_json(include_str!("../queries/global.json")).unwrap();
@@ -845,10 +812,9 @@
Self {
debug,
unknown_ids: UnknownIds::default(),
subscriptions: Subscriptions::default(),
subscriptions: RemoteSubscriptions::default(),
since_optimize: true,
threads: NotesHolderStorage::default(),
profiles: NotesHolderStorage::default(),
timeline_cache: TimelineCache::default(),
drafts: Drafts::default(),
state: DamusState::Initializing,
pool: RelayPool::new(),
@@ -877,6 +843,7 @@
&mut self.subscriptions.subs
}

/*
pub fn note_cache_mut(&mut self) -> &mut NoteCache {
&mut self.note_cache
}
Expand All @@ -889,13 +856,14 @@
&self.threads
}

pub fn threads_mut(&mut self) -> &mut NotesHolderStorage<Thread> {
pub fn timeline_cache_mut(&mut self) -> &mut TimelineCache {
&mut self.threads
}

pub fn note_cache(&self) -> &NoteCache {
&self.note_cache
}
*/
}

/*