diff --git a/enostr/src/keypair.rs b/enostr/src/keypair.rs
index ce1966d..187a57a 100644
--- a/enostr/src/keypair.rs
+++ b/enostr/src/keypair.rs
@@ -16,7 +16,7 @@ impl Keypair {
         let cloned_secret_key = secret_key.clone();
         let nostr_keys = nostr::Keys::new(secret_key);
         Keypair {
-            pubkey: Pubkey::new(&nostr_keys.public_key().to_bytes()),
+            pubkey: Pubkey::new(nostr_keys.public_key().to_bytes()),
             secret_key: Some(cloned_secret_key),
         }
     }
@@ -61,7 +61,7 @@ impl FullKeypair {
         let (xopk, _) = secret_key.x_only_public_key(&nostr::SECP256K1);
         let secret_key = nostr::SecretKey::from(*secret_key);
         FullKeypair {
-            pubkey: Pubkey::new(&xopk.serialize()),
+            pubkey: Pubkey::new(xopk.serialize()),
             secret_key,
         }
     }
diff --git a/enostr/src/pubkey.rs b/enostr/src/pubkey.rs
index 3941c03..a46c75f 100644
--- a/enostr/src/pubkey.rs
+++ b/enostr/src/pubkey.rs
@@ -5,14 +5,14 @@ use nostr::bech32::Hrp;
 use std::fmt;
 use tracing::debug;
 
-#[derive(Debug, Eq, PartialEq, Clone, Hash)]
+#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
 pub struct Pubkey([u8; 32]);
 
 static HRP_NPUB: Hrp = Hrp::parse_unchecked("npub");
 
 impl Pubkey {
-    pub fn new(data: &[u8; 32]) -> Self {
-        Self(*data)
+    pub fn new(data: [u8; 32]) -> Self {
+        Self(data)
     }
 
     pub fn hex(&self) -> String {
diff --git a/src/app.rs b/src/app.rs
index e2c2386..e6da417 100644
--- a/src/app.rs
+++ b/src/app.rs
@@ -20,6 +20,7 @@ use crate::timeline::{Timeline, TimelineSource, ViewFilter};
 use crate::ui::note::PostAction;
 use crate::ui::{self, AccountSelectionWidget, DesktopGlobalPopup};
 use crate::ui::{DesktopSidePanel, RelayView, View};
+use crate::unknowns::UnknownIds;
 use crate::{filter, Result};
 use egui_nav::{Nav, NavAction};
 use enostr::{ClientMessage, RelayEvent, RelayMessage, RelayPool};
@@ -30,10 +31,9 @@ use uuid::Uuid;
 
 use egui::{Context, Frame, Style};
 use egui_extras::{Size, StripBuilder};
-use nostrdb::{BlockType, Config, Filter, Mention, Ndb, Note, NoteKey, Transaction};
+use nostrdb::{Config, Filter, Ndb, Note, Transaction};
 
-use std::collections::{HashMap, HashSet};
-use std::hash::Hash;
+use std::collections::HashMap;
 use std::path::Path;
 use std::time::Duration;
 use tracing::{debug, error, info, trace, warn};
@@ -57,6 +57,7 @@ pub struct Damus {
     pub selected_timeline: i32,
 
     pub ndb: Ndb,
+    pub unknown_ids: UnknownIds,
     pub drafts: Drafts,
     pub threads: Threads,
     pub img_cache: ImageCache,
@@ -262,12 +263,11 @@ fn try_process_event(damus: &mut Damus, ctx: &egui::Context) -> Result<()> {
         }
     }
 
-    let mut unknown_ids: HashSet<UnknownId> = HashSet::new();
     for timeline in 0..damus.timelines.len() {
         let src = TimelineSource::column(timeline);
 
         if let Ok(true) = is_timeline_ready(damus, timeline) {
-            if let Err(err) = src.poll_notes_into_view(damus, &mut unknown_ids) {
+            if let Err(err) = src.poll_notes_into_view(damus) {
                 error!("poll_notes_into_view: {err}");
             }
         } else {
@@ -275,21 +275,24 @@ fn try_process_event(damus: &mut Damus, ctx: &egui::Context) -> Result<()> {
         }
     }
 
-    /*
-    let unknown_ids: Vec<UnknownId> = unknown_ids.into_iter().collect();
-    if let Some(filters) = get_unknown_ids_filter(&unknown_ids) {
-        info!(
-            "Getting {} unknown author profiles from relays",
-            unknown_ids.len()
-        );
-        let msg = ClientMessage::req("unknown_ids".to_string(), filters);
-        damus.pool.send(&msg);
+    if damus.unknown_ids.ready_to_send() {
+        unknown_id_send(damus);
     }
-    */
 
     Ok(())
 }
 
+fn unknown_id_send(damus: &mut Damus) {
+    let filter = damus.unknown_ids.filter().expect("filter");
+    info!(
+        "Getting {} unknown ids from relays",
+        damus.unknown_ids.ids().len()
+    );
+    let msg = ClientMessage::req("unknownids".to_string(), filter);
ClientMessage::req("unknownids".to_string(), filter); + damus.unknown_ids.clear(); + damus.pool.send(&msg); +} + /// Check our timeline filter and see if we have any filter data ready. /// Our timelines may require additional data before it is functional. For /// example, when we have to fetch a contact list before we do the actual @@ -345,117 +348,6 @@ fn is_timeline_ready(damus: &mut Damus, timeline: usize) -> Result { Ok(true) } -#[derive(Hash, Clone, Copy, PartialEq, Eq)] -pub enum UnknownId { - Pubkey([u8; 32]), - Id([u8; 32]), -} - -impl UnknownId { - pub fn is_pubkey(&self) -> Option<&[u8; 32]> { - match self { - UnknownId::Pubkey(pk) => Some(pk), - _ => None, - } - } - - pub fn is_id(&self) -> Option<&[u8; 32]> { - match self { - UnknownId::Id(id) => Some(id), - _ => None, - } - } -} - -/// Look for missing notes in various parts of notes that we see: -/// -/// - pubkeys and notes mentioned inside the note -/// - notes being replied to -/// -/// We return all of this in a HashSet so that we can fetch these from -/// remote relays. -/// -pub fn get_unknown_note_ids<'a>( - ndb: &Ndb, - cached_note: &CachedNote, - txn: &'a Transaction, - note: &Note<'a>, - note_key: NoteKey, - ids: &mut HashSet, -) -> Result<()> { - // the author pubkey - - if ndb.get_profile_by_pubkey(txn, note.pubkey()).is_err() { - ids.insert(UnknownId::Pubkey(*note.pubkey())); - } - - // pull notes that notes are replying to - if cached_note.reply.root.is_some() { - let note_reply = cached_note.reply.borrow(note.tags()); - if let Some(root) = note_reply.root() { - if ndb.get_note_by_id(txn, root.id).is_err() { - ids.insert(UnknownId::Id(*root.id)); - } - } - - if !note_reply.is_reply_to_root() { - if let Some(reply) = note_reply.reply() { - if ndb.get_note_by_id(txn, reply.id).is_err() { - ids.insert(UnknownId::Id(*reply.id)); - } - } - } - } - - let blocks = ndb.get_blocks_by_key(txn, note_key)?; - for block in blocks.iter(note) { - if block.blocktype() != BlockType::MentionBech32 { - continue; - } - - match block.as_mention().unwrap() { - Mention::Pubkey(npub) => { - if ndb.get_profile_by_pubkey(txn, npub.pubkey()).is_err() { - ids.insert(UnknownId::Pubkey(*npub.pubkey())); - } - } - Mention::Profile(nprofile) => { - if ndb.get_profile_by_pubkey(txn, nprofile.pubkey()).is_err() { - ids.insert(UnknownId::Pubkey(*nprofile.pubkey())); - } - } - Mention::Event(ev) => match ndb.get_note_by_id(txn, ev.id()) { - Err(_) => { - ids.insert(UnknownId::Id(*ev.id())); - if let Some(pk) = ev.pubkey() { - if ndb.get_profile_by_pubkey(txn, pk).is_err() { - ids.insert(UnknownId::Pubkey(*pk)); - } - } - } - Ok(note) => { - if ndb.get_profile_by_pubkey(txn, note.pubkey()).is_err() { - ids.insert(UnknownId::Pubkey(*note.pubkey())); - } - } - }, - Mention::Note(note) => match ndb.get_note_by_id(txn, note.id()) { - Err(_) => { - ids.insert(UnknownId::Id(*note.id())); - } - Ok(note) => { - if ndb.get_profile_by_pubkey(txn, note.pubkey()).is_err() { - ids.insert(UnknownId::Pubkey(*note.pubkey())); - } - } - }, - _ => {} - } - } - - Ok(()) -} - #[cfg(feature = "profiling")] fn setup_profiling() { puffin::set_scopes_on(true); // tell puffin to collect data @@ -529,6 +421,10 @@ fn update_damus(damus: &mut Damus, ctx: &egui::Context) { setup_profiling(); damus.state = DamusState::Initialized; + // this lets our eose handler know to close unknownids right away + damus + .subscriptions() + .insert("unknownids".to_string(), SubKind::OneShot); setup_initial_nostrdb_subs(damus).expect("home subscription failed"); } @@ -547,89 +443,25 @@ fn 
     }
 }
 
-fn get_unknown_ids(txn: &Transaction, damus: &mut Damus) -> Result<Vec<UnknownId>> {
-    #[cfg(feature = "profiling")]
-    puffin::profile_function!();
-
-    let mut ids: HashSet<UnknownId> = HashSet::new();
-    let mut new_cached_notes: Vec<(NoteKey, CachedNote)> = vec![];
-
-    for timeline in &damus.timelines {
-        for noteref in timeline.notes(ViewFilter::NotesAndReplies) {
-            let note = damus.ndb.get_note_by_key(txn, noteref.key)?;
-            let note_key = note.key().unwrap();
-            let cached_note = damus.note_cache().cached_note(noteref.key);
-            let cached_note = if let Some(cn) = cached_note {
-                cn.clone()
-            } else {
-                let new_cached_note = CachedNote::new(&note);
-                new_cached_notes.push((note_key, new_cached_note.clone()));
-                new_cached_note
-            };
-
-            let _ = get_unknown_note_ids(
-                &damus.ndb,
-                &cached_note,
-                txn,
-                &note,
-                note.key().unwrap(),
-                &mut ids,
-            );
-        }
-    }
-
-    // This is mainly done to avoid the double mutable borrow that would happen
-    // if we tried to update the note_cache mutably in the loop above
-    for (note_key, note) in new_cached_notes {
-        damus.note_cache_mut().cache_mut().insert(note_key, note);
-    }
-
-    Ok(ids.into_iter().collect())
-}
-
-fn get_unknown_ids_filter(ids: &[UnknownId]) -> Option<Vec<Filter>> {
-    if ids.is_empty() {
-        return None;
-    }
-
-    let ids = &ids[0..500.min(ids.len())];
-    let mut filters: Vec<Filter> = vec![];
-
-    let pks: Vec<&[u8; 32]> = ids.iter().flat_map(|id| id.is_pubkey()).collect();
-    if !pks.is_empty() {
-        let pk_filter = Filter::new().authors(pks).kinds([0]).build();
-
-        filters.push(pk_filter);
-    }
-
-    let note_ids: Vec<&[u8; 32]> = ids.iter().flat_map(|id| id.is_id()).collect();
-    if !note_ids.is_empty() {
-        filters.push(Filter::new().ids(note_ids).build());
-    }
-
-    Some(filters)
-}
-
 fn handle_eose(damus: &mut Damus, subid: &str, relay_url: &str) -> Result<()> {
     let sub_kind = if let Some(sub_kind) = damus.subscriptions().get(subid) {
         sub_kind
     } else {
-        warn!("got unknown eose subid {}", subid);
+        let n_subids = damus.subscriptions().len();
+        warn!(
+            "got unknown eose subid {}, {} tracked subscriptions",
+            subid, n_subids
+        );
         return Ok(());
     };
 
     match *sub_kind {
         SubKind::Initial => {
             let txn = Transaction::new(&damus.ndb)?;
-            let ids = get_unknown_ids(&txn, damus)?;
-            if let Some(filters) = get_unknown_ids_filter(&ids) {
-                info!("Getting {} unknown ids from {}", ids.len(), relay_url);
-                let sub_id = Uuid::new_v4().to_string();
-
-                let msg = ClientMessage::req(sub_id.clone(), filters);
-                // unknownids are a oneshot request
-                damus.subscriptions().insert(sub_id, SubKind::OneShot);
-                damus.pool.send_to(&msg, relay_url);
+            UnknownIds::update(&txn, damus);
+            // this is possible if this is the first time
+            if damus.unknown_ids.ready_to_send() {
+                unknown_id_send(damus);
             }
         }
@@ -807,6 +639,7 @@ impl Damus {
         Self {
             pool,
             is_mobile,
+            unknown_ids: UnknownIds::default(),
             subscriptions: Subscriptions::default(),
             since_optimize: parsed_args.since_optimize,
             threads: Threads::default(),
@@ -841,6 +674,7 @@ impl Damus {
         config.set_ingester_threads(2);
         Self {
             is_mobile,
+            unknown_ids: UnknownIds::default(),
             subscriptions: Subscriptions::default(),
             since_optimize: true,
             threads: Threads::default(),
diff --git a/src/lib.rs b/src/lib.rs
index 780a1e4..bbe870e 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -35,6 +35,7 @@ mod time;
 mod timecache;
 mod timeline;
 pub mod ui;
+mod unknowns;
 mod user_account;
 
 #[cfg(test)]
diff --git a/src/timeline.rs b/src/timeline.rs
index e1d641a..92d46eb 100644
--- a/src/timeline.rs
+++ b/src/timeline.rs
@@ -1,8 +1,8 @@
-use crate::app::{get_unknown_note_ids, UnknownId};
 use crate::column::{ColumnKind, PubkeySource};
 use crate::error::Error;
 use crate::note::NoteRef;
 use crate::notecache::CachedNote;
+use crate::unknowns::UnknownIds;
 use crate::{filter, filter::FilterState};
 use crate::{Damus, Result};
 use std::sync::atomic::{AtomicU32, Ordering};
@@ -13,7 +13,6 @@ use egui_virtual_list::VirtualList;
 use enostr::Pubkey;
 use nostrdb::{Note, Subscription, Transaction};
 use std::cell::RefCell;
-use std::collections::HashSet;
 use std::rc::Rc;
 use tracing::{debug, error};
 
@@ -70,11 +69,7 @@ impl<'a> TimelineSource<'a> {
 
     /// Check local subscriptions for new notes and insert them into
     /// timelines (threads, columns)
-    pub fn poll_notes_into_view(
-        &self,
-        app: &mut Damus,
-        ids: &mut HashSet<UnknownId>,
-    ) -> Result<()> {
+    pub fn poll_notes_into_view(&self, app: &mut Damus) -> Result<()> {
         let sub = {
             let txn = Transaction::new(&app.ndb).expect("txn");
             if let Some(sub) = self.sub(app, &txn) {
@@ -102,11 +97,7 @@ impl<'a> TimelineSource<'a> {
                 continue;
             };
 
-            let cached_note = app
-                .note_cache_mut()
-                .cached_note_or_insert(key, &note)
-                .clone();
-            let _ = get_unknown_note_ids(&app.ndb, &cached_note, &txn, &note, key, ids);
+            UnknownIds::update_from_note(&txn, app, &note);
 
             let created_at = note.created_at();
             new_refs.push((note, NoteRef { key, created_at }));
@@ -297,7 +288,7 @@ impl Timeline {
     /// Create a timeline from a contact list
     pub fn contact_list(contact_list: &Note) -> Result<Self> {
         let filter = filter::filter_from_tags(contact_list)?.into_follow_filter();
-        let pk_src = PubkeySource::Explicit(Pubkey::new(contact_list.pubkey()));
+        let pk_src = PubkeySource::Explicit(Pubkey::new(*contact_list.pubkey()));
 
         Ok(Timeline::new(
             ColumnKind::contact_list(pk_src),
diff --git a/src/ui/thread.rs b/src/ui/thread.rs
index b904586..de6f967 100644
--- a/src/ui/thread.rs
+++ b/src/ui/thread.rs
@@ -1,7 +1,6 @@
 use crate::{actionbar::BarResult, timeline::TimelineSource, ui, Damus};
 use nostrdb::{NoteKey, Transaction};
-use std::collections::HashSet;
-use tracing::warn;
+use tracing::{error, warn};
 
 pub struct ThreadView<'a> {
     app: &'a mut Damus,
@@ -72,11 +71,8 @@ impl<'a> ThreadView<'a> {
         };
 
         // poll for new notes and insert them into our existing notes
-        {
-            let mut ids = HashSet::new();
-            let _ = TimelineSource::Thread(root_id)
-                .poll_notes_into_view(self.app, &mut ids);
-            // TODO: do something with unknown ids
+        if let Err(e) = TimelineSource::Thread(root_id).poll_notes_into_view(self.app) {
+            error!("Thread::poll_notes_into_view: {e}");
         }
 
         let (len, list) = {
diff --git a/src/unknowns.rs b/src/unknowns.rs
new file mode 100644
index 0000000..72ab4fa
--- /dev/null
+++ b/src/unknowns.rs
@@ -0,0 +1,278 @@
+use crate::notecache::CachedNote;
+use crate::timeline::ViewFilter;
+use crate::{Damus, Result};
+use enostr::{Filter, NoteId, Pubkey};
+use nostrdb::{BlockType, Mention, Ndb, Note, NoteKey, Transaction};
+use std::collections::HashSet;
+use std::time::{Duration, Instant};
+use tracing::error;
+
+/// Unknown Id searcher
+#[derive(Default)]
+pub struct UnknownIds {
+    ids: HashSet<UnknownId>,
+    first_updated: Option<Instant>,
+    last_updated: Option<Instant>,
+}
+
+impl UnknownIds {
+    /// Simple debouncer
+    pub fn ready_to_send(&self) -> bool {
+        if self.ids.is_empty() {
+            return false;
+        }
+
+        // we trigger on first set
+        if self.first_updated == self.last_updated {
+            return true;
+        }
+
+        let last_updated = if let Some(last) = self.last_updated {
+            last
+        } else {
+            // if we've never been updated, we're ready to send
+            return true;
+        };
+
+        Instant::now() - last_updated >= Duration::from_secs(2)
+    }
+
+    pub fn ids(&self) -> &HashSet<UnknownId> {
+        &self.ids
+    }
+
+    pub fn ids_mut(&mut self) -> &mut HashSet<UnknownId> {
+        &mut self.ids
+    }
+
+    pub fn clear(&mut self) {
+        self.ids = HashSet::default();
+    }
+
+    pub fn filter(&self) -> Option<Vec<Filter>> {
+        let ids: Vec<&UnknownId> = self.ids.iter().collect();
+        get_unknown_ids_filter(&ids)
+    }
+
+    /// We've updated some unknown ids, update the last_updated time to now
+    pub fn mark_updated(&mut self) {
+        let now = Instant::now();
+        if self.first_updated.is_none() {
+            self.first_updated = Some(now);
+        }
+        self.last_updated = Some(now);
+    }
+
+    pub fn update_from_note(txn: &Transaction, app: &mut Damus, note: &Note) -> bool {
+        let before = app.unknown_ids.ids().len();
+        let key = note.key().expect("note key");
+        let cached_note = app
+            .note_cache_mut()
+            .cached_note_or_insert(key, note)
+            .clone();
+        if let Err(e) =
+            get_unknown_note_ids(&app.ndb, &cached_note, txn, note, app.unknown_ids.ids_mut())
+        {
+            error!("UnknownIds::update_from_note {e}");
+        }
+        let after = app.unknown_ids.ids().len();
+
+        if before != after {
+            app.unknown_ids.mark_updated();
+            true
+        } else {
+            false
+        }
+    }
+
+    pub fn update(txn: &Transaction, app: &mut Damus) -> bool {
+        let before = app.unknown_ids.ids().len();
+        if let Err(e) = get_unknown_ids(txn, app) {
+            error!("UnknownIds::update {e}");
+        }
+        let after = app.unknown_ids.ids().len();
+
+        if before != after {
+            app.unknown_ids.mark_updated();
+            true
+        } else {
+            false
+        }
+    }
+}
+
+#[derive(Hash, Clone, Copy, PartialEq, Eq)]
+pub enum UnknownId {
+    Pubkey(Pubkey),
+    Id(NoteId),
+}
+
+impl UnknownId {
+    pub fn is_pubkey(&self) -> Option<&Pubkey> {
+        match self {
+            UnknownId::Pubkey(pk) => Some(pk),
+            _ => None,
+        }
+    }
+
+    pub fn is_id(&self) -> Option<&NoteId> {
+        match self {
+            UnknownId::Id(id) => Some(id),
+            _ => None,
+        }
+    }
+}
+
+/// Look for missing notes in various parts of notes that we see:
+///
+/// - pubkeys and notes mentioned inside the note
+/// - notes being replied to
+///
+/// We return all of this in a HashSet so that we can fetch these from
+/// remote relays.
+///
+pub fn get_unknown_note_ids<'a>(
+    ndb: &Ndb,
+    cached_note: &CachedNote,
+    txn: &'a Transaction,
+    note: &Note<'a>,
+    ids: &mut HashSet<UnknownId>,
+) -> Result<()> {
+    // the author pubkey
+
+    if ndb.get_profile_by_pubkey(txn, note.pubkey()).is_err() {
+        ids.insert(UnknownId::Pubkey(Pubkey::new(*note.pubkey())));
+    }
+
+    // pull notes that notes are replying to
+    if cached_note.reply.root.is_some() {
+        let note_reply = cached_note.reply.borrow(note.tags());
+        if let Some(root) = note_reply.root() {
+            if ndb.get_note_by_id(txn, root.id).is_err() {
+                ids.insert(UnknownId::Id(NoteId::new(*root.id)));
+            }
+        }
+
+        if !note_reply.is_reply_to_root() {
+            if let Some(reply) = note_reply.reply() {
+                if ndb.get_note_by_id(txn, reply.id).is_err() {
+                    ids.insert(UnknownId::Id(NoteId::new(*reply.id)));
+                }
+            }
+        }
+    }
+
+    let blocks = ndb.get_blocks_by_key(txn, note.key().expect("note key"))?;
+    for block in blocks.iter(note) {
+        if block.blocktype() != BlockType::MentionBech32 {
+            continue;
+        }
+
+        match block.as_mention().unwrap() {
+            Mention::Pubkey(npub) => {
+                if ndb.get_profile_by_pubkey(txn, npub.pubkey()).is_err() {
+                    ids.insert(UnknownId::Pubkey(Pubkey::new(*npub.pubkey())));
+                }
+            }
+            Mention::Profile(nprofile) => {
+                if ndb.get_profile_by_pubkey(txn, nprofile.pubkey()).is_err() {
+                    ids.insert(UnknownId::Pubkey(Pubkey::new(*nprofile.pubkey())));
+                }
+            }
+            Mention::Event(ev) => match ndb.get_note_by_id(txn, ev.id()) {
+                Err(_) => {
+                    ids.insert(UnknownId::Id(NoteId::new(*ev.id())));
+                    if let Some(pk) = ev.pubkey() {
+                        if ndb.get_profile_by_pubkey(txn, pk).is_err() {
+                            ids.insert(UnknownId::Pubkey(Pubkey::new(*pk)));
+                        }
+                    }
+                }
+                Ok(note) => {
+                    if ndb.get_profile_by_pubkey(txn, note.pubkey()).is_err() {
+                        ids.insert(UnknownId::Pubkey(Pubkey::new(*note.pubkey())));
+                    }
+                }
+            },
+            Mention::Note(note) => match ndb.get_note_by_id(txn, note.id()) {
+                Err(_) => {
+                    ids.insert(UnknownId::Id(NoteId::new(*note.id())));
+                }
+                Ok(note) => {
+                    if ndb.get_profile_by_pubkey(txn, note.pubkey()).is_err() {
+                        ids.insert(UnknownId::Pubkey(Pubkey::new(*note.pubkey())));
+                    }
+                }
+            },
+            _ => {}
+        }
+    }
+
+    Ok(())
+}
+
+fn get_unknown_ids(txn: &Transaction, damus: &mut Damus) -> Result<()> {
+    #[cfg(feature = "profiling")]
+    puffin::profile_function!();
+
+    let mut new_cached_notes: Vec<(NoteKey, CachedNote)> = vec![];
+
+    for timeline in &damus.timelines {
+        for noteref in timeline.notes(ViewFilter::NotesAndReplies) {
+            let note = damus.ndb.get_note_by_key(txn, noteref.key)?;
+            let note_key = note.key().unwrap();
+            let cached_note = damus.note_cache().cached_note(noteref.key);
+            let cached_note = if let Some(cn) = cached_note {
+                cn.clone()
+            } else {
+                let new_cached_note = CachedNote::new(&note);
+                new_cached_notes.push((note_key, new_cached_note.clone()));
+                new_cached_note
+            };
+
+            let _ = get_unknown_note_ids(
+                &damus.ndb,
+                &cached_note,
+                txn,
+                &note,
+                damus.unknown_ids.ids_mut(),
+            );
+        }
+    }
+
+    // This is mainly done to avoid the double mutable borrow that would happen
+    // if we tried to update the note_cache mutably in the loop above
+    for (note_key, note) in new_cached_notes {
+        damus.note_cache_mut().cache_mut().insert(note_key, note);
+    }
+
+    Ok(())
+}
+
+fn get_unknown_ids_filter(ids: &[&UnknownId]) -> Option<Vec<Filter>> {
+    if ids.is_empty() {
+        return None;
+    }
+
+    let ids = &ids[0..500.min(ids.len())];
+    let mut filters: Vec<Filter> = vec![];
+
+    let pks: Vec<&[u8; 32]> = ids
+        .iter()
+        .flat_map(|id| id.is_pubkey().map(|pk| pk.bytes()))
+        .collect();
+    if !pks.is_empty() {
+        let pk_filter = Filter::new().authors(pks).kinds([0]).build();
+        filters.push(pk_filter);
+    }
+
+    let note_ids: Vec<&[u8; 32]> = ids
+        .iter()
+        .flat_map(|id| id.is_id().map(|id| id.bytes()))
+        .collect();
+    if !note_ids.is_empty() {
+        filters.push(Filter::new().ids(note_ids).build());
+    }
+
+    Some(filters)
+}
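
Reviewer note (not part of the patch): the heart of this change is the debounce rule in `UnknownIds::ready_to_send`, which decides when `unknown_id_send` fires — immediately for the first batch of ids, then only after updates have been quiet for two seconds. Below is a minimal, standalone sketch of just that rule, with the id set reduced to a counter. The `Debouncer` and `pending` names are illustrative only; the real struct tracks a `HashSet<UnknownId>`, builds a relay filter with `filter()`, and is cleared by `unknown_id_send` once the request is sent.

```rust
use std::time::{Duration, Instant};

/// Sketch of the send/debounce rule: fire on the first batch, then only
/// once updates have been quiet for at least two seconds.
#[derive(Default)]
struct Debouncer {
    first_updated: Option<Instant>,
    last_updated: Option<Instant>,
    pending: usize, // stands in for `ids.len()` in the real `UnknownIds`
}

impl Debouncer {
    /// Mirrors `UnknownIds::mark_updated`: remember the first and latest update times.
    fn mark_updated(&mut self) {
        let now = Instant::now();
        if self.first_updated.is_none() {
            self.first_updated = Some(now);
        }
        self.last_updated = Some(now);
        self.pending += 1;
    }

    /// Mirrors `UnknownIds::ready_to_send`.
    fn ready_to_send(&self) -> bool {
        if self.pending == 0 {
            return false; // nothing to ask relays for
        }
        if self.first_updated == self.last_updated {
            return true; // first batch: send right away
        }
        match self.last_updated {
            Some(last) => Instant::now() - last >= Duration::from_secs(2),
            None => true,
        }
    }
}

fn main() {
    let mut unknowns = Debouncer::default();
    assert!(!unknowns.ready_to_send()); // empty set never triggers a request
    unknowns.mark_updated();
    assert!(unknowns.ready_to_send()); // first batch goes out immediately
}
```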