Remove event_tags index and two unused temporary functions

Mike Dilger 2023-08-03 08:22:30 +12:00
parent feba64d9d2
commit c0dadafd7d
4 changed files with 4 additions and 173 deletions

View File

@@ -94,9 +94,6 @@ pub async fn process_new_event(
         GLOBALS.storage.write_person_relay(&pr)?;
     }
 
-    // Save the tags into event_tag table
-    GLOBALS.storage.write_event_tags(event)?;
-
     for tag in event.tags.iter() {
         match tag {
             Tag::Event {

View File

@@ -68,12 +68,8 @@ impl Storage {
         tracing::info!("LMDB: imported relays.");
 
         // old table "event"
         // old table "event_tag"
-        // Copy events (and regenerate event_tags)
-        import_events(&db, |event: &Event| {
-            self.write_event(event)?;
-            self.write_event_tags(event)
-        })?;
+        // Copy events
+        import_events(&db, |event: &Event| self.write_event(event))?;
         tracing::info!("LMDB: imported events and tag index");
 
         // old table "person"

View File

@@ -14,8 +14,7 @@ use lmdb::{
     Cursor, Database, DatabaseFlags, Environment, EnvironmentFlags, Stat, Transaction, WriteFlags,
 };
 use nostr_types::{
-    EncryptedPrivateKey, Event, EventKind, Id, MilliSatoshi, PublicKey, PublicKeyHex, RelayUrl,
-    Tag, Unixtime,
+    EncryptedPrivateKey, Event, EventKind, Id, MilliSatoshi, PublicKey, RelayUrl, Unixtime,
 };
 use speedy::{Readable, Writable};
 use std::collections::{HashMap, HashSet};
@@ -58,12 +57,6 @@ pub struct Storage {
     // val: serde_json::to_vec(relay) | serde_json::from_slice(bytes)
     relays: Database,
 
-    // Tag -> Id
-    // (dup keys, so multiple Ids per tag)
-    // key: key!(serde_json::to_vec(&tag)) (and remove trailing empty fields)
-    // val: id.as_slice() | Id(val[0..32].try_into()?)
-    event_tags: Database,
-
     // Id -> Event
     // key: id.as_slice() | Id(val[0..32].try_into()?)
     // val: event.write_to_vec() | Event::read_from_buffer(val)
@@ -138,11 +131,6 @@ impl Storage {
         let relays = env.create_db(Some("relays"), DatabaseFlags::empty())?;
 
-        let event_tags = env.create_db(
-            Some("event_tags"),
-            DatabaseFlags::DUP_SORT | DatabaseFlags::DUP_FIXED,
-        )?;
-
         let events = env.create_db(Some("events"), DatabaseFlags::empty())?;
 
         let event_ek_pk_index = env.create_db(
@@ -172,7 +160,6 @@ impl Storage {
             event_viewed,
             hashtags,
             relays,
-            event_tags,
             events,
             event_ek_pk_index,
             event_ek_c_index,
@@ -222,11 +209,6 @@ impl Storage {
         Ok(txn.stat(self.relays)?)
     }
 
-    pub fn get_event_tags_stats(&self) -> Result<Stat, Error> {
-        let txn = self.env.begin_ro_txn()?;
-        Ok(txn.stat(self.event_tags)?)
-    }
-
     pub fn get_event_stats(&self) -> Result<Stat, Error> {
         let txn = self.env.begin_ro_txn()?;
         Ok(txn.stat(self.events)?)
@@ -350,36 +332,9 @@ impl Storage {
         }
         txn.commit()?;
 
-        // Delete from event_tags
-        // (unfortunately since Ids are the values, we have to scan the whole thing)
-        let mut txn = self.env.begin_rw_txn()?;
-        let mut cursor = txn.open_rw_cursor(self.event_tags)?;
-        let iter = cursor.iter_start();
-        let mut deletions: Vec<(Vec<u8>, Vec<u8>)> = Vec::new();
-        for result in iter {
-            match result {
-                Err(e) => return Err(e.into()),
-                Ok((key, val)) => {
-                    let id = Id(val[0..32].try_into()?);
-                    if ids.contains(&id) {
-                        deletions.push((key.to_owned(), val.to_owned()));
-                    }
-                }
-            }
-        }
-        drop(cursor);
-        tracing::info!(
-            "PRUNE: deleting {} records from event_tags",
-            deletions.len()
-        );
-        for deletion in deletions.drain(..) {
-            txn.del(self.event_tags, &deletion.0, Some(&deletion.1))?;
-        }
-        txn.commit()?;
-
-        let mut txn = self.env.begin_rw_txn()?;
         // Delete from relationships
         // (unfortunately because of the 2nd Id in the tag, we have to scan the whole thing)
+        let mut txn = self.env.begin_rw_txn()?;
         let mut cursor = txn.open_rw_cursor(self.relationships)?;
         let iter = cursor.iter_start();
         let mut deletions: Vec<Vec<u8>> = Vec::new();
@@ -677,54 +632,6 @@ impl Storage {
         Ok(output)
     }
 
-    pub fn write_event_tags(&self, event: &Event) -> Result<(), Error> {
-        let mut txn = self.env.begin_rw_txn()?;
-        for tag in &event.tags {
-            let mut tagbytes = serde_json::to_vec(&tag)?;
-            tagbytes.truncate(MAX_LMDB_KEY);
-            txn.put(
-                self.event_tags,
-                &tagbytes,
-                &event.id.as_slice(),
-                WriteFlags::empty(),
-            )?;
-        }
-        txn.commit()?;
-        Ok(())
-    }
-
-    /// This finds events that have a tag starting with the values in the
-    /// passed in tag, and potentially having more tag fields.
-    pub fn find_events_with_tags(&self, tag: Tag) -> Result<Vec<Id>, Error> {
-        let mut start_key = serde_json::to_vec(&tag)?;
-        // remove trailing bracket so we match tags with addl fields
-        let _ = start_key.pop();
-        // remove any trailing empty fields
-        while start_key.ends_with(b",\"\"") {
-            start_key.truncate(start_key.len() - 3);
-        }
-        start_key.truncate(MAX_LMDB_KEY);
-        let txn = self.env.begin_ro_txn()?;
-        let mut cursor = txn.open_ro_cursor(self.event_tags)?;
-        let iter = cursor.iter_from(start_key.clone());
-        let mut output: Vec<Id> = Vec::new();
-        for result in iter {
-            match result {
-                Err(e) => return Err(e.into()),
-                Ok((key, val)) => {
-                    // Stop once we get to a non-matching tag
-                    if !key.starts_with(&start_key) {
-                        break;
-                    }
-                    // Add the event
-                    let id = Id(val[0..32].try_into()?);
-                    output.push(id);
-                }
-            }
-        }
-        Ok(output)
-    }
-
     pub fn write_event(&self, event: &Event) -> Result<(), Error> {
         // write to lmdb 'events'
         let bytes = event.write_to_vec()?;
@@ -1208,61 +1115,6 @@ impl Storage {
             .cloned())
     }
 
-    // This is temporary to feed src/events.rs which will be going away in a future
-    // code pass
-    pub fn fetch_reply_related_events(&self, since: Unixtime) -> Result<Vec<Event>, Error> {
-        let public_key: PublicKeyHex = match GLOBALS.signer.public_key() {
-            None => return Ok(vec![]),
-            Some(pk) => pk.into(),
-        };
-
-        let reply_related_kinds = GLOBALS.settings.read().feed_related_event_kinds();
-
-        let tag = Tag::Pubkey {
-            pubkey: public_key,
-            recommended_relay_url: None,
-            petname: None,
-            trailing: vec![],
-        };
-
-        let tagged_event_ids = self.find_events_with_tags(tag)?;
-
-        let events: Vec<Event> = tagged_event_ids
-            .iter()
-            .filter_map(|id| match self.read_event(*id) {
-                Ok(Some(event)) => {
-                    if event.created_at > since && reply_related_kinds.contains(&event.kind) {
-                        Some(event)
-                    } else {
-                        None
-                    }
-                }
-                _ => None,
-            })
-            .collect();
-
-        Ok(events)
-    }
-
-    // This is temporary to feed src/events.rs which will be going away in a future
-    // code pass
-    pub fn fetch_relay_lists(&self) -> Result<Vec<Event>, Error> {
-        let mut relay_lists =
-            self.find_events(&[EventKind::RelayList], &[], None, |_| true, false)?;
-
-        let mut latest: HashMap<PublicKey, Event> = HashMap::new();
-        for event in relay_lists.drain(..) {
-            if let Some(current_best) = latest.get(&event.pubkey) {
-                if current_best.created_at >= event.created_at {
-                    continue;
-                }
-            }
-            let _ = latest.insert(event.pubkey, event);
-        }
-
-        Ok(latest.values().map(|v| v.to_owned()).collect())
-    }
-
     pub fn get_highest_local_parent_event_id(&self, id: Id) -> Result<Option<Id>, Error> {
         let event = match self.read_event(id)? {
             Some(event) => event,

View File

@@ -110,20 +110,6 @@ pub(super) fn update(_app: &mut GossipUi, _ctx: &Context, _frame: &mut eframe::F
         ui.label(event_references_person_stats);
         ui.add_space(6.0);
 
-        let event_tags_stats = GLOBALS
-            .storage
-            .get_event_tags_stats()
-            .map(|s| {
-                format!(
-                    "Event Index (Tags): {} records, {} pages",
-                    s.entries(),
-                    s.leaf_pages()
-                )
-            })
-            .unwrap_or("".to_owned());
-        ui.label(event_tags_stats);
-        ui.add_space(6.0);
-
         let relationships_stats = GLOBALS
             .storage
             .get_relationships_stats()