1
0
mirror of git://jb55.com/damus synced 2024-09-16 02:03:45 +00:00

perf: don't continuously attempt to fetch old profiles

Changelog-Changed: Save bandwidth by only fetching new profiles after a certain amount of time
This commit is contained in:
William Casarin 2023-10-23 10:31:47 +08:00
parent bbccc27a26
commit 76508dbbfd
12 changed files with 86 additions and 48 deletions

View File

@@ -64,7 +64,8 @@ class EventsModel: ObservableObject {
case .ok:
break
case .eose:
load_profiles(profiles_subid: profiles_id, relay_id: relay_id, load: .from_events(events), damus_state: state)
let txn = NdbTxn(ndb: self.state.ndb)
load_profiles(profiles_subid: profiles_id, relay_id: relay_id, load: .from_events(events), damus_state: state, txn: txn)
}
}
}

View File

@@ -53,8 +53,8 @@ class FollowersModel: ObservableObject {
has_contact.insert(ev.pubkey)
}
func load_profiles(relay_id: String) {
let authors = find_profiles_to_fetch_from_keys(profiles: damus_state.profiles, pks: contacts ?? [])
func load_profiles<Y>(relay_id: String, txn: NdbTxn<Y>) {
let authors = find_profiles_to_fetch_from_keys(profiles: damus_state.profiles, pks: contacts ?? [], txn: txn)
if authors.isEmpty {
return
}
@@ -83,7 +83,8 @@ class FollowersModel: ObservableObject {
case .eose(let sub_id):
if sub_id == self.sub_id {
load_profiles(relay_id: relay_id)
let txn = NdbTxn(ndb: self.damus_state.ndb)
load_profiles(relay_id: relay_id, txn: txn)
} else if sub_id == self.profiles_id {
damus_state.pool.unsubscribe(sub_id: profiles_id, to: [relay_id])
}

View File

@@ -22,11 +22,11 @@ class FollowingModel {
self.hashtags = hashtags
}
func get_filter() -> NostrFilter {
func get_filter<Y>(txn: NdbTxn<Y>) -> NostrFilter {
var f = NostrFilter(kinds: [.metadata])
f.authors = self.contacts.reduce(into: Array<Pubkey>()) { acc, pk in
// don't fetch profiles we already have
if damus_state.profiles.has_fresh_profile(id: pk) {
if damus_state.profiles.has_fresh_profile(id: pk, txn: txn) {
return
}
acc.append(pk)
@@ -34,8 +34,8 @@ class FollowingModel {
return f
}
func subscribe() {
let filter = get_filter()
func subscribe<Y>(txn: NdbTxn<Y>) {
let filter = get_filter(txn: txn)
if (filter.authors?.count ?? 0) == 0 {
needs_sub = false
return

View File

@@ -430,14 +430,15 @@ class HomeModel {
case .eose(let sub_id):
let txn = NdbTxn(ndb: damus_state.ndb)
if sub_id == dms_subid {
var dms = dms.dms.flatMap { $0.events }
dms.append(contentsOf: incoming_dms)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(dms), damus_state: damus_state)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(dms), damus_state: damus_state, txn: txn)
} else if sub_id == notifications_subid {
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_keys(notifications.uniq_pubkeys()), damus_state: damus_state)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_keys(notifications.uniq_pubkeys()), damus_state: damus_state, txn: txn)
} else if sub_id == home_subid {
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus_state)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus_state, txn: txn)
}
self.loading = false

View File

@@ -123,8 +123,9 @@ class ProfileModel: ObservableObject, Equatable {
break
//notify(.notice, notice)
case .eose:
let txn = NdbTxn(ndb: damus.ndb)
if resp.subid == sub_id {
load_profiles(profiles_subid: prof_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus)
load_profiles(profiles_subid: prof_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus, txn: txn)
}
progress += 1
break

View File

@@ -83,38 +83,38 @@ class SearchHomeModel: ObservableObject {
// global events are not realtime
unsubscribe(to: relay_id)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.all_events), damus_state: damus_state)
let txn = NdbTxn(ndb: damus_state.ndb)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.all_events), damus_state: damus_state, txn: txn)
}
break
}
}
}
func find_profiles_to_fetch(profiles: Profiles, load: PubkeysToLoad, cache: EventCache) -> [Pubkey] {
func find_profiles_to_fetch<Y>(profiles: Profiles, load: PubkeysToLoad, cache: EventCache, txn: NdbTxn<Y>) -> [Pubkey] {
switch load {
case .from_events(let events):
return find_profiles_to_fetch_from_events(profiles: profiles, events: events, cache: cache)
return find_profiles_to_fetch_from_events(profiles: profiles, events: events, cache: cache, txn: txn)
case .from_keys(let pks):
return find_profiles_to_fetch_from_keys(profiles: profiles, pks: pks)
return find_profiles_to_fetch_from_keys(profiles: profiles, pks: pks, txn: txn)
}
}
func find_profiles_to_fetch_from_keys(profiles: Profiles, pks: [Pubkey]) -> [Pubkey] {
Array(Set(pks.filter { pk in !profiles.has_fresh_profile(id: pk) }))
func find_profiles_to_fetch_from_keys<Y>(profiles: Profiles, pks: [Pubkey], txn: NdbTxn<Y>) -> [Pubkey] {
Array(Set(pks.filter { pk in !profiles.has_fresh_profile(id: pk, txn: txn) }))
}
func find_profiles_to_fetch_from_events(profiles: Profiles, events: [NostrEvent], cache: EventCache) -> [Pubkey] {
func find_profiles_to_fetch_from_events<Y>(profiles: Profiles, events: [NostrEvent], cache: EventCache, txn: NdbTxn<Y>) -> [Pubkey] {
var pubkeys = Set<Pubkey>()
for ev in events {
// lookup profiles from boosted events
if ev.known_kind == .boost, let bev = ev.get_inner_event(cache: cache), !profiles.has_fresh_profile(id: bev.pubkey) {
if ev.known_kind == .boost, let bev = ev.get_inner_event(cache: cache), !profiles.has_fresh_profile(id: bev.pubkey, txn: txn) {
pubkeys.insert(bev.pubkey)
}
if !profiles.has_fresh_profile(id: ev.pubkey) {
if !profiles.has_fresh_profile(id: ev.pubkey, txn: txn) {
pubkeys.insert(ev.pubkey)
}
}
@@ -127,27 +127,42 @@ enum PubkeysToLoad {
case from_keys([Pubkey])
}
func load_profiles(profiles_subid: String, relay_id: String, load: PubkeysToLoad, damus_state: DamusState) {
let authors = find_profiles_to_fetch(profiles: damus_state.profiles, load: load, cache: damus_state.events)
func load_profiles<Y>(profiles_subid: String, relay_id: String, load: PubkeysToLoad, damus_state: DamusState, txn: NdbTxn<Y>) {
let authors = find_profiles_to_fetch(profiles: damus_state.profiles, load: load, cache: damus_state.events, txn: txn)
guard !authors.isEmpty else {
return
}
print("loading \(authors.count) profiles from \(relay_id)")
let filter = NostrFilter(kinds: [.metadata],
authors: authors)
damus_state.pool.subscribe_to(sub_id: profiles_subid, filters: [filter], to: [relay_id]) { sub_id, conn_ev in
guard case .nostr_event(let ev) = conn_ev,
case .eose = ev,
sub_id == profiles_subid
else {
return
print("load_profiles: requesting \(authors.count) profiles from \(relay_id)")
let filter = NostrFilter(kinds: [.metadata], authors: authors)
damus_state.pool.subscribe_to(sub_id: profiles_subid, filters: [filter], to: [relay_id]) { rid, conn_ev in
let now = UInt64(Date.now.timeIntervalSince1970)
switch conn_ev {
case .ws_event:
break
case .nostr_event(let ev):
guard ev.subid == profiles_subid, rid == relay_id else { return }
switch ev {
case .event(_, let ev):
if ev.known_kind == .metadata {
damus_state.ndb.write_profile_last_fetched(pubkey: ev.pubkey, fetched_at: now)
}
case .eose:
print("load_profiles: done loading \(authors.count) profiles from \(relay_id)")
damus_state.pool.unsubscribe(sub_id: profiles_subid, to: [relay_id])
case .ok:
break
case .notice:
break
}
}
print("done loading \(authors.count) profiles from \(relay_id)")
damus_state.pool.unsubscribe(sub_id: profiles_subid, to: [relay_id])
}
}

View File

@@ -80,7 +80,8 @@ class SearchModel: ObservableObject {
self.loading = false
if sub_id == self.sub_id {
load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(self.events.all_events), damus_state: state)
let txn = NdbTxn(ndb: state.ndb)
load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(self.events.all_events), damus_state: state, txn: txn)
}
}
}

View File

@@ -120,7 +120,8 @@ class ThreadModel: ObservableObject {
}
if sub_id == self.base_subid {
load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(Array(event_map)), damus_state: damus_state)
let txn = NdbTxn(ndb: damus_state.ndb)
load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(Array(event_map)), damus_state: damus_state, txn: txn)
}
}

View File

@@ -55,7 +55,8 @@ class ZapsModel: ObservableObject {
break
case .eose:
let events = state.events.lookup_zaps(target: target).map { $0.request.ev }
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events), damus_state: state)
let txn = NdbTxn(ndb: state.ndb)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events), damus_state: state, txn: txn)
case .event(_, let ev):
guard ev.kind == 9735,
let zapper = state.profiles.lookup_zapper(pubkey: target.pubkey),

View File

@@ -30,7 +30,7 @@ class ProfileData {
class Profiles {
private var ndb: Ndb
static let db_freshness_threshold: TimeInterval = 24 * 60 * 60
static let db_freshness_threshold: TimeInterval = 24 * 60 * 8
@MainActor
private var profiles: [Pubkey: ProfileData] = [:]
@@ -93,9 +93,24 @@ class Profiles {
return ndb.lookup_profile_key(pubkey)
}
func has_fresh_profile(id: Pubkey) -> Bool {
guard let recv = lookup_with_timestamp(id).unsafeUnownedValue?.receivedAt else { return false }
return Date.now.timeIntervalSince(Date(timeIntervalSince1970: Double(recv))) < Profiles.db_freshness_threshold
func has_fresh_profile<Y>(id: Pubkey, txn: NdbTxn<Y>) -> Bool {
guard let fetched_at = ndb.read_profile_last_fetched(txn: txn, pubkey: id)
else {
return false
}
// In situations where a batch of profiles was fetched all at once,
// this will reduce the herding of the profile requests
let fuzz = Double.random(in: -60...60)
let threshold = Profiles.db_freshness_threshold + fuzz
let fetch_date = Date(timeIntervalSince1970: Double(fetched_at))
let since = Date.now.timeIntervalSince(fetch_date)
let fresh = since < threshold
//print("fresh = \(fresh): fetch_date \(since) < threshold \(threshold) \(id)")
return fresh
}
}

View File

@@ -29,10 +29,10 @@ class LNUrls {
guard tries < 5 else { return nil }
self.endpoints[pubkey] = .failed(tries: tries + 1)
case .fetched(let pr):
print("lnurls.lookup_or_fetch fetched \(lnurl)")
//print("lnurls.lookup_or_fetch fetched \(lnurl)")
return pr
case .fetching(let task):
print("lnurls.lookup_or_fetch already fetching \(lnurl)")
//print("lnurls.lookup_or_fetch already fetching \(lnurl)")
return await task.value
case .not_fetched:
print("lnurls.lookup_or_fetch not fetched \(lnurl)")

View File

@@ -151,7 +151,8 @@ struct FollowingView: View {
}
.tabViewStyle(.page(indexDisplayMode: .never))
.onAppear {
following.subscribe()
let txn = NdbTxn(ndb: self.damus_state.ndb)
following.subscribe(txn: txn)
}
.onDisappear {
following.unsubscribe()