From f9a7deea365b43e1397839efeb4d0e2aa8a105d1 Mon Sep 17 00:00:00 2001
From: William Casarin
Date: Mon, 23 Oct 2023 10:31:47 +0800
Subject: [PATCH] perf: don't continuously attempt to fetch old profiles

Changelog-Changed: Save bandwidth by only fetching new profiles after a certain amount of time
---
 damus/Models/EventsModel.swift     |  3 +-
 damus/Models/FollowersModel.swift  |  7 ++--
 damus/Models/FollowingModel.swift  |  8 ++--
 damus/Models/HomeModel.swift       |  7 ++--
 damus/Models/ProfileModel.swift    |  3 +-
 damus/Models/SearchHomeModel.swift | 67 ++++++++++++++++++------------
 damus/Models/SearchModel.swift     |  3 +-
 damus/Models/ThreadModel.swift     |  3 +-
 damus/Models/ZapsModel.swift       |  3 +-
 damus/Nostr/Profiles.swift         | 23 ++++++++--
 damus/Util/LNUrls.swift            |  4 +-
 damus/Views/FollowingView.swift    |  3 +-
 12 files changed, 86 insertions(+), 48 deletions(-)

diff --git a/damus/Models/EventsModel.swift b/damus/Models/EventsModel.swift
index 96248b8640..7c045e916d 100644
--- a/damus/Models/EventsModel.swift
+++ b/damus/Models/EventsModel.swift
@@ -64,7 +64,8 @@ class EventsModel: ObservableObject {
         case .ok:
             break
         case .eose:
-            load_profiles(profiles_subid: profiles_id, relay_id: relay_id, load: .from_events(events), damus_state: state)
+            let txn = NdbTxn(ndb: self.state.ndb)
+            load_profiles(profiles_subid: profiles_id, relay_id: relay_id, load: .from_events(events), damus_state: state, txn: txn)
         }
     }
 }
diff --git a/damus/Models/FollowersModel.swift b/damus/Models/FollowersModel.swift
index 4faee3f03f..1f5ca6caeb 100644
--- a/damus/Models/FollowersModel.swift
+++ b/damus/Models/FollowersModel.swift
@@ -53,8 +53,8 @@ class FollowersModel: ObservableObject {
         has_contact.insert(ev.pubkey)
     }
 
-    func load_profiles(relay_id: String) {
-        let authors = find_profiles_to_fetch_from_keys(profiles: damus_state.profiles, pks: contacts ?? [])
+    func load_profiles(relay_id: String, txn: NdbTxn) {
+        let authors = find_profiles_to_fetch_from_keys(profiles: damus_state.profiles, pks: contacts ?? [], txn: txn)
         if authors.isEmpty {
             return
         }
@@ -83,7 +83,8 @@
         case .eose(let sub_id):
             if sub_id == self.sub_id {
-                load_profiles(relay_id: relay_id)
+                let txn = NdbTxn(ndb: self.damus_state.ndb)
+                load_profiles(relay_id: relay_id, txn: txn)
             } else if sub_id == self.profiles_id {
                 damus_state.pool.unsubscribe(sub_id: profiles_id, to: [relay_id])
             }
diff --git a/damus/Models/FollowingModel.swift b/damus/Models/FollowingModel.swift
index 45f84fe1ef..87c67074e8 100644
--- a/damus/Models/FollowingModel.swift
+++ b/damus/Models/FollowingModel.swift
@@ -22,11 +22,11 @@ class FollowingModel {
         self.hashtags = hashtags
     }
 
-    func get_filter() -> NostrFilter {
+    func get_filter(txn: NdbTxn) -> NostrFilter {
         var f = NostrFilter(kinds: [.metadata])
         f.authors = self.contacts.reduce(into: Array()) { acc, pk in
             // don't fetch profiles we already have
-            if damus_state.profiles.has_fresh_profile(id: pk) {
+            if damus_state.profiles.has_fresh_profile(id: pk, txn: txn) {
                 return
             }
             acc.append(pk)
@@ -34,8 +34,8 @@
         return f
     }
 
-    func subscribe() {
-        let filter = get_filter()
+    func subscribe(txn: NdbTxn) {
+        let filter = get_filter(txn: txn)
         if (filter.authors?.count ?? 0) == 0 {
             needs_sub = false
             return
diff --git a/damus/Models/HomeModel.swift b/damus/Models/HomeModel.swift
index 4ee4d752b6..fd0a16b6c8 100644
--- a/damus/Models/HomeModel.swift
+++ b/damus/Models/HomeModel.swift
@@ -430,14 +430,15 @@ class HomeModel {
 
         case .eose(let sub_id):
+            let txn = NdbTxn(ndb: damus_state.ndb)
             if sub_id == dms_subid {
                 var dms = dms.dms.flatMap { $0.events }
                 dms.append(contentsOf: incoming_dms)
-                load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(dms), damus_state: damus_state)
+                load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(dms), damus_state: damus_state, txn: txn)
             } else if sub_id == notifications_subid {
-                load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_keys(notifications.uniq_pubkeys()), damus_state: damus_state)
+                load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_keys(notifications.uniq_pubkeys()), damus_state: damus_state, txn: txn)
             } else if sub_id == home_subid {
-                load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus_state)
+                load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus_state, txn: txn)
             }
 
             self.loading = false
diff --git a/damus/Models/ProfileModel.swift b/damus/Models/ProfileModel.swift
index 71229d3173..3f40602e63 100644
--- a/damus/Models/ProfileModel.swift
+++ b/damus/Models/ProfileModel.swift
@@ -123,8 +123,9 @@ class ProfileModel: ObservableObject, Equatable {
             break
             //notify(.notice, notice)
         case .eose:
+            let txn = NdbTxn(ndb: damus.ndb)
             if resp.subid == sub_id {
-                load_profiles(profiles_subid: prof_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus)
+                load_profiles(profiles_subid: prof_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus, txn: txn)
             }
             progress += 1
             break
diff --git a/damus/Models/SearchHomeModel.swift b/damus/Models/SearchHomeModel.swift
index 1fd3d3729a..fdfa675193 100644
--- a/damus/Models/SearchHomeModel.swift
+++ b/damus/Models/SearchHomeModel.swift
@@ -83,38 +83,38 @@ class SearchHomeModel: ObservableObject {
                 // global events are not realtime
                 unsubscribe(to: relay_id)
-                load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.all_events), damus_state: damus_state)
+                let txn = NdbTxn(ndb: damus_state.ndb)
+                load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.all_events), damus_state: damus_state, txn: txn)
             }
-
-
+
             break
         }
     }
 }
 
-func find_profiles_to_fetch(profiles: Profiles, load: PubkeysToLoad, cache: EventCache) -> [Pubkey] {
+func find_profiles_to_fetch(profiles: Profiles, load: PubkeysToLoad, cache: EventCache, txn: NdbTxn) -> [Pubkey] {
     switch load {
     case .from_events(let events):
-        return find_profiles_to_fetch_from_events(profiles: profiles, events: events, cache: cache)
+        return find_profiles_to_fetch_from_events(profiles: profiles, events: events, cache: cache, txn: txn)
     case .from_keys(let pks):
-        return find_profiles_to_fetch_from_keys(profiles: profiles, pks: pks)
+        return find_profiles_to_fetch_from_keys(profiles: profiles, pks: pks, txn: txn)
    }
 }
 
-func find_profiles_to_fetch_from_keys(profiles: Profiles, pks: [Pubkey]) -> [Pubkey] {
-    Array(Set(pks.filter { pk in !profiles.has_fresh_profile(id: pk) }))
+func find_profiles_to_fetch_from_keys(profiles: Profiles, pks: [Pubkey], txn: NdbTxn) -> [Pubkey] {
+    Array(Set(pks.filter { pk in !profiles.has_fresh_profile(id: pk, txn: txn) }))
 }
 
-func find_profiles_to_fetch_from_events(profiles: Profiles, events: [NostrEvent], cache: EventCache) -> [Pubkey] {
+func find_profiles_to_fetch_from_events(profiles: Profiles, events: [NostrEvent], cache: EventCache, txn: NdbTxn) -> [Pubkey] {
     var pubkeys = Set()
 
     for ev in events {
         // lookup profiles from boosted events
-        if ev.known_kind == .boost, let bev = ev.get_inner_event(cache: cache), !profiles.has_fresh_profile(id: bev.pubkey) {
+        if ev.known_kind == .boost, let bev = ev.get_inner_event(cache: cache), !profiles.has_fresh_profile(id: bev.pubkey, txn: txn) {
             pubkeys.insert(bev.pubkey)
         }
 
-        if !profiles.has_fresh_profile(id: ev.pubkey) {
+        if !profiles.has_fresh_profile(id: ev.pubkey, txn: txn) {
             pubkeys.insert(ev.pubkey)
         }
     }
@@ -127,27 +127,42 @@ enum PubkeysToLoad {
     case from_keys([Pubkey])
 }
 
-func load_profiles(profiles_subid: String, relay_id: String, load: PubkeysToLoad, damus_state: DamusState) {
-    let authors = find_profiles_to_fetch(profiles: damus_state.profiles, load: load, cache: damus_state.events)
+func load_profiles(profiles_subid: String, relay_id: String, load: PubkeysToLoad, damus_state: DamusState, txn: NdbTxn) {
+    let authors = find_profiles_to_fetch(profiles: damus_state.profiles, load: load, cache: damus_state.events, txn: txn)
+
     guard !authors.isEmpty else {
         return
     }
-    print("loading \(authors.count) profiles from \(relay_id)")
-
-    let filter = NostrFilter(kinds: [.metadata],
-                             authors: authors)
-
-    damus_state.pool.subscribe_to(sub_id: profiles_subid, filters: [filter], to: [relay_id]) { sub_id, conn_ev in
-        guard case .nostr_event(let ev) = conn_ev,
-              case .eose = ev,
-              sub_id == profiles_subid
-        else {
-            return
+    print("load_profiles: requesting \(authors.count) profiles from \(relay_id)")
+
+    let filter = NostrFilter(kinds: [.metadata], authors: authors)
+
+    damus_state.pool.subscribe_to(sub_id: profiles_subid, filters: [filter], to: [relay_id]) { rid, conn_ev in
+
+        let now = UInt64(Date.now.timeIntervalSince1970)
+        switch conn_ev {
+        case .ws_event:
+            break
+        case .nostr_event(let ev):
+            guard ev.subid == profiles_subid, rid == relay_id else { return }
+
+            switch ev {
+            case .event(_, let ev):
+                if ev.known_kind == .metadata {
+                    damus_state.ndb.write_profile_last_fetched(pubkey: ev.pubkey, fetched_at: now)
+                }
+            case .eose:
+                print("load_profiles: done loading \(authors.count) profiles from \(relay_id)")
+                damus_state.pool.unsubscribe(sub_id: profiles_subid, to: [relay_id])
+            case .ok:
+                break
+            case .notice:
+                break
+            }
         }
-        print("done loading \(authors.count) profiles from \(relay_id)")
-        damus_state.pool.unsubscribe(sub_id: profiles_subid, to: [relay_id])
+
     }
 }
diff --git a/damus/Models/SearchModel.swift b/damus/Models/SearchModel.swift
index a80eb5558a..0d945ca397 100644
--- a/damus/Models/SearchModel.swift
+++ b/damus/Models/SearchModel.swift
@@ -80,7 +80,8 @@ class SearchModel: ObservableObject {
             self.loading = false
 
             if sub_id == self.sub_id {
-                load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(self.events.all_events), damus_state: state)
+                let txn = NdbTxn(ndb: state.ndb)
+                load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(self.events.all_events), damus_state: state, txn: txn)
             }
         }
     }
diff --git a/damus/Models/ThreadModel.swift b/damus/Models/ThreadModel.swift
index 9c44f32983..6b25bb2bc1 100644
--- a/damus/Models/ThreadModel.swift
+++ b/damus/Models/ThreadModel.swift
@@ -120,7 +120,8 @@ class ThreadModel: ObservableObject {
         }
 
         if sub_id == self.base_subid {
-            load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(Array(event_map)), damus_state: damus_state)
+            let txn = NdbTxn(ndb: damus_state.ndb)
+            load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(Array(event_map)), damus_state: damus_state, txn: txn)
         }
     }
diff --git a/damus/Models/ZapsModel.swift b/damus/Models/ZapsModel.swift
index 3a9aeb34c0..9acc4778f0 100644
--- a/damus/Models/ZapsModel.swift
+++ b/damus/Models/ZapsModel.swift
@@ -55,7 +55,8 @@ class ZapsModel: ObservableObject {
             break
         case .eose:
             let events = state.events.lookup_zaps(target: target).map { $0.request.ev }
-            load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events), damus_state: state)
+            let txn = NdbTxn(ndb: state.ndb)
+            load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events), damus_state: state, txn: txn)
         case .event(_, let ev):
             guard ev.kind == 9735,
                   let zapper = state.profiles.lookup_zapper(pubkey: target.pubkey),
diff --git a/damus/Nostr/Profiles.swift b/damus/Nostr/Profiles.swift
index 10dec04cbb..ad9a5ddeb2 100644
--- a/damus/Nostr/Profiles.swift
+++ b/damus/Nostr/Profiles.swift
@@ -30,7 +30,7 @@ class ProfileData {
 class Profiles {
     private var ndb: Ndb
 
-    static let db_freshness_threshold: TimeInterval = 24 * 60 * 60
+    static let db_freshness_threshold: TimeInterval = 24 * 60 * 60 * 8
 
     @MainActor
     private var profiles: [Pubkey: ProfileData] = [:]
@@ -93,9 +93,24 @@
         return ndb.lookup_profile_key(pubkey)
     }
 
-    func has_fresh_profile(id: Pubkey) -> Bool {
-        guard let recv = lookup_with_timestamp(id).unsafeUnownedValue?.receivedAt else { return false }
-        return Date.now.timeIntervalSince(Date(timeIntervalSince1970: Double(recv))) < Profiles.db_freshness_threshold
+    func has_fresh_profile(id: Pubkey, txn: NdbTxn) -> Bool {
+        guard let fetched_at = ndb.read_profile_last_fetched(txn: txn, pubkey: id)
+        else {
+            return false
+        }
+
+        // In situations where a batch of profiles was fetched all at once,
+        // this will reduce the herding of the profile requests
+        let fuzz = Double.random(in: -60...60)
+        let threshold = Profiles.db_freshness_threshold + fuzz
+        let fetch_date = Date(timeIntervalSince1970: Double(fetched_at))
+
+        let since = Date.now.timeIntervalSince(fetch_date)
+        let fresh = since < threshold
+
+        //print("fresh = \(fresh): fetch_date \(since) < threshold \(threshold) \(id)")
+
+        return fresh
     }
 }
diff --git a/damus/Util/LNUrls.swift b/damus/Util/LNUrls.swift
index c4614c338d..9d00b1a195 100644
--- a/damus/Util/LNUrls.swift
+++ b/damus/Util/LNUrls.swift
@@ -29,10 +29,10 @@
             guard tries < 5 else { return nil }
             self.endpoints[pubkey] = .failed(tries: tries + 1)
         case .fetched(let pr):
-            print("lnurls.lookup_or_fetch fetched \(lnurl)")
+            //print("lnurls.lookup_or_fetch fetched \(lnurl)")
             return pr
         case .fetching(let task):
-            print("lnurls.lookup_or_fetch already fetching \(lnurl)")
+            //print("lnurls.lookup_or_fetch already fetching \(lnurl)")
             return await task.value
         case .not_fetched:
             print("lnurls.lookup_or_fetch not fetched \(lnurl)")
diff --git a/damus/Views/FollowingView.swift b/damus/Views/FollowingView.swift
index 045daa0cd1..16dab49c16 100644
--- a/damus/Views/FollowingView.swift
+++ b/damus/Views/FollowingView.swift
@@ -151,7 +151,8 @@ struct FollowingView: View {
         }
         .tabViewStyle(.page(indexDisplayMode: .never))
         .onAppear {
-            following.subscribe()
+            let txn = NdbTxn(ndb: self.damus_state.ndb)
+            following.subscribe(txn: txn)
         }
         .onDisappear {
             following.unsubscribe()
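
For illustration, here is a minimal standalone Swift sketch of the freshness check this patch adds in Profiles.has_fresh_profile. The Pubkey alias, the in-memory lastFetched dictionary, and the hasFreshProfile name are stand-ins invented for the sketch (the real code reads the fetched_at timestamp out of NostrDB through an NdbTxn), and the 8-day window is only an assumed value for db_freshness_threshold.

import Foundation

// Stand-ins for this sketch only (not the damus API).
typealias Pubkey = String

// Stand-in for ndb.read_profile_last_fetched(txn:pubkey:):
// last-fetched unix timestamps keyed by pubkey.
var lastFetched: [Pubkey: UInt64] = [:]

// Freshness window (assumed here: 8 days).
let dbFreshnessThreshold: TimeInterval = 24 * 60 * 60 * 8

// A profile is "fresh" when it was fetched recently enough to skip
// re-requesting it. The random +/-60s fuzz staggers expiry for profiles
// fetched together in one batch, so they do not all go stale at once.
func hasFreshProfile(id: Pubkey, now: Date = .now) -> Bool {
    guard let fetchedAt = lastFetched[id] else { return false }
    let fuzz = Double.random(in: -60...60)
    let threshold = dbFreshnessThreshold + fuzz
    let fetchDate = Date(timeIntervalSince1970: Double(fetchedAt))
    return now.timeIntervalSince(fetchDate) < threshold
}

// Usage: only request profiles that are missing or stale, mirroring
// find_profiles_to_fetch_from_keys in the patch.
let candidates: [Pubkey] = ["pk-a", "pk-b", "pk-a"]
let stale = Array(Set(candidates.filter { !hasFreshProfile(id: $0) }))
print("requesting \(stale.count) profiles")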
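
A companion sketch of the one-shot metadata subscription that the reworked load_profiles performs: ask relays for kind-0 events only for stale authors, stamp each returned profile with the time it was fetched (the patch persists this via ndb.write_profile_last_fetched), and tear the subscription down on EOSE. RelayPool, NostrFilter, RelayMessage, and ProfileFetcher below are simplified stand-ins, not the damus types.

import Foundation

typealias Pubkey = String

struct NostrFilter {
    var kinds: [Int]       // kind 0 = profile metadata
    var authors: [Pubkey]
}

enum RelayMessage {
    case event(kind: Int, pubkey: Pubkey)
    case eose              // relay has sent everything stored for this request
}

// Toy relay pool: immediately answers with one metadata event, then EOSE.
final class RelayPool {
    func subscribe(subId: String, filter: NostrFilter, handler: (RelayMessage) -> Void) {
        if let first = filter.authors.first {
            handler(.event(kind: 0, pubkey: first))
        }
        handler(.eose)
    }

    func unsubscribe(subId: String) {
        print("unsubscribed \(subId)")
    }
}

final class ProfileFetcher {
    let pool = RelayPool()
    // Stand-in for the fetched_at records kept in NostrDB.
    private(set) var lastFetched: [Pubkey: UInt64] = [:]

    func fetchProfiles(subId: String, staleAuthors: [Pubkey]) {
        guard !staleAuthors.isEmpty else { return }
        let filter = NostrFilter(kinds: [0], authors: staleAuthors)

        pool.subscribe(subId: subId, filter: filter) { msg in
            switch msg {
            case .event(let kind, let pubkey) where kind == 0:
                // Record when *we* fetched the profile (not the event's
                // created_at), so unchanged profiles are not re-requested
                // until the freshness window elapses.
                self.lastFetched[pubkey] = UInt64(Date.now.timeIntervalSince1970)
            case .eose:
                // One-shot request: close the subscription once the relay
                // has nothing more to send.
                self.pool.unsubscribe(subId: subId)
            default:
                break
            }
        }
    }
}

// Usage
let fetcher = ProfileFetcher()
fetcher.fetchProfiles(subId: "profiles-1", staleAuthors: ["pk-a", "pk-b"])

Recording the fetch time separately from the profile event's created_at is what lets the client skip authors whose profile simply has not changed, which is where the bandwidth saving comes from.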