Skip to content

Commit

Permalink
perf: don't continuously attempt to fetch old profiles
Browse files Browse the repository at this point in the history
Changelog-Changed: Save bandwidth by only fetching new profiles after a certain amount of time
  • Loading branch information
jb55 authored and suhailsaqan committed Oct 29, 2023
1 parent d1d3012 commit f9a7dee
Show file tree
Hide file tree
Showing 12 changed files with 86 additions and 48 deletions.
3 changes: 2 additions & 1 deletion damus/Models/EventsModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,8 @@ class EventsModel: ObservableObject {
case .ok:
break
case .eose:
load_profiles(profiles_subid: profiles_id, relay_id: relay_id, load: .from_events(events), damus_state: state)
let txn = NdbTxn(ndb: self.state.ndb)
load_profiles(profiles_subid: profiles_id, relay_id: relay_id, load: .from_events(events), damus_state: state, txn: txn)
}
}
}
7 changes: 4 additions & 3 deletions damus/Models/FollowersModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,8 @@ class FollowersModel: ObservableObject {
has_contact.insert(ev.pubkey)
}

func load_profiles(relay_id: String) {
let authors = find_profiles_to_fetch_from_keys(profiles: damus_state.profiles, pks: contacts ?? [])
func load_profiles<Y>(relay_id: String, txn: NdbTxn<Y>) {
let authors = find_profiles_to_fetch_from_keys(profiles: damus_state.profiles, pks: contacts ?? [], txn: txn)
if authors.isEmpty {
return
}
Expand Down Expand Up @@ -83,7 +83,8 @@ class FollowersModel: ObservableObject {

case .eose(let sub_id):
if sub_id == self.sub_id {
load_profiles(relay_id: relay_id)
let txn = NdbTxn(ndb: self.damus_state.ndb)
load_profiles(relay_id: relay_id, txn: txn)
} else if sub_id == self.profiles_id {
damus_state.pool.unsubscribe(sub_id: profiles_id, to: [relay_id])
}
Expand Down
8 changes: 4 additions & 4 deletions damus/Models/FollowingModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -22,20 +22,20 @@ class FollowingModel {
self.hashtags = hashtags
}

func get_filter() -> NostrFilter {
func get_filter<Y>(txn: NdbTxn<Y>) -> NostrFilter {
var f = NostrFilter(kinds: [.metadata])
f.authors = self.contacts.reduce(into: Array<Pubkey>()) { acc, pk in
// don't fetch profiles we already have
if damus_state.profiles.has_fresh_profile(id: pk) {
if damus_state.profiles.has_fresh_profile(id: pk, txn: txn) {
return
}
acc.append(pk)
}
return f
}

func subscribe() {
let filter = get_filter()
func subscribe<Y>(txn: NdbTxn<Y>) {
let filter = get_filter(txn: txn)
if (filter.authors?.count ?? 0) == 0 {
needs_sub = false
return
Expand Down
7 changes: 4 additions & 3 deletions damus/Models/HomeModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -430,14 +430,15 @@ class HomeModel {

case .eose(let sub_id):

let txn = NdbTxn(ndb: damus_state.ndb)
if sub_id == dms_subid {
var dms = dms.dms.flatMap { $0.events }
dms.append(contentsOf: incoming_dms)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(dms), damus_state: damus_state)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(dms), damus_state: damus_state, txn: txn)
} else if sub_id == notifications_subid {
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_keys(notifications.uniq_pubkeys()), damus_state: damus_state)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_keys(notifications.uniq_pubkeys()), damus_state: damus_state, txn: txn)
} else if sub_id == home_subid {
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus_state)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus_state, txn: txn)
}

self.loading = false
Expand Down
3 changes: 2 additions & 1 deletion damus/Models/ProfileModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -123,8 +123,9 @@ class ProfileModel: ObservableObject, Equatable {
break
//notify(.notice, notice)
case .eose:
let txn = NdbTxn(ndb: damus.ndb)
if resp.subid == sub_id {
load_profiles(profiles_subid: prof_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus)
load_profiles(profiles_subid: prof_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus, txn: txn)
}
progress += 1
break
Expand Down
67 changes: 41 additions & 26 deletions damus/Models/SearchHomeModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -83,38 +83,38 @@ class SearchHomeModel: ObservableObject {
// global events are not realtime
unsubscribe(to: relay_id)

load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.all_events), damus_state: damus_state)
let txn = NdbTxn(ndb: damus_state.ndb)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.all_events), damus_state: damus_state, txn: txn)
}



break
}
}
}

func find_profiles_to_fetch(profiles: Profiles, load: PubkeysToLoad, cache: EventCache) -> [Pubkey] {
func find_profiles_to_fetch<Y>(profiles: Profiles, load: PubkeysToLoad, cache: EventCache, txn: NdbTxn<Y>) -> [Pubkey] {
switch load {
case .from_events(let events):
return find_profiles_to_fetch_from_events(profiles: profiles, events: events, cache: cache)
return find_profiles_to_fetch_from_events(profiles: profiles, events: events, cache: cache, txn: txn)
case .from_keys(let pks):
return find_profiles_to_fetch_from_keys(profiles: profiles, pks: pks)
return find_profiles_to_fetch_from_keys(profiles: profiles, pks: pks, txn: txn)
}
}

func find_profiles_to_fetch_from_keys(profiles: Profiles, pks: [Pubkey]) -> [Pubkey] {
Array(Set(pks.filter { pk in !profiles.has_fresh_profile(id: pk) }))
/// Returns the deduplicated subset of `pks` whose profiles are stale or missing,
/// i.e. the pubkeys we should actually request from a relay.
/// - Parameters:
///   - profiles: Profile cache used to check freshness.
///   - pks: Candidate pubkeys (may contain duplicates).
///   - txn: Open NostrDB read transaction used for the freshness lookups.
/// - Returns: Unique pubkeys lacking a fresh cached profile (order unspecified).
func find_profiles_to_fetch_from_keys<Y>(profiles: Profiles, pks: [Pubkey], txn: NdbTxn<Y>) -> [Pubkey] {
    // Collect into a Set so repeated pubkeys are only requested once.
    var stale = Set<Pubkey>()
    for pk in pks where !profiles.has_fresh_profile(id: pk, txn: txn) {
        stale.insert(pk)
    }
    return Array(stale)
}

func find_profiles_to_fetch_from_events(profiles: Profiles, events: [NostrEvent], cache: EventCache) -> [Pubkey] {
func find_profiles_to_fetch_from_events<Y>(profiles: Profiles, events: [NostrEvent], cache: EventCache, txn: NdbTxn<Y>) -> [Pubkey] {
var pubkeys = Set<Pubkey>()

for ev in events {
// lookup profiles from boosted events
if ev.known_kind == .boost, let bev = ev.get_inner_event(cache: cache), !profiles.has_fresh_profile(id: bev.pubkey) {
if ev.known_kind == .boost, let bev = ev.get_inner_event(cache: cache), !profiles.has_fresh_profile(id: bev.pubkey, txn: txn) {
pubkeys.insert(bev.pubkey)
}

if !profiles.has_fresh_profile(id: ev.pubkey) {
if !profiles.has_fresh_profile(id: ev.pubkey, txn: txn) {
pubkeys.insert(ev.pubkey)
}
}
Expand All @@ -127,27 +127,42 @@ enum PubkeysToLoad {
case from_keys([Pubkey])
}

func load_profiles(profiles_subid: String, relay_id: String, load: PubkeysToLoad, damus_state: DamusState) {
let authors = find_profiles_to_fetch(profiles: damus_state.profiles, load: load, cache: damus_state.events)
func load_profiles<Y>(profiles_subid: String, relay_id: String, load: PubkeysToLoad, damus_state: DamusState, txn: NdbTxn<Y>) {
let authors = find_profiles_to_fetch(profiles: damus_state.profiles, load: load, cache: damus_state.events, txn: txn)

guard !authors.isEmpty else {
return
}

print("loading \(authors.count) profiles from \(relay_id)")

let filter = NostrFilter(kinds: [.metadata],
authors: authors)

damus_state.pool.subscribe_to(sub_id: profiles_subid, filters: [filter], to: [relay_id]) { sub_id, conn_ev in
guard case .nostr_event(let ev) = conn_ev,
case .eose = ev,
sub_id == profiles_subid
else {
return
print("load_profiles: requesting \(authors.count) profiles from \(relay_id)")

let filter = NostrFilter(kinds: [.metadata], authors: authors)

damus_state.pool.subscribe_to(sub_id: profiles_subid, filters: [filter], to: [relay_id]) { rid, conn_ev in

let now = UInt64(Date.now.timeIntervalSince1970)
switch conn_ev {
case .ws_event:
break
case .nostr_event(let ev):
guard ev.subid == profiles_subid, rid == relay_id else { return }

switch ev {
case .event(_, let ev):
if ev.known_kind == .metadata {
damus_state.ndb.write_profile_last_fetched(pubkey: ev.pubkey, fetched_at: now)
}
case .eose:
print("load_profiles: done loading \(authors.count) profiles from \(relay_id)")
damus_state.pool.unsubscribe(sub_id: profiles_subid, to: [relay_id])
case .ok:
break
case .notice:
break
}
}

print("done loading \(authors.count) profiles from \(relay_id)")
damus_state.pool.unsubscribe(sub_id: profiles_subid, to: [relay_id])

}
}

3 changes: 2 additions & 1 deletion damus/Models/SearchModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,8 @@ class SearchModel: ObservableObject {
self.loading = false

if sub_id == self.sub_id {
load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(self.events.all_events), damus_state: state)
let txn = NdbTxn(ndb: state.ndb)
load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(self.events.all_events), damus_state: state, txn: txn)
}
}
}
Expand Down
3 changes: 2 additions & 1 deletion damus/Models/ThreadModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,8 @@ class ThreadModel: ObservableObject {
}

if sub_id == self.base_subid {
load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(Array(event_map)), damus_state: damus_state)
let txn = NdbTxn(ndb: damus_state.ndb)
load_profiles(profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(Array(event_map)), damus_state: damus_state, txn: txn)
}
}

Expand Down
3 changes: 2 additions & 1 deletion damus/Models/ZapsModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,8 @@ class ZapsModel: ObservableObject {
break
case .eose:
let events = state.events.lookup_zaps(target: target).map { $0.request.ev }
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events), damus_state: state)
let txn = NdbTxn(ndb: state.ndb)
load_profiles(profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events), damus_state: state, txn: txn)
case .event(_, let ev):
guard ev.kind == 9735,
let zapper = state.profiles.lookup_zapper(pubkey: target.pubkey),
Expand Down
23 changes: 19 additions & 4 deletions damus/Nostr/Profiles.swift
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ class ProfileData {
class Profiles {
private var ndb: Ndb

static let db_freshness_threshold: TimeInterval = 24 * 60 * 60
static let db_freshness_threshold: TimeInterval = 24 * 60 * 8

@MainActor
private var profiles: [Pubkey: ProfileData] = [:]
Expand Down Expand Up @@ -93,9 +93,24 @@ class Profiles {
return ndb.lookup_profile_key(pubkey)
}

func has_fresh_profile(id: Pubkey) -> Bool {
guard let recv = lookup_with_timestamp(id).unsafeUnownedValue?.receivedAt else { return false }
return Date.now.timeIntervalSince(Date(timeIntervalSince1970: Double(recv))) < Profiles.db_freshness_threshold
/// Reports whether the cached profile for `id` is still considered fresh,
/// based on the last time we actually fetched it from a relay.
/// - Parameters:
///   - id: Pubkey whose profile freshness is being checked.
///   - txn: Open NostrDB read transaction for the last-fetched lookup.
/// - Returns: `true` if a fetch timestamp exists and falls inside the
///   (jittered) freshness window; `false` otherwise.
func has_fresh_profile<Y>(id: Pubkey, txn: NdbTxn<Y>) -> Bool {
    // Never fetched (or no record of it) means not fresh.
    guard let fetched_at = ndb.read_profile_last_fetched(txn: txn, pubkey: id) else {
        return false
    }

    // Jitter the window by up to ±60s so a batch of profiles fetched at the
    // same moment doesn't all expire — and get re-requested — simultaneously.
    let jitter = Double.random(in: -60...60)
    let window = Profiles.db_freshness_threshold + jitter

    let age = Date.now.timeIntervalSince(Date(timeIntervalSince1970: Double(fetched_at)))
    return age < window
}
}

Expand Down
4 changes: 2 additions & 2 deletions damus/Util/LNUrls.swift
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,10 @@ class LNUrls {
guard tries < 5 else { return nil }
self.endpoints[pubkey] = .failed(tries: tries + 1)
case .fetched(let pr):
print("lnurls.lookup_or_fetch fetched \(lnurl)")
//print("lnurls.lookup_or_fetch fetched \(lnurl)")
return pr
case .fetching(let task):
print("lnurls.lookup_or_fetch already fetching \(lnurl)")
//print("lnurls.lookup_or_fetch already fetching \(lnurl)")
return await task.value
case .not_fetched:
print("lnurls.lookup_or_fetch not fetched \(lnurl)")
Expand Down
3 changes: 2 additions & 1 deletion damus/Views/FollowingView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,8 @@ struct FollowingView: View {
}
.tabViewStyle(.page(indexDisplayMode: .never))
.onAppear {
following.subscribe()
let txn = NdbTxn(ndb: self.damus_state.ndb)
following.subscribe(txn: txn)
}
.onDisappear {
following.unsubscribe()
Expand Down

0 comments on commit f9a7dee

Please sign in to comment.