diff --git a/CHANGELOG.md b/CHANGELOG.md
index a5c347470..c93955ae4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -90,6 +90,7 @@
 * pool: add relay reconnection and disconnection unit tests ([Yuki Kishimoto])
 * pool: add `RelayServiceFlags::GOSSIP` flag ([Yuki Kishimoto])
 * sdk: allow to specify relay pool notification channel size in `Options` ([Yuki Kishimoto])
+* sdk: add support for NIP17 relay list ([Yuki Kishimoto])
 * relay-builder: add `RelayTestOptions` ([Yuki Kishimoto])
 * connect: add `NostrConnect::non_secure_set_user_public_key` ([Yuki Kishimoto])
 * ffi: add `make_private_msg` func ([Yuki Kishimoto])
diff --git a/bindings/nostr-sdk-ffi/src/client/mod.rs b/bindings/nostr-sdk-ffi/src/client/mod.rs
index d7a6db09a..960e48b46 100644
--- a/bindings/nostr-sdk-ffi/src/client/mod.rs
+++ b/bindings/nostr-sdk-ffi/src/client/mod.rs
@@ -517,7 +517,10 @@ impl Client {
             .into())
     }
 
-    /// Send private direct message to all relays
+    /// Send a private direct message
+    ///
+    /// If gossip is enabled, the message will be sent to the NIP17 relays (automatically discovered).
+    /// If gossip is not enabled, the message will be sent to all relays with the `WRITE` relay service flag.
     ///
     /// <https://github.com/nostr-protocol/nips/blob/master/17.md>
     #[uniffi::method(default(rumor_extra_tags = []))]
diff --git a/bindings/nostr-sdk-js/src/client/mod.rs b/bindings/nostr-sdk-js/src/client/mod.rs
index 104f1814b..07192c0b6 100644
--- a/bindings/nostr-sdk-js/src/client/mod.rs
+++ b/bindings/nostr-sdk-js/src/client/mod.rs
@@ -530,7 +530,10 @@ impl JsClient {
         .map(|id| id.into())
     }
 
-    /// Send private direct message to all relays
+    /// Send a private direct message
+    ///
+    /// If gossip is enabled, the message will be sent to the NIP17 relays (automatically discovered).
+    /// If gossip is not enabled, the message will be sent to all relays with the `WRITE` relay service flag.
/// /// <https://github.com/nostr-protocol/nips/blob/master/17.md> #[wasm_bindgen(js_name = sendPrivateMsg)] diff --git a/crates/nostr-sdk/examples/bot.rs b/crates/nostr-sdk/examples/bot.rs index 2ba51433b..67f8ef9b0 100644 --- a/crates/nostr-sdk/examples/bot.rs +++ b/crates/nostr-sdk/examples/bot.rs @@ -9,7 +9,10 @@ async fn main() -> Result<()> { tracing_subscriber::fmt::init(); let keys = Keys::parse("nsec12kcgs78l06p30jz7z7h3n2x2cy99nw2z6zspjdp7qc206887mwvs95lnkx")?; - let client = Client::new(keys.clone()); + let client = Client::builder() + .signer(keys.clone()) + .opts(Options::new().gossip(true)) + .build(); println!("Bot public key: {}", keys.public_key().to_bech32()?); @@ -50,17 +53,8 @@ async fn main() -> Result<()> { ), }; - // Build private message - let event = - EventBuilder::private_msg(&keys, sender, content, []).await?; - // Send private message - // client.send_event(event).await?; - - // Send private message to specific relays - client - .send_event_to(["wss://auth.nostr1.com"], event) - .await?; + client.send_private_msg(sender, content, []).await?; } } Err(e) => tracing::error!("Impossible to decrypt direct message: {e}"), diff --git a/crates/nostr-sdk/examples/nostr-connect.rs b/crates/nostr-sdk/examples/nostr-connect.rs index 2005535cb..00735666e 100644 --- a/crates/nostr-sdk/examples/nostr-connect.rs +++ b/crates/nostr-sdk/examples/nostr-connect.rs @@ -40,11 +40,11 @@ async fn main() -> Result<()> { let output = client.send_event_builder(builder).await?; println!("Published text note: {}\n", output.id()); - let signer = client.signer().await?; let receiver = PublicKey::from_bech32("npub1drvpzev3syqt0kjrls50050uzf25gehpz9vgdw08hvex7e0vgfeq0eseet")?; - let event = EventBuilder::private_msg(&signer, receiver, "Hello from rust-nostr", []).await?; - let output = client.send_event(event).await?; + let output = client + .send_private_msg(receiver, "Hello from rust-nostr", []) + .await?; println!("Sent DM: {}", output.id()); Ok(()) diff --git a/crates/nostr-sdk/src/client/mod.rs b/crates/nostr-sdk/src/client/mod.rs index 6e1ad42ae..b53f90f28 100644 --- a/crates/nostr-sdk/src/client/mod.rs +++ b/crates/nostr-sdk/src/client/mod.rs @@ -86,6 +86,9 @@ pub enum Error { /// Broken down filters for gossip are empty #[error("gossip broken down filters are empty")] GossipFiltersEmpty, + /// DMs relays not found + #[error("DMs relays not found")] + DMsRelaysNotFound, /// Metadata not found #[error("metadata not found")] MetadataNotFound, @@ -1014,57 +1017,7 @@ impl Client { return Ok(self.pool.send_event(event).await?); } - // ########## Gossip ########## - - // Get all public keys involved in the event - let public_keys = event - .tags - .public_keys() - .copied() - .chain(iter::once(event.pubkey)); - - // Check what are up-to-date in the gossip graph and which ones require an update - let outdated_public_keys = self.gossip_graph.check_outdated(public_keys).await; - self.update_outdated_gossip_graph(outdated_public_keys) - .await?; - - // Get relays - let mut outbox = self.gossip_graph.get_outbox_relays(&[event.pubkey]).await; - let inbox = self - .gossip_graph - .get_inbox_relays(event.tags.public_keys()) - .await; - - // Add outbox relays - for url in outbox.iter() { - if self.add_gossip_relay(url).await? { - self.connect_relay(url).await?; - } - } - - // Add inbox relays - for url in inbox.iter() { - if self.add_gossip_relay(url).await? 
{
-                self.connect_relay(url).await?;
-            }
-        }
-
-        // Get WRITE relays
-        // TODO: avoid clone of both url and relay
-        let write_relays = self
-            .pool
-            .relays_with_flag(RelayServiceFlags::WRITE, FlagCheck::All)
-            .await
-            .into_keys();
-
-        // Extend OUTBOX relays with WRITE ones
-        outbox.extend(write_relays);
-
-        // Union of OUTBOX (and WRITE) with INBOX relays
-        let urls = outbox.union(&inbox);
-
-        // Send event
-        Ok(self.pool.send_event_to(urls, event).await?)
+        self.gossip_send_event(event, false).await
     }
 
     /// Send multiple events at once to all relays with [`RelayServiceFlags::WRITE`] flag.
@@ -1325,7 +1278,10 @@ impl Client {
         Ok(contacts)
     }
 
-    /// Send private direct message to all relays
+    /// Send a private direct message
+    ///
+    /// If `gossip` is enabled (see [`Options::gossip`]), the message will be sent to the NIP17 relays (automatically discovered).
+    /// If gossip is not enabled, the message will be sent to all relays with the [`RelayServiceFlags::WRITE`] flag.
     ///
     /// <https://github.com/nostr-protocol/nips/blob/master/17.md>
     #[inline]
@@ -1343,10 +1299,16 @@
         let signer = self.signer().await?;
         let event: Event =
             EventBuilder::private_msg(&signer, receiver, message, rumor_extra_tags).await?;
-        self.send_event(event).await
+
+        // Gossip not enabled: send to all relays
+        if !self.opts.gossip {
+            return self.send_event(event).await;
+        }
+
+        self.gossip_send_event(event, true).await
     }
 
-    /// Send private direct message to specific relays
+    /// Send a private direct message to specific relays
     ///
     /// <https://github.com/nostr-protocol/nips/blob/master/17.md>
     #[inline]
@@ -1693,8 +1655,8 @@ impl Client {
         if !outdated_public_keys.is_empty() {
             // Compose filters
             let filter: Filter = Filter::default()
-                .authors(outdated_public_keys)
-                .kind(Kind::RelayList);
+                .authors(outdated_public_keys.clone())
+                .kinds([Kind::RelayList, Kind::InboxRelays]);
 
             // Query from database
             let database = self.database();
@@ -1716,6 +1678,11 @@
                 .fetch_events_from(relays, vec![filter], Some(Duration::from_secs(10)))
                 .await?;
 
+            // Update last check for these public keys
+            self.gossip_graph
+                .update_last_check(outdated_public_keys)
+                .await;
+
             // Merge database and relays events
             let merged: Events = events.merge(stored_events);
 
@@ -1779,6 +1746,76 @@ impl Client {
         Ok(broken_down.filters)
     }
 
+    async fn gossip_send_event(&self, event: Event, nip17: bool) -> Result<Output<EventId>, Error> {
+        // Get all public keys involved in the event
+        let public_keys = event
+            .tags
+            .public_keys()
+            .copied()
+            .chain(iter::once(event.pubkey));
+
+        // Check which public keys are up to date in the gossip graph and which ones require an update
+        let outdated_public_keys = self.gossip_graph.check_outdated(public_keys).await;
+        self.update_outdated_gossip_graph(outdated_public_keys)
+            .await?;
+
+        let urls: HashSet<Url> = if nip17 && event.kind == Kind::GiftWrap {
+            // Get NIP17 relays
+            // Get relays only for the `p` tags, since gift wraps are signed with a random key (random author)
+            let relays = self
+                .gossip_graph
+                .get_nip17_inbox_relays(event.tags.public_keys())
+                .await;
+
+            if relays.is_empty() {
+                return Err(Error::DMsRelaysNotFound);
+            }
+
+            // Add NIP17 relays
+            for url in relays.iter() {
+                if self.add_gossip_relay(url).await?
{ + self.connect_relay(url).await?; + } + } + + relays + } else { + // Get NIP65 relays + let mut outbox = self + .gossip_graph + .get_nip65_outbox_relays(&[event.pubkey]) + .await; + let inbox = self + .gossip_graph + .get_nip65_inbox_relays(event.tags.public_keys()) + .await; + + // Add outbox and inbox relays + for url in outbox.iter().chain(inbox.iter()) { + if self.add_gossip_relay(url).await? { + self.connect_relay(url).await?; + } + } + + // Get WRITE relays + // TODO: avoid clone of both url and relay + let write_relays = self + .pool + .relays_with_flag(RelayServiceFlags::WRITE, FlagCheck::All) + .await + .into_keys(); + + // Extend OUTBOX relays with WRITE ones + outbox.extend(write_relays); + + // Union of OUTBOX (and WRITE) with INBOX relays + outbox.union(&inbox).cloned().collect() + }; + + // Send event + Ok(self.pool.send_event_to(urls, event).await?) + } + async fn gossip_stream_events( &self, filters: Vec<Filter>, diff --git a/crates/nostr-sdk/src/gossip/constant.rs b/crates/nostr-sdk/src/gossip/constant.rs index fcea6bfcb..410095954 100644 --- a/crates/nostr-sdk/src/gossip/constant.rs +++ b/crates/nostr-sdk/src/gossip/constant.rs @@ -7,3 +7,4 @@ use std::time::Duration; /// Max number of relays allowed in NIP17/NIP65 lists pub const MAX_RELAYS_LIST: usize = 5; pub const PUBKEY_METADATA_OUTDATED_AFTER: Duration = Duration::from_secs(60 * 60); // 60 min +pub const CHECK_OUTDATED_INTERVAL: Duration = Duration::from_secs(60 * 5); // 5 min diff --git a/crates/nostr-sdk/src/gossip/graph.rs b/crates/nostr-sdk/src/gossip/graph.rs index 6e53c6b80..2a6644b49 100644 --- a/crates/nostr-sdk/src/gossip/graph.rs +++ b/crates/nostr-sdk/src/gossip/graph.rs @@ -8,7 +8,7 @@ use std::sync::Arc; use nostr::prelude::*; use tokio::sync::{RwLock, RwLockReadGuard}; -use super::constant::{MAX_RELAYS_LIST, PUBKEY_METADATA_OUTDATED_AFTER}; +use super::constant::{CHECK_OUTDATED_INTERVAL, MAX_RELAYS_LIST, PUBKEY_METADATA_OUTDATED_AFTER}; // TODO: add support to DM relay list @@ -23,16 +23,24 @@ pub struct BrokenDownFilters { pub urls: HashSet<Url>, } -#[derive(Debug, Clone)] -struct RelayListMetadata { - pub map: HashMap<Url, Option<RelayMetadata>>, +#[derive(Debug, Clone, Default)] +struct RelayList<T> { + pub collection: T, /// Timestamp of when the event metadata was created pub event_created_at: Timestamp, /// Timestamp of when the metadata was updated pub last_update: Timestamp, } -type PublicKeyMap = HashMap<PublicKey, RelayListMetadata>; +#[derive(Debug, Clone, Default)] +struct RelayLists { + pub nip17: RelayList<HashSet<Url>>, + pub nip65: RelayList<HashMap<Url, Option<RelayMetadata>>>, + /// Timestamp of the last check + pub last_check: Timestamp, +} + +type PublicKeyMap = HashMap<PublicKey, RelayLists>; #[derive(Debug, Clone)] pub struct GossipGraph { @@ -56,34 +64,66 @@ impl GossipGraph { { let mut public_keys = self.public_keys.write().await; - for event in events.into_iter().filter(|e| e.kind == Kind::RelayList) { - public_keys - .entry(event.pubkey) - .and_modify(|m| { - // Update only if new metadata has more recent timestamp - if event.created_at >= m.event_created_at { - *m = RelayListMetadata { - map: nip65::extract_relay_list(&event) + for event in events.into_iter() { + if event.kind == Kind::RelayList { + public_keys + .entry(event.pubkey) + .and_modify(|lists| { + // Update only if new metadata has more recent timestamp + if event.created_at >= lists.nip65.event_created_at { + lists.nip65 = RelayList { + collection: nip65::extract_relay_list(&event) + .take(MAX_RELAYS_LIST) + 
.map(|(u, m)| (u.clone(), *m)) + .collect(), + event_created_at: event.created_at, + last_update: Timestamp::now(), + }; + } + }) + .or_insert_with(|| RelayLists { + nip65: RelayList { + collection: nip65::extract_relay_list(&event) + .take(MAX_RELAYS_LIST) .map(|(u, m)| (u.clone(), *m)) + .collect(), + event_created_at: event.created_at, + last_update: Timestamp::now(), + }, + ..Default::default() + }); + } else if event.kind == Kind::InboxRelays { + public_keys + .entry(event.pubkey) + .and_modify(|lists| { + // Update only if new metadata has more recent timestamp + if event.created_at >= lists.nip17.event_created_at { + lists.nip17 = RelayList { + collection: nip17::extract_relay_list(&event) + .take(MAX_RELAYS_LIST) + .cloned() + .collect(), + event_created_at: event.created_at, + last_update: Timestamp::now(), + }; + } + }) + .or_insert_with(|| RelayLists { + nip17: RelayList { + collection: nip17::extract_relay_list(&event) .take(MAX_RELAYS_LIST) + .cloned() .collect(), event_created_at: event.created_at, last_update: Timestamp::now(), - }; - } - }) - .or_insert_with(|| RelayListMetadata { - map: nip65::extract_relay_list(&event) - .map(|(u, m)| (u.clone(), *m)) - .take(MAX_RELAYS_LIST) - .collect(), - event_created_at: event.created_at, - last_update: Timestamp::now(), - }); + }, + ..Default::default() + }); + } } } - /// Check for what public keys the metadata are outdated or not existent + /// Check for what public keys the metadata are outdated or not existent (both for NIP17 and NIP65) pub async fn check_outdated<I>(&self, public_keys: I) -> HashSet<PublicKey> where I: IntoIterator<Item = PublicKey>, @@ -95,9 +135,19 @@ impl GossipGraph { for public_key in public_keys.into_iter() { match map.get(&public_key) { - Some(meta) => { - let empty: bool = meta.map.is_empty(); - let expired: bool = meta.last_update + PUBKEY_METADATA_OUTDATED_AFTER < now; + Some(lists) => { + if lists.last_check + CHECK_OUTDATED_INTERVAL > now { + continue; + } + + // Check if collections are empty + let empty: bool = + lists.nip17.collection.is_empty() || lists.nip65.collection.is_empty(); + + // Check if expired + let expired: bool = lists.nip17.last_update + PUBKEY_METADATA_OUTDATED_AFTER + < now + || lists.nip65.last_update + PUBKEY_METADATA_OUTDATED_AFTER < now; if empty || expired { outdated.insert(public_key); @@ -113,6 +163,46 @@ impl GossipGraph { outdated } + pub async fn update_last_check<I>(&self, public_keys: I) + where + I: IntoIterator<Item = PublicKey>, + { + let mut map = self.public_keys.write().await; + let now = Timestamp::now(); + + for public_key in public_keys.into_iter() { + map.entry(public_key) + .and_modify(|lists| { + lists.last_check = now; + }) + .or_insert_with(|| RelayLists { + last_check: now, + ..Default::default() + }); + } + } + + fn get_nip17_relays<'a, I>( + &self, + txn: &RwLockReadGuard<PublicKeyMap>, + public_keys: I, + ) -> HashSet<Url> + where + I: IntoIterator<Item = &'a PublicKey>, + { + let mut urls: HashSet<Url> = HashSet::new(); + + for public_key in public_keys.into_iter() { + if let Some(lists) = txn.get(public_key) { + for url in lists.nip17.collection.iter() { + urls.insert(url.clone()); + } + } + } + + urls + } + fn get_nip65_relays<'a, I>( &self, txn: &RwLockReadGuard<PublicKeyMap>, @@ -125,8 +215,8 @@ impl GossipGraph { let mut urls: HashSet<Url> = HashSet::new(); for public_key in public_keys.into_iter() { - if let Some(meta) = txn.get(public_key) { - for (url, m) in meta.map.iter() { + if let Some(lists) = txn.get(public_key) { + for (url, m) in 
lists.nip65.collection.iter() { let insert: bool = match m { Some(val) => match metadata { Some(metadata) => val == &metadata, @@ -145,6 +235,32 @@ impl GossipGraph { urls } + fn map_nip17_relays<'a, I>( + &self, + txn: &RwLockReadGuard<PublicKeyMap>, + public_keys: I, + ) -> HashMap<Url, BTreeSet<PublicKey>> + where + I: IntoIterator<Item = &'a PublicKey>, + { + let mut urls: HashMap<Url, BTreeSet<PublicKey>> = HashMap::new(); + + for public_key in public_keys.into_iter() { + if let Some(lists) = txn.get(public_key) { + for url in lists.nip17.collection.iter() { + urls.entry(url.clone()) + .and_modify(|s| { + s.insert(*public_key); + }) + .or_default() + .insert(*public_key); + } + } + } + + urls + } + fn map_nip65_relays<'a, I>( &self, txn: &RwLockReadGuard<PublicKeyMap>, @@ -157,8 +273,8 @@ impl GossipGraph { let mut urls: HashMap<Url, BTreeSet<PublicKey>> = HashMap::new(); for public_key in public_keys.into_iter() { - if let Some(meta) = txn.get(public_key) { - for (url, m) in meta.map.iter() { + if let Some(lists) = txn.get(public_key) { + for (url, m) in lists.nip65.collection.iter() { let insert: bool = match m { Some(val) => val == &metadata, None => true, @@ -181,7 +297,7 @@ impl GossipGraph { /// Get outbox (write) relays for public keys #[inline] - pub async fn get_outbox_relays<'a, I>(&self, public_keys: I) -> HashSet<Url> + pub async fn get_nip65_outbox_relays<'a, I>(&self, public_keys: I) -> HashSet<Url> where I: IntoIterator<Item = &'a PublicKey>, { @@ -191,7 +307,7 @@ impl GossipGraph { /// Get inbox (read) relays for public keys #[inline] - pub async fn get_inbox_relays<'a, I>(&self, public_keys: I) -> HashSet<Url> + pub async fn get_nip65_inbox_relays<'a, I>(&self, public_keys: I) -> HashSet<Url> where I: IntoIterator<Item = &'a PublicKey>, { @@ -199,9 +315,19 @@ impl GossipGraph { self.get_nip65_relays(&txn, public_keys, Some(RelayMetadata::Read)) } + /// Get NIP17 inbox (read) relays for public keys + #[inline] + pub async fn get_nip17_inbox_relays<'a, I>(&self, public_keys: I) -> HashSet<Url> + where + I: IntoIterator<Item = &'a PublicKey>, + { + let txn = self.public_keys.read().await; + self.get_nip17_relays(&txn, public_keys) + } + /// Map outbox (write) relays for public keys #[inline] - fn map_outbox_relays<'a, I>( + fn map_nip65_outbox_relays<'a, I>( &self, txn: &RwLockReadGuard<PublicKeyMap>, public_keys: I, @@ -212,9 +338,9 @@ impl GossipGraph { self.map_nip65_relays(txn, public_keys, RelayMetadata::Write) } - /// Map inbox (read) relays for public keys + /// Map NIP65 inbox (read) relays for public keys #[inline] - fn map_inbox_relays<'a, I>( + fn map_nip65_inbox_relays<'a, I>( &self, txn: &RwLockReadGuard<PublicKeyMap>, public_keys: I, @@ -243,7 +369,10 @@ impl GossipGraph { match (&filter.authors, &p_tag) { (Some(authors), None) => { // Get map of outbox relays - let outbox = self.map_outbox_relays(&txn, authors); + let mut outbox = self.map_nip65_outbox_relays(&txn, authors); + + // Extend with NIP17 relays + outbox.extend(self.map_nip17_relays(&txn, authors)); // Construct new filters for (relay, pk_set) in outbox.into_iter() { @@ -264,7 +393,10 @@ impl GossipGraph { } (None, Some(p_public_keys)) => { // Get map of inbox relays - let inbox = self.map_inbox_relays(&txn, p_public_keys); + let mut inbox = self.map_nip65_inbox_relays(&txn, p_public_keys); + + // Extend with NIP17 relays + inbox.extend(self.map_nip17_relays(&txn, p_public_keys)); // Construct new filters for (relay, pk_set) in inbox.into_iter() { @@ -287,8 +419,11 @@ impl GossipGraph { } 
(Some(authors), Some(p_public_keys)) => { // Get map of outbox and inbox relays - let pks = authors.union(p_public_keys); - let relays = self.get_nip65_relays(&txn, pks, None); + let mut relays = + self.get_nip65_relays(&txn, authors.union(p_public_keys), None); + + // Extend with NIP17 relays + relays.extend(self.get_nip17_relays(&txn, authors.union(p_public_keys))); for relay in relays.into_iter() { urls.insert(relay.clone());
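For reference, a minimal end-to-end sketch of the gossip-aware DM flow introduced by this patch, assembled from the bot.rs and nostr-connect.rs example changes above. The bootstrap relay URL and the generated keys are illustrative assumptions, not part of the diff; the receiver npub and the send_private_msg call are taken verbatim from the nostr-connect.rs example. With gossip enabled, the call returns Error::DMsRelaysNotFound if no NIP17 relay list is known for the receiver.

use nostr_sdk::prelude::*;

#[tokio::main]
async fn main() -> Result<()> {
    // Illustrative keys; the bot.rs example parses an nsec instead
    let keys = Keys::generate();

    // Enable gossip so NIP17/NIP65 relay lists are discovered automatically
    let client = Client::builder()
        .signer(keys.clone())
        .opts(Options::new().gossip(true))
        .build();

    // Bootstrap relay for gossip discovery (assumed URL, not from the patch)
    client.add_relay("wss://relay.damus.io").await?;
    client.connect().await;

    // Receiver taken from the nostr-connect.rs example in this patch
    let receiver = PublicKey::from_bech32(
        "npub1drvpzev3syqt0kjrls50050uzf25gehpz9vgdw08hvex7e0vgfeq0eseet",
    )?;

    // With gossip enabled this goes to the receiver's NIP17 relays;
    // without gossip it would go to all relays flagged WRITE
    client
        .send_private_msg(receiver, "Hello from rust-nostr", [])
        .await?;

    Ok(())
}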