nostr: move tag::indexes to nostr-database
Remove `Filter::match_event` and everything related.
yukibtc committed Jan 8, 2024
1 parent 0663ced commit 2439a3d
Showing 11 changed files with 83 additions and 332 deletions.
29 changes: 6 additions & 23 deletions Cargo.lock
(generated file; diff not rendered)

6 changes: 1 addition & 5 deletions bindings/nostr-ffi/src/message/subscription.rs
@@ -11,7 +11,7 @@ use uniffi::{Enum, Object};
 
 use crate::error::Result;
 use crate::helper::unwrap_or_clone_arc;
-use crate::{Event, EventId, PublicKey, Timestamp};
+use crate::{EventId, PublicKey, Timestamp};
 
 #[derive(Enum)]
 pub enum Alphabet {
@@ -324,10 +324,6 @@ impl Filter {
         Arc::new(builder)
     }
 
-    pub fn match_event(&self, event: Arc<Event>) -> bool {
-        self.inner.match_event(event.as_ref().deref())
-    }
-
     pub fn is_empty(&self) -> bool {
         self.inner.is_empty()
     }
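The binding removed above was a thin delegation wrapper: the nostr-ffi object owns the pure-Rust `nostr::Filter` and forwards calls to it, so the wrapper method has to go once the inner `match_event` is gone. A minimal sketch of that pattern (the `FfiFilter` name is hypothetical; only the `is_empty` forwarding is taken from the diff):

// Sketch of the delegation pattern used by nostr-ffi wrappers:
// the FFI type owns the pure-Rust value and forwards calls to it.
// `FfiFilter` is a hypothetical name for illustration only.
pub struct FfiFilter {
    inner: nostr::Filter,
}

impl FfiFilter {
    // A forwarded method can only exist while the inner API does,
    // which is why `match_event` is deleted here.
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }
}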
138 changes: 52 additions & 86 deletions crates/nostr-database/src/index.rs
@@ -5,21 +5,19 @@
 //! Nostr Database Indexes
 
 use std::cmp::Ordering;
-use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
-//use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrdering};
+use std::collections::{BTreeSet, HashMap, HashSet};
 use std::sync::Arc;
 
 use nostr::event::id;
 use nostr::nips::nip01::Coordinate;
 use nostr::secp256k1::XOnlyPublicKey;
-use nostr::{
-    Alphabet, Event, EventId, Filter, GenericTagValue, Kind, TagIndexValues, TagIndexes, Timestamp,
-};
+use nostr::{Alphabet, Event, EventId, Filter, GenericTagValue, Kind, Timestamp};
 use rayon::prelude::*;
 use thiserror::Error;
 use tokio::sync::RwLock;
 
 use crate::raw::RawEvent;
+use crate::tag_indexes::{TagIndexValues, TagIndexes};
 
 /// Public Key Prefix Size
 const PUBLIC_KEY_PREFIX_SIZE: usize = 8;
@@ -81,32 +79,11 @@ impl From<&Event> for EventIndex {
             event_id: e.id,
             pubkey: PublicKeyPrefix::from(e.pubkey),
             kind: e.kind,
-            tags: e.build_tags_index(),
+            tags: TagIndexes::from(e.tags.iter().map(|t| t.as_vec())),
         }
     }
 }
 
-impl EventIndex {
-    fn filter_tags_match(&self, filter: &FilterIndex) -> bool {
-        if filter.generic_tags.is_empty() {
-            return true;
-        }
-
-        if self.tags.is_empty() {
-            return false;
-        }
-
-        filter.generic_tags.iter().all(|(tagname, set)| {
-            self.tags.get(tagname).map_or(false, |valset| {
-                TagIndexValues::iter(set)
-                    .filter(|t| valset.contains(t))
-                    .count()
-                    > 0
-            })
-        })
-    }
-}
-
 /// Public Key prefix
 #[derive(Debug, Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
 struct PublicKeyPrefix([u8; PUBLIC_KEY_PREFIX_SIZE]);
@@ -134,12 +111,12 @@ impl From<[u8; 32]> for PublicKeyPrefix {
 
 #[derive(Default)]
 struct FilterIndex {
-    ids: BTreeSet<EventId>,
-    authors: BTreeSet<PublicKeyPrefix>,
-    kinds: BTreeSet<Kind>,
+    ids: HashSet<EventId>,
+    authors: HashSet<PublicKeyPrefix>,
+    kinds: HashSet<Kind>,
     since: Option<Timestamp>,
     until: Option<Timestamp>,
-    generic_tags: BTreeMap<Alphabet, BTreeSet<GenericTagValue>>,
+    generic_tags: HashMap<Alphabet, BTreeSet<GenericTagValue>>,
 }
 
 impl FilterIndex {
@@ -167,21 +144,60 @@ impl FilterIndex {
             .insert(identifier);
         self
     }
+
+    fn ids_match(&self, event: &EventIndex) -> bool {
+        self.ids.is_empty() || self.ids.contains(&event.event_id)
+    }
+
+    fn authors_match(&self, event: &EventIndex) -> bool {
+        self.authors.is_empty() || self.authors.contains(&event.pubkey)
+    }
+
+    fn tag_match(&self, event: &EventIndex) -> bool {
+        if self.generic_tags.is_empty() {
+            return true;
+        }
+        if event.tags.is_empty() {
+            return false;
+        }
+
+        self.generic_tags.iter().all(|(tagname, set)| {
+            event.tags.get(tagname).map_or(false, |valset| {
+                TagIndexValues::iter(set.iter())
+                    .filter(|t| valset.contains(t))
+                    .count()
+                    > 0
+            })
+        })
+    }
+
+    fn kind_match(&self, kind: &Kind) -> bool {
+        self.kinds.is_empty() || self.kinds.contains(kind)
+    }
+
+    pub fn match_event(&self, event: &EventIndex) -> bool {
+        self.ids_match(event)
+            && self.since.map_or(true, |t| event.created_at >= t)
+            && self.until.map_or(true, |t| event.created_at <= t)
+            && self.kind_match(&event.kind)
+            && self.authors_match(event)
+            && self.tag_match(event)
+    }
 }
 
 impl From<Filter> for FilterIndex {
     fn from(value: Filter) -> Self {
         Self {
-            ids: value.ids,
+            ids: value.ids.into_iter().collect(),
             authors: value
                 .authors
                 .into_iter()
                 .map(PublicKeyPrefix::from)
                 .collect(),
-            kinds: value.kinds,
+            kinds: value.kinds.into_iter().collect(),
             since: value.since,
             until: value.until,
-            generic_tags: value.generic_tags,
+            generic_tags: value.generic_tags.into_iter().collect(),
         }
     }
 }
@@ -468,61 +484,11 @@ impl DatabaseIndexes {
         T: Into<FilterIndex>,
     {
         let filter: FilterIndex = filter.into();
-        index.par_iter().filter(move |m| {
-            !deleted_ids.contains(&m.event_id)
-                && filter.until.map_or(true, |t| m.created_at <= t)
-                && filter.since.map_or(true, |t| m.created_at >= t)
-                && (filter.ids.is_empty() || filter.ids.contains(&m.event_id))
-                && (filter.authors.is_empty() || filter.authors.contains(&m.pubkey))
-                && (filter.kinds.is_empty() || filter.kinds.contains(&m.kind))
-                && m.filter_tags_match(&filter)
+        index.par_iter().filter(move |event| {
+            !deleted_ids.contains(&event.event_id) && filter.match_event(event)
         })
     }
 
-    /* fn internal_multi_parallel_query<'a, I, T>(
-        &self,
-        index: &'a BTreeSet<EventIndex>,
-        deleted: &'a HashSet<EventId>,
-        filters: I,
-    ) -> impl ParallelIterator<Item = &'a EventIndex>
-    where
-        I: IntoIterator<Item = T>,
-        T: Into<FilterIndex>,
-    {
-        let filters: Vec<FilterIndex> = filters.into_iter().map(|f| f.into()).collect();
-        let limits: Vec<Option<usize>> = filters.iter().map(|f| f.limit).collect();
-        let counter: Vec<AtomicUsize> = filters.iter().map(|_| AtomicUsize::new(0)).collect();
-        index
-            .par_iter()
-            .filter(move |i| !deleted.contains(&i.event_id))
-            .filter(move |i| {
-                filters.par_iter().enumerate().any(|(index, filter)| {
-                    if let Some(Some(limit)) = limits.get(index) {
-                        if let Some(counter) = counter.get(index) {
-                            if counter.load(AtomicOrdering::SeqCst) >= *limit {
-                                return false;
-                            }
-                        }
-                    }
-                    let status: bool = filter.until.map_or(true, |t| i.created_at <= t)
-                        && filter.since.map_or(true, |t| i.created_at >= t)
-                        && (filter.ids.is_empty() || filter.ids.contains(&i.event_id))
-                        && (filter.authors.is_empty() || filter.authors.contains(&i.pubkey))
-                        && (filter.kinds.is_empty() || filter.kinds.contains(&i.kind))
-                        && i.filter_tags_match(&filter);
-                    if status {
-                        if let Some(counter) = counter.get(index) {
-                            counter.fetch_add(1, AtomicOrdering::SeqCst);
-                        }
-                    }
-                    status
-                })
-            })
-    } */
-
     /// Query
     #[tracing::instrument(skip_all, level = "trace")]
     pub async fn query<I>(&self, filters: I) -> Vec<EventId>
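Every per-field check above (`ids_match`, `authors_match`, `kind_match`) follows the same rule: an empty criterion matches everything, while a non-empty one must contain the event's value. The switch from `BTreeSet` to `HashSet` suits this path, which only needs membership tests, never ordered iteration. A self-contained sketch of the rule (`set_match` is a hypothetical helper, not part of the crate):

use std::collections::HashSet;
use std::hash::Hash;

// Empty filter field: no constraint, match every event.
// Non-empty field: the event's value must be in the set.
fn set_match<T: Hash + Eq>(criteria: &HashSet<T>, value: &T) -> bool {
    criteria.is_empty() || criteria.contains(value)
}

fn main() {
    let no_kinds: HashSet<u16> = HashSet::new();
    let some_kinds: HashSet<u16> = [0, 1, 3].into_iter().collect();
    assert!(set_match(&no_kinds, &1)); // unconstrained: matches
    assert!(set_match(&some_kinds, &1)); // constrained, present: matches
    assert!(!set_match(&some_kinds, &7)); // constrained, absent: no match
}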
1 change: 1 addition & 0 deletions crates/nostr-database/src/lib.rs
@@ -25,6 +25,7 @@ pub mod memory;
 mod options;
 pub mod profile;
 mod raw;
+mod tag_indexes;
 
 pub use self::error::DatabaseError;
 #[cfg(feature = "flatbuf")]
10 changes: 4 additions & 6 deletions crates/nostr-database/src/memory.rs
@@ -9,7 +9,7 @@ use std::sync::Arc;
 
 use async_trait::async_trait;
 use nostr::nips::nip01::Coordinate;
-use nostr::{Event, EventId, Filter, FiltersMatchEvent, Timestamp, Url};
+use nostr::{Event, EventId, Filter, Timestamp, Url};
 use tokio::sync::RwLock;
 
 use crate::{
@@ -167,15 +167,13 @@ impl NostrDatabase for MemoryDatabase {
     #[tracing::instrument(skip_all, level = "trace")]
     async fn query(&self, filters: Vec<Filter>) -> Result<Vec<Event>, Self::Err> {
         if self.opts.events {
-            let ids = self.indexes.query(filters.clone()).await;
+            let ids = self.indexes.query(filters).await;
             let events = self.events.read().await;
 
             let mut list: Vec<Event> = Vec::new();
             for event_id in ids.into_iter() {
-                if let Some(event) = events.get(&event_id) {
-                    if filters.match_event(event) {
-                        list.push(event.clone());
-                    }
+                if let Some(event) = events.get(&event_id).cloned() {
+                    list.push(event);
                 }
             }
             Ok(list)
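Because the indexes now apply the full filter via `FilterIndex::match_event`, `query` no longer re-checks `filters.match_event(event)` on each stored event, which also removes the `filters.clone()`. A usage sketch against the `query` signature shown above (the `MemoryDatabase` constructor is an assumption for this sketch; check the crate for the actual one):

use nostr::{Filter, Kind};
use nostr_database::memory::MemoryDatabase;
use nostr_database::NostrDatabase;

#[tokio::main]
async fn main() {
    // Assumption: a default constructor; the real API may take options.
    let db = MemoryDatabase::default();
    let filter = Filter::new().kind(Kind::TextNote).limit(10);
    // Matching now happens entirely inside the database indexes;
    // no client-side `match_event` pass exists after this commit.
    let events = db.query(vec![filter]).await.expect("query failed");
    println!("matched {} events", events.len());
}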
crates/nostr-database/src/tag_indexes.rs (moved from the `nostr` crate)
@@ -4,28 +4,24 @@
 
 //! Tag Indexes
 
-use alloc::string::{String, ToString};
-use alloc::vec::Vec;
+use std::collections::{HashMap, HashSet};
+use std::ops::{Deref, DerefMut};
 
-use alloc::collections::{BTreeMap, BTreeSet};
-use core::ops::{Deref, DerefMut};
-
-use bitcoin::hashes::siphash24::Hash as SipHash24;
-use bitcoin::hashes::Hash;
-
-use crate::{Alphabet, GenericTagValue};
+use nostr::hashes::siphash24::Hash as SipHash24;
+use nostr::hashes::Hash;
+use nostr::{Alphabet, GenericTagValue};
 
 /// Tag Index Value Size
 pub const TAG_INDEX_VALUE_SIZE: usize = 8;
 
 /// Tag Indexes
 #[derive(Debug, Clone, Default, PartialEq, Eq)]
 pub struct TagIndexes {
-    inner: BTreeMap<Alphabet, TagIndexValues>,
+    inner: HashMap<Alphabet, TagIndexValues>,
 }
 
 impl Deref for TagIndexes {
-    type Target = BTreeMap<Alphabet, TagIndexValues>;
+    type Target = HashMap<Alphabet, TagIndexValues>;
     fn deref(&self) -> &Self::Target {
         &self.inner
     }
@@ -76,11 +72,11 @@ where
 /// Tag Index Values
 #[derive(Debug, Clone, Default, PartialEq, Eq)]
 pub struct TagIndexValues {
-    inner: BTreeSet<[u8; TAG_INDEX_VALUE_SIZE]>,
+    inner: HashSet<[u8; TAG_INDEX_VALUE_SIZE]>,
 }
 
 impl Deref for TagIndexValues {
-    type Target = BTreeSet<[u8; TAG_INDEX_VALUE_SIZE]>;
+    type Target = HashSet<[u8; TAG_INDEX_VALUE_SIZE]>;
     fn deref(&self) -> &Self::Target {
         &self.inner
     }
@@ -94,10 +90,11 @@ impl DerefMut for TagIndexValues {
 
 impl TagIndexValues {
     #[allow(missing_docs)]
-    pub fn iter(
-        set: &BTreeSet<GenericTagValue>,
-    ) -> impl Iterator<Item = [u8; TAG_INDEX_VALUE_SIZE]> + '_ {
-        set.iter().map(|value| {
+    pub fn iter<'a, I>(iter: I) -> impl Iterator<Item = [u8; TAG_INDEX_VALUE_SIZE]> + 'a
+    where
+        I: Iterator<Item = &'a GenericTagValue> + 'a,
+    {
+        iter.map(|value| {
             let s: String = value.to_string();
             hash(s)
         })
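Tag values are not indexed verbatim: each `GenericTagValue` is stringified and reduced to an 8-byte SipHash-2-4 fingerprint (`TAG_INDEX_VALUE_SIZE`), so lookups compare cheap fixed-size keys instead of full strings, with only a negligible false-positive chance from collisions. A sketch of that reduction, assuming the module's private `hash` helper wraps a keyed SipHash digest (the `(0, 0)` keys are an assumption):

use nostr::hashes::siphash24::Hash as SipHash24;
use nostr::hashes::Hash;

const TAG_INDEX_VALUE_SIZE: usize = 8;

// Reduce an arbitrary tag value to a fixed 8-byte fingerprint.
// Assumption: the module's private `hash` helper is equivalent;
// the SipHash keys used here are illustrative.
fn hash_tag_value(value: &str) -> [u8; TAG_INDEX_VALUE_SIZE] {
    let digest = SipHash24::hash_with_keys(0, 0, value.as_bytes());
    digest.to_byte_array() // a SipHash-2-4 digest is exactly 8 bytes
}

fn main() {
    let a = hash_tag_value("wss://relay.example.com");
    let b = hash_tag_value("wss://relay.example.com");
    assert_eq!(a, b); // deterministic: equal values map to equal keys
    println!("{:02x?}", a);
}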