Replace Ahash with Fnv for Hashbrown #825

Merged: 4 commits, Jun 19, 2024
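
This PR stops relying on hashbrown's bundled ahash-based default hasher. Each affected crate now selects its hasher explicitly: ahash on targets with full atomic support, fnv everywhere else, through a cfg-gated DefaultHasher alias that is handed to every map via core::hash::BuildHasherDefault. The sketch below condenses the pattern the diff repeats in each file; it assumes hashbrown, ahash, and fnv are declared as in the manifests changed here, and the helper name new_map is only illustrative.

// Condensed sketch of the hasher-selection pattern applied throughout this PR.
use core::hash::BuildHasherDefault;
use hashbrown::HashMap;

// ahash when the target supports all atomic widths...
#[cfg(all(
    target_has_atomic = "8",
    target_has_atomic = "16",
    target_has_atomic = "32",
    target_has_atomic = "64",
    target_has_atomic = "ptr"
))]
type DefaultHasher = ahash::AHasher;

// ...and fnv as the portable fallback otherwise.
#[cfg(not(all(
    target_has_atomic = "8",
    target_has_atomic = "16",
    target_has_atomic = "32",
    target_has_atomic = "64",
    target_has_atomic = "ptr"
)))]
type DefaultHasher = fnv::FnvHasher;

// With hashbrown's default hasher feature disabled there is no implicit
// hasher, so maps are built with an explicit BuildHasher.
fn new_map<K, V>() -> HashMap<K, V, BuildHasherDefault<DefaultHasher>> {
    HashMap::with_hasher(BuildHasherDefault::default())
}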
3 changes: 1 addition & 2 deletions Cargo.toml
@@ -55,7 +55,7 @@ num-integer = { version = "0.1", default-features = false }
criterion = "0.5.0"
derivative = "2"
digest = { version = "0.10", default-features = false }
hashbrown = "0.14"
hashbrown = { version = "0.14", default-features = false, features = ["inline-more", "allocator-api2"] }
hex = "0.4"
itertools = { version = "0.13", default-features = false }
libtest-mimic = "0.7.0"
@@ -69,7 +69,6 @@ sha3 = { version = "0.10", default-features = false }
blake2 = { version = "0.10", default-features = false }
zeroize = { version = "1", default-features = false }


proc-macro2 = "1.0"
quote = "1.0"
syn = "2.0"
6 changes: 6 additions & 0 deletions ec/Cargo.toml
@@ -29,6 +29,12 @@ zeroize = { workspace = true, features = ["zeroize_derive"] }
hashbrown.workspace = true
itertools.workspace = true

[target.'cfg(all(target_has_atomic = "8", target_has_atomic = "16", target_has_atomic = "32", target_has_atomic = "64", target_has_atomic = "ptr"))'.dependencies]
ahash = { version = "0.8", default-features = false}

[target.'cfg(not(all(target_has_atomic = "8", target_has_atomic = "16", target_has_atomic = "32", target_has_atomic = "64", target_has_atomic = "ptr")))'.dependencies]
fnv = { version = "1.0", default-features = false }

[dev-dependencies]
ark-test-curves = { workspace = true, features = ["bls12_381_curve"] }
sha2.workspace = true
21 changes: 20 additions & 1 deletion ec/src/hashing/curve_maps/elligator2.rs
@@ -149,6 +149,24 @@ impl<P: Elligator2Config> MapToCurve<Projective<P>> for Elligator2Map<P> {

#[cfg(test)]
mod test {
#[cfg(all(
target_has_atomic = "8",
target_has_atomic = "16",
target_has_atomic = "32",
target_has_atomic = "64",
target_has_atomic = "ptr"
))]
type DefaultHasher = ahash::AHasher;

#[cfg(not(all(
target_has_atomic = "8",
target_has_atomic = "16",
target_has_atomic = "32",
target_has_atomic = "64",
target_has_atomic = "ptr"
)))]
type DefaultHasher = fnv::FnvHasher;

use crate::{
hashing::{map_to_curve_hasher::MapToCurveBasedHasher, HashToCurve},
CurveConfig,
@@ -275,7 +293,8 @@ mod test {
);
}

-let mut counts = HashMap::new();
+let mut counts =
+    HashMap::with_hasher(core::hash::BuildHasherDefault::<DefaultHasher>::default());

let mode = map_range
.iter()
21 changes: 20 additions & 1 deletion ec/src/hashing/curve_maps/swu.rs
@@ -142,6 +142,24 @@ impl<P: SWUConfig> MapToCurve<Projective<P>> for SWUMap<P> {

#[cfg(test)]
mod test {
#[cfg(all(
target_has_atomic = "8",
target_has_atomic = "16",
target_has_atomic = "32",
target_has_atomic = "64",
target_has_atomic = "ptr"
))]
type DefaultHasher = ahash::AHasher;

#[cfg(not(all(
target_has_atomic = "8",
target_has_atomic = "16",
target_has_atomic = "32",
target_has_atomic = "64",
target_has_atomic = "ptr"
)))]
type DefaultHasher = fnv::FnvHasher;

use crate::{
hashing::{map_to_curve_hasher::MapToCurveBasedHasher, HashToCurve},
CurveConfig,
@@ -255,7 +273,8 @@ mod test {
map_range.push(SWUMap::<TestSWUMapToCurveConfig>::map_to_curve(element).unwrap());
}

-let mut counts = HashMap::new();
+let mut counts =
+    HashMap::with_hasher(core::hash::BuildHasherDefault::<DefaultHasher>::default());

let mode = map_range
.iter()
18 changes: 18 additions & 0 deletions ec/src/scalar_mul/variable_base/mod.rs
@@ -9,6 +9,24 @@ pub use stream_pippenger::*;

use super::ScalarMul;

#[cfg(all(
target_has_atomic = "8",
target_has_atomic = "16",
target_has_atomic = "32",
target_has_atomic = "64",
target_has_atomic = "ptr"
))]
type DefaultHasher = ahash::AHasher;

#[cfg(not(all(
target_has_atomic = "8",
target_has_atomic = "16",
target_has_atomic = "32",
target_has_atomic = "64",
target_has_atomic = "ptr"
)))]
type DefaultHasher = fnv::FnvHasher;

pub trait VariableBaseMSM: ScalarMul {
/// Computes an inner product between the [`PrimeField`] elements in `scalars`
/// and the corresponding group elements in `bases`.
9 changes: 6 additions & 3 deletions ec/src/scalar_mul/variable_base/stream_pippenger.rs
@@ -4,7 +4,7 @@ use ark_ff::{PrimeField, Zero};
use ark_std::{borrow::Borrow, vec::*};
use hashbrown::HashMap;

-use super::VariableBaseMSM;
+use super::{DefaultHasher, VariableBaseMSM};

/// Struct for the chunked Pippenger algorithm.
pub struct ChunkedPippenger<G: VariableBaseMSM> {
@@ -67,7 +67,7 @@ impl<G: VariableBaseMSM> ChunkedPippenger<G> {

/// Hash map struct for Pippenger algorithm.
pub struct HashMapPippenger<G: VariableBaseMSM> {
-buffer: HashMap<G::MulBase, G::ScalarField>,
+buffer: HashMap<G::MulBase, G::ScalarField, core::hash::BuildHasherDefault<DefaultHasher>>,
result: G,
buf_size: usize,
}
@@ -76,7 +76,7 @@ impl<G: VariableBaseMSM> HashMapPippenger<G> {
/// Produce a new hash map with the maximum msm buffer size.
pub fn new(max_msm_buffer: usize) -> Self {
Self {
-buffer: HashMap::with_capacity(max_msm_buffer),
+buffer: HashMap::with_capacity_and_hasher(
+    max_msm_buffer,
+    core::hash::BuildHasherDefault::<DefaultHasher>::default(),
+),
result: G::zero(),
buf_size: max_msm_buffer,
}
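
A hedged usage sketch follows: add and finalize are HashMapPippenger's pre-existing methods (not shown in this hunk), and the helper buffered_msm is only illustrative. The point is that the hasher swap stays internal, so callers construct and use the struct exactly as before.

use ark_ec::scalar_mul::variable_base::{HashMapPippenger, VariableBaseMSM};

// Accumulate (base, scalar) pairs and flush them through the buffered MSM.
fn buffered_msm<G: VariableBaseMSM>(bases: &[G::MulBase], scalars: &[G::ScalarField]) -> G {
    let mut acc = HashMapPippenger::<G>::new(bases.len().max(1));
    for (base, scalar) in bases.iter().zip(scalars) {
        acc.add(base, scalar);
    }
    acc.finalize()
}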
6 changes: 6 additions & 0 deletions poly/Cargo.toml
@@ -23,6 +23,12 @@ rayon = { workspace = true, optional = true }
derivative = { workspace = true, features = [ "use_core" ] }
hashbrown.workspace = true

[target.'cfg(all(target_has_atomic = "8", target_has_atomic = "16", target_has_atomic = "32", target_has_atomic = "64", target_has_atomic = "ptr"))'.dependencies]
ahash = { version = "0.8", default-features = false}

[target.'cfg(not(all(target_has_atomic = "8", target_has_atomic = "16", target_has_atomic = "32", target_has_atomic = "64", target_has_atomic = "ptr")))'.dependencies]
fnv = { version = "1.0", default-features = false }

[dev-dependencies]
ark-test-curves = { path = "../test-curves", default-features = false, features = [ "bls12_381_curve", "bn384_small_two_adicity_curve", "mnt4_753_curve"] }
criterion = "0.5.1"
18 changes: 18 additions & 0 deletions poly/src/evaluations/multivariate/multilinear/mod.rs
@@ -18,6 +18,24 @@ use ark_std::rand::Rng;

use crate::Polynomial;

#[cfg(all(
target_has_atomic = "8",
target_has_atomic = "16",
target_has_atomic = "32",
target_has_atomic = "64",
target_has_atomic = "ptr"
))]
type DefaultHasher = ahash::AHasher;

#[cfg(not(all(
target_has_atomic = "8",
target_has_atomic = "16",
target_has_atomic = "32",
target_has_atomic = "64",
target_has_atomic = "ptr"
)))]
type DefaultHasher = fnv::FnvHasher;

/// This trait describes an interface for the multilinear extension
/// of an array.
/// The latter is a multilinear polynomial represented in terms of its
17 changes: 12 additions & 5 deletions poly/src/evaluations/multivariate/multilinear/sparse.rs
@@ -19,6 +19,8 @@ use hashbrown::HashMap;
#[cfg(feature = "parallel")]
use rayon::prelude::*;

use super::DefaultHasher;

/// Stores a multilinear polynomial in sparse evaluation form.
#[derive(Clone, PartialEq, Eq, Hash, Default, CanonicalSerialize, CanonicalDeserialize)]
pub struct SparseMultilinearExtension<F: Field> {
@@ -66,7 +68,8 @@ impl<F: Field> SparseMultilinearExtension<F> {
) -> Self {
assert!(num_nonzero_entries <= (1 << num_vars));

-let mut map = HashMap::new();
+let mut map =
+    HashMap::with_hasher(core::hash::BuildHasherDefault::<DefaultHasher>::default());
for _ in 0..num_nonzero_entries {
let mut index = usize::rand(rng) & ((1 << num_vars) - 1);
while map.get(&index).is_some() {
@@ -173,7 +176,8 @@ impl<F: Field> MultilinearExtension<F> for SparseMultilinearExtension<F> {
point = &point[focus_length..];
let pre = precompute_eq(focus);
let dim = focus.len();
-let mut result = HashMap::new();
+let mut result =
+    HashMap::with_hasher(core::hash::BuildHasherDefault::<DefaultHasher>::default());
for src_entry in last.iter() {
let old_idx = *src_entry.0;
let gz = pre[old_idx & ((1 << dim) - 1)];
@@ -261,7 +265,8 @@ impl<'a, 'b, F: Field> Add<&'a SparseMultilinearExtension<F>>
"trying to add non-zero polynomial with different number of variables"
);
// simply merge the evaluations
-let mut evaluations = HashMap::new();
+let mut evaluations =
+    HashMap::with_hasher(core::hash::BuildHasherDefault::<DefaultHasher>::default());
for (&i, &v) in self.evaluations.iter().chain(rhs.evaluations.iter()) {
*(evaluations.entry(i).or_insert(F::zero())) += v;
}
@@ -395,11 +400,13 @@ fn tuples_to_treemap<F: Field>(tuples: &[(usize, F)]) -> BTreeMap<usize, F> {
BTreeMap::from_iter(tuples.iter().map(|(i, v)| (*i, *v)))
}

-fn treemap_to_hashmap<F: Field>(map: &BTreeMap<usize, F>) -> HashMap<usize, F> {
+fn treemap_to_hashmap<F: Field>(
+    map: &BTreeMap<usize, F>,
+) -> HashMap<usize, F, core::hash::BuildHasherDefault<DefaultHasher>> {
HashMap::from_iter(map.iter().map(|(i, v)| (*i, *v)))
}

-fn hashmap_to_treemap<F: Field>(map: &HashMap<usize, F>) -> BTreeMap<usize, F> {
+fn hashmap_to_treemap<F: Field, S>(map: &HashMap<usize, F, S>) -> BTreeMap<usize, F> {
BTreeMap::from_iter(map.iter().map(|(i, v)| (*i, *v)))
}
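
The last two helpers change shape because the map type now names the cfg-selected hasher: treemap_to_hashmap spells it out in its return type, while hashmap_to_treemap simply becomes generic over any build hasher S. A small stand-alone sketch of the latter (to_sorted and the u64 value type are placeholders, not code from this PR):

use std::collections::BTreeMap;
use hashbrown::HashMap;

// Works for a map built with either AHasher or FnvHasher, since iteration
// never touches the hasher.
fn to_sorted<S>(map: &HashMap<usize, u64, S>) -> BTreeMap<usize, u64> {
    map.iter().map(|(k, v)| (*k, *v)).collect()
}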
