fix: update benchmarks to use public API
This commit updates the benchmarks to use the public API instead of internal functions:

- Replace `decrypt_full_set` with the public `decrypt` function in benchmarks
- Update imports in benches/lib.rs to use only the public API
- Maintain the same benchmarking functionality while using the proper public interface

The change ensures that the benchmarks follow the same patterns as regular use
of the library by relying on the public API rather than internal implementation
details. This makes the benchmarks more representative of real-world usage
and preserves proper API boundaries.
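For reference, this is roughly what the read benchmark looks like after the change. It is a minimal sketch reconstructed from the hunks below; the `iter_batched` call and the surrounding function body are assumptions filled in around the visible diff context, not an exact copy of the file:

    // Sketch of benches/lib.rs after this commit (reconstructed; Criterion harness omitted).
    use criterion::{BatchSize, Bencher, Criterion};
    use self_encryption::{decrypt, encrypt, test_helpers::random_bytes};

    // Benchmark full-set decryption through the public API only.
    fn read(b: &mut Bencher, bytes_len: usize) {
        b.iter_batched(
            // setup (untimed): encrypt random data to obtain a data map and chunks
            || encrypt(random_bytes(bytes_len)).unwrap(),
            // timed routine: decrypt the full set via the public `decrypt` function
            |(data_map, encrypted_chunks)| {
                let _raw_data = decrypt(&data_map, &encrypted_chunks).unwrap();
            },
            BatchSize::SmallInput,
        );
    }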
dirvine committed Nov 18, 2024
1 parent a921482 commit 2ba9389
Showing 7 changed files with 127 additions and 87 deletions.
4 changes: 2 additions & 2 deletions benches/lib.rs
@@ -44,7 +44,7 @@
#![allow(missing_copy_implementations, missing_debug_implementations)]

use criterion::{BatchSize, Bencher, Criterion};
- use self_encryption::{decrypt_full_set, encrypt, test_helpers::random_bytes};
+ use self_encryption::{decrypt, encrypt, test_helpers::random_bytes};
use std::time::Duration;

// sample size is _NOT_ the number of times the command is run...
@@ -75,7 +75,7 @@ fn read(b: &mut Bencher, bytes_len: usize) {
|| encrypt(random_bytes(bytes_len)).unwrap(),
// actual benchmark
|(data_map, encrypted_chunks)| {
- let _raw_data = decrypt_full_set(&data_map, &encrypted_chunks).unwrap();
+ let _raw_data = decrypt(&data_map, &encrypted_chunks).unwrap();
},
BatchSize::SmallInput,
);
5 changes: 2 additions & 3 deletions examples/basic_encryptor.rs
@@ -41,9 +41,9 @@
use bytes::Bytes;
use docopt::Docopt;
use rayon::prelude::*;
+ use self_encryption::decrypt;
use self_encryption::{
- self, DataMap, EncryptedChunk, Error, Result,
- encrypt, serialize, deserialize
+ self, deserialize, encrypt, serialize, DataMap, EncryptedChunk, Error, Result,
};
use serde::Deserialize;
use std::{
@@ -56,7 +56,6 @@ use std::{
sync::Arc,
};
use xor_name::XorName;
- use self_encryption::decrypt;

#[rustfmt::skip]
static USAGE: &str = "
22 changes: 6 additions & 16 deletions examples/parallel_streaming_decryptor.rs
@@ -1,13 +1,7 @@
use bytes::Bytes;
- use self_encryption::{
- streaming_decrypt_from_storage, DataMap, Error, Result, deserialize
- };
- use std::{
- fs::File,
- io::Read,
- path::Path,
- };
use rayon::prelude::*;
+ use self_encryption::{deserialize, streaming_decrypt_from_storage, DataMap, Error, Result};
+ use std::{fs::File, io::Read, path::Path};
use xor_name::XorName;

fn main() -> Result<()> {
@@ -31,21 +25,17 @@ fn main() -> Result<()> {
};

// Use the streaming decryption function
- streaming_decrypt_from_storage(
- &data_map,
- Path::new("output_file.dat"),
- get_chunk_parallel,
- )?;
+ streaming_decrypt_from_storage(&data_map, Path::new("output_file.dat"), get_chunk_parallel)?;

Ok(())
}

// Helper function to load data map from a file
fn load_data_map(path: &str) -> Result<DataMap> {
- let mut file = File::open(path)
- .map_err(|e| Error::Generic(format!("Failed to open data map: {}", e)))?;
+ let mut file =
+ File::open(path).map_err(|e| Error::Generic(format!("Failed to open data map: {}", e)))?;
let mut data = Vec::new();
file.read_to_end(&mut data)
.map_err(|e| Error::Generic(format!("Failed to read data map: {}", e)))?;
deserialize(&data)
}
6 changes: 1 addition & 5 deletions src/decrypt.rs
@@ -29,11 +29,7 @@ pub fn decrypt_sorted_set(

/// Decrypt a chunk, given the index of that chunk in the sequence of chunks,
/// and the raw encrypted content.
- pub fn decrypt_chunk(
- chunk_index: usize,
- content: &Bytes,
- src_hashes: &[XorName],
- ) -> Result<Bytes> {
+ pub fn decrypt_chunk(chunk_index: usize, content: &Bytes, src_hashes: &[XorName]) -> Result<Bytes> {
let pki = get_pad_key_and_iv(chunk_index, src_hashes);
let (pad, key, iv) = pki;

27 changes: 10 additions & 17 deletions src/lib.rs
@@ -105,7 +105,6 @@ pub use decrypt::decrypt_chunk
use utils::*;
pub use xor_name::XorName;


pub use self::{
data_map::{ChunkInfo, DataMap},
error::{Error, Result},
@@ -218,10 +217,7 @@ pub fn encrypt(bytes: Bytes) -> Result<(DataMap, Vec<EncryptedChunk>)> {
/// # Returns
///
/// * `Result<Bytes>` - The decrypted data or an error if chunks are missing/corrupted
- pub(crate) fn decrypt_full_set(
- data_map: &DataMap,
- chunks: &[EncryptedChunk],
- ) -> Result<Bytes> {
+ pub(crate) fn decrypt_full_set(data_map: &DataMap, chunks: &[EncryptedChunk]) -> Result<Bytes> {
let src_hashes = extract_hashes(data_map);

// Create a mapping of chunk hashes to chunks for efficient lookup
@@ -505,7 +501,9 @@ where

for (info, chunk) in root_map.infos().iter().zip(encrypted_chunks.iter()) {
let decrypted_chunk = decrypt_chunk(info.index, &chunk.content, &src_hashes)?;
- output_file.write_all(&decrypted_chunk).map_err(Error::from)?;
+ output_file
+ .write_all(&decrypted_chunk)
+ .map_err(Error::from)?;
}

Ok(())
@@ -524,10 +522,7 @@
/// # Returns
///
/// * `Result<DataMap>` - The root data map or an error if retrieval or decryption fails.
- pub fn get_root_data_map_parallel<F>(
- data_map: DataMap,
- get_chunk_parallel: &F,
- ) -> Result<DataMap>
+ pub fn get_root_data_map_parallel<F>(data_map: DataMap, get_chunk_parallel: &F) -> Result<DataMap>
where
F: Fn(&[XorName]) -> Result<Vec<Bytes>>,
{
@@ -598,8 +593,7 @@ where
///
/// * `Result<Vec<u8>>` - The serialized bytes or an error
pub fn serialize<T: serde::Serialize>(data: &T) -> Result<Vec<u8>> {
- bincode::serialize(data)
- .map_err(|e| Error::Generic(format!("Serialization error: {}", e)))
+ bincode::serialize(data).map_err(|e| Error::Generic(format!("Serialization error: {}", e)))
}

/// Deserializes bytes into a data structure using bincode.
@@ -612,8 +606,7 @@ pub fn serialize<T: serde::Serialize>(data: &T) -> Result<Vec<u8>> {
///
/// * `Result<T>` - The deserialized data structure or an error
pub fn deserialize<T: serde::de::DeserializeOwned>(bytes: &[u8]) -> Result<T> {
- bincode::deserialize(bytes)
- .map_err(|e| Error::Generic(format!("Deserialization error: {}", e)))
+ bincode::deserialize(bytes).map_err(|e| Error::Generic(format!("Deserialization error: {}", e)))
}

/// Verifies and deserializes a chunk by checking its content hash matches the provided name.
@@ -632,18 +625,18 @@ pub fn verify_chunk(name: XorName, bytes: &[u8]) -> Result<EncryptedChunk> {
let chunk = EncryptedChunk {
content: Bytes::from(bytes.to_vec()),
};

// Calculate the hash of the encrypted content directly
let calculated_hash = XorName::from_content(chunk.content.as_ref());

// Verify the hash matches
if calculated_hash != name {
return Err(Error::Generic(format!(
"Chunk content hash mismatch. Expected: {:?}, Got: {:?}",
name, calculated_hash
)));
}

Ok(chunk)
}

49 changes: 30 additions & 19 deletions src/python.rs
@@ -1,11 +1,9 @@
use crate::{
- decrypt as rust_decrypt,
- decrypt_from_storage as rust_decrypt_from_storage,
- encrypt as rust_encrypt,
- encrypt_from_file as rust_encrypt_from_file,
+ decrypt as rust_decrypt, decrypt_from_storage as rust_decrypt_from_storage,
+ encrypt as rust_encrypt, encrypt_from_file as rust_encrypt_from_file,
shrink_data_map as rust_shrink_data_map,
- streaming_decrypt_from_storage as rust_streaming_decrypt_from_storage,
- ChunkInfo, DataMap as RustDataMap, EncryptedChunk as RustEncryptedChunk, Error, Result,
+ streaming_decrypt_from_storage as rust_streaming_decrypt_from_storage, ChunkInfo,
+ DataMap as RustDataMap, EncryptedChunk as RustEncryptedChunk, Error, Result,
};
use bytes::Bytes;
use pyo3::prelude::*;
@@ -140,24 +138,28 @@ fn encrypt(_py: Python<'_>, data: &PyBytes) -> PyResult<(PyDataMap, Vec<PyEncryp
let bytes = Bytes::from(data.as_bytes().to_vec());
let (data_map, chunks) = rust_encrypt(bytes)
.map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(e.to_string()))?;

Ok((
PyDataMap { inner: data_map },
- chunks.into_iter().map(|c| PyEncryptedChunk { inner: c }).collect(),
+ chunks
+ .into_iter()
+ .map(|c| PyEncryptedChunk { inner: c })
+ .collect(),
))
}

#[pyfunction]
fn encrypt_from_file(input_path: String, output_dir: String) -> PyResult<(PyDataMap, Vec<String>)> {
- let (data_map, chunk_names) = rust_encrypt_from_file(
- &PathBuf::from(input_path),
- &PathBuf::from(output_dir),
- )
- .map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(e.to_string()))?;
+ let (data_map, chunk_names) =
+ rust_encrypt_from_file(&PathBuf::from(input_path), &PathBuf::from(output_dir))
+ .map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(e.to_string()))?;

Ok((
PyDataMap { inner: data_map },
- chunk_names.into_iter().map(|name| hex::encode(name.0)).collect(),
+ chunk_names
+ .into_iter()
+ .map(|name| hex::encode(name.0))
+ .collect(),
))
}

@@ -166,7 +168,7 @@ fn decrypt(data_map: &PyDataMap, chunks: Vec<PyEncryptedChunk>) -> PyResult<Py<P
let chunks: Vec<RustEncryptedChunk> = chunks.into_iter().map(|c| c.inner).collect();
let result = rust_decrypt(&data_map.inner, &chunks)
.map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(e.to_string()))?;

Python::with_gil(|py| Ok(PyBytes::new(py, &result).into()))
}

@@ -212,7 +214,10 @@ fn shrink_data_map(

Ok((
PyDataMap { inner: shrunk_map },
- chunks.into_iter().map(|c| PyEncryptedChunk { inner: c }).collect(),
+ chunks
+ .into_iter()
+ .map(|c| PyEncryptedChunk { inner: c })
+ .collect(),
))
}

@@ -234,15 +239,21 @@ fn streaming_decrypt_from_storage(
Ok(chunk_data.into_iter().map(Bytes::from).collect())
};

- rust_streaming_decrypt_from_storage(&data_map.inner, &PathBuf::from(output_path), get_chunk_parallel)
- .map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(e.to_string()))
+ rust_streaming_decrypt_from_storage(
+ &data_map.inner,
+ &PathBuf::from(output_path),
+ get_chunk_parallel,
+ )
+ .map_err(|e| PyErr::new::<pyo3::exceptions::PyValueError, _>(e.to_string()))
}

#[pyfunction]
fn verify_chunk(name: &PyXorName, content: &PyBytes) -> PyResult<PyEncryptedChunk> {
match crate::verify_chunk(name.inner, content.as_bytes()) {
Ok(chunk) => Ok(PyEncryptedChunk { inner: chunk }),
- Err(e) => Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(e.to_string())),
+ Err(e) => Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+ e.to_string(),
+ )),
}
}
