diff --git a/benches/lib.rs b/benches/lib.rs
index e3f67a5af..275e8471e 100644
--- a/benches/lib.rs
+++ b/benches/lib.rs
@@ -51,6 +51,11 @@ use std::time::Duration;
 // https://bheisler.github.io/criterion.rs/book/analysis.html#measurement
 const SAMPLE_SIZE: usize = 20;
 
+/// The maximum size (before compression) of an individual chunk of a file, defined as 1 MiB (1024 * 1024 bytes).
+const MAX_CHUNK_SIZE: usize = 1024 * 1024;
+/// The minimum size (before compression) of an individual chunk of a file, defined as 1 byte.
+const MIN_CHUNK_SIZE: usize = 1;
+
 fn custom_criterion() -> Criterion {
     Criterion::default()
         .measurement_time(Duration::from_secs(40))
@@ -63,7 +68,8 @@ fn write(b: &mut Bencher<'_>, bytes_len: usize) {
         || random_bytes(bytes_len),
         // actual benchmark
         |bytes| {
-            let (_data_map, _encrypted_chunks) = encrypt(bytes).unwrap();
+            let (_data_map, _encrypted_chunks) =
+                encrypt(bytes, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE).unwrap();
         },
         BatchSize::SmallInput,
     );
@@ -72,7 +78,7 @@ fn write(b: &mut Bencher<'_>, bytes_len: usize) {
 fn read(b: &mut Bencher, bytes_len: usize) {
     b.iter_batched(
         // the setup
-        || encrypt(random_bytes(bytes_len)).unwrap(),
+        || encrypt(random_bytes(bytes_len), MIN_CHUNK_SIZE, MAX_CHUNK_SIZE).unwrap(),
         // actual benchmark
         |(data_map, encrypted_chunks)| {
             let _raw_data = decrypt_full_set(&data_map, &encrypted_chunks).unwrap();
diff --git a/examples/basic_encryptor.rs b/examples/basic_encryptor.rs
index ee1fb2b49..f26d4a85b 100644
--- a/examples/basic_encryptor.rs
+++ b/examples/basic_encryptor.rs
@@ -89,6 +89,11 @@ fn file_name(name: XorName) -> String {
     string
 }
 
+/// The maximum size (before compression) of an individual chunk of a file, defined as 1 MiB (1024 * 1024 bytes).
+const MAX_CHUNK_SIZE: usize = 1024 * 1024;
+/// The minimum size (before compression) of an individual chunk of a file, defined as 1 byte.
+const MIN_CHUNK_SIZE: usize = 1;
+
 #[derive(Clone)]
 struct DiskBasedStorage {
     pub(crate) storage_path: String,
@@ -147,7 +152,8 @@ async fn main() {
                 Err(error) => return println!("{}", error),
             }
 
-            let (data_map, encrypted_chunks) = encrypt(Bytes::from(data)).unwrap();
+            let (data_map, encrypted_chunks) =
+                encrypt(Bytes::from(data), MIN_CHUNK_SIZE, MAX_CHUNK_SIZE).unwrap();
 
             let result = encrypted_chunks
                 .par_iter()
diff --git a/src/chunk.rs b/src/chunk.rs
index ab65225c0..af644fd9f 100644
--- a/src/chunk.rs
+++ b/src/chunk.rs
@@ -32,15 +32,20 @@ pub struct RawChunk {
 
 /// Hash all the chunks.
 /// Creates [num cores] batches.
-pub(crate) fn batch_chunks(bytes: Bytes) -> (usize, Vec<EncryptionBatch>) {
+pub(crate) fn batch_chunks(
+    bytes: Bytes,
+    min_chunk_size: usize,
+    max_chunk_size: usize,
+) -> (usize, Vec<EncryptionBatch>) {
     let data_size = bytes.len();
-    let num_chunks = get_num_chunks(data_size);
+    let num_chunks = get_num_chunks(data_size, min_chunk_size, max_chunk_size);
 
     let raw_chunks: Vec<_> = (0..num_chunks)
         .map(|index| (index, bytes.clone()))
         .par_bridge()
         .map(|(index, bytes)| {
-            let (start, end) = get_start_end_positions(data_size, index);
+            let (start, end) =
+                get_start_end_positions(data_size, index, min_chunk_size, max_chunk_size);
             let data = bytes.slice(start..end);
             let hash = XorName::from_content(data.as_ref());
             RawChunk { index, data, hash }
@@ -63,10 +68,14 @@ pub(crate) fn batch_chunks(bytes: Bytes) -> (usize, Vec<EncryptionBatch>) {
 }
 
 /// Calculate (start_position, end_position) for each chunk for the input file size
-pub(crate) fn batch_positions(data_size: usize) -> Vec<(usize, usize)> {
-    let num_chunks = get_num_chunks(data_size);
+pub(crate) fn batch_positions(
+    data_size: usize,
+    min_chunk_size: usize,
+    max_chunk_size: usize,
+) -> Vec<(usize, usize)> {
+    let num_chunks = get_num_chunks(data_size, min_chunk_size, max_chunk_size);
 
     (0..num_chunks)
-        .map(|index| get_start_end_positions(data_size, index))
+        .map(|index| get_start_end_positions(data_size, index, min_chunk_size, max_chunk_size))
         .collect()
 }
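
The chunk-count and positioning rules these helpers delegate to appear in full in the `src/lib.rs` hunks below. As a quick sanity check of the parameterization, here is a minimal standalone sketch mirroring the diffed `get_num_chunks` logic (the real helper is private to the crate), using the 1 byte / 1 MiB bounds that the callers in this diff pass:

```rust
// Standalone mirror of the parameterized chunk-count rules from src/lib.rs
// below; not the crate's API, just the same arithmetic.
fn num_chunks(file_size: usize, min_chunk_size: usize, max_chunk_size: usize) -> usize {
    if file_size < 3 * min_chunk_size {
        return 0; // too small to self-encrypt
    }
    if file_size < 3 * max_chunk_size {
        return 3; // small files are always split into exactly three chunks
    }
    if file_size % max_chunk_size == 0 {
        file_size / max_chunk_size
    } else {
        file_size / max_chunk_size + 1
    }
}

fn main() {
    let (min, max) = (1, 1024 * 1024); // the bounds used by the callers in this diff
    assert_eq!(num_chunks(2, min, max), 0);
    assert_eq!(num_chunks(1024, min, max), 3);
    assert_eq!(num_chunks(20 * max + 100, min, max), 21); // 20 full chunks + remainder
}
```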
diff --git a/src/data_map.rs b/src/data_map.rs
index c96019d0a..e099138d5 100644
--- a/src/data_map.rs
+++ b/src/data_map.rs
@@ -13,7 +13,7 @@ use xor_name::XorName;
 
 /// Holds the information that is required to recover the content of the encrypted file.
 /// This is held as a vector of `ChunkInfo`, i.e. a list of the file's chunk hashes.
-/// Only files larger than 3072 bytes (3 * MIN_CHUNK_SIZE) can be self-encrypted.
+/// Only files of at least 3 * min_chunk_size bytes can be self-encrypted.
 /// Smaller files will have to be batched together.
 #[derive(Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Clone)]
 pub struct DataMap(Vec<ChunkInfo>);
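
With the constants gone, the minimum encryptable size now follows the caller's `min_chunk_size`. A quick boundary check against the new `encrypt` signature (a sketch, assuming the 1-byte minimum used elsewhere in this diff):

```rust
use bytes::Bytes;
use self_encryption::encrypt;

fn main() {
    // Below 3 * min_chunk_size bytes, encrypt refuses the input.
    assert!(encrypt(Bytes::from(vec![0u8; 2]), 1, 1024 * 1024).is_err());
}
```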
diff --git a/src/lib.rs b/src/lib.rs
index 384cfd127..4b880d0e8 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -123,12 +123,6 @@ use xor_name::XorName;
 pub use bytes;
 pub use xor_name;
 
-/// The minimum size (before compression) of data to be self-encrypted, defined as 3B.
-pub const MIN_ENCRYPTABLE_BYTES: usize = 3 * MIN_CHUNK_SIZE;
-/// The maximum size (before compression) of an individual chunk of a file, defined as 500kiB.
-pub const MAX_CHUNK_SIZE: usize = 512 * 1024;
-/// The minimum size (before compression) of an individual chunk of a file, defined as 1B.
-pub const MIN_CHUNK_SIZE: usize = 1;
 /// Controls the compression-speed vs compression-density tradeoffs.  The higher the quality, the
 /// slower the compression.  Range is 0 to 11.
 pub const COMPRESSION_QUALITY: i32 = 6;
@@ -163,12 +157,17 @@ pub struct StreamSelfEncryptor {
 impl StreamSelfEncryptor {
 /// For encryption, return with an initialized streaming encryptor.
     /// If a `chunk_dir` is provided, the encrypted_chunks will be written into the specified dir as well.
-    pub fn encrypt_from_file(file_path: PathBuf, chunk_dir: Option<PathBuf>) -> Result<Self> {
+    pub fn encrypt_from_file(
+        file_path: PathBuf,
+        chunk_dir: Option<PathBuf>,
+        min_chunk_size: usize,
+        max_chunk_size: usize,
+    ) -> Result<Self> {
         let file = File::open(&*file_path)?;
         let metadata = file.metadata()?;
         let file_size = metadata.len();
 
-        let batch_positions = batch_positions(file_size as usize);
+        let batch_positions = batch_positions(file_size as usize, min_chunk_size, max_chunk_size);
 
         Ok(StreamSelfEncryptor {
             file_path,
@@ -350,13 +349,18 @@ impl StreamSelfDecryptor {
 }
 
 /// Read a file from the disk to encrypt, and output the chunks to a given output directory if present.
-pub fn encrypt_from_file(file_path: &Path, output_dir: &Path) -> Result<(DataMap, Vec<XorName>)> {
+pub fn encrypt_from_file(
+    file_path: &Path,
+    output_dir: &Path,
+    min_chunk_size: usize,
+    max_chunk_size: usize,
+) -> Result<(DataMap, Vec<XorName>)> {
     let mut file = File::open(file_path)?;
     let mut bytes = Vec::new();
     let _ = file.read_to_end(&mut bytes)?;
     let bytes = Bytes::from(bytes);
 
-    let (data_map, encrypted_chunks) = encrypt(bytes)?;
+    let (data_map, encrypted_chunks) = encrypt(bytes, min_chunk_size, max_chunk_size)?;
 
     let mut chunk_names = Vec::new();
     for chunk in encrypted_chunks {
@@ -401,16 +405,21 @@ pub fn decrypt_from_chunk_files(
 /// Encrypts a set of bytes and returns the encrypted data together with
 /// the data map that is derived from the input data, and is used to later decrypt the encrypted data.
 /// Returns an error if the size is too small for self-encryption.
-/// Only files larger than 3072 bytes (3 * MIN_CHUNK_SIZE) can be self-encrypted.
+/// Only files of at least 3 * min_chunk_size bytes can be self-encrypted.
 /// Smaller files will have to be batched together for self-encryption to work.
-pub fn encrypt(bytes: Bytes) -> Result<(DataMap, Vec<EncryptedChunk>)> {
-    if (MIN_ENCRYPTABLE_BYTES) > bytes.len() {
+pub fn encrypt(
+    bytes: Bytes,
+    min_chunk_size: usize,
+    max_chunk_size: usize,
+) -> Result<(DataMap, Vec<EncryptedChunk>)> {
+    let min_encryptable_bytes = 3 * min_chunk_size;
+    if min_encryptable_bytes > bytes.len() {
         return Err(Error::Generic(format!(
             "Too small for self-encryption! Required size at least {}",
-            MIN_ENCRYPTABLE_BYTES
+            min_encryptable_bytes
         )));
     }
-    let (num_chunks, batches) = chunk::batch_chunks(bytes);
+    let (num_chunks, batches) = chunk::batch_chunks(bytes, min_chunk_size, max_chunk_size);
     let (data_map, encrypted_chunks) = encrypt::encrypt(batches);
     if num_chunks > encrypted_chunks.len() {
         return Err(Error::Encryption);
@@ -480,13 +489,20 @@ pub struct SeekInfo {
 /// It is used to first fetch chunks using the `index_range`.
 /// Then the chunks are passed into `self_encryption::decrypt_range` together
 /// with `relative_pos` from the `SeekInfo` instance, and the `len` to be read.
-pub fn seek_info(file_size: usize, pos: usize, len: usize) -> SeekInfo {
-    let (start_index, end_index) = overlapped_chunks(file_size, pos, len);
-
-    let relative_pos = if start_index == 2 && file_size < 3 * MAX_CHUNK_SIZE {
-        pos - (2 * get_chunk_size(file_size, 0))
+pub fn seek_info(
+    file_size: usize,
+    pos: usize,
+    len: usize,
+    min_chunk_size: usize,
+    max_chunk_size: usize,
+) -> SeekInfo {
+    let (start_index, end_index) =
+        overlapped_chunks(file_size, pos, len, min_chunk_size, max_chunk_size);
+
+    let relative_pos = if start_index == 2 && file_size < 3 * max_chunk_size {
+        pos - (2 * get_chunk_size(file_size, 0, min_chunk_size, max_chunk_size))
     } else {
-        pos % get_chunk_size(file_size, start_index)
+        pos % get_chunk_size(file_size, start_index, min_chunk_size, max_chunk_size)
     };
 
     SeekInfo {
@@ -501,9 +517,15 @@ pub fn seek_info(file_size: usize, pos: usize, len: usize) -> SeekInfo {
 
 /// Returns the chunk index range [start, end) that is overlapped by the byte range defined by `pos`
 /// and `len`. Returns empty range if `file_size` is so small that there are no chunks.
-fn overlapped_chunks(file_size: usize, pos: usize, len: usize) -> (usize, usize) {
+fn overlapped_chunks(
+    file_size: usize,
+    pos: usize,
+    len: usize,
+    min_chunk_size: usize,
+    max_chunk_size: usize,
+) -> (usize, usize) {
     // FIX THIS SHOULD NOT BE ALLOWED
-    if file_size < (3 * MIN_CHUNK_SIZE) || pos >= file_size || len == 0 {
+    if file_size < (3 * min_chunk_size) || pos >= file_size || len == 0 {
         return (0, 0);
     }
 
@@ -513,8 +535,8 @@ fn overlapped_chunks(file_size: usize, pos: usize, len: usize) -> (usize, usize)
         None => file_size,
     };
 
-    let start_index = get_chunk_index(file_size, pos);
-    let end_index = get_chunk_index(file_size, end);
+    let start_index = get_chunk_index(file_size, pos, min_chunk_size, max_chunk_size);
+    let end_index = get_chunk_index(file_size, end, min_chunk_size, max_chunk_size);
 
     (start_index, end_index)
 }
@@ -561,26 +583,31 @@ fn get_pki(src_hash: &XorName, n_1_src_hash: &XorName, n_2_src_hash: &XorName) -
 }
 
 // Returns the number of chunks according to file size.
-fn get_num_chunks(file_size: usize) -> usize {
-    if file_size < (3 * MIN_CHUNK_SIZE) {
+fn get_num_chunks(file_size: usize, min_chunk_size: usize, max_chunk_size: usize) -> usize {
+    if file_size < (3 * min_chunk_size) {
         return 0;
     }
-    if file_size < (3 * MAX_CHUNK_SIZE) {
+    if file_size < (3 * max_chunk_size) {
         return 3;
     }
-    if file_size % MAX_CHUNK_SIZE == 0 {
-        file_size / MAX_CHUNK_SIZE
+    if file_size % max_chunk_size == 0 {
+        file_size / max_chunk_size
     } else {
-        (file_size / MAX_CHUNK_SIZE) + 1
+        (file_size / max_chunk_size) + 1
     }
 }
 
-// Returns the size of a chunk according to file size.
-fn get_chunk_size(file_size: usize, chunk_index: usize) -> usize {
-    if file_size < 3 * MIN_CHUNK_SIZE {
+// Returns the size of a chunk according to file size and the given chunk size bounds.
+fn get_chunk_size(
+    file_size: usize,
+    chunk_index: usize,
+    min_chunk_size: usize,
+    max_chunk_size: usize,
+) -> usize {
+    if file_size < 3 * min_chunk_size {
         return 0;
     }
-    if file_size < 3 * MAX_CHUNK_SIZE {
+    if file_size < 3 * max_chunk_size {
         if chunk_index < 2 {
             return file_size / 3;
         } else {
@@ -588,63 +615,82 @@ fn get_chunk_size(file_size: usize, chunk_index: usize) -> usize {
             return file_size - (2 * (file_size / 3));
         }
     }
-    let total_chunks = get_num_chunks(file_size);
+    let total_chunks = get_num_chunks(file_size, min_chunk_size, max_chunk_size);
     if chunk_index < total_chunks - 2 {
-        return MAX_CHUNK_SIZE;
+        return max_chunk_size;
     }
-    let remainder = file_size % MAX_CHUNK_SIZE;
+    let remainder = file_size % max_chunk_size;
     let penultimate = (total_chunks - 2) == chunk_index;
     if remainder == 0 {
-        return MAX_CHUNK_SIZE;
+        return max_chunk_size;
     }
-    if remainder < MIN_CHUNK_SIZE {
+    if remainder < min_chunk_size {
         if penultimate {
-            MAX_CHUNK_SIZE - MIN_CHUNK_SIZE
+            max_chunk_size - min_chunk_size
         } else {
-            MIN_CHUNK_SIZE + remainder
+            min_chunk_size + remainder
         }
     } else if penultimate {
-        MAX_CHUNK_SIZE
+        max_chunk_size
     } else {
         remainder
     }
 }
 
 // Returns the [start, end) half-open byte range of a chunk.
-fn get_start_end_positions(file_size: usize, chunk_index: usize) -> (usize, usize) {
-    if get_num_chunks(file_size) == 0 {
+fn get_start_end_positions(
+    file_size: usize,
+    chunk_index: usize,
+    min_chunk_size: usize,
+    max_chunk_size: usize,
+) -> (usize, usize) {
+    if get_num_chunks(file_size, min_chunk_size, max_chunk_size) == 0 {
         return (0, 0);
     }
-    let start = get_start_position(file_size, chunk_index);
-    (start, start + get_chunk_size(file_size, chunk_index))
+    let start = get_start_position(file_size, chunk_index, min_chunk_size, max_chunk_size);
+    (
+        start,
+        start + get_chunk_size(file_size, chunk_index, min_chunk_size, max_chunk_size),
+    )
 }
 
-fn get_start_position(file_size: usize, chunk_index: usize) -> usize {
-    let total_chunks = get_num_chunks(file_size);
+fn get_start_position(
+    file_size: usize,
+    chunk_index: usize,
+    min_chunk_size: usize,
+    max_chunk_size: usize,
+) -> usize {
+    let total_chunks = get_num_chunks(file_size, min_chunk_size, max_chunk_size);
     if total_chunks == 0 {
         return 0;
     }
     let last = (total_chunks - 1) == chunk_index;
-    let first_chunk_size = get_chunk_size(file_size, 0);
+    let first_chunk_size = get_chunk_size(file_size, 0, min_chunk_size, max_chunk_size);
     if last {
-        first_chunk_size * (chunk_index - 1) + get_chunk_size(file_size, chunk_index - 1)
+        first_chunk_size * (chunk_index - 1)
+            + get_chunk_size(file_size, chunk_index - 1, min_chunk_size, max_chunk_size)
     } else {
         first_chunk_size * chunk_index
     }
 }
 
-fn get_chunk_index(file_size: usize, position: usize) -> usize {
-    let num_chunks = get_num_chunks(file_size);
+fn get_chunk_index(
+    file_size: usize,
+    position: usize,
+    min_chunk_size: usize,
+    max_chunk_size: usize,
+) -> usize {
+    let num_chunks = get_num_chunks(file_size, min_chunk_size, max_chunk_size);
     if num_chunks == 0 {
         return 0; // FIX THIS SHOULD NOT BE ALLOWED
     }
 
-    let chunk_size = get_chunk_size(file_size, 0);
+    let chunk_size = get_chunk_size(file_size, 0, min_chunk_size, max_chunk_size);
     let remainder = file_size % chunk_size;
 
     if remainder == 0
-        || remainder >= MIN_CHUNK_SIZE
-        || position < file_size - remainder - MIN_CHUNK_SIZE
+        || remainder >= min_chunk_size
+        || position < file_size - remainder - min_chunk_size
     {
         usize::min(position / chunk_size, num_chunks - 1)
     } else {
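
Taken together, the new signatures make the chunk-size bounds an explicit part of the public API rather than crate constants. A minimal round-trip sketch under the bounds used throughout this diff:

```rust
use bytes::Bytes;
use self_encryption::{decrypt_full_set, encrypt, Result};

// Bounds are now chosen by the caller, not baked into the crate.
const MIN_CHUNK_SIZE: usize = 1;
const MAX_CHUNK_SIZE: usize = 1024 * 1024;

fn main() -> Result<()> {
    let data = Bytes::from(vec![7u8; 5 * MAX_CHUNK_SIZE]);
    let (data_map, chunks) = encrypt(data.clone(), MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)?;
    let restored = decrypt_full_set(&data_map, &chunks)?;
    assert_eq!(data, restored);
    Ok(())
}
```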
diff --git a/src/tests.rs b/src/tests.rs
index deaa519e0..b3ae5ae11 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -9,7 +9,7 @@
 use crate::{
     decrypt_full_set, decrypt_range, encrypt, get_chunk_size, get_num_chunks, overlapped_chunks,
     seek_info, test_helpers::random_bytes, DataMap, EncryptedChunk, Error, StreamSelfDecryptor,
-    StreamSelfEncryptor, MIN_ENCRYPTABLE_BYTES,
+    StreamSelfEncryptor,
 };
 use bytes::Bytes;
 use itertools::Itertools;
@@ -20,6 +20,13 @@ use std::{
 };
 use tempfile::tempdir;
 
+/// The maximum size (before compression) of an individual chunk of a file, defined as 1 MiB (1024 * 1024 bytes).
+const MAX_CHUNK_SIZE: usize = 1024 * 1024;
+/// The minimum size (before compression) of an individual chunk of a file, defined as 1 byte.
+const MIN_CHUNK_SIZE: usize = 1;
+
+const MIN_ENCRYPTABLE_BYTES: usize = 3 * MIN_CHUNK_SIZE;
+
 #[test]
 fn test_stream_self_encryptor() -> Result<(), Error> {
     // Create a 10MB temporary file
@@ -34,8 +41,12 @@ fn test_stream_self_encryptor() -> Result<(), Error> {
     create_dir_all(chunk_path.clone())?;
 
     // Encrypt the file using StreamSelfEncryptor
-    let mut encryptor =
-        StreamSelfEncryptor::encrypt_from_file(file_path, Some(chunk_path.clone()))?;
+    let mut encryptor = StreamSelfEncryptor::encrypt_from_file(
+        file_path,
+        Some(chunk_path.clone()),
+        MIN_CHUNK_SIZE,
+        MAX_CHUNK_SIZE,
+    )?;
     let mut encrypted_chunks = Vec::new();
     let mut data_map = None;
     while let Ok((chunk, map)) = encryptor.next_encryption() {
@@ -100,7 +111,7 @@ fn write_and_read() -> Result<(), Error> {
     let file_size = 10_000_000;
     let bytes = random_bytes(file_size);
 
-    let (data_map, encrypted_chunks) = encrypt_chunks(bytes.clone())?;
+    let (data_map, encrypted_chunks) = test_encrypt_chunks(bytes.clone())?;
     let raw_data = decrypt_full_set(&data_map, &encrypted_chunks)?;
 
     compare(bytes, raw_data)
@@ -112,20 +123,20 @@ fn seek_indices() -> Result<(), Error> {
     let pos = 0;
     let len = file_size / 2;
 
-    let info = seek_info(file_size, pos, len);
+    let info = seek_info(file_size, pos, len, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(0, info.relative_pos);
     assert_eq!(0, info.index_range.start);
     assert_eq!(1, info.index_range.end);
 
     let pos = len;
-    let info = seek_info(file_size, pos, len);
+    let info = seek_info(file_size, pos, len, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(512, info.relative_pos);
     assert_eq!(1, info.index_range.start);
     assert_eq!(2, info.index_range.end);
 
-    let info = seek_info(file_size, pos, len + 1);
+    let info = seek_info(file_size, pos, len + 1, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(512, info.relative_pos);
     assert_eq!(1, info.index_range.start);
@@ -140,25 +151,25 @@ fn seek_indices_on_medium_size_file() -> Result<(), Error> {
     let pos = 0;
     let len = 131072;
 
-    let info = seek_info(file_size, pos, len);
+    let info = seek_info(file_size, pos, len, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(0, info.relative_pos);
     assert_eq!(0, info.index_range.start);
     assert_eq!(0, info.index_range.end);
 
-    let info = seek_info(file_size, 131072, len);
+    let info = seek_info(file_size, 131072, len, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(131072, info.relative_pos);
     assert_eq!(0, info.index_range.start);
     assert_eq!(0, info.index_range.end);
 
-    let info = seek_info(file_size, 393216, len);
+    let info = seek_info(file_size, 393216, len, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(70128, info.relative_pos);
     assert_eq!(1, info.index_range.start);
     assert_eq!(1, info.index_range.end);
 
-    let info = seek_info(file_size, 655360, len);
+    let info = seek_info(file_size, 655360, len, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(9184, info.relative_pos);
     assert_eq!(2, info.index_range.start);
@@ -172,42 +183,42 @@ fn seek_indices_on_small_size_file() -> Result<(), Error> {
     let file_size = 1024;
 
     // first byte of index 0
-    let info = seek_info(file_size, 0, 340);
+    let info = seek_info(file_size, 0, 340, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(0, info.relative_pos);
     assert_eq!(0, info.index_range.start);
     assert_eq!(0, info.index_range.end);
 
     // first byte of index 1
-    let info = seek_info(file_size, 341, 340);
+    let info = seek_info(file_size, 341, 340, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(0, info.relative_pos);
     assert_eq!(1, info.index_range.start);
     assert_eq!(1, info.index_range.end);
 
     // first byte of index 2
-    let info = seek_info(file_size, 682, 340);
+    let info = seek_info(file_size, 682, 340, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(0, info.relative_pos);
     assert_eq!(2, info.index_range.start);
     assert_eq!(2, info.index_range.end);
 
     // last byte of index 2
-    let info = seek_info(file_size, file_size - 1, 1);
+    let info = seek_info(file_size, file_size - 1, 1, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(341, info.relative_pos);
     assert_eq!(2, info.index_range.start);
     assert_eq!(2, info.index_range.end);
 
     // overflow - should this error?
-    let info = seek_info(file_size, file_size, 1);
+    let info = seek_info(file_size, file_size, 1, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(1, info.relative_pos);
     assert_eq!(0, info.index_range.start);
     assert_eq!(0, info.index_range.end);
 
     // last byte of index 2 (as 2 remainders in last chunk)
-    let info = seek_info(file_size + 1, file_size, 1);
+    let info = seek_info(file_size + 1, file_size, 1, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     assert_eq!(342, info.relative_pos);
     assert_eq!(2, info.index_range.start);
@@ -220,21 +231,48 @@ fn seek_indices_on_small_size_file() -> Result<(), Error> {
 fn get_chunk_sizes() -> Result<(), Error> {
     let file_size = 969_265;
 
-    assert_eq!(323088, get_chunk_size(file_size, 0));
-    assert_eq!(323088, get_chunk_size(file_size, 1));
-    assert_eq!(323089, get_chunk_size(file_size, 2));
+    assert_eq!(
+        323088,
+        get_chunk_size(file_size, 0, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)
+    );
+    assert_eq!(
+        323088,
+        get_chunk_size(file_size, 1, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)
+    );
+    assert_eq!(
+        323089,
+        get_chunk_size(file_size, 2, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)
+    );
 
     let file_size = 1024;
 
-    assert_eq!(341, get_chunk_size(file_size, 0));
-    assert_eq!(341, get_chunk_size(file_size, 1));
-    assert_eq!(342, get_chunk_size(file_size, 2));
+    assert_eq!(
+        341,
+        get_chunk_size(file_size, 0, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)
+    );
+    assert_eq!(
+        341,
+        get_chunk_size(file_size, 1, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)
+    );
+    assert_eq!(
+        342,
+        get_chunk_size(file_size, 2, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)
+    );
 
     let file_size = 1025;
 
-    assert_eq!(341, get_chunk_size(file_size, 0));
-    assert_eq!(341, get_chunk_size(file_size, 1));
-    assert_eq!(343, get_chunk_size(file_size, 2));
+    assert_eq!(
+        341,
+        get_chunk_size(file_size, 0, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)
+    );
+    assert_eq!(
+        341,
+        get_chunk_size(file_size, 1, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)
+    );
+    assert_eq!(
+        343,
+        get_chunk_size(file_size, 2, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)
+    );
 
     Ok(())
 }
@@ -247,7 +285,7 @@ fn seek_and_join() -> Result<(), Error> {
         for divisor in 2..15 {
             let len = file_size / divisor;
             let data = random_bytes(file_size);
-            let (data_map, encrypted_chunks) = encrypt_chunks(data.clone())?;
+            let (data_map, encrypted_chunks) = test_encrypt_chunks(data.clone())?;
 
             // Read first part
             let read_data_1 = {
@@ -282,7 +320,13 @@ fn seek(
     len: usize,
 ) -> Result<Bytes, Error> {
     let expected_data = bytes.slice(pos..(pos + len));
-    let info = seek_info(data_map.file_size(), pos, len);
+    let info = seek_info(
+        data_map.file_size(),
+        pos,
+        len,
+        MIN_CHUNK_SIZE,
+        MAX_CHUNK_SIZE,
+    );
 
     // select a subset of chunks; the ones covering the bytes we want to read
     let subset: Vec<_> = encrypted_chunks
@@ -314,10 +358,11 @@ fn seek_over_chunk_limit() -> Result<(), Error> {
         let expected_data = bytes.slice(pos..(pos + len));
 
         // the chunks covering the bytes we want to read
-        let (start_index, end_index) = overlapped_chunks(file_size, pos, len);
+        let (start_index, end_index) =
+            overlapped_chunks(file_size, pos, len, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
         // first encrypt the whole file
-        let (data_map, encrypted_chunks) = encrypt_chunks(bytes.clone())?;
+        let (data_map, encrypted_chunks) = test_encrypt_chunks(bytes.clone())?;
 
         // select a subset of chunks; the ones covering the bytes we want to read
         let subset: Vec<_> = encrypted_chunks
@@ -327,7 +372,8 @@ fn seek_over_chunk_limit() -> Result<(), Error> {
             .collect();
 
         // the start position within the first chunk (thus `relative`..)
-        let relative_pos = pos % get_chunk_size(file_size, start_index);
+        let relative_pos =
+            pos % get_chunk_size(file_size, start_index, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
         let read_data = decrypt_range(&data_map, &subset, relative_pos, len)?;
 
         compare(expected_data, read_data)?;
@@ -345,10 +391,11 @@ fn seek_with_length_over_data_size() -> Result<(), Error> {
     let len = bytes.len() - start_pos + 1;
 
     // the chunks covering the bytes we want to read
-    let (start_index, end_index) = overlapped_chunks(file_size, start_pos, len);
+    let (start_index, end_index) =
+        overlapped_chunks(file_size, start_pos, len, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
 
     // first encrypt the whole file
-    let (data_map, encrypted_chunks) = encrypt_chunks(bytes.clone())?;
+    let (data_map, encrypted_chunks) = test_encrypt_chunks(bytes.clone())?;
 
     // select a subset of chunks; the ones covering the bytes we want to read
     let subset: Vec<_> = encrypted_chunks
@@ -380,9 +427,9 @@ fn compare(original: Bytes, result: Bytes) -> Result<(), Error> {
     Ok(())
 }
 
-fn encrypt_chunks(bytes: Bytes) -> Result<(DataMap, Vec<EncryptedChunk>), Error> {
-    let num_chunks = get_num_chunks(bytes.len());
-    let (data_map, encrypted_chunks) = encrypt(bytes)?;
+fn test_encrypt_chunks(bytes: Bytes) -> Result<(DataMap, Vec<EncryptedChunk>), Error> {
+    let num_chunks = get_num_chunks(bytes.len(), MIN_CHUNK_SIZE, MAX_CHUNK_SIZE);
+    let (data_map, encrypted_chunks) = encrypt(bytes, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)?;
 
     assert_eq!(num_chunks, encrypted_chunks.len());
 
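
The small-file seek assertions above follow directly from the 341/341/342 split of a 1024-byte file; for instance, byte 682 is the first byte of chunk 2. A sketch of the same check via the public `seek_info`, assuming `SeekInfo`'s fields are readable as in these unit tests:

```rust
use self_encryption::seek_info;

fn main() {
    // 1024 bytes split as 341/341/342, so position 682 opens chunk 2.
    let info = seek_info(1024, 682, 340, 1, 1024 * 1024);
    assert_eq!(info.relative_pos, 0);
    assert_eq!(info.index_range.start, 2);
    assert_eq!(info.index_range.end, 2);
}
```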
diff --git a/tests/lib.rs b/tests/lib.rs
index b033649ad..f4eeaecb9 100644
--- a/tests/lib.rs
+++ b/tests/lib.rs
@@ -51,9 +51,14 @@
 )]
 
 use bytes::Bytes;
-use self_encryption::{encrypt, ChunkInfo, Result, MAX_CHUNK_SIZE};
+use self_encryption::{encrypt, ChunkInfo, Result};
 use xor_name::XorName;
 
+/// The maximum size (before compression) of an individual chunk of a file, defined as 1 MiB (1024 * 1024 bytes).
+const MAX_CHUNK_SIZE: usize = 1024 * 1024;
+/// The minimum size (before compression) of an individual chunk of a file, defined as 1 byte.
+const MIN_CHUNK_SIZE: usize = 1;
+
 #[tokio::test]
 async fn cross_platform_check() -> Result<()> {
     let content_size: usize = 20 * MAX_CHUNK_SIZE + 100;
@@ -62,258 +67,258 @@ async fn cross_platform_check() -> Result<()> {
         *c = (i % 17) as u8;
     }
 
-    let (data_map, _) = encrypt(Bytes::from(content))?;
+    let (data_map, _) = encrypt(Bytes::from(content), MIN_CHUNK_SIZE, MAX_CHUNK_SIZE)?;
 
     // (NB: this hard-coded ref needs update if algorithm changes)
     let ref_data_map = vec![
         ChunkInfo {
             src_hash: XorName([
-                248, 242, 229, 119, 92, 211, 180, 222, 177, 34, 82, 94, 51, 178, 62, 12, 185, 77,
-                145, 206, 168, 75, 176, 141, 46, 197, 1, 83, 199, 165, 37, 28,
+                219, 177, 84, 234, 189, 172, 82, 64, 169, 100, 5, 56, 3, 43, 142, 126, 51, 235,
+                194, 243, 30, 130, 132, 197, 137, 36, 170, 62, 46, 44, 176, 201,
             ]),
             dst_hash: XorName([
-                160, 57, 64, 193, 147, 235, 173, 54, 53, 206, 248, 12, 40, 147, 119, 107, 154, 21,
-                50, 57, 151, 18, 151, 0, 95, 157, 103, 220, 160, 79, 248, 85,
+                248, 155, 46, 153, 173, 52, 226, 212, 133, 172, 107, 200, 72, 150, 41, 50, 116, 77,
+                85, 92, 67, 168, 25, 56, 93, 61, 209, 194, 65, 172, 227, 130,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                68, 137, 10, 147, 116, 198, 83, 144, 156, 198, 68, 195, 94, 96, 125, 162, 213, 218,
-                179, 255, 177, 143, 232, 48, 99, 204, 118, 246, 67, 243, 190, 96,
+                65, 81, 63, 82, 119, 126, 216, 9, 44, 18, 160, 174, 225, 8, 202, 32, 245, 140, 14,
+                169, 252, 209, 97, 96, 134, 165, 102, 106, 250, 196, 27, 70,
             ]),
             dst_hash: XorName([
-                30, 212, 77, 155, 165, 236, 65, 212, 88, 181, 48, 138, 226, 135, 144, 227, 132,
-                195, 223, 199, 172, 235, 51, 146, 109, 209, 54, 63, 34, 169, 91, 55,
+                42, 62, 224, 152, 136, 214, 91, 160, 125, 249, 229, 115, 81, 220, 213, 34, 29, 173,
+                235, 99, 67, 210, 234, 160, 79, 254, 208, 174, 117, 127, 205, 36,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                227, 224, 98, 89, 131, 120, 169, 214, 165, 171, 189, 187, 15, 7, 80, 133, 16, 63,
-                74, 197, 17, 127, 22, 137, 171, 117, 34, 195, 186, 185, 51, 2,
+                80, 237, 26, 5, 69, 59, 53, 210, 44, 236, 191, 69, 92, 39, 113, 124, 206, 169, 5,
+                126, 189, 2, 146, 80, 68, 186, 142, 219, 37, 170, 135, 61,
             ]),
             dst_hash: XorName([
-                166, 232, 206, 232, 6, 23, 232, 20, 105, 230, 249, 86, 35, 117, 181, 65, 192, 245,
-                65, 130, 238, 50, 188, 82, 193, 115, 172, 113, 237, 33, 248, 102,
+                200, 203, 81, 29, 131, 156, 60, 140, 166, 254, 103, 60, 212, 223, 22, 41, 85, 192,
+                140, 154, 33, 34, 188, 94, 84, 101, 62, 254, 164, 81, 209, 154,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                48, 157, 209, 23, 176, 114, 223, 155, 203, 103, 11, 52, 211, 111, 167, 33, 13, 77,
-                71, 6, 188, 152, 179, 76, 155, 59, 4, 92, 3, 9, 67, 227,
+                168, 223, 46, 4, 138, 115, 226, 112, 179, 67, 36, 186, 170, 199, 21, 195, 41, 17,
+                99, 227, 30, 226, 46, 42, 78, 210, 189, 107, 185, 167, 32, 74,
             ]),
             dst_hash: XorName([
-                156, 144, 25, 237, 84, 230, 81, 90, 205, 79, 203, 161, 113, 141, 59, 138, 117, 157,
-                50, 9, 46, 76, 68, 64, 254, 250, 59, 11, 27, 134, 114, 175,
+                42, 138, 132, 73, 12, 78, 47, 136, 153, 177, 25, 247, 202, 227, 145, 31, 193, 9,
+                33, 63, 89, 160, 240, 51, 189, 72, 94, 193, 75, 144, 58, 233,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                53, 238, 190, 32, 115, 7, 143, 124, 163, 186, 189, 137, 50, 118, 2, 232, 57, 223,
-                124, 10, 239, 109, 31, 4, 77, 67, 150, 92, 207, 26, 53, 0,
+                41, 137, 66, 160, 103, 223, 72, 133, 180, 83, 8, 139, 180, 108, 20, 196, 106, 59,
+                73, 6, 160, 187, 8, 16, 93, 157, 142, 155, 85, 118, 239, 192,
             ]),
             dst_hash: XorName([
-                146, 0, 118, 252, 165, 0, 60, 204, 12, 126, 121, 68, 193, 237, 32, 58, 78, 125,
-                110, 49, 215, 140, 37, 90, 141, 80, 8, 205, 206, 94, 115, 91,
+                220, 162, 48, 182, 212, 178, 139, 207, 231, 191, 209, 53, 187, 22, 66, 221, 242,
+                66, 220, 19, 96, 201, 137, 25, 101, 184, 1, 178, 80, 204, 253, 179,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                208, 239, 194, 163, 28, 94, 172, 182, 163, 69, 43, 242, 76, 157, 70, 10, 49, 228,
-                153, 45, 154, 149, 111, 131, 132, 48, 67, 149, 198, 188, 147, 187,
+                48, 226, 1, 203, 69, 49, 140, 152, 90, 232, 209, 42, 178, 241, 60, 11, 24, 2, 196,
+                26, 14, 229, 127, 68, 119, 116, 135, 195, 248, 217, 227, 78,
             ]),
             dst_hash: XorName([
-                138, 105, 198, 150, 73, 205, 0, 204, 67, 235, 102, 199, 152, 47, 215, 34, 230, 6,
-                211, 6, 72, 38, 102, 74, 161, 22, 201, 229, 73, 179, 241, 183,
+                168, 232, 79, 142, 149, 51, 198, 62, 224, 177, 45, 203, 243, 51, 12, 23, 104, 80,
+                174, 5, 246, 234, 54, 70, 58, 11, 100, 117, 60, 67, 65, 64,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                127, 180, 215, 240, 32, 8, 203, 232, 31, 47, 232, 156, 181, 145, 96, 189, 228, 127,
-                8, 243, 144, 169, 251, 212, 128, 243, 90, 159, 209, 101, 22, 26,
+                92, 201, 208, 153, 241, 202, 111, 28, 118, 47, 47, 32, 121, 48, 203, 48, 230, 107,
+                102, 195, 184, 106, 245, 173, 157, 171, 139, 50, 28, 56, 80, 225,
             ]),
             dst_hash: XorName([
-                163, 215, 111, 245, 3, 80, 107, 218, 200, 254, 69, 43, 230, 168, 85, 162, 65, 230,
-                46, 203, 49, 1, 99, 25, 102, 218, 105, 129, 215, 124, 132, 104,
+                199, 114, 193, 185, 26, 6, 140, 71, 142, 73, 45, 198, 110, 126, 232, 182, 226, 85,
+                137, 210, 69, 24, 139, 163, 236, 47, 155, 130, 43, 229, 148, 172,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                79, 17, 134, 221, 54, 248, 197, 215, 92, 180, 23, 186, 143, 71, 41, 138, 151, 174,
-                241, 128, 212, 7, 63, 136, 61, 132, 177, 198, 129, 20, 168, 87,
+                50, 8, 67, 204, 158, 4, 255, 227, 50, 18, 176, 150, 249, 233, 188, 72, 86, 217, 61,
+                100, 161, 131, 124, 26, 245, 166, 44, 16, 125, 230, 153, 190,
             ]),
             dst_hash: XorName([
-                207, 109, 164, 0, 68, 241, 197, 210, 209, 143, 239, 76, 198, 12, 225, 162, 159, 37,
-                175, 0, 159, 239, 160, 178, 18, 75, 206, 126, 208, 0, 142, 213,
+                151, 255, 185, 86, 239, 216, 199, 233, 149, 16, 247, 122, 156, 66, 178, 95, 32,
+                219, 218, 228, 63, 23, 34, 207, 140, 20, 75, 2, 225, 3, 243, 193,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                119, 172, 206, 200, 245, 153, 32, 24, 14, 70, 123, 251, 75, 66, 0, 50, 44, 145,
-                126, 243, 42, 39, 232, 208, 117, 190, 105, 120, 169, 193, 192, 228,
+                132, 6, 224, 90, 168, 59, 66, 114, 199, 67, 140, 171, 226, 213, 141, 21, 32, 143,
+                4, 192, 143, 64, 253, 216, 200, 76, 162, 121, 130, 169, 89, 229,
             ]),
             dst_hash: XorName([
-                243, 107, 119, 61, 216, 70, 121, 241, 109, 84, 231, 232, 220, 177, 230, 158, 168,
-                204, 215, 19, 185, 45, 178, 225, 103, 198, 119, 238, 144, 175, 38, 147,
+                126, 221, 146, 123, 252, 37, 250, 160, 75, 182, 9, 39, 80, 87, 93, 229, 173, 203,
+                31, 203, 208, 190, 226, 111, 87, 78, 246, 141, 85, 237, 82, 87,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                52, 10, 82, 208, 199, 46, 246, 175, 107, 245, 168, 201, 212, 133, 79, 187, 24, 226,
-                10, 241, 43, 148, 84, 103, 153, 32, 66, 36, 146, 87, 60, 37,
+                238, 37, 229, 233, 96, 228, 150, 41, 89, 130, 145, 198, 50, 165, 207, 108, 15, 167,
+                122, 116, 209, 223, 68, 203, 24, 169, 74, 93, 44, 170, 24, 233,
             ]),
             dst_hash: XorName([
-                77, 167, 37, 235, 4, 230, 211, 221, 27, 211, 207, 32, 23, 202, 118, 100, 8, 199,
-                67, 28, 195, 87, 141, 11, 24, 138, 34, 233, 63, 68, 123, 236,
+                109, 123, 118, 55, 228, 175, 144, 231, 103, 223, 51, 185, 146, 37, 47, 46, 185,
+                208, 140, 202, 231, 18, 70, 47, 48, 245, 254, 93, 185, 120, 17, 143,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                234, 15, 144, 252, 29, 7, 78, 150, 66, 143, 174, 179, 66, 68, 42, 120, 8, 164, 46,
-                52, 160, 207, 208, 231, 27, 130, 21, 85, 37, 208, 47, 244,
+                70, 131, 32, 243, 131, 152, 215, 108, 51, 231, 184, 113, 117, 8, 164, 174, 151,
+                152, 232, 29, 11, 58, 104, 46, 55, 81, 249, 207, 213, 77, 151, 237,
             ]),
             dst_hash: XorName([
-                207, 20, 30, 153, 250, 15, 151, 131, 100, 211, 67, 43, 61, 243, 191, 134, 242, 134,
-                57, 183, 213, 94, 7, 240, 252, 121, 250, 158, 97, 246, 149, 112,
+                85, 8, 26, 126, 9, 32, 28, 70, 112, 134, 226, 170, 46, 25, 115, 222, 131, 175, 117,
+                141, 96, 45, 201, 108, 148, 142, 12, 27, 184, 109, 44, 70,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                19, 159, 237, 131, 18, 84, 26, 161, 106, 99, 30, 134, 241, 30, 186, 36, 119, 74,
-                59, 254, 246, 37, 96, 24, 200, 211, 236, 79, 53, 174, 252, 32,
+                50, 175, 184, 213, 76, 189, 138, 227, 190, 200, 141, 26, 235, 78, 173, 171, 137,
+                95, 43, 119, 8, 145, 253, 102, 189, 117, 247, 89, 246, 214, 129, 182,
             ]),
             dst_hash: XorName([
-                97, 110, 47, 182, 255, 22, 193, 218, 28, 21, 118, 43, 163, 189, 60, 14, 48, 88,
-                197, 236, 146, 105, 40, 25, 53, 0, 90, 168, 159, 115, 143, 168,
+                240, 135, 94, 165, 73, 209, 176, 218, 159, 232, 76, 254, 32, 84, 238, 245, 226, 2,
+                227, 194, 95, 48, 125, 227, 42, 118, 85, 160, 39, 83, 2, 124,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                185, 120, 111, 228, 41, 75, 228, 6, 222, 23, 163, 157, 32, 254, 96, 15, 210, 204,
-                1, 147, 238, 121, 11, 33, 57, 5, 45, 54, 79, 237, 135, 139,
+                160, 175, 104, 136, 24, 18, 192, 185, 147, 31, 227, 81, 212, 143, 214, 63, 52, 62,
+                218, 48, 35, 220, 0, 184, 62, 137, 152, 35, 144, 149, 229, 86,
             ]),
             dst_hash: XorName([
-                52, 40, 33, 121, 186, 17, 252, 107, 128, 67, 227, 187, 86, 57, 142, 200, 119, 201,
-                141, 120, 246, 70, 169, 99, 84, 208, 167, 233, 13, 125, 224, 168,
+                198, 136, 45, 128, 93, 197, 174, 93, 27, 19, 218, 211, 184, 14, 214, 97, 182, 149,
+                36, 161, 66, 19, 118, 105, 240, 100, 104, 1, 192, 87, 236, 132,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                183, 193, 139, 225, 128, 162, 132, 138, 184, 75, 153, 229, 203, 147, 49, 174, 96,
-                73, 135, 218, 79, 235, 79, 135, 162, 223, 248, 58, 82, 35, 196, 153,
+                158, 201, 252, 234, 200, 107, 72, 126, 69, 234, 165, 203, 122, 90, 36, 46, 82, 183,
+                61, 84, 128, 62, 118, 112, 222, 74, 164, 198, 20, 217, 96, 143,
             ]),
             dst_hash: XorName([
-                129, 161, 112, 120, 153, 202, 222, 238, 92, 86, 180, 251, 231, 79, 103, 59, 158,
-                156, 53, 126, 49, 0, 223, 72, 66, 83, 34, 154, 249, 74, 147, 147,
+                187, 81, 209, 66, 106, 200, 142, 130, 197, 102, 170, 211, 120, 197, 65, 210, 229,
+                57, 27, 231, 120, 217, 180, 231, 34, 155, 32, 41, 78, 74, 193, 115,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                53, 1, 114, 234, 6, 112, 255, 8, 148, 43, 130, 202, 155, 114, 99, 246, 81, 204, 77,
-                60, 119, 237, 100, 198, 159, 144, 203, 60, 157, 246, 205, 22,
+                208, 35, 197, 158, 225, 12, 21, 130, 132, 59, 227, 65, 238, 178, 232, 169, 186, 48,
+                27, 106, 153, 46, 168, 196, 199, 70, 105, 236, 161, 167, 109, 43,
             ]),
             dst_hash: XorName([
-                235, 170, 170, 154, 173, 162, 71, 155, 236, 208, 97, 41, 167, 62, 209, 5, 255, 65,
-                75, 239, 235, 133, 161, 30, 152, 3, 221, 99, 140, 207, 31, 64,
+                145, 170, 97, 191, 204, 99, 185, 85, 4, 199, 204, 34, 104, 219, 97, 0, 184, 167,
+                32, 173, 83, 249, 254, 42, 251, 10, 168, 231, 211, 67, 70, 120,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                24, 147, 188, 118, 102, 72, 207, 163, 202, 63, 40, 237, 169, 100, 8, 190, 23, 67,
-                243, 179, 196, 232, 214, 36, 76, 83, 220, 76, 241, 238, 107, 23,
+                191, 47, 52, 224, 196, 196, 113, 118, 243, 7, 35, 213, 174, 114, 228, 229, 165,
+                182, 217, 102, 55, 16, 174, 159, 197, 166, 75, 192, 182, 186, 173, 1,
             ]),
             dst_hash: XorName([
-                115, 143, 30, 6, 239, 108, 101, 10, 213, 216, 75, 254, 13, 110, 10, 245, 50, 189,
-                83, 39, 63, 72, 11, 160, 107, 139, 123, 181, 64, 233, 190, 200,
+                130, 233, 29, 245, 160, 80, 144, 117, 139, 251, 91, 240, 232, 173, 233, 168, 61,
+                138, 88, 0, 92, 133, 16, 118, 29, 118, 131, 218, 42, 197, 132, 54,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                100, 94, 19, 195, 150, 133, 161, 134, 150, 106, 44, 152, 201, 113, 171, 176, 147,
-                244, 165, 93, 46, 227, 247, 118, 188, 29, 130, 19, 130, 137, 244, 15,
+                116, 242, 114, 183, 140, 120, 52, 135, 104, 100, 112, 208, 10, 8, 99, 108, 78, 75,
+                84, 111, 100, 57, 241, 143, 117, 172, 80, 19, 43, 142, 225, 227,
             ]),
             dst_hash: XorName([
-                120, 86, 200, 233, 111, 96, 122, 72, 234, 77, 181, 205, 248, 56, 175, 55, 124, 174,
-                152, 163, 125, 67, 25, 33, 90, 151, 57, 103, 27, 123, 100, 148,
+                0, 52, 220, 168, 128, 29, 228, 70, 0, 29, 73, 244, 83, 7, 171, 237, 31, 236, 231,
+                24, 148, 14, 100, 16, 117, 82, 41, 11, 216, 126, 209, 127,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                248, 242, 229, 119, 92, 211, 180, 222, 177, 34, 82, 94, 51, 178, 62, 12, 185, 77,
-                145, 206, 168, 75, 176, 141, 46, 197, 1, 83, 199, 165, 37, 28,
+                219, 177, 84, 234, 189, 172, 82, 64, 169, 100, 5, 56, 3, 43, 142, 126, 51, 235,
+                194, 243, 30, 130, 132, 197, 137, 36, 170, 62, 46, 44, 176, 201,
             ]),
             dst_hash: XorName([
-                148, 17, 25, 147, 128, 108, 212, 70, 12, 32, 68, 96, 192, 215, 241, 123, 162, 224,
-                223, 52, 230, 27, 100, 122, 97, 85, 148, 53, 103, 230, 21, 11,
+                77, 246, 174, 53, 36, 156, 19, 157, 46, 142, 60, 60, 122, 133, 52, 118, 73, 80, 40,
+                205, 174, 231, 211, 110, 38, 8, 189, 206, 102, 252, 166, 34,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                68, 137, 10, 147, 116, 198, 83, 144, 156, 198, 68, 195, 94, 96, 125, 162, 213, 218,
-                179, 255, 177, 143, 232, 48, 99, 204, 118, 246, 67, 243, 190, 96,
+                65, 81, 63, 82, 119, 126, 216, 9, 44, 18, 160, 174, 225, 8, 202, 32, 245, 140, 14,
+                169, 252, 209, 97, 96, 134, 165, 102, 106, 250, 196, 27, 70,
             ]),
             dst_hash: XorName([
-                30, 212, 77, 155, 165, 236, 65, 212, 88, 181, 48, 138, 226, 135, 144, 227, 132,
-                195, 223, 199, 172, 235, 51, 146, 109, 209, 54, 63, 34, 169, 91, 55,
+                42, 62, 224, 152, 136, 214, 91, 160, 125, 249, 229, 115, 81, 220, 213, 34, 29, 173,
+                235, 99, 67, 210, 234, 160, 79, 254, 208, 174, 117, 127, 205, 36,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                227, 224, 98, 89, 131, 120, 169, 214, 165, 171, 189, 187, 15, 7, 80, 133, 16, 63,
-                74, 197, 17, 127, 22, 137, 171, 117, 34, 195, 186, 185, 51, 2,
+                80, 237, 26, 5, 69, 59, 53, 210, 44, 236, 191, 69, 92, 39, 113, 124, 206, 169, 5,
+                126, 189, 2, 146, 80, 68, 186, 142, 219, 37, 170, 135, 61,
             ]),
             dst_hash: XorName([
-                166, 232, 206, 232, 6, 23, 232, 20, 105, 230, 249, 86, 35, 117, 181, 65, 192, 245,
-                65, 130, 238, 50, 188, 82, 193, 115, 172, 113, 237, 33, 248, 102,
+                200, 203, 81, 29, 131, 156, 60, 140, 166, 254, 103, 60, 212, 223, 22, 41, 85, 192,
+                140, 154, 33, 34, 188, 94, 84, 101, 62, 254, 164, 81, 209, 154,
             ]),
             index: 0,
             src_size: 0,
         },
         ChunkInfo {
             src_hash: XorName([
-                199, 77, 9, 166, 29, 63, 254, 6, 165, 71, 110, 151, 121, 199, 60, 144, 197, 6, 92,
-                182, 237, 202, 223, 171, 20, 80, 193, 237, 148, 96, 190, 70,
+                176, 37, 236, 132, 229, 46, 239, 66, 127, 19, 235, 251, 254, 140, 231, 120, 170,
+                173, 169, 2, 98, 159, 72, 160, 215, 103, 243, 7, 179, 63, 61, 173,
             ]),
             dst_hash: XorName([
-                221, 131, 122, 148, 84, 180, 72, 155, 240, 84, 4, 189, 156, 65, 164, 204, 215, 198,
-                118, 227, 41, 95, 185, 117, 152, 128, 119, 205, 173, 180, 155, 86,
+                160, 38, 187, 68, 9, 245, 147, 175, 244, 167, 195, 133, 79, 231, 89, 53, 165, 222,
+                24, 162, 83, 158, 227, 193, 103, 232, 230, 209, 244, 58, 44, 208,
             ]),
             index: 0,
             src_size: 0,