diff --git a/Cargo.lock b/Cargo.lock index 56d5a6205..12c0395d1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -52,6 +52,12 @@ dependencies = [ "libc", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + [[package]] name = "ansi_colours" version = "1.2.2" @@ -322,6 +328,7 @@ dependencies = [ "bitwarden-api-identity", "cbc", "chrono", + "criterion", "getrandom 0.2.10", "hkdf", "hmac", @@ -593,6 +600,12 @@ dependencies = [ "thiserror", ] +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + [[package]] name = "cbc" version = "0.1.2" @@ -630,6 +643,33 @@ dependencies = [ "winapi", ] +[[package]] +name = "ciborium" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" + +[[package]] +name = "ciborium-ll" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" +dependencies = [ + "ciborium-io", + "half", +] + [[package]] name = "cipher" version = "0.4.4" @@ -849,6 +889,66 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "criterion" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "criterion-plot", + "is-terminal", + "itertools 0.10.5", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +dependencies = [ + "autocfg", + "cfg-if", + "crossbeam-utils", + "memoffset", + "scopeguard", +] + [[package]] name = "crossbeam-utils" version = "0.8.16" @@ -1404,6 +1504,12 @@ dependencies = [ "tracing", ] +[[package]] +name = "half" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" + [[package]] name = "hashbrown" version = "0.12.3" @@ -1674,6 +1780,15 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "616cde7c720bb2bb5824a224687d8f77bfd38922027f01d825cd7453be5099fb" +[[package]] +name 
= "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.11.0" @@ -2034,6 +2149,12 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "oorandom" +version = "11.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" + [[package]] name = "openssl" version = "0.10.56" @@ -2238,6 +2359,34 @@ dependencies = [ "time", ] +[[package]] +name = "plotters" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" + +[[package]] +name = "plotters-svg" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" +dependencies = [ + "plotters-backend", +] + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -2438,6 +2587,26 @@ dependencies = [ "rand_core 0.5.1", ] +[[package]] +name = "rayon" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -2693,7 +2862,7 @@ dependencies = [ "bitwarden", "bitwarden-json", "bitwarden-uniffi", - "itertools", + "itertools 0.11.0", "schemars", "serde_json", ] @@ -3159,6 +3328,16 @@ dependencies = [ "time-core", ] +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "tinyvec" version = "1.6.0" diff --git a/crates/bitwarden/Cargo.toml b/crates/bitwarden/Cargo.toml index 345b1f9cb..7f826abc0 100644 --- a/crates/bitwarden/Cargo.toml +++ b/crates/bitwarden/Cargo.toml @@ -59,5 +59,10 @@ bitwarden-api-identity = { path = "../bitwarden-api-identity", version = "=0.2.1 bitwarden-api-api = { path = "../bitwarden-api-api", version = "=0.2.1" } [dev-dependencies] +criterion = { version = "0.5.1", features = ["html_reports"] } tokio = { version = "1.28.2", features = ["rt", "macros"] } wiremock = "0.5.18" + +[[bench]] +name = "chunked_decryption" +harness = false diff --git a/crates/bitwarden/benches/chunked_decryption.rs b/crates/bitwarden/benches/chunked_decryption.rs new file mode 100644 index 000000000..c3a1c6eab --- /dev/null +++ b/crates/bitwarden/benches/chunked_decryption.rs @@ -0,0 +1,67 @@ +use std::io::Write; + +use bitwarden::crypto::{encrypt_aes256, ChunkedDecryptor, SymmetricCryptoKey}; +use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; 
+use rand::RngCore; + +struct SizeFmt(usize); +impl std::fmt::Display for SizeFmt { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + const SUFFIXES: [&str; 5] = ["B", "KB", "MB", "GB", "TB"]; + let mut size = self.0 as f64; + for suffix in SUFFIXES { + if size < 1024.0 { + return write!(f, "{:.1}{}", size, suffix); + } + size /= 1024.0; + } + write!(f, "{}", self.0) + } +} + +pub fn criterion_benchmark(c: &mut Criterion) { + let mut group = c.benchmark_group("decryption"); + + for size in [100 * 1024, 15 * 1024 * 1024, 200 * 1024 * 1024] { + group.throughput(criterion::Throughput::Bytes(size as u64)); + if size > 1024 * 1024 { + group.sample_size(20); + } + + let mut initial_buf = Vec::with_capacity(size); + initial_buf.resize(size, 0); + rand::thread_rng().fill_bytes(&mut initial_buf[..size]); + let key: SymmetricCryptoKey = SymmetricCryptoKey::generate("test"); + let enc_str = encrypt_aes256(&initial_buf, key.mac_key, key.key).unwrap(); + let enc_buf = enc_str.to_buffer().unwrap(); + + group.bench_with_input( + BenchmarkId::new("decrypt_with_key", SizeFmt(size)), + &size, + |b, _size| b.iter(|| black_box(enc_str.decrypt_with_key(&key).unwrap())), + ); + + for chunk_size in [64, 2048, 8192] { + group.bench_with_input( + BenchmarkId::new(format!("ChunkedDecryptor[{chunk_size}]"), SizeFmt(size)), + &size, + |b, _size| { + b.iter(|| { + let mut decrypted_buf = Vec::with_capacity(size); + let mut cd = ChunkedDecryptor::new(&key, &mut decrypted_buf); + + for chunk in enc_buf.chunks(chunk_size) { + cd.write_all(chunk).unwrap(); + } + cd.finalize().unwrap(); + + // + }) + }, + ); + } + } +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); diff --git a/crates/bitwarden/src/crypto/chunked_decryptor.rs b/crates/bitwarden/src/crypto/chunked_decryptor.rs index 79adaf970..edd3ba8ed 100644 --- a/crates/bitwarden/src/crypto/chunked_decryptor.rs +++ b/crates/bitwarden/src/crypto/chunked_decryptor.rs @@ -1,3 +1,5 @@ +use std::io::Write; + use aes::{ cipher::{generic_array::GenericArray, Unsigned}, Aes256, @@ -10,7 +12,6 @@ use crate::{ error::{EncStringParseError, Error, Result}, }; -#[cfg(feature = "mobile")] use { crate::error::CryptoError, aes::{ @@ -41,57 +42,67 @@ pub enum EncryptionType { mac: [u8; 32], hmac: PbkdfSha256Hmac, decryptor: Decryptor, + + // Buffer for storing the last block from the chunk, + // so we can unpad it when hitting the end of the stream + last_block: [u8; 16], + last_block_filled: bool, }, } -// To avoid issues, we need to make sure this is bigger or equal than all the ciphers block sizes -#[cfg(feature = "mobile")] -const MAX_BLOCK_SIZE: usize = 16; - -#[cfg(feature = "mobile")] -pub struct ChunkedDecryptor { +struct ChunkedDecryptorConfigured { enc_type: EncryptionType, // Block size of the cipher used, the data passed to the decryptor must be exactly this size block_size: usize, +} - // Buffer for storing the last block from the previous chunk, either partially or in full - buf: [u8; MAX_BLOCK_SIZE], - buf_len: usize, +// The second variant is big enough to trigger the lint, but it's not +// a problem as the first variant is only used briefly during setup +#[allow(clippy::large_enum_variant)] +enum ChunkedDecryptorState<'a> { + Initial(&'a SymmetricCryptoKey), + Configured(ChunkedDecryptorConfigured), } -#[cfg(feature = "mobile")] -impl ChunkedDecryptor { - /// Creates a new decryptor for a chunked cipher string - /// Important: The first chunk must contain the encryption type, MAC and IV (which are contained in the 
first bytes /// of the encrypted blob) plus at least one block, so make sure that the initial chunk is at least 65 bytes long - pub fn new(key: SymmetricCryptoKey, initial_chunk: &[u8]) -> Result<(Self, Vec<u8>)> { - let remaining_chunk; - let block_size; +pub struct ChunkedDecryptor<'a, Output: Write> { + state: ChunkedDecryptorState<'a>, + output: Output, + buffer: Vec<u8>, +} + +const INTERNAL_BUFFER_SIZE: usize = 4096; +const MIN_UNBUFFERED_SIZE: usize = 64; +impl<'a, Output: Write> ChunkedDecryptor<'a, Output> { + pub fn new(key: &'a SymmetricCryptoKey, output: Output) -> Self { + Self { + state: ChunkedDecryptorState::Initial(key), + output, + buffer: Vec::with_capacity(INTERNAL_BUFFER_SIZE), + } + } + + fn read_initial( + key: &SymmetricCryptoKey, + buf: &[u8], + ) -> Result<(Option<ChunkedDecryptorConfigured>, usize)> { // The first byte of the message indicates the encryption type - let Some(&enc_type_num) = initial_chunk.first() else { - return Err(EncStringParseError::InvalidType { - enc_type: "Missing".to_string(), - parts: 1, - } - .into()); + let Some(&enc_type_num) = buf.first() else { + return Ok((None, 0)); }; - let enc_type = match enc_type_num { + let (enc_type, block_size, bytes_read) = match enc_type_num { 0 => unimplemented!(), 1 | 2 => { - if initial_chunk.len() < 49 { - return Err(EncStringParseError::InvalidLength { - expected: 49, - got: initial_chunk.len(), - } - .into()); + const HEADER_SIZE: usize = 49; + + if buf.len() < HEADER_SIZE { + return Ok((None, 0)); } // Extract IV and MAC from the initial chunk, and separate the rest of the chunk - let iv: [u8; 16] = initial_chunk[1..17].try_into().unwrap(); - let mac: [u8; 32] = initial_chunk[17..49].try_into().unwrap(); - remaining_chunk = &initial_chunk[49..]; + let iv: [u8; 16] = buf[1..17].try_into().unwrap(); + let mac: [u8; 32] = buf[17..HEADER_SIZE].try_into().unwrap(); let Some(mac_key) = &key.mac_key else { return Err(CryptoError::InvalidMac.into()); }; @@ -102,20 +113,23 @@ impl ChunkedDecryptor { hmac.update(&iv); match enc_type_num { - 1 => { - block_size = <Decryptor<Aes128> as BlockSizeUser>::BlockSize::USIZE; - EncryptionType::AesCbc128_HmacSha256_B64 { iv, mac, hmac } - } - 2 => { - let decryptor = Decryptor::new(&key.key, GenericArray::from_slice(&iv)); - block_size = <Decryptor<Aes256> as BlockSizeUser>::BlockSize::USIZE; + 1 => ( + EncryptionType::AesCbc128_HmacSha256_B64 { iv, mac, hmac }, + <Decryptor<Aes128> as BlockSizeUser>::BlockSize::USIZE, + HEADER_SIZE, + ), + 2 => ( EncryptionType::AesCbc256_HmacSha256_B64 { iv, mac, hmac, - decryptor, - } - } + decryptor: Decryptor::new(&key.key, (&iv).into()), + last_block: [0u8; 16], + last_block_filled: false, + }, + <Decryptor<Aes256> as BlockSizeUser>::BlockSize::USIZE, + HEADER_SIZE, + ), _ => unreachable!(), } } @@ -128,106 +142,103 @@ impl ChunkedDecryptor { } }; - let mut decryptor = Self { - enc_type, - block_size, - buf: [0u8; MAX_BLOCK_SIZE], - buf_len: 0, - }; - // Process the rest of the initial chunk - let decrypted_initial_chunk = decryptor.decrypt_chunk(remaining_chunk)?; - Ok((decryptor, decrypted_initial_chunk)) + Ok(( + Some(ChunkedDecryptorConfigured { + enc_type, + block_size, + }), + bytes_read, + )) } - /// Decrypts a chunk of data, the chunk size must greater than the cipher's block size (16 bytes) - pub fn decrypt_chunk(&mut self, chunk: &[u8]) -> Result<Vec<u8>> { - match &mut self.enc_type { + fn read_blocks( + state: &mut ChunkedDecryptorConfigured, + output: &mut Output, + buf: &mut [u8], + ) -> Result<usize> { + match &mut state.enc_type { EncryptionType::AesCbc256_B64 { .. } => unimplemented!(), EncryptionType::AesCbc128_HmacSha256_B64 { ..
} => unimplemented!(), EncryptionType::AesCbc256_HmacSha256_B64 { - hmac, decryptor, .. + hmac, + decryptor, + last_block, + last_block_filled, + .. } => { - // Only work with chunks larger than the block size - if chunk.len() < self.block_size { - return Err(Error::Internal("Chunk size too small")); + // If we got less than a block, we need to wait for more data + if buf.len() < state.block_size { + return Ok(0); } - // Update HMAC, this doesn't care about block sizes, so just pass the whole chunk - hmac.update(chunk); + // Make sure we only process full blocks + // We decrypt one block less than we have so we can remove the padding in finalize + let bytes_to_process = buf.len() - (buf.len() % state.block_size); + let bytes_to_decrypt = bytes_to_process - state.block_size; - // Preallocate the result vector based on the chunk size plus an extra block to account for partial blocks - let mut result = Vec::with_capacity(chunk.len() + self.block_size); - - let mut process_block = |block: &[u8]| { - debug_assert_eq!(block.len(), self.block_size); - - let mut block = GenericArray::clone_from_slice(block); - decryptor.decrypt_block_mut(&mut block); - result.extend_from_slice(&block); - }; + // Update HMAC value for all the processed bytes + hmac.update(&buf[..bytes_to_process]); - let skip_initial_bytes = if self.buf_len > 0 { - // Process partial block if there is one. This will also process a full block if buf_len == block_size - let bytes_to_complete_partial = self.block_size - self.buf_len; - - // Fill up the partial block with the first bytes of the chunk - self.buf[self.buf_len..self.block_size] - .copy_from_slice(&chunk[0..bytes_to_complete_partial]); - - // Process the now filled partial block - process_block(&self.buf[..self.block_size]); - - bytes_to_complete_partial + // Process the last block from the previous call, as we are not at the end of the stream + if *last_block_filled { + decryptor.decrypt_block_mut(last_block.into()); + output.write_all(last_block)?; } else { - 0 - }; - - // Check how many bytes we need to process the previous partial data and the current chunk - let full_chunk_size = chunk.len() - skip_initial_bytes; - let mut remainder_bytes = full_chunk_size % self.block_size; - - // Make sure we leave at least one block unprocessed, to remove the padding later - if remainder_bytes == 0 { - remainder_bytes = self.block_size; + *last_block_filled = true; } - let chunk_to_process = &chunk[skip_initial_bytes..(chunk.len() - remainder_bytes)]; + // Store the last block for later, in case this is the end of the stream + last_block + .copy_from_slice(&buf[bytes_to_decrypt..bytes_to_decrypt + state.block_size]); - for block in chunk_to_process.chunks_exact(self.block_size) { - process_block(block) + // Split the buffer into blocks and decrypt them in place + for block in buf[..bytes_to_decrypt].chunks_exact_mut(state.block_size) { + decryptor.decrypt_block_mut(block.into()); } + // Write all the decrypted blocks at once + output.write_all(&buf[..bytes_to_decrypt])?; - self.buf[0..remainder_bytes] - .copy_from_slice(&chunk[chunk.len() - remainder_bytes..]); - self.buf_len = remainder_bytes; - - Ok(result) + Ok(bytes_to_process) } } } - pub fn finalize(mut self) -> Result<Vec<u8>> { + pub fn finalize(mut self) -> Result<()> { + // Flush internal buffer before processing last block + self.flush()?; + + let ChunkedDecryptorState::Configured(mut state) = self.state else { + return Err(Error::Internal("ChunkedDecryptor has not been written to")); + }; + // Process last block separately and
handle it's padding - let last_buf = match &mut self.enc_type { + match &mut state.enc_type { EncryptionType::AesCbc256_B64 { .. } => unimplemented!(), EncryptionType::AesCbc128_HmacSha256_B64 { .. } => unimplemented!(), - EncryptionType::AesCbc256_HmacSha256_B64 { decryptor, .. } => { - if self.buf_len == self.block_size { - let mut block = GenericArray::clone_from_slice(&self.buf[..self.block_size]); - decryptor.decrypt_block_mut(&mut block); - - Pkcs7::unpad(&block).unwrap().to_vec() - } else if self.buf_len == 0 { - return Err(Error::Internal("Missing block at the end of the data")); + EncryptionType::AesCbc256_HmacSha256_B64 { + decryptor, + last_block, + last_block_filled, + .. + } => { + if *last_block_filled { + let block: &mut GenericArray<_, _> = last_block.into(); + decryptor.decrypt_block_mut(block); + + let Ok(block_unpadded) = Pkcs7::unpad(block) else { + return Err(Error::Internal("Invalid padding")); + }; + self.output.write_all(block_unpadded)?; + self.output.flush()?; } else { - return Err(Error::Internal("Partial block at the end of the data")); + return Err(Error::Internal("Invalid block at the end of the data")); } } }; // Validate MAC - match self.enc_type { - EncryptionType::AesCbc256_B64 { iv: _ } => unimplemented!(), + match state.enc_type { + EncryptionType::AesCbc256_B64 { iv: _ } => { /* No HMAC, nothing to do */ } EncryptionType::AesCbc128_HmacSha256_B64 { mac, hmac, .. } | EncryptionType::AesCbc256_HmacSha256_B64 { mac, hmac, .. } => { if hmac.finalize() != CtOutput::new(mac.into()) { @@ -236,46 +247,103 @@ impl ChunkedDecryptor { } } - Ok(last_buf) + Ok(()) + } +} + +impl<'a, Output: Write> Write for ChunkedDecryptor<'a, Output> { + fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { + // Insert received data into the buffer + self.buffer.extend_from_slice(buf); + if self.buffer.is_empty() { + return Ok(0); + } + + // If we have a small amount of bytes and enough space, copy them to the internal buffer and return + let incoming_buf_len = buf.len(); + if incoming_buf_len > 0 && self.buffer.len() < MIN_UNBUFFERED_SIZE { + return Ok(incoming_buf_len); + } + + let written = match &mut self.state { + ChunkedDecryptorState::Initial(key) => { + let (state, bytes_read) = Self::read_initial(key, &self.buffer)?; + if let Some(state) = state { + self.state = ChunkedDecryptorState::Configured(state); + } + bytes_read + } + ChunkedDecryptorState::Configured(state) => { + Self::read_blocks(state, &mut self.output, &mut self.buffer)?
+ } + }; + + // Remove the processed bytes from the internal buffer + self.buffer.drain(..written); + + Ok(incoming_buf_len) + } + + fn flush(&mut self) -> std::io::Result<()> { + // Make sure the internal buffer has been processed entirely + // Note: We don't need to flush the output here, as that is done in finalize + self.write(&[]).map(|_| ()) } } -#[cfg(feature = "mobile")] pub fn decrypt_file( key: SymmetricCryptoKey, encrypted_file_path: &std::path::Path, decrypted_file_path: &std::path::Path, ) -> Result<()> { - // TODO: Move to use an async file implementation - use std::{ - fs::File, - io::{Read, Write}, - }; + use std::fs::File; let mut encrypted_file = File::open(encrypted_file_path)?; let mut decrypted_file = File::create(decrypted_file_path)?; - let mut buffer = [0; 4096]; - let bytes_read = encrypted_file.read(&mut buffer)?; - if bytes_read == 0 { - return Err(Error::Internal("Empty file")); - } - let (mut decryptor, initial_chunk) = ChunkedDecryptor::new(key, &buffer[..bytes_read])?; - decrypted_file.write_all(&initial_chunk)?; + let mut decryptor = ChunkedDecryptor::new(&key, &mut decrypted_file); + std::io::copy(&mut encrypted_file, &mut decryptor)?; + decryptor.finalize()?; - loop { - let bytes_read = encrypted_file.read(&mut buffer)?; - if bytes_read == 0 { - break; - } - let chunk = decryptor.decrypt_chunk(&buffer[..bytes_read])?; - decrypted_file.write_all(&chunk)?; - } + Ok(()) +} - let chunk = decryptor.finalize()?; - decrypted_file.write_all(&chunk)?; +#[cfg(test)] +mod tests { + use std::io::Write; - decrypted_file.flush()?; + use rand::RngCore; - Ok(()) + use crate::crypto::{encrypt_aes256, SymmetricCryptoKey}; + + use super::ChunkedDecryptor; + + #[test] + fn test_chunk_decryption() { + // Test different combinations of cipher and chunk sizes + for size in [64, 500, 100_000, 9_000_000] { + let mut initial_buf = Vec::with_capacity(size); + initial_buf.resize(size, 0); + rand::thread_rng().fill_bytes(&mut initial_buf[..size]); + let key: SymmetricCryptoKey = SymmetricCryptoKey::generate("test"); + let encrypted_buf = encrypt_aes256(&initial_buf, key.mac_key, key.key) + .unwrap() + .to_buffer() + .unwrap(); + + let mut decrypted_buf = Vec::with_capacity(size); + + for chunk_size in [1, 15, 16, 64, 1024] { + decrypted_buf.clear(); + let mut cd = ChunkedDecryptor::new(&key, &mut decrypted_buf); + + for chunk in encrypted_buf.chunks(chunk_size) { + cd.write_all(chunk).unwrap(); + } + cd.finalize().unwrap(); + + assert_eq!(initial_buf, decrypted_buf); + } + } + } } diff --git a/crates/bitwarden/src/crypto/enc_string.rs b/crates/bitwarden/src/crypto/enc_string.rs index 57958af8c..2543077ca 100644 --- a/crates/bitwarden/src/crypto/enc_string.rs +++ b/crates/bitwarden/src/crypto/enc_string.rs @@ -121,7 +121,7 @@ impl FromStr for EncString { impl EncString { #[cfg(feature = "mobile")] - pub(crate) fn from_buffer(buf: &[u8]) -> Result<Self> { + pub fn from_buffer(buf: &[u8]) -> Result<Self> { if buf.is_empty() { return Err(EncStringParseError::NoType.into()); } @@ -157,7 +157,7 @@ impl EncString { } #[cfg(feature = "mobile")] - pub(crate) fn to_buffer(&self) -> Result<Vec<u8>> { + pub fn to_buffer(&self) -> Result<Vec<u8>> { let mut buf; match self { diff --git a/crates/bitwarden/src/error.rs b/crates/bitwarden/src/error.rs index cd742f810..fb4d2fd02 100644 --- a/crates/bitwarden/src/error.rs +++ b/crates/bitwarden/src/error.rs @@ -94,6 +94,15 @@ pub enum EncStringParseError { InvalidLength { expected: usize, got: usize }, } +impl From<Error> for std::io::Error { + fn from(e: Error) -> Self { + match e {
Error::Io(e) => e, + e => std::io::Error::new(std::io::ErrorKind::Other, e), + } + } +} + // Ensure that the error messages implement Send and Sync #[cfg(test)] const _: () = { diff --git a/crates/bitwarden/src/vault/send.rs b/crates/bitwarden/src/vault/send.rs index 0445f6043..c44e3e55c 100644 --- a/crates/bitwarden/src/vault/send.rs +++ b/crates/bitwarden/src/vault/send.rs @@ -336,20 +336,11 @@ pub async fn download_send_file_from_url( let mut file = std::fs::File::create(path)?; let mut file_response = client.get(url).send().await?; - let initial_chunk = file_response.chunk().await?.unwrap(); - - let (mut decryptor, chunk) = crate::crypto::ChunkedDecryptor::new(key, &initial_chunk)?; - file.write_all(&chunk)?; - + let mut decryptor = crate::crypto::ChunkedDecryptor::new(&key, &mut file); while let Some(chunk) = file_response.chunk().await? { - let chunk = decryptor.decrypt_chunk(&chunk)?; - file.write_all(&chunk)?; + decryptor.write_all(&chunk)?; } - - let chunk = decryptor.finalize()?; - file.write_all(&chunk)?; - - file.flush()?; + decryptor.finalize()?; Ok(()) }
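Reviewer note (not part of the patch): the following is a minimal usage sketch of the new streaming API, mirroring the setup used by the benchmark and unit test in this diff. It relies only on items the diff itself exercises (SymmetricCryptoKey::generate, encrypt_aes256, EncString::to_buffer, and the Write-based ChunkedDecryptor); the main function and error boxing are illustrative scaffolding, and, as in the benchmark, it assumes the crate is built with the features that expose these helpers (to_buffer is behind the "mobile" feature).

use std::io::Write;

use bitwarden::crypto::{encrypt_aes256, ChunkedDecryptor, SymmetricCryptoKey};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Produce an encrypted buffer in the layout the decryptor expects:
    // [enc type (1 byte) | IV (16 bytes) | MAC (32 bytes) | PKCS7-padded ciphertext].
    let key = SymmetricCryptoKey::generate("example");
    let plaintext = vec![42u8; 10 * 1024];
    let enc_buf = encrypt_aes256(&plaintext, key.mac_key, key.key)?.to_buffer()?;

    // Any std::io::Write sink works as the output; here we decrypt into a Vec<u8>.
    let mut decrypted: Vec<u8> = Vec::new();
    let mut decryptor = ChunkedDecryptor::new(&key, &mut decrypted);

    // Feed the ciphertext in arbitrarily sized chunks, e.g. as network packets arrive.
    for chunk in enc_buf.chunks(1024) {
        decryptor.write_all(chunk)?;
    }
    // finalize() decrypts and unpads the last block, then verifies the HMAC.
    decryptor.finalize()?;

    assert_eq!(decrypted, plaintext);
    Ok(())
}

The new benchmark should be runnable with a plain "cargo bench -p bitwarden"; since Criterion's html_reports feature is enabled in the dev-dependency, HTML reports are expected to land under target/criterion.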