diff --git a/.github/workflows/auto_merge_prs.yml b/.github/workflows/auto_merge_prs.yml
deleted file mode 100644
index 95e021961..000000000
--- a/.github/workflows/auto_merge_prs.yml
+++ /dev/null
@@ -1,39 +0,0 @@
-# auto merge workflow.
-#
-# Auto merge PR if commit msg begins with `chore(release):`,
-# or if it has been raised by Dependabot.
-# Uses https://github.com/ridedott/merge-me-action.
-
-name: Merge Version Change and Dependabot PRs automatically
-
-on: pull_request
-
-jobs:
-  merge:
-    runs-on: ubuntu-20.04
-    steps:
-      - uses: actions/checkout@v2
-        with:
-          fetch-depth: '0'
-
-      - name: get commit message
-        run: |
-          commitmsg=$(git log --format=%s -n 1 ${{ github.event.pull_request.head.sha }})
-          echo "commitmsg=${commitmsg}" >> $GITHUB_ENV
-
-      - name: show commit message
-        run : echo $commitmsg
-
-      - name: Merge Version change PR
-        if: startsWith( env.commitmsg, 'chore(release):')
-        uses: ridedott/merge-me-action@81667e6ae186ddbe6d3c3186d27d91afa7475e2c
-        with:
-          GITHUB_LOGIN: dirvine
-          GITHUB_TOKEN: ${{ secrets.MERGE_BUMP_BRANCH_TOKEN }}
-          MERGE_METHOD: REBASE
-
-      - name: Dependabot Merge
-        uses: ridedott/merge-me-action@master
-        with:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          MERGE_METHOD: REBASE
diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml
index 74269d842..7869e2189 100644
--- a/.github/workflows/pr.yml
+++ b/.github/workflows/pr.yml
@@ -81,14 +81,11 @@ jobs:
             target
           key: ${{ runner.os }}-cargo-cache-${{ hashFiles('**/Cargo.lock') }}
 
-      # Run cargo tarpaulin & push result to coveralls.io
-      - name: rust-tarpaulin code coverage check
-        uses: actions-rs/tarpaulin@v0.1
-        with:
-          args: "-v --release"
-          version: "0.15.0"
-          out-type: Lcov
-          timeout: 2000
+      # Generate code coverage & push result to coveralls.io
+      - name: Install cargo-llvm-cov
+        uses: taiki-e/install-action@cargo-llvm-cov
+      - name: Generate code coverage
+        run: cargo llvm-cov --all-features --workspace --lcov --output-path lcov.info
       - name: Push code coverage results to coveralls.io
         uses: coverallsapp/github-action@master
         with:
diff --git a/src/chunk.rs b/src/chunk.rs
index 603353438..ab65225c0 100644
--- a/src/chunk.rs
+++ b/src/chunk.rs
@@ -11,7 +11,6 @@ use bytes::Bytes;
 use rayon::prelude::*;
 use xor_name::XorName;
 
-///
 #[derive(Clone)]
 pub(crate) struct EncryptionBatch {
     pub(crate) raw_chunks: Vec<RawChunk>,
diff --git a/src/lib.rs b/src/lib.rs
index b7d7e1fed..284b3cae5 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -147,7 +147,7 @@ pub struct EncryptedChunk {
 #[derive(Clone)]
 pub struct StreamSelfEncryptor {
     // File path for the encryption target.
-    file_path: Box<PathBuf>,
+    file_path: PathBuf,
     // List of `(start_position, end_position)` for each chunk for the target file.
     batch_positions: Vec<(usize, usize)>,
     // Current step (i.e. chunk_index) for encryption
@@ -157,16 +157,13 @@ pub struct StreamSelfEncryptor {
     // Progressing collection of source chunks' names
     src_hashes: BTreeMap<usize, XorName>,
     // File path to flush encrypted_chunks into.
-    chunk_dir: Option<Box<PathBuf>>,
+    chunk_dir: Option<PathBuf>,
 }
 
 impl StreamSelfEncryptor {
     /// For encryption, return with an intialized streaming encryptor.
     /// If a `chunk_dir` is provided, the encrypted_chunks will be written into the specified dir as well.
-    pub fn encrypt_from_file(
-        file_path: Box<PathBuf>,
-        chunk_dir: Option<Box<PathBuf>>,
-    ) -> Result<Self> {
+    pub fn encrypt_from_file(file_path: PathBuf, chunk_dir: Option<PathBuf>) -> Result<Self> {
         let file = File::open(&*file_path)?;
         let metadata = file.metadata()?;
         let file_size = metadata.len();
@@ -260,7 +257,7 @@ impl StreamSelfEncryptor {
 /// The streaming decryptor to carry out the decryption on fly, chunk by chunk.
 pub struct StreamSelfDecryptor {
     // File path for the decryption output.
-    file_path: Box<PathBuf>,
+    file_path: PathBuf,
     // Current step (i.e. chunk_index) for decryption
     chunk_index: usize,
     // Source hashes of the chunks that collected from the data_map, they shall already be sorted by index.
@@ -273,7 +270,7 @@ pub struct StreamSelfDecryptor {
 
 impl StreamSelfDecryptor {
     /// For decryption, return with an intialized streaming decryptor
-    pub fn decrypt_to_file(file_path: Box<PathBuf>, data_map: &DataMap) -> Result<Self> {
+    pub fn decrypt_to_file(file_path: PathBuf, data_map: &DataMap) -> Result<Self> {
         let temp_dir = tempdir()?;
         let src_hashes = extract_hashes(data_map);
 
@@ -336,7 +333,7 @@ impl StreamSelfDecryptor {
     // Drain any in-order chunks due to the recent filled in piece.
     fn drain_unprocessed(&mut self) -> Result<()> {
         while let Some(chunk_name) = self.encrypted_chunks.get(&self.chunk_index) {
-            let file_path = self.temp_dir.path().join(&hex::encode(chunk_name));
+            let file_path = self.temp_dir.path().join(hex::encode(chunk_name));
             let mut chunk_file = File::open(file_path)?;
             let mut chunk_data = Vec::new();
             let _ = chunk_file.read_to_end(&mut chunk_data)?;
@@ -366,7 +363,7 @@ pub fn encrypt_from_file(file_path: &Path, output_dir: &Path) -> Result<(DataMap, Vec<XorName>)> {
         let chunk_name = XorName::from_content(&chunk.content);
         chunk_names.push(chunk_name);
 
-        let file_path = output_dir.join(&hex::encode(chunk_name));
+        let file_path = output_dir.join(hex::encode(chunk_name));
         let mut output_file = File::create(file_path)?;
         output_file.write_all(&chunk.content)?;
     }
@@ -384,7 +381,7 @@ pub fn decrypt_from_chunk_files(
     let mut encrypted_chunks = Vec::new();
     for chunk_info in data_map.infos() {
         let chunk_name = chunk_info.dst_hash;
-        let file_path = chunk_dir.join(&hex::encode(chunk_name));
+        let file_path = chunk_dir.join(hex::encode(chunk_name));
         let mut chunk_file = File::open(file_path)?;
         let mut chunk_data = Vec::new();
         let _ = chunk_file.read_to_end(&mut chunk_data)?;
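For reviewers, a minimal caller-side sketch of the streaming API after this change. The `stream_roundtrip` function and its paths are placeholders, and it assumes `next_encryption()` returns `Result<(Option<EncryptedChunk>, Option<DataMap>)>`, matching how the test diff below consumes it; the only difference for callers is that the `Box::new(...)` wrappers around the paths go away.

```rust
// Sketch only: exercises StreamSelfEncryptor/StreamSelfDecryptor with the new
// plain-PathBuf arguments. `input`, `chunk_dir` and `output` are placeholder paths.
use std::path::PathBuf;

use self_encryption::{Error, StreamSelfDecryptor, StreamSelfEncryptor};

fn stream_roundtrip(input: PathBuf, chunk_dir: PathBuf, output: PathBuf) -> Result<(), Error> {
    // Previously: encrypt_from_file(Box::new(input), Some(Box::new(chunk_dir)))
    let mut encryptor = StreamSelfEncryptor::encrypt_from_file(input, Some(chunk_dir))?;

    let mut chunks = Vec::new();
    let mut data_map = None;
    // Pull encrypted chunks until the encryptor hands back the DataMap.
    while let Ok((chunk, map)) = encryptor.next_encryption() {
        if let Some(chunk) = chunk {
            chunks.push(chunk);
        }
        if let Some(map) = map {
            data_map = Some(map);
            break;
        }
    }
    let data_map = data_map.expect("encryption should end with a DataMap");

    // Previously: decrypt_to_file(Box::new(output), &data_map)
    let mut decryptor = StreamSelfDecryptor::decrypt_to_file(output, &data_map)?;
    for chunk in chunks {
        let _ = decryptor.next_encrypted(chunk)?;
    }
    Ok(())
}
```

The test diff that follows performs the same round trip against real files and verifies the decrypted output.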
diff --git a/src/tests.rs b/src/tests.rs
index 73bfe2d21..deaa519e0 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -34,10 +34,8 @@ fn test_stream_self_encryptor() -> Result<(), Error> {
     create_dir_all(chunk_path.clone())?;
 
     // Encrypt the file using StreamSelfEncryptor
-    let mut encryptor = StreamSelfEncryptor::encrypt_from_file(
-        Box::new(file_path),
-        Some(Box::new(chunk_path.clone())),
-    )?;
+    let mut encryptor =
+        StreamSelfEncryptor::encrypt_from_file(file_path, Some(chunk_path.clone()))?;
     let mut encrypted_chunks = Vec::new();
     let mut data_map = None;
     while let Ok((chunk, map)) = encryptor.next_encryption() {
@@ -68,7 +66,7 @@ fn test_stream_self_encryptor() -> Result<(), Error> {
     }
 
     let mut decryptor =
-        StreamSelfDecryptor::decrypt_to_file(Box::new(decrypted_file_path.clone()), &data_map)?;
+        StreamSelfDecryptor::decrypt_to_file(decrypted_file_path.clone(), &data_map)?;
     for chunk in encrypted_chunks {
         let _ = decryptor.next_encrypted(chunk)?;
     }
@@ -82,7 +80,7 @@ fn test_stream_self_encryptor() -> Result<(), Error> {
     // Use the flushed encrypted chunks to recover the file and verify with the original data
     let mut flushed_encrypted_chunks = Vec::new();
     for chunk_info in data_map.infos() {
-        let file_path = chunk_path.join(&hex::encode(chunk_info.dst_hash));
+        let file_path = chunk_path.join(hex::encode(chunk_info.dst_hash));
         let mut chunk_file = File::open(file_path)?;
         let mut chunk_data = Vec::new();
         let _ = chunk_file.read_to_end(&mut chunk_data)?;
diff --git a/tests/lib.rs b/tests/lib.rs
index 0533684f1..b033649ad 100644
--- a/tests/lib.rs
+++ b/tests/lib.rs
@@ -6,6 +6,8 @@
 // KIND, either express or implied. Please review the Licences for the specific language governing
 // permissions and limitations relating to use of the SAFE Network Software.
 
+//! Tests for the self-encryption crate
+
 // For explanation of lint checks, run `rustc -W help` or see
 // https://github.com/maidsafe/QA/blob/master/Documentation/Rust%20Lint%20Checks.md
 #![forbid(