added reload tests
ehsan6sha committed Sep 6, 2023
1 parent cae7b47 commit bf47a1f
Showing 1 changed file with 214 additions and 3 deletions.
src/private_forest/private_forest_tests.rs: 214 additions & 3 deletions
@@ -751,7 +751,7 @@ fn synced_test_large_file_write_stream_with_reload() {
let path_string: String = path_buf.to_string_lossy().into_owned();

let path = vec!["root".into(), format!("file_stream{}.bin", i)];
-let cid = reload_helper
+cid = reload_helper
.synced_write_file_stream_from_path(&path, &path_string)
.unwrap();

@@ -788,7 +788,7 @@ fn synced_test_large_file_write_stream_with_reload() {

let path = vec!["root".into(), "large_file_stream.bin".into()];
let reload_helper = &mut PrivateDirectoryHelper::synced_reload(blockstore, cid).unwrap();
-let cid = reload_helper
+cid = reload_helper
.synced_write_file_stream_from_path(&path, &path_string)
.unwrap();
println!("cid: {:?}", cid);
@@ -846,7 +846,7 @@ fn synced_test_large_file_write_stream_with_reload() {

let reload_helper = &mut PrivateDirectoryHelper::synced_reload(blockstore, cid).unwrap();
let path = vec!["root".into(), "large_file_stream2.bin".into()];
-let cid = reload_helper
+cid = reload_helper
.synced_write_file_stream_from_path(&path, &path_string)
.unwrap();
println!("cid: {:?}", cid);
@@ -894,3 +894,214 @@ fn synced_test_large_file_write_stream_with_reload() {
file2.read_to_end(&mut content2).unwrap();
assert_eq!(content1, content2);
}

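// Async counterpart of the synced reload test above: initializes a private forest,
// streams files into it across repeated reloads, and verifies directory listings
// and round-trip file contents.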
#[tokio::test]
async fn test_large_file_write_stream_with_reload() {
let itteration = 2;
let empty_key: Vec<u8> = vec![0; 32];

let store = KVBlockStore::new(
String::from("./tmp/synced_test_large_file_write_stream"),
CODEC_DAG_CBOR,
);
let blockstore = &mut FFIFriendlyBlockStore::new(Box::new(store));

let (helper, access_key, cid) =
&mut PrivateDirectoryHelper::init(blockstore, empty_key.to_owned())
.await
.unwrap();

let mut cid = cid.to_owned();
println!("cid: {:?}", cid);
println!("access_key: {:?}", access_key.to_owned());

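// Create one subdirectory per iteration, carrying the updated root CID forward each time.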
for i in 1..=itteration {
let path = vec!["root".into(), format!("test_{}", i).into()];
cid = helper.mkdir(&path).await.unwrap();
println!("CID for mkdir test_{}: {:?}", i, cid);
}

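// For each iteration, reload the helper from the latest CID and stream a fresh 1MB temp file into the forest.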
for i in 1..=itteration {
let reload_helper = &mut PrivateDirectoryHelper::reload(blockstore, cid)
.await
.unwrap();
println!(
"*******************Starting write iteration {}******************",
i
);

// Generate first dummy 1MB payload
let mut data = generate_dummy_data(1 * 1024 * 1024); // 1MB in bytes
rand::thread_rng().fill_bytes(&mut data);
let tmp_file = NamedTempFile::new().unwrap();
async_std::fs::write(tmp_file.path(), &data).await.unwrap();

let path_buf: PathBuf = tmp_file.path().to_path_buf();
let path_string: String = path_buf.to_string_lossy().into_owned();

let path = vec!["root".into(), format!("file_stream{}.bin", i)];
cid = reload_helper
.write_file_stream_from_path(&path, &path_string)
.await
.unwrap();

println!("cid: {:?}", cid);
println!("access_key: {:?}", access_key);
}

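// Reload from the final CID and confirm every streamed file shows up in the root listing.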
let reload_helper = &mut PrivateDirectoryHelper::reload(blockstore, cid)
.await
.unwrap();
let ls_result: Vec<(String, wnfs::common::Metadata)> =
reload_helper.ls_files(&["root".into()]).await.unwrap();
println!("ls: {:?}", ls_result);
let filenames_from_ls: Vec<String> = ls_result.iter().map(|(name, _)| name.clone()).collect();

let mut found = true;
for i in 1..=itteration {
let file_name = format!("file_stream{}.bin", i);
if !filenames_from_ls.contains(&file_name) {
found = false;
break;
}
}

assert!(found, "Not all expected files are present");

// Generate a dummy 100MB payload
let mut data = generate_dummy_data(100 * 1024 * 1024); // 100MB in bytes
rand::thread_rng().fill_bytes(&mut data);
let tmp_file = NamedTempFile::new().unwrap();
async_std::fs::write(tmp_file.path(), &data).await.unwrap();

let path_buf: PathBuf = tmp_file.path().to_path_buf();
let path_string: String = path_buf.to_string_lossy().into_owned();

let path = vec!["root".into(), "large_file_stream.bin".into()];
let reload_helper = &mut PrivateDirectoryHelper::reload(blockstore, cid)
.await
.unwrap();
cid = reload_helper
.write_file_stream_from_path(&path, &path_string)
.await
.unwrap();
println!("cid: {:?}", cid);
println!("access_key: {:?}", access_key);
let ls_result = reload_helper.ls_files(&["root".into()]).await.unwrap();
println!("ls: {:?}", ls_result);
assert!(ls_result
.iter()
.any(|item| item.0 == "large_file_stream.bin"));

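// Reload again from the updated CID to make sure the large file is still listed after a fresh reload.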
let reload_helper = &mut PrivateDirectoryHelper::reload(blockstore, cid)
.await
.unwrap();
let ls_result = reload_helper.ls_files(&["root".into()]).await.unwrap();
println!("ls: {:?}", ls_result);
assert!(ls_result
.iter()
.any(|item| item.0 == "large_file_stream.bin"));

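// Stream the 100MB file back out to a temp file so it can be compared against the original payload.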
let tmp_file_read = NamedTempFile::new().unwrap();
let path_buf_read: PathBuf = tmp_file_read.path().to_path_buf();
let path_string_read: String = path_buf_read.to_string_lossy().into_owned();
let reload_helper = &mut PrivateDirectoryHelper::reload(blockstore, cid)
.await
.unwrap();
reload_helper
.read_filestream_to_path(
&path_string_read,
&["root".into(), "large_file_stream.bin".into()],
0,
)
.await
.unwrap();

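// Compare the original and read-back files, first by size and then byte for byte.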
let mut file1 = File::open(tmp_file.path()).unwrap();
let mut file2 = File::open(tmp_file_read.path()).unwrap();

let metadata1 = file1.metadata().unwrap();
let metadata2 = file2.metadata().unwrap();
println!(
"original filesize: {:?} and read size: {:?}",
metadata1.len(),
metadata2.len()
);
assert_eq!(metadata1.len(), metadata2.len(), "File sizes do not match");

let mut content1 = Vec::new();
let mut content2 = Vec::new();

file1.read_to_end(&mut content1).unwrap();
file2.read_to_end(&mut content2).unwrap();
assert_eq!(content1, content2);

println!("read_file_stream_from_path checks done");

// Generate second dummy 60MB payload
let mut data = generate_dummy_data(60 * 1024 * 1024); // 60MB in bytes
rand::thread_rng().fill_bytes(&mut data);
let tmp_file = NamedTempFile::new().unwrap();
async_std::fs::write(tmp_file.path(), &data).await.unwrap();

let path_buf: PathBuf = tmp_file.path().to_path_buf();
let path_string: String = path_buf.to_string_lossy().into_owned();

let reload_helper = &mut PrivateDirectoryHelper::reload(blockstore, cid)
.await
.unwrap();
let path = vec!["root".into(), "large_file_stream2.bin".into()];
cid = reload_helper
.write_file_stream_from_path(&path, &path_string)
.await
.unwrap();
println!("cid: {:?}", cid);
println!("access_key: {:?}", access_key);

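// Reload and confirm the second large file appears in the root listing.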
let reload_helper = &mut PrivateDirectoryHelper::reload(blockstore, cid)
.await
.unwrap();
let ls_result = reload_helper.ls_files(&["root".into()]).await.unwrap();
println!("ls: {:?}", ls_result);
assert!(ls_result
.iter()
.any(|item| item.0 == "large_file_stream2.bin"));

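// Stream the second file back and verify it matches the original in both size and contents.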
let tmp_file_read = NamedTempFile::new().unwrap();
let path_buf_read: PathBuf = tmp_file_read.path().to_path_buf();
let path_string_read: String = path_buf_read.to_string_lossy().into_owned();
let reload_helper = &mut PrivateDirectoryHelper::reload(blockstore, cid)
.await
.unwrap();
reload_helper
.read_filestream_to_path(
&path_string_read,
&["root".into(), "large_file_stream2.bin".into()],
0,
)
.await
.unwrap();
println!("read_filestream_to_path2 done");
let mut file1 = File::open(tmp_file.path()).unwrap();
let mut file2 = File::open(tmp_file_read.path()).unwrap();

let metadata1 = file1.metadata().unwrap();
let metadata2 = file2.metadata().unwrap();
println!(
"original filesize: {:?} and read size: {:?}",
metadata1.len(),
metadata2.len()
);
assert_eq!(
metadata1.len(),
metadata2.len(),
"File sizes 2 do not match"
);

let mut content1 = Vec::new();
let mut content2 = Vec::new();

file1.read_to_end(&mut content1).unwrap();
file2.read_to_end(&mut content2).unwrap();
assert_eq!(content1, content2);
}
