Skip to content

Commit

Permalink
ref(snapshot): modify file names to match upstream
Browse files Browse the repository at this point in the history
  • Loading branch information
zeapoz committed Apr 2, 2024
1 parent 395e5d6 commit b0f9fb1
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 17 deletions.
28 changes: 16 additions & 12 deletions src/processor/snapshot/exporter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ use prost::Message;
use super::{
database::{self, SnapshotDB},
types::{self, SnapshotFactoryDependency, SnapshotHeader},
DEFAULT_DB_PATH, SNAPSHOT_FACTORY_DEPS_FILE_NAME, SNAPSHOT_HEADER_FILE_NAME,
DEFAULT_DB_PATH, SNAPSHOT_FACTORY_DEPS_FILE_NAME_SUFFIX, SNAPSHOT_HEADER_FILE_NAME,
};

pub mod protobuf {
Expand All @@ -24,17 +24,17 @@ pub struct SnapshotExporter {
}

impl SnapshotExporter {
pub fn new(basedir: &Path, db_path: Option<String>) -> Self {
pub fn new(basedir: &Path, db_path: Option<String>) -> Result<Self> {
let db_path = match db_path {
Some(p) => PathBuf::from(p),
None => PathBuf::from(DEFAULT_DB_PATH),
};

let database = SnapshotDB::new_read_only(db_path).unwrap();
Self {
let database = SnapshotDB::new_read_only(db_path)?;
Ok(Self {
basedir: basedir.to_path_buf(),
database,
}
})
}

pub fn export_snapshot(&self, chunk_size: u64) -> Result<()> {
Expand Down Expand Up @@ -77,7 +77,10 @@ impl SnapshotExporter {
buf.reserve(fd_len - buf.capacity());
}

let path = self.basedir.join(SNAPSHOT_FACTORY_DEPS_FILE_NAME);
let path = self.basedir.join(format!(
"snapshot_l1_batch_{}_{}",
header.l1_batch_number, SNAPSHOT_FACTORY_DEPS_FILE_NAME_SUFFIX
));
header.factory_deps_filepath = path
.clone()
.into_os_string()
Expand All @@ -103,7 +106,7 @@ impl SnapshotExporter {

fn export_storage_logs(&self, chunk_size: u64, header: &mut SnapshotHeader) -> Result<()> {
let mut buf = BytesMut::new();
let mut chunk_index = 0;
let mut chunk_id = 0;

let index_to_key_map = self.database.cf_handle(database::INDEX_TO_KEY_MAP).unwrap();
let mut iterator = self
Expand Down Expand Up @@ -144,15 +147,16 @@ impl SnapshotExporter {
buf.reserve(chunk_len - buf.capacity());
}

chunk_index += 1;
let path = PathBuf::new()
.join(&self.basedir)
.join(format!("{chunk_index}.gz"));
let path = PathBuf::new().join(&self.basedir).join(format!(
"snapshot_l1_batch_{}_storage_logs_part_{:0>4}.proto.gzip",
header.l1_batch_number, chunk_id
));
chunk_id += 1;

header
.storage_logs_chunks
.push(types::SnapshotStorageLogsChunkMetadata {
chunk_id: chunk_index,
chunk_id,
filepath: path
.clone()
.into_os_string()
Expand Down
14 changes: 10 additions & 4 deletions src/processor/snapshot/importer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ use super::{
SnapshotFactoryDependencies, SnapshotStorageLog, SnapshotStorageLogsChunk,
},
types::SnapshotHeader,
SNAPSHOT_FACTORY_DEPS_FILE_NAME, SNAPSHOT_HEADER_FILE_NAME,
SNAPSHOT_FACTORY_DEPS_FILE_NAME_SUFFIX, SNAPSHOT_HEADER_FILE_NAME,
};
use crate::processor::tree::tree_wrapper::TreeWrapper;

Expand All @@ -47,7 +47,7 @@ impl SnapshotImporter {

pub async fn run(mut self) -> Result<()> {
let header = self.read_header()?;
let factory_deps = self.read_factory_deps()?;
let factory_deps = self.read_factory_deps(&header)?;
let storage_logs_chunk = self.read_storage_logs_chunks(&header)?;

self.tree
Expand All @@ -63,8 +63,11 @@ impl SnapshotImporter {
Ok(header)
}

fn read_factory_deps(&self) -> Result<SnapshotFactoryDependencies> {
let factory_deps_path = self.directory.join(SNAPSHOT_FACTORY_DEPS_FILE_NAME);
fn read_factory_deps(&self, header: &SnapshotHeader) -> Result<SnapshotFactoryDependencies> {
let factory_deps_path = self.directory.join(format!(
"snapshot_l1_batch_{}_{}",
header.l1_batch_number, SNAPSHOT_FACTORY_DEPS_FILE_NAME_SUFFIX
));
let bytes = fs::read(factory_deps_path)?;
let mut decoder = GzDecoder::new(&bytes[..]);

Expand Down Expand Up @@ -97,6 +100,9 @@ impl SnapshotImporter {
let mut decompressed_bytes = Vec::new();
decoder.read_to_end(&mut decompressed_bytes)?;

// TODO: It would be nice to avoid the intermediary step of decoding. Something like
// implementing a method on the types::* that does it automatically. Will improve
// readability for the export code too as a bonus.
let storage_logs_chunk = SnapshotStorageLogsChunk::decode(&decompressed_bytes[..])?;
chunks.push(storage_logs_chunk);
}
Expand Down
2 changes: 1 addition & 1 deletion src/processor/snapshot/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ use crate::processor::snapshot::types::MiniblockNumber;

pub const DEFAULT_DB_PATH: &str = "snapshot_db";
pub const SNAPSHOT_HEADER_FILE_NAME: &str = "snapshot-header.json";
pub const SNAPSHOT_FACTORY_DEPS_FILE_NAME: &str = "factory_deps.dat";
pub const SNAPSHOT_FACTORY_DEPS_FILE_NAME_SUFFIX: &str = "factory_deps.proto.gzip";

pub struct SnapshotBuilder {
database: SnapshotDB,
Expand Down

0 comments on commit b0f9fb1

Please sign in to comment.