Skip to content

Commit

Permalink
feat: account slot indexes (#575)
Browse files Browse the repository at this point in the history
  • Loading branch information
dinhani-cw authored Apr 12, 2024
1 parent 10db006 commit ab0de45
Show file tree
Hide file tree
Showing 33 changed files with 56,969 additions and 1,227,259 deletions.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,9 @@ dev = []
# Application is running in performance test mode.
perf = []

# Enable prefetching slots during EVM execution.
evm-slot-prefetch = []

# Enable metrics dependencies and code for metrics collection.
metrics = ["dep:metrics", "dep:metrics-exporter-prometheus"]

Expand Down
18 changes: 9 additions & 9 deletions src/bin/importer_offline.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,6 @@ use stratus::log_and_err;
use tokio::sync::mpsc;
use tokio_util::sync::CancellationToken;

/// Number of blocks fetched in each query.
const BLOCKS_BY_FETCH: usize = 10_000;

/// Number of tasks kept in the loader backlog. Each task contains `blocks_by_fetch` blocks and all receipts for them.
const BACKLOG_SIZE: usize = 50;

Expand Down Expand Up @@ -83,6 +80,7 @@ async fn run(config: ImporterOfflineConfig) -> anyhow::Result<()> {
let _loader_task = tokio::spawn(execute_external_rpc_storage_loader(
rpc_storage,
cancellation.clone(),
config.blocks_by_fetch,
config.paralellism,
block_start,
block_end,
Expand Down Expand Up @@ -189,6 +187,7 @@ async fn execute_external_rpc_storage_loader(
rpc_storage: Arc<dyn ExternalRpcStorage>,
cancellation: CancellationToken,
// data
blocks_by_fetch: usize,
paralellism: usize,
mut start: BlockNumber,
end: BlockNumber,
Expand All @@ -199,9 +198,9 @@ async fn execute_external_rpc_storage_loader(
// prepare loads to be executed in parallel
let mut tasks = Vec::new();
while start <= end {
let end = min(start + (BLOCKS_BY_FETCH - 1), end);
let end = min(start + (blocks_by_fetch - 1), end);
tasks.push(load_blocks_and_receipts(Arc::clone(&rpc_storage), cancellation.clone(), start, end));
start += BLOCKS_BY_FETCH;
start += blocks_by_fetch;
}

// execute loads in parallel
Expand Down Expand Up @@ -285,16 +284,17 @@ async fn block_number_to_stop(rpc_storage: &Arc<dyn ExternalRpcStorage>) -> anyh
// -----------------------------------------------------------------------------
/// Exports a block, its receipts, and the resulting state snapshot as pretty-printed
/// JSON fixtures under `tests/fixtures/snapshots/<block-number>/`.
///
/// Writes three files: `block.json`, `receipts.json` and `snapshot.json`.
///
/// # Errors
///
/// Returns an error if the fixture directory cannot be created, if any file write
/// fails, or if JSON serialization fails.
fn export_snapshot(external_block: &ExternalBlock, external_receipts: &ExternalReceipts, mined_block: &Block) -> anyhow::Result<()> {
    // generate snapshots: permanent-storage state changes and the receipts belonging to this block
    let state_snapshot = InMemoryPermanentStorage::dump_snapshot(mined_block.compact_account_changes());
    let receipts_snapshot = external_receipts.filter_block(external_block.number());

    // create dir (path already ends with '/', so filenames are appended directly)
    let dir = format!("tests/fixtures/snapshots/{}/", mined_block.number());
    fs::create_dir_all(&dir)?;

    // write json
    fs::write(format!("{}block.json", dir), serde_json::to_string_pretty(external_block)?)?;
    fs::write(format!("{}receipts.json", dir), serde_json::to_string_pretty(&receipts_snapshot)?)?;
    fs::write(format!("{}snapshot.json", dir), serde_json::to_string_pretty(&state_snapshot)?)?;

    Ok(())
}
Expand Down
4 changes: 4 additions & 0 deletions src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -308,6 +308,10 @@ pub struct ImporterOfflineConfig {
#[arg(short = 'p', long = "paralellism", env = "PARALELLISM", default_value = "1")]
pub paralellism: usize,

/// Number of blocks by database fetch.
#[arg(short = 'b', long = "blocks-by-fetch", env = "BLOCKS_BY_FETCH", default_value = "10000")]
pub blocks_by_fetch: usize,

/// Write data to CSV file instead of permanent storage.
#[arg(long = "export-csv", env = "EXPORT_CSV", default_value = "false")]
pub export_csv: bool,
Expand Down
27 changes: 27 additions & 0 deletions src/eth/evm/evm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
//! facilitates flexible EVM integrations, enabling the project to adapt to different blockchain environments
//! or requirements while maintaining a consistent execution interface.
use itertools::Itertools;

use crate::eth::primitives::Address;
use crate::eth::primitives::BlockNumber;
use crate::eth::primitives::Bytes;
Expand Down Expand Up @@ -33,6 +35,8 @@ pub trait Evm {
fn execute(&mut self, input: EvmInput) -> anyhow::Result<EvmExecutionResult>;
}

/// Candidate storage-slot key material derived from an EVM input: a list of byte
/// vectors (sender/receiver addresses and 32-byte chunks of the data field).
pub type EvmInputSlotKeys = Vec<Vec<u8>>;

/// EVM input data. Usually derived from a transaction or call.
#[derive(Debug, Clone, Default)]
pub struct EvmInput {
Expand Down Expand Up @@ -155,4 +159,27 @@ impl EvmInput {
},
})
}

/// Calculates all possible 32 byte keys that can be used to access storage slots.
///
/// Possible inputs are:
/// * Sender address.
/// * Receiver address (unlikely).
/// * Every 32 bytes of the data field.
pub fn possible_slot_keys(&self) -> EvmInputSlotKeys {
    let mut inputs = vec![];

    // from: the sender address is a common mapping key (e.g. balances[msg.sender])
    inputs.push(self.from.as_bytes().to_vec());

    // to: included for completeness even though it is rarely used as a slot key
    if let Some(ref to) = self.to {
        inputs.push(to.as_bytes().to_vec());
    }

    // data: chunk from the end so 32-byte words stay aligned when the payload has a
    // leading remainder (presumably the 4-byte ABI selector — TODO confirm).
    // Feed the iterator to `extend` directly instead of collecting into an
    // intermediate Vec first (clippy::needless_collect).
    inputs.extend(self.data.0.rchunks(32).map(|chunk| chunk.to_vec()));

    inputs
}
}
1 change: 1 addition & 0 deletions src/eth/evm/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,4 @@ pub mod revm;
pub use evm::Evm;
pub use evm::EvmExecutionResult;
pub use evm::EvmInput;
pub use evm::EvmInputSlotKeys;
Loading

0 comments on commit ab0de45

Please sign in to comment.