Skip to content

Commit

Permalink
chore: create append only neo-schema (#372)
Browse files Browse the repository at this point in the history
* chore: create append only neo-schema

* chore: add tables

* chore: use full new schema

* chore: fine tuning the configuration

* lint

* lint
  • Loading branch information
renancloudwalk authored Mar 16, 2024
1 parent abea843 commit 45f4cb6
Show file tree
Hide file tree
Showing 4 changed files with 79 additions and 30 deletions.

This file was deleted.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

33 changes: 19 additions & 14 deletions src/eth/storage/hybrid/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ use num_traits::cast::ToPrimitive;
use rand::rngs::StdRng;
use rand::seq::IteratorRandom;
use rand::SeedableRng;
use serde_json::Value;
use sqlx::postgres::PgPoolOptions;
use sqlx::Pool;
use tokio::sync::mpsc;
Expand Down Expand Up @@ -47,7 +46,9 @@ use crate::eth::storage::StorageError;
/// Unit of work sent over the mpsc channel to the background persistence
/// worker, which serializes the payload to JSON and inserts it into the
/// append-only `neo_blocks` table.
#[derive(Debug)]
struct BlockTask {
    // Block height; used as the primary key of `neo_blocks`.
    block_number: BlockNumber,
    // Hash of the block being persisted.
    block_hash: Hash,
    // Full block payload; serialized with serde_json before insertion.
    block_data: Block,
    // Per-account state changes produced by executing the block.
    // NOTE(review): currently always sent empty (see after_commit_hook TODO);
    // populated persistence is still pending.
    account_changes: Vec<ExecutionAccountChanges>,
}

#[derive(Debug, Default, serde::Serialize, serde::Deserialize)]
Expand Down Expand Up @@ -78,7 +79,7 @@ impl HybridPermanentStorage {
tracing::info!(?config, "starting hybrid storage");

let connection_pool = PgPoolOptions::new()
.min_connections(config.connections)
.min_connections(config.connections / 2)
.max_connections(config.connections)
.acquire_timeout(config.acquire_timeout)
.connect(&config.url)
Expand Down Expand Up @@ -123,19 +124,26 @@ impl HybridPermanentStorage {
let pool_clone = Arc::<sqlx::Pool<sqlx::Postgres>>::clone(&pool);
// Here we attempt to insert the block data into the database.
// Adjust the SQL query according to your table schema.
let block_data = serde_json::to_value(&block_task.block_data).unwrap();
let account_changes = serde_json::to_value(&block_task.account_changes).unwrap();
tokio::spawn(async move {
let result = sqlx::query!(
"INSERT INTO neo_blocks (block_number, block, created_at) VALUES ($1, $2, NOW())",
"INSERT INTO neo_blocks (block_number, block_hash, block, account_changes, created_at) VALUES ($1, $2, $3, $4, NOW());",
block_task.block_number as _,
block_task.block_data
block_task.block_hash as _,
block_data as _,
account_changes as _,
)
.execute(&*pool_clone)
.await;

// Handle the result of the insert operation.
match result {
Ok(_) => tracing::info!("Block {} inserted successfully.", block_task.block_number),
Err(e) => tracing::error!("Failed to insert block {}: {}", block_task.block_number, e),
Err(e) => {
dbg!(&e);
tracing::error!("Failed to insert block {}: {}", block_task.block_number, e);
}
}
});
}
Expand Down Expand Up @@ -398,18 +406,15 @@ impl PermanentStorage for HybridPermanentStorage {

async fn after_commit_hook(&self) -> anyhow::Result<()> {
let b = self.read_block(&BlockSelection::Latest).await?;
if let Some(bb) = b {
let s = format!("{} => {}", bb.number(), bb.transactions.len());
dbg!(s);
let bbb = *bb.number();

if let Some(block) = b {
let block_task = BlockTask {
block_number: bbb,
block_data: serde_json::to_value(bb).unwrap(),
block_number: *block.number(),
block_hash: block.hash().clone(),
block_data: block.clone(),
account_changes: Vec::new(), //TODO make account changes work from postgres then we can load it on memory
};

// Send the task to be processed by a worker
dbg!("sending block task");
self.task_sender.send(block_task).await.expect("Failed to send block task");
}

Expand Down
44 changes: 43 additions & 1 deletion static/schema/001-init.sql
Original file line number Diff line number Diff line change
Expand Up @@ -723,11 +723,53 @@ CREATE UNIQUE INDEX index_transactions_on_hash ON public.transactions USING btre

--- XXX temporary
CREATE TABLE public.neo_blocks (
    -- Block height; natural primary key for an append-only chain of blocks.
    block_number BIGINT PRIMARY KEY,
    -- Raw block hash bytes.
    -- NOTE(review): not declared UNIQUE — duplicate hashes under different
    -- numbers would be accepted; confirm whether a UNIQUE constraint is wanted.
    block_hash BYTEA NOT NULL,
    -- Full block serialized as JSON by the Rust worker (serde_json).
    block JSONB NOT NULL,
    -- Serialized Vec<ExecutionAccountChanges> for this block
    -- (currently inserted as an empty array; see the Rust TODO).
    account_changes JSONB NOT NULL,
    -- Insertion timestamp; set by the INSERT's NOW() or this default.
    created_at TIMESTAMP WITHOUT TIME ZONE DEFAULT now()
);

-- Append-only account state: one row per (address, block) at which the
-- account's state was recorded.
CREATE TABLE public.neo_accounts (
    block_number BIGINT NOT NULL,
    -- 20-byte EVM account address -- TODO confirm length is enforced upstream.
    address BYTEA NOT NULL,
    -- Contract bytecode; NULL for externally-owned accounts, presumably.
    bytecode BYTEA,
    -- Wei balance; NUMERIC(38,0) caps at 10^38-1, below the full 2^256-1
    -- range a balance can theoretically reach -- NOTE(review): verify this
    -- precision is sufficient for expected values.
    balance NUMERIC(38, 0),
    -- Account nonce; unconstrained NUMERIC (no precision/scale specified).
    nonce NUMERIC,
    created_at TIMESTAMP WITHOUT TIME ZONE DEFAULT now(),
    -- Composite key: a given address appears at most once per block.
    PRIMARY KEY (address, block_number),
    FOREIGN KEY (block_number) REFERENCES public.neo_blocks(block_number)
);

-- Append-only contract storage slots: one row per (account, slot, block).
CREATE TABLE public.neo_account_slots (
    block_number BIGINT NOT NULL,
    -- Storage slot index.
    -- NOTE(review): EVM slot indexes are 256-bit values; INT (32-bit) will
    -- overflow for hashed-map slot keys -- confirm the intended index domain.
    index INT NOT NULL,
    -- Address of the account owning this storage slot.
    account_address BYTEA NOT NULL,
    -- Raw slot value bytes; nullable.
    value BYTEA,
    created_at TIMESTAMP WITHOUT TIME ZONE DEFAULT now(),
    -- A slot of an account appears at most once per block.
    PRIMARY KEY (account_address, index, block_number),
    FOREIGN KEY (block_number) REFERENCES public.neo_blocks(block_number)
);

-- Transactions, keyed by hash, each belonging to exactly one block.
CREATE TABLE public.neo_transactions (
    block_number BIGINT NOT NULL,
    -- Transaction hash; globally unique, hence the primary key.
    hash BYTEA NOT NULL,
    -- Full transaction serialized as JSON.
    transaction_data JSONB NOT NULL,
    created_at TIMESTAMP WITHOUT TIME ZONE DEFAULT now(),
    PRIMARY KEY (hash),
    FOREIGN KEY (block_number) REFERENCES public.neo_blocks(block_number)
);

-- Event logs emitted by transactions.
CREATE TABLE public.neo_logs (
    block_number BIGINT NOT NULL,
    -- Presumably the hash of the emitting transaction -- TODO confirm;
    -- the (hash, address, block_number) key allows one log per contract
    -- per transaction per block, which would drop multiple logs emitted
    -- by the same contract in one transaction. NOTE(review): verify.
    hash BYTEA NOT NULL,
    -- Address of the contract that emitted the log.
    address BYTEA NOT NULL,
    -- Log payload (topics/data) serialized as JSON.
    log_data JSONB NOT NULL,
    created_at TIMESTAMP WITHOUT TIME ZONE DEFAULT now(),
    PRIMARY KEY (hash, address, block_number),
    FOREIGN KEY (block_number) REFERENCES public.neo_blocks(block_number)
);

--
-- PostgreSQL database dump complete
--
Expand Down

0 comments on commit 45f4cb6

Please sign in to comment.