Skip to content

Commit

Permalink
Merge branch 'jerry-yu:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
Pencil-Yao authored Mar 16, 2022
2 parents f88c6c6 + 8ba1e92 commit 8c68cc0
Show file tree
Hide file tree
Showing 42 changed files with 2,856 additions and 4,951 deletions.
1,851 changes: 1,125 additions & 726 deletions Cargo.lock
100644 → 100755

Large diffs are not rendered by default.

21 changes: 11 additions & 10 deletions Cargo.toml
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,17 +1,18 @@
[package]
name = "cita_recover"
version = "0.1.0"
authors = ["yubo <[email protected]>"]
version = "0.2.0"
authors = ["yubo <[email protected]>"]
edition = "2018"

[dependencies]
clap = "2.32.0"
log = "0.4.6"
bincode = "0.8.0"
#cita-types = { git = "https://github.com/citahub/cita-common.git", rev = "683d1f228" }
common-types = { path = "./types" }
#proof = { git = "https://github.com/citahub/cita-common.git", rev = "683d1f228" }
#db = { git = "https://github.com/citahub/cita-common.git", rev = "683d1f228" }
#rlp = { git = "https://github.com/citahub/cita-common.git", rev = "683d1f228" }
#libproto = { git = "https://github.com/citahub/cita-common.git", rev = "683d1f228" }


cita-types = { git = "https://github.com/citahub/cita-common.git", branch = "develop" }
common-types = { path = "./types",default-features=true }
proof = { git = "https://github.com/citahub/cita-common.git", branch = "develop" }
hashable = { git = "https://github.com/citahub/cita-common.git", branch = "develop" }
rlp = { git = "https://github.com/citahub/cita-common.git", branch = "develop" }
libproto = { git = "https://github.com/citahub/cita-common.git", branch = "develop" }
authority_manage = { git = "https://github.com/citahub/cita-common.git", branch = "develop" }
cita-database = "0.1"
Empty file modified README.md
100644 → 100755
Empty file.
Binary file removed bin/cita_recover
Binary file not shown.
Binary file added bin/cita_recover_secp256k1
Binary file not shown.
Binary file modified bin/cita_recover_sm2
Binary file not shown.
1 change: 0 additions & 1 deletion rust-toolchain

This file was deleted.

192 changes: 107 additions & 85 deletions src/main.rs
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,67 +1,64 @@
extern crate bincode;
//extern crate cita_types;
extern crate clap;
extern crate common_types as types;
//extern crate db as cita_db;
//extern crate libproto;
extern crate log;
//extern crate proof;
//extern crate rlp;

use types::libproto;
use types::proof;
//use types::db as cita_db;
use types::cita_types;
use types::rlp;

use cita_types::H256;
use common_types as types;
use types::db_indexes::{
BlockNumber2Hash, BlockNumber2Header, CurrentHash, CurrentProof, DBIndex, Hash2Header,
};

use authority_manage::AuthorityManage;
use cita_types::{Address, H256};
use clap::App;
use types::cita_db::{DBTransaction, Database, DatabaseConfig};

use cita_database::{Config, DataCategory, Database, RocksDB, NUM_COLUMNS};

/*use libproto::blockchain::{
AccountGasLimit as ProtoAccountGasLimit, Proof as ProtoProof, ProofType,
};
use rlp::{decode, encode, Decodable, Encodable};
*/
use std::path::Path;
use types::db::{Readable, Writable};
// use types::db::{Readable, Writable};
use types::header::*;
use types::{db, extras, BlockNumber};
// use types::{db, extras, BlockNumber};

use bincode::{serialize, Infinite};
use proof::BftProof;
use std::fs::{read_dir, remove_file, OpenOptions};
use std::io::{self, Read, Seek, Write};
use std::mem::transmute;
//use types::db::Key;

const PTYPE: u8 = 5;
const HTYPE: u8 = 4;
const DATA_PATH: &str = "DATA_PATH";
const WAL: &str = "/wal";
const NOSQL: &str = "/nosql";
const STATEDB: &str = "/statedb";

// Deletes consensus WAL log files whose numeric file-name prefix (the part
// before ".log") is greater than `height`, i.e. removes WAL entries that are
// ahead of the height the node is being rolled back to.
//
// NOTE(review): this span is a rendered diff with the +/- markers stripped, so
// it contains BOTH the pre-change loop and the post-change loop interleaved.
// The two versions are behaviorally identical; the rewrite only flattens the
// `if let Ok(entry) = entry` into `entries.flatten()`. Restore exactly one of
// the two loop bodies before compiling.
fn delete_higher_log_file(data_path: &str, height: usize) {
let wal_path = data_path.to_string() + WAL;
if let Ok(entries) = read_dir(wal_path.clone()) {
// --- pre-change version of the loop (old side of the diff) ---
for entry in entries {
if let Ok(entry) = entry {
// Here, `entry` is a `DirEntry`.
if let Ok(fname) = entry.file_name().into_string() {
let vec_str: Vec<&str> = fname.split(".log").collect();
if !vec_str.is_empty() {
// Non-numeric prefixes parse as 0 and are therefore never deleted.
let hi = vec_str[0].parse::<usize>().unwrap_or(0);
if hi > height {
let del_file = wal_path.clone() + "/" + &fname;
println!("del file name {:?}", del_file);
// Best-effort delete: a failed removal is deliberately ignored.
let _ = remove_file(del_file);
}
// --- post-change version of the loop (new side of the diff) ---
for entry in entries.flatten() {
// Here, `entry` is a `DirEntry`.
if let Ok(fname) = entry.file_name().into_string() {
let vec_str: Vec<&str> = fname.split(".log").collect();
if !vec_str.is_empty() {
// Non-numeric prefixes parse as 0 and are therefore never deleted.
let hi = vec_str[0].parse::<usize>().unwrap_or(0);
if hi > height {
let del_file = wal_path.clone() + "/" + &fname;
println!("del file name {:?}", del_file);
// Best-effort delete: a failed removal is deliberately ignored.
let _ = remove_file(del_file);
}
}
}
}
}
}

/// Re-seeds the on-disk validator (authority) record so it reflects the state
/// at `height - 1`, as part of rolling the node back to `height`.
///
/// `DATA_PATH` is exported before constructing `AuthorityManage` —
/// presumably `AuthorityManage::new()` locates its backing storage via that
/// environment variable (TODO confirm against cita-common).
///
/// NOTE(review): assumes `height >= 1`; `height - 1` underflows (panics in
/// debug builds) when called with 0.
fn fix_authority_log(data_path: &str, height: usize, old_validators: Vec<Address>) {
    std::env::set_var(DATA_PATH, data_path);
    let mut auth = AuthorityManage::new();
    // Fixed typo in the log message: "authoritys" -> "authorities".
    println!("fix_authority_log old authorities -- {:?}", old_validators);
    // The same list is recorded as both current and previous authorities.
    auth.receive_authorities_list(height - 1, &old_validators, &old_validators);
}

fn fix_wal_index(data_path: &str, height: usize) -> Result<usize, io::Error> {
let idex_file = data_path.to_string() + WAL + "/index";
let mut ifs = OpenOptions::new()
Expand Down Expand Up @@ -127,15 +124,22 @@ fn fix_executor_db(data_path: &str, dst_height: u64) -> bool {
return false;
}

let database_config = DatabaseConfig::with_columns(db::NUM_COLUMNS);
let exec_db = Database::open(&database_config, &*exec_path).expect("exec DB file not found");
let database_config = Config::with_category_num(NUM_COLUMNS);
let exec_db = RocksDB::open(&exec_path, &database_config).expect("exec DB file not found");

let hash: H256 = exec_db
.read(db::COL_EXTRA, &extras::CurrentHash)
.expect("CurrentHash value not found");
let hdr: Header = exec_db
.read(db::COL_HEADERS, &hash)
.expect("CurrentHeader value not found");
let hash = exec_db
.get(Some(DataCategory::Extra), &CurrentHash.get_index().to_vec())
.unwrap_or(None)
.map(|h| rlp::decode::<H256>(&h))
.expect("current hash not get");

let hash_key = Hash2Header(hash).get_index();

let hdr = exec_db
.get(Some(DataCategory::Headers), &hash_key)
.unwrap_or(None)
.map(|h| rlp::decode::<Header>(&h))
.expect("hashe's header not found");

if hdr.number() < dst_height {
println!(
Expand All @@ -146,34 +150,41 @@ fn fix_executor_db(data_path: &str, dst_height: u64) -> bool {
//return false;
}

let dst_hash: H256 = exec_db
.read(db::COL_EXTRA, &dst_height)
.expect("Dst Hash value not found");

let dst_header: Option<Header> = exec_db.read(db::COL_HEADERS, &dst_hash);

if let Some(_dst_header) = dst_header {
let mut batch = DBTransaction::new();
batch.write(db::COL_EXTRA, &extras::CurrentHash, &dst_hash);
exec_db.write(batch).unwrap();
println!("write dst_hash is {:?}", dst_hash);
} else {
println!("Executor Dst header value not found");
return false;
}
let pkey = BlockNumber2Hash(dst_height).get_index().to_vec();
let dst_hash = exec_db
.get(Some(DataCategory::Extra), &pkey)
.unwrap_or(None)
.map(|h| rlp::decode::<H256>(&h))
.expect("dst hash not get");

let hash_header_key = Hash2Header(dst_hash).get_index().to_vec();

let _dst_header = exec_db
.get(Some(DataCategory::Headers), &hash_header_key)
.unwrap_or(None)
.map(|h| rlp::decode::<Header>(&h))
.expect("dst header not get");

exec_db
.insert(
Some(DataCategory::Extra),
CurrentHash.get_index().to_vec(),
rlp::encode(&dst_hash).into_vec(),
)
.expect("write current hash error");
println!("write dst_hash is {:?}", dst_hash);
true
}

fn fix_chain_db(data_path: &str, dst_height: u64) -> bool {
let chain_path = data_path.to_string() + NOSQL;

if !Path::new(&chain_path).exists() {
println!("chain db dir not exist");
return false;
}

let database_config = DatabaseConfig::with_columns(db::NUM_COLUMNS);
let chain_db = Database::open(&database_config, &*chain_path).expect("DB file not found");
let database_config = Config::with_category_num(NUM_COLUMNS);
let chain_db = RocksDB::open(&chain_path, &database_config).expect("DB file not found");

/* let hash: H256 = chain_db
.read(db::COL_EXTRA, &extras::CurrentHash)
Expand All @@ -190,13 +201,24 @@ fn fix_chain_db(data_path: &str, dst_height: u64) -> bool {
return false;
}
*/

let hkey = BlockNumber2Header(dst_height).get_index().to_vec();

let dst_header: Header = chain_db
.read(db::COL_HEADERS, &dst_height)
.get(Some(DataCategory::Headers), &hkey)
.unwrap_or(None)
.map(|hdr| rlp::decode(&hdr))
.expect("Dst header value not found");

let dst_hash = dst_header.hash().unwrap();
let mut batch = DBTransaction::new();
batch.write(db::COL_EXTRA, &extras::CurrentHash, &dst_hash);

chain_db
.insert(
Some(DataCategory::Extra),
CurrentHash.get_index().to_vec(),
rlp::encode(&dst_hash).into_vec(),
)
.expect("chain write current hash error");

//Tmp,To be deleted
/*{
Expand All @@ -218,33 +240,33 @@ fn fix_chain_db(data_path: &str, dst_height: u64) -> bool {
result[0] = extras::ExtrasIndex::BlockBodyHash as u8;
let yy = chain_db.get(db::COL_BODIES, &result).unwrap().unwrap();
println!("len get bodys {:?}",yy);
let proof = dst_header.proof();
let btf_proof = BftProof::from(proof.clone());
println!("btf proof -- {:?}" ,btf_proof);
}*/

let nh: Option<Header> = chain_db.read(db::COL_HEADERS, &(dst_height + 1));
if let Some(next_header) = nh {
let proof = next_header.proof();
let btf_proof = BftProof::from(proof.clone());
if btf_proof.height == dst_height as usize {
batch.write(db::COL_EXTRA, &extras::CurrentProof, &proof);

let pmsg = serialize(&btf_proof, Infinite).unwrap();
let hmsg = dst_hash.to_vec();
let _ = fix_height_log(data_path, dst_height, pmsg, hmsg);
}
} else {
println!(
"current proof not inserted,as height's {} not found",
dst_height
);
let hash_key = BlockNumber2Header(dst_height + 1).get_index();
let next_header = chain_db
.get(Some(DataCategory::Headers), &hash_key)
.unwrap_or(None)
.map(|h| rlp::decode::<Header>(&h))
.expect("current proof not inserted,as height's not found");

let proof = next_header.proof();
let btf_proof = BftProof::from(proof.clone());
if btf_proof.height == dst_height as usize {
chain_db
.insert(
Some(DataCategory::Extra),
CurrentProof.get_index().to_vec(),
rlp::encode(proof).into_vec(),
)
.expect("chain write current proof error");

let pmsg = serialize(&btf_proof, Infinite).unwrap();
let hmsg = dst_hash.to_vec();
let _ = fix_height_log(data_path, dst_height, pmsg, hmsg);
let validators: Vec<Address> = btf_proof.commits.keys().cloned().collect();
fix_authority_log(data_path, dst_height as usize, validators);
}

chain_db.write(batch).unwrap();
//println!("header is {:?}", dst_header);
println!("dst_hash is {:?}", dst_hash);
true
Expand Down
Loading

0 comments on commit 8c68cc0

Please sign in to comment.