diff --git a/src/cfg.rs b/src/cfg.rs index 73f81547..e1070fb7 100644 --- a/src/cfg.rs +++ b/src/cfg.rs @@ -4,9 +4,7 @@ use dirs::home_dir; use serde::{Deserialize, Serialize}; use std::{fs, path::Path, sync::Arc}; -use crate::da::{CelestiaConnection, LocalDataAvailabilityLayer}; - -use crate::da::DataAvailabilityLayer; +use crate::da::{CelestiaConnection, DataAvailabilityLayer, LocalDataAvailabilityLayer}; #[derive(Clone, Debug, Subcommand, Deserialize)] pub enum Commands { @@ -218,7 +216,7 @@ pub async fn initialize_da_layer(config: &Config) -> Arc Arc::new(da) as Arc, Err(e) => { - panic!("Failed to connect to Celestia: {}", e); + panic!("connecting to celestia: {}", e); } } } diff --git a/src/da.rs b/src/da.rs index ab9b3724..74d714ad 100644 --- a/src/da.rs +++ b/src/da.rs @@ -1,6 +1,6 @@ use crate::{ consts::CHANNEL_BUFFER_SIZE, - error::{DataAvailabilityError, DatabaseError, DeimosError, GeneralError}, + error::{DAResult, DataAvailabilityError, DeimosResult, GeneralError}, utils::Signable, zk_snark::{Bls12Proof, VerifyingKey}, }; @@ -42,54 +42,45 @@ impl TryFrom<&Blob> for EpochJson { fn try_from(value: &Blob) -> Result { // convert blob data to utf8 string let data_str = String::from_utf8(value.data.clone()).map_err(|e| { - GeneralError::ParsingError(format!("Could not convert blob data to utf8 string: {}", e)) + GeneralError::EncodingError(format!("encoding blob data to utf8 string: {}", e)) })?; - // convert utf8 string to EpochJson serde_json::from_str(&data_str) - .map_err(|e| GeneralError::ParsingError(format!("Could not parse epoch json: {}", e))) + .map_err(|e| GeneralError::DecodingError(format!("epoch json: {}", e))) } } impl Signable for EpochJson { - fn get_signature(&self) -> Result { + fn get_signature(&self) -> DeimosResult { match &self.signature { - Some(signature) => { - let signature = Signature::from_str(signature).map_err(|_| { - DeimosError::General(GeneralError::ParsingError( - "Cannot parse signature".to_string(), - )) - })?; - 
Ok(signature) - } - None => Err(DeimosError::General(GeneralError::MissingArgumentError)), + Some(signature) => Signature::from_str(signature) + .map_err(|e| GeneralError::ParsingError(format!("signature: {}", e)).into()), + None => Err(GeneralError::MissingArgumentError("signature".to_string()).into()), } } - fn get_content_to_sign(&self) -> Result<String, DeimosError> { + fn get_content_to_sign(&self) -> DeimosResult<String> { let mut copy = self.clone(); copy.signature = None; - serde_json::to_string(&copy).map_err(|_| { - DeimosError::General(GeneralError::ParsingError("Cannot serialize".to_string())) - }) + serde_json::to_string(&copy).map_err(|e| GeneralError::EncodingError(e.to_string()).into()) } - fn get_public_key(&self) -> Result<String, DeimosError> { + fn get_public_key(&self) -> DeimosResult<String> { + //TODO(@distractedm1nd): the below comment isn't a good enough argument to not return the public key, it should be fixed + // for epoch json the public key to verify is the one from the sequencer which should already be public and known to every light client // so if we use this function there should be an error - Err(DeimosError::Database(DatabaseError::NotFoundError( - "Public key not found".to_string(), - ))) + Err(GeneralError::MissingArgumentError("public key".to_string()).into()) } } #[async_trait] pub trait DataAvailabilityLayer: Send + Sync { - async fn get_message(&self) -> Result<u64, DataAvailabilityError>; - async fn initialize_sync_target(&self) -> Result<u64, DataAvailabilityError>; - async fn get(&self, height: u64) -> Result<Vec<EpochJson>, DataAvailabilityError>; - async fn submit(&self, epoch: &EpochJson) -> Result<u64, DataAvailabilityError>; - async fn start(&self) -> Result<(), DataAvailabilityError>; + async fn get_message(&self) -> DAResult<u64>; + async fn initialize_sync_target(&self) -> DAResult<u64>; + async fn get(&self, height: u64) -> DAResult<Vec<EpochJson>>; + async fn submit(&self, epoch: &EpochJson) -> DAResult<u64>; + async fn start(&self) -> DAResult<()>; } pub struct CelestiaConnection { @@ -105,23 +96,23 @@ pub struct NoopDataAvailabilityLayer {} #[async_trait] impl DataAvailabilityLayer for
NoopDataAvailabilityLayer { - async fn get_message(&self) -> Result { + async fn get_message(&self) -> DAResult { Ok(0) } - async fn initialize_sync_target(&self) -> Result { + async fn initialize_sync_target(&self) -> DAResult { Ok(0) } - async fn get(&self, _: u64) -> Result, DataAvailabilityError> { + async fn get(&self, _: u64) -> DAResult> { Ok(vec![]) } - async fn submit(&self, _: &EpochJson) -> Result { + async fn submit(&self, _: &EpochJson) -> DAResult { Ok(0) } - async fn start(&self) -> Result<(), DataAvailabilityError> { + async fn start(&self) -> DAResult<()> { Ok(()) } } @@ -139,14 +130,14 @@ impl CelestiaConnection { connection_string: &String, auth_token: Option<&str>, namespace_hex: &String, - ) -> Result { + ) -> DAResult { let (tx, rx) = channel(CHANNEL_BUFFER_SIZE); let client = Client::new(&connection_string, auth_token) .await .map_err(|e| { - DataAvailabilityError::InitializationError(format!( - "Websocket initialization failed: {}", + DataAvailabilityError::ConnectionError(format!( + "websocket initialization failed: {}", e )) })?; @@ -154,15 +145,20 @@ impl CelestiaConnection { let decoded_hex = match hex::decode(namespace_hex) { Ok(hex) => hex, Err(e) => { - return Err(DataAvailabilityError::InitializationError(format!( - "Hex decoding failed: {}", - e - ))) + return Err(DataAvailabilityError::GeneralError( + GeneralError::DecodingError(format!( + "decoding namespace '{}': {}", + namespace_hex, e + )), + )) } }; let namespace_id = Namespace::new_v0(&decoded_hex).map_err(|e| { - DataAvailabilityError::InitializationError(format!("Namespace creation failed: {}", e)) + DataAvailabilityError::GeneralError(GeneralError::EncodingError(format!( + "creating namespace '{}': {}", + namespace_hex, e + ))) })?; Ok(CelestiaConnection { @@ -176,25 +172,25 @@ impl CelestiaConnection { #[async_trait] impl DataAvailabilityLayer for CelestiaConnection { - async fn get_message(&self) -> Result { + async fn get_message(&self) -> DAResult { match 
self.synctarget_rx.lock().await.recv().await { Some(height) => Ok(height), None => Err(DataAvailabilityError::ChannelReceiveError), } } - async fn initialize_sync_target(&self) -> Result { + async fn initialize_sync_target(&self) -> DAResult { match HeaderClient::header_network_head(&self.client).await { Ok(extended_header) => Ok(extended_header.header.height.value()), Err(err) => Err(DataAvailabilityError::NetworkError(format!( - "Could not get network head from DA layer: {}", + "getting network head from da layer: {}", err ))), } } - async fn get(&self, height: u64) -> Result, DataAvailabilityError> { - debug! {"Getting epoch {} from DA layer", height}; + async fn get(&self, height: u64) -> DAResult> { + trace!("searching for epoch on da layer at height {}", height); match BlobClient::blob_get_all(&self.client, height, &[self.namespace_id]).await { Ok(blobs) => { let mut epochs = Vec::new(); @@ -202,10 +198,13 @@ impl DataAvailabilityLayer for CelestiaConnection { match EpochJson::try_from(blob) { Ok(epoch_json) => epochs.push(epoch_json), Err(_) => { - DataAvailabilityError::DataRetrievalError( - height, - "Could not parse epoch json for blob".to_string(), - ); + DataAvailabilityError::GeneralError(GeneralError::ParsingError( + format!( + "marshalling blob from height {} to epoch json: {}", + height, + serde_json::to_string(&blob).unwrap() + ), + )); } } } @@ -213,23 +212,27 @@ impl DataAvailabilityLayer for CelestiaConnection { } Err(err) => Err(DataAvailabilityError::DataRetrievalError( height, - format!("Could not get epoch from DA layer: {}", err), + format!("getting epoch from da layer: {}", err), )), } } - async fn submit(&self, epoch: &EpochJson) -> Result { - debug! 
{"Posting epoch {} to DA layer", epoch.height}; + async fn submit(&self, epoch: &EpochJson) -> DAResult { + debug!("posting epoch {} to da layer", epoch.height); let data = serde_json::to_string(&epoch).map_err(|e| { DataAvailabilityError::GeneralError(GeneralError::ParsingError(format!( - "Could not serialize epoch json: {}", + "serializing epoch json: {}", e ))) })?; - let blob = Blob::new(self.namespace_id.clone(), data.into_bytes()) - .map_err(|_| DataAvailabilityError::GeneralError(GeneralError::BlobCreationError))?; - debug!("submitted blob with commitment {:?}", serde_json::to_string(&blob.clone().commitment).unwrap()); + let blob = Blob::new(self.namespace_id.clone(), data.into_bytes()).map_err(|e| { + DataAvailabilityError::GeneralError(GeneralError::BlobCreationError(e.to_string())) + })?; + debug!( + "submitted blob with commitment {:?}", + serde_json::to_string(&blob.clone().commitment).unwrap() + ); trace!("blob: {:?}", serde_json::to_string(&blob).unwrap()); match self .client @@ -237,19 +240,19 @@ impl DataAvailabilityLayer for CelestiaConnection { .await { Ok(height) => Ok(height), - Err(err) => Err(DataAvailabilityError::NetworkError(format!( - "Could not submit epoch to DA layer: {}", - err - ))), + Err(err) => Err(DataAvailabilityError::SubmissionError( + epoch.height, + err.to_string(), + )), } } - async fn start(&self) -> Result<(), DataAvailabilityError> { + async fn start(&self) -> DAResult<()> { let mut header_sub = HeaderClient::header_subscribe(&self.client) .await .map_err(|e| { DataAvailabilityError::NetworkError(format!( - "Could not subscribe to header updates from DA layer: {}", + "subscribing to headers from da layer: {}", e )) })?; @@ -265,20 +268,18 @@ impl DataAvailabilityLayer for CelestiaConnection { debug!("sent sync target update for height {}", height); } Err(_) => { - DataAvailabilityError::SyncTargetError( - "sending".to_string(), - format!( - "Failed to send sync target update message for height {}", - height - ), - ); + 
DataAvailabilityError::SyncTargetError(format!( + "sending sync target update message for height {}", + height + )); } } } - Err(_) => { - DataAvailabilityError::NetworkError( - "Could not get header from DA layer".to_string(), - ); + Err(e) => { + DataAvailabilityError::NetworkError(format!( + "retrieving header from da layer: {}", + e + )); } } } @@ -295,15 +296,15 @@ impl LocalDataAvailabilityLayer { #[async_trait] impl DataAvailabilityLayer for LocalDataAvailabilityLayer { - async fn get_message(&self) -> Result { + async fn get_message(&self) -> DAResult { Ok(100) } - async fn initialize_sync_target(&self) -> Result { + async fn initialize_sync_target(&self) -> DAResult { Ok(0) // header starts always at zero in test cases } - async fn get(&self, height: u64) -> Result, DataAvailabilityError> { + async fn get(&self, height: u64) -> DAResult> { let mut file = File::open("data.json").expect("Unable to open file"); let mut contents = String::new(); file.lock_exclusive().expect("Unable to lock file"); @@ -326,7 +327,7 @@ impl DataAvailabilityLayer for LocalDataAvailabilityLayer { } } - async fn submit(&self, epoch: &EpochJson) -> Result { + async fn submit(&self, epoch: &EpochJson) -> DAResult { let mut file = OpenOptions::new() .read(true) .write(true) @@ -369,7 +370,7 @@ impl DataAvailabilityLayer for LocalDataAvailabilityLayer { Ok(epoch.height) } - async fn start(&self) -> Result<(), DataAvailabilityError> { + async fn start(&self) -> DAResult<()> { Ok(()) } } diff --git a/src/error.rs b/src/error.rs index a89775a3..98e025a7 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,92 +1,98 @@ use indexed_merkle_tree::error::MerkleTreeError; use thiserror::Error; +// Result alias for [`DeimosError`] +pub type DeimosResult = Result; + #[derive(Error, Debug)] pub enum DeimosError { - #[error("General error: {0}")] - General(GeneralError), - #[error("Database error: {0}")] - Database(DatabaseError), - #[error("Data availability error: {0}")] - 
DataAvailability(DataAvailabilityError), - #[error("Proof error: {0}")] - Proof(ProofError), - #[error("Merkle tree error: {0}")] - MerkleTree(MerkleTreeError), + #[error(transparent)] + General(#[from] GeneralError), + #[error(transparent)] + Database(#[from] DatabaseError), + #[error(transparent)] + DataAvailability(#[from] DataAvailabilityError), + #[error(transparent)] + Proof(#[from] ProofError), + #[error(transparent)] + MerkleTree(#[from] MerkleTreeError), } // general reusable errors #[derive(Error, Debug)] pub enum GeneralError { - #[error("Parsing error: {0}")] + #[error("parsing: {0}")] ParsingError(String), - #[error("Failed to create Blob object")] - BlobCreationError, - #[error("Hexadecimal decoding error: {0}")] - HexDecodingError(String), - #[error("Encoding error: {0}")] + #[error("creating blob object: {0}")] + BlobCreationError(String), + #[error("encoding: {0}")] EncodingError(String), - #[error("Decoding error: {0}")] + #[error("decoding: {0}")] DecodingError(String), - #[error("Required argument missing")] - MissingArgumentError, - #[error("Invalid public key")] + #[error("missing argument: {0}")] + MissingArgumentError(String), + #[error("invalid public key")] InvalidPublicKey, - #[error("Invalid signature")] + #[error("invalid signature")] InvalidSignature, - #[error("Failed to start webserver")] + #[error("starting webserver")] WebserverError, } #[derive(Error, Debug)] pub enum DatabaseError { - #[error("Failed to acquire lock on the Database connection")] + #[error("acquiring database lock")] LockError, - #[error("Failed to retrieve keys from {0} dictionary from the Database database")] + #[error("retrieving keys from {0} dictionary")] KeysError(String), #[error("{0} not found")] NotFoundError(String), - #[error("Failed to retrieve the input order list from the Database database")] + #[error("retrieving input order list")] GetInputOrderError, - #[error("Failed to write {0} to the Database database")] + #[error("writing {0} to database")]
WriteError(String), - #[error("Failed to delete {0} from the Database database")] + #[error("deleting {0} from database")] DeleteError(String), + #[error(transparent)] + GeneralError(#[from] GeneralError), } +// Result alias for [`DataAvailabilityError`] +pub type DAResult = Result; + #[derive(Error, Debug)] pub enum DataAvailabilityError { - #[error("Initialization error: {0}")] + #[error("initializing: {0}")] InitializationError(String), // TODO: is this error needed? doesn't seem to be used anywhere rn - #[error("Failed to establish connection: {0}")] + #[error("establishing connection to da layer: {0}")] ConnectionError(String), - #[error("The data channel has been closed")] + #[error("data channel is closed")] ChannelClosed, - #[error("Network error: {0}")] + #[error("da networking error: {0}")] NetworkError(String), - #[error("Data retrieval error at height {0}: {1}")] + #[error("retrieving data at height {0}: {1}")] DataRetrievalError(u64, String), - #[error("Error submitting data at height {0}: {1}")] + #[error("submitting epoch {0} to da layer: {1}")] SubmissionError(u64, String), - #[error("Error {0} new sync target: {1}")] - SyncTargetError(String, String), - #[error("Error receiving message from channel")] + #[error("setting new sync target: {0}")] + SyncTargetError(String), + #[error("receiving message on channel")] ChannelReceiveError, - #[error("General Deimos error: {0}")] + #[error(transparent)] GeneralError(#[from] GeneralError), } #[derive(Error, Debug)] pub enum ProofError { - #[error("Failed to generate proof")] - GenerationError, - #[error("Failed to verify proof")] - VerificationError, - #[error("Failed to deserialize G1Affine point")] + #[error("generating proof: {0}")] + GenerationError(String), + #[error("verifying proof: {0}")] + VerificationError(String), + #[error("deserializing G1Affine point")] G1AffineDeserializationError, - #[error("Failed to unpack proof components")] - ProofUnpackError, - #[error("Invalid proof format")] + 
#[error("unpacking proof components: {0}")] + ProofUnpackError(String), + #[error("invalid proof format")] InvalidFormatError, } diff --git a/src/node_types.rs b/src/node_types.rs index 1e631ba6..41ada7ed 100644 --- a/src/node_types.rs +++ b/src/node_types.rs @@ -1,11 +1,11 @@ use crate::{ consts::{CHANNEL_BUFFER_SIZE, DA_RETRY_COUNT, DA_RETRY_INTERVAL}, - error::DataAvailabilityError, + error::{DataAvailabilityError, DeimosResult}, }; use async_trait::async_trait; use crypto_hash::{hex_digest, Algorithm}; use ed25519_dalek::{Signer, SigningKey}; -use indexed_merkle_tree::{error::MerkleTreeError, node::Node, tree::IndexedMerkleTree}; +use indexed_merkle_tree::{node::Node, tree::IndexedMerkleTree}; use std::{self, sync::Arc, time::Duration}; use tokio::{ sync::{ @@ -31,7 +31,7 @@ use crate::{ #[async_trait] pub trait NodeType { - async fn start(self: Arc) -> std::result::Result<(), DeimosError>; + async fn start(self: Arc) -> DeimosResult<()>; // async fn stop(&self) -> Result<(), String>; } @@ -53,12 +53,10 @@ pub struct LightClient { #[async_trait] impl NodeType for Sequencer { - async fn start(self: Arc) -> std::result::Result<(), DeimosError> { + async fn start(self: Arc) -> DeimosResult<()> { // start listening for new headers to update sync target if let Err(e) = self.da.start().await { - return Err(DeimosError::DataAvailability( - DataAvailabilityError::InitializationError(e.to_string()), - )); + return Err(DataAvailabilityError::InitializationError(e.to_string()).into()); } let derived_keys = self.db.get_derived_keys(); @@ -80,13 +78,13 @@ impl NodeType for Sequencer { .ws .start(self.clone()) .await - .map_err(|_| DeimosError::General(GeneralError::WebserverError)) + .map_err(|_| GeneralError::WebserverError.into()) } } #[async_trait] impl NodeType for LightClient { - async fn start(self: Arc) -> std::result::Result<(), DeimosError> { + async fn start(self: Arc) -> DeimosResult<()> { // start listening for new headers to update sync target 
self.da.start().await.unwrap(); @@ -155,7 +153,7 @@ impl NodeType for LightClient { handle .await - .map_err(|_| DeimosError::General(GeneralError::WebserverError)) + .map_err(|_| GeneralError::WebserverError.into()) } } @@ -251,7 +249,7 @@ impl Sequencer { /// 3. Waits for a specified duration before starting the next epoch. /// 4. Calls `set_epoch_commitment` to fetch and set the commitment for the current epoch. /// 5. Repeats steps 2-4 periodically. - pub async fn finalize_epoch(&self) -> Result { + pub async fn finalize_epoch(&self) -> DeimosResult { let epoch = match self.db.get_epoch() { Ok(epoch) => epoch + 1, Err(_) => 0, @@ -264,8 +262,7 @@ impl Sequencer { // add the commitment for the operations ran since the last epoch let current_commitment = self - .create_tree() - .map_err(DeimosError::MerkleTree)? + .create_tree()? .get_commitment() .map_err(DeimosError::MerkleTree)?; @@ -284,7 +281,7 @@ impl Sequencer { let prev_epoch = epoch - 1; self.db.get_commitment(&prev_epoch).unwrap() } else { - let empty_commitment = self.create_tree().map_err(DeimosError::MerkleTree)?; + let empty_commitment = self.create_tree()?; empty_commitment .get_commitment() .map_err(DeimosError::MerkleTree)? 
@@ -307,10 +304,8 @@ impl Sequencer { }; let serialized_epoch_json_without_signature = - serde_json::to_string(&epoch_json).map_err(|_| { - DeimosError::General(GeneralError::ParsingError( - "Cannot parse epoch json".to_string(), - )) + serde_json::to_string(&epoch_json).map_err(|e| { + GeneralError::ParsingError(format!("epoch json: {}", e.to_string()).into()) })?; let signature = self .key @@ -321,14 +316,14 @@ impl Sequencer { Ok(epoch_json_with_signature) } - async fn get_message(&self) -> std::result::Result { + async fn get_message(&self) -> DeimosResult { match self.epoch_buffer_rx.lock().await.recv().await { Some(epoch) => Ok(epoch), - None => Err(DataAvailabilityError::ChannelReceiveError), + None => Err(DataAvailabilityError::ChannelReceiveError.into()), } } - pub fn create_tree(&self) -> Result { + pub fn create_tree(&self) -> DeimosResult { // TODO: better error handling (#11) // Retrieve the keys from input order and sort them. let ordered_derived_dict_keys: Vec = @@ -402,7 +397,7 @@ impl Sequencer { } // create tree, setting left / right child property for each node - IndexedMerkleTree::new(nodes) + IndexedMerkleTree::new(nodes).map_err(DeimosError::MerkleTree) } /// Updates an entry in the database based on the given operation, incoming entry, and the signature from the user. @@ -412,13 +407,7 @@ impl Sequencer { /// * `operation` - An `Operation` enum variant representing the type of operation to be performed (Add or Revoke). /// * `incoming_entry` - A reference to an `IncomingEntry` struct containing the key and the entry data to be updated. /// * `signature` - A `Signature` struct representing the signature. - /// - /// # Returns - /// - /// * `true` if the operation was successful and the entry was updated. - /// * `false` if the operation was unsuccessful, e.g., due to an invalid signature or other errors. 
- /// - pub fn update_entry(&self, signature: &UpdateEntryJson) -> bool { + pub fn update_entry(&self, signature: &UpdateEntryJson) -> DeimosResult<()> { debug!( "updating entry for uid {} with msg {}", signature.id, signature.signed_message @@ -426,19 +415,23 @@ impl Sequencer { let signed_content = match verify_signature(signature, Some(signature.public_key.clone())) { Ok(content) => content, Err(_) => { + // TODO(@distractedm1nd): Add to error instead of logging error!( "updating entry for uid {}: invalid signature with pubkey {} on msg {}", signature.id, signature.public_key, signature.signed_message ); - return false; + return Err(GeneralError::InvalidSignature.into()); } }; let message_obj: IncomingEntry = match serde_json::from_str(&signed_content) { Ok(obj) => obj, Err(e) => { - error!("parsing signed content: {}", e); - return false; + return Err(GeneralError::ParsingError(format!( + "signed content: {}", + e.to_string() + )) + .into()); } }; @@ -478,7 +471,7 @@ impl Sequencer { .set_derived_entry(&incoming_entry, &new_chain_entry, false) .unwrap(); - true + Ok(()) } Err(_) => { debug!("Hashchain does not exist, creating new one..."); @@ -504,7 +497,7 @@ impl Sequencer { .set_derived_entry(&incoming_entry, new_chain.last().unwrap(), true) .unwrap(); - true + Ok(()) } } } diff --git a/src/storage.rs b/src/storage.rs index bb4ac654..99180fb7 100644 --- a/src/storage.rs +++ b/src/storage.rs @@ -14,7 +14,7 @@ use std::{self, fmt::Display, sync::Mutex}; use crate::cfg::RedisConfig; use crate::utils::Signable; use crate::{ - error::{DatabaseError, DeimosError, GeneralError}, + error::{DatabaseError, DeimosError, DeimosResult, GeneralError}, utils::parse_json_to_proof, }; @@ -67,52 +67,57 @@ pub struct UpdateEntryJson { pub public_key: String, } -fn decode_signed_message(signed_message: &String) -> Result, DeimosError> { +fn decode_signed_message(signed_message: &String) -> DeimosResult> { let signed_message_bytes = general_purpose::STANDARD 
.decode(&signed_message) - .map_err(|_| { - DeimosError::General(GeneralError::DecodingError( - "failed to decode signed message".to_string(), - )) + .map_err(|e| { + DeimosError::General(GeneralError::DecodingError(format!( + "signed message: {}", + e.to_string() + ))) })?; // check if the signed message is (at least) 64 bytes long if signed_message_bytes.len() < 64 { - return Err(DeimosError::General(GeneralError::ParsingError( - "signed message is too short".to_string(), - ))); + return Err(GeneralError::ParsingError(format!( + "signed message is too short: {} < 64", + signed_message_bytes.len(), + )) + .into()); } else { Ok(signed_message_bytes) } } impl Signable for UpdateEntryJson { - fn get_signature(&self) -> Result { + fn get_signature(&self) -> DeimosResult { let signed_message_bytes = decode_signed_message(&self.signed_message)?; // extract the first 64 bytes from the signed message which are the signature let signature_bytes: &[u8; 64] = match signed_message_bytes.get(..64) { Some(array_section) => match array_section.try_into() { Ok(array) => array, - Err(_) => Err(DeimosError::General(GeneralError::ParsingError( - "failed to convert signed message to array".to_string(), - )))?, + Err(e) => Err(DeimosError::General(GeneralError::DecodingError(format!( + "signed message to array: {}", + e + ))))?, }, - None => Err(DeimosError::General(GeneralError::ParsingError( - "failed to extract signature from signed message".to_string(), - )))?, + None => Err(DeimosError::General(GeneralError::DecodingError(format!( + "extracting signature from signed message: {}", + &self.signed_message + ))))?, }; Ok(Signature::from_bytes(signature_bytes)) } - fn get_content_to_sign(&self) -> Result { + fn get_content_to_sign(&self) -> DeimosResult { let signed_message_bytes = decode_signed_message(&self.signed_message)?; let message_bytes = &signed_message_bytes[64..]; Ok(String::from_utf8_lossy(message_bytes).to_string()) } - fn get_public_key(&self) -> Result { + fn 
get_public_key(&self) -> DeimosResult { Ok(self.public_key.clone()) } } @@ -130,7 +135,7 @@ pub struct RedisConnections { pub trait Database: Send + Sync { fn get_keys(&self) -> Result, DatabaseError>; fn get_derived_keys(&self) -> Result, DatabaseError>; - fn get_hashchain(&self, key: &String) -> Result, DeimosError>; + fn get_hashchain(&self, key: &String) -> Result, DatabaseError>; fn get_derived_value(&self, key: &String) -> Result; fn get_derived_keys_in_order(&self) -> Result, DatabaseError>; fn get_commitment(&self, epoch: &u64) -> Result; @@ -225,31 +230,23 @@ impl Database for RedisConnections { Ok(keys) } - fn get_hashchain(&self, key: &String) -> Result, DeimosError> { + fn get_hashchain(&self, key: &String) -> Result, DatabaseError> { let mut con = self .main_dict .lock() - .map_err(|_| DeimosError::Database(DatabaseError::LockError))?; - let value: String = con.get(key).map_err(|_| { - DeimosError::Database(DatabaseError::NotFoundError(format!("Key: {}", key))) - })?; - - let chain: Vec = serde_json::from_str(&value).map_err(|_| { - DeimosError::General(GeneralError::ParsingError(format!( - "failed to parse hashchain" - ))) - })?; + .map_err(|_| DatabaseError::LockError)?; + let value: String = con + .get(key) + .map_err(|_| DatabaseError::NotFoundError(format!("key: {}", key)))?; - Ok(chain) + serde_json::from_str(&value) + .map_err(|e| GeneralError::ParsingError(format!("hashchain: {}", e)).into()) } fn get_derived_value(&self, key: &String) -> Result { let mut con = self.lock_connection(&self.derived_dict)?; - let derived_value: String = con - .get(key) - .map_err(|_| DatabaseError::NotFoundError(format!("Key: {}", key)))?; - - Ok(derived_value) + con.get(key) + .map_err(|_| DatabaseError::NotFoundError(format!("key: {}", key))) } // TODO: noticed a strange behavior with the get_derived_keys() function, it returns the values in seemingly random order. 
Need to investigate more @@ -259,41 +256,36 @@ impl Database for RedisConnections { let mut input_con = self.lock_connection(&self.input_order)?; // The lrange method returns a list of the elements between two indices. 0 and -1 mean the first and last element, i.e. the entire list. - let order: Vec = input_con + input_con .lrange("input_order", 0, -1) - .map_err(|_| DatabaseError::GetInputOrderError)?; - - Ok(order) + .map_err(|_| DatabaseError::GetInputOrderError) } fn get_commitment(&self, epoch: &u64) -> Result { let mut con = self.lock_connection(&self.commitments)?; - let commitment = match con.get::<&str, String>(&format!("epoch_{}", epoch)) { + match con.get::<&str, String>(&format!("epoch_{}", epoch)) { Ok(value) => { let trimmed_value = value.trim_matches('"').to_string(); Ok(trimmed_value) } Err(_) => Err(DatabaseError::NotFoundError(format!( - "Commitment from epoch_{}", + "commitment from epoch_{}", epoch ))), - }; - commitment + } } fn get_proof(&self, id: &String) -> Result { let mut con = self.lock_connection(&self.merkle_proofs)?; - let proof = con - .get(id) - .map_err(|_| DatabaseError::NotFoundError(format!("Proof with id: {}", id)))?; - Ok(proof) + con.get(id) + .map_err(|_| DatabaseError::NotFoundError(format!("Proof with id: {}", id))) } fn get_proofs_in_epoch(&self, epoch: &u64) -> Result, DatabaseError> { let mut con = self.lock_connection(&self.merkle_proofs)?; let mut epoch_proofs: Vec = con .keys::<&String, Vec>(&format!("epoch_{}*", epoch)) - .map_err(|_| DatabaseError::NotFoundError(format!("Epoch: {}", epoch)))?; + .map_err(|_| DatabaseError::NotFoundError(format!("epoch: {}", epoch)))?; // Sort epoch_proofs by extracting epoch number and number within the epoch epoch_proofs.sort_by(|a, b| { @@ -324,29 +316,27 @@ impl Database for RedisConnections { let mut con = self.lock_connection(&self.app_state)?; let epoch: u64 = con .get("epoch") - .map_err(|_| DatabaseError::NotFoundError(format!("Current epoch")))?; + .map_err(|_| 
DatabaseError::NotFoundError(format!("current epoch")))?; Ok(epoch) } fn get_epoch_operation(&self) -> Result { let mut con = self.lock_connection(&self.app_state)?; - let epoch_operation: u64 = con - .get("epoch_operation") - .map_err(|_| DatabaseError::NotFoundError(format!("Epoch operation")))?; - Ok(epoch_operation) + con.get("epoch_operation") + .map_err(|_| DatabaseError::NotFoundError(format!("epoch operation"))) } fn set_epoch(&self, epoch: &u64) -> Result<(), DatabaseError> { let mut con = self.lock_connection(&self.app_state)?; con.set::<&str, &u64, String>("epoch", epoch) - .map_err(|_| DatabaseError::WriteError(format!("Epoch: {}", epoch)))?; - Ok(()) // TODO: should we return the written string instead of ()? + .map_err(|_| DatabaseError::WriteError(format!("epoch: {}", epoch)))?; + Ok(()) } fn reset_epoch_operation_counter(&self) -> Result<(), DatabaseError> { let mut con = self.lock_connection(&self.app_state)?; con.set::<&str, &u64, String>("epoch_operation", &0) - .map_err(|_| DatabaseError::WriteError(format!("reset operations to 0")))?; + .map_err(|_| DatabaseError::WriteError(format!("epoch_operation->0")))?; Ok(()) } @@ -360,9 +350,7 @@ impl Database for RedisConnections { .lock() .map_err(|_| DeimosError::Database(DatabaseError::LockError))?; let value = serde_json::to_string(&value).map_err(|_| { - DeimosError::General(GeneralError::ParsingError(format!( - "failed to parse hashchain to string" - ))) + DeimosError::General(GeneralError::ParsingError(format!("hashchain to string"))) })?; con.set::<&String, String, String>(&incoming_entry.id, value) .map_err(|_| { @@ -404,8 +392,7 @@ impl Database for RedisConnections { .lock() .map_err(|_| DeimosError::Database(DatabaseError::LockError))?; - let epochs: Result, DeimosError> = con - .keys::<&str, Vec>("*") + con.keys::<&str, Vec>("*") .map_err(|_| { DeimosError::Database(DatabaseError::NotFoundError("Commitments".to_string())) })? 
@@ -417,18 +404,13 @@ impl Database for RedisConnections { ))) }) }) - .collect(); - - epochs + .collect() } fn increment_epoch_operation(&self) -> Result { let mut con = self.lock_connection(&self.app_state)?; - let incremented_epoch = con - .incr::<&'static str, u64, u64>("epoch_operation", 1) - .map_err(|_| DatabaseError::WriteError(format!("incremented epoch")))?; - - Ok(incremented_epoch) + con.incr::<&'static str, u64, u64>("epoch_operation", 1) + .map_err(|_| DatabaseError::WriteError(format!("incremented epoch"))) } fn add_merkle_proof( @@ -574,7 +556,6 @@ mod tests { let mut keys = redis_connections.get_keys().unwrap(); keys.sort(); - // Überprüfe, ob die zurückgegebenen Schlüssel korrekt sind let expected_keys: Vec = vec![ "test_key1".to_string(), "test_key2".to_string(), @@ -740,7 +721,7 @@ mod tests { assert!(hashchain.is_err()); let error = hashchain.unwrap_err(); assert!( - matches!(error, DeimosError::Database(DatabaseError::NotFoundError(msg)) if msg == "Key: missing_test_key") + matches!(error, DatabaseError::NotFoundError(msg) if msg == "key: missing_test_key") ); teardown(&redis_connections); @@ -783,7 +764,7 @@ mod tests { assert!(hashchain.is_err()); let error = hashchain.unwrap_err(); assert!( - matches!(error, DeimosError::General(GeneralError::ParsingError(msg)) if msg == "failed to parse hashchain") + matches!(error, DatabaseError::GeneralError(GeneralError::ParsingError(msg)) if msg == "failed to parse hashchain") ); teardown(&redis_connections); diff --git a/src/utils.rs b/src/utils.rs index 4dca2fb2..eec7445a 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,5 +1,5 @@ use crate::{ - error::{DeimosError, GeneralError, ProofError}, + error::{DeimosError, DeimosResult, GeneralError, ProofError}, storage::{ChainEntry, Operation}, zk_snark::{ hex_to_scalar, InsertMerkleProofCircuit, ProofVariantCircuit, UpdateMerkleProofCircuit, @@ -39,24 +39,21 @@ pub fn parse_json_to_proof(json_str: &str) -> Result Result { +pub fn 
decode_public_key(pub_key_str: &String) -> DeimosResult { // decode the public key from base64 string to bytes - let public_key_bytes = engine.decode(pub_key_str).map_err(|e| { - GeneralError::DecodingError(format!("Error while decoding hex string: {}", e)) - })?; + let public_key_bytes = engine + .decode(pub_key_str) + .map_err(|e| GeneralError::DecodingError(format!("hex string: {}", e)))?; - let public_key_array: [u8; 32] = public_key_bytes.try_into().map_err(|_| { - GeneralError::ParsingError("Error while converting Vec to [u8; 32]".to_string()) - })?; + let public_key_array: [u8; 32] = public_key_bytes + .try_into() + .map_err(|_| GeneralError::ParsingError("Vec to [u8; 32]".to_string()))?; - let public_key = Ed25519VerifyingKey::from_bytes(&public_key_array).map_err(|_| { - GeneralError::DecodingError("Unable to decode ed25519 verifying key".to_string()) - })?; - - Ok(public_key) + Ed25519VerifyingKey::from_bytes(&public_key_array) + .map_err(|_| GeneralError::DecodingError("ed25519 verifying key".to_string()).into()) } -pub fn validate_proof(proof_value: String) -> Result<(), DeimosError> { +pub fn validate_proof(proof_value: String) -> DeimosResult<()> { if let Ok((non_membership_proof, first_proof, second_proof)) = serde_json::from_str::<(NonMembershipProof, UpdateProof, UpdateProof)>(&proof_value) { @@ -70,7 +67,11 @@ pub fn validate_proof(proof_value: String) -> Result<(), DeimosError> { insertion_circuit.create_and_verify_snark()?; Ok(()) } else { - Err(DeimosError::Proof(ProofError::VerificationError)) + // TODO: could insertion_proof.verify() maybe return a more detailed error to use? 
+ Err( + ProofError::VerificationError("insertion proof could not be verified".to_string()) + .into(), + ) } } else if let Ok(proof) = serde_json::from_str::(&proof_value) { if proof.verify() { @@ -78,32 +79,40 @@ pub fn validate_proof(proof_value: String) -> Result<(), DeimosError> { update_circuit.create_and_verify_snark()?; Ok(()) } else { - Err(DeimosError::Proof(ProofError::VerificationError)) + Err( + ProofError::VerificationError("update proof could not be verified".to_string()) + .into(), + ) } } else { - Err(DeimosError::Proof(ProofError::InvalidFormatError)) + Err(ProofError::InvalidFormatError.into()) } } pub fn create_and_verify_snark( circuit: ProofVariantCircuit, scalars: Vec, -) -> Result<(groth16::Proof, VerifyingKey), DeimosError> { +) -> DeimosResult<(groth16::Proof, VerifyingKey)> { let rng = &mut OsRng; trace!("creating parameters with BLS12-381 pairing-friendly elliptic curve construction...."); - let params = groth16::generate_random_parameters::(circuit.clone(), rng) - .map_err(|_| DeimosError::Proof(ProofError::ProofUnpackError))?; + let params = + groth16::generate_random_parameters::(circuit.clone(), rng).map_err(|e| { + DeimosError::Proof(ProofError::ProofUnpackError(format!( + "generating random params: {}", + e + ))) + })?; trace!("creating proof for zkSNARK..."); let proof = groth16::create_random_proof(circuit, ¶ms, rng) - .map_err(|_| DeimosError::Proof(ProofError::GenerationError))?; + .map_err(|e| DeimosError::Proof(ProofError::GenerationError(e.to_string())))?; trace!("preparing verifying key for zkSNARK..."); let pvk = groth16::prepare_verifying_key(¶ms.vk); groth16::verify_proof(&pvk, &proof, &scalars) - .map_err(|_| DeimosError::Proof(ProofError::VerificationError))?; + .map_err(|e| DeimosError::Proof(ProofError::VerificationError(e.to_string())))?; Ok((proof, params.vk)) } @@ -132,26 +141,27 @@ pub fn validate_epoch( debug!("validate_epoch: verifying zkSNARK proof..."); groth16::verify_proof(&pvk, &proof, &scalars) - 
.map_err(|_| DeimosError::Proof(ProofError::VerificationError))?; + .map_err(|e| DeimosError::Proof(ProofError::VerificationError(e.to_string())))?; debug!( "validate_epoch: zkSNARK with groth16 random parameters for epoch between commitment {} and {} was successfully verified!", previous_commitment, current_commitment ); + Ok(proof) } pub trait Signable { - fn get_signature(&self) -> Result; - fn get_content_to_sign(&self) -> Result; - fn get_public_key(&self) -> Result; + fn get_signature(&self) -> DeimosResult; + fn get_content_to_sign(&self) -> DeimosResult; + fn get_public_key(&self) -> DeimosResult; } // verifies the signature of a given signable item and returns the content of the item if the signature is valid pub fn verify_signature( item: &T, optional_public_key: Option, -) -> Result { +) -> DeimosResult { let public_key_str = match optional_public_key { Some(key) => key, None => item.get_public_key()?, @@ -166,7 +176,7 @@ pub fn verify_signature( if public_key.verify(content.as_bytes(), &signature).is_ok() { Ok(content) } else { - Err(DeimosError::General(GeneralError::InvalidSignature)) + Err(GeneralError::InvalidSignature.into()) } } diff --git a/src/webserver.rs b/src/webserver.rs index f455faa4..ba42b528 100644 --- a/src/webserver.rs +++ b/src/webserver.rs @@ -16,7 +16,7 @@ use std::sync::Arc; use crate::{ cfg::WebServerConfig, - error::DeimosError, + error::DatabaseError, node_types::Sequencer, storage::{ChainEntry, DerivedEntry, Entry, UpdateEntryJson}, utils::{is_not_revoked, validate_proof}, @@ -104,7 +104,7 @@ async fn update_entry( let tree = session.create_tree().unwrap(); - let result: Result, DeimosError> = + let result: Result, DatabaseError> = session.db.get_hashchain(&signature_with_key.id); // if the entry already exists, an update must be performed, otherwise insert let update_proof = match result { @@ -113,42 +113,43 @@ async fn update_entry( Err(_) => false, }; - let update_successful = session.update_entry(&signature_with_key); - - 
if update_successful { - let new_tree = session.create_tree().unwrap(); - let hashed_id = sha256(&signature_with_key.id); - let node = new_tree.find_leaf_by_label(&hashed_id).unwrap(); - - let proofs = if update_proof { - let new_index = tree.clone().find_node_index(&node).unwrap(); - let update_proof = &tree.clone().update_node(new_index, node).unwrap(); - let pre_processed_string = serde_json::to_string(update_proof).unwrap(); - format!(r#"{{"Update":{}}}"#, pre_processed_string) - } else { - let pre_processed_string = - serde_json::to_string(&tree.clone().insert_node(&node).unwrap()).unwrap(); - format!(r#"{{"Insert":{}}}"#, pre_processed_string) - }; + match session.update_entry(&signature_with_key) { + Ok(_) => { + let new_tree = session.create_tree().unwrap(); + let hashed_id = sha256(&signature_with_key.id); + let node = new_tree.find_leaf_by_label(&hashed_id).unwrap(); + + let proofs = if update_proof { + let new_index = tree.clone().find_node_index(&node).unwrap(); + let update_proof = &tree.clone().update_node(new_index, node).unwrap(); + let pre_processed_string = serde_json::to_string(update_proof).unwrap(); + format!(r#"{{"Update":{}}}"#, pre_processed_string) + } else { + let pre_processed_string = + serde_json::to_string(&tree.clone().insert_node(&node).unwrap()).unwrap(); + format!(r#"{{"Insert":{}}}"#, pre_processed_string) + }; + + if let Err(err) = session.db.add_merkle_proof( + &epoch, + &epoch_operation, + &tree.get_commitment().unwrap(), + &proofs, + ) { + return HttpResponse::InternalServerError() + .json(format!("Error adding merkle proof: {}", err)); + } - if let Err(err) = session.db.add_merkle_proof( - &epoch, - &epoch_operation, - &tree.get_commitment().unwrap(), - &proofs, - ) { - return HttpResponse::InternalServerError() - .json(format!("Error adding merkle proof: {}", err)); - } + if let Err(err) = session.db.increment_epoch_operation() { + return HttpResponse::InternalServerError() + .json(format!("Error incrementing epoch 
operation: {}", err)); + } - if let Err(err) = session.db.increment_epoch_operation() { - return HttpResponse::InternalServerError() - .json(format!("Error incrementing epoch operation: {}", err)); + HttpResponse::Ok().body("Updated entry successfully") + } + Err(e) => { + HttpResponse::BadRequest().body(format!("Could not update entry: {}", e.to_string())) } - - HttpResponse::Ok().body("Updated entry successfully") - } else { - HttpResponse::BadRequest().body("Could not update entry") } } diff --git a/src/zk_snark/mod.rs b/src/zk_snark/mod.rs index ecee8d1c..99f1999c 100644 --- a/src/zk_snark/mod.rs +++ b/src/zk_snark/mod.rs @@ -1,5 +1,5 @@ use crate::{ - error::{DeimosError, GeneralError}, + error::{DeimosError, GeneralError, ProofError}, storage::ChainEntry, utils::create_and_verify_snark, }; @@ -64,8 +64,8 @@ pub fn decode_and_convert_to_g1affine(encoded_data: &String) -> Result Result Result<(Scalar, &Vec), Deimo let scalar_root = hex_to_scalar(proof.root_hash.as_str()).map_err(DeimosError::General)?; Ok((scalar_root, &proof.path)) } else { - Err(DeimosError::General(GeneralError::MissingArgumentError)) + Err(DeimosError::Proof(ProofError::ProofUnpackError(format!( + "proof path is empty for root hash {}", + proof.root_hash + )))) } } @@ -145,17 +148,14 @@ pub fn deserialize_custom_to_verifying_key( let delta_g2 = decode_and_convert_to_g2affine(&custom_vk.delta_g2)?; let gamma_g2 = decode_and_convert_to_g2affine(&custom_vk.gamma_g2)?; let ic = custom_vk.ic.split(",").try_fold(Vec::new(), |mut acc, s| { - let decoded = engine.decode(s).map_err(|_| { - DeimosError::General(GeneralError::DecodingError( - "Failed to decode ic".to_string(), - )) - })?; - let decoded_string = String::from_utf8(decoded).map_err(|_| { - DeimosError::General(GeneralError::ParsingError("Failed to parse ic".to_string())) - })?; + let decoded = engine + .decode(s) + .map_err(|e| DeimosError::General(GeneralError::DecodingError(format!("ic: {}", e))))?; + let decoded_string = 
String::from_utf8(decoded) + .map_err(|e| DeimosError::General(GeneralError::ParsingError(format!("ic: {}", e))))?; let ct_option = decode_and_convert_to_g1affine(&decoded_string)?; acc.push(ct_option); - Ok(acc) + Ok::, DeimosError>(acc) })?; Ok(bellman::groth16::VerifyingKey {