Skip to content

Commit

Permalink
feat: adds more features to white and blacklist behavior (#251)
Browse files Browse the repository at this point in the history
* feat: adds more features to white and blacklist behavior

* fix spelling

* Update README.md

Co-authored-by: Harald Hoyer <[email protected]>

* Update Cargo.toml

---------

Co-authored-by: Harald Hoyer <[email protected]>
  • Loading branch information
montekki and haraldh authored Oct 26, 2023
1 parent 4127841 commit 73bbae7
Show file tree
Hide file tree
Showing 7 changed files with 109 additions and 60 deletions.
4 changes: 3 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

13 changes: 13 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -51,12 +51,25 @@ Deployment is done by deploying a dockerized image of the service.
| `BATCH_FINALIZATION_GAS_LIMIT` | The gas limit of the finalization of the whole batch in a call to `finalizeWithdrawals` in the Withdrawal Finalizer Contract |
| `WITHDRAWAL_FINALIZER_ACCOUNT_PRIVATE_KEY` | The private key of the account that is going to submit finalization transactions |
| `TX_RETRY_TIMEOUT_SECS` | Number of seconds to wait for a potentially stuck finalization transaction before readjusting its fees |
| `TOKENS_TO_FINALIZE` | Configures the sets of tokens this instance of finalizer will finalize. It may be configured as a whitelist, a blacklist, a wildcard or completely disable any finalization. For more info see below. |

The configuration structure describing the service config can be found in [`config.rs`](https://github.com/matter-labs/zksync-withdrawal-finalizer/blob/main/bin/withdrawal-finalizer/src/config.rs)

** more about zkSync contracts can be found [here](https://github.com/matter-labs/era-contracts/blob/main/docs/Overview.md)

## Configuring tokens to finalize

It may be handy to limit the set of tokens the Finalizer finalizes. This
configuration may be specified by setting a rule in the `TOKENS_TO_FINALIZE` value.
If this environment variable is not set, then by default the Finalizer will only
finalize the ETH token (`0x000...0800a`).

You may specify `All`, `None`, `BlackList` or `WhiteList` as json documents:

1. `TOKENS_TO_FINALIZE = '"All"'` - Finalize everything
1. `TOKENS_TO_FINALIZE = '"None"'` - Finalize nothing
1. `TOKENS_TO_FINALIZE = '{ "WhiteList":[ "0x3355df6D4c9C3035724Fd0e3914dE96A5a83aaf4" ] }'` - Finalize only these tokens
1. `TOKENS_TO_FINALIZE = '{ "BlackList":[ "0x3355df6D4c9C3035724Fd0e3914dE96A5a83aaf4" ] }'` - Finalize all tokens but these

## License

Expand Down
1 change: 0 additions & 1 deletion bin/withdrawal-finalizer/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@ edition = "2021"
color-eyre = { workspace = true }
ethers = { workspace = true, default-features = false, features = ["ws", "rustls"] }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["full"] }
url = { workspace = true, features = ["serde"] }
eyre = { workspace = true }
Expand Down
22 changes: 3 additions & 19 deletions bin/withdrawal-finalizer/src/config.rs
Original file line number Diff line number Diff line change
@@ -1,22 +1,9 @@
use std::str::FromStr;

use envconfig::Envconfig;
use ethers::types::Address;
use finalizer::TokenList;
use serde::Deserialize;
use url::Url;

/// A newtype over a list of token addresses, deserialized from a JSON
/// array (used by the `envconfig`-driven fields of [`Config`] below).
#[derive(Deserialize, Debug)]
pub struct TokenList(pub Vec<Address>);

impl FromStr for TokenList {
type Err = serde_json::Error;

fn from_str(s: &str) -> Result<Self, Self::Err> {
let res = serde_json::from_str(s)?;
Ok(TokenList(res))
}
}

/// Withdrawal finalizer configuration.
///
/// Can be read from
Expand Down Expand Up @@ -77,9 +64,6 @@ pub struct Config {
#[envconfig(from = "TX_RETRY_TIMEOUT_SECS")]
pub tx_retry_timeout: usize,

#[envconfig(from = "TOKEN_WHITELIST")]
pub token_whitelist: Option<TokenList>,

#[envconfig(from = "TOKEN_BLACKLIST")]
pub token_blacklist: Option<TokenList>,
#[envconfig(from = "TOKENS_TO_FINALIZE")]
pub tokens_to_finalize: Option<TokenList>,
}
5 changes: 1 addition & 4 deletions bin/withdrawal-finalizer/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -275,10 +275,7 @@ async fn main() -> Result<()> {
l1_bridge,
config.tx_retry_timeout,
finalizer_account_address,
finalizer::TokensRestrictions::new(
config.token_whitelist.map(|t| t.0),
config.token_blacklist.map(|t| t.0),
),
config.tokens_to_finalize.unwrap_or_default(),
);
let finalizer_handle = tokio::spawn(finalizer.run(client_l2));

Expand Down
5 changes: 5 additions & 0 deletions finalizer/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,13 @@ sqlx = { workspace = true, features = ["postgres", "runtime-tokio-rustls"] }
tokio = { workspace = true }
tracing = { workspace = true }
vise = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }

client = { workspace = true }
storage = { workspace = true }
tx-sender = { workspace = true }
withdrawals-meterer = { workspace = true }

[dev-dependencies]
pretty_assertions = { workspace = true }
119 changes: 84 additions & 35 deletions finalizer/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

//! Finalization logic implementation.
use std::{collections::HashSet, time::Duration};
use std::{collections::HashSet, str::FromStr, time::Duration};

use accumulator::WithdrawalsAccumulator;
use ethers::{
Expand All @@ -14,6 +14,7 @@ use ethers::{
types::{H256, U256},
};
use futures::TryFutureExt;
use serde::Deserialize;
use sqlx::PgPool;

use client::{
Expand All @@ -35,33 +36,6 @@ mod accumulator;
mod error;
mod metrics;

/// A configuration of a blacklist or whitelist policy for finalizing tokens.
///
/// Constructed via [`TokensRestrictions::new`].
pub enum TokensRestrictions {
    /// Only finalize the whitelisted tokens
    WhiteList(Vec<Address>),

    /// Finalize all tokens except for this blacklist
    BlackList(Vec<Address>),
}

impl TokensRestrictions {
    /// Create a new token restrictions set.
    ///
    /// If both lists are given the whitelist takes priority and the
    /// blacklist is ignored; if neither is given, returns `None`.
    pub fn new(
        token_whitelist: Option<Vec<Address>>,
        token_blacklist: Option<Vec<Address>>,
    ) -> Option<Self> {
        // The parameters are already owned `Vec`s, so move them into the
        // variants directly instead of cloning them via `to_vec()`.
        token_whitelist
            .map(Self::WhiteList)
            .or_else(|| token_blacklist.map(Self::BlackList))
    }
}

/// A limit to cap a transaction fee (in ether) for safety reasons.
const TX_FEE_LIMIT: f64 = 0.8;

Expand All @@ -71,6 +45,34 @@ const OUT_OF_FUNDS_BACKOFF: Duration = Duration::from_secs(10);
/// Backoff period if one of the loop iterations has failed.
const LOOP_ITERATION_ERROR_BACKOFF: Duration = Duration::from_secs(5);

/// An `enum` that defines a set of tokens that Finalizer finalizes.
///
/// Deserialized from JSON (e.g. the `TOKENS_TO_FINALIZE` environment
/// variable): `"All"`, `"None"`, `{ "WhiteList": [...] }` or
/// `{ "BlackList": [...] }`.
#[derive(Deserialize, Debug, Eq, PartialEq)]
pub enum TokenList {
    /// Finalize all known tokens
    All,
    /// Finalize nothing
    None,
    /// Finalize everything but these tokens, this is a blacklist.
    BlackList(Vec<Address>),
    /// Finalize nothing but these tokens, this is a whitelist.
    WhiteList(Vec<Address>),
}

impl Default for TokenList {
    /// By default only the ETH token is finalized — a whitelist containing
    /// just `client::ETH_TOKEN_ADDRESS`. Used when no explicit token list
    /// is configured.
    fn default() -> Self {
        Self::WhiteList(vec![client::ETH_TOKEN_ADDRESS])
    }
}

impl FromStr for TokenList {
    type Err = serde_json::Error;

    /// Parses a `TokenList` from its JSON representation, e.g. `"All"`,
    /// `"None"`, `{ "WhiteList": [...] }` or `{ "BlackList": [...] }`.
    ///
    /// # Errors
    ///
    /// Returns the underlying [`serde_json::Error`] if `s` is not a valid
    /// JSON encoding of any variant.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        // `serde_json::from_str` already yields exactly the required
        // `Result` — no intermediate binding and `?` needed.
        serde_json::from_str(s)
    }
}

/// Finalizer.
pub struct Finalizer<M1, M2> {
pgpool: PgPool,
Expand All @@ -87,7 +89,7 @@ pub struct Finalizer<M1, M2> {
tx_retry_timeout: Duration,
account_address: Address,
withdrawals_meterer: WithdrawalsMeter,
token_restrictions: Option<TokensRestrictions>,
token_list: TokenList,
}

const NO_NEW_WITHDRAWALS_BACKOFF: Duration = Duration::from_secs(5);
Expand Down Expand Up @@ -115,7 +117,7 @@ where
l1_bridge: IL1Bridge<M>,
tx_retry_timeout: usize,
account_address: Address,
token_restrictions: Option<TokensRestrictions>,
token_list: TokenList,
) -> Self {
let withdrawals_meterer =
WithdrawalsMeter::new(pgpool.clone(), MeteringComponent::FinalizedWithdrawals);
Expand All @@ -136,7 +138,7 @@ where
tx_retry_timeout: Duration::from_secs(tx_retry_timeout as u64),
account_address,
withdrawals_meterer,
token_restrictions,
token_list,
}
}

Expand Down Expand Up @@ -318,27 +320,28 @@ where
async fn loop_iteration(&mut self) -> Result<()> {
tracing::debug!("begin iteration of the finalizer loop");

let try_finalize_these = match &self.token_restrictions {
None => {
let try_finalize_these = match &self.token_list {
TokenList::All => {
storage::withdrawals_to_finalize(&self.pgpool, self.query_db_pagination_limit)
.await?
}
Some(TokensRestrictions::WhiteList(w)) => {
TokenList::WhiteList(w) => {
storage::withdrawals_to_finalize_with_whitelist(
&self.pgpool,
self.query_db_pagination_limit,
w,
)
.await?
}
Some(TokensRestrictions::BlackList(b)) => {
TokenList::BlackList(b) => {
storage::withdrawals_to_finalize_with_blacklist(
&self.pgpool,
self.query_db_pagination_limit,
b,
)
.await?
}
TokenList::None => return Ok(()),
};

tracing::debug!("trying to finalize these {try_finalize_these:?}");
Expand Down Expand Up @@ -608,3 +611,49 @@ where

Ok(())
}

#[cfg(test)]
mod tests {
    use super::TokenList;
    use ethers::abi::Address;
    use pretty_assertions::assert_eq;

    /// Checks JSON deserialization of every `TokenList` variant: the unit
    /// variants are bare JSON strings, the list variants are single-key
    /// JSON objects holding an address array.
    #[test]
    fn tokens_list_de() {
        // Unit variants.
        let all: TokenList = serde_json::from_str("\"All\"").unwrap();
        let none: TokenList = serde_json::from_str("\"None\"").unwrap();
        assert_eq!(all, TokenList::All);
        assert_eq!(none, TokenList::None);

        // The address shared by both list-variant fixtures.
        let usdc_addr: Address = "0x3355df6D4c9C3035724Fd0e3914dE96A5a83aaf4"
            .parse()
            .unwrap();

        // Blacklist variant.
        let blocked_usdc: TokenList = serde_json::from_str(
            r#"
        {
            "BlackList":[
                "0x3355df6D4c9C3035724Fd0e3914dE96A5a83aaf4"
            ]
        }
        "#,
        )
        .unwrap();
        assert_eq!(blocked_usdc, TokenList::BlackList(vec![usdc_addr]));

        // Whitelist variant.
        let allowed_usdc: TokenList = serde_json::from_str(
            r#"
        {
            "WhiteList":[
                "0x3355df6D4c9C3035724Fd0e3914dE96A5a83aaf4"
            ]
        }
        "#,
        )
        .unwrap();
        assert_eq!(allowed_usdc, TokenList::WhiteList(vec![usdc_addr]));
    }
}

0 comments on commit 73bbae7

Please sign in to comment.