diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fbff77f73d..f2fe94240a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -34,7 +34,10 @@ jobs: run: | ci_run zk ci_run zk fmt --check - ci_run zk lint --check + ci_run zk lint rust --check + ci_run zk lint js --check + ci_run zk lint ts --check + ci_run zk lint md --check unit-tests: runs-on: [self-hosted, CI-worker] @@ -59,7 +62,7 @@ jobs: ci_run ln -s /usr/src/keys/setup keys/setup ci_run zk ci_run zk run verify-keys unpack - ci_run zk contract build-dev + ci_run zk contract build ci_run zk run deploy-erc20 dev ci_run zk run deploy-eip1271 ci_run zk db basic-setup @@ -148,7 +151,7 @@ jobs: ci_run ln -s /usr/src/keys/setup keys/setup ci_run zk ci_run zk run verify-keys unpack - ci_run zk contract build-dev + ci_run zk contract build - name: integration-testkit run: ci_run zk test integration testkit --offline diff --git a/Cargo.lock b/Cargo.lock index 18ef00a7b6..413e2a086d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5801,6 +5801,7 @@ dependencies = [ name = "zksync_config" version = "1.0.0" dependencies = [ + "chrono", "envy", "num", "serde", diff --git a/changelog/core.md b/changelog/core.md index dc7529dd88..a52e3cbae7 100644 --- a/changelog/core.md +++ b/changelog/core.md @@ -8,6 +8,8 @@ All notable changes to the core components will be documented in this file. ### Changed +- Rejected transactions are now stored in the database for 2 weeks only. + ### Added ### Fixed diff --git a/changelog/js-sdk.md b/changelog/js-sdk.md index 0b5a5a064a..fad7b2ea9f 100644 --- a/changelog/js-sdk.md +++ b/changelog/js-sdk.md @@ -8,6 +8,10 @@ All notable changes to `zksync.js` will be documented in this file. ### Changed +### Deprecated + +- WebSocket provider. + ### Fixed ## Version 0.8.4 diff --git a/changelog/rust-sdk.md b/changelog/rust-sdk.md index 9a00da61f1..2a5be30e06 100644 --- a/changelog/rust-sdk.md +++ b/changelog/rust-sdk.md @@ -4,6 +4,10 @@ All notable changes to `zksync_rs` will be documented in this file. ## Unreleased +### Added + +- Constructor of RpcProvider from address and network. + **Version 0.2.2** is being developed. 
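The rust-sdk changelog entry above only names the new feature, so here is a minimal usage sketch; the constructor name `from_addr_and_network`, its signature, and the import paths are assumptions inferred from the changelog wording rather than anything confirmed by this diff.

    // Hypothetical usage of the new RpcProvider constructor (names assumed):
    // point the provider at a custom JSON-RPC endpoint while still tagging it
    // with a known network.
    use zksync::{Network, RpcProvider};

    let provider = RpcProvider::from_addr_and_network(
        "http://127.0.0.1:3030", // local zkSync JSON-RPC address (illustrative)
        Network::Localhost,
    );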
### Added diff --git a/contracts/contracts/Verifier.sol b/contracts/contracts/Verifier.sol index a873b20fa4..7807904adf 100644 --- a/contracts/contracts/Verifier.sol +++ b/contracts/contracts/Verifier.sol @@ -59,8 +59,7 @@ contract Verifier is KeysWithPlonkVerifier, KeysWithPlonkVerifierOld, Config { uint128 _amount, uint256[] calldata _proof ) external view returns (bool) { - bytes32 commitment = - sha256(abi.encodePacked(uint256(_rootHash) & INPUT_MASK, _accountId, _owner, _tokenId, _amount)); + bytes32 commitment = sha256(abi.encodePacked(_rootHash, _accountId, _owner, _tokenId, _amount)); uint256[] memory inputs = new uint256[](1); inputs[0] = uint256(commitment) & INPUT_MASK; diff --git a/contracts/contracts/ZkSync.sol b/contracts/contracts/ZkSync.sol index 0a73b446b2..e09d1a9813 100644 --- a/contracts/contracts/ZkSync.sol +++ b/contracts/contracts/ZkSync.sol @@ -553,7 +553,7 @@ contract ZkSync is UpgradeableMaster, Storage, Config, Events, ReentrancyGuard { bool trigger = block.number >= priorityRequests[firstPriorityRequestId].expirationBlock && priorityRequests[firstPriorityRequestId].expirationBlock != 0; - if (trigger) { + if ($$(EASY_EXODUS) || trigger) { if (!exodusMode) { exodusMode = true; emit ExodusMode(); diff --git a/contracts/hardhat.config.ts b/contracts/hardhat.config.ts index a1e7843969..4614a80cc8 100644 --- a/contracts/hardhat.config.ts +++ b/contracts/hardhat.config.ts @@ -29,6 +29,8 @@ const localConfig = Object.assign({}, prodConfig); // @ts-ignore localConfig.UPGRADE_NOTICE_PERIOD = 0; localConfig.DUMMY_VERIFIER = process.env.CONTRACTS_TEST_DUMMY_VERIFIER === 'true'; +// @ts-ignore +localConfig.EASY_EXODUS = process.env.CONTRACTS_TEST_EASY_EXODUS === 'true'; const contractDefs = { rinkeby: testnetConfig, diff --git a/core/bin/key_generator/src/main.rs b/core/bin/key_generator/src/main.rs index eda670e532..af03745710 100644 --- a/core/bin/key_generator/src/main.rs +++ b/core/bin/key_generator/src/main.rs @@ -62,8 +62,8 @@ fn main() { create_verifier_contract(config); } Command::CircuitSize => { - count_gates_recursive_verification_keys(); calculate_and_print_max_zksync_main_circuit_size(); + count_gates_recursive_verification_keys(); } } } diff --git a/core/bin/zksync_api/src/api_server/rpc_server/mod.rs b/core/bin/zksync_api/src/api_server/rpc_server/mod.rs index b4512cede7..5dcf94fe9d 100644 --- a/core/bin/zksync_api/src/api_server/rpc_server/mod.rs +++ b/core/bin/zksync_api/src/api_server/rpc_server/mod.rs @@ -18,7 +18,7 @@ use zksync_storage::{ }, ConnectionPool, StorageProcessor, }; -use zksync_types::{tx::TxHash, Address, BatchFee, Fee, TokenLike, TxFeeTypes}; +use zksync_types::{tx::TxHash, Address, BatchFee, BlockNumber, Fee, TokenLike, TxFeeTypes}; // Local uses use crate::{ @@ -184,20 +184,24 @@ impl RpcApp { Some(block) } else { let mut storage = self.access_storage().await?; - let block = storage + let blocks = storage .chain() .block_schema() - .find_block_by_height_or_hash(block_number.to_string()) - .await; + .load_block_range(BlockNumber(block_number as u32), 1) + .await + .unwrap_or_default(); - if let Some(block) = block.clone() { + if !blocks.is_empty() && blocks[0].block_number == block_number { // Unverified blocks can still change, so we can't cache them. 
- if block.verified_at.is_some() && block.block_number == block_number { - self.cache_of_blocks_info.insert(block_number, block); - } + self.cache_of_blocks_info + .insert(block_number, blocks[0].clone()); } - block + if !blocks.is_empty() { + Some(blocks[0].clone()) + } else { + None + } }; metrics::histogram!("api.rpc.get_block_info", start.elapsed()); diff --git a/core/bin/zksync_api/src/fee_ticker/ticker_api/coingecko.rs b/core/bin/zksync_api/src/fee_ticker/ticker_api/coingecko.rs index 4639e4556d..d96d84391a 100644 --- a/core/bin/zksync_api/src/fee_ticker/ticker_api/coingecko.rs +++ b/core/bin/zksync_api/src/fee_ticker/ticker_api/coingecko.rs @@ -59,16 +59,13 @@ impl TokenPriceAPI for CoinGeckoAPI { // If we use 2 day interval we will get hourly prices and not minute by minute which makes // response faster and smaller - let request = self + let market_chart = self .client .get(market_chart_url) - .query(&[("vs_currency", "usd"), ("days", "2")]); - - let api_request_future = tokio::time::timeout(REQUEST_TIMEOUT, request.send()); - - let market_chart = api_request_future + .timeout(REQUEST_TIMEOUT) + .query(&[("vs_currency", "usd"), ("days", "2")]) + .send() .await - .map_err(|_| anyhow::format_err!("CoinGecko API request timeout"))? .map_err(|err| anyhow::format_err!("CoinGecko API request failed: {}", err))? .json::() .await?; diff --git a/core/bin/zksync_api/src/fee_ticker/ticker_api/coinmarkercap.rs b/core/bin/zksync_api/src/fee_ticker/ticker_api/coinmarkercap.rs index a29965a069..4c2ec5495c 100644 --- a/core/bin/zksync_api/src/fee_ticker/ticker_api/coinmarkercap.rs +++ b/core/bin/zksync_api/src/fee_ticker/ticker_api/coinmarkercap.rs @@ -35,15 +35,16 @@ impl TokenPriceAPI for CoinMarketCapAPI { )) .expect("failed to join url path"); - let api_request_future = - tokio::time::timeout(REQUEST_TIMEOUT, self.client.get(request_url).send()); - - let mut api_response = api_request_future + let mut api_response = self + .client + .get(request_url) + .timeout(REQUEST_TIMEOUT) + .send() .await - .map_err(|_| anyhow::format_err!("Coinmarketcap API request timeout"))? .map_err(|err| anyhow::format_err!("Coinmarketcap API request failed: {}", err))? .json::() .await?; + let mut token_info = api_response .data .remove(&TokenLike::Symbol(token_symbol.to_string())) diff --git a/core/bin/zksync_api/src/fee_ticker/validator/watcher.rs b/core/bin/zksync_api/src/fee_ticker/validator/watcher.rs index fb3b4024f0..7eca39f5ed 100644 --- a/core/bin/zksync_api/src/fee_ticker/validator/watcher.rs +++ b/core/bin/zksync_api/src/fee_ticker/validator/watcher.rs @@ -38,14 +38,15 @@ impl UniswapTokenWatcher { let query = format!("{{token(id: \"{:#x}\"){{tradeVolumeUSD}}}}", address); - let request = self.client.post(&self.addr).json(&serde_json::json!({ - "query": query.clone(), - })); - let api_request_future = tokio::time::timeout(REQUEST_TIMEOUT, request.send()); - - let response: GraphqlResponse = api_request_future + let response = self + .client + .post(&self.addr) + .json(&serde_json::json!({ + "query": query.clone(), + })) + .timeout(REQUEST_TIMEOUT) + .send() .await - .map_err(|_| anyhow::format_err!("Uniswap API request timeout"))? .map_err(|err| anyhow::format_err!("Uniswap API request failed: {}", err))? 
.json::() .await?; diff --git a/core/bin/zksync_core/src/lib.rs b/core/bin/zksync_core/src/lib.rs index b3b18c8e88..eaa44334df 100644 --- a/core/bin/zksync_core/src/lib.rs +++ b/core/bin/zksync_core/src/lib.rs @@ -7,6 +7,7 @@ use crate::{ eth_watch::start_eth_watch, mempool::run_mempool_tasks, private_api::start_private_core_api, + rejected_tx_cleaner::run_rejected_tx_cleaner, state_keeper::{start_state_keeper, ZkSyncStateKeeper}, }; use futures::{channel::mpsc, future}; @@ -22,6 +23,7 @@ pub mod committer; pub mod eth_watch; pub mod mempool; pub mod private_api; +pub mod rejected_tx_cleaner; pub mod state_keeper; /// Waits for *any* of the tokio tasks to be finished. @@ -150,6 +152,9 @@ pub async fn run_core( DEFAULT_CHANNEL_CAPACITY, ); + // Start rejected transactions cleaner task. + let rejected_tx_cleaner_task = run_rejected_tx_cleaner(&config, connection_pool.clone()); + // Start block proposer. let proposer_task = run_block_proposer_task( &config, @@ -171,6 +176,7 @@ pub async fn run_core( committer_task, mempool_task, proposer_task, + rejected_tx_cleaner_task, ]; Ok(task_futures) diff --git a/core/bin/zksync_core/src/rejected_tx_cleaner.rs b/core/bin/zksync_core/src/rejected_tx_cleaner.rs new file mode 100644 index 0000000000..c697b0c44a --- /dev/null +++ b/core/bin/zksync_core/src/rejected_tx_cleaner.rs @@ -0,0 +1,37 @@ +//! The cleaner is responsible for removing rejected transactions from the database +//! that were stored 2 or more weeks ago (this value is configurable as well as the actor's sleep time). +//! +//! The purpose is not to store the information about the failed transaction execution +//! which is useful only for a short period of time. Since such transactions are not actually +//! included in the block and don't affect the state hash, there is no much sense to keep +//! them forever. + +// External uses +use tokio::{task::JoinHandle, time}; + +// Workspace deps +use zksync_config::ZkSyncConfig; +use zksync_storage::ConnectionPool; + +#[must_use] +pub fn run_rejected_tx_cleaner(config: &ZkSyncConfig, db_pool: ConnectionPool) -> JoinHandle<()> { + let max_age = config.db.rejected_transactions_max_age(); + let interval = config.db.rejected_transactions_cleaner_interval(); + let mut timer = time::interval(interval); + + tokio::spawn(async move { + loop { + let mut storage = db_pool + .access_storage() + .await + .expect("transactions cleaner couldn't access the database"); + storage + .chain() + .operations_schema() + .remove_rejected_transactions(max_age) + .await + .expect("failed to delete rejected transactions from the database"); + timer.tick().await; + } + }) +} diff --git a/core/bin/zksync_core/src/state_keeper/mod.rs b/core/bin/zksync_core/src/state_keeper/mod.rs index c067ff7adc..1274ac616c 100644 --- a/core/bin/zksync_core/src/state_keeper/mod.rs +++ b/core/bin/zksync_core/src/state_keeper/mod.rs @@ -94,6 +94,13 @@ impl PendingBlock { } } +pub fn system_time_timestamp() -> u64 { + SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("failed to get system time") + .as_secs() +} + /// Responsible for tx processing and block forming. 
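For reviewer orientation, a minimal wiring sketch of the cleaner introduced above: it assumes the `config: ZkSyncConfig`, `connection_pool: ConnectionPool`, and `task_futures` bindings that already exist inside `run_core` (as the lib.rs hunk shows); only the surrounding comments are illustrative.

    // Sketch only: spawn the rejected-transactions cleaner next to the other
    // core tasks, mirroring the `run_core` hunk above.
    let rejected_tx_cleaner_task = run_rejected_tx_cleaner(&config, connection_pool.clone());
    task_futures.push(rejected_tx_cleaner_task);
    // `run_core` returns this vector, and the caller resolves as soon as *any*
    // of the spawned tokio tasks finishes (per the doc comment quoted earlier).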
pub struct ZkSyncStateKeeper { /// Current plasma state @@ -389,10 +396,7 @@ impl ZkSyncStateKeeper { initial_state.unprocessed_priority_op, max_block_size, previous_root_hash, - SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("failed to get system time") - .as_secs(), + system_time_timestamp(), ), available_block_chunk_sizes, max_miniblock_iterations, @@ -546,6 +550,11 @@ impl ZkSyncStateKeeper { let start = Instant::now(); let mut executed_ops = Vec::new(); + // If pending block is empty we update timestamp + if self.pending_block.success_operations.is_empty() { + self.pending_block.timestamp = system_time_timestamp(); + } + // We want to store this variable before moving anything from the pending block. let empty_proposed_block = proposed_block.is_empty(); @@ -928,10 +937,7 @@ impl ZkSyncStateKeeper { .last() .expect("failed to get max block size"), H256::default(), - SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("failed to get system time") - .as_secs(), + system_time_timestamp(), ), ); diff --git a/core/bin/zksync_core/src/state_keeper/tests.rs b/core/bin/zksync_core/src/state_keeper/tests.rs index 67169561b2..ce4162c392 100644 --- a/core/bin/zksync_core/src/state_keeper/tests.rs +++ b/core/bin/zksync_core/src/state_keeper/tests.rs @@ -291,6 +291,7 @@ mod apply_priority_op { mod apply_tx { use super::*; + use zksync_types::gas_counter::{VerifyCost, TX_GAS_LIMIT}; /// Checks if withdrawal is processed correctly by the state_keeper #[test] @@ -385,12 +386,13 @@ mod apply_tx { } /// Checks if processing withdrawal fails because the gas limit is reached. - /// This sends 46 withdrawals (very ineficcient, but all constants in + /// This sends 46 withdrawals (very inefficient, but all constants in /// GasCounter are hardcoded, so I see no way out) #[test] fn gas_limit_reached() { - let withdrawals_number = 46; - let mut tester = StateKeeperTester::new(6 * withdrawals_number, 1, 1); + let withdrawals_number = (TX_GAS_LIMIT - VerifyCost::base_cost().as_u64() * 130 / 100) + / (VerifyCost::WITHDRAW_COST * 130 / 100); + let mut tester = StateKeeperTester::new(6 * withdrawals_number as usize, 1, 1); for i in 1..=withdrawals_number { let withdrawal = create_account_and_withdrawal( &mut tester, @@ -401,10 +403,20 @@ mod apply_tx { Default::default(), ); let result = tester.state_keeper.apply_tx(&withdrawal); - if i < withdrawals_number { - assert!(result.is_ok()) + if i <= withdrawals_number { + assert!( + result.is_ok(), + "i: {}, withdrawals: {}", + i, + withdrawals_number + ) } else { - assert!(result.is_err()) + assert!( + result.is_err(), + "i: {}, withdrawals: {}", + i, + withdrawals_number + ) } } } diff --git a/core/bin/zksync_eth_sender/src/tests/mock.rs b/core/bin/zksync_eth_sender/src/tests/mock.rs index 643f725f1b..a272782ac6 100644 --- a/core/bin/zksync_eth_sender/src/tests/mock.rs +++ b/core/bin/zksync_eth_sender/src/tests/mock.rs @@ -23,6 +23,7 @@ use zksync_eth_client::clients::mock::MockEthereum; #[derive(Debug)] pub(in crate) struct MockDatabase { eth_operations: RwLock>, + aggregated_operations: RwLock>, unprocessed_operations: RwLock>, eth_parameters: RwLock, } @@ -31,12 +32,15 @@ impl MockDatabase { /// Creates a database with emulation of previously stored uncommitted requests. 
pub fn with_restorable_state( eth_operations: Vec, + aggregated_operations: Vec<(i64, AggregatedOperation)>, + unprocessed_operations: Vec<(i64, AggregatedOperation)>, eth_parameters: ETHParams, ) -> Self { Self { eth_operations: RwLock::new(eth_operations), + aggregated_operations: RwLock::new(aggregated_operations), + unprocessed_operations: RwLock::new(unprocessed_operations), eth_parameters: RwLock::new(eth_parameters), - unprocessed_operations: RwLock::new(Vec::new()), } } @@ -53,6 +57,10 @@ impl MockDatabase { aggregated_operation: (i64, AggregatedOperation), ) -> anyhow::Result<()> { self.unprocessed_operations + .write() + .await + .push(aggregated_operation.clone()); + self.aggregated_operations .write() .await .push(aggregated_operation); @@ -143,6 +151,26 @@ impl DatabaseInterface for MockDatabase { &self, _connection: &mut StorageProcessor<'_>, ) -> anyhow::Result<()> { + let aggregated_operations = self.aggregated_operations.read().await; + let eth_operations = self.eth_operations.read().await; + let mut unprocessed_operations = self.unprocessed_operations.write().await; + + let mut new_unprocessed_operations = Vec::new(); + + for operation in aggregated_operations.iter() { + let is_operation_in_queue = unprocessed_operations + .iter() + .any(|unprocessed_operation| unprocessed_operation.0 == operation.0); + let is_operation_send_to_ethereum = eth_operations + .iter() + .any(|ethereum_operation| ethereum_operation.op.as_ref().unwrap().0 == operation.0); + if !is_operation_in_queue && !is_operation_send_to_ethereum { + new_unprocessed_operations.push(operation.clone()); + } + } + + unprocessed_operations.extend(new_unprocessed_operations); + Ok(()) } @@ -338,35 +366,65 @@ pub(in crate) fn default_eth_parameters() -> ETHParams { /// Creates a default `ETHSender` with mock Ethereum connection/database and no operations in DB. /// Returns the `ETHSender` itself along with communication channels to interact with it. pub(in crate) async fn default_eth_sender() -> ETHSender { - build_eth_sender(1, Vec::new(), default_eth_parameters()).await + build_eth_sender( + 1, + Vec::new(), + Vec::new(), + Vec::new(), + default_eth_parameters(), + ) + .await } /// Creates an `ETHSender` with mock Ethereum connection/database and no operations in DB /// which supports multiple transactions in flight. /// Returns the `ETHSender` itself along with communication channels to interact with it. pub(in crate) async fn concurrent_eth_sender(max_txs_in_flight: u64) -> ETHSender { - build_eth_sender(max_txs_in_flight, Vec::new(), default_eth_parameters()).await + build_eth_sender( + max_txs_in_flight, + Vec::new(), + Vec::new(), + Vec::new(), + default_eth_parameters(), + ) + .await } /// Creates an `ETHSender` with mock Ethereum connection/database and restores its state "from DB". /// Returns the `ETHSender` itself along with communication channels to interact with it. pub(in crate) async fn restored_eth_sender( eth_operations: Vec, + aggregated_operations: Vec<(i64, AggregatedOperation)>, + unprocessed_operations: Vec<(i64, AggregatedOperation)>, eth_parameters: ETHParams, ) -> ETHSender { const MAX_TXS_IN_FLIGHT: u64 = 1; - build_eth_sender(MAX_TXS_IN_FLIGHT, eth_operations, eth_parameters).await + build_eth_sender( + MAX_TXS_IN_FLIGHT, + eth_operations, + aggregated_operations, + unprocessed_operations, + eth_parameters, + ) + .await } /// Helper method for configurable creation of `ETHSender`. 
async fn build_eth_sender( max_txs_in_flight: u64, eth_operations: Vec, + aggregated_operations: Vec<(i64, AggregatedOperation)>, + unprocessed_operations: Vec<(i64, AggregatedOperation)>, eth_parameters: ETHParams, ) -> ETHSender { let ethereum = EthereumGateway::Mock(MockEthereum::default()); - let db = MockDatabase::with_restorable_state(eth_operations, eth_parameters); + let db = MockDatabase::with_restorable_state( + eth_operations, + aggregated_operations, + unprocessed_operations, + eth_parameters, + ); let options = ETHSenderConfig { sender: Sender { diff --git a/core/bin/zksync_eth_sender/src/tests/mod.rs b/core/bin/zksync_eth_sender/src/tests/mod.rs index 0a93060ded..01707245dc 100644 --- a/core/bin/zksync_eth_sender/src/tests/mod.rs +++ b/core/bin/zksync_eth_sender/src/tests/mod.rs @@ -537,30 +537,70 @@ async fn transaction_failure() { /// they will be processed normally. #[tokio::test] async fn restore_state() { - let (operations, stored_operations) = { + let (stored_eth_operations, aggregated_operations, unprocessed_operations) = { // This `eth_sender` is required to generate the input only. let eth_sender = default_eth_sender().await; - let commit_op = test_data::commit_blocks_operation(0); - let verify_op = test_data::publish_proof_blocks_onchain_operations(0); - let execute_op = test_data::execute_blocks_operations(0); + // Aggregated operations for which Ethereum transactions have been created but have not yet been confirmed. + let processed_commit_op = test_data::commit_blocks_operation(0); + let processed_verify_op = test_data::publish_proof_blocks_onchain_operations(0); + let processed_execute_op = test_data::execute_blocks_operations(0); let deadline_block = eth_sender.get_deadline_block(1); - let commit_op_tx = - create_signed_tx(0, ð_sender, commit_op.clone(), deadline_block, 0).await; + let commit_op_tx = create_signed_tx( + 0, + ð_sender, + processed_commit_op.clone(), + deadline_block, + 0, + ) + .await; let deadline_block = eth_sender.get_deadline_block(1 + WAIT_CONFIRMATIONS); - let verify_op_tx = - create_signed_tx(1, ð_sender, verify_op.clone(), deadline_block, 1).await; + let verify_op_tx = create_signed_tx( + 1, + ð_sender, + processed_verify_op.clone(), + deadline_block, + 1, + ) + .await; let deadline_block = eth_sender.get_deadline_block(1 + 2 * WAIT_CONFIRMATIONS); - let execute_op_tx = - create_signed_tx(2, ð_sender, execute_op.clone(), deadline_block, 2).await; - - let operations = vec![commit_op, verify_op, execute_op]; - let stored_operations = vec![commit_op_tx, verify_op_tx, execute_op_tx]; + let execute_op_tx = create_signed_tx( + 2, + ð_sender, + processed_execute_op.clone(), + deadline_block, + 2, + ) + .await; - (operations, stored_operations) + let stored_eth_operations = vec![commit_op_tx, verify_op_tx, execute_op_tx]; + + // Aggregated operations that have not yet been processed. + let unprocessed_commit_op = test_data::commit_blocks_operation(1); + let unprocessed_verify_op = test_data::publish_proof_blocks_onchain_operations(1); + let unprocessed_execute_op = test_data::execute_blocks_operations(1); + + // All aggregated operations must be in the database even after server restart. 
+ let aggregated_operations = vec![ + processed_commit_op, + processed_verify_op, + processed_execute_op, + unprocessed_commit_op, + unprocessed_verify_op, + unprocessed_execute_op.clone(), + ]; + // Aggregated operations from the table `eth_unprocessed_aggregated_ops` are deleted after the operation is added to the queue, + // therefore, after restarting the server, it may contain not all really unprocessed operations. + let unprocessed_operations = vec![unprocessed_execute_op]; + + ( + stored_eth_operations, + aggregated_operations, + unprocessed_operations, + ) }; let mut eth_parameters = default_eth_parameters(); @@ -568,9 +608,17 @@ async fn restore_state() { eth_parameters.last_verified_block = 1; eth_parameters.last_executed_block = 1; - let mut eth_sender = restored_eth_sender(stored_operations, eth_parameters).await; + let mut eth_sender = restored_eth_sender( + stored_eth_operations, + aggregated_operations.clone(), + unprocessed_operations, + eth_parameters, + ) + .await; - for (eth_op_id, aggregated_operation) in operations.iter().enumerate() { + eth_sender.load_new_operations().await.unwrap(); + + for (eth_op_id, aggregated_operation) in aggregated_operations.iter().enumerate() { // Note that we DO NOT send an operation to `ETHSender` and neither receive it. // We do process operations restored from the DB though. diff --git a/core/lib/config/Cargo.toml b/core/lib/config/Cargo.toml index 9b4ef76766..d42106fece 100644 --- a/core/lib/config/Cargo.toml +++ b/core/lib/config/Cargo.toml @@ -12,6 +12,7 @@ categories = ["cryptography"] [dependencies] zksync_types = { path = "../types", version = "1.0" } zksync_utils = { path = "../utils", version = "1.0" } +chrono = "0.4" url = "2.1" tracing = "0.1.22" num = "0.3.1" diff --git a/core/lib/config/src/configs/database.rs b/core/lib/config/src/configs/database.rs new file mode 100644 index 0000000000..d58f6dc1fd --- /dev/null +++ b/core/lib/config/src/configs/database.rs @@ -0,0 +1,66 @@ +// Built-in uses +use std::time; + +// External uses +use serde::Deserialize; + +// Local uses +use crate::envy_load; + +/// Used database configuration. +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct DBConfig { + /// Amount of open connections to the database held by server in the pool. + pub pool_size: usize, + /// Database URL. + pub url: String, + /// Rejected transactions will be stored in the database for this amount of hours. + pub rejected_transactions_max_age: u64, + /// Sleep time (in hours) of the actor responsible for deleting failed transactions from the database. 
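Because both new settings are expressed in hours, a short sketch of how they translate into the durations the cleaner consumes may help; the field values are the ones used in the `from_env` test further down, and the conversion methods are the ones declared in this hunk.

    // 336 hours of retention (= the "2 weeks" from the core changelog),
    // cleaned up once every 24 hours.
    let config = DBConfig {
        pool_size: 10,
        url: "postgres://postgres@localhost/plasma".into(),
        rejected_transactions_max_age: 336,
        rejected_transactions_cleaner_interval: 24,
    };
    assert_eq!(
        config.rejected_transactions_max_age(),
        chrono::Duration::hours(336)
    );
    assert_eq!(
        config.rejected_transactions_cleaner_interval(),
        std::time::Duration::from_secs(24 * 3600)
    );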
+ pub rejected_transactions_cleaner_interval: u64, +} + +impl DBConfig { + const SECS_PER_HOUR: u64 = 3600; + + pub fn from_env() -> Self { + envy_load!("contracts", "DATABASE_") + } + + pub fn rejected_transactions_max_age(&self) -> chrono::Duration { + chrono::Duration::hours(self.rejected_transactions_max_age as i64) + } + + pub fn rejected_transactions_cleaner_interval(&self) -> time::Duration { + time::Duration::from_secs(self.rejected_transactions_cleaner_interval * Self::SECS_PER_HOUR) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::set_env; + + fn expected_config() -> DBConfig { + DBConfig { + pool_size: 10, + url: "postgres://postgres@localhost/plasma".into(), + rejected_transactions_max_age: 336, + rejected_transactions_cleaner_interval: 24, + } + } + + #[test] + fn from_env() { + let config = r#" +DATABASE_POOL_SIZE="10" +DATABASE_URL="postgres://postgres@localhost/plasma" +DATABASE_REJECTED_TRANSACTIONS_MAX_AGE="336" +DATABASE_REJECTED_TRANSACTIONS_CLEANER_INTERVAL="24" + "#; + set_env(config); + + let actual = DBConfig::from_env(); + assert_eq!(actual, expected_config()); + } +} diff --git a/core/lib/config/src/configs/db.rs b/core/lib/config/src/configs/db.rs deleted file mode 100644 index de6fcde6ca..0000000000 --- a/core/lib/config/src/configs/db.rs +++ /dev/null @@ -1,48 +0,0 @@ -// External uses -use serde::Deserialize; - -/// Used database configuration. -#[derive(Debug, Deserialize, Clone, PartialEq)] -pub struct DBConfig { - /// Amount of open connections to the database held by server in the pool. - pub pool_size: usize, - /// Database URL. - pub url: String, -} - -impl DBConfig { - pub fn from_env() -> Self { - Self { - pool_size: std::env::var("DB_POOL_SIZE") - .expect("DB_POOL_SIZE is set") - .parse() - .unwrap(), - url: std::env::var("DATABASE_URL").expect("DATABASE_URL is set"), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::configs::test_utils::set_env; - - fn expected_config() -> DBConfig { - DBConfig { - pool_size: 10, - url: "postgres://postgres@localhost/plasma".into(), - } - } - - #[test] - fn from_env() { - let config = r#" -DB_POOL_SIZE="10" -DATABASE_URL="postgres://postgres@localhost/plasma" - "#; - set_env(config); - - let actual = DBConfig::from_env(); - assert_eq!(actual, expected_config()); - } -} diff --git a/core/lib/config/src/configs/mod.rs b/core/lib/config/src/configs/mod.rs index f06dc7ce6f..5aa1e9878b 100644 --- a/core/lib/config/src/configs/mod.rs +++ b/core/lib/config/src/configs/mod.rs @@ -1,6 +1,6 @@ // Public re-exports pub use self::{ - api::ApiConfig, chain::ChainConfig, contracts::ContractsConfig, db::DBConfig, + api::ApiConfig, chain::ChainConfig, contracts::ContractsConfig, database::DBConfig, dev_liquidity_token_watcher::DevLiquidityTokenWatcherConfig, eth_client::ETHClientConfig, eth_sender::ETHSenderConfig, eth_watch::ETHWatchConfig, misc::MiscConfig, prover::ProverConfig, ticker::TickerConfig, @@ -9,7 +9,7 @@ pub use self::{ pub mod api; pub mod chain; pub mod contracts; -pub mod db; +pub mod database; pub mod dev_liquidity_token_watcher; pub mod eth_client; pub mod eth_sender; diff --git a/core/lib/prover_utils/src/exit_proof.rs b/core/lib/prover_utils/src/exit_proof.rs index d3ae445b45..371ec509e0 100644 --- a/core/lib/prover_utils/src/exit_proof.rs +++ b/core/lib/prover_utils/src/exit_proof.rs @@ -40,6 +40,10 @@ pub fn create_exit_proof( let zksync_exit_circuit = create_exit_circuit_with_public_input(&mut circuit_account_tree, account_id, token_id); + let commitment 
= zksync_exit_circuit + .pub_data_commitment + .expect("Witness should contract commitment"); + vlog::info!("Proof commitment: {:?}", commitment); let proof = gen_verified_proof_for_exit_circuit(zksync_exit_circuit) .map_err(|e| format_err!("Failed to generate proof: {}", e))?; diff --git a/core/lib/storage/migrations/2021-02-02-071220_contracts-v4/up.sql b/core/lib/storage/migrations/2021-02-02-071220_contracts-v4/up.sql index 6daf27bcb0..f5426517a5 100644 --- a/core/lib/storage/migrations/2021-02-02-071220_contracts-v4/up.sql +++ b/core/lib/storage/migrations/2021-02-02-071220_contracts-v4/up.sql @@ -82,4 +82,3 @@ ALTER TABLE eth_parameters RENAME COLUMN "withdraw_ops" TO "last_executed_block"; UPDATE eth_parameters SET last_executed_block = (SELECT last_verified_block FROM eth_parameters WHERE id = true); - diff --git a/core/lib/storage/migrations/2021-02-09-112044_aggregated_ops_idx/down.sql b/core/lib/storage/migrations/2021-02-09-112044_aggregated_ops_idx/down.sql new file mode 100644 index 0000000000..a1e24a72f3 --- /dev/null +++ b/core/lib/storage/migrations/2021-02-09-112044_aggregated_ops_idx/down.sql @@ -0,0 +1,10 @@ +-- This file should undo anything in `up.sql` + +drop index aggregate_ops_range_idx; +drop index eth_agg_op_binding_idx; + +truncate commit_aggregated_blocks_binding; +truncate execute_aggregated_blocks_binding; + +drop table commit_aggregated_blocks_binding; +drop table execute_aggregated_blocks_binding; diff --git a/core/lib/storage/migrations/2021-02-09-112044_aggregated_ops_idx/up.sql b/core/lib/storage/migrations/2021-02-09-112044_aggregated_ops_idx/up.sql new file mode 100644 index 0000000000..1b960f30b0 --- /dev/null +++ b/core/lib/storage/migrations/2021-02-09-112044_aggregated_ops_idx/up.sql @@ -0,0 +1,29 @@ +CREATE TABLE execute_aggregated_blocks_binding +( + op_id bigserial NOT NULL REFERENCES aggregate_operations (id) on delete cascade, + block_number bigserial NOT NULL REFERENCES blocks (number) on delete cascade, + primary key (block_number) +); + +CREATE TABLE commit_aggregated_blocks_binding +( + op_id bigserial NOT NULL REFERENCES aggregate_operations (id) on delete cascade, + block_number bigserial NOT NULL REFERENCES blocks (number) on delete cascade, + primary key (block_number) +); + + +INSERT INTO execute_aggregated_blocks_binding (op_id, block_number) +SELECT aggregate_operations.id, blocks.number +from aggregate_operations + INNER JOIN blocks ON blocks.number BETWEEN aggregate_operations.from_block AND aggregate_operations.to_block +WHERE aggregate_operations.action_type = 'ExecuteBlocks'; + +INSERT INTO commit_aggregated_blocks_binding (op_id, block_number) +SELECT aggregate_operations.id, blocks.number +from aggregate_operations + INNER JOIN blocks ON blocks.number BETWEEN aggregate_operations.from_block AND aggregate_operations.to_block +WHERE aggregate_operations.action_type = 'CommitBlocks'; + +create index eth_agg_op_binding_idx on eth_aggregated_ops_binding using BTREE (op_id); +create index aggregate_ops_range_idx on aggregate_operations using BTREE (from_block, to_block desc); diff --git a/core/lib/storage/sqlx-data.json b/core/lib/storage/sqlx-data.json index 47565b688d..1bfe68c620 100644 --- a/core/lib/storage/sqlx-data.json +++ b/core/lib/storage/sqlx-data.json @@ -42,6 +42,65 @@ ] } }, + "016aecf9d717ed60a6b650315e6209463f60f9ccfe842fddb97795bf55746fe9": { + "query": "\n WITH block_details AS (\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n 
commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n , aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS details_block_number,\n committed.final_hash AS commit_tx_hash,\n verified.final_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n )\n SELECT\n block_number, \n block_index as \"block_index?\",\n tx_hash,\n success,\n fail_reason as \"fail_reason?\",\n details.commit_tx_hash as \"commit_tx_hash?\",\n details.verify_tx_hash as \"verify_tx_hash?\"\n FROM executed_transactions\n LEFT JOIN block_details details ON details.details_block_number = executed_transactions.block_number\n WHERE (\n (primary_account_address = $1 OR from_account = $1 OR to_account = $1)\n AND (\n block_number = $2 AND (\n COALESCE(block_index, -1) >= $3\n ) OR (\n block_number > $2\n )\n )\n )\n ORDER BY block_number ASC, COALESCE(block_index, -1) ASC\n LIMIT $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "block_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "block_index?", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "tx_hash", + "type_info": "Bytea" + }, + { + "ordinal": 3, + "name": "success", + "type_info": "Bool" + }, + { + "ordinal": 4, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "commit_tx_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 6, + "name": "verify_tx_hash?", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Int8", + "Int4", + "Int8" + ] + }, + "nullable": [ + false, + true, + false, + false, + true, + true, + true + ] + } + }, "04069d09246f16a6d03be04decaa05456556dc05b964adea34742af0eaef91aa": { "query": "\n SELECT * FROM tokens\n WHERE symbol = $1\n LIMIT 1\n ", "describe": { @@ -186,6 +245,86 @@ ] } }, + "0d173d7985eb999966305a7186ec7d865481b764ba2f666f7b502d7cc33a83ad": { + "query": "\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.confirmed, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n WHERE aggregate_operations.confirmed = true \n ), aggr_exec AS (\n SELECT \n aggregate_operations.confirmed, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n WHERE aggregate_operations.confirmed = true \n ), transactions as (\n select\n *\n from (\n select\n concat_ws(',', block_number, 
block_index) as tx_id,\n tx,\n 'sync-tx:' || encode(tx_hash, 'hex') as hash,\n null as pq_id,\n null as eth_block,\n success,\n fail_reason,\n block_number,\n created_at\n from\n executed_transactions\n where\n (\n from_account = $1\n or\n to_account = $1\n or\n primary_account_address = $1\n )\n and\n (block_number BETWEEN $3 AND $4 or (block_number = $2 and block_index BETWEEN $5 AND $6))\n union all\n select\n concat_ws(',', block_number, block_index) as tx_id,\n operation as tx,\n '0x' || encode(eth_hash, 'hex') as hash,\n priority_op_serialid as pq_id,\n eth_block,\n true as success,\n null as fail_reason,\n block_number,\n created_at\n from \n executed_priority_operations\n where \n (\n from_account = $1\n or\n to_account = $1\n )\n and\n (block_number BETWEEN $3 AND $4 or (block_number = $2 and block_index BETWEEN $5 AND $6))\n ) t\n order by\n block_number desc, created_at desc\n limit \n $7\n )\n select\n tx_id as \"tx_id!\",\n hash as \"hash?\",\n eth_block as \"eth_block?\",\n pq_id as \"pq_id?\",\n tx as \"tx!\",\n success as \"success?\",\n fail_reason as \"fail_reason?\",\n true as \"commited!\",\n coalesce(verified.confirmed, false) as \"verified!\",\n created_at as \"created_at!\"\n from transactions\n left join aggr_comm committed on\n committed.block_number = transactions.block_number AND committed.confirmed = true\n left join aggr_exec verified on\n verified.block_number = transactions.block_number AND verified.confirmed = true\n order by transactions.block_number desc, created_at desc\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_id!", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "hash?", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "eth_block?", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "pq_id?", + "type_info": "Int8" + }, + { + "ordinal": 4, + "name": "tx!", + "type_info": "Jsonb" + }, + { + "ordinal": 5, + "name": "success?", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "commited!", + "type_info": "Bool" + }, + { + "ordinal": 8, + "name": "verified!", + "type_info": "Bool" + }, + { + "ordinal": 9, + "name": "created_at!", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Int8", + "Int8", + "Int8", + "Int4", + "Int4", + "Int8" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + } + }, "0d6babe453e10d8fd58e15eaac7b77d9d5ad315b84d36c66f7abb414202666d1": { "query": "INSERT INTO eth_unprocessed_aggregated_ops (op_id)\n VALUES ($1)", "describe": { @@ -372,6 +511,18 @@ ] } }, + "163c54b9ce64671b284e09c43bab0aadeda9d45e7b7f5ea43c1cae0f49b15b8d": { + "query": "\n INSERT INTO commit_aggregated_blocks_binding\n SELECT \n aggregate_operations.id, blocks.number\n FROM aggregate_operations\n INNER JOIN blocks ON blocks.number BETWEEN aggregate_operations.from_block AND aggregate_operations.to_block\n WHERE aggregate_operations.action_type = 'CommitBlocks' and aggregate_operations.id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + } + }, "17626aba706502252ba06108c8b1563732a3e85094f8d76ce55f1d3487fc605b": { "query": "\n select \n created_at as \"created_at!\"\n from (\n select\n created_at\n from\n executed_transactions\n where\n from_account = $1\n or\n to_account = $1\n or\n primary_account_address = $1\n union all\n select\n created_at\n from \n executed_priority_operations\n where \n 
from_account = $1\n or\n to_account = $1\n ) t\n order by\n created_at asc\n limit \n 1\n ", "describe": { @@ -421,8 +572,8 @@ "nullable": [] } }, - "18b31bb04ceac3422973692fdac7119b42f8d48fa98af34cfc644a30bd72d7b9": { - "query": "\n WITH block_details AS (\n WITH eth_ops AS (\n SELECT DISTINCT ON (to_block, action_type)\n aggregate_operations.from_block,\n aggregate_operations.to_block,\n eth_tx_hashes.tx_hash,\n aggregate_operations.action_type,\n aggregate_operations.created_at,\n aggregate_operations.confirmed\n FROM aggregate_operations\n left join eth_aggregated_ops_binding on eth_aggregated_ops_binding.op_id = aggregate_operations.id\n left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id\n ORDER BY to_block DESC, action_type, confirmed\n )\n SELECT\n blocks.number AS details_block_number,\n committed.tx_hash AS commit_tx_hash,\n verified.tx_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN eth_ops committed ON\n committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true\n LEFT JOIN eth_ops verified ON\n verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true\n )\n SELECT\n block_number, \n block_index as \"block_index?\",\n tx_hash,\n success,\n fail_reason as \"fail_reason?\",\n details.commit_tx_hash as \"commit_tx_hash?\",\n details.verify_tx_hash as \"verify_tx_hash?\"\n FROM executed_transactions\n LEFT JOIN block_details details ON details.details_block_number = executed_transactions.block_number\n WHERE (\n (primary_account_address = $1 OR from_account = $1 OR to_account = $1)\n AND (\n block_number = $2 AND (\n COALESCE(block_index, -1) >= $3\n ) OR (\n block_number > $2\n )\n )\n )\n ORDER BY block_number ASC, COALESCE(block_index, -1) ASC\n LIMIT $4\n ", + "1a3122983ff3dc5c9a1b6e2b5d68f10e93f9db6aac216c105157048ea5b802ed": { + "query": "\n WITH block_details AS (\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n , aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS details_block_number,\n committed.final_hash AS commit_tx_hash,\n verified.final_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n )\n SELECT\n block_number, \n block_index as \"block_index?\",\n tx_hash,\n success,\n fail_reason as \"fail_reason?\",\n details.commit_tx_hash as 
\"commit_tx_hash?\",\n details.verify_tx_hash as \"verify_tx_hash?\"\n FROM executed_transactions\n LEFT JOIN block_details details ON details.details_block_number = executed_transactions.block_number\n WHERE (\n (primary_account_address = $1 OR from_account = $1 OR to_account = $1)\n AND (\n block_number = $2 AND (\n COALESCE(block_index, -1) <= $3\n ) OR (\n block_number < $2\n )\n )\n )\n ORDER BY block_number DESC, COALESCE(block_index, -1) DESC\n LIMIT $4\n ", "describe": { "columns": [ { @@ -475,65 +626,8 @@ false, false, true, - false, - false - ] - } - }, - "19154231ebeb00783f2fa8402b7b44aca357f8ccd885e016f34848a413a832af": { - "query": "\n WITH eth_ops AS (\n SELECT DISTINCT ON (to_block, action_type)\n aggregate_operations.from_block,\n aggregate_operations.to_block,\n eth_tx_hashes.tx_hash,\n aggregate_operations.action_type,\n aggregate_operations.created_at,\n aggregate_operations.confirmed\n FROM aggregate_operations\n left join eth_aggregated_ops_binding on eth_aggregated_ops_binding.op_id = aggregate_operations.id\n left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id\n ORDER BY to_block desc, action_type, confirmed\n )\n SELECT\n blocks.number AS \"block_number!\",\n blocks.root_hash AS \"new_state_root!\",\n blocks.block_size AS \"block_size!\",\n committed.tx_hash AS \"commit_tx_hash?\",\n verified.tx_hash AS \"verify_tx_hash?\",\n committed.created_at AS \"committed_at!\",\n verified.created_at AS \"verified_at?\"\n FROM blocks\n INNER JOIN eth_ops committed ON\n committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true\n LEFT JOIN eth_ops verified ON\n verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true\n WHERE false\n OR committed.tx_hash = $1\n OR verified.tx_hash = $1\n OR blocks.root_hash = $1\n OR blocks.number = $2\n ORDER BY blocks.number DESC\n LIMIT 1;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "block_number!", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "new_state_root!", - "type_info": "Bytea" - }, - { - "ordinal": 2, - "name": "block_size!", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "commit_tx_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 4, - "name": "verify_tx_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 5, - "name": "committed_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 6, - "name": "verified_at?", - "type_info": "Timestamptz" - } - ], - "parameters": { - "Left": [ - "Bytea", - "Int8" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - false, - false + true, + true ] } }, @@ -1451,51 +1545,80 @@ ] } }, - "4d5ae94e475e07428d7e572c4e81e6b6cbca3d68cc3b770ff8615f229ced8ec6": { - "query": "\n WITH block_details AS (\n WITH eth_ops AS (\n SELECT DISTINCT ON (to_block, action_type)\n aggregate_operations.from_block,\n aggregate_operations.to_block,\n eth_tx_hashes.tx_hash,\n aggregate_operations.action_type,\n aggregate_operations.created_at,\n aggregate_operations.confirmed\n FROM aggregate_operations\n left join eth_aggregated_ops_binding on eth_aggregated_ops_binding.op_id = aggregate_operations.id\n left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id\n ORDER BY to_block DESC, action_type, confirmed\n )\n SELECT\n blocks.number AS details_block_number,\n committed.tx_hash AS commit_tx_hash,\n 
verified.tx_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN eth_ops committed ON\n committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true\n LEFT JOIN eth_ops verified ON\n verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true\n )\n SELECT\n block_number, \n block_index,\n eth_hash,\n details.commit_tx_hash as \"commit_tx_hash?\",\n details.verify_tx_hash as \"verify_tx_hash?\"\n FROM executed_priority_operations\n LEFT JOIN block_details details ON details.details_block_number = executed_priority_operations.block_number\n WHERE (\n (from_account = $1 OR to_account = $1)\n AND (\n block_number = $2 AND (\n block_index <= $3\n ) OR (\n block_number < $2\n )\n )\n )\n ORDER BY block_number DESC, block_index DESC\n LIMIT $4\n ", + "50a7a224aeba0065b57858fc989c3a09d45f833b68fbc9909a73817f782dd3c3": { + "query": "\n WITH aggr_exec AS (\n SELECT \n aggregate_operations.confirmed, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n WHERE aggregate_operations.confirmed = true \n ),\n transactions AS (\n SELECT\n *\n FROM (\n SELECT\n concat_ws(',', block_number, block_index) AS tx_id,\n tx,\n 'sync-tx:' || encode(tx_hash, 'hex') AS hash,\n null as pq_id,\n null as eth_block,\n success,\n fail_reason,\n block_number,\n created_at\n FROM\n executed_transactions\n WHERE\n from_account = $1\n or\n to_account = $1\n or\n primary_account_address = $1\n union all\n select\n concat_ws(',', block_number, block_index) as tx_id,\n operation as tx,\n '0x' || encode(eth_hash, 'hex') as hash,\n priority_op_serialid as pq_id,\n eth_block,\n true as success,\n null as fail_reason,\n block_number,\n created_at\n from \n executed_priority_operations\n where \n from_account = $1\n or\n to_account = $1) t\n order by\n block_number desc, created_at desc\n offset \n $2\n limit \n $3\n )\n select\n tx_id as \"tx_id!\",\n hash as \"hash?\",\n eth_block as \"eth_block?\",\n pq_id as \"pq_id?\",\n tx as \"tx!\",\n success as \"success?\",\n fail_reason as \"fail_reason?\",\n true as \"commited!\",\n coalesce(verified.confirmed, false) as \"verified!\",\n created_at as \"created_at!\"\n from transactions\n LEFT JOIN aggr_exec verified ON transactions.block_number = verified.block_number\n order by transactions.block_number desc, created_at desc\n ", "describe": { "columns": [ { "ordinal": 0, - "name": "block_number", - "type_info": "Int8" + "name": "tx_id!", + "type_info": "Text" }, { "ordinal": 1, - "name": "block_index", - "type_info": "Int4" + "name": "hash?", + "type_info": "Text" }, { "ordinal": 2, - "name": "eth_hash", - "type_info": "Bytea" + "name": "eth_block?", + "type_info": "Int8" }, { "ordinal": 3, - "name": "commit_tx_hash?", - "type_info": "Bytea" + "name": "pq_id?", + "type_info": "Int8" }, { "ordinal": 4, - "name": "verify_tx_hash?", - "type_info": "Bytea" + "name": "tx!", + "type_info": "Jsonb" + }, + { + "ordinal": 5, + "name": "success?", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "commited!", + "type_info": "Bool" + }, + { + "ordinal": 8, + "name": "verified!", + "type_info": "Bool" + }, + { + "ordinal": 9, + "name": "created_at!", + "type_info": "Timestamptz" } ], "parameters": { 
"Left": [ "Bytea", "Int8", - "Int4", "Int8" ] }, "nullable": [ - false, - false, - false, - false, - false - ] + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] } }, "51f7701a34610b1661c5f21b6dd31ddb9fbc3efea4397096eed7ccb42ed21071": { @@ -1845,26 +1968,6 @@ ] } }, - "71a9539df6b4362ab57a5397be0da6fac8ef23554dce5281e22704964c6f2d29": { - "query": "SELECT COUNT(*) FROM prover_job_queue WHERE job_status = $1", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "count", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int4" - ] - }, - "nullable": [ - null - ] - } - }, "74a5cc4affa23433b5b7834df6dfa1a7a2c5a65f23289de3de5a4f1b93f89c06": { "query": "SELECT address FROM account_creates WHERE account_id = $1", "describe": { @@ -1930,86 +2033,6 @@ "nullable": [] } }, - "784c391234df627984b8f62354818436789a31bcdad1e9a6235c603bd486e736": { - "query": "\n with eth_ops as (\n select distinct on (to_block, action_type)\n aggregate_operations.from_block,\n aggregate_operations.to_block,\n aggregate_operations.action_type,\n aggregate_operations.confirmed\n from aggregate_operations\n order by to_block DESC, action_type, confirmed\n ), transactions as (\n select\n *\n from (\n select\n concat_ws(',', block_number, block_index) as tx_id,\n tx,\n 'sync-tx:' || encode(tx_hash, 'hex') as hash,\n null as pq_id,\n null as eth_block,\n success,\n fail_reason,\n block_number,\n created_at\n from\n executed_transactions\n where\n (\n from_account = $1\n or\n to_account = $1\n or\n primary_account_address = $1\n )\n and\n (block_number BETWEEN $3 AND $4 or (block_number = $2 and block_index BETWEEN $5 AND $6))\n union all\n select\n concat_ws(',', block_number, block_index) as tx_id,\n operation as tx,\n '0x' || encode(eth_hash, 'hex') as hash,\n priority_op_serialid as pq_id,\n eth_block,\n true as success,\n null as fail_reason,\n block_number,\n created_at\n from \n executed_priority_operations\n where \n (\n from_account = $1\n or\n to_account = $1\n )\n and\n (block_number BETWEEN $3 AND $4 or (block_number = $2 and block_index BETWEEN $5 AND $6))\n ) t\n order by\n block_number desc, created_at desc\n limit \n $7\n )\n select\n tx_id as \"tx_id!\",\n hash as \"hash?\",\n eth_block as \"eth_block?\",\n pq_id as \"pq_id?\",\n tx as \"tx!\",\n success as \"success?\",\n fail_reason as \"fail_reason?\",\n true as \"commited!\",\n coalesce(verified.confirmed, false) as \"verified!\",\n created_at as \"created_at!\"\n from transactions\n left join eth_ops committed on\n committed.from_block <= transactions.block_number AND committed.to_block >= transactions.block_number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true\n left join eth_ops verified on\n verified.from_block <= transactions.block_number AND verified.to_block >= transactions.block_number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true\n order by transactions.block_number desc, created_at desc\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "tx_id!", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "hash?", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "eth_block?", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "pq_id?", - "type_info": "Int8" - }, - { - "ordinal": 4, - "name": "tx!", - "type_info": "Jsonb" - }, - { - "ordinal": 5, - "name": "success?", - "type_info": "Bool" - }, - { - "ordinal": 6, - "name": "fail_reason?", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "commited!", - 
"type_info": "Bool" - }, - { - "ordinal": 8, - "name": "verified!", - "type_info": "Bool" - }, - { - "ordinal": 9, - "name": "created_at!", - "type_info": "Timestamptz" - } - ], - "parameters": { - "Left": [ - "Bytea", - "Int8", - "Int8", - "Int8", - "Int4", - "Int4", - "Int8" - ] - }, - "nullable": [ - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ] - } - }, "790d46519ceaa7fbd152f1edf29b85c97ab491488b7302d8df3f57e5fc3eff55": { "query": "\n SELECT account_id FROM account_creates\n WHERE address = $1 AND is_create = $2\n ORDER BY block_number desc\n LIMIT 1\n ", "describe": { @@ -2184,6 +2207,18 @@ ] } }, + "8542fbcf1f669243a111348851c36ec3b01c4c11ac3391d55546d22a9e714e6b": { + "query": "DELETE FROM executed_transactions\n WHERE success = false AND created_at < $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz" + ] + }, + "nullable": [] + } + }, "8a039b0bae78afb5d106d84f7d136be17670909814f92a8e8070ba99a9aea21c": { "query": "SELECT * FROM data_restore_last_watched_eth_block LIMIT 1", "describe": { @@ -2311,6 +2346,26 @@ "nullable": [] } }, + "92663f125319988e4b5d80d3d58286ca90a29ec2fa97d87750942c9e0615d1bc": { + "query": "SELECT COUNT(*) FROM prover_job_queue WHERE job_status != $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + null + ] + } + }, "93bd5b76565dfbadecfd66a394127fd5b701d09dc3d61adb30b337fd12d86f6a": { "query": "\n UPDATE aggregate_operations\n SET confirmed = $1\n WHERE id = (SELECT op_id FROM eth_aggregated_ops_binding WHERE eth_op_id = $2)", "describe": { @@ -2796,6 +2851,63 @@ ] } }, + "ba155dc95f19a097d1a16bf35f23371872f72dfb618cb871693752be93fed472": { + "query": "\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n ,aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS \"block_number!\",\n blocks.root_hash AS \"new_state_root!\",\n blocks.block_size AS \"block_size!\",\n committed.final_hash AS \"commit_tx_hash?\",\n verified.final_hash AS \"verify_tx_hash?\",\n committed.created_at AS \"committed_at!\",\n verified.created_at AS \"verified_at?\"\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n WHERE\n blocks.number <= $1\n ORDER BY blocks.number DESC\n LIMIT $2;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "block_number!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": 
"new_state_root!", + "type_info": "Bytea" + }, + { + "ordinal": 2, + "name": "block_size!", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "commit_tx_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 4, + "name": "verify_tx_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 5, + "name": "committed_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "verified_at?", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + false, + false + ] + } + }, "baaaff359564c5d1094fcf2650d53cf9dcac5d50fc3a549c6cff53dd472350f7": { "query": "\n SELECT * FROM ticker_price\n WHERE token_id = $1\n LIMIT 1\n ", "describe": { @@ -3030,82 +3142,6 @@ ] } }, - "c0bc284b879b471211d6c2c9a8f45918b674f4c8385ccd66198b65a0666c0616": { - "query": "\n with eth_ops as (\n select distinct on (to_block, action_type)\n aggregate_operations.from_block,\n aggregate_operations.to_block,\n aggregate_operations.action_type,\n aggregate_operations.confirmed\n from aggregate_operations\n order by to_block DESC, action_type, confirmed\n ), transactions as (\n select\n *\n from (\n select\n concat_ws(',', block_number, block_index) as tx_id,\n tx,\n 'sync-tx:' || encode(tx_hash, 'hex') as hash,\n null as pq_id,\n null as eth_block,\n success,\n fail_reason,\n block_number,\n created_at\n from\n executed_transactions\n where\n from_account = $1\n or\n to_account = $1\n or\n primary_account_address = $1\n union all\n select\n concat_ws(',', block_number, block_index) as tx_id,\n operation as tx,\n '0x' || encode(eth_hash, 'hex') as hash,\n priority_op_serialid as pq_id,\n eth_block,\n true as success,\n null as fail_reason,\n block_number,\n created_at\n from \n executed_priority_operations\n where \n from_account = $1\n or\n to_account = $1) t\n order by\n block_number desc, created_at desc\n offset \n $2\n limit \n $3\n )\n select\n tx_id as \"tx_id!\",\n hash as \"hash?\",\n eth_block as \"eth_block?\",\n pq_id as \"pq_id?\",\n tx as \"tx!\",\n success as \"success?\",\n fail_reason as \"fail_reason?\",\n true as \"commited!\",\n coalesce(verified.confirmed, false) as \"verified!\",\n created_at as \"created_at!\"\n from transactions\n left join eth_ops verified on\n verified.from_block <= transactions.block_number AND verified.to_block >= transactions.block_number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true\n order by transactions.block_number desc, created_at desc\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "tx_id!", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "hash?", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "eth_block?", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "pq_id?", - "type_info": "Int8" - }, - { - "ordinal": 4, - "name": "tx!", - "type_info": "Jsonb" - }, - { - "ordinal": 5, - "name": "success?", - "type_info": "Bool" - }, - { - "ordinal": 6, - "name": "fail_reason?", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "commited!", - "type_info": "Bool" - }, - { - "ordinal": 8, - "name": "verified!", - "type_info": "Bool" - }, - { - "ordinal": 9, - "name": "created_at!", - "type_info": "Timestamptz" - } - ], - "parameters": { - "Left": [ - "Bytea", - "Int8", - "Int8" - ] - }, - "nullable": [ - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ] - } - }, "c16cb52de684232faf3ddf3bc5e4b90388e9b413e690aa5cf891fc4fad293edd": { "query": "DELETE FROM 
data_restore_events_state", "describe": { @@ -3279,8 +3315,8 @@ "nullable": [] } }, - "c8a1cac4744fc179b2845603a03cfb13fb3536ce8bf871a6e45a3f834412b2de": { - "query": "\n WITH block_details AS (\n WITH eth_ops AS (\n SELECT DISTINCT ON (to_block, action_type)\n aggregate_operations.from_block,\n aggregate_operations.to_block,\n eth_tx_hashes.tx_hash,\n aggregate_operations.action_type,\n aggregate_operations.created_at,\n aggregate_operations.confirmed\n FROM aggregate_operations\n left join eth_aggregated_ops_binding on eth_aggregated_ops_binding.op_id = aggregate_operations.id\n left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id\n ORDER BY to_block DESC, action_type, confirmed\n )\n SELECT\n blocks.number AS details_block_number,\n committed.tx_hash AS commit_tx_hash,\n verified.tx_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN eth_ops committed ON\n committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true\n LEFT JOIN eth_ops verified ON\n verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true\n )\n SELECT\n block_number, \n block_index,\n eth_hash,\n details.commit_tx_hash as \"commit_tx_hash?\",\n details.verify_tx_hash as \"verify_tx_hash?\"\n FROM executed_priority_operations\n LEFT JOIN block_details details ON details.details_block_number = executed_priority_operations.block_number\n WHERE (\n (from_account = $1 OR to_account = $1)\n AND (\n block_number = $2 AND (\n block_index >= $3\n ) OR (\n block_number > $2\n )\n )\n )\n ORDER BY block_number ASC, block_index ASC\n LIMIT $4\n ", + "c842454191f93c4ab02e9845294b575dfd48a8eaae85996c5e76a36be997f969": { + "query": "\n WITH block_details AS (\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n , aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS details_block_number,\n committed.final_hash AS commit_tx_hash,\n verified.final_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n )\n SELECT\n block_number, \n block_index,\n eth_hash,\n details.commit_tx_hash as \"commit_tx_hash?\",\n details.verify_tx_hash as \"verify_tx_hash?\"\n FROM executed_priority_operations\n LEFT JOIN block_details details ON details.details_block_number = executed_priority_operations.block_number\n WHERE (\n (from_account = $1 OR 
to_account = $1)\n AND (\n block_number = $2 AND (\n block_index >= $3\n ) OR (\n block_number > $2\n )\n )\n )\n ORDER BY block_number ASC, block_index ASC\n LIMIT $4\n ", "describe": { "columns": [ { @@ -3321,8 +3357,8 @@ false, false, false, - false, - false + true, + true ] } }, @@ -3575,6 +3611,53 @@ ] } }, + "e32e0ba9ec31e6e78de5972548dced78d2a6949ec723b71ce210627dbb92dfe4": { + "query": "\n WITH block_details AS (\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n , aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS details_block_number,\n committed.final_hash AS commit_tx_hash,\n verified.final_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n )\n SELECT\n block_number, \n block_index,\n eth_hash,\n details.commit_tx_hash as \"commit_tx_hash?\",\n details.verify_tx_hash as \"verify_tx_hash?\"\n FROM executed_priority_operations\n LEFT JOIN block_details details ON details.details_block_number = executed_priority_operations.block_number\n WHERE (\n (from_account = $1 OR to_account = $1)\n AND (\n block_number = $2 AND (\n block_index <= $3\n ) OR (\n block_number < $2\n )\n )\n )\n ORDER BY block_number DESC, block_index DESC\n LIMIT $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "block_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "block_index", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "eth_hash", + "type_info": "Bytea" + }, + { + "ordinal": 3, + "name": "commit_tx_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 4, + "name": "verify_tx_hash?", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Int8", + "Int4", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + true, + true + ] + } + }, "e3ee3cb9cbe8d05a635e71daea301cf6b2310f89f3d9f8fdabc28e7ebf8d3521": { "query": "\n INSERT INTO eth_account_types VALUES ( $1, $2 )\n ON CONFLICT (account_id) DO UPDATE SET account_type = $2\n ", "describe": { @@ -3648,60 +3731,70 @@ ] } }, - "e866fdb43e0db3e85314aea9b4eda67f1ba666f9ba8af22f9086b04911fda18e": { - "query": "\n WITH block_details AS (\n WITH eth_ops AS (\n SELECT DISTINCT ON (to_block, action_type)\n aggregate_operations.from_block,\n aggregate_operations.to_block,\n eth_tx_hashes.tx_hash,\n aggregate_operations.action_type,\n aggregate_operations.created_at,\n aggregate_operations.confirmed\n FROM aggregate_operations\n left join eth_aggregated_ops_binding on 
eth_aggregated_ops_binding.op_id = aggregate_operations.id\n left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id\n ORDER BY to_block DESC, action_type, confirmed\n )\n SELECT\n blocks.number AS details_block_number,\n committed.tx_hash AS commit_tx_hash,\n verified.tx_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN eth_ops committed ON\n committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true\n LEFT JOIN eth_ops verified ON\n verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true\n )\n SELECT\n block_number, \n block_index as \"block_index?\",\n tx_hash,\n success,\n fail_reason as \"fail_reason?\",\n details.commit_tx_hash as \"commit_tx_hash?\",\n details.verify_tx_hash as \"verify_tx_hash?\"\n FROM executed_transactions\n LEFT JOIN block_details details ON details.details_block_number = executed_transactions.block_number\n WHERE (\n (primary_account_address = $1 OR from_account = $1 OR to_account = $1)\n AND (\n block_number = $2 AND (\n COALESCE(block_index, -1) <= $3\n ) OR (\n block_number < $2\n )\n )\n )\n ORDER BY block_number DESC, COALESCE(block_index, -1) DESC\n LIMIT $4\n ", + "e515899938d5ced7b83234fcea6ad024184702eca40b1fae1a16467649722a10": { + "query": "\n INSERT INTO execute_aggregated_blocks_binding\n SELECT \n aggregate_operations.id, blocks.number\n FROM aggregate_operations\n INNER JOIN blocks ON blocks.number BETWEEN aggregate_operations.from_block AND aggregate_operations.to_block\n WHERE aggregate_operations.action_type = 'ExecuteBlocks' and aggregate_operations.id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + } + }, + "e7b1a3e830945cfe5c876255bbaa97dae409e1f642539ec898fd5dc3bb991bfc": { + "query": "\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n ,aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS \"block_number!\",\n blocks.root_hash AS \"new_state_root!\",\n blocks.block_size AS \"block_size!\",\n committed.final_hash AS \"commit_tx_hash?\",\n verified.final_hash AS \"verify_tx_hash?\",\n committed.created_at AS \"committed_at!\",\n verified.created_at AS \"verified_at?\"\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n WHERE false\n OR committed.final_hash = $1\n OR verified.final_hash = $1\n 
OR blocks.root_hash = $1\n OR blocks.number = $2\n ORDER BY blocks.number DESC\n LIMIT 1;\n ", "describe": { "columns": [ { "ordinal": 0, - "name": "block_number", + "name": "block_number!", "type_info": "Int8" }, { "ordinal": 1, - "name": "block_index?", - "type_info": "Int4" + "name": "new_state_root!", + "type_info": "Bytea" }, { "ordinal": 2, - "name": "tx_hash", - "type_info": "Bytea" + "name": "block_size!", + "type_info": "Int8" }, { "ordinal": 3, - "name": "success", - "type_info": "Bool" + "name": "commit_tx_hash?", + "type_info": "Bytea" }, { "ordinal": 4, - "name": "fail_reason?", - "type_info": "Text" + "name": "verify_tx_hash?", + "type_info": "Bytea" }, { "ordinal": 5, - "name": "commit_tx_hash?", - "type_info": "Bytea" + "name": "committed_at!", + "type_info": "Timestamptz" }, { "ordinal": 6, - "name": "verify_tx_hash?", - "type_info": "Bytea" + "name": "verified_at?", + "type_info": "Timestamptz" } ], "parameters": { "Left": [ "Bytea", - "Int8", - "Int4", "Int8" ] }, "nullable": [ false, - true, false, false, true, + true, false, false ] @@ -4021,63 +4114,6 @@ "nullable": [] } }, - "f226b8aebbe2fa705f70a9495280e4939ce89c54dd257157d1d3ea895ab373e2": { - "query": "\n WITH eth_ops AS (\n SELECT DISTINCT ON (to_block, action_type)\n aggregate_operations.from_block,\n aggregate_operations.to_block,\n eth_tx_hashes.tx_hash,\n aggregate_operations.action_type,\n aggregate_operations.created_at,\n aggregate_operations.confirmed\n FROM aggregate_operations\n left join eth_aggregated_ops_binding on eth_aggregated_ops_binding.op_id = aggregate_operations.id\n left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id\n ORDER BY to_block DESC, action_type, confirmed\n )\n SELECT\n blocks.number AS \"block_number!\",\n blocks.root_hash AS \"new_state_root!\",\n blocks.block_size AS \"block_size!\",\n committed.tx_hash AS \"commit_tx_hash?\",\n verified.tx_hash AS \"verify_tx_hash?\",\n committed.created_at AS \"committed_at!\",\n verified.created_at AS \"verified_at?\"\n FROM blocks\n INNER JOIN eth_ops committed ON\n committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true\n LEFT JOIN eth_ops verified ON\n verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true\n WHERE\n blocks.number <= $1\n ORDER BY blocks.number DESC\n LIMIT $2;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "block_number!", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "new_state_root!", - "type_info": "Bytea" - }, - { - "ordinal": 2, - "name": "block_size!", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "commit_tx_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 4, - "name": "verify_tx_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 5, - "name": "committed_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 6, - "name": "verified_at?", - "type_info": "Timestamptz" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - false, - false - ] - } - }, "f4aaa302a20921ae9ff490ac1a86083c49ee4a9afacf0faeb76aa8e1549f2fe7": { "query": "SELECT * FROM account_creates WHERE block_number > $1 AND block_number <= $2 ", "describe": { diff --git a/core/lib/storage/src/chain/block/mod.rs b/core/lib/storage/src/chain/block/mod.rs index 37b536b6af..f485b7c718 100644 --- 
a/core/lib/storage/src/chain/block/mod.rs +++ b/core/lib/storage/src/chain/block/mod.rs @@ -271,32 +271,39 @@ impl<'a, 'c> BlockSchema<'a, 'c> { let details = sqlx::query_as!( BlockDetails, r#" - WITH eth_ops AS ( - SELECT DISTINCT ON (to_block, action_type) - aggregate_operations.from_block, - aggregate_operations.to_block, - eth_tx_hashes.tx_hash, - aggregate_operations.action_type, - aggregate_operations.created_at, - aggregate_operations.confirmed + WITH aggr_comm AS ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + commit_aggregated_blocks_binding.block_number FROM aggregate_operations - left join eth_aggregated_ops_binding on eth_aggregated_ops_binding.op_id = aggregate_operations.id - left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id - ORDER BY to_block DESC, action_type, confirmed + INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true + ) + ,aggr_exec as ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + execute_aggregated_blocks_binding.block_number + FROM aggregate_operations + INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true ) SELECT blocks.number AS "block_number!", blocks.root_hash AS "new_state_root!", blocks.block_size AS "block_size!", - committed.tx_hash AS "commit_tx_hash?", - verified.tx_hash AS "verify_tx_hash?", + committed.final_hash AS "commit_tx_hash?", + verified.final_hash AS "verify_tx_hash?", committed.created_at AS "committed_at!", verified.created_at AS "verified_at?" 
FROM blocks - INNER JOIN eth_ops committed ON - committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true - LEFT JOIN eth_ops verified ON - verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true + INNER JOIN aggr_comm committed ON blocks.number = committed.block_number + LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number WHERE blocks.number <= $1 ORDER BY blocks.number DESC @@ -375,35 +382,42 @@ impl<'a, 'c> BlockSchema<'a, 'c> { let result = sqlx::query_as!( BlockDetails, r#" - WITH eth_ops AS ( - SELECT DISTINCT ON (to_block, action_type) - aggregate_operations.from_block, - aggregate_operations.to_block, - eth_tx_hashes.tx_hash, - aggregate_operations.action_type, - aggregate_operations.created_at, - aggregate_operations.confirmed + WITH aggr_comm AS ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + commit_aggregated_blocks_binding.block_number + FROM aggregate_operations + INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true + ) + ,aggr_exec as ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + execute_aggregated_blocks_binding.block_number FROM aggregate_operations - left join eth_aggregated_ops_binding on eth_aggregated_ops_binding.op_id = aggregate_operations.id - left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id - ORDER BY to_block desc, action_type, confirmed + INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true ) SELECT blocks.number AS "block_number!", blocks.root_hash AS "new_state_root!", blocks.block_size AS "block_size!", - committed.tx_hash AS "commit_tx_hash?", - verified.tx_hash AS "verify_tx_hash?", + committed.final_hash AS "commit_tx_hash?", + verified.final_hash AS "verify_tx_hash?", committed.created_at AS "committed_at!", verified.created_at AS "verified_at?" 
FROM blocks - INNER JOIN eth_ops committed ON - committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true - LEFT JOIN eth_ops verified ON - verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true + INNER JOIN aggr_comm committed ON blocks.number = committed.block_number + LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number WHERE false - OR committed.tx_hash = $1 - OR verified.tx_hash = $1 + OR committed.final_hash = $1 + OR verified.final_hash = $1 OR blocks.root_hash = $1 OR blocks.number = $2 ORDER BY blocks.number DESC diff --git a/core/lib/storage/src/chain/operations/mod.rs b/core/lib/storage/src/chain/operations/mod.rs index 4efcaacc5f..3835b847ae 100644 --- a/core/lib/storage/src/chain/operations/mod.rs +++ b/core/lib/storage/src/chain/operations/mod.rs @@ -1,6 +1,7 @@ // Built-in deps use std::time::Instant; // External imports +use chrono::{Duration, Utc}; // Workspace imports use zksync_types::{tx::TxHash, BlockNumber}; // Local imports @@ -233,6 +234,26 @@ impl<'a, 'c> OperationsSchema<'a, 'c> { Ok(()) } + /// Removes all rejected transactions with an age greater than `max_age` from the database. + pub async fn remove_rejected_transactions(&mut self, max_age: Duration) -> QueryResult<()> { + let start = Instant::now(); + + let offset = Utc::now() - max_age; + sqlx::query!( + "DELETE FROM executed_transactions + WHERE success = false AND created_at < $1", + offset + ) + .execute(self.0.conn()) + .await?; + + metrics::histogram!( + "sql.chain.operations.remove_rejected_transactions", + start.elapsed() + ); + Ok(()) + } + /// Stores executed priority operation in database. /// /// This method is made public to fill the database for tests, do not use it for @@ -324,10 +345,35 @@ impl<'a, 'c> OperationsSchema<'a, 'c> { .await? 
.id; - if !matches!( - operation.get_action_type(), - AggregatedActionType::CreateProofBlocks - ) { + if operation.is_commit() { + sqlx::query!( + r#" + INSERT INTO commit_aggregated_blocks_binding + SELECT + aggregate_operations.id, blocks.number + FROM aggregate_operations + INNER JOIN blocks ON blocks.number BETWEEN aggregate_operations.from_block AND aggregate_operations.to_block + WHERE aggregate_operations.action_type = 'CommitBlocks' and aggregate_operations.id = $1 + "#, + id + ).execute(transaction.conn()).await?; + } + + if operation.is_execute() { + sqlx::query!( + r#" + INSERT INTO execute_aggregated_blocks_binding + SELECT + aggregate_operations.id, blocks.number + FROM aggregate_operations + INNER JOIN blocks ON blocks.number BETWEEN aggregate_operations.from_block AND aggregate_operations.to_block + WHERE aggregate_operations.action_type = 'ExecuteBlocks' and aggregate_operations.id = $1 + "#, + id + ).execute(transaction.conn()).await?; + } + + if !operation.is_create_proof() { sqlx::query!( "INSERT INTO eth_unprocessed_aggregated_ops (op_id) VALUES ($1)", diff --git a/core/lib/storage/src/chain/operations_ext/mod.rs b/core/lib/storage/src/chain/operations_ext/mod.rs index 82253702c6..0a0be4da2c 100644 --- a/core/lib/storage/src/chain/operations_ext/mod.rs +++ b/core/lib/storage/src/chain/operations_ext/mod.rs @@ -381,31 +381,31 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { let mut tx_history = sqlx::query_as!( TransactionsHistoryItem, r#" - with eth_ops as ( - select distinct on (to_block, action_type) - aggregate_operations.from_block, - aggregate_operations.to_block, - aggregate_operations.action_type, - aggregate_operations.confirmed - from aggregate_operations - order by to_block DESC, action_type, confirmed - ), transactions as ( - select + WITH aggr_exec AS ( + SELECT + aggregate_operations.confirmed, + execute_aggregated_blocks_binding.block_number + FROM aggregate_operations + INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id + WHERE aggregate_operations.confirmed = true + ), + transactions AS ( + SELECT * - from ( - select - concat_ws(',', block_number, block_index) as tx_id, + FROM ( + SELECT + concat_ws(',', block_number, block_index) AS tx_id, tx, - 'sync-tx:' || encode(tx_hash, 'hex') as hash, + 'sync-tx:' || encode(tx_hash, 'hex') AS hash, null as pq_id, null as eth_block, success, fail_reason, block_number, created_at - from + FROM executed_transactions - where + WHERE from_account = $1 or to_account = $1 @@ -447,8 +447,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { coalesce(verified.confirmed, false) as "verified!", created_at as "created_at!" 
from transactions - left join eth_ops verified on - verified.from_block <= transactions.block_number AND verified.to_block >= transactions.block_number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true + LEFT JOIN aggr_exec verified ON transactions.block_number = verified.block_number order by transactions.block_number desc, created_at desc "#, address.as_ref(), offset as i64, limit as i64 @@ -540,14 +539,20 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { let mut tx_history = sqlx::query_as!( TransactionsHistoryItem, r#" - with eth_ops as ( - select distinct on (to_block, action_type) - aggregate_operations.from_block, - aggregate_operations.to_block, - aggregate_operations.action_type, - aggregate_operations.confirmed - from aggregate_operations - order by to_block DESC, action_type, confirmed + WITH aggr_comm AS ( + SELECT + aggregate_operations.confirmed, + commit_aggregated_blocks_binding.block_number + FROM aggregate_operations + INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id + WHERE aggregate_operations.confirmed = true + ), aggr_exec AS ( + SELECT + aggregate_operations.confirmed, + execute_aggregated_blocks_binding.block_number + FROM aggregate_operations + INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id + WHERE aggregate_operations.confirmed = true ), transactions as ( select * @@ -613,10 +618,10 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { coalesce(verified.confirmed, false) as "verified!", created_at as "created_at!" from transactions - left join eth_ops committed on - committed.from_block <= transactions.block_number AND committed.to_block >= transactions.block_number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true - left join eth_ops verified on - verified.from_block <= transactions.block_number AND verified.to_block >= transactions.block_number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true + left join aggr_comm committed on + committed.block_number = transactions.block_number AND committed.confirmed = true + left join aggr_exec verified on + verified.block_number = transactions.block_number AND verified.confirmed = true order by transactions.block_number desc, created_at desc "#, address.as_ref(), @@ -693,29 +698,36 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { AccountTxReceiptResponse, r#" WITH block_details AS ( - WITH eth_ops AS ( - SELECT DISTINCT ON (to_block, action_type) - aggregate_operations.from_block, - aggregate_operations.to_block, - eth_tx_hashes.tx_hash, - aggregate_operations.action_type, - aggregate_operations.created_at, - aggregate_operations.confirmed + WITH aggr_comm AS ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + commit_aggregated_blocks_binding.block_number FROM aggregate_operations - left join eth_aggregated_ops_binding on eth_aggregated_ops_binding.op_id = aggregate_operations.id - left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id - ORDER BY to_block DESC, action_type, confirmed + INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true + ) + , aggr_exec as ( + SELECT + 
aggregate_operations.created_at, + eth_operations.final_hash, + execute_aggregated_blocks_binding.block_number + FROM aggregate_operations + INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true ) SELECT blocks.number AS details_block_number, - committed.tx_hash AS commit_tx_hash, - verified.tx_hash AS verify_tx_hash + committed.final_hash AS commit_tx_hash, + verified.final_hash AS verify_tx_hash FROM blocks - INNER JOIN eth_ops committed ON - committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true - LEFT JOIN eth_ops verified ON - verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true - ) + INNER JOIN aggr_comm committed ON blocks.number = committed.block_number + LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number + ) SELECT block_number, block_index as "block_index?", @@ -752,28 +764,35 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { AccountTxReceiptResponse, r#" WITH block_details AS ( - WITH eth_ops AS ( - SELECT DISTINCT ON (to_block, action_type) - aggregate_operations.from_block, - aggregate_operations.to_block, - eth_tx_hashes.tx_hash, - aggregate_operations.action_type, - aggregate_operations.created_at, - aggregate_operations.confirmed + WITH aggr_comm AS ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + commit_aggregated_blocks_binding.block_number + FROM aggregate_operations + INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true + ) + , aggr_exec as ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + execute_aggregated_blocks_binding.block_number FROM aggregate_operations - left join eth_aggregated_ops_binding on eth_aggregated_ops_binding.op_id = aggregate_operations.id - left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id - ORDER BY to_block DESC, action_type, confirmed + INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true ) SELECT blocks.number AS details_block_number, - committed.tx_hash AS commit_tx_hash, - verified.tx_hash AS verify_tx_hash + committed.final_hash AS commit_tx_hash, + verified.final_hash AS verify_tx_hash FROM blocks - INNER JOIN eth_ops committed ON - committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true - LEFT JOIN eth_ops verified ON - verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true + INNER 
JOIN aggr_comm committed ON blocks.number = committed.block_number + LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number ) SELECT block_number, @@ -840,28 +859,35 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { AccountOpReceiptResponse, r#" WITH block_details AS ( - WITH eth_ops AS ( - SELECT DISTINCT ON (to_block, action_type) - aggregate_operations.from_block, - aggregate_operations.to_block, - eth_tx_hashes.tx_hash, - aggregate_operations.action_type, - aggregate_operations.created_at, - aggregate_operations.confirmed + WITH aggr_comm AS ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + commit_aggregated_blocks_binding.block_number FROM aggregate_operations - left join eth_aggregated_ops_binding on eth_aggregated_ops_binding.op_id = aggregate_operations.id - left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id - ORDER BY to_block DESC, action_type, confirmed + INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true + ) + , aggr_exec as ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + execute_aggregated_blocks_binding.block_number + FROM aggregate_operations + INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true ) SELECT blocks.number AS details_block_number, - committed.tx_hash AS commit_tx_hash, - verified.tx_hash AS verify_tx_hash + committed.final_hash AS commit_tx_hash, + verified.final_hash AS verify_tx_hash FROM blocks - INNER JOIN eth_ops committed ON - committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true - LEFT JOIN eth_ops verified ON - verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true + INNER JOIN aggr_comm committed ON blocks.number = committed.block_number + LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number ) SELECT block_number, @@ -897,28 +923,35 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { AccountOpReceiptResponse, r#" WITH block_details AS ( - WITH eth_ops AS ( - SELECT DISTINCT ON (to_block, action_type) - aggregate_operations.from_block, - aggregate_operations.to_block, - eth_tx_hashes.tx_hash, - aggregate_operations.action_type, - aggregate_operations.created_at, - aggregate_operations.confirmed + WITH aggr_comm AS ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + commit_aggregated_blocks_binding.block_number + FROM aggregate_operations + INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true + ) + , aggr_exec as ( + SELECT + 
aggregate_operations.created_at, + eth_operations.final_hash, + execute_aggregated_blocks_binding.block_number FROM aggregate_operations - left join eth_aggregated_ops_binding on eth_aggregated_ops_binding.op_id = aggregate_operations.id - left join eth_tx_hashes on eth_tx_hashes.eth_op_id = eth_aggregated_ops_binding.eth_op_id - ORDER BY to_block DESC, action_type, confirmed + INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true ) SELECT blocks.number AS details_block_number, - committed.tx_hash AS commit_tx_hash, - verified.tx_hash AS verify_tx_hash + committed.final_hash AS commit_tx_hash, + verified.final_hash AS verify_tx_hash FROM blocks - INNER JOIN eth_ops committed ON - committed.from_block <= blocks.number AND committed.to_block >= blocks.number AND committed.action_type = 'CommitBlocks' AND committed.confirmed = true - LEFT JOIN eth_ops verified ON - verified.from_block <= blocks.number AND verified.to_block >= blocks.number AND verified.action_type = 'ExecuteBlocks' AND verified.confirmed = true + INNER JOIN aggr_comm committed ON blocks.number = committed.block_number + LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number ) SELECT block_number, diff --git a/core/lib/storage/src/connection/mod.rs b/core/lib/storage/src/connection/mod.rs index eaf139c2e9..f5c676e87d 100644 --- a/core/lib/storage/src/connection/mod.rs +++ b/core/lib/storage/src/connection/mod.rs @@ -44,7 +44,7 @@ impl Manager for DbPool { /// the fixed size pool of connection to the database. /// /// The size of the pool and the database URL are configured via environment -/// variables `DB_POOL_SIZE` and `DATABASE_URL` respectively. +/// variables `DATABASE_POOL_SIZE` and `DATABASE_URL` respectively. #[derive(Clone)] pub struct ConnectionPool { pool: Pool, @@ -59,10 +59,10 @@ impl fmt::Debug for ConnectionPool { impl ConnectionPool { /// Establishes a pool of the connections to the database and /// creates a new `ConnectionPool` object. - /// pool_max_size - number of connections in pool, if not set env variable "DB_POOL_SIZE" is going to be used. + /// pool_max_size - number of connections in pool, if not set env variable "DATABASE_POOL_SIZE" is going to be used. pub fn new(pool_max_size: Option) -> Self { let database_url = Self::get_database_url(); - let max_size = pool_max_size.unwrap_or_else(|| parse_env("DB_POOL_SIZE")); + let max_size = pool_max_size.unwrap_or_else(|| parse_env("DATABASE_POOL_SIZE")); let pool = DbPool::create(database_url, max_size as usize); diff --git a/core/lib/storage/src/prover/mod.rs b/core/lib/storage/src/prover/mod.rs index 9231e688c7..8253b023bf 100644 --- a/core/lib/storage/src/prover/mod.rs +++ b/core/lib/storage/src/prover/mod.rs @@ -27,8 +27,8 @@ impl<'a, 'c> ProverSchema<'a, 'c> { pub async fn pending_jobs_count(&mut self) -> QueryResult { let start = Instant::now(); let pending_jobs_count = sqlx::query!( - "SELECT COUNT(*) FROM prover_job_queue WHERE job_status = $1", - ProverJobStatus::Idle.to_number() + "SELECT COUNT(*) FROM prover_job_queue WHERE job_status != $1", + ProverJobStatus::Done.to_number() ) .fetch_one(self.0.conn()) .await? 
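The storage changes above add `OperationsSchema::remove_rejected_transactions`, which deletes failed rows from `executed_transactions` that are older than a given age. The sketch below is illustrative only and is not part of this patch: it assumes a `zksync_storage::ConnectionPool` handle with the `access_storage` helper used elsewhere in the codebase, assumes `QueryResult` is re-exported from the crate root, and uses the 336-hour `rejected_transactions_max_age` value introduced further down in `etc/env/base/database.toml`; the function name `prune_rejected_transactions` is made up for the example.

    // Illustrative sketch (not code from this patch): drop rejected transactions
    // older than 336 hours, mirroring the `rejected_transactions_max_age` setting.
    use chrono::Duration;
    use zksync_storage::{ConnectionPool, QueryResult};

    async fn prune_rejected_transactions(pool: &ConnectionPool) -> QueryResult<()> {
        // Obtain a storage processor from the pool (assumed API, as used elsewhere).
        let mut storage = pool.access_storage().await?;
        storage
            .chain()
            .operations_schema()
            .remove_rejected_transactions(Duration::hours(336))
            .await?;
        Ok(())
    }

A periodic task could invoke such a helper on the `rejected_transactions_cleaner_interval` schedule from the same config section.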
diff --git a/core/lib/storage/src/tests/chain/operations.rs b/core/lib/storage/src/tests/chain/operations.rs index f83dd37494..565e2dd53f 100644 --- a/core/lib/storage/src/tests/chain/operations.rs +++ b/core/lib/storage/src/tests/chain/operations.rs @@ -1,4 +1,5 @@ // External imports +use chrono::{Duration, Utc}; // Workspace imports use zksync_types::{aggregated_operations::AggregatedActionType, BlockNumber}; // Local imports @@ -273,3 +274,99 @@ async fn transaction_resent(mut storage: StorageProcessor<'_>) -> QueryResult<() Ok(()) } + +/// Checks that rejected transactions are removed correctly depending on the given age limit. +#[db_test] +async fn remove_rejected_transactions(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + const BLOCK_NUMBER: i64 = 1; + // Two failed transactions created a week and two weeks ago respectively and one successful. + let timestamp_1 = Utc::now() - Duration::weeks(1); + let executed_tx_1 = NewExecutedTransaction { + block_number: BLOCK_NUMBER, + tx_hash: vec![0x12, 0xAD, 0xBE, 0xEF], + tx: Default::default(), + operation: Default::default(), + from_account: Default::default(), + to_account: None, + success: false, + fail_reason: None, + block_index: None, + primary_account_address: Default::default(), + nonce: Default::default(), + created_at: timestamp_1, + eth_sign_data: None, + batch_id: None, + }; + let timestamp_2 = timestamp_1 - Duration::weeks(1); + let mut executed_tx_2 = executed_tx_1.clone(); + // Set new timestamp and different tx_hash since it's a PK. + executed_tx_2.created_at = timestamp_2; + executed_tx_2.tx_hash = vec![0, 11, 21, 5]; + // Successful one. + let mut executed_tx_3 = executed_tx_1.clone(); + executed_tx_3.success = true; + executed_tx_3.tx_hash = vec![1, 1, 2, 30]; + executed_tx_3.created_at = timestamp_2 - Duration::weeks(1); + // Store them. + storage + .chain() + .operations_schema() + .store_executed_tx(executed_tx_1) + .await?; + storage + .chain() + .operations_schema() + .store_executed_tx(executed_tx_2) + .await?; + storage + .chain() + .operations_schema() + .store_executed_tx(executed_tx_3) + .await?; + // First check, no transactions removed. + storage + .chain() + .operations_schema() + .remove_rejected_transactions(Duration::weeks(3)) + .await?; + let block_number = BlockNumber(0); + let count = storage + .chain() + .stats_schema() + .count_outstanding_proofs(block_number) + .await?; + assert_eq!(count, 3); + // Second transaction should be removed. + storage + .chain() + .operations_schema() + .remove_rejected_transactions(Duration::days(10)) + .await?; + let count = storage + .chain() + .stats_schema() + .count_outstanding_proofs(block_number) + .await?; + assert_eq!(count, 2); + // Finally, no rejected transactions remain. + storage + .chain() + .operations_schema() + .remove_rejected_transactions(Duration::days(4)) + .await?; + let count = storage + .chain() + .stats_schema() + .count_outstanding_proofs(block_number) + .await?; + assert_eq!(count, 1); + // The last one is indeed successful. 
+ let count = storage + .chain() + .stats_schema() + .count_total_transactions() + .await?; + assert_eq!(count, 1); + + Ok(()) +} diff --git a/core/lib/storage/src/tests/prover.rs b/core/lib/storage/src/tests/prover.rs index b23d68187d..496a9ee69f 100644 --- a/core/lib/storage/src/tests/prover.rs +++ b/core/lib/storage/src/tests/prover.rs @@ -161,20 +161,18 @@ async fn pending_jobs_count(mut storage: &mut StorageProcessor<'_>) -> QueryResu let jobs_count = ProverSchema(&mut storage).pending_jobs_count().await?; assert_eq!(jobs_count, 3); - // Get a job and now the number of idle jobs must be 2. let first_job = get_idle_job_from_queue(&mut storage).await?; let jobs_count = ProverSchema(&mut storage).pending_jobs_count().await?; - assert_eq!(jobs_count, 2); + assert_eq!(jobs_count, 3); // Create next run & repeat checks. let second_job = get_idle_job_from_queue(&mut storage).await?; let jobs_count = ProverSchema(&mut storage).pending_jobs_count().await?; - assert_eq!(jobs_count, 1); + assert_eq!(jobs_count, 3); - // And finally store the proof for the third block. let third_job = get_idle_job_from_queue(&mut storage).await?; let jobs_count = ProverSchema(&mut storage).pending_jobs_count().await?; - assert_eq!(jobs_count, 0); + assert_eq!(jobs_count, 3); // Record prover is working and stopped it. ProverSchema(&mut storage) @@ -196,7 +194,7 @@ async fn pending_jobs_count(mut storage: &mut StorageProcessor<'_>) -> QueryResu ) .await?; let jobs_count = ProverSchema(&mut storage).pending_jobs_count().await?; - assert_eq!(jobs_count, 0); + assert_eq!(jobs_count, 2); ProverSchema(&mut storage) .record_prover_stop("test_prover") diff --git a/core/lib/types/src/aggregated_operations.rs b/core/lib/types/src/aggregated_operations.rs index e93038497c..6a0bdaefe2 100644 --- a/core/lib/types/src/aggregated_operations.rs +++ b/core/lib/types/src/aggregated_operations.rs @@ -203,6 +203,28 @@ impl AggregatedOperation { AggregatedOperation::ExecuteBlocks(op) => op.block_range(), } } + + pub fn is_commit(&self) -> bool { + matches!(self.get_action_type(), AggregatedActionType::CommitBlocks) + } + + pub fn is_execute(&self) -> bool { + matches!(self.get_action_type(), AggregatedActionType::ExecuteBlocks) + } + + pub fn is_create_proof(&self) -> bool { + matches!( + self.get_action_type(), + AggregatedActionType::CreateProofBlocks + ) + } + + pub fn is_publish_proofs(&self) -> bool { + matches!( + self.get_action_type(), + AggregatedActionType::PublishProofBlocksOnchain + ) + } } impl From for AggregatedOperation { diff --git a/core/tests/ts-tests/tests/batch-builder.ts b/core/tests/ts-tests/tests/batch-builder.ts index 24e874f50b..1f8e918d62 100644 --- a/core/tests/ts-tests/tests/batch-builder.ts +++ b/core/tests/ts-tests/tests/batch-builder.ts @@ -68,7 +68,7 @@ Tester.prototype.testBatchBuilderChangePubKey = async function ( const balanceBefore = await sender.getBalance(token); const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature]); - await Promise.all(handles.map((handle) => handle.awaitVerifyReceipt())); + await Promise.all(handles.map((handle) => handle.awaitReceipt())); expect(await sender.isSigningKeySet(), 'ChangePubKey failed').to.be.true; const balanceAfter = await sender.getBalance(token); expect(balanceBefore.sub(balanceAfter).eq(amount.add(totalFee)), 'Wrong amount in wallet after withdraw').to.be @@ -154,7 +154,7 @@ Tester.prototype.testBatchBuilderGenericUsage = async function ( const senderBefore = await sender.getBalance(token); const 
receiverBefore = await receiver.getBalance(token); const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature]); - await Promise.all(handles.map((handle) => handle.awaitVerifyReceipt())); + await Promise.all(handles.map((handle) => handle.awaitReceipt())); const senderAfter = await sender.getBalance(token); const receiverAfter = await receiver.getBalance(token); const targetBalance = await target.getBalance(token); diff --git a/core/tests/ts-tests/tests/forced-exit.ts b/core/tests/ts-tests/tests/forced-exit.ts index 8b732eb281..ea0a2f5b59 100644 --- a/core/tests/ts-tests/tests/forced-exit.ts +++ b/core/tests/ts-tests/tests/forced-exit.ts @@ -1,7 +1,6 @@ import { Tester } from './tester'; import { expect } from 'chai'; import { Wallet, types } from 'zksync'; -import { sleep } from 'zksync/build/utils'; type TokenLike = types.TokenLike; @@ -32,17 +31,7 @@ Tester.prototype.testVerifiedForcedExit = async function ( // Checking that there are some complete withdrawals tx hash for this ForcedExit // we should wait some time for `completeWithdrawals` transaction to be processed - let withdrawalTxHash = null; - const polling_interval = 200; // ms - const polling_timeout = 35000; // ms - const polling_iterations = polling_timeout / polling_interval; - for (let i = 0; i < polling_iterations; i++) { - withdrawalTxHash = await this.syncProvider.getEthTxForWithdrawal(handle.txHash); - if (withdrawalTxHash != null) { - break; - } - await sleep(polling_interval); - } + const withdrawalTxHash = await this.syncProvider.getEthTxForWithdrawal(handle.txHash); expect(withdrawalTxHash, 'Withdrawal was not processed onchain').to.exist; await this.ethProvider.waitForTransaction(withdrawalTxHash as string); diff --git a/core/tests/ts-tests/tests/main.test.ts b/core/tests/ts-tests/tests/main.test.ts index fbcaa42186..886e6e624d 100644 --- a/core/tests/ts-tests/tests/main.test.ts +++ b/core/tests/ts-tests/tests/main.test.ts @@ -112,17 +112,6 @@ describe(`ZkSync integration tests (token: ${token}, transport: ${transport})`, await tester.testRejectedBatch(alice, bob, token, TX_AMOUNT); }); - step('should execute a withdrawal', async () => { - await tester.testVerifiedWithdraw(alice, token, TX_AMOUNT); - }); - - step('should execute a ForcedExit', async () => { - if (onlyBasic) { - return; - } - await tester.testVerifiedForcedExit(alice, bob, token); - }); - step('should test batch-builder', async () => { // We will pay with different token. const feeToken = token == 'ETH' ? 
'wBTC' : 'ETH'; @@ -152,6 +141,17 @@ describe(`ZkSync integration tests (token: ${token}, transport: ${transport})`, await tester.testBackwardCompatibleEthMessages(alice, david, token, TX_AMOUNT); }); + step('should execute a withdrawal', async () => { + await tester.testVerifiedWithdraw(alice, token, TX_AMOUNT); + }); + + step('should execute a ForcedExit', async () => { + if (onlyBasic) { + return; + } + await tester.testVerifiedForcedExit(alice, bob, token); + }); + it('should check collected fees', async () => { const collectedFee = (await tester.operatorBalance(token)).sub(operatorBalance); expect(collectedFee.eq(tester.runningFee), `Fee collection failed, expected: ${tester.runningFee.toString()}, got: ${collectedFee.toString()}`).to.be.true; diff --git a/core/tests/ts-tests/tests/misc.ts b/core/tests/ts-tests/tests/misc.ts index f31d5906ba..0e930ebc1b 100644 --- a/core/tests/ts-tests/tests/misc.ts +++ b/core/tests/ts-tests/tests/misc.ts @@ -105,7 +105,7 @@ Tester.prototype.testMultipleBatchSigners = async function (wallets: Wallet[], t const senderBefore = await batchSender.getBalance(token); const handles = await submitSignedTransactionsBatch(batchSender.provider, batch, ethSignatures); - await Promise.all(handles.map((handle) => handle.awaitVerifyReceipt())); + await Promise.all(handles.map((handle) => handle.awaitReceipt())); const senderAfter = await batchSender.getBalance(token); // Sender paid totalFee for this cycle. expect(senderBefore.sub(senderAfter).eq(totalFee), 'Batched transfer failed').to.be.true; @@ -233,6 +233,6 @@ Tester.prototype.testBackwardCompatibleEthMessages = async function ( const handles = await submitSignedTransactionsBatch(to.provider, batch, ethSignatures); // We only expect that API doesn't reject this batch due to Eth signature error. 
- await Promise.all(handles.map((handle) => handle.awaitVerifyReceipt())); + await Promise.all(handles.map((handle) => handle.awaitReceipt())); this.runningFee = this.runningFee.add(totalFee); }; diff --git a/core/tests/ts-tests/tests/withdraw.ts b/core/tests/ts-tests/tests/withdraw.ts index 0d6df5889c..0e9b15fb0a 100644 --- a/core/tests/ts-tests/tests/withdraw.ts +++ b/core/tests/ts-tests/tests/withdraw.ts @@ -2,7 +2,6 @@ import { Tester } from './tester'; import { expect } from 'chai'; import { Wallet, types } from 'zksync'; import { BigNumber } from 'ethers'; -import { sleep } from 'zksync/build/utils'; type TokenLike = types.TokenLike; @@ -28,19 +27,7 @@ Tester.prototype.testVerifiedWithdraw = async function ( // Await for verification with a timeout set (through mocha's --timeout) await handle.awaitVerifyReceipt(); - // Checking that there are some complete withdrawals tx hash for this withdrawal - // we should wait some time for `completeWithdrawals` transaction to be processed - let withdrawalTxHash = null; - const polling_interval = 200; // ms - const polling_timeout = 35000; // ms - const polling_iterations = polling_timeout / polling_interval; - for (let i = 0; i < polling_iterations; i++) { - withdrawalTxHash = await this.syncProvider.getEthTxForWithdrawal(handle.txHash); - if (withdrawalTxHash != null) { - break; - } - await sleep(polling_interval); - } + const withdrawalTxHash = await this.syncProvider.getEthTxForWithdrawal(handle.txHash); expect(withdrawalTxHash, 'Withdrawal was not processed onchain').to.exist; await this.ethProvider.waitForTransaction(withdrawalTxHash as string); diff --git a/etc/env/base/chain.toml b/etc/env/base/chain.toml index 68f88db429..d581c48350 100644 --- a/etc/env/base/chain.toml +++ b/etc/env/base/chain.toml @@ -31,9 +31,16 @@ miniblock_iterations=10 # Maximum amount of miniblock iterations in case of block containing a fast withdrawal request. fast_block_miniblock_iterations=5 -max_aggregated_blocks_to_commit=5 -max_aggregated_blocks_to_execute=5 -block_commit_deadline=300 -block_prove_deadline=3000 -block_execute_deadline=4000 -max_aggregated_tx_gas=4000000 +# Max L2 blocks to commit in one L1 transaction +max_aggregated_blocks_to_commit=10 +# Max L2 blocks to execute in one L1 transaction +max_aggregated_blocks_to_execute=10 +# Time (seconds) after block is created with timestamp T after which L1 aggregated commit operation must be created +block_commit_deadline=1 +# Time (seconds) after block is created with timestamp T after which L1 aggregated prove operation must be created +block_prove_deadline=1 +# Time (seconds) after block is created with timestamp T after which L1 aggregated execute operation must be created +block_execute_deadline=1 +# Max gas that can be used to execute aggregated operation +# for now (should be > 4kk which is max gas for one block commit/verify/execute) +max_aggregated_tx_gas=5000000 diff --git a/etc/env/base/contracts.toml b/etc/env/base/contracts.toml index c61ac18ef6..2e9f9d24cc 100644 --- a/etc/env/base/contracts.toml +++ b/etc/env/base/contracts.toml @@ -15,4 +15,5 @@ GENESIS_ROOT="0x2d5ab622df708ab44944bb02377be85b6f27812e9ae520734873b7a193898ba4 [contracts.test] dummy_verifier=false +easy_exodus=false diff --git a/etc/env/base/database.toml b/etc/env/base/database.toml index 7ae5b4b9dc..5e8df773d2 100644 --- a/etc/env/base/database.toml +++ b/etc/env/base/database.toml @@ -1,5 +1,10 @@ -[db] +[database] # Datbase URL is defined in the `private.toml` # Amount of open connections to the database. 
pool_size=10 + +# Rejected transactions will be stored in the database for this amount of hours. +rejected_transactions_max_age=336 +# Sleep time (in hours) of the actor responsible for deleting failed transactions. +rejected_transactions_cleaner_interval=24 diff --git a/infrastructure/grafana/dashboards/chain_operations.jsonnet b/infrastructure/grafana/dashboards/chain_operations.jsonnet index 6020da51f1..3a8ec13d5a 100644 --- a/infrastructure/grafana/dashboards/chain_operations.jsonnet +++ b/infrastructure/grafana/dashboards/chain_operations.jsonnet @@ -20,6 +20,7 @@ local metrics = [ "sql.chain.operations.get_last_block_by_aggregated_action", "sql.chain.operations.get_operation", "sql.chain.operations.no_stored_pending_withdrawals", + "sql.chain.operations.remove_rejected_transactions", "sql.chain.operations.store_executed_priority_op", "sql.chain.operations.store_executed_tx", "sql.chain.operations.store_operation", diff --git a/infrastructure/zk/src/init.ts b/infrastructure/zk/src/init.ts index 25686d94b8..7034d529fe 100644 --- a/infrastructure/zk/src/init.ts +++ b/infrastructure/zk/src/init.ts @@ -39,13 +39,13 @@ async function createVolumes() { } async function checkEnv() { - const tools = ['node', 'yarn', 'docker', 'docker-compose', 'cargo', 'psql', 'pg_isready', 'diesel', 'solc']; + const tools = ['node', 'yarn', 'docker', 'docker-compose', 'cargo', 'psql', 'pg_isready', 'diesel']; for (const tool of tools) { await utils.exec(`which ${tool}`); } await utils.exec('cargo sqlx --version'); const { stdout: version } = await utils.exec('node --version'); - // Node v.14.14 is required because + // Node v14.14 is required because // the `fs.rmSync` function was added in v14.14.0 if ('v14.14' >= version) { throw new Error('Error, node.js version 14.14.0 or higher is required'); diff --git a/infrastructure/zk/src/run/verify-keys.ts b/infrastructure/zk/src/run/verify-keys.ts index dd8df7de33..bf54c94540 100644 --- a/infrastructure/zk/src/run/verify-keys.ts +++ b/infrastructure/zk/src/run/verify-keys.ts @@ -10,7 +10,7 @@ function verfiyKeysTarball() { return `verify-keys-${keyDir}-account-${accountTreeDepth}_-balance-${balanceTreeDepth}.tar.gz`; } -export async function gen(command: 'contract' | 'all') { +export async function gen(command: 'contract' | 'all' | 'circuit-size') { const accountTreeDepth = process.env.CHAIN_CIRCUIT_ACCOUNT_TREE_DEPTH; const balanceTreeDepth = process.env.CHAIN_CIRCUIT_BALANCE_TREE_DEPTH; const keyDir = process.env.CHAIN_CIRCUIT_KEY_DIR; @@ -21,9 +21,13 @@ export async function gen(command: 'contract' | 'all') { fs.utimesSync('core/lib/crypto/src/params.rs', time, time); fs.mkdirSync(outputDir, { recursive: true }); await utils.spawn('cargo run --bin key_generator --release -- keys'); + await utils.spawn('cargo run --bin key_generator --release -- contract'); + } else if (command == 'contract') { + await utils.spawn('cargo run --bin key_generator --release -- contract'); + } else if (command == 'circuit-size') { + await utils.spawn('cargo run --bin key_generator --release -- circuit-size'); } - await utils.spawn('cargo run --bin key_generator --release -- contract'); fs.copyFileSync(`${outputDir}/KeysWithPlonkVerifier.sol`, 'contracts/contracts/KeysWithPlonkVerifier.sol'); } @@ -53,8 +57,10 @@ command .description('generate verification keys') .action(async (command?: string) => { command = command || 'all'; - if (command != 'all' && command != 'contract') { - throw new Error('Can only generate "all" or "contract" keys'); + if (command != 'all' && command != 
'contract' && command != 'circuit-size') { + throw new Error( + 'Can only generate "all" or "contract" keys, or "circuit-size" for circuit size estimation' + ); } await gen(command); }); diff --git a/infrastructure/zk/src/server.ts b/infrastructure/zk/src/server.ts index 3b2b338906..f9506aefde 100644 --- a/infrastructure/zk/src/server.ts +++ b/infrastructure/zk/src/server.ts @@ -17,7 +17,7 @@ export async function genesis() { await db.reset(); await utils.confirmAction(); await utils.spawn('cargo run --bin zksync_server --release -- --genesis | tee genesis.log'); - const genesisRoot = fs.readFileSync('genesis.log').toString(); + const genesisRoot = fs.readFileSync('genesis.log').toString().trim(); const date = new Date(); const [year, month, day, hour, minute, second] = [ date.getFullYear(), diff --git a/sdk/zksync-rs/src/provider.rs b/sdk/zksync-rs/src/provider.rs index a428a9f2fd..e9293aa441 100644 --- a/sdk/zksync-rs/src/provider.rs +++ b/sdk/zksync-rs/src/provider.rs @@ -184,6 +184,15 @@ impl RpcProvider { } } + + /// Creates a new `Provider` object connected to a custom address and the desired zkSync network. + pub fn from_addr_and_network(rpc_addr: impl Into<String>, network: Network) -> Self { + Self { + rpc_addr: rpc_addr.into(), + client: reqwest::Client::new(), + network, + } + } + /// Submits a batch transaction to the zkSync network. /// Returns the hashes of the created transactions. pub async fn send_txs_batch( diff --git a/sdk/zksync.js/package.json b/sdk/zksync.js/package.json index 86155968f5..0e55685048 100644 --- a/sdk/zksync.js/package.json +++ b/sdk/zksync.js/package.json @@ -1,6 +1,6 @@ { "name": "zksync", - "version": "0.8.4-beta", + "version": "0.9.0", "license": "MIT", "main": "build/index.js", "types": "build/index.d.ts", @@ -22,7 +22,7 @@ "@types/mocha": "^8.0.3", "@types/node": "^14.14.5", "chai": "^4.2.0", - "ethers": "^5.0.19", + "ethers": "^5.0.26", "mocha": "^8.2.0", "rollup": "^2.32.1", "rollup-plugin-copy": "^3.3.0", diff --git a/sdk/zksync.js/src/provider.ts b/sdk/zksync.js/src/provider.ts index 5c71f08dc5..15ad7cb58b 100644 --- a/sdk/zksync.js/src/provider.ts +++ b/sdk/zksync.js/src/provider.ts @@ -17,6 +17,9 @@ import { import { isTokenETH, sleep, SYNC_GOV_CONTRACT_INTERFACE, TokenSet } from './utils'; export async function getDefaultProvider(network: Network, transport: 'WS' | 'HTTP' = 'HTTP'): Promise<Provider> { + if (transport === 'WS') { + console.warn('Websocket support will be removed in future. Use HTTP transport instead.'); + } if (network === 'localhost') { if (transport === 'WS') { return await Provider.newWebsocketProvider('ws://127.0.0.1:3031'); } @@ -67,6 +70,9 @@ export class Provider { private constructor(public transport: AbstractJSONRPCTransport) {} + /** + * @deprecated Websocket support will be removed in future. Use HTTP transport instead. + */ static async newWebsocketProvider(address: string): Promise<Provider> { const transport = await WSTransport.connect(address); const provider = new Provider(transport); diff --git a/sdk/zksync.js/src/transport.ts b/sdk/zksync.js/src/transport.ts index 481a6fef10..9cc09a6262 100644 --- a/sdk/zksync.js/src/transport.ts +++ b/sdk/zksync.js/src/transport.ts @@ -70,6 +70,9 @@ export class HTTPTransport extends AbstractJSONRPCTransport { async disconnect() {} } +/** + * @deprecated Websocket support will be removed in future. Use HTTP transport instead. 
+ */ export class WSTransport extends AbstractJSONRPCTransport { ws: WebSocketAsPromised; private subscriptionCallback: Map void>; diff --git a/sdk/zksync.js/src/utils.ts b/sdk/zksync.js/src/utils.ts index 2a24f68692..808164b1c2 100644 --- a/sdk/zksync.js/src/utils.ts +++ b/sdk/zksync.js/src/utils.ts @@ -39,7 +39,12 @@ export const ERC20_APPROVE_TRESHOLD = BigNumber.from( '57896044618658097711785492504343953926634992332820282019728792003956564819968' ); // 2^255 -export const ERC20_DEPOSIT_GAS_LIMIT = BigNumber.from('300000'); // 300k +// Gas limit that is set for an ETH deposit by default. For default EOA accounts, 60k should be enough, but we reserve +// more gas for smart-contract wallets. +export const ETH_RECOMMENDED_DEPOSIT_GAS_LIMIT = BigNumber.from('90000'); // 90k +// For a normal wallet/ERC20 token, 90k gas for a deposit should be enough, but for some tokens this can go as high as ~200k, +// so we try to be safe by default. +export const ERC20_RECOMMENDED_DEPOSIT_GAS_LIMIT = BigNumber.from('300000'); // 300k const AMOUNT_EXPONENT_BIT_WIDTH = 5; const AMOUNT_MANTISSA_BIT_WIDTH = 35; @@ -711,7 +716,11 @@ export async function getPendingBalance( address: Address, token: TokenLike ): Promise { - const zksyncContract = new Contract(address, SYNC_MAIN_CONTRACT_INTERFACE, ethProvider); + const zksyncContract = new Contract( + syncProvider.contractAddress.mainContract, + SYNC_MAIN_CONTRACT_INTERFACE, + ethProvider + ); const tokenAddress = syncProvider.tokenSet.resolveTokenAddress(token); diff --git a/sdk/zksync.js/src/wallet.ts b/sdk/zksync.js/src/wallet.ts index 351d5c03b3..7f7e82efa4 100644 --- a/sdk/zksync.js/src/wallet.ts +++ b/sdk/zksync.js/src/wallet.ts @@ -31,12 +31,13 @@ import { isTokenETH, MAX_ERC20_APPROVE_AMOUNT, SYNC_MAIN_CONTRACT_INTERFACE, - ERC20_DEPOSIT_GAS_LIMIT, + ERC20_RECOMMENDED_DEPOSIT_GAS_LIMIT, signMessagePersonalAPI, getSignedBytesFromMessage, getChangePubkeyMessage, MAX_TIMESTAMP, - getEthereumBalance + getEthereumBalance, + ETH_RECOMMENDED_DEPOSIT_GAS_LIMIT } from './utils'; const EthersErrorCode = ErrorCode; @@ -818,7 +819,7 @@ export class Wallet { try { ethTransaction = await mainZkSyncContract.depositETH(deposit.depositTo, { value: BigNumber.from(deposit.amount), - gasLimit: BigNumber.from('200000'), + gasLimit: BigNumber.from(ETH_RECOMMENDED_DEPOSIT_GAS_LIMIT), gasPrice, ...deposit.ethTxOptions }); @@ -860,9 +861,9 @@ export class Wallet { (estimate) => estimate, () => BigNumber.from('0') ); - txRequest.gasLimit = gasEstimate.gte(ERC20_DEPOSIT_GAS_LIMIT) + txRequest.gasLimit = gasEstimate.gte(ERC20_RECOMMENDED_DEPOSIT_GAS_LIMIT) ? 
gasEstimate - : ERC20_DEPOSIT_GAS_LIMIT; + : ERC20_RECOMMENDED_DEPOSIT_GAS_LIMIT; args[args.length - 1] = txRequest; } catch (e) { this.modifyEthersError(e); diff --git a/yarn.lock b/yarn.lock index 8718c3ca7b..c1c653db03 100644 --- a/yarn.lock +++ b/yarn.lock @@ -954,6 +954,21 @@ "@ethersproject/properties" ">=5.0.0-beta.131" "@ethersproject/strings" ">=5.0.0-beta.130" +"@ethersproject/abi@5.0.11", "@ethersproject/abi@^5.0.10": + version "5.0.11" + resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.0.11.tgz#d3b6fd955d3172214603d4d123055b3b0b06376b" + integrity sha512-ibZswQsjdFuLSfY2lbRTZM2Uk+ci7tp+mjVK0kjxVol2V32cb7va1r6B4AJU/Ac/VTstCjxtn0KKMfbkPc002w== + dependencies: + "@ethersproject/address" "^5.0.9" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/hash" "^5.0.10" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + "@ethersproject/abi@5.0.7", "@ethersproject/abi@^5.0.1", "@ethersproject/abi@^5.0.5": version "5.0.7" resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.0.7.tgz#79e52452bd3ca2956d0e1c964207a58ad1a0ee7b" @@ -997,6 +1012,30 @@ "@ethersproject/transactions" "^5.0.5" "@ethersproject/web" "^5.0.6" +"@ethersproject/abstract-provider@5.0.9", "@ethersproject/abstract-provider@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.0.9.tgz#a55410b73e3994842884eb82b1f43e3a9f653eea" + integrity sha512-X9fMkqpeu9ayC3JyBkeeZhn35P4xQkpGX/l+FrxDtEW9tybf/UWXSMi8bGThpPtfJ6q6U2LDetXSpSwK4TfYQQ== + dependencies: + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/networks" "^5.0.7" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/transactions" "^5.0.9" + "@ethersproject/web" "^5.0.12" + +"@ethersproject/abstract-signer@5.0.12", "@ethersproject/abstract-signer@^5.0.10": + version "5.0.12" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.0.12.tgz#04ab597eb87a08faaab19dd5a739339e1e3beb58" + integrity sha512-qt4jAEzQGPZ31My1gFGPzzJHJveYhVycW7RHkuX0W8fvMdg7wr0uvP7mQEptMVrb+jYwsVktCf6gBGwWDpFiTA== + dependencies: + "@ethersproject/abstract-provider" "^5.0.8" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/abstract-signer@5.0.7", "@ethersproject/abstract-signer@^5.0.4", "@ethersproject/abstract-signer@^5.0.6": version "5.0.7" resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.0.7.tgz#cdbd3bd479edf77c71b7f6a6156b0275b1176ded" @@ -1008,6 +1047,17 @@ "@ethersproject/logger" "^5.0.5" "@ethersproject/properties" "^5.0.3" +"@ethersproject/address@5.0.10", "@ethersproject/address@^5.0.9": + version "5.0.10" + resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.0.10.tgz#2bc69fdff4408e0570471cd19dee577ab06a10d0" + integrity sha512-70vqESmW5Srua1kMDIN6uVfdneZMaMyRYH4qPvkAXGkbicrCOsA9m01vIloA4wYiiF+HLEfL1ENKdn5jb9xiAw== + dependencies: + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/rlp" "^5.0.7" + "@ethersproject/address@5.0.5", "@ethersproject/address@>=5.0.0-beta.128", "@ethersproject/address@^5.0.4", 
"@ethersproject/address@^5.0.5": version "5.0.5" resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.0.5.tgz#2caa65f6b7125015395b1b54c985ee0b27059cc7" @@ -1038,6 +1088,13 @@ dependencies: "@ethersproject/bytes" "^5.0.4" +"@ethersproject/base64@5.0.8", "@ethersproject/base64@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.0.8.tgz#1bc4b4b8c59c1debf972c7164b96c0b8964a20a1" + integrity sha512-PNbpHOMgZpZ1skvQl119pV2YkCPXmZTxw+T92qX0z7zaMFPypXWTZBzim+hUceb//zx4DFjeGT4aSjZRTOYThg== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/basex@5.0.4", "@ethersproject/basex@^5.0.3": version "5.0.4" resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.0.4.tgz#93e1cd11f9a47281da2389de24f88e13e9d90847" @@ -1046,6 +1103,23 @@ "@ethersproject/bytes" "^5.0.4" "@ethersproject/properties" "^5.0.3" +"@ethersproject/basex@5.0.8", "@ethersproject/basex@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.0.8.tgz#6867fad20047aa29fbd4b880f27894ed04cc7bb8" + integrity sha512-PCVKZIShBQUqAXjJSvaCidThPvL0jaaQZcewJc0sf8Xx05BizaOS8r3jdPdpNdY+/qZtRDqwHTSKjvR/xssyLQ== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/properties" "^5.0.7" + +"@ethersproject/bignumber@5.0.14", "@ethersproject/bignumber@^5.0.13": + version "5.0.14" + resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.0.14.tgz#605bc61dcbd4a8c6df8b5a7a77c0210273f3de8a" + integrity sha512-Q4TjMq9Gg3Xzj0aeJWqJgI3tdEiPiET7Y5OtNtjTAODZ2kp4y9jMNg97zVcvPedFvGROdpGDyCI77JDFodUzOw== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + bn.js "^4.4.0" + "@ethersproject/bignumber@5.0.8", "@ethersproject/bignumber@>=5.0.0-beta.130", "@ethersproject/bignumber@^5.0.7", "@ethersproject/bignumber@^5.0.8": version "5.0.8" resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.0.8.tgz#cee33bd8eb0266176def0d371b45274b1d2c4ec0" @@ -1064,6 +1138,13 @@ "@ethersproject/logger" "^5.0.5" bn.js "^4.4.0" +"@ethersproject/bytes@5.0.10", "@ethersproject/bytes@^5.0.9": + version "5.0.10" + resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.0.10.tgz#aa49afe7491ba24ff76fa33d98677351263f9ba4" + integrity sha512-vpu0v1LZ1j1s9kERQIMnVU69MyHEzUff7nqK9XuCU4vx+AM8n9lU2gj7jtJIvGSt9HzatK/6I6bWusI5nyuaTA== + dependencies: + "@ethersproject/logger" "^5.0.8" + "@ethersproject/bytes@5.0.5", "@ethersproject/bytes@>=5.0.0-beta.129", "@ethersproject/bytes@^5.0.4": version "5.0.5" resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.0.5.tgz#688b70000e550de0c97a151a21f15b87d7f97d7c" @@ -1078,6 +1159,28 @@ dependencies: "@ethersproject/bignumber" "^5.0.7" +"@ethersproject/constants@5.0.9", "@ethersproject/constants@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.0.9.tgz#81ac44c3bf612de63eb1c490b314ea1b932cda9f" + integrity sha512-2uAKH89UcaJP/Sc+54u92BtJtZ4cPgcS1p0YbB1L3tlkavwNvth+kNCUplIB1Becqs7BOZr0B/3dMNjhJDy4Dg== + dependencies: + "@ethersproject/bignumber" "^5.0.13" + +"@ethersproject/contracts@5.0.10": + version "5.0.10" + resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.0.10.tgz#650cbf6f3cf89a63006ea91727a68aee4dc3381f" + integrity sha512-h9kdvllwT6B1LyUXeNQIb7Y6u6ZprP5LUiQIjSqvOehhm1sFZcaVtydsSa0LIg3SBC5QF0M7zH5p7EtI2VD0rQ== + dependencies: + "@ethersproject/abi" "^5.0.10" + "@ethersproject/abstract-provider" "^5.0.8" + 
"@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/address" "^5.0.9" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/contracts@5.0.5": version "5.0.5" resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.0.5.tgz#64831a341ec8ca225e83ff3e9437c26b970fd5d7" @@ -1093,6 +1196,20 @@ "@ethersproject/logger" "^5.0.5" "@ethersproject/properties" "^5.0.3" +"@ethersproject/hash@5.0.11", "@ethersproject/hash@^5.0.10": + version "5.0.11" + resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.0.11.tgz#da89517438bbbf8a39df56fff09f0a71669ae7a7" + integrity sha512-H3KJ9fk33XWJ2djAW03IL7fg3DsDMYjO1XijiUb1hJ85vYfhvxu0OmsU7d3tg2Uv1H1kFSo8ghr3WFQ8c+NL3g== + dependencies: + "@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/address" "^5.0.9" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + "@ethersproject/hash@5.0.6", "@ethersproject/hash@>=5.0.0-beta.128", "@ethersproject/hash@^5.0.4": version "5.0.6" resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.0.6.tgz#2a2e8a1470685421217e9e86e9971ca636e609ce" @@ -1125,6 +1242,43 @@ "@ethersproject/transactions" "^5.0.5" "@ethersproject/wordlists" "^5.0.4" +"@ethersproject/hdnode@5.0.9", "@ethersproject/hdnode@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.0.9.tgz#ce65b430d3d3f0cd3c8f9dfaaf376b55881d9dba" + integrity sha512-S5UMmIC6XfFtqhUK4uTjD8GPNzSbE+sZ/0VMqFnA3zAJ+cEFZuEyhZDYnl2ItGJzjT4jsy+uEy1SIl3baYK1PQ== + dependencies: + "@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/basex" "^5.0.7" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/pbkdf2" "^5.0.7" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/sha2" "^5.0.7" + "@ethersproject/signing-key" "^5.0.8" + "@ethersproject/strings" "^5.0.8" + "@ethersproject/transactions" "^5.0.9" + "@ethersproject/wordlists" "^5.0.8" + +"@ethersproject/json-wallets@5.0.11", "@ethersproject/json-wallets@^5.0.10": + version "5.0.11" + resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.0.11.tgz#86fdc41b7762acb443d6a896f6c61231ab2aee5d" + integrity sha512-0GhWScWUlXXb4qJNp0wmkU95QS3YdN9UMOfMSEl76CRANWWrmyzxcBVSXSBu5iQ0/W8wO+xGlJJ3tpA6v3mbIw== + dependencies: + "@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/address" "^5.0.9" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/hdnode" "^5.0.8" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/pbkdf2" "^5.0.7" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/random" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + "@ethersproject/transactions" "^5.0.9" + aes-js "3.0.0" + scrypt-js "3.0.1" + "@ethersproject/json-wallets@5.0.7", "@ethersproject/json-wallets@^5.0.6": version "5.0.7" resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.0.7.tgz#4c48753b38ce7bce23a55f25c23f24617cf560e5" @@ -1152,11 +1306,24 @@ "@ethersproject/bytes" "^5.0.4" js-sha3 "0.5.7" +"@ethersproject/keccak256@5.0.8", "@ethersproject/keccak256@^5.0.7": + version "5.0.8" + resolved 
"https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.0.8.tgz#13aaf69e1c8bd15fc59a2ebd055c0878f2a059c8" + integrity sha512-zoGbwXcWWs9MX4NOAZ7N0hhgIRl4Q/IO/u9c/RHRY4WqDy3Ywm0OLamEV53QDwhjwn3YiiVwU1Ve5j7yJ0a/KQ== + dependencies: + "@ethersproject/bytes" "^5.0.9" + js-sha3 "0.5.7" + "@ethersproject/logger@5.0.6", "@ethersproject/logger@>=5.0.0-beta.129", "@ethersproject/logger@^5.0.5": version "5.0.6" resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.0.6.tgz#faa484203e86e08be9e07fef826afeef7183fe88" integrity sha512-FrX0Vnb3JZ1md/7GIZfmJ06XOAA8r3q9Uqt9O5orr4ZiksnbpXKlyDzQtlZ5Yv18RS8CAUbiKH9vwidJg1BPmQ== +"@ethersproject/logger@5.0.9", "@ethersproject/logger@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.0.9.tgz#0e6a0b3ecc938713016954daf4ac7967467aa763" + integrity sha512-kV3Uamv3XOH99Xf3kpIG3ZkS7mBNYcLDM00JSDtNgNB4BihuyxpQzIZPRIDmRi+95Z/R1Bb0X2kUNHa/kJoVrw== + "@ethersproject/networks@5.0.4", "@ethersproject/networks@^5.0.3": version "5.0.4" resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.0.4.tgz#6d320a5e15a0cda804f5da88be0ba846156f6eec" @@ -1164,6 +1331,13 @@ dependencies: "@ethersproject/logger" "^5.0.5" +"@ethersproject/networks@5.0.8", "@ethersproject/networks@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.0.8.tgz#37e6f8c058f2d540373ea5939056cd3de069132e" + integrity sha512-PYpptlO2Tu5f/JEBI5hdlMds5k1DY1QwVbh3LKPb3un9dQA2bC51vd2/gRWAgSBpF3kkmZOj4FhD7ATLX4H+DA== + dependencies: + "@ethersproject/logger" "^5.0.8" + "@ethersproject/pbkdf2@5.0.4", "@ethersproject/pbkdf2@^5.0.3": version "5.0.4" resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.0.4.tgz#a0841d53f5ce9a2b52a65a349d2dc15910b0a767" @@ -1172,6 +1346,14 @@ "@ethersproject/bytes" "^5.0.4" "@ethersproject/sha2" "^5.0.3" +"@ethersproject/pbkdf2@5.0.8", "@ethersproject/pbkdf2@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.0.8.tgz#06a086b1ac04c75e6846afd6cf6170a49a634411" + integrity sha512-UlmAMGbIPaS2xXsI38FbePVTfJMuU9jnwcqVn3p88HxPF4kD897ha+l3TNsBqJqf32UbQL5GImnf1oJkSKq4vQ== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/sha2" "^5.0.7" + "@ethersproject/properties@5.0.4", "@ethersproject/properties@>=5.0.0-beta.131", "@ethersproject/properties@^5.0.3", "@ethersproject/properties@^5.0.4": version "5.0.4" resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.0.4.tgz#a67a1f5a52c30850b5062c861631e73d131f666e" @@ -1179,6 +1361,13 @@ dependencies: "@ethersproject/logger" "^5.0.5" +"@ethersproject/properties@5.0.8", "@ethersproject/properties@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.0.8.tgz#e45d28d25402c73394873dbf058f856c966cae01" + integrity sha512-zEnLMze2Eu2VDPj/05QwCwMKHh506gpT9PP9KPVd4dDB+5d6AcROUYVLoIIQgBYK7X/Gw0UJmG3oVtnxOQafAw== + dependencies: + "@ethersproject/logger" "^5.0.8" + "@ethersproject/providers@5.0.14": version "5.0.14" resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.0.14.tgz#751ccb14b4a8c8e9e4be171818c23f4601be90ba" @@ -1204,6 +1393,31 @@ bech32 "1.1.4" ws "7.2.3" +"@ethersproject/providers@5.0.21": + version "5.0.21" + resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.0.21.tgz#04e6b7734637a82ea4df22ef49311f419fc5e3bd" + integrity 
sha512-KyH9TylyLqspbO/2C0ph+0ZpOnb/2GkKQtpcs7IyHZ/wHXdhbClLeaBdO0b4Fpo6zAZWjgIdN6WUOMGkyy7b6A== + dependencies: + "@ethersproject/abstract-provider" "^5.0.8" + "@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/address" "^5.0.9" + "@ethersproject/basex" "^5.0.7" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/hash" "^5.0.10" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/networks" "^5.0.7" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/random" "^5.0.7" + "@ethersproject/rlp" "^5.0.7" + "@ethersproject/sha2" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + "@ethersproject/transactions" "^5.0.9" + "@ethersproject/web" "^5.0.12" + bech32 "1.1.4" + ws "7.2.3" + "@ethersproject/random@5.0.4", "@ethersproject/random@^5.0.3": version "5.0.4" resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.0.4.tgz#98f7cf65b0e588cec39ef24843e391ed5004556f" @@ -1212,6 +1426,14 @@ "@ethersproject/bytes" "^5.0.4" "@ethersproject/logger" "^5.0.5" +"@ethersproject/random@5.0.8", "@ethersproject/random@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.0.8.tgz#8d3726be48e95467abce9b23c93adbb1de009dda" + integrity sha512-4rHtotmd9NjklW0eDvByicEkL+qareIyFSbG1ShC8tPJJSAC0g55oQWzw+3nfdRCgBHRuEE7S8EcPcTVPvZ9cA== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/rlp@5.0.4", "@ethersproject/rlp@^5.0.3": version "5.0.4" resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.0.4.tgz#0090a0271e84ea803016a112a79f5cfd80271a77" @@ -1220,6 +1442,14 @@ "@ethersproject/bytes" "^5.0.4" "@ethersproject/logger" "^5.0.5" +"@ethersproject/rlp@5.0.8", "@ethersproject/rlp@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.0.8.tgz#ff54e206d0ae28640dd054f2bcc7070f06f9dfbe" + integrity sha512-E4wdFs8xRNJfzNHmnkC8w5fPeT4Wd1U2cust3YeT16/46iSkLT8nn8ilidC6KhR7hfuSZE4UqSPzyk76p7cdZg== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/sha2@5.0.4", "@ethersproject/sha2@^5.0.3": version "5.0.4" resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.0.4.tgz#40f639721a27dbe034b3dee021ba20b054586fec" @@ -1229,6 +1459,15 @@ "@ethersproject/logger" "^5.0.5" hash.js "1.1.3" +"@ethersproject/sha2@5.0.8", "@ethersproject/sha2@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.0.8.tgz#9903c67e562739d8b312820b0a265b9c9bf35fc3" + integrity sha512-ILP1ZgyvDj4rrdE+AXrTv9V88m7x87uga2VZ/FeULKPumOEw/4bGnJz/oQ8zDnDvVYRCJ+48VaQBS2CFLbk1ww== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + hash.js "1.1.3" + "@ethersproject/signing-key@5.0.5", "@ethersproject/signing-key@^5.0.4": version "5.0.5" resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.0.5.tgz#acfd06fc05a14180df7e027688bbd23fc4baf782" @@ -1239,6 +1478,16 @@ "@ethersproject/properties" "^5.0.3" elliptic "6.5.3" +"@ethersproject/signing-key@5.0.9", "@ethersproject/signing-key@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.0.9.tgz#37e3038e26b53979d41dd90a2077fb0efd020fcc" + integrity sha512-AobnsEiLv+Z4a/NbbelwB/Lsnc+qxeNejXDlEwbo/nwjijvxLpwiNN+rjx/lQGel1QnQ/d+lEv7xezyUaXdKFQ== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" 
"^5.0.7" + elliptic "6.5.3" + "@ethersproject/solidity@5.0.5": version "5.0.5" resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.0.5.tgz#97a7d8a67f2d944f208c948fed0d565512bcc2be" @@ -1250,6 +1499,17 @@ "@ethersproject/sha2" "^5.0.3" "@ethersproject/strings" "^5.0.4" +"@ethersproject/solidity@5.0.9": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.0.9.tgz#49100fbe9f364ac56f7ff7c726f4f3d151901134" + integrity sha512-LIxSAYEQgLRXE3mRPCq39ou61kqP8fDrGqEeNcaNJS3aLbmAOS8MZp56uK++WsdI9hj8sNsFh78hrAa6zR9Jag== + dependencies: + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/sha2" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + "@ethersproject/strings@5.0.5", "@ethersproject/strings@>=5.0.0-beta.130", "@ethersproject/strings@^5.0.4": version "5.0.5" resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.0.5.tgz#ed7e99a282a02f40757691b04a24cd83f3752195" @@ -1259,6 +1519,30 @@ "@ethersproject/constants" "^5.0.4" "@ethersproject/logger" "^5.0.5" +"@ethersproject/strings@5.0.9", "@ethersproject/strings@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.0.9.tgz#8e2eb2918b140231e1d1b883d77e43213a8ac280" + integrity sha512-ogxBpcUpdO524CYs841MoJHgHxEPUy0bJFDS4Ezg8My+WYVMfVAOlZSLss0Rurbeeam8CpUVDzM4zUn09SU66Q== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/logger" "^5.0.8" + +"@ethersproject/transactions@5.0.10", "@ethersproject/transactions@^5.0.9": + version "5.0.10" + resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.0.10.tgz#d50cafd80d27206336f80114bc0f18bc18687331" + integrity sha512-Tqpp+vKYQyQdJQQk4M73tDzO7ODf2D42/sJOcKlDAAbdSni13v6a+31hUdo02qYXhVYwIs+ZjHnO4zKv5BNk8w== + dependencies: + "@ethersproject/address" "^5.0.9" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/rlp" "^5.0.7" + "@ethersproject/signing-key" "^5.0.8" + "@ethersproject/transactions@5.0.6", "@ethersproject/transactions@^5.0.0-beta.135", "@ethersproject/transactions@^5.0.5": version "5.0.6" resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.0.6.tgz#b8b27938be6e9ed671dbdd35fe98af8b14d0df7c" @@ -1274,6 +1558,15 @@ "@ethersproject/rlp" "^5.0.3" "@ethersproject/signing-key" "^5.0.4" +"@ethersproject/units@5.0.10": + version "5.0.10" + resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.0.10.tgz#9cca3b65cd0c92fab1bd33f2abd233546dd61987" + integrity sha512-eaiHi9ham5lbC7qpqxpae7OY/nHJUnRUnFFuEwi2VB5Nwe3Np468OAV+e+HR+jAK4fHXQE6PFBTxWGtnZuO37g== + dependencies: + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/units@5.0.6": version "5.0.6" resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.0.6.tgz#e1169ecffb7e8d5eab84e1481a4e35df19045708" @@ -1283,6 +1576,27 @@ "@ethersproject/constants" "^5.0.4" "@ethersproject/logger" "^5.0.5" +"@ethersproject/wallet@5.0.11": + version "5.0.11" + resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.0.11.tgz#9891936089d1b91e22ed59f850bc344b1544bf26" + integrity 
sha512-2Fg/DOvUltR7aZTOyWWlQhru+SKvq2UE3uEhXSyCFgMqDQNuc2nHXh1SHJtN65jsEbjVIppOe1Q7EQMvhmeeRw== + dependencies: + "@ethersproject/abstract-provider" "^5.0.8" + "@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/address" "^5.0.9" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/hash" "^5.0.10" + "@ethersproject/hdnode" "^5.0.8" + "@ethersproject/json-wallets" "^5.0.10" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/random" "^5.0.7" + "@ethersproject/signing-key" "^5.0.8" + "@ethersproject/transactions" "^5.0.9" + "@ethersproject/wordlists" "^5.0.8" + "@ethersproject/wallet@5.0.7": version "5.0.7" resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.0.7.tgz#9d4540f97d534e3d61548ace30f15857209b3f02" @@ -1304,6 +1618,17 @@ "@ethersproject/transactions" "^5.0.5" "@ethersproject/wordlists" "^5.0.4" +"@ethersproject/web@5.0.13", "@ethersproject/web@^5.0.12": + version "5.0.13" + resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.0.13.tgz#5a92ac6d835d2ebce95b6b645a86668736e2f532" + integrity sha512-G3x/Ns7pQm21ALnWLbdBI5XkW/jrsbXXffI9hKNPHqf59mTxHYtlNiSwxdoTSwCef3Hn7uvGZpaSgTyxs7IufQ== + dependencies: + "@ethersproject/base64" "^5.0.7" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + "@ethersproject/web@5.0.9", "@ethersproject/web@^5.0.6": version "5.0.9" resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.0.9.tgz#b08f8295f4bfd4777c8723fe9572f5453b9f03cb" @@ -1326,6 +1651,17 @@ "@ethersproject/properties" "^5.0.3" "@ethersproject/strings" "^5.0.4" +"@ethersproject/wordlists@5.0.9", "@ethersproject/wordlists@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.0.9.tgz#f16cc0b317637c3ae9c689ebd7bc2cbbffadd013" + integrity sha512-Sn6MTjZkfbriod6GG6+p43W09HOXT4gwcDVNj0YoPYlo4Zq2Fk6b1CU9KUX3c6aI17PrgYb4qwZm5BMuORyqyQ== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/hash" "^5.0.10" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + "@hapi/address@2.x.x": version "2.1.4" resolved "https://registry.yarnpkg.com/@hapi/address/-/address-2.1.4.tgz#5d67ed43f3fd41a69d4b9ff7b56e7c0d1d0a81e5" @@ -6374,6 +6710,42 @@ ethers@^5.0.0, ethers@^5.0.1, ethers@^5.0.12, ethers@^5.0.18, ethers@^5.0.19, et "@ethersproject/web" "5.0.9" "@ethersproject/wordlists" "5.0.5" +ethers@^5.0.26: + version "5.0.28" + resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.0.28.tgz#3c2a3d41b3639a427a822814870990cfcdd5e0a8" + integrity sha512-prYYCmZMGbrhP2PEXA2re5BpNPjaCP2y5gO1dh1i+fPxdkldQOk+0c0l8KlnxwUztKq4E40xpB0gyURdcAOaAg== + dependencies: + "@ethersproject/abi" "5.0.11" + "@ethersproject/abstract-provider" "5.0.9" + "@ethersproject/abstract-signer" "5.0.12" + "@ethersproject/address" "5.0.10" + "@ethersproject/base64" "5.0.8" + "@ethersproject/basex" "5.0.8" + "@ethersproject/bignumber" "5.0.14" + "@ethersproject/bytes" "5.0.10" + "@ethersproject/constants" "5.0.9" + "@ethersproject/contracts" "5.0.10" + "@ethersproject/hash" "5.0.11" + "@ethersproject/hdnode" "5.0.9" + "@ethersproject/json-wallets" "5.0.11" + "@ethersproject/keccak256" "5.0.8" + "@ethersproject/logger" "5.0.9" + "@ethersproject/networks" "5.0.8" + "@ethersproject/pbkdf2" "5.0.8" + "@ethersproject/properties" "5.0.8" + "@ethersproject/providers" "5.0.21" 
+ "@ethersproject/random" "5.0.8" + "@ethersproject/rlp" "5.0.8" + "@ethersproject/sha2" "5.0.8" + "@ethersproject/signing-key" "5.0.9" + "@ethersproject/solidity" "5.0.9" + "@ethersproject/strings" "5.0.9" + "@ethersproject/transactions" "5.0.10" + "@ethersproject/units" "5.0.10" + "@ethersproject/wallet" "5.0.11" + "@ethersproject/web" "5.0.13" + "@ethersproject/wordlists" "5.0.9" + ethjs-abi@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/ethjs-abi/-/ethjs-abi-0.2.0.tgz#d3e2c221011520fc499b71682036c14fcc2f5b25"